Merge pull request #28 from NOAA-PSL/develop
chiaweh2 authored May 1, 2024
2 parents 11d2af4 + f034e76 commit aa18d32
Showing 5 changed files with 699 additions and 2,245 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/gha_pytest_pr.yml
@@ -26,4 +26,4 @@ jobs:

- name: pytesting
shell: bash -l {0}
-       run: pytest
+       run: pytest --location opendap
2 changes: 1 addition & 1 deletion .github/workflows/gha_pytest_push.yml
@@ -26,4 +26,4 @@ jobs:

- name: pytesting
shell: bash -l {0}
-       run: pytest
+       run: pytest --location opendap
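
Both workflows now invoke pytest with a custom --location opendap flag so the CI test suite pulls data over OPeNDAP rather than from local files. pytest does not provide --location out of the box; it has to be registered in the repository's conftest.py. Below is a minimal sketch of how such an option is typically wired up; the default value and fixture name are assumptions, not taken from this repository.

# conftest.py -- minimal sketch of registering the --location flag.
import pytest

def pytest_addoption(parser):
    # Register the command-line flag the workflows pass to pytest.
    parser.addoption(
        '--location',
        action='store',
        default='local',  # assumed default; not confirmed by the diff
        help="data source for tests: 'local' or 'opendap'",
    )

@pytest.fixture
def location(request):
    # Tests request this fixture to decide which data source to read.
    return request.config.getoption('--location')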
33 changes: 22 additions & 11 deletions mom6/mom6_module/mom6_io.py
@@ -195,21 +195,24 @@ def get_all(self) -> xr.Dataset:
raise OSError('for raw grid please input the path to grid file')
else:
ds_static = MOM6Static.get_grid(self.static_relative_dir)
+ # setup chunk
+ io_chunk = {}
elif self.source == 'opendap':
file_list = OpenDapStore(grid=self.grid,data_type='forecast').get_catalog()
for file in file_list:
var_flag = 'static' in file
if var_flag :
ds_static = xr.open_dataset(file)
+ io_chunk = {'init': 4,'member':1,'lead':-1}

- file_read = [file for file in file_list if self.var in file]
+ file_read = [file for file in file_list if f'{self.var}_' in file]

# merge the static field with the variables
ds = xr.open_mfdataset(
file_read,
combine='nested',
concat_dim='init',
- chunks={'init': 4,'member':1,'lead':-1}
+ chunks=io_chunk
).sortby('init')
ds = xr.merge([ds_static,ds])
# ds = ds.isel(init=slice(1,None)) # exclude the 1980 empty field due to merge
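
The recurring change in mom6_io.py replaces hard-coded chunks arguments with an io_chunk dict chosen per data source: empty for local raw files, explicit dask chunks for OPeNDAP, where -1 means one chunk spanning the whole dimension. A standalone sketch of the pattern under those assumptions (the function name and argument names are hypothetical):

import xarray as xr

def open_forecast(file_read, source):
    # Local reads are cheap, so an empty dict lets xarray pick the chunking;
    # OPeNDAP transfers get explicit dask chunks (-1 = whole dimension).
    if source == 'opendap':
        io_chunk = {'init': 4, 'member': 1, 'lead': -1}
    else:
        io_chunk = {}
    return xr.open_mfdataset(
        file_read,
        combine='nested',
        concat_dim='init',
        chunks=io_chunk,
    ).sortby('init')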
@@ -232,14 +235,16 @@ def get_all(self) -> xr.Dataset:
else:
mom6_dir = os.path.join(DATA_PATH,self.data_relative_dir)
file_list = glob.glob(f'{mom6_dir}/*.nc')
+ io_chunk = {}
elif self.source == 'opendap':
file_list = OpenDapStore(grid=self.grid,data_type='forecast').get_catalog()
+ io_chunk = {'init': 1,'member':1,'lead':-1}

- file_read = [file for file in file_list if self.var in file]
+ file_read = [file for file in file_list if f'{self.var}_' in file]
ds = xr.open_mfdataset(
file_read,combine='nested',
concat_dim='init',
- chunks={'init': 1,'member':1,'lead':1}
+ chunks=io_chunk
).sortby('init')

# test if a raw file was accidentally read
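
The second recurring fix tightens the filename filter: matching self.var as a bare substring can pick up files for other variables whose names merely contain it, so the forecast readers now require a trailing underscore. A small illustration with hypothetical catalog names:

# Hypothetical entries: 'so' (salinity) is a substring of 'sos' (surface salinity).
file_list = ['so_forecast.nc', 'sos_forecast.nc']
var = 'so'

loose  = [f for f in file_list if var in f]         # both files match
strict = [f for f in file_list if f'{var}_' in f]   # only 'so_forecast.nc'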
@@ -288,15 +293,17 @@ def get_tercile(
raise OSError('for raw grid please input the path to grid file')
else:
ds_static = MOM6Static.get_grid(self.static_relative_dir)
+ io_chunk = {}
elif self.source == 'opendap':
file_list = OpenDapStore(grid=self.grid,data_type='forecast').get_catalog()
for file in file_list:
var_flag = 'static' in file
if var_flag :
ds_static = xr.open_dataset(file)
+ io_chunk = {'init': 4,'member':1,'lead':-1}

# refine based on var name
- file_read = [file for file in file_list if self.var in file]
+ file_read = [file for file in file_list if f'{self.var}_' in file]
# refine based on region
if average_type == 'grid':
file_read = [file for file in file_read if '.region.' not in file]
@@ -308,7 +315,7 @@ def get_tercile(
file_read,
combine='nested',
concat_dim='init',
- chunks={'init': 4,'member':1,'lead':-1}
+ chunks=io_chunk
).sortby('init')
ds = xr.merge([ds_static,ds])
# ds = ds.isel(init=slice(1,None)) # exclude the 1980 empty field due to merge
@@ -331,10 +338,12 @@ def get_tercile(
else:
mom6_dir = os.path.join(DATA_PATH,self.tercile_relative_dir)
file_list = glob.glob(f'{mom6_dir}/*.nc')
+ io_chunk = {}
elif self.source == 'opendap':
file_list = OpenDapStore(grid=self.grid,data_type='forecast').get_catalog()
+ io_chunk = {'init': 4,'member':1,'lead':-1}

- file_read = [file for file in file_list if self.var in file]
+ file_read = [file for file in file_list if f'{self.var}_' in file]

# refine based on region
if average_type == 'grid':
@@ -345,7 +354,7 @@ def get_tercile(
ds = xr.open_mfdataset(
file_read,combine='nested',
concat_dim='init',
- chunks={'init': 1,'member':1,'lead':1}
+ chunks=io_chunk
).sortby('init')

# test if a raw file was accidentally read
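
get_tercile also refines the catalog by averaging type: gridded tercile files are told apart from regionally averaged ones by a '.region.' token in the name. A short sketch of that refinement; the non-grid branch is an assumption, since the diff only shows the grid case.

def refine_by_average_type(file_read, average_type):
    # Grid-level terciles exclude regionally averaged files; presumably
    # the other branch keeps only files carrying the '.region.' token.
    if average_type == 'grid':
        return [f for f in file_read if '.region.' not in f]
    return [f for f in file_read if '.region.' in f]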
@@ -538,20 +547,22 @@ def get_all(self) -> xr.Dataset:
raise IOError('for raw grid please input the path to grid file')
else:
ds_static = MOM6Static.get_grid(self.static_relative_dir)
+ io_chunk = {}
elif self.source == 'opendap':
file_list = OpenDapStore(grid=self.grid,data_type='historical').get_catalog()
for file in file_list:
var_flag = 'static' in file
if var_flag :
ds_static = xr.open_dataset(file)
+ io_chunk = {'time': 100}

- file_read = [file for file in file_list if self.var in file]
+ file_read = [file for file in file_list if f'.{self.var}.' in file]

# merge the static field with the variables
ds = xr.open_mfdataset(
file_read,combine='nested',
concat_dim='time',
- chunks={'time': 100}
+ chunks=io_chunk
).sortby('time')
ds = xr.merge([ds_static,ds])
ds = ds.isel(time=slice(1,None)) # exclude the 1980 empty field due to merge
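
The historical readers apply the same filename tightening with a different delimiter: the variable is matched as '.var.' rather than 'var_', which suggests dot-separated historical file names. A sketch of the two conventions side by side; the example file names are assumptions based on the patterns in the diff.

def match_forecast(file_list, var):
    # Forecast files embed the variable with a trailing underscore,
    # e.g. 'tos_forecast.nc' (hypothetical name).
    return [f for f in file_list if f'{var}_' in f]

def match_historical(file_list, var):
    # Historical files appear to delimit the variable with dots,
    # e.g. 'ocean_monthly.tos.nc' (hypothetical name).
    return [f for f in file_list if f'.{var}.' in f]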
@@ -577,7 +588,7 @@ def get_all(self) -> xr.Dataset:
elif self.source == 'opendap':
file_list = OpenDapStore(grid=self.grid,data_type='historical').get_catalog()

- file_read = [file for file in file_list if self.var in file]
+ file_read = [file for file in file_list if f'.{self.var}.' in file]
ds = xr.open_mfdataset(
file_read,
combine='nested',
2,350 changes: 118 additions & 2,232 deletions mom6/notebook/gulf_stream_index.ipynb

Large diffs are not rendered by default.

557 changes: 557 additions & 0 deletions mom6/notebook/nechannel_index.ipynb

Large diffs are not rendered by default.
