Commit 17c7e616 authored by Jan Klaus Rieck's avatar Jan Klaus Rieck

Merge branch '17-improve-pytest-coverage' into 'master'

Resolve "improve pytest coverage"

Closes #17

See merge request !17
parents b04db7f4 f8f34b70
Pipeline #3653 passed with stage
in 1 minute and 43 seconds
......@@ -35,20 +35,27 @@ def defdepth(dataset, depth):
# check if depth has an appropriate length
# then find the index that is closest to the desired
# minimum depth and assign it to k1
k1_c = int((np.abs(dataset['depth_c'].values+depth[0])).argmin())
k1_l = int((np.abs(dataset['depth_l'].values+depth[0])).argmin())
d = {}
if 'depth_c' in dataset:
k1_c = int((np.abs(dataset['depth_c'].values+depth[0])).argmin())
if 'depth_l' in dataset:
k1_l = int((np.abs(dataset['depth_l'].values+depth[0])).argmin())
if len(depth) == 2:
# if a max. depth is given in the argument
# k2 is defined
k2_c = int((np.abs(dataset['depth_c']+depth[1])).argmin())
k2_l = int((np.abs(dataset['depth_l']+depth[1])).argmin())
d = {'z_c': slice(k1_c, k2_c),
'z_l': slice(k1_l, k2_l)}
if 'depth_c' in dataset:
k2_c = int((np.abs(dataset['depth_c']+depth[1])).argmin())
d['z_c'] = slice(k1_c, k2_c)
if 'depth_l' in dataset:
k2_l = int((np.abs(dataset['depth_l']+depth[1])).argmin())
d['z_l'] = slice(k1_l, k2_l)
elif len(depth) == 1:
# if no max. depth is given, only one depth level
# will be extracted
d = {'z_c': k1_c,
'z_l': k1_l}
if 'depth_c' in dataset:
d['z_c'] = k1_c
if 'depth_l' in dataset:
d['z_l'] = k1_l
else:
raise ValueError('please provide 1 (single depth) or 2 '
+ '(depth range) values in keyword depth')
......@@ -279,8 +286,11 @@ def getbox(dataset, time=None, depth=None, y=None, x=None, surf=False):
x = np.array(x) if x else np.array([-50, -80])
# make sure the halo points of the global ORCA grid are dropped
ds = dataset.isel(x_c=slice(1, len(dataset['x_c']) - 1),
x_r=slice(1, len(dataset['x_r']) - 1))
if 'x_c' in dataset:
dataset = dataset.isel(x_c=slice(1, len(dataset['x_c']) - 1))
if 'x_r' in dataset:
dataset = dataset.isel(x_r=slice(1, len(dataset['x_r']) - 1))
ds = dataset
# extract the time slice
ds = ds.sel(t=slice(time[0], time[1]))
......@@ -300,8 +310,9 @@ def getbox(dataset, time=None, depth=None, y=None, x=None, surf=False):
# define possible grids
grids = possible_grids(ds)
if 'W' in grids:
del grids['W'] # do not need W for getbox...
if ('W' in grids) and ('T' not in grids) and ('wmask' not in ds):
raise ValueError('no mask for grid W in dataset'
+ 'and no other grid to create mask from')
# define empty dataset to write into
ds_sel = xr.Dataset()
......@@ -317,6 +328,22 @@ def getbox(dataset, time=None, depth=None, y=None, x=None, surf=False):
Xroll[grid['x']] = int(Xroll_argmax.max())
# apply the mask (defined on the current grid) only to
# variables that share its grid
if gr == 'W':
m = ds[grids['T']['mask']].isel(z_c=0)
m3 = ds[grids['T']['mask']]
# need to construct wmask, because it's not in the output
m = xr.DataArray(m.data, dims=['y_c', 'x_c'])
diff_z = int(len(ds['z_l']) - len(ds['z_c']))
if diff_z > 0:
m3 = xr.DataArray(np.vstack((m3.data[0:1, ...], m3.data)),
dims=['z_l', 'y_c', 'x_c'])
elif diff_z < 0:
m3 = xr.DataArray(m3.data[0:diff_z],
dims=['z_l', 'y_c', 'x_c'])
else:
m3 = xr.DataArray(m3.data,
dims=['z_l', 'y_c', 'x_c'])
ds = ds.update({'wmask': m3}).set_coords('wmask')
if grid['box_mask'] in ds.coords:
ds[grid['box_mask']] = ds.coords[grid['box_mask']]
else:
......@@ -367,7 +394,8 @@ def is_leap_year(year):
bool
"""
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
leap = year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
return leap
def timeave(ds, monthly=False, std_count=False):
......@@ -462,9 +490,6 @@ def depthave(ds, std_count=False):
if ('z_c' in ds.dims) or ('z_l' in ds.dims):
argu = {'dim': grid['z'], 'skipna': True, 'keep_attrs': True}
arg_count = {'dim': grid['z'], 'keep_attrs': True}
elif ('z_c' in ds.dims) and ('z_l' in ds.dims):
argu = {'dim': ('z_c', 'z_l'), 'skipna': True, 'keep_attrs': True}
arg_count = {'dim': ('z_c', 'z_l'), 'keep_attrs': True}
else:
raise ValueError('no depth dimension!'
+ ' (looking for `z_c` and/or `z_l`)')
......@@ -910,7 +935,7 @@ def depthsum(ds, std_count=False):
std = xr.Dataset()
count = xr.Dataset()
grids = possible_grids(ds)
if any([e not in ds.coords for e in ['e3t', 'e3u', 'e3v', 'e3f', 'e3w']]):
if all([e not in ds.coords for e in ['e3t', 'e3u', 'e3v', 'e3f', 'e3w']]):
raise ValueError('no `e3?` found in dataset for weighting of the'
+ ' vertical average!')
for gr in grids:
......@@ -918,9 +943,6 @@ def depthsum(ds, std_count=False):
if ('z_c' in ds.dims) or ('z_l' in ds.dims):
argu = {'dim': grid['z'], 'skipna': True, 'keep_attrs': True}
arg_count = {'dim': grid['z'], 'keep_attrs': True}
elif ('z_c' in ds.dims) and ('z_l' in ds.dims):
argu = {'dim': ('z_c', 'z_l'), 'skipna': True, 'keep_attrs': True}
arg_count = {'dim': ('z_c', 'z_l'), 'keep_attrs': True}
else:
raise ValueError('no depth dimension!'
+ ' (looking for `z_c` and/or `z_l`)')
......@@ -968,7 +990,8 @@ def depthsum(ds, std_count=False):
std[v] = ds[v].std(**argu).where(m > 0)
count[v] = ds[v].where(m3 > 0).count(**arg_count)
# remove box_wmask again, because it is not a real wmask
ds_sel = ds_sel.reset_coords('box_wmask').drop('box_wmask')
if 'box_wmask' in ds_sel.coords:
ds_sel = ds_sel.reset_coords('box_wmask').drop('box_wmask')
return ds_sel, std, count
......
......@@ -10,7 +10,7 @@ ds = xr.open_dataset(filename)
# construct a dummy xgcm compatible xarray.DataSet
time_start = np.datetime64('2000-01-01T00:00:00', 's')
time_end = np.datetime64('2000-06-30T00:00:00', 's')
time_end = np.datetime64('2000-12-31T00:00:00', 's')
time_delta = (time_end - time_start) / (11)
t = time_start + np.arange(0, 12) * time_delta
z_c = np.arange(1, 47)
......@@ -84,6 +84,83 @@ ds = xr.Dataset({'dummy_0cc': (['t', 'y_c', 'x_c'], dummy_3Dt),
'vmask': (['z_c', 'y_r', 'x_c'], tmask),
'fmask': (['z_c', 'y_r', 'x_r'], tmask)})
ds_z_c = xr.Dataset({'dummy_0cc': (['t', 'y_c', 'x_c'], dummy_3Dt),
'dummy_ccc': (['t', 'z_c', 'y_c', 'x_c'], dummy_4D),
'dummy_ccr': (['t', 'z_c', 'y_c', 'x_r'], dummy_4D),
'dummy_crc': (['t', 'z_c', 'y_r', 'x_c'], dummy_4D),
'dummy_crr': (['t', 'z_c', 'y_r', 'x_r'], dummy_4D)},
coords={'t': (['t'], t),
'z_c': (['z_c'], z_c),
'y_c': (['y_c'], y_c),
'y_r': (['y_r'], y_r),
'x_c': (['x_c'], x_c),
'x_r': (['x_r'], x_r),
'depth_c': (['z_c'], depth_c),
'llat_cc': (['y_c', 'x_c'], llat_cc),
'llat_cr': (['y_c', 'x_r'], llat_cr),
'llat_rc': (['y_r', 'x_c'], llat_rc),
'llat_rr': (['y_r', 'x_r'], llat_rr),
'llon_cc': (['y_c', 'x_c'], llon_cc),
'llon_cr': (['y_c', 'x_r'], llon_cr),
'llon_rc': (['y_r', 'x_c'], llon_rc),
'llon_rr': (['y_r', 'x_r'], llon_rr),
'e1t': (['y_c', 'x_c'], dummy_e2D),
'e2t': (['y_c', 'x_c'], dummy_e2D),
'e3t': (['z_c', 'y_c', 'x_c'], dummy_e3D),
'e1u': (['y_c', 'x_r'], dummy_e2D),
'e2u': (['y_c', 'x_r'], dummy_e2D),
'e3u': (['z_c', 'y_c', 'x_r'], dummy_e3D),
'e1v': (['y_r', 'x_c'], dummy_e2D),
'e2v': (['y_r', 'x_c'], dummy_e2D),
'e3v': (['z_c', 'y_r', 'x_c'], dummy_e3D),
'e1f': (['y_r', 'x_r'], dummy_e2D),
'e2f': (['y_r', 'x_r'], dummy_e2D),
'tmask': (['z_c', 'y_c', 'x_c'], tmask),
'umask': (['z_c', 'y_c', 'x_r'], tmask),
'vmask': (['z_c', 'y_r', 'x_c'], tmask),
'fmask': (['z_c', 'y_r', 'x_r'], tmask)})
ds_z_l = xr.Dataset({'dummy_0cc': (['t', 'y_c', 'x_c'], dummy_3Dt),
'dummy_lcc': (['t', 'z_l', 'y_c', 'x_c'], dummy_4D)},
coords={'t': (['t'], t),
'z_l': (['z_l'], z_l),
'y_c': (['y_c'], y_c),
'x_c': (['x_c'], x_c),
'depth_l': (['z_l'], depth_l),
'llat_cc': (['y_c', 'x_c'], llat_cc),
'llon_cc': (['y_c', 'x_c'], llon_cc),
'e1t': (['y_c', 'x_c'], dummy_e2D),
'e2t': (['y_c', 'x_c'], dummy_e2D),
'e3w': (['z_l', 'y_c', 'x_c'], dummy_e3D)})
ds_2d = xr.Dataset({'dummy_0cc': (['t', 'y_c', 'x_c'], dummy_3Dt),
'dummy_ccc': (['t', 'y_c', 'x_c'], dummy_3Dt),
'dummy_ccr': (['t', 'y_c', 'x_r'], dummy_3Dt),
'dummy_crc': (['t', 'y_r', 'x_c'], dummy_3Dt),
'dummy_crr': (['t', 'y_r', 'x_r'], dummy_3Dt)},
coords={'t': (['t'], t),
'y_c': (['y_c'], y_c),
'y_r': (['y_r'], y_r),
'x_c': (['x_c'], x_c),
'x_r': (['x_r'], x_r),
'depth_c': (['z_c'], depth_c),
'llat_cc': (['y_c', 'x_c'], llat_cc),
'llat_cr': (['y_c', 'x_r'], llat_cr),
'llat_rc': (['y_r', 'x_c'], llat_rc),
'llat_rr': (['y_r', 'x_r'], llat_rr),
'llon_cc': (['y_c', 'x_c'], llon_cc),
'llon_cr': (['y_c', 'x_r'], llon_cr),
'llon_rc': (['y_r', 'x_c'], llon_rc),
'llon_rr': (['y_r', 'x_r'], llon_rr),
'e1t': (['y_c', 'x_c'], dummy_e2D),
'e2t': (['y_c', 'x_c'], dummy_e2D),
'e1u': (['y_c', 'x_r'], dummy_e2D),
'e2u': (['y_c', 'x_r'], dummy_e2D),
'e1v': (['y_r', 'x_c'], dummy_e2D),
'e2v': (['y_r', 'x_c'], dummy_e2D),
'e1f': (['y_r', 'x_r'], dummy_e2D),
'e2f': (['y_r', 'x_r'], dummy_e2D)})
expected_depths = {}
expected_depths['1'] = {'z_c': 9, 'z_l': 10}
expected_depths['2'] = {'z_c': slice(17, 23, None), 'z_l': slice(18, 24, None)}
......@@ -151,13 +228,24 @@ def test_checklatlon_raises_valueerror(x, y):
xbox.checklatlon(ds['llat_cc'], ds['llon_cc'], y, x)
def test_possible_grids():
@pytest.mark.parametrize('ds_in',
                         [ds, ds_z_c, ds_z_l])
def test_possible_grids(ds_in):
    """ Test possible_grids().

    Checks that a dict of grids is returned and that the grids
    expected for the depth dimensions present in `ds_in` are defined.
    """
    grids = xbox.possible_grids(ds_in)
    assert isinstance(grids, dict),\
        'defining of grid dictionary failed'
    if ('z_c' in ds_in.dims) and ('z_l' in ds_in.dims):
        assert ('T' in grids) and ('W' in grids),\
            'not all possible grids were defined'
    elif 'z_c' in ds_in.dims:
        # FIX: `('T' and 'U' and 'V' and 'F' in grids)` evaluated to
        # `'F' in grids` only ('T', 'U', 'V' are truthy constants);
        # every grid must be checked for membership individually.
        assert all(g in grids for g in ('T', 'U', 'V', 'F')),\
            'not all possible grids were defined'
    elif 'z_l' in ds_in.dims:
        assert 'W' in grids,\
            'not all possible grids were defined'
def test_inner_broadcasting_possible():
......@@ -195,6 +283,107 @@ def test_construct_ef(ds_sel):
'e3f has a depth dimension'
@pytest.mark.parametrize('year',
                         [1980, 1981])
def test_is_leap_year(year):
    """ Test is_leap_year().

    1980 is a leap year, 1981 is not.
    """
    leap = xbox.is_leap_year(year)
    expect_leap = (year == 1980)
    if expect_leap:
        assert leap,\
            'leap year detection failed'
    else:
        assert not leap,\
            'false detection of leap year'
@pytest.mark.parametrize('monthly',
                         [True, False])
def test_timeave(monthly):
    """ Test timeave().

    Checks that neither a `t` nor a `month` dimension remains
    after time averaging.
    """
    t_ave = xbox.timeave(ds_2d, monthly=monthly)[0]
    # FIX: `assert 't' and 'month' not in t_ave.dims` only tested the
    # 'month' dimension ('t' is a truthy constant); test both dims.
    assert ('t' not in t_ave.dims) and ('month' not in t_ave.dims),\
        'time averaging did not work'
def test_timeave_raises_keyerror():
    """ Test whether timeave() raises key error.
    """
    # averaging over `t` removes the time dimension, so a subsequent
    # call to timeave() must fail with a KeyError
    no_time = ds_z_l.mean('t')
    with pytest.raises(KeyError):
        xbox.timeave(no_time)
@pytest.mark.parametrize('monthly',
                         [True, False])
def test_timesum(monthly):
    """ Test timesum().

    Checks that neither a `t` nor a `month` dimension remains
    after summing over time.
    """
    t_sum = xbox.timesum(ds_2d, monthly=monthly)[0]
    # FIX: `assert 't' and 'month' not in t_sum.dims` only tested the
    # 'month' dimension ('t' is a truthy constant); test both dims.
    assert ('t' not in t_sum.dims) and ('month' not in t_sum.dims),\
        'time summing did not work'
def test_timesum_raises_keyerror():
    """ Test whether timesum() raises key error.
    """
    # summing over `t` removes the time dimension, so a subsequent
    # call to timesum() must fail with a KeyError
    no_time = ds_z_l.sum('t')
    with pytest.raises(KeyError):
        xbox.timesum(no_time)
@pytest.mark.parametrize('ds_in',
                         [ds, ds_z_c])
@pytest.mark.parametrize('dep',
                         [[500, 1000], [530, 990]])
def test_depthave(ds_in, dep):
    """ Test depthave().

    Checks that no depth dimension remains after depth averaging.
    """
    ds_a = xbox.getbox(ds_in, time=['2000-01-01', '2000-06-30'],
                       depth=dep, x=[10, 20], y=[10, 20])
    ds_d_ave = xbox.depthave(ds_a)[0]
    # FIX: `assert 'z_c' and 'z_l' not in ds_d_ave.dims` only tested
    # the 'z_l' dimension ('z_c' is a truthy constant); test both dims.
    assert ('z_c' not in ds_d_ave.dims) and ('z_l' not in ds_d_ave.dims),\
        'depth averaging failed'
def test_depthave_raises_valueerror():
    """ Test whether depthave() raises value error.
    """
    # ds_2d has no depth dimension, so depthave() must refuse it
    with pytest.raises(ValueError):
        xbox.depthave(ds_2d)
@pytest.mark.parametrize('ds_in',
                         [ds, ds_z_c])
@pytest.mark.parametrize('dep',
                         [[500, 1000], [530, 990]])
def test_depthsum(ds_in, dep):
    """ Test depthsum().

    Checks that no depth dimension remains after summing over depth.
    """
    ds_s = xbox.getbox(ds_in, time=['2000-01-01', '2000-06-30'],
                       depth=dep, x=[10, 20], y=[10, 20])
    ds_d_sum = xbox.depthsum(ds_s)[0]
    # FIX: `assert 'z_c' and 'z_l' not in ds_d_sum.dims` only tested
    # the 'z_l' dimension ('z_c' is a truthy constant); test both dims.
    assert ('z_c' not in ds_d_sum.dims) and ('z_l' not in ds_d_sum.dims),\
        'depth summing failed'
def test_depthsum_raises_valueerror():
    """ Test whether depthsum() raises value error.
    """
    # ds_2d has no depth dimension, so depthsum() must refuse it
    with pytest.raises(ValueError):
        xbox.depthsum(ds_2d)
@pytest.mark.parametrize('time',
[['2000-01-01', '2000-01-31'],
['2000-01-01', '2000-06-30']])
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment