Skip to content

Commit

Permalink
Merge pull request #1868 from djhoese/bugfix-mirs-dask-compat
Browse files Browse the repository at this point in the history
Fix MiRS reader not working with new versions of dask
  • Loading branch information
djhoese authored Nov 3, 2021
2 parents 7876137 + bdb733e commit d16c73a
Show file tree
Hide file tree
Showing 4 changed files with 60 additions and 24 deletions.
2 changes: 1 addition & 1 deletion satpy/readers/amsr2_l1b.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def get_metadata(self, ds_id, ds_info):
"shape": self.get_shape(ds_id, ds_info),
"units": self[var_path + "/attr/UNIT"],
"platform_name": self["/attr/PlatformShortName"],
-        "sensor": self["/attr/SensorShortName"],
+        "sensor": self["/attr/SensorShortName"].lower(),
"start_orbit": int(self["/attr/StartOrbitNumber"]),
"end_orbit": int(self["/attr/StopOrbitNumber"]),
})
Expand Down
25 changes: 13 additions & 12 deletions satpy/readers/mirs.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def read_atms_limb_correction_coefficients(fn):
n_chn = 22
n_fov = 96
# make the string a generator
-    coeff_str = (line.strip() for line in coeff_str)
+    coeff_lines = (line.strip() for line in coeff_str)

all_coeffs = np.zeros((n_chn, n_fov, n_chn), dtype=np.float32)
all_amean = np.zeros((n_chn, n_fov, n_chn), dtype=np.float32)
Expand All @@ -107,14 +107,17 @@ def read_atms_limb_correction_coefficients(fn):
# There should be 22 sections
for chan_idx in range(n_chn):
# blank line at the start of each section
-        _ = next(coeff_str)
+        _ = next(coeff_lines)
# section header
-        _nx, nchx, dmean = [x.strip() for x in next(coeff_str).split(" ") if x]
+        next_line = next(coeff_lines)
+        _nx, nchx, dmean = [x.strip() for x in next_line.split(" ") if x]
all_nchx[chan_idx] = nchx = int(nchx)
all_dmean[chan_idx] = float(dmean)

# coeff locations (indexes to put the future coefficients in)
-        locations = [int(x.strip()) for x in next(coeff_str).split(" ") if x]
+        next_line = next(coeff_lines)
+        locations = [int(x.strip()) for x in next_line.split(" ") if x]
if len(locations) != nchx:
raise RuntimeError
for x in range(nchx):
Expand All @@ -123,7 +126,7 @@ def read_atms_limb_correction_coefficients(fn):
# Read 'nchx' coefficients for each of 96 FOV
for fov_idx in range(n_fov):
# chan_num, fov_num, *coefficients, error
-            coeff_line_parts = [x.strip() for x in next(coeff_str).split(" ") if x][2:]
+            coeff_line_parts = [x.strip() for x in next(coeff_lines).split(" ") if x][2:]
coeffs = [float(x) for x in coeff_line_parts[:nchx]]
ameans = [float(x) for x in coeff_line_parts[nchx:-1]]
# not used but nice to know the purpose of the last column.
Expand Down Expand Up @@ -161,7 +164,7 @@ def get_coeff_by_sfc(coeff_fn, bt_data, idx):
c_size = bt_data[idx, :, :].chunks
correction = da.map_blocks(apply_atms_limb_correction,
bt_data, idx,
-                               *sfc_coeff, chunks=c_size)
+                               *sfc_coeff, chunks=c_size, meta=np.array((), dtype=bt_data.dtype))
return correction


Expand Down Expand Up @@ -279,15 +282,13 @@ def force_date(self, key):
"""Force datetime.date for combine."""
if isinstance(self.filename_info[key], datetime.datetime):
return self.filename_info[key].date()
-        else:
-            return self.filename_info[key]
+        return self.filename_info[key]

def force_time(self, key):
"""Force datetime.time for combine."""
if isinstance(self.filename_info.get(key), datetime.datetime):
return self.filename_info.get(key).time()
-        else:
-            return self.filename_info.get(key)
+        return self.filename_info.get(key)

@property
def _get_coeff_filenames(self):
Expand Down Expand Up @@ -320,9 +321,9 @@ def _nan_for_dtype(data_arr_dtype):
# if we don't have to
if data_arr_dtype.type == np.float32:
return np.float32(np.nan)
-    elif np.issubdtype(data_arr_dtype, np.timedelta64):
+    if np.issubdtype(data_arr_dtype, np.timedelta64):
return np.timedelta64('NaT')
-    elif np.issubdtype(data_arr_dtype, np.datetime64):
+    if np.issubdtype(data_arr_dtype, np.datetime64):
return np.datetime64('NaT')
return np.nan

Expand Down
2 changes: 2 additions & 0 deletions satpy/tests/reader_tests/test_amsr2_l1b.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,8 @@ def test_load_basic(self):
(DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2))
self.assertTupleEqual(d.attrs['area'].lats.shape,
(DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2))
assert d.attrs['sensor'] == 'amsr2'
assert d.attrs['platform_name'] == 'GCOM-W1'

def test_load_89ghz(self):
"""Test loading of 89GHz channels."""
Expand Down
55 changes: 44 additions & 11 deletions satpy/tests/reader_tests/test_mirs.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@

EXAMPLE_FILES = [METOP_FILE, NPP_MIRS_L2_SWATH, OTHER_MIRS_L2_SWATH]

-N_CHANNEL = 3
+N_CHANNEL = 22
N_FOV = 96
N_SCANLINE = 100
DEFAULT_FILE_DTYPE = np.float64
Expand All @@ -42,16 +42,20 @@
dtype=DEFAULT_FILE_DTYPE)
DEFAULT_LON = np.linspace(127.6879, 144.5284, N_SCANLINE * N_FOV,
dtype=DEFAULT_FILE_DTYPE)
-FREQ = xr.DataArray([88, 88, 22], dims='Channel',
+FREQ = xr.DataArray([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5,
+                     57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5,
+                     183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL],
+                    dims='Channel',
attrs={'description': "Central Frequencies (GHz)"})
-POLO = xr.DataArray([2, 2, 3], dims='Channel',
+POLO = xr.DataArray([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3,
+                     3, 3, 3][:N_CHANNEL], dims='Channel',
attrs={'description': "Polarizations"})

DS_IDS = ['RR', 'longitude', 'latitude']
-TEST_VARS = ['btemp_88v1', 'btemp_88v2',
-             'btemp_22h', 'RR', 'Sfc_type']
-DEFAULT_UNITS = {'btemp_88v1': 'K', 'btemp_88v2': 'K',
-                 'btemp_22h': 'K', 'RR': 'mm/hr', 'Sfc_type': "1"}
+TEST_VARS = ['btemp_88v', 'btemp_165h',
+             'btemp_23v', 'RR', 'Sfc_type']
+DEFAULT_UNITS = {'btemp_88v': 'K', 'btemp_165h': 'K',
+                 'btemp_23v': 'K', 'RR': 'mm/hr', 'Sfc_type': "1"}
PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"}
SENSOR = {"m2": "amsu-mhs", "npp": "atms", "gpm": "GPI"}

Expand All @@ -62,18 +66,46 @@
def fake_coeff_from_fn(fn):
"""Create Fake Coefficients."""
ameans = np.random.uniform(261, 267, N_CHANNEL)
-    all_nchx = np.linspace(2, 3, N_CHANNEL, dtype=np.int32)
+    locations = [
+        [1, 2],
+        [1, 2],
+        [3, 4, 5],
+        [3, 4, 5],
+        [4, 5, 6],
+        [5, 6, 7],
+        [6, 7, 8],
+        [7, 8],
+        [9, 10, 11],
+        [10, 11],
+        [10, 11, 12],
+        [11, 12, 13],
+        [12, 13],
+        [12, 13, 14],
+        [14, 15],
+        [1, 16],
+        [17, 18],
+        [18, 19],
+        [18, 19, 20],
+        [19, 20, 21],
+        [20, 21, 22],
+        [21, 22],
+    ]
+    all_nchx = [len(loc) for loc in locations]

coeff_str = []
-    for idx in range(1, N_CHANNEL):
+    for idx in range(1, N_CHANNEL + 1):
nx = idx - 1
coeff_str.append('\n')
next_line = ' {} {} {}\n'.format(idx, all_nchx[nx], ameans[nx])
coeff_str.append(next_line)
+        next_line = ' {}\n'.format(" ".join([str(x) for x in locations[idx - 1]]))
+        coeff_str.append(next_line)
for fov in range(1, N_FOV+1):
-            random_coeff = np.random.rand(all_nchx[nx])
+            random_coeff = np.ones(all_nchx[nx])
str_coeff = ' '.join([str(x) for x in random_coeff])
-            random_means = np.random.uniform(261, 267, all_nchx[nx])
+            random_means = np.zeros(all_nchx[nx])
str_means = ' '.join([str(x) for x in random_means])
error_val = np.random.uniform(0, 4)
coeffs_line = ' {:>2} {:>2} {} {} {}\n'.format(idx, fov,
Expand Down Expand Up @@ -259,7 +291,7 @@ def _check_valid_range(data_arr, test_valid_range):
@staticmethod
def _check_fill_value(data_arr, test_fill_value):
assert '_FillValue' not in data_arr.attrs
-        assert test_fill_value not in data_arr.data
+        assert not (data_arr.data == test_fill_value).any()

@staticmethod
def _check_attrs(data_arr, platform_name):
Expand Down Expand Up @@ -293,10 +325,11 @@ def test_basic_load(self, filenames, loadable_ids,
fd, mock.patch('satpy.readers.mirs.retrieve'):
fd.side_effect = fake_coeff_from_fn
loaded_data_arrs = r.load(loadable_ids)
-            assert loaded_data_arrs
+            assert len(loaded_data_arrs) == len(loadable_ids)

test_data = fake_open_dataset(filenames[0])
for _data_id, data_arr in loaded_data_arrs.items():
+            data_arr = data_arr.compute()
var_name = data_arr.attrs["name"]
if var_name not in ['latitude', 'longitude']:
self._check_area(data_arr)
Expand Down

0 comments on commit d16c73a

Please sign in to comment.