Remove PR old association #47
64 fail, 518 pass in 1h 29m 41s
582 tests: 518 ✅ passed, 0 💤 skipped, 64 ❌ failed (1 suite, 1 file, 1h 29m 41s ⏱️)
Results for commit 65e4807.
Annotations
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1918210292-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 3m 12s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1918210292-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1918210292-GES_DISC', 'concept-id': 'G3153480346-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1918210292-GES_DISC'}]}, 'meta': {'association-details': {'collect...v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/number_of_slant_columns_win2', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C1918210290')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fec086b4640>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fec09fd7d40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fec09fd7c40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3153480346-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1918210292-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.47980000000001%3A-60.2582%29&subset=lon%2894.47670000000001%3A172.47930000000002%29&granuleId=G3153480346-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job dd9d6645-61cd-4065-8055-30da3eb04d55
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C1918210290/69667152_S5P_OFFL_L2_SO2_20240708T053223_20240708T071352_34899_03_020601_20240710T035025_subsetted.nc4
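Note: the IndexError above comes from unconditionally indexing [0] into the variable list of the first non-time subgroup; after subsetting, that group can contain no variables at all. A minimal sketch of a guard, assuming the same names as the test code above (candidate_groups is a hypothetical local):
    candidate_groups = [v for v in nc_d.groups[g].groups.keys()
                        if 'time' not in str(v).lower()]
    for data_group in candidate_groups:
        g_data = lat_group + '/' + data_group
        subsetted_ds_data = xarray.open_dataset(subsetted_filepath,
                                                group=g_data, decode_times=False)
        sci_vars = list(subsetted_ds_data.variables.keys())
        if sci_vars:  # skip data groups the subsetter left empty
            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_vars[0]]
            break
The same guard would apply to the other IndexError failures reported below (C1918210023-GES_DISC, C1627516287-GES_DISC, C2087216530-GES_DISC, C1627516296-GES_DISC, C1442068510-GES_DISC).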
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1918210023-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 39s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1918210023-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1918210023-GES_DISC', 'concept-id': 'G3153572897-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1918210023-GES_DISC'}]}, 'meta': {'association-details': {'collect...RL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/qa_value', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1918210020')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe25b8a3440>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe25b8a2d40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fe25b8a2c40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3153572897-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1918210023-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.5553%3A-60.3147%29&subset=lon%28-32.5049%3A45.7789%29&granuleId=G3153572897-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 5bf37348-ec8c-4eed-894a-5b61c0f1a92c
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1918210020/69667153_S5P_OFFL_L2_HCHO_20240708T135951_20240708T154121_34904_03_020601_20240710T060707_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2087131083-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 9s]
Raw output
OSError: [Errno group not found: PRODUCT] 'PRODUCT'
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ff8f127a740>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7ff8f9a8dc60>
def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
if group in {None, "", "/"}:
# use the root group
return ds
else:
# make sure it's a string
if not isinstance(group, str):
raise ValueError("group must be a string or None")
# support path-like syntax
path = group.strip("/").split("/")
for key in path:
try:
> ds = ds.groups[key]
E KeyError: 'PRODUCT'
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:190: KeyError
During handling of the above exception, another exception occurred:
collection_concept_id = 'C2087131083-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087131083-GES_DISC', 'concept-id': 'G3153523491-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087131083-GES_DISC'}]}, 'meta': {'association-details': {'collect.../variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_index_354_388_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C2087131080')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
verify_collection.py:423: in group_walk
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:571: in open_dataset
backend_ds = backend.open_dataset(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646: in open_dataset
store = NetCDF4DataStore.open(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409: in open
return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356: in __init__
self.format = self.ds.data_model
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418: in ds
return self._acquire()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:413: in _acquire
ds = _nc4_require_group(root, self._group, self._mode)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ff8f127a740>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7ff8f9a8dc60>
def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
if group in {None, "", "/"}:
# use the root group
return ds
else:
# make sure it's a string
if not isinstance(group, str):
raise ValueError("group must be a string or None")
# support path-like syntax
path = group.strip("/").split("/")
for key in path:
try:
ds = ds.groups[key]
except KeyError as e:
if mode != "r":
ds = create_group(ds, key)
else:
# wrap error to provide slightly more helpful message
> raise OSError(f"group not found: {key}", e)
E OSError: [Errno group not found: PRODUCT] 'PRODUCT'
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:196: OSError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3153523491-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2087131083-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.518525%3A-60.286474999999996%29&subset=lon%28-57.830349999999996%3A20.322349999999993%29&granuleId=G3153523491-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job ca92ca89-ab32-4cb9-830e-89fdd4eb524d
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C2087131080/69667200_S5P_OFFL_L2_AER_AI_20240708T154121_20240708T172251_34905_03_020600_20240710T052719_subsetted.nc4
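Note: here the walk tries to open '/METADATA/PRODUCT', a path that does not exist in the file. group_list is shared across recursive calls, so the dead-end descent into METADATA is never popped before PRODUCT is appended. A sketch of a fix, assuming the surrounding test code, that threads the path through the recursion instead of mutating shared state:
    def group_walk(groups, nc_d, current_path):
        # build the candidate path from the arguments, not a shared list,
        # so abandoned branches (e.g. METADATA) cannot corrupt it
        for g in groups:
            child = nc_d.groups[g]
            child_path = f"{current_path}/{g}".lstrip('/')
            if lat_var_name in child.variables:
                return xarray.open_dataset(subsetted_filepath, group=child_path,
                                           decode_times=False)
            if child.groups:
                found = group_walk(child.groups, child, child_path)
                if found is not None:
                    return found
        return None

    # root group first, as in the original; otherwise walk the tree
    subsetted_ds_new = subsetted_ds if lat_var_name in f.variables \
        else group_walk(f.groups, f, '')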
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2832224417-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 48s]
Raw output
Failed: Unable to find latitude and longitude variables.
collection_concept_id = 'C2832224417-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2832224417-POCLOUD', 'concept-id': 'G3153571048-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2832224417-POCLOUD'}]}, 'meta': {'association-details': {'collecti...me': 'look', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2832224410')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
> lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
verify_collection.py:398:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
dataset = <xarray.Dataset> Size: 232B
Dimensions: (ydim_grid: 1, xdim_grid: 1, look: 1,
... -0.43
history_json: [{"date_time": "2024-...
file_to_subset = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2832224410/69667223_RSS_SMAP_SSS_L2C_r50422_20240710T043351_2024192_NRT_V06.0_001.nc4')
collection_variable_list = [{'associations': {'collections': [{'concept-id': 'C2832224417-POCLOUD'}]}, 'meta': {'association-details': {'collecti...me': 'look', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
def get_lat_lon_var_names(dataset: xarray.Dataset, file_to_subset: str, collection_variable_list: List[Dict]):
# Try getting it from UMM-Var first
lat_var_json, lon_var_json, _ = get_coordinate_vars_from_umm(collection_variable_list)
lat_var_name = get_variable_name_from_umm_json(lat_var_json)
lon_var_name = get_variable_name_from_umm_json(lon_var_json)
if lat_var_name and lon_var_name:
return lat_var_name, lon_var_name
logging.warning("Unable to find lat/lon vars in UMM-Var")
# If that doesn't work, try using cf-xarray to infer lat/lon variable names
try:
latitude = [lat for lat in dataset.cf.coordinates['latitude']
if lat.lower() in VALID_LATITUDE_VARIABLE_NAMES][0]
longitude = [lon for lon in dataset.cf.coordinates['longitude']
if lon.lower() in VALID_LONGITUDE_VARIABLE_NAMES][0]
return latitude, longitude
except:
logging.warning("Unable to find lat/lon vars using cf_xarray")
# If that still doesn't work, try using l2ss-py directly
try:
# file not able to be flattened unless locally downloaded
shutil.copy(file_to_subset, 'my_copy_file.nc')
nc_dataset = netCDF4.Dataset('my_copy_file.nc', mode='r+')
# flatten the dataset
nc_dataset_flattened = podaac.subsetter.group_handling.transform_grouped_dataset(nc_dataset, 'my_copy_file.nc')
args = {
'decode_coords': False,
'mask_and_scale': False,
'decode_times': False
}
with xarray.open_dataset(
xarray.backends.NetCDF4DataStore(nc_dataset_flattened),
**args
) as flat_dataset:
# use l2ss-py to find lat and lon names
lat_var_names, lon_var_names = podaac.subsetter.subset.compute_coordinate_variable_names(flat_dataset)
os.remove('my_copy_file.nc')
if lat_var_names and lon_var_names:
lat_var_name = lat_var_names.split('__')[-1] if isinstance(lat_var_names, str) else lat_var_names[0].split('__')[-1]
lon_var_name = lon_var_names.split('__')[-1] if isinstance(lon_var_names, str) else lon_var_names[0].split('__')[-1]
return lat_var_name, lon_var_name
except ValueError:
logging.warning("Unable to find lat/lon vars using l2ss-py")
# Still no dice, try using the 'units' variable attribute
for coord_name, coord in dataset.coords.items():
if 'units' not in coord.attrs:
continue
if coord.attrs['units'] == 'degrees_north' and lat_var_name is None:
lat_var_name = coord_name
if coord.attrs['units'] == 'degrees_east' and lon_var_name is None:
lon_var_name = coord_name
if lat_var_name and lon_var_name:
return lat_var_name, lon_var_name
else:
logging.warning("Unable to find lat/lon vars using 'units' attribute")
# Out of options, fail the test because we couldn't determine lat/lon variables
> pytest.fail(f"Unable to find latitude and longitude variables.")
E Failed: Unable to find latitude and longitude variables.
verify_collection.py:358: Failed
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3153571048-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2832224417-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.321025%3A80.356025%29&subset=lon%28108.981%3A178.179%29&granuleId=G3153571048-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job d193dd9c-40cd-4937-92cd-0130728a6539
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2832224410/69667223_RSS_SMAP_SSS_L2C_r50422_20240710T043351_2024192_NRT_V06.0_001.nc4
WARNING root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
WARNING root:verify_collection.py:342 Unable to find lat/lon vars using l2ss-py
WARNING root:verify_collection.py:355 Unable to find lat/lon vars using 'units' attribute
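Note: all four lookup strategies (UMM-Var, cf-xarray, l2ss-py flattening, coordinate 'units') failed for this SMAP L2C granule. One further fallback worth considering (an assumption, not part of verify_collection.py as shipped) is to scan every variable rather than only dataset.coords for geographic units, since lat/lon here may be plain data variables:
    lat_var_name = lon_var_name = None
    for var_name, var in dataset.variables.items():  # data vars as well as coords
        units = var.attrs.get('units')
        if units == 'degrees_north' and lat_var_name is None:
            lat_var_name = var_name
        elif units == 'degrees_east' and lon_var_name is None:
            lon_var_name = var_name
    if lat_var_name and lon_var_name:
        return lat_var_name, lon_var_name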
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2832221740-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 3m 48s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2832221740-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2832221740-POCLOUD', 'concept-id': 'G3153023292-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2832221740-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C2832221740')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
> harmony_client.wait_for_processing(job_id, show_progress=True)
verify_collection.py:386:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <harmony.harmony.Client object at 0x7faf24202140>
job_id = '33f834fb-f7a9-4e7b-ada9-86a12651f161', show_progress = True
def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
"""Retrieve a submitted job's completion status in percent.
Args:
job_id: UUID string for the job you wish to interrogate.
Returns:
The job's processing progress as a percentage.
:raises
Exception: This can happen if an invalid job_id is provided or Harmony services
can't be reached.
"""
# How often to refresh the screen for progress updates and animating spinners.
ui_update_interval = 0.33 # in seconds
running_w_errors_logged = False
intervals = round(self.check_interval / ui_update_interval)
if show_progress:
with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
progress = 0
while progress < 100:
progress, status, message = self.progress(job_id)
if status == 'failed':
> raise ProcessingFailedException(job_id, message)
E harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3153023292-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2832221740-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.2991685%3A81.8031285%29&subset=lon%28-171.0%3A171.0%29&granuleId=G3153023292-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job 33f834fb-f7a9-4e7b-ada9-86a12651f161
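Note: this failure is raised server-side by podaac/l2ss-py:2.10.0 before any output file exists, so there is nothing client-side to inspect. If a clearer test report is wanted instead of a bare traceback, the wait could be wrapped; ProcessingFailedException is the type shown in the traceback above, the rest is a hypothetical sketch:
    from harmony.harmony import ProcessingFailedException

    try:
        harmony_client.wait_for_processing(job_id, show_progress=True)
    except ProcessingFailedException as err:
        # surface the job id and service message as an explicit test failure
        pytest.fail(f"Harmony job {job_id} failed in the service: {err}")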
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1627516287-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 17s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516287-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516287-GES_DISC', 'concept-id': 'G2084463561-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516287-GES_DISC'}]}, 'meta': {'association-details': {'collect...'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/corner', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1627516280')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f6165ca5d40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f6165ca6840>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f6165ca6a40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G2084463561-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516287-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.99937499999999%3A-59.951625%29&subset=lon%28-76.6214%3A-1.5866000000000042%29&granuleId=G2084463561-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 73ea0905-370a-43c7-9a48-c194720d8a8c
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1627516280/69667265_S5P_OFFL_L2_CO_20210701T170324_20210701T184453_19257_01_010400_20210703T065107_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2087216530-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 13s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2087216530-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087216530-GES_DISC', 'concept-id': 'G3153523165-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087216530-GES_DISC'}]}, 'meta': {'association-details': {'collect...Var', 'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/layer'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2087216530')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fec087a9040>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fec087a8b40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fec087a8a40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3153523165-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2087216530-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.70695%3A-60.52905%29&subset=lon%2869.62944999999999%3A147.31855%29&granuleId=G3153523165-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 28a530d5-eff8-4619-a8bc-0fd011e01989
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2087216530/69667275_S5P_OFFL_L2_CH4_20240708T071352_20240708T085522_34900_03_020600_20240709T231330_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1627516296-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 3m 5s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516296-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516296-GES_DISC', 'concept-id': 'G1902371245-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516296-GES_DISC'}]}, 'meta': {'association-details': {'collect...umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/sulfurdioxide_total_vertical_column_precision', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1627516290')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9ea4b84340>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9ea4b86040>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f9ea4b87140>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1902371245-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516296-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-78.0453%3A-60.6907%29&subset=lon%28-164.82465%3A-84.66935000000001%29&granuleId=G1902371245-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 69ca0cc7-532b-4a0a-a0fc-ba2ee802a2f1
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1627516290/69667303_S5P_OFFL_L2_SO2_20200712T224601_20200713T002730_14238_01_010108_20200715T211427_subsetted.nc4
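
This failure mode recurs for several S5P collections below: group_walk finds latitude in the PRODUCT group, then opens the first non-time child group and takes its first variable, but the subsetted file contains a child group that the subsetter left without any variables, so variables.keys() is empty and the [0] index raises. A minimal guard, sketched against the group_walk logic above (it reuses nc_d, g, lat_group, subsetted_filepath and subsetted_ds_new from the traceback; the guard itself is hypothetical, not code in verify_collection.py):

# Sketch: tolerate variable-less sub-groups when picking a science variable.
candidates = [v for v in nc_d.groups[g].groups if 'time' not in str(v).lower()]
for data_group in candidates:
    g_data = lat_group + '/' + data_group
    ds_data = xarray.open_dataset(subsetted_filepath, group=g_data,
                                  decode_times=False)
    if ds_data.variables:                  # skip groups subset down to empty
        subsetted_ds_new['science_test'] = ds_data[next(iter(ds_data.variables))]
        break
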
github-actions / Regression test results for ops
test_spatial_subset[C1442068510-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 46s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068510-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068510-GES_DISC', 'concept-id': 'G1628685468-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068510-GES_DISC'}]}, 'meta': {'association-details': {'collect...mm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/averaging_kernel', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C1442068510')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f54b9644040>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f54b973b840>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f54b973b740>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1628685468-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068510-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1628685468-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 7856acdc-2bdf-4069-8787-988c1787bf4d
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C1442068510/69667320_S5P_OFFL_L2_HCHO_20190806T003836_20190806T022006_09387_01_010107_20190812T015759_subsetted.nc4
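
The same [0] pattern has a second failure path worth noting: the comprehension that picks data_group raises the identical IndexError when every child group name contains 'time'. A hedged variant (hypothetical, not current repository code) makes that absence explicit instead:

# Sketch: fail soft when no usable data group exists at all.
data_group = next((v for v in nc_d.groups[g].groups
                   if 'time' not in str(v).lower()), None)
if data_group is None:
    logging.warning("No non-time sub-group under %s; keeping lat/lon only",
                    lat_group)
else:
    g_data = lat_group + '/' + data_group
    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data,
                                            decode_times=False)
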
github-actions / Regression test results for ops
test_spatial_subset[C2936721448-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 59s]
Raw output
Failed: Unable to find latitude and longitude variables.
collection_concept_id = 'C2936721448-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2936721448-POCLOUD', 'concept-id': 'G3062447313-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2936721448-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2936721440')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
> lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
verify_collection.py:398:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
dataset = <xarray.Dataset> Size: 240B
Dimensions: (ydim_grid: 1, xdim_grid: 1, look: 1,
... -0.43
history_json: [{"date_time": "2...
file_to_subset = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2936721440/69667335_RSS_SMAP_SSS_L2C_r47700_20240106T014035_2024006_FNL_V05.3.nc4')
collection_variable_list = [{'associations': {'collections': [{'concept-id': 'C2936721448-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
def get_lat_lon_var_names(dataset: xarray.Dataset, file_to_subset: str, collection_variable_list: List[Dict]):
# Try getting it from UMM-Var first
lat_var_json, lon_var_json, _ = get_coordinate_vars_from_umm(collection_variable_list)
lat_var_name = get_variable_name_from_umm_json(lat_var_json)
lon_var_name = get_variable_name_from_umm_json(lon_var_json)
if lat_var_name and lon_var_name:
return lat_var_name, lon_var_name
logging.warning("Unable to find lat/lon vars in UMM-Var")
# If that doesn't work, try using cf-xarray to infer lat/lon variable names
try:
latitude = [lat for lat in dataset.cf.coordinates['latitude']
if lat.lower() in VALID_LATITUDE_VARIABLE_NAMES][0]
longitude = [lon for lon in dataset.cf.coordinates['longitude']
if lon.lower() in VALID_LONGITUDE_VARIABLE_NAMES][0]
return latitude, longitude
except:
logging.warning("Unable to find lat/lon vars using cf_xarray")
# If that still doesn't work, try using l2ss-py directly
try:
# file not able to be flattened unless locally downloaded
shutil.copy(file_to_subset, 'my_copy_file.nc')
nc_dataset = netCDF4.Dataset('my_copy_file.nc', mode='r+')
# flatten the dataset
nc_dataset_flattened = podaac.subsetter.group_handling.transform_grouped_dataset(nc_dataset, 'my_copy_file.nc')
args = {
'decode_coords': False,
'mask_and_scale': False,
'decode_times': False
}
with xarray.open_dataset(
xarray.backends.NetCDF4DataStore(nc_dataset_flattened),
**args
) as flat_dataset:
# use l2ss-py to find lat and lon names
lat_var_names, lon_var_names = podaac.subsetter.subset.compute_coordinate_variable_names(flat_dataset)
os.remove('my_copy_file.nc')
if lat_var_names and lon_var_names:
lat_var_name = lat_var_names.split('__')[-1] if isinstance(lat_var_names, str) else lat_var_names[0].split('__')[-1]
lon_var_name = lon_var_names.split('__')[-1] if isinstance(lon_var_names, str) else lon_var_names[0].split('__')[-1]
return lat_var_name, lon_var_name
except ValueError:
logging.warning("Unable to find lat/lon vars using l2ss-py")
# Still no dice, try using the 'units' variable attribute
for coord_name, coord in dataset.coords.items():
if 'units' not in coord.attrs:
continue
if coord.attrs['units'] == 'degrees_north' and lat_var_name is None:
lat_var_name = coord_name
if coord.attrs['units'] == 'degrees_east' and lon_var_name is None:
lon_var_name = coord_name
if lat_var_name and lon_var_name:
return lat_var_name, lon_var_name
else:
logging.warning("Unable to find lat/lon vars using 'units' attribute")
# Out of options, fail the test because we couldn't determine lat/lon variables
> pytest.fail(f"Unable to find latitude and longitude variables.")
E Failed: Unable to find latitude and longitude variables.
verify_collection.py:358: Failed
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3062447313-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2936721448-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.29044999999999%3A82.08044999999998%29&subset=lon%284.51755%3A175.50045%29&granuleId=G3062447313-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job 6cb3d6c0-83a0-4ebf-9570-ad5a874fcfdf
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2936721440/69667335_RSS_SMAP_SSS_L2C_r47700_20240106T014035_2024006_FNL_V05.3.nc4
WARNING root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
WARNING root:verify_collection.py:342 Unable to find lat/lon vars using l2ss-py
WARNING root:verify_collection.py:355 Unable to find lat/lon vars using 'units' attribute
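
Here all four lookups fail in sequence (see the captured warnings). The RSS SMAP L2C dimensions (ydim_grid, xdim_grid, look) suggest lat/lon are stored as plain data variables rather than coordinates, and the final fallback only scans dataset.coords. A sketch of a fifth fallback that scans every variable's units attribute, under that assumption:

# Sketch: extend the 'units' fallback from dataset.coords to all variables
# (assumes RSS SMAP stores lat/lon as data variables with CF units).
for var_name, var in dataset.variables.items():
    units = var.attrs.get('units')
    if units == 'degrees_north' and lat_var_name is None:
        lat_var_name = var_name
    if units == 'degrees_east' and lon_var_name is None:
        lon_var_name = var_name
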
github-actions / Regression test results for ops
test_spatial_subset[C1918209669-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 2m 16s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1918209669-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1918209669-GES_DISC', 'concept-id': 'G3153571422-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1918209669-GES_DISC'}]}, 'meta': {'association-details': {'collect...tracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': 9.969209968386869e+36}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1918209660')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9ea4b85f40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9ea4b86340>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f9ea4b86440>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3153571422-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1918209669-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.5553%3A-60.3147%29&subset=lon%28-32.5049%3A45.7789%29&granuleId=G3153571422-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 971187ff-caf0-4207-a1ae-49c609e9f52e
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1918209660/69667398_S5P_OFFL_L2_CLOUD_20240708T135951_20240708T154121_34904_03_020601_20240710T054238_subsetted.nc4
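
To confirm which group the subsetter emptied, a short diagnostic run against the downloaded file is enough; this is a standalone sketch, not part of the test:

# Diagnostic sketch: print every group in the subsetted file with its
# variable count; a group reported with 0 variables is the IndexError site.
import netCDF4

def dump_groups(nc, path='/'):
    print(f"{path}: {len(nc.variables)} variables")
    for name, child in nc.groups.items():
        dump_groups(child, f"{path.rstrip('/')}/{name}")

with netCDF4.Dataset(subsetted_filepath) as f:
    dump_groups(f)
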
github-actions / Regression test results for ops
test_spatial_subset[C2847232153-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 10m 0s]
Raw output
Failed: Timeout >600.0s
collection_concept_id = 'C2847232153-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2847232153-POCLOUD', 'concept-id': 'G3152781828-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2847232153-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 3200, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2847232150')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
> harmony_client.wait_for_processing(job_id, show_progress=True)
verify_collection.py:386:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <harmony.harmony.Client object at 0x7fec10fc1480>
job_id = 'a4cd946c-0996-4cd2-8631-5f9f596c2e65', show_progress = True
def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
"""Retrieve a submitted job's completion status in percent.
Args:
job_id: UUID string for the job you wish to interrogate.
Returns:
The job's processing progress as a percentage.
:raises
Exception: This can happen if an invalid job_id is provided or Harmony services
can't be reached.
"""
# How often to refresh the screen for progress updates and animating spinners.
ui_update_interval = 0.33 # in seconds
running_w_errors_logged = False
intervals = round(self.check_interval / ui_update_interval)
if show_progress:
with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
progress = 0
while progress < 100:
progress, status, message = self.progress(job_id)
if status == 'failed':
raise ProcessingFailedException(job_id, message)
if status == 'canceled':
print('Job has been canceled.')
break
if status == 'paused':
print('\nJob has been paused. Call `resume()` to resume.', file=sys.stderr)
break
if (not running_w_errors_logged and status == 'running_with_errors'):
print('\nJob is running with errors.', file=sys.stderr)
running_w_errors_logged = True
# This gets around an issue with progressbar. If we update() with 0, the
# output shows up as "N/A". If we update with, e.g. 0.1, it rounds down or
# truncates to 0 but, importantly, actually displays that.
if progress == 0:
progress = 0.1
for _ in range(intervals):
bar.update(progress) # causes spinner to rotate even when no data change
sys.stdout.flush() # ensures correct behavior in Jupyter notebooks
if progress >= 100:
break
else:
> time.sleep(ui_update_interval)
E Failed: Timeout >600.0s
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:1009: Failed
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3152781828-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2847232153-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%2869.35976775%3A85.53894224999999%29&subset=lon%28-100.48666449999999%3A69.2720845%29&granuleId=G3152781828-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job a4cd946c-0996-4cd2-8631-5f9f596c2e65
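
Unlike the IndexError cases, this test never leaves wait_for_processing: the 600 s pytest timeout expires inside harmony-py's progress loop. A sketch that bounds the wait explicitly, using the Client.progress() call visible in the harmony-py source above, so a slow granule skips instead of hard-failing (the 480 s headroom is an arbitrary choice):

# Sketch: cap the Harmony wait below the pytest timeout.
# progress() returns (percent, status, message), per the source above.
import time

deadline = time.monotonic() + 480
while time.monotonic() < deadline:
    progress, status, message = harmony_client.progress(job_id)
    if status == 'failed':
        pytest.fail(f"Harmony job failed: {message}")
    if progress >= 100 or status in ('successful', 'canceled', 'paused'):
        break
    time.sleep(10)
else:
    pytest.skip("Harmony job still running after 480 s")
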
github-actions / Regression test results for ops
test_spatial_subset[C1627516288-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 59s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516288-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516288-GES_DISC', 'concept-id': 'G2085128317-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516288-GES_DISC'}]}, 'meta': {'association-details': {'collect... 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/ground_pixel', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1627516280')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f81457a6540>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f81455fc740>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f81455fc640>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G2085128317-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516288-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.99937499999999%3A-59.951625%29&subset=lon%28-76.6214%3A-1.5866000000000042%29&granuleId=G2085128317-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 7e2534ad-d2e7-4f5d-8bc7-c0b2d8ade0cf
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1627516280/69667631_S5P_OFFL_L2_CH4_20210701T170324_20210701T184453_19257_01_010400_20210703T102338_subsetted.nc4
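
The walk communicates its result through a global (subsetted_ds_new) and a shared group_list, which makes the recursion hard to follow in these tracebacks. A rewrite that simply returns the dataset containing latitude is shorter; this is a sketch of the traversal only, omitting the science-variable attachment shown above:

# Sketch: recursion that returns the dataset holding the latitude variable.
def find_lat_dataset(nc_d, group_path=''):
    if lat_var_name in nc_d.variables:
        return xarray.open_dataset(subsetted_filepath,
                                   group=group_path or None,
                                   decode_times=False)
    for name, child in nc_d.groups.items():
        child_path = f"{group_path}/{name}" if group_path else name
        found = find_lat_dataset(child, child_path)
        if found is not None:
            return found
    return None
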
github-actions / Regression test results for ops
test_spatial_subset[C1442068493-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 48s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068493-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068493-GES_DISC', 'concept-id': 'G1628685465-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068493-GES_DISC'}]}, 'meta': {'association-details': {'collect...arthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/carbonmonoxide_total_column', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C1442068490')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ff8f1278e40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ff8f1279040>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7ff8f1278d40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1628685465-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068493-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.468625%3A-64.100375%29&subset=lon%28-112.00605%3A163.26405%29&granuleId=G1628685465-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job c1967d26-c469-4a99-966b-27278f475669
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C1442068490/69667635_S5P_OFFL_L2_CO_20190806T003836_20190806T022006_09387_01_010302_20190811T235959_subsetted.nc4
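
Newer xarray can express the whole traversal declaratively. Assuming a version that ships open_datatree (xarray 2024.10 or later; earlier versions need the separate datatree package), locating the latitude group becomes a one-liner; the .subtree, .path and .ds names are taken from the DataTree API and should be verified against the pinned xarray:

# Sketch: find the group containing latitude via DataTree (assumed API).
tree = xarray.open_datatree(subsetted_filepath, decode_times=False)
lat_group = next((node.path for node in tree.subtree
                  if lat_var_name in node.ds.variables), None)
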
github-actions / Regression test results for ops
test_spatial_subset[C1729926922-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 37s]
Raw output
assert False
collection_concept_id = 'C1729926922-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1729926922-GES_DISC', 'concept-id': 'G3152466204-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1729926922-GES_DISC'}]}, 'meta': {'association-details': {'collect...9.989990234375}], 'LongName': 'HDFEOS/SWATHS/Temperature-APriori/Data Fields/Temperature-APrioriPrecision', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1729926920')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
group_walk(f.groups, f, '')
assert lat_var_name and lon_var_name
var_ds = None
msk = None
if science_vars := get_science_vars(collection_variables):
for idx, value in enumerate(science_vars):
science_var_name = science_vars[idx]['umm']['Name']
try:
var_ds = subsetted_ds_new[science_var_name]
msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
break
except Exception:
try:
# if the variable couldn't be found because the name includes a group, e.g.,
# `geolocation/relative_azimuth_angle`,
# then try to access the variable after removing the group name.
var_ds = subsetted_ds_new[science_var_name.rsplit("/", 1)[-1]]
msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
break
except Exception:
var_ds = None
msk = None
if var_ds is None and msk is None:
pytest.fail(f"Unable to find variable from umm-v to use as science variable.")
else:
# Can't find a science var in UMM-V, just pick one
science_var_name = next(iter([v for v in subsetted_ds_new.variables if
str(v) not in lat_var_name and str(v) not in lon_var_name and 'time' not in str(v)]))
var_ds = subsetted_ds_new[science_var_name]
try:
msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
llat = subsetted_ds_new[lat_var_name].where(msk)
llon = subsetted_ds_new[lon_var_name].where(msk)
except ValueError:
llat = subsetted_ds_new[lat_var_name]
llon = subsetted_ds_new[lon_var_name]
lat_max = llat.max()
lat_min = llat.min()
lon_min = llon.min()
lon_max = llon.max()
lon_min = (lon_min + 180) % 360 - 180
lon_max = (lon_max + 180) % 360 - 180
lat_var_fill_value = subsetted_ds_new[lat_var_name].encoding.get('_FillValue')
lon_var_fill_value = subsetted_ds_new[lon_var_name].encoding.get('_FillValue')
if lat_var_fill_value:
if (lat_max <= north or np.isclose(lat_max, north)) and (lat_min >= south or np.isclose(lat_min, south)):
logging.info("Successful Latitude subsetting")
elif np.isnan(lat_max) and np.isnan(lat_min):
logging.info("Partial Lat Success - no Data")
else:
assert False
if lon_var_fill_value:
if (lon_max <= east or np.isclose(lon_max, east)) and (lon_min >= west or np.isclose(lon_min, west)):
logging.info("Successful Longitude subsetting")
elif np.isnan(lon_max) and np.isnan(lon_min):
logging.info("Partial Lon Success - no Data")
else:
> assert False
E assert False
verify_collection.py:512: AssertionError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3152466204-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1729926922-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&granuleId=G3152466204-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job c5690bea-401a-4c62-aed0-6ea0b080eadd
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1729926920/69667645_MLS-Aura_L2GP-Temperature_v05-03-c01_2024d189_subsetted.nc4
INFO root:verify_collection.py:500 Successful Latitude subsetting
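
This MLS failure is different: latitude subsetting succeeds (see the captured log) and the bare assert False then hides the offending longitudes. The requested box spans -171.0 to 171.0, i.e. all but 18 degrees of longitude, so any retained points in (-180, -171) or (171, 180) fail the containment check without any record of where they fell. At minimum the assertion could carry the computed values; this sketch repeats the test's own comparison with a message added:

# Sketch: same longitude check as above, but diagnosable on failure.
assert ((lon_max <= east or np.isclose(lon_max, east)) and
        (lon_min >= west or np.isclose(lon_min, west))), (
    f"Longitude not subset: data spans [{float(lon_min)}:{float(lon_max)}], "
    f"requested [{west}:{east}]")
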
github-actions / Regression test results for ops
test_spatial_subset[C1627516285-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 24s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516285-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516285-GES_DISC', 'concept-id': 'G2084435970-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516285-GES_DISC'}]}, 'meta': {'association-details': {'collect.../variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_index_354_388_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1627516280')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd84d8e7840>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd84d8e5440>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fd84d8e5e40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G2084435970-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516285-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.82889999999999%3A-59.7251%29&subset=lon%28-77.22455%3A-1.6634499999999974%29&granuleId=G2084435970-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 07cf8f4f-e877-48be-aaf9-3e55e75ccb80
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1627516280/69667649_S5P_OFFL_L2_AER_AI_20210701T170324_20210701T184453_19257_01_010400_20210703T065109_subsetted.nc4
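Note on the failure above: the IndexError is raised where group_walk does sci_var = list(subsetted_ds_data.variables.keys())[0]. The sibling group picked as data_group still exists in the subsetted file, but the subsetter left it with no variables, so index 0 of the empty name list is out of range. A minimal guard, shown as a hypothetical standalone helper (attach_first_science_var is not part of verify_collection.py):

import logging
import xarray

def attach_first_science_var(target_ds, filepath, group_path):
    """Copy the first variable of group_path into target_ds as
    'science_test', tolerating groups the subsetter emptied out."""
    candidate = xarray.open_dataset(filepath, group=group_path, decode_times=False)
    names = list(candidate.variables)
    if not names:
        # Empty group: skip attaching a science variable instead of
        # crashing with IndexError as in the traceback above.
        logging.warning("group %s has no variables; skipping science variable", group_path)
        return target_ds
    target_ds['science_test'] = candidate[names[0]]
    return target_ds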
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1627516300-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 26s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516300-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516300-GES_DISC', 'concept-id': 'G1902371249-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516300-GES_DISC'}]}, 'meta': {'association-details': {'collect...asa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/ozone_total_vertical_column_precision', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C1627516300')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd167d80240>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd16c17bb40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fd16c17ba40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1902371249-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516300-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-78.0453%3A-60.6907%29&subset=lon%28-164.82465%3A-84.66935000000001%29&granuleId=G1902371249-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job a40d5d1b-1459-4720-9987-39193c5babaa
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C1627516300/69667651_S5P_OFFL_L2_O3_20200712T224601_20200713T002730_14238_01_010108_20200715T122623_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1442068509-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 1s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068509-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068509-GES_DISC', 'concept-id': 'G1628706233-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068509-GES_DISC'}]}, 'meta': {'association-details': {'collect... 'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/level', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1442068500')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe25b4bd640>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe25b4bdc40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fe25b4bdd40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1628706233-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068509-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1628706233-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 22f88dff-f31e-409d-b444-0705db4f9774
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1442068500/69667675_S5P_OFFL_L2_O3_20190806T003836_20190806T022006_09387_01_010107_20190812T015759_subsetted.nc4
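The same IndexError at verify_collection.py:430 reproduces across several S5P collections in this run (C1627516300, C1442068509, and more below), which suggests the subsetted products consistently come back with an empty sibling group next to the group holding lat/lon. A small diagnostic makes that easy to confirm on a downloaded file; dump_group_tree is a debugging aid, not part of the test suite:

import netCDF4

def dump_group_tree(path):
    """Print every group in a netCDF4 file with its variable count,
    to see which groups the subsetter left empty."""
    with netCDF4.Dataset(path) as ds:
        def walk(grp, prefix=""):
            print(f"{prefix or '/'}: {len(grp.variables)} variables")
            for name, child in grp.groups.items():
                walk(child, f"{prefix}/{name}")
        walk(ds)

For example, dump_group_tree('69667675_S5P_OFFL_L2_O3_..._subsetted.nc4') would list each group path with its variable count.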
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2179081549-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 42s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2179081549-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2179081549-GES_DISC', 'concept-id': 'G3153007430-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2179081549-GES_DISC'}]}, 'meta': {'association-details': {'collect...escription': 'Extracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -9999}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C2179081540')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'Swath': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f6165a97140>}
nc_d = <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f6165a97040>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
> data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
E IndexError: list index out of range
verify_collection.py:427: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3153007430-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2179081549-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-66.25547725%3A-60.46797275%29&subset=lon%28-155.4820265%3A158.80774649999998%29&granuleId=G3153007430-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 47d0b430-a169-44c9-ba7f-14fbd1b35165
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C2179081540/69667679_2A.GPM.DPR.GPM-SLH.20240708-S222318-E235631.058845.V07C_subsetted.nc4
WARNING root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
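This GPM-SLH failure trips one step earlier, at verify_collection.py:427: the list comprehension that filters out time-named subgroups came back empty, so the bare [0] raised before any dataset was opened. Note the two warnings directly above: lat/lon variable names were found neither in UMM-Var nor via cf_xarray, so the walk was already operating on fallback names. A guard built on next() avoids indexing an empty list; first_non_time_subgroup is a hypothetical helper mirroring the failing comprehension:

def first_non_time_subgroup(group):
    """Return the name of the first subgroup whose name does not contain
    'time', or None when every subgroup is time-related (the GPM-SLH
    case above)."""
    return next((name for name in group.groups if 'time' not in name.lower()), None)

A caller can then skip attaching a science variable when this returns None instead of raising IndexError.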
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1442068508-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 41s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068508-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068508-GES_DISC', 'concept-id': 'G1628710396-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068508-GES_DISC'}]}, 'meta': {'association-details': {'collect...v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/fitted_radiance_squeeze_win3', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C1442068500')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ff8f10c1040>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ff8f10c0940>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7ff8f10c0840>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1628710396-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068508-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1628710396-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 10c4fa5a-96d3-409c-9827-415d72548e4c
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C1442068500/69667681_S5P_OFFL_L2_SO2_20190806T003836_20190806T022006_09387_01_010107_20190812T085130_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_temporal_subset[C2068529568-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 3m 27s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2068529568-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2068529568-POCLOUD', 'concept-id': 'G2586738585-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2068529568-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 4193, 'Type': 'ALONG_TRACK_DIMENSION'}, {'Name': 'ni', 'Size': 243, 'Type': 'CROSS_TRACK_DIMENSION'}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_temporal_subset_C206852950')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
temporal_subset = get_half_temporal_extent(start_time, end_time)
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection,
granule_id=[granule_json['meta']['concept-id']],
temporal=temporal_subset)
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
> harmony_client.wait_for_processing(job_id, show_progress=True)
verify_collection.py:538:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <harmony.harmony.Client object at 0x7fd84d9b0610>
job_id = 'c87a336c-f16b-4207-b019-3779c6ac1857', show_progress = True
def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
"""Retrieve a submitted job's completion status in percent.
Args:
job_id: UUID string for the job you wish to interrogate.
Returns:
The job's processing progress as a percentage.
:raises
Exception: This can happen if an invalid job_id is provided or Harmony services
can't be reached.
"""
# How often to refresh the screen for progress updates and animating spinners.
ui_update_interval = 0.33 # in seconds
running_w_errors_logged = False
intervals = round(self.check_interval / ui_update_interval)
if show_progress:
with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
progress = 0
while progress < 100:
progress, status, message = self.progress(job_id)
if status == 'failed':
> raise ProcessingFailedException(job_id, message)
E harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:519 Using granule G2586738585-POCLOUD for test
INFO root:verify_collection.py:532 Sending harmony request https://harmony.earthdata.nasa.gov/C2068529568-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222023-01-11T10%3A04%3A49%22%3A%222023-01-11T10%3A54%3A15%22%29&granuleId=G2586738585-POCLOUD
INFO root:verify_collection.py:536 Submitted harmony job c87a336c-f16b-4207-b019-3779c6ac1857
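Unlike the IndexError cases, this temporal-subset failure happens before any file is downloaded: Harmony reports that the podaac/l2ss-py:2.10.0 work item itself failed, and wait_for_processing surfaces that as ProcessingFailedException. (Also visible in the traceback: test_temporal_subset assigns its docstring to test_spatial_subset.__doc__, an apparent copy-paste slip.) A sketch that logs the job id before re-raising, so service-side failures are easy to pick out of the report; the suite currently lets the exception propagate unannotated:

import logging
from harmony.harmony import ProcessingFailedException  # module path as shown in the traceback above

def wait_or_report(harmony_client, job_id):
    """Wait for a Harmony job, logging the job id on service-side failure."""
    try:
        harmony_client.wait_for_processing(job_id, show_progress=True)
    except ProcessingFailedException:
        logging.error("Harmony job %s failed service-side (work item error)", job_id)
        raise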
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2089270961-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 2m 21s]
Raw output
OSError: [Errno group not found: PRODUCT] 'PRODUCT'
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9ea4864040>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f9ead382680>
def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
if group in {None, "", "/"}:
# use the root group
return ds
else:
# make sure it's a string
if not isinstance(group, str):
raise ValueError("group must be a string or None")
# support path-like syntax
path = group.strip("/").split("/")
for key in path:
try:
> ds = ds.groups[key]
E KeyError: 'PRODUCT'
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:190: KeyError
During handling of the above exception, another exception occurred:
collection_concept_id = 'C2089270961-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2089270961-GES_DISC', 'concept-id': 'G3153058150-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2089270961-GES_DISC'}]}, 'meta': {'association-details': {'collect....0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/nitrogendioxide_tropospheric_column_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C2089270960')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
verify_collection.py:423: in group_walk
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:571: in open_dataset
backend_ds = backend.open_dataset(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646: in open_dataset
store = NetCDF4DataStore.open(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409: in open
return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356: in __init__
self.format = self.ds.data_model
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418: in ds
return self._acquire()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:413: in _acquire
ds = _nc4_require_group(root, self._group, self._mode)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9ea4864040>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f9ead382680>
def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
if group in {None, "", "/"}:
# use the root group
return ds
else:
# make sure it's a string
if not isinstance(group, str):
raise ValueError("group must be a string or None")
# support path-like syntax
path = group.strip("/").split("/")
for key in path:
try:
ds = ds.groups[key]
except KeyError as e:
if mode != "r":
ds = create_group(ds, key)
else:
# wrap error to provide slightly more helpful message
> raise OSError(f"group not found: {key}", e)
E OSError: [Errno group not found: PRODUCT] 'PRODUCT'
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:196: OSError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3153058150-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2089270961-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.879075%3A-59.781925%29&subset=lon%28-140.95485%3A-65.13915%29&granuleId=G3153058150-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 19a7b393-9e34-48a9-b538-47bf626fdf8c
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C2089270960/69667707_S5P_OFFL_L2_NO2_20240701T211743_20240701T225913_34809_03_020600_20240703T133502_subsetted.nc4
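The OSError above exposes the root defect behind the walk: the path '/METADATA/PRODUCT' does not exist in the file. It is an artifact of group_list being created once and mutated by every recursive group_walk call, so 'METADATA' (visited first, without latitude) is still on the list when latitude is found under the sibling PRODUCT group. The global subsetted_ds_new sentinel is shared and reset across calls in the same way. Threading the path through the recursion instead of sharing state avoids both problems; find_lat_group is a hypothetical replacement sketch, not the repository's actual fix:

def find_lat_group(nc_group, lat_var_name, path=""):
    """Return the slash-separated path of the first group containing
    lat_var_name, or None if absent. The path is passed down the
    recursion rather than accumulated in a shared list, so sibling
    branches cannot leak into the result (the '/METADATA/PRODUCT'
    bug above)."""
    if lat_var_name in nc_group.variables:
        return path or "/"
    for name, child in nc_group.groups.items():
        found = find_lat_group(child, lat_var_name, f"{path}/{name}")
        if found is not None:
            return found
    return None

With a netCDF4.Dataset opened as f, find_lat_group(f, lat_var_name) yields a group path that xarray.open_dataset(subsetted_filepath, group=..., decode_times=False) can open directly.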
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1442068490-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 2s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068490-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068490-GES_DISC', 'concept-id': 'G1628672811-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068490-GES_DISC'}]}, 'meta': {'association-details': {'collect...ov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_index_354_388_pdf_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1442068490')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f81459a2440>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f81459a3740>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f81459a3940>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1628672811-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068490-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1628672811-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job d84ae8a5-b612-4d5c-b5fb-da1cd7eaae5c
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1442068490/69667713_S5P_OFFL_L2_AER_AI_20190806T003836_20190806T022006_09387_01_010302_20190812T000004_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1442068491-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 10s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068491-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068491-GES_DISC', 'concept-id': 'G1642673899-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068491-GES_DISC'}]}, 'meta': {'association-details': {'collect...s://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/aerosol_mid_height', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1442068490')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe25b696e40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe25b696940>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fe25b696840>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1642673899-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068491-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.32457500000001%3A-63.840424999999996%29&subset=lon%28-112.0001%3A163.1541%29&granuleId=G1642673899-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 9f3d6e84-6746-42c8-9eb8-b5cfe3cfec4c
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1442068490/69667717_S5P_OFFL_L2_AER_LH_20190806T003836_20190806T022006_09387_01_010302_20190812T015801_subsetted.nc4
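The traceback above trips on sci_var = list(subsetted_ds_data.variables.keys())[0], which assumes the first non-time sub-group still holds at least one variable after subsetting; when the subsetter returns that group empty, the [0] raises IndexError. A minimal defensive sketch of an alternative (the helper name first_science_var is hypothetical, not the repository's actual fix): skip candidate groups that come back with no variables instead of indexing blindly.

import xarray

def first_science_var(filepath, lat_group, candidate_groups):
    """Return (group_path, variable_name) for the first non-time candidate
    group that still contains variables after subsetting, else (None, None)."""
    for data_group in candidate_groups:
        if 'time' in str(data_group).lower():
            continue
        g_data = f'{lat_group}/{data_group}'
        ds = xarray.open_dataset(filepath, group=g_data, decode_times=False)
        variables = list(ds.variables)
        ds.close()
        if variables:  # guard against the empty group seen in this failure
            return g_data, variables[0]
    return None, None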
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1442068511-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 2m 30s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068511-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068511-GES_DISC', 'concept-id': 'G1629705055-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068511-GES_DISC'}]}, 'meta': {'association-details': {'collect...e/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/nitrogendioxide_tropospheric_column_pdf_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1442068510')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info('Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# call the function again on any group that has sub-groups and didn't contain latitude
# this recurses 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f6165ca4940>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f6165ca6f40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f6165ca4840>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1629705055-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068511-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1629705055-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 2ee5fcb9-2d2d-4e3a-ab03-760035c487b1
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1442068510/69667747_S5P_OFFL_L2_NO2_20190806T003836_20190806T022006_09387_01_010302_20190812T015802_subsetted.nc4
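This second failure shares the same root cause as the first: in these S5P products the group holding latitude/longitude (PRODUCT) carries sub-groups such as SUPPORT_DATA that the subsetter can return with nested groups but no direct variables, so indexing [0] into the variable list fails. A quick hypothetical diagnostic (the filename below is a placeholder for the file logged under 'Downloaded:' above) that prints each group's direct variables so the empty ones stand out:

import netCDF4

def dump_groups(path):
    """Walk a netCDF file and print each group's direct variables."""
    with netCDF4.Dataset(path) as nc:
        def walk(group, prefix=''):
            print(f"{prefix or '/'}: {list(group.variables) or 'NO VARIABLES'}")
            for name, child in group.groups.items():
                walk(child, f'{prefix}/{name}')
        walk(nc)

dump_groups('subsetted.nc4')  # placeholder path; substitute the .nc4 file from the captured log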