Remove PR old association #47
5 errors, 63 fail, 514 pass in 48m 18s
582 tests   514 ✅   48m 18s ⏱️
  1 suites    0 💤
  1 files    63 ❌    5 🔥
Results for commit 65e4807.
Annotations
Check failure on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2799465526-POCLOUD] (tests.verify_collection) with error
test-results/ops_test_report.xml [took 6s]
Raw output
failed on setup with "requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))"
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7fb750bfddb0>
method = 'GET'
url = '/search/variables.umm_json?concept_id%5B%5D=V2799653751-POCLOUD&concept_id%5B%5D=V2799653649-POCLOUD&concept_id%5B%5D...V2799654031-POCLOUD&concept_id%5B%5D=V2799654463-POCLOUD&concept_id%5B%5D=V2799653676-POCLOUD&page_size=40&page_num=17'
body = None
headers = {'User-Agent': 'python-requests/2.32.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None, preload_content = False
decode_content = False, response_kw = {}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/search/variables.umm_json', query='concept_id%5B%5D=V27996537...OUD&concept_id%5B%5D=V2799654463-POCLOUD&concept_id%5B%5D=V2799653676-POCLOUD&page_size=40&page_num=17', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False
def urlopen( # type: ignore[override]
self,
method: str,
url: str,
body: _TYPE_BODY | None = None,
headers: typing.Mapping[str, str] | None = None,
retries: Retry | bool | int | None = None,
redirect: bool = True,
assert_same_host: bool = True,
timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
pool_timeout: int | None = None,
release_conn: bool | None = None,
chunked: bool = False,
body_pos: _TYPE_BODY_POSITION | None = None,
preload_content: bool = True,
decode_content: bool = True,
**response_kw: typing.Any,
) -> BaseHTTPResponse:
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method
such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param bool preload_content:
If True, the response's body will be preloaded into memory.
:param bool decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of ``preload_content``
which defaults to ``True``.
:param bool chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = preload_content
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = to_str(_encode_target(url))
else:
url = to_str(parsed_url.url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
headers = headers.copy() # type: ignore[attr-defined]
headers.update(self.proxy_headers) # type: ignore[union-attr]
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment]
# Is this a closed/new connection that requires CONNECT tunnelling?
if self.proxy is not None and http_tunnel_required and conn.is_closed:
try:
self._prepare_proxy(conn)
except (BaseSSLError, OSError, SocketTimeout) as e:
self._raise_timeout(
err=e, url=self.proxy.url, timeout_value=conn.timeout
)
raise
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Make the request on the HTTPConnection object
> response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
retries=retries,
response_conn=response_conn,
preload_content=preload_content,
decode_content=decode_content,
**response_kw,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7fb750bfd7e0>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E http.client.RemoteDisconnected: Remote end closed connection without response
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: RemoteDisconnected
During handling of the above exception, another exception occurred:
self = <requests.adapters.HTTPAdapter object at 0x7fb750bfe110>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
> resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:589:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:847: in urlopen
retries = retries.increment(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/retry.py:470: in increment
raise reraise(type(error), error, _stacktrace)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/util.py:38: in reraise
raise value.with_traceback(tb)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7fb750bfd7e0>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: ProtocolError
During handling of the above exception, another exception occurred:
cmr_mode = 'https://cmr.earthdata.nasa.gov/search/'
collection_concept_id = 'C2799465526-POCLOUD', env = 'ops'
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.fixture(scope="function")
def collection_variables(cmr_mode, collection_concept_id, env, bearer_token):
collection_query = cmr.queries.CollectionQuery(mode=cmr_mode)
variable_query = cmr.queries.VariableQuery(mode=cmr_mode)
collection_res = collection_query.concept_id(collection_concept_id).token(bearer_token).get()[0]
collection_associations = collection_res.get("associations")
variable_concept_ids = collection_associations.get("variables")
if variable_concept_ids is None and env == 'uat':
pytest.skip('There are no umm-v associated with this collection in UAT')
variables = []
for i in range(0, len(variable_concept_ids), 40):
variables_items = variable_query \
.concept_id(variable_concept_ids[i:i + 40]) \
.token(bearer_token) \
.format('umm_json') \
> .get_all()
verify_collection.py:159:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:127: in get_all
return self.get(self.hits())
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:949: in get
response = requests.get(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:73: in get
return request("get", url, params=params, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:59: in request
return session.request(method=method, url=url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <requests.adapters.HTTPAdapter object at 0x7fb750bfe110>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
except (ProtocolError, OSError) as err:
> raise ConnectionError(err, request=request)
E requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:604: ConnectionError
--------------------------------- Captured Log ---------------------------------
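The setup failure above is a transport-level disconnect: the captured state shows requests running with its default retry policy (`Retry(total=0, connect=None, read=False)`), so a single RemoteDisconnected from cmr.earthdata.nasa.gov aborts the `collection_variables` fixture. Below is a minimal sketch of a requests Session that retries connection-level failures with backoff; the function name and retry values are illustrative assumptions, and since python-cmr currently calls `requests.get` directly, wiring such a session into the queries would need additional adaptation.

```python
# Hedged sketch (not project code): a requests Session whose HTTPAdapter retries
# transient connection failures such as "Remote end closed connection without
# response" instead of failing on the first attempt.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry


def make_retrying_session(total_retries: int = 3, backoff: float = 0.5) -> requests.Session:
    """Build a Session that retries connect/read failures and common gateway errors."""
    retry = Retry(
        total=total_retries,
        connect=total_retries,
        read=total_retries,
        backoff_factor=backoff,            # 0.5s, 1s, 2s, ... between attempts
        status_forcelist=(502, 503, 504),  # also retry typical gateway errors
        allowed_methods=frozenset({"GET"}),
    )
    session = requests.Session()
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("https://", adapter)
    session.mount("http://", adapter)
    return session
```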
Check failure on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1918210023-GES_DISC] (tests.verify_collection) with error
test-results/ops_test_report.xml [took 0s]
Raw output
failed on setup with "requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))"
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f0dc40a8520>
method = 'GET'
url = '/search/variables.umm_json?concept_id%5B%5D=V2623764904-GES_DISC&concept_id%5B%5D=V2623781294-GES_DISC&concept_id%5B%...623766405-GES_DISC&concept_id%5B%5D=V2623785399-GES_DISC&concept_id%5B%5D=V2623776985-GES_DISC&page_size=40&page_num=1'
body = None
headers = {'User-Agent': 'python-requests/2.32.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None, preload_content = False
decode_content = False, response_kw = {}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/search/variables.umm_json', query='concept_id%5B%5D=V26237649...SC&concept_id%5B%5D=V2623785399-GES_DISC&concept_id%5B%5D=V2623776985-GES_DISC&page_size=40&page_num=1', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False
def urlopen( # type: ignore[override]
self,
method: str,
url: str,
body: _TYPE_BODY | None = None,
headers: typing.Mapping[str, str] | None = None,
retries: Retry | bool | int | None = None,
redirect: bool = True,
assert_same_host: bool = True,
timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
pool_timeout: int | None = None,
release_conn: bool | None = None,
chunked: bool = False,
body_pos: _TYPE_BODY_POSITION | None = None,
preload_content: bool = True,
decode_content: bool = True,
**response_kw: typing.Any,
) -> BaseHTTPResponse:
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method
such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param bool preload_content:
If True, the response's body will be preloaded into memory.
:param bool decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of ``preload_content``
which defaults to ``True``.
:param bool chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = preload_content
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = to_str(_encode_target(url))
else:
url = to_str(parsed_url.url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
headers = headers.copy() # type: ignore[attr-defined]
headers.update(self.proxy_headers) # type: ignore[union-attr]
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment]
# Is this a closed/new connection that requires CONNECT tunnelling?
if self.proxy is not None and http_tunnel_required and conn.is_closed:
try:
self._prepare_proxy(conn)
except (BaseSSLError, OSError, SocketTimeout) as e:
self._raise_timeout(
err=e, url=self.proxy.url, timeout_value=conn.timeout
)
raise
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Make the request on the HTTPConnection object
> response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
retries=retries,
response_conn=response_conn,
preload_content=preload_content,
decode_content=decode_content,
**response_kw,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7f0dc40a9a80>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E http.client.RemoteDisconnected: Remote end closed connection without response
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: RemoteDisconnected
During handling of the above exception, another exception occurred:
self = <requests.adapters.HTTPAdapter object at 0x7f0dc40aa2c0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
> resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:589:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:847: in urlopen
retries = retries.increment(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/retry.py:470: in increment
raise reraise(type(error), error, _stacktrace)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/util.py:38: in reraise
raise value.with_traceback(tb)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7f0dc40a9a80>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: ProtocolError
During handling of the above exception, another exception occurred:
cmr_mode = 'https://cmr.earthdata.nasa.gov/search/'
collection_concept_id = 'C1918210023-GES_DISC', env = 'ops'
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.fixture(scope="function")
def collection_variables(cmr_mode, collection_concept_id, env, bearer_token):
collection_query = cmr.queries.CollectionQuery(mode=cmr_mode)
variable_query = cmr.queries.VariableQuery(mode=cmr_mode)
collection_res = collection_query.concept_id(collection_concept_id).token(bearer_token).get()[0]
collection_associations = collection_res.get("associations")
variable_concept_ids = collection_associations.get("variables")
if variable_concept_ids is None and env == 'uat':
pytest.skip('There are no umm-v associated with this collection in UAT')
variables = []
for i in range(0, len(variable_concept_ids), 40):
variables_items = variable_query \
.concept_id(variable_concept_ids[i:i + 40]) \
.token(bearer_token) \
.format('umm_json') \
> .get_all()
verify_collection.py:159:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:127: in get_all
return self.get(self.hits())
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:949: in get
response = requests.get(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:73: in get
return request("get", url, params=params, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:59: in request
return session.request(method=method, url=url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <requests.adapters.HTTPAdapter object at 0x7f0dc40aa2c0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
except (ProtocolError, OSError) as err:
> raise ConnectionError(err, request=request)
E requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:604: ConnectionError
--------------------------------- Captured Log ---------------------------------
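Because the error surfaces inside the `collection_variables` fixture while paging through variable concept-ids in chunks of 40, another option is to retry at the fixture level rather than at the transport level. The helper below is a hypothetical sketch built from the chained python-cmr calls visible in the fixture above; it is not code from verify_collection.py, and the attempt count and delay are assumptions.

```python
# Hedged sketch: retry one batch of the UMM-V fetch on transient ConnectionError.
import time
import requests


def fetch_variable_batch(variable_query, concept_ids, bearer_token, attempts=3, delay=2.0):
    """Fetch one batch of UMM-V records, retrying transient connection errors."""
    last_err = None
    for attempt in range(attempts):
        try:
            return (variable_query
                    .concept_id(concept_ids)
                    .token(bearer_token)
                    .format('umm_json')
                    .get_all())
        except requests.exceptions.ConnectionError as err:
            last_err = err
            time.sleep(delay * (attempt + 1))  # simple linear backoff
    raise last_err
```

Inside the fixture's loop over 40-id chunks, the direct `get_all()` call could be replaced with a call to a helper like this one.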
Check failure on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1918210292-GES_DISC] (tests.verify_collection) with error
test-results/ops_test_report.xml [took 19s]
Raw output
failed on setup with "requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))"
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f4486be57e0>
method = 'GET'
url = '/search/variables.umm_json?concept_id%5B%5D=V2621875587-GES_DISC&concept_id%5B%5D=V2621874403-GES_DISC&concept_id%5B%...21878704-GES_DISC&concept_id%5B%5D=V2621867868-GES_DISC&concept_id%5B%5D=V2621882955-GES_DISC&page_size=40&page_num=26'
body = None
headers = {'User-Agent': 'python-requests/2.32.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None, preload_content = False
decode_content = False, response_kw = {}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/search/variables.umm_json', query='concept_id%5B%5D=V26218755...C&concept_id%5B%5D=V2621867868-GES_DISC&concept_id%5B%5D=V2621882955-GES_DISC&page_size=40&page_num=26', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False
def urlopen( # type: ignore[override]
self,
method: str,
url: str,
body: _TYPE_BODY | None = None,
headers: typing.Mapping[str, str] | None = None,
retries: Retry | bool | int | None = None,
redirect: bool = True,
assert_same_host: bool = True,
timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
pool_timeout: int | None = None,
release_conn: bool | None = None,
chunked: bool = False,
body_pos: _TYPE_BODY_POSITION | None = None,
preload_content: bool = True,
decode_content: bool = True,
**response_kw: typing.Any,
) -> BaseHTTPResponse:
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method
such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param bool preload_content:
If True, the response's body will be preloaded into memory.
:param bool decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of ``preload_content``
which defaults to ``True``.
:param bool chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = preload_content
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = to_str(_encode_target(url))
else:
url = to_str(parsed_url.url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
headers = headers.copy() # type: ignore[attr-defined]
headers.update(self.proxy_headers) # type: ignore[union-attr]
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment]
# Is this a closed/new connection that requires CONNECT tunnelling?
if self.proxy is not None and http_tunnel_required and conn.is_closed:
try:
self._prepare_proxy(conn)
except (BaseSSLError, OSError, SocketTimeout) as e:
self._raise_timeout(
err=e, url=self.proxy.url, timeout_value=conn.timeout
)
raise
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Make the request on the HTTPConnection object
> response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
retries=retries,
response_conn=response_conn,
preload_content=preload_content,
decode_content=decode_content,
**response_kw,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7f4486be64a0>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E http.client.RemoteDisconnected: Remote end closed connection without response
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: RemoteDisconnected
During handling of the above exception, another exception occurred:
self = <requests.adapters.HTTPAdapter object at 0x7f4486be5ae0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
> resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:589:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:847: in urlopen
retries = retries.increment(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/retry.py:470: in increment
raise reraise(type(error), error, _stacktrace)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/util.py:38: in reraise
raise value.with_traceback(tb)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7f4486be64a0>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: ProtocolError
During handling of the above exception, another exception occurred:
cmr_mode = 'https://cmr.earthdata.nasa.gov/search/'
collection_concept_id = 'C1918210292-GES_DISC', env = 'ops'
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.fixture(scope="function")
def collection_variables(cmr_mode, collection_concept_id, env, bearer_token):
collection_query = cmr.queries.CollectionQuery(mode=cmr_mode)
variable_query = cmr.queries.VariableQuery(mode=cmr_mode)
collection_res = collection_query.concept_id(collection_concept_id).token(bearer_token).get()[0]
collection_associations = collection_res.get("associations")
variable_concept_ids = collection_associations.get("variables")
if variable_concept_ids is None and env == 'uat':
pytest.skip('There are no umm-v associated with this collection in UAT')
variables = []
for i in range(0, len(variable_concept_ids), 40):
variables_items = variable_query \
.concept_id(variable_concept_ids[i:i + 40]) \
.token(bearer_token) \
.format('umm_json') \
> .get_all()
verify_collection.py:159:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:127: in get_all
return self.get(self.hits())
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:949: in get
response = requests.get(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:73: in get
return request("get", url, params=params, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:59: in request
return session.request(method=method, url=url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <requests.adapters.HTTPAdapter object at 0x7f4486be5ae0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
except (ProtocolError, OSError) as err:
> raise ConnectionError(err, request=request)
E requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:604: ConnectionError
--------------------------------- Captured Log ---------------------------------
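Note: the ConnectionError above was raised mid-pagination (page_num=17) of the UMM-Var lookup in the collection_variables fixture, so the whole test setup failed on a single dropped CMR connection. A minimal retry sketch, assuming the same python-cmr VariableQuery the fixture already builds; the function name, retry count, and backoff are illustrative and not part of the test code:
import time
import cmr.queries
import requests

def get_variables_with_retry(cmr_mode, concept_ids, bearer_token, retries=3, backoff=5):
    """Fetch one batch of UMM-Var records, retrying on dropped connections."""
    for attempt in range(retries):
        try:
            # rebuild the query each attempt so no stale state carries over
            query = cmr.queries.VariableQuery(mode=cmr_mode)
            return query.concept_id(concept_ids) \
                        .token(bearer_token) \
                        .format('umm_json') \
                        .get_all()
        except requests.exceptions.ConnectionError:
            if attempt == retries - 1:
                raise
            time.sleep(backoff * (attempt + 1))
The fixture's paging loop could then call get_variables_with_retry(cmr_mode, variable_concept_ids[i:i + 40], bearer_token) instead of issuing the request inline.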
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2087131083-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 33s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2087131083-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087131083-GES_DISC', 'concept-id': 'G3149620162-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087131083-GES_DISC'}]}, 'meta': {'association-details': {'collect.../variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_index_354_388_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C2087131080')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fa2074f9e40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fa2074f9940>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fa2074f9840>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3149620162-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2087131083-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.212175%3A-60.042825%29&subset=lon%28-46.262525%3A30.743524999999995%29&granuleId=G3149620162-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 801055aa-588f-4c52-a1b4-f1bde3bb3110
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C2087131080/69507949_S5P_OFFL_L2_AER_AI_20240705T145702_20240705T163831_34862_03_020600_20240707T065841_subsetted.nc4
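Note: the IndexError above comes from group_walk picking the first non-time subgroup of the lat/lon group and assuming it still contains variables; in this subsetted S5P file that subgroup is empty. A minimal guard sketch, operating on the same netCDF4 group objects group_walk already walks (the function name and return convention are illustrative):
def pick_science_variable(lat_nc_group, lat_group):
    """Return (group_path, variable_name) for the first non-time subgroup
    that still has variables after subsetting, or None if none do."""
    for name, sub in lat_nc_group.groups.items():
        if 'time' in name.lower():
            continue
        if not sub.variables:  # subgroup emptied by the subset, skip it
            continue
        return lat_group + '/' + name, list(sub.variables)[0]
    return None
group_walk could use this in place of the two indexed lookups and only attach the 'science_test' variable when a candidate group is found. The same IndexError recurs below for the other S5P GES_DISC collections.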
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2832224417-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 31s]
Raw output
Failed: Unable to find latitude and longitude variables.
collection_concept_id = 'C2832224417-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2832224417-POCLOUD', 'concept-id': 'G3071779083-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2832224417-POCLOUD'}]}, 'meta': {'association-details': {'collecti...me': 'look', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2832224410')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
> lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
verify_collection.py:398:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
dataset = <xarray.Dataset> Size: 232B
Dimensions: (ydim_grid: 1, xdim_grid: 1, look: 1,
... -0.43
history_json: [{"date_time": "2024-...
file_to_subset = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2832224410/69507952_RSS_SMAP_SSS_L2C_r49943_20240607T102958_2024159_NRT_V06.0_001.nc4')
collection_variable_list = [{'associations': {'collections': [{'concept-id': 'C2832224417-POCLOUD'}]}, 'meta': {'association-details': {'collecti...me': 'look', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
def get_lat_lon_var_names(dataset: xarray.Dataset, file_to_subset: str, collection_variable_list: List[Dict]):
# Try getting it from UMM-Var first
lat_var_json, lon_var_json, _ = get_coordinate_vars_from_umm(collection_variable_list)
lat_var_name = get_variable_name_from_umm_json(lat_var_json)
lon_var_name = get_variable_name_from_umm_json(lon_var_json)
if lat_var_name and lon_var_name:
return lat_var_name, lon_var_name
logging.warning("Unable to find lat/lon vars in UMM-Var")
# If that doesn't work, try using cf-xarray to infer lat/lon variable names
try:
latitude = [lat for lat in dataset.cf.coordinates['latitude']
if lat.lower() in VALID_LATITUDE_VARIABLE_NAMES][0]
longitude = [lon for lon in dataset.cf.coordinates['longitude']
if lon.lower() in VALID_LONGITUDE_VARIABLE_NAMES][0]
return latitude, longitude
except:
logging.warning("Unable to find lat/lon vars using cf_xarray")
# If that still doesn't work, try using l2ss-py directly
try:
# file not able to be flattened unless locally downloaded
shutil.copy(file_to_subset, 'my_copy_file.nc')
nc_dataset = netCDF4.Dataset('my_copy_file.nc', mode='r+')
# flatten the dataset
nc_dataset_flattened = podaac.subsetter.group_handling.transform_grouped_dataset(nc_dataset, 'my_copy_file.nc')
args = {
'decode_coords': False,
'mask_and_scale': False,
'decode_times': False
}
with xarray.open_dataset(
xarray.backends.NetCDF4DataStore(nc_dataset_flattened),
**args
) as flat_dataset:
# use l2ss-py to find lat and lon names
lat_var_names, lon_var_names = podaac.subsetter.subset.compute_coordinate_variable_names(flat_dataset)
os.remove('my_copy_file.nc')
if lat_var_names and lon_var_names:
lat_var_name = lat_var_names.split('__')[-1] if isinstance(lat_var_names, str) else lat_var_names[0].split('__')[-1]
lon_var_name = lon_var_names.split('__')[-1] if isinstance(lon_var_names, str) else lon_var_names[0].split('__')[-1]
return lat_var_name, lon_var_name
except ValueError:
logging.warning("Unable to find lat/lon vars using l2ss-py")
# Still no dice, try using the 'units' variable attribute
for coord_name, coord in dataset.coords.items():
if 'units' not in coord.attrs:
continue
if coord.attrs['units'] == 'degrees_north' and lat_var_name is None:
lat_var_name = coord_name
if coord.attrs['units'] == 'degrees_east' and lon_var_name is None:
lon_var_name = coord_name
if lat_var_name and lon_var_name:
return lat_var_name, lon_var_name
else:
logging.warning("Unable to find lat/lon vars using 'units' attribute")
# Out of options, fail the test because we couldn't determine lat/lon variables
> pytest.fail(f"Unable to find latitude and longitude variables.")
E Failed: Unable to find latitude and longitude variables.
verify_collection.py:358: Failed
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3071779083-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2832224417-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.28062499999999%3A82.08362499999998%29&subset=lon%284.504874999999998%3A175.500125%29&granuleId=G3071779083-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job 552a9b1e-d1f6-44fb-8dbc-06747086343c
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2832224410/69507952_RSS_SMAP_SSS_L2C_r49943_20240607T102958_2024159_NRT_V06.0_001.nc4
WARNING root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
WARNING root:verify_collection.py:342 Unable to find lat/lon vars using l2ss-py
WARNING root:verify_collection.py:355 Unable to find lat/lon vars using 'units' attribute
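Note: for this subsetted RSS SMAP L2C file the UMM-Var and cf-xarray lookups, the l2ss-py flattening path (which hit a ValueError), and the 'units' check all fell through; the 'units' check in particular only inspects the root-level dataset.coords, so lat/lon variables stored inside groups or as plain data variables are missed. A minimal sketch of one more fallback that walks every group with netCDF4 and matches on the 'units' attribute; the function name and returned path convention are assumptions, not existing helpers:
import netCDF4

def find_lat_lon_by_units(file_path):
    """Scan every group for variables whose 'units' marks them as lat/lon."""
    lat_name = lon_name = None
    def walk(group, prefix=''):
        nonlocal lat_name, lon_name
        for var_name, var in group.variables.items():
            units = getattr(var, 'units', '')
            if units == 'degrees_north' and lat_name is None:
                lat_name = prefix + var_name
            if units == 'degrees_east' and lon_name is None:
                lon_name = prefix + var_name
        for sub_name, sub in group.groups.items():
            walk(sub, prefix + sub_name + '/')
    with netCDF4.Dataset(file_path) as nc:
        walk(nc)
    return lat_name, lon_name
The same "Unable to find latitude and longitude variables" failure appears again below for C2936721448-POCLOUD, which produces a similarly tiny L2C output.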
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2087216530-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 31s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2087216530-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087216530-GES_DISC', 'concept-id': 'G3149620419-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087216530-GES_DISC'}]}, 'meta': {'association-details': {'collect...Var', 'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/layer'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2087216530')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f4485acf140>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f4485631140>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f4485631040>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3149620419-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2087216530-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.321025%3A-60.221975%29&subset=lon%28-158.01364999999998%3A129.35565000000003%29&granuleId=G3149620419-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 875a7eda-34a9-46f7-b903-e085b7dfabe9
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2087216530/69507966_S5P_OFFL_L2_CH4_20240705T030633_20240705T044803_34855_03_020600_20240706T232649_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2832221740-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 52s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2832221740-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2832221740-POCLOUD', 'concept-id': 'G3059748792-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2832221740-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2832221740')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
> harmony_client.wait_for_processing(job_id, show_progress=True)
verify_collection.py:386:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <harmony.harmony.Client object at 0x7f2d963354e0>
job_id = '77386574-ec9f-40bc-b181-cb5049e5056a', show_progress = True
def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
"""Retrieve a submitted job's completion status in percent.
Args:
job_id: UUID string for the job you wish to interrogate.
Returns:
The job's processing progress as a percentage.
:raises
Exception: This can happen if an invalid job_id is provided or Harmony services
can't be reached.
"""
# How often to refresh the screen for progress updates and animating spinners.
ui_update_interval = 0.33 # in seconds
running_w_errors_logged = False
intervals = round(self.check_interval / ui_update_interval)
if show_progress:
with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
progress = 0
while progress < 100:
progress, status, message = self.progress(job_id)
if status == 'failed':
> raise ProcessingFailedException(job_id, message)
E harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3059748792-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2832221740-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.29042474999999%3A82.03317474999999%29&subset=lon%28-171.0%3A171.0%29&granuleId=G3059748792-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job 77386574-ec9f-40bc-b181-cb5049e5056a
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2205121520-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 8m 52s]
Raw output
Exception: ('Internal Server Error', 'Error: Internal server error')
collection_concept_id = 'C2205121520-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2205121520-POCLOUD', 'concept-id': 'G2276353626-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2205121520-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ample', 'Size': 673789, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C2205121520')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
> harmony_client.wait_for_processing(job_id, show_progress=True)
verify_collection.py:386:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:984: in wait_for_processing
progress, status, message = self.progress(job_id)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:960: in progress
self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <harmony.harmony.Client object at 0x7f0dc2d5f9a0>
response = <Response [500]>
def _handle_error_response(self, response: Response):
"""Raises the appropriate exception based on the response
received from Harmony. Tries to pull out an error message
from a Harmony JSON response when possible.
Args:
response: The Response from Harmony
Raises:
Exception with a Harmony error message or a more generic
HTTPError
"""
if 'application/json' in response.headers.get('Content-Type', ''):
exception_message = None
try:
response_json = response.json()
if hasattr(response_json, 'get'):
exception_message = response_json.get('description')
if not exception_message:
exception_message = response_json.get('error')
except JSONDecodeError:
pass
if exception_message:
> raise Exception(response.reason, exception_message)
E Exception: ('Internal Server Error', 'Error: Internal server error')
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G2276353626-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2205121520-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-36.03425%3A36.11825%29&subset=lon%284.500975000000011%3A175.500025%29&granuleId=G2276353626-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job 3cc9f1d8-a630-4904-9199-d40c9d2c6f79
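Note: this failure is a 500 returned by the Harmony status endpoint while the test polled the job, not a job failure reported by the service. A minimal sketch of re-polling on that specific transient error, using the harmony.Client the test already has; the retry count and wait are illustrative:
import time

def wait_with_retries(harmony_client, job_id, retries=3, wait=30):
    """Re-poll a Harmony job when the status call 500s; re-raise anything else."""
    for attempt in range(retries):
        try:
            harmony_client.wait_for_processing(job_id, show_progress=True)
            return
        except Exception as err:
            # only swallow the transient server error seen above
            if 'Internal Server Error' not in str(err) or attempt == retries - 1:
                raise
            time.sleep(wait)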
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1627516287-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 33s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516287-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516287-GES_DISC', 'concept-id': 'G2084463561-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516287-GES_DISC'}]}, 'meta': {'association-details': {'collect...'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/corner', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1627516280')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f878ccd1a40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f878ccd1540>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f878ccd1440>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G2084463561-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516287-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.99937499999999%3A-59.951625%29&subset=lon%28-76.6214%3A-1.5866000000000042%29&granuleId=G2084463561-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 66adee72-5fa4-4923-9486-b0ee7ccd62bc
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1627516280/69508018_S5P_OFFL_L2_CO_20210701T170324_20210701T184453_19257_01_010400_20210703T065107_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1442068510-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 47s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068510-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068510-GES_DISC', 'concept-id': 'G1628685468-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068510-GES_DISC'}]}, 'meta': {'association-details': {'collect...mm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/averaging_kernel', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C1442068510')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9916e0b440>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9916e0ad40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f9916e0ac40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1628685468-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068510-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1628685468-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 1ad7fbc3-6f1d-4c23-afe6-8eaa898f9481
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C1442068510/69508075_S5P_OFFL_L2_HCHO_20190806T003836_20190806T022006_09387_01_010107_20190812T015759_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2936721448-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 22s]
Raw output
Failed: Unable to find latitude and longitude variables.
collection_concept_id = 'C2936721448-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2936721448-POCLOUD', 'concept-id': 'G3062447313-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2936721448-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C2936721440')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
> lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
verify_collection.py:398:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
dataset = <xarray.Dataset> Size: 240B
Dimensions: (ydim_grid: 1, xdim_grid: 1, look: 1,
... -0.43
history_json: [{"date_time": "2...
file_to_subset = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C2936721440/69508079_RSS_SMAP_SSS_L2C_r47700_20240106T014035_2024006_FNL_V05.3.nc4')
collection_variable_list = [{'associations': {'collections': [{'concept-id': 'C2936721448-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
def get_lat_lon_var_names(dataset: xarray.Dataset, file_to_subset: str, collection_variable_list: List[Dict]):
# Try getting it from UMM-Var first
lat_var_json, lon_var_json, _ = get_coordinate_vars_from_umm(collection_variable_list)
lat_var_name = get_variable_name_from_umm_json(lat_var_json)
lon_var_name = get_variable_name_from_umm_json(lon_var_json)
if lat_var_name and lon_var_name:
return lat_var_name, lon_var_name
logging.warning("Unable to find lat/lon vars in UMM-Var")
# If that doesn't work, try using cf-xarray to infer lat/lon variable names
try:
latitude = [lat for lat in dataset.cf.coordinates['latitude']
if lat.lower() in VALID_LATITUDE_VARIABLE_NAMES][0]
longitude = [lon for lon in dataset.cf.coordinates['longitude']
if lon.lower() in VALID_LONGITUDE_VARIABLE_NAMES][0]
return latitude, longitude
except:
logging.warning("Unable to find lat/lon vars using cf_xarray")
# If that still doesn't work, try using l2ss-py directly
try:
# file not able to be flattened unless locally downloaded
shutil.copy(file_to_subset, 'my_copy_file.nc')
nc_dataset = netCDF4.Dataset('my_copy_file.nc', mode='r+')
# flatten the dataset
nc_dataset_flattened = podaac.subsetter.group_handling.transform_grouped_dataset(nc_dataset, 'my_copy_file.nc')
args = {
'decode_coords': False,
'mask_and_scale': False,
'decode_times': False
}
with xarray.open_dataset(
xarray.backends.NetCDF4DataStore(nc_dataset_flattened),
**args
) as flat_dataset:
# use l2ss-py to find lat and lon names
lat_var_names, lon_var_names = podaac.subsetter.subset.compute_coordinate_variable_names(flat_dataset)
os.remove('my_copy_file.nc')
if lat_var_names and lon_var_names:
lat_var_name = lat_var_names.split('__')[-1] if isinstance(lat_var_names, str) else lat_var_names[0].split('__')[-1]
lon_var_name = lon_var_names.split('__')[-1] if isinstance(lon_var_names, str) else lon_var_names[0].split('__')[-1]
return lat_var_name, lon_var_name
except ValueError:
logging.warning("Unable to find lat/lon vars using l2ss-py")
# Still no dice, try using the 'units' variable attribute
for coord_name, coord in dataset.coords.items():
if 'units' not in coord.attrs:
continue
if coord.attrs['units'] == 'degrees_north' and lat_var_name is None:
lat_var_name = coord_name
if coord.attrs['units'] == 'degrees_east' and lon_var_name is None:
lon_var_name = coord_name
if lat_var_name and lon_var_name:
return lat_var_name, lon_var_name
else:
logging.warning("Unable to find lat/lon vars using 'units' attribute")
# Out of options, fail the test because we couldn't determine lat/lon variables
> pytest.fail(f"Unable to find latitude and longitude variables.")
E Failed: Unable to find latitude and longitude variables.
verify_collection.py:358: Failed
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3062447313-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2936721448-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.29044999999999%3A82.08044999999998%29&subset=lon%284.51755%3A175.50045%29&granuleId=G3062447313-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job f4a4fa23-972a-4172-bcf1-83feb771184c
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C2936721440/69508079_RSS_SMAP_SSS_L2C_r47700_20240106T014035_2024006_FNL_V05.3.nc4
WARNING root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
WARNING root:verify_collection.py:342 Unable to find lat/lon vars using l2ss-py
WARNING root:verify_collection.py:355 Unable to find lat/lon vars using 'units' attribute
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1627516296-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 11s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516296-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516296-GES_DISC', 'concept-id': 'G1902371245-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516296-GES_DISC'}]}, 'meta': {'association-details': {'collect...umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/sulfurdioxide_total_vertical_column_precision', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1627516290')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe67ac53840>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe67c3bae40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fe67c3bb140>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1902371245-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516296-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-78.0453%3A-60.6907%29&subset=lon%28-164.82465%3A-84.66935000000001%29&granuleId=G1902371245-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job d852284e-3fea-40ec-840b-18a9435ca248
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1627516290/69508091_S5P_OFFL_L2_SO2_20200712T224601_20200713T002730_14238_01_010108_20200715T211427_subsetted.nc4
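Editor note: the IndexError above is raised at sci_var = list(subsetted_ds_data.variables.keys())[0]. The first non-time subgroup under the latitude group (PRODUCT/SUPPORT_DATA in this S5P product) can contain only nested groups, so its own variable list is empty after subsetting. A minimal guard, sketched below with an illustrative helper name, would skip empty candidates instead of indexing [0] on the first one.

import xarray as xr

def pick_science_variable(filepath, lat_nc_group, lat_group_path):
    """Return (group_path, variable_name) for the first non-empty, non-time
    subgroup under the group holding latitude, or (None, None).

    Sketch only: a guard for the empty-group case behind the IndexError above.
    """
    for sub_name in lat_nc_group.groups:
        if 'time' in sub_name.lower():
            continue
        sub_path = f"{lat_group_path}/{sub_name}" if lat_group_path else sub_name
        candidate = xr.open_dataset(filepath, group=sub_path, decode_times=False)
        names = list(candidate.variables)
        if names:  # only use groups that actually carry a variable
            return sub_path, names[0]
        candidate.close()
    return None, None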
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1266136113-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 2m 21s]
Raw output
Exception: ('Internal Server Error', 'Error: Internal server error')
collection_concept_id = 'C1266136113-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1266136113-GES_DISC', 'concept-id': 'G3148878506-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1266136113-GES_DISC'}]}, 'meta': {'association-details': {'collect...acted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -1.2676506002282294e+30}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1266136110')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
> harmony_client.wait_for_processing(job_id, show_progress=True)
verify_collection.py:386:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:984: in wait_for_processing
progress, status, message = self.progress(job_id)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:960: in progress
self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <harmony.harmony.Client object at 0x7fb74f4d5e70>
response = <Response [500]>
def _handle_error_response(self, response: Response):
"""Raises the appropriate exception based on the response
received from Harmony. Tries to pull out an error message
from a Harmony JSON response when possible.
Args:
response: The Response from Harmony
Raises:
Exception with a Harmony error message or a more generic
HTTPError
"""
if 'application/json' in response.headers.get('Content-Type', ''):
exception_message = None
try:
response_json = response.json()
if hasattr(response_json, 'get'):
exception_message = response_json.get('description')
if not exception_message:
exception_message = response_json.get('error')
except JSONDecodeError:
pass
if exception_message:
> raise Exception(response.reason, exception_message)
E Exception: ('Internal Server Error', 'Error: Internal server error')
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3148878506-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1266136113-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&granuleId=G3148878506-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 01907996-a884-4ae2-a4d6-5ed866394dce
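Editor note: here the failure is not in the subsetter output at all — harmony-py raises a plain Exception when the job-status poll returns HTTP 500, so one transient server error aborts the test. If transient 5xx responses are worth tolerating, a thin retry wrapper like the sketch below could sit around wait_for_processing; the attempt count and backoff are arbitrary and the wrapper is not part of the existing test.

import logging
import time

def wait_with_retries(harmony_client, job_id, attempts=3, backoff_seconds=30):
    """Poll a Harmony job, retrying on transient Internal Server Error responses.
    Sketch only; the retry policy values are arbitrary."""
    for attempt in range(1, attempts + 1):
        try:
            harmony_client.wait_for_processing(job_id, show_progress=True)
            return
        except Exception as err:  # harmony-py wraps 5xx responses in a plain Exception
            if attempt == attempts or 'Internal Server Error' not in str(err):
                raise
            logging.warning("Transient Harmony error (%s); retrying in %s s", err, backoff_seconds)
            time.sleep(backoff_seconds)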
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2036882055-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 5m 7s]
Raw output
Exception: ('Internal Server Error', 'Error: Internal server error')
collection_concept_id = 'C2036882055-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2036882055-POCLOUD', 'concept-id': 'G2100462331-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2036882055-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ample', 'Size': 942342, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C2036882050')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
> harmony_client.wait_for_processing(job_id, show_progress=True)
verify_collection.py:386:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:984: in wait_for_processing
progress, status, message = self.progress(job_id)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:960: in progress
self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <harmony.harmony.Client object at 0x7ffa09fd88e0>
response = <Response [500]>
def _handle_error_response(self, response: Response):
"""Raises the appropriate exception based on the response
received from Harmony. Tries to pull out an error message
from a Harmony JSON response when possible.
Args:
response: The Response from Harmony
Raises:
Exception with a Harmony error message or a more generic
HTTPError
"""
if 'application/json' in response.headers.get('Content-Type', ''):
exception_message = None
try:
response_json = response.json()
if hasattr(response_json, 'get'):
exception_message = response_json.get('description')
if not exception_message:
exception_message = response_json.get('error')
except JSONDecodeError:
pass
if exception_message:
> raise Exception(response.reason, exception_message)
E Exception: ('Internal Server Error', 'Error: Internal server error')
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G2100462331-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2036882055-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-40.5108%3A40.4748%29&subset=lon%284.5%3A175.5%29&granuleId=G2100462331-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job ad4167bf-3386-45d6-bfea-b55c84874800
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1918209669-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 6s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1918209669-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1918209669-GES_DISC', 'concept-id': 'G3149620678-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1918209669-GES_DISC'}]}, 'meta': {'association-details': {'collect...tracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': 9.969209968386869e+36}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1918209660')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe67ac51940>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe67ac53240>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fe67ac53340>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3149620678-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1918209669-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.16642499999999%3A-60.006575%29&subset=lon%28-20.816174999999998%3A56.01317499999999%29&granuleId=G3149620678-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 337f808e-8e99-443b-8d9a-b59dd0ef2450
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1918209660/69508127_S5P_OFFL_L2_CLOUD_20240705T131532_20240705T145702_34861_03_020601_20240707T064247_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2901523432-POCLOUD] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 20s]
Raw output
Exception: ('Internal Server Error', 'Error: Internal server error')
collection_concept_id = 'C2901523432-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2901523432-POCLOUD', 'concept-id': 'G3028880890-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2901523432-POCLOUD'}]}, 'meta': {'association-details': {'collecti...cords', 'Size': 609048, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.9}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C2901523430')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
> harmony_client.wait_for_processing(job_id, show_progress=True)
verify_collection.py:386:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:984: in wait_for_processing
progress, status, message = self.progress(job_id)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:960: in progress
self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <harmony.harmony.Client object at 0x7fe67abcbdc0>
response = <Response [500]>
def _handle_error_response(self, response: Response):
"""Raises the appropriate exception based on the response
received from Harmony. Tries to pull out an error message
from a Harmony JSON response when possible.
Args:
response: The Response from Harmony
Raises:
Exception with a Harmony error message or a more generic
HTTPError
"""
if 'application/json' in response.headers.get('Content-Type', ''):
exception_message = None
try:
response_json = response.json()
if hasattr(response_json, 'get'):
exception_message = response_json.get('description')
if not exception_message:
exception_message = response_json.get('error')
except JSONDecodeError:
pass
if exception_message:
> raise Exception(response.reason, exception_message)
E Exception: ('Internal Server Error', 'Error: Internal server error')
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3028880890-POCLOUD for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2901523432-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-62.84155%3A62.84155%29&subset=lon%284.500975000000011%3A175.500025%29&granuleId=G3028880890-POCLOUD
INFO root:verify_collection.py:385 Submitted harmony job 73e9a4c1-9066-4ae9-8943-89afeb509250
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1627516288-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 59s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516288-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516288-GES_DISC', 'concept-id': 'G2085128317-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516288-GES_DISC'}]}, 'meta': {'association-details': {'collect... 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/ground_pixel', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C1627516280')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f47bffcc040>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f47bffcd740>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f47bffcdb40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G2085128317-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516288-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.99937499999999%3A-59.951625%29&subset=lon%28-76.6214%3A-1.5866000000000042%29&granuleId=G2085128317-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 4946b336-10c5-4897-93cf-f75d9047b44e
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C1627516280/69508208_S5P_OFFL_L2_CH4_20210701T170324_20210701T184453_19257_01_010400_20210703T102338_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2089270961-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 15s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2089270961-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2089270961-GES_DISC', 'concept-id': 'G3149121426-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2089270961-GES_DISC'}]}, 'meta': {'association-details': {'collect....0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/nitrogendioxide_tropospheric_column_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C2089270960')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9916e09140>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f9916e08840>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f9916e08740>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3149121426-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2089270961-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.672175%3A-59.616825%29&subset=lon%28-154.932325%3A-79.856675%29&granuleId=G3149121426-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job e04eb564-f13f-4854-b564-0b5e4091f459
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C2089270960/69508223_S5P_OFFL_L2_NO2_20240628T221451_20240628T235620_34767_03_020600_20240630T142358_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1627516300-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 1s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516300-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516300-GES_DISC', 'concept-id': 'G1902371249-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516300-GES_DISC'}]}, 'meta': {'association-details': {'collect...asa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/ozone_total_vertical_column_precision', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1627516300')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f878ccd3540>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f878ccd0740>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f878ccd0040>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1902371249-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516300-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-78.0453%3A-60.6907%29&subset=lon%28-164.82465%3A-84.66935000000001%29&granuleId=G1902371249-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 27ef9ba6-a003-4b01-9f28-d4db35d63990
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1627516300/69508224_S5P_OFFL_L2_O3_20200712T224601_20200713T002730_14238_01_010108_20200715T122623_subsetted.nc4
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2087216100-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 1m 1s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2087216100-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087216100-GES_DISC', 'concept-id': 'G3149619529-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087216100-GES_DISC'}]}, 'meta': {'association-details': {'collect...m/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_mid_pressure_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2087216100')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f4485522040>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f4485523340>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f4485633340>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3149619529-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2087216100-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.23865%3A-60.58135%29&subset=lon%2830.3893%3A106.93270000000001%29&granuleId=G3149619529-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job a3a91e86-bbbc-4842-bcb1-15bf7b372504
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2087216100/69508317_S5P_OFFL_L2_AER_LH_20240705T095232_20240705T113402_34859_03_020600_20240707T041613_subsetted.nc4
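The IndexError above comes from verify_collection.py:430, where the test takes the first variable of the candidate data group without checking that the subsetted group still contains any variables. Below is a minimal, hedged sketch of a more tolerant version of that step; it assumes the same xarray usage as the test, and the function name is illustrative rather than anything in the repository.
import xarray
def add_science_variable(ds_new, subsetted_filepath, data_group_path):
    """Attach the first variable found in data_group_path to ds_new, if any.
    Mirrors the block around verify_collection.py:430, but an empty group simply
    leaves ds_new unchanged instead of raising IndexError.
    """
    ds_data = xarray.open_dataset(subsetted_filepath, group=data_group_path,
                                  decode_times=False)
    sci_var = next(iter(ds_data.variables), None)  # None when the group has no variables
    if sci_var is not None:
        ds_new['science_test'] = ds_data[sci_var]
    return ds_new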
Check failure on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1239966818-GES_DISC] (tests.verify_collection) with error
test-results/ops_test_report.xml [took 0s]
Raw output
failed on setup with "requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))"
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f0dc2adcc70>
method = 'GET'
url = '/search/variables.umm_json?concept_id%5B%5D=V2623769314-GES_DISC&concept_id%5B%5D=V2623771149-GES_DISC&concept_id%5B%...623776517-GES_DISC&concept_id%5B%5D=V2623775254-GES_DISC&concept_id%5B%5D=V2623784349-GES_DISC&page_size=40&page_num=8'
body = None
headers = {'User-Agent': 'python-requests/2.32.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None, preload_content = False
decode_content = False, response_kw = {}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/search/variables.umm_json', query='concept_id%5B%5D=V26237693...SC&concept_id%5B%5D=V2623775254-GES_DISC&concept_id%5B%5D=V2623784349-GES_DISC&page_size=40&page_num=8', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False
def urlopen( # type: ignore[override]
self,
method: str,
url: str,
body: _TYPE_BODY | None = None,
headers: typing.Mapping[str, str] | None = None,
retries: Retry | bool | int | None = None,
redirect: bool = True,
assert_same_host: bool = True,
timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
pool_timeout: int | None = None,
release_conn: bool | None = None,
chunked: bool = False,
body_pos: _TYPE_BODY_POSITION | None = None,
preload_content: bool = True,
decode_content: bool = True,
**response_kw: typing.Any,
) -> BaseHTTPResponse:
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method
such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param bool preload_content:
If True, the response's body will be preloaded into memory.
:param bool decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of ``preload_content``
which defaults to ``True``.
:param bool chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = preload_content
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = to_str(_encode_target(url))
else:
url = to_str(parsed_url.url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
headers = headers.copy() # type: ignore[attr-defined]
headers.update(self.proxy_headers) # type: ignore[union-attr]
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment]
# Is this a closed/new connection that requires CONNECT tunnelling?
if self.proxy is not None and http_tunnel_required and conn.is_closed:
try:
self._prepare_proxy(conn)
except (BaseSSLError, OSError, SocketTimeout) as e:
self._raise_timeout(
err=e, url=self.proxy.url, timeout_value=conn.timeout
)
raise
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Make the request on the HTTPConnection object
> response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
retries=retries,
response_conn=response_conn,
preload_content=preload_content,
decode_content=decode_content,
**response_kw,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7f0dc2adcfa0>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E http.client.RemoteDisconnected: Remote end closed connection without response
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: RemoteDisconnected
During handling of the above exception, another exception occurred:
self = <requests.adapters.HTTPAdapter object at 0x7f0dc2adc5b0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
> resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:589:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:847: in urlopen
retries = retries.increment(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/retry.py:470: in increment
raise reraise(type(error), error, _stacktrace)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/util.py:38: in reraise
raise value.with_traceback(tb)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7f0dc2adcfa0>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: ProtocolError
During handling of the above exception, another exception occurred:
cmr_mode = 'https://cmr.earthdata.nasa.gov/search/'
collection_concept_id = 'C1239966818-GES_DISC', env = 'ops'
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.fixture(scope="function")
def collection_variables(cmr_mode, collection_concept_id, env, bearer_token):
collection_query = cmr.queries.CollectionQuery(mode=cmr_mode)
variable_query = cmr.queries.VariableQuery(mode=cmr_mode)
collection_res = collection_query.concept_id(collection_concept_id).token(bearer_token).get()[0]
collection_associations = collection_res.get("associations")
variable_concept_ids = collection_associations.get("variables")
if variable_concept_ids is None and env == 'uat':
pytest.skip('There are no umm-v associated with this collection in UAT')
variables = []
for i in range(0, len(variable_concept_ids), 40):
variables_items = variable_query \
.concept_id(variable_concept_ids[i:i + 40]) \
.token(bearer_token) \
.format('umm_json') \
> .get_all()
verify_collection.py:159:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:127: in get_all
return self.get(self.hits())
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:949: in get
response = requests.get(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:73: in get
return request("get", url, params=params, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:59: in request
return session.request(method=method, url=url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <requests.adapters.HTTPAdapter object at 0x7f0dc2adc5b0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
except (ProtocolError, OSError) as err:
> raise ConnectionError(err, request=request)
E requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:604: ConnectionError
--------------------------------- Captured Log ---------------------------------
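This setup error (and the identical one for C1627516285-GES_DISC below) aborts on the first RemoteDisconnected because, as the captured locals show, the CMR request goes out with retries disabled (Retry(total=0, ...)). A small sketch of retrying the paged variable lookup from the collection_variables fixture follows; it only assumes the python-cmr query object already built in the fixture, and the attempt count and backoff are illustrative.
import time
import requests
def get_all_with_retries(query, attempts=3, backoff_seconds=5):
    """Run a python-cmr query's get_all(), retrying transient connection aborts."""
    for attempt in range(1, attempts + 1):
        try:
            return query.get_all()
        except requests.exceptions.ConnectionError:
            if attempt == attempts:
                raise
            time.sleep(backoff_seconds * attempt)  # simple linear backoff
# In the fixture, the paged lookup would then read, e.g.:
# variables_items = get_all_with_retries(
#     variable_query.concept_id(variable_concept_ids[i:i + 40])
#                   .token(bearer_token)
#                   .format('umm_json'))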
Check failure on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1627516285-GES_DISC] (tests.verify_collection) with error
test-results/ops_test_report.xml [took 0s]
Raw output
failed on setup with "requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))"
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7ffa08a47580>
method = 'GET'
url = '/search/variables.umm_json?concept_id%5B%5D=V2623763673-GES_DISC&concept_id%5B%5D=V2623763530-GES_DISC&concept_id%5B%...623784779-GES_DISC&concept_id%5B%5D=V2623777402-GES_DISC&concept_id%5B%5D=V2623785718-GES_DISC&page_size=40&page_num=2'
body = None
headers = {'User-Agent': 'python-requests/2.32.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None, preload_content = False
decode_content = False, response_kw = {}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/search/variables.umm_json', query='concept_id%5B%5D=V26237636...SC&concept_id%5B%5D=V2623777402-GES_DISC&concept_id%5B%5D=V2623785718-GES_DISC&page_size=40&page_num=2', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False
def urlopen( # type: ignore[override]
self,
method: str,
url: str,
body: _TYPE_BODY | None = None,
headers: typing.Mapping[str, str] | None = None,
retries: Retry | bool | int | None = None,
redirect: bool = True,
assert_same_host: bool = True,
timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
pool_timeout: int | None = None,
release_conn: bool | None = None,
chunked: bool = False,
body_pos: _TYPE_BODY_POSITION | None = None,
preload_content: bool = True,
decode_content: bool = True,
**response_kw: typing.Any,
) -> BaseHTTPResponse:
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method
such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param bool preload_content:
If True, the response's body will be preloaded into memory.
:param bool decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of ``preload_content``
which defaults to ``True``.
:param bool chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = preload_content
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = to_str(_encode_target(url))
else:
url = to_str(parsed_url.url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
headers = headers.copy() # type: ignore[attr-defined]
headers.update(self.proxy_headers) # type: ignore[union-attr]
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment]
# Is this a closed/new connection that requires CONNECT tunnelling?
if self.proxy is not None and http_tunnel_required and conn.is_closed:
try:
self._prepare_proxy(conn)
except (BaseSSLError, OSError, SocketTimeout) as e:
self._raise_timeout(
err=e, url=self.proxy.url, timeout_value=conn.timeout
)
raise
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Make the request on the HTTPConnection object
> response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
retries=retries,
response_conn=response_conn,
preload_content=preload_content,
decode_content=decode_content,
**response_kw,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7ffa08a472e0>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E http.client.RemoteDisconnected: Remote end closed connection without response
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: RemoteDisconnected
During handling of the above exception, another exception occurred:
self = <requests.adapters.HTTPAdapter object at 0x7ffa08a47b80>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
> resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:589:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:847: in urlopen
retries = retries.increment(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/retry.py:470: in increment
raise reraise(type(error), error, _stacktrace)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/util.py:38: in reraise
raise value.with_traceback(tb)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <http.client.HTTPResponse object at 0x7ffa08a472e0>
def _read_status(self):
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
if len(line) > _MAXLINE:
raise LineTooLong("status line")
if self.debuglevel > 0:
print("reply:", repr(line))
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
> raise RemoteDisconnected("Remote end closed connection without"
" response")
E urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: ProtocolError
During handling of the above exception, another exception occurred:
cmr_mode = 'https://cmr.earthdata.nasa.gov/search/'
collection_concept_id = 'C1627516285-GES_DISC', env = 'ops'
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.fixture(scope="function")
def collection_variables(cmr_mode, collection_concept_id, env, bearer_token):
collection_query = cmr.queries.CollectionQuery(mode=cmr_mode)
variable_query = cmr.queries.VariableQuery(mode=cmr_mode)
collection_res = collection_query.concept_id(collection_concept_id).token(bearer_token).get()[0]
collection_associations = collection_res.get("associations")
variable_concept_ids = collection_associations.get("variables")
if variable_concept_ids is None and env == 'uat':
pytest.skip('There are no umm-v associated with this collection in UAT')
variables = []
for i in range(0, len(variable_concept_ids), 40):
variables_items = variable_query \
.concept_id(variable_concept_ids[i:i + 40]) \
.token(bearer_token) \
.format('umm_json') \
> .get_all()
verify_collection.py:159:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:127: in get_all
return self.get(self.hits())
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:949: in get
response = requests.get(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:73: in get
return request("get", url, params=params, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:59: in request
return session.request(method=method, url=url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <requests.adapters.HTTPAdapter object at 0x7ffa08a47b80>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()
def send(
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
"""
try:
conn = self.get_connection_with_tls_context(
request, verify, proxies=proxies, cert=cert
)
except LocationValueError as e:
raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
self.add_headers(
request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
)
chunked = not (request.body is None or "Content-Length" in request.headers)
if isinstance(timeout, tuple):
try:
connect, read = timeout
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError:
raise ValueError(
f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
f"or a single float to set both timeouts to the same value."
)
elif isinstance(timeout, TimeoutSauce):
pass
else:
timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
resp = conn.urlopen(
method=request.method,
url=url,
body=request.body,
headers=request.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.max_retries,
timeout=timeout,
chunked=chunked,
)
except (ProtocolError, OSError) as err:
> raise ConnectionError(err, request=request)
E requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:604: ConnectionError
--------------------------------- Captured Log ---------------------------------
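An alternative to retrying in the fixture is to let the HTTP layer retry for itself. The locals above show requests' default adapter behaviour, i.e. Retry(total=0, connect=None, read=False, ...), so a dropped connection is fatal immediately. The sketch below shows a requests Session mounted with a retrying adapter; it is illustrative only, since python-cmr currently issues module-level requests.get calls and would need to accept a session for this to take effect, and the retry counts are arbitrary.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
def retrying_session(total=3, backoff_factor=1.0):
    """Build a requests Session whose adapter retries dropped connections and 5xx responses."""
    retry = Retry(total=total, connect=total, read=total,
                  backoff_factor=backoff_factor,
                  status_forcelist=(502, 503, 504),
                  allowed_methods=("GET",))
    session = requests.Session()
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("https://", adapter)
    session.mount("http://", adapter)
    return session
# e.g. retrying_session().get("https://cmr.earthdata.nasa.gov/search/variables.umm_json",
#                             params={"page_size": 40})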
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1442068493-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 27s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068493-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068493-GES_DISC', 'concept-id': 'G1628685465-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068493-GES_DISC'}]}, 'meta': {'association-details': {'collect...arthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/carbonmonoxide_total_column', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1442068490')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ffa08e39040>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ffa08e3b140>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7ffa08e3af40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1628685465-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068493-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.468625%3A-64.100375%29&subset=lon%28-112.00605%3A163.26405%29&granuleId=G1628685465-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job fa61be41-0682-4e55-b016-ddf34a0f9ad8
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1442068490/69508349_S5P_OFFL_L2_CO_20190806T003836_20190806T022006_09387_01_010302_20190811T235959_subsetted.nc4
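The group_walk helper quoted in these failures combines recursion, a global result variable and a shared group_list, which makes it easy for the traversal to end up indexing an empty list once an unexpected group layout appears. Purely as a sketch of the same search, the generator below walks a netCDF4 file depth-first and returns the path of the first group that contains the latitude variable; the names are illustrative and this is not the repository's implementation.
import netCDF4
def walk_groups(node, path=""):
    """Yield (path, group) for every group under node, depth-first."""
    for name, group in node.groups.items():
        group_path = f"{path}/{name}" if path else name
        yield group_path, group
        yield from walk_groups(group, group_path)
def find_lat_group(nc_path, lat_var_name):
    """Return the path of the first group containing lat_var_name ('' for the root, None if absent)."""
    with netCDF4.Dataset(nc_path) as f:
        if lat_var_name in f.variables:
            return ""
        for group_path, group in walk_groups(f):
            if lat_var_name in group.variables:
                return group_path
    return None
A group path returned by find_lat_group can then be handed straight to xarray.open_dataset(..., group=...), as the test already does once it has located latitude.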
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C2179081549-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 40s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2179081549-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2179081549-GES_DISC', 'concept-id': 'G3148994309-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2179081549-GES_DISC'}]}, 'meta': {'association-details': {'collect...escription': 'Extracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -9999}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2179081540')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'Swath': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f2d94febe40>}
nc_d = <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f2d94fe9d40>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
> data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
E IndexError: list index out of range
verify_collection.py:427: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G3148994309-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2179081549-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-66.2546885%3A-60.4535515%29&subset=lon%28-163.39403149999998%3A-133.1514085%29&granuleId=G3148994309-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 7ffc5d0c-74e8-45f2-b09f-533e726a3861
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2179081540/69508359_2A.GPM.DPR.GPM-SLH.20240705-S212102-E225415.058798.V07C_subsetted.nc4
WARNING root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
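This failure hits the other unguarded index, verify_collection.py:427: for this GPM granule the group holding lat/lon has only time-like subgroups, so the non-time filter yields an empty list before [0] is taken. A hedged sketch of the same selection that tolerates that case follows (the function name is illustrative); the empty-variables guard from the earlier sketch would still apply to whatever group it returns.
def pick_data_group(nc_group, lat_group_path):
    """Return the path of the first non-time subgroup of nc_group, or None if there is none.
    Same selection as verify_collection.py:427, but some GPM collections have no extra
    science group next to lat/lon, so None here means 'skip the science_test variable'.
    """
    candidates = [name for name in nc_group.groups if 'time' not in name.lower()]
    if not candidates:
        return None
    return f"{lat_group_path}/{candidates[0]}"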
Check warning on line 0 in tests.verify_collection
github-actions / Regression test results for ops
test_spatial_subset[C1442068509-GES_DISC] (tests.verify_collection) failed
test-results/ops_test_report.xml [took 45s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068509-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068509-GES_DISC', 'concept-id': 'G1628706233-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068509-GES_DISC'}]}, 'meta': {'association-details': {'collect... 'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/level', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1442068500')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'
@pytest.mark.timeout(600)
def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
harmony_env, tmp_path: pathlib.Path, bearer_token):
test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
# Compute a box that is smaller than the granule extent bounding box
north, south, east, west = get_bounding_box(granule_json)
east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
# Build harmony request
harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
request_collection = harmony.Collection(id=collection_concept_id)
harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
granule_id=[granule_json['meta']['concept-id']])
logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
# Submit harmony request and download result
job_id = harmony_client.submit(harmony_request)
logging.info("Submitted harmony job %s", job_id)
harmony_client.wait_for_processing(job_id, show_progress=True)
subsetted_filepath = None
for filename in [file_future.result()
for file_future
in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
logging.info(f'Downloaded: %s', filename)
subsetted_filepath = pathlib.Path(filename)
# Verify spatial subset worked
subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
group = None
# Try to read group in file
lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
lat_var_name = lat_var_name.split('/')[-1]
lon_var_name = lon_var_name.split('/')[-1]
with netCDF4.Dataset(subsetted_filepath) as f:
group_list = []
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
sci_var = list(subsetted_ds_data.variables.keys())[0]
subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
break
# recall the function on a group that has groups in it and didn't find latitude
# this is going 'deeper' into the groups
if len(list(nc_d.groups[g].groups.keys())) > 0:
group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
else:
continue
> group_walk(f.groups, f, '')
verify_collection.py:440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe67999dd40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe67999d740>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fe67999d640>
current_group = ''
def group_walk(groups, nc_d, current_group):
global subsetted_ds_new
subsetted_ds_new = None
# check if the top group has lat or lon variable
if lat_var_name in list(nc_d.variables.keys()):
subsetted_ds_new = subsetted_ds
else:
# if not then we'll need to keep track of the group layers
group_list.append(current_group)
# loop through the groups in the current layer
for g in groups:
# end the loop if we've already found latitude
if subsetted_ds_new:
break
# check if the groups have latitude, define the dataset and end the loop if found
if lat_var_name in list(nc_d.groups[g].variables.keys()):
group_list.append(g)
lat_group = '/'.join(group_list)
subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
# add a science variable to the dataset if other groups are in the lat/lon group
# some GPM collections won't have any other variables in the same group as lat/lon
if len(list(nc_d.groups[g].groups.keys())) > 0:
data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
g_data = lat_group+'/'+data_group
subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
> sci_var = list(subsetted_ds_data.variables.keys())[0]
E IndexError: list index out of range
verify_collection.py:430: IndexError
--------------------------------- Captured Log ---------------------------------
INFO root:verify_collection.py:365 Using granule G1628706233-GES_DISC for test
INFO root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068509-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1628706233-GES_DISC
INFO root:verify_collection.py:385 Submitted harmony job 5a13b21b-e054-418e-ac87-f235431ed4d2
INFO root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1442068500/69508394_S5P_OFFL_L2_O3_20190806T003836_20190806T022006_09387_01_010107_20190812T015759_subsetted.nc4
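The IndexError above (verify_collection.py:430) is raised because the sibling data group chosen next to the lat/lon group contains no variables after subsetting, so indexing [0] on its variable list fails. Below is a minimal defensive sketch, reusing the nc_d, g, lat_group, subsetted_filepath and subsetted_ds_new names from the traceback; it is an assumption about a possible fix, not the repository's current code. It skips empty sibling groups instead of assuming the first non-time group carries a science variable.

sci_var = None
for data_group in nc_d.groups[g].groups:
    # keep the existing behaviour of ignoring time-like groups
    if 'time' in str(data_group).lower():
        continue
    candidate = xarray.open_dataset(subsetted_filepath,
                                    group=lat_group + '/' + data_group,
                                    decode_times=False)
    # only take a science variable from a group that still has variables after subsetting
    if candidate.variables:
        sci_var = list(candidate.variables.keys())[0]
        subsetted_ds_new['science_test'] = candidate[sci_var]
        break
# if sci_var is still None, no sibling group carried a science variable; proceed without one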