Merge pull request #112 from asfadmin/release/1.1.0
Release/1.1.0
frankinspace authored Dec 9, 2021
2 parents 7c8d0e8 + bb5b771 commit 1758acd
Showing 98 changed files with 2,173 additions and 6,765 deletions.
4 changes: 3 additions & 1 deletion .dockerignore
@@ -1,5 +1,7 @@
example
test
tests
.pytest_cache
.git
spec
**/*.log
**/tmp
1 change: 1 addition & 0 deletions .gitattributes
@@ -0,0 +1 @@
tests/data/granules/** filter=lfs diff=lfs merge=lfs -text
4 changes: 2 additions & 2 deletions .gitignore
@@ -138,5 +138,5 @@ templates/
testcases/
.idea/
#unittest/data/
unittest/dockervenv/
unittest/.env_unittest
tests/dockervenv/
tests/.env_unittest
6 changes: 6 additions & 0 deletions CHANGE.md
@@ -1,3 +1,9 @@
[version 1.1.0] 2021-12-09
New functionality added to produce PNG output from NetCDF input.
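
A minimal sketch of requesting PNG output through the Harmony coverages API, included here only as an illustration: the endpoint, collection concept ID, granule ID, and the "all/coverage/rangeset" path suffix are assumed placeholders, not values from this repository.

import requests

# Hypothetical IDs and endpoint for illustration only.
base_url = 'https://harmony.uat.earthdata.nasa.gov/'
collection_id = 'C0000000000-EXAMPLE'
url = (base_url + collection_id
       + '/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset')

# Request PNG output for a single (hypothetical) granule.
response = requests.get(url, params={'granuleID': 'G0000000000-EXAMPLE',
                                     'format': 'image/png'})
with open('subset_output.png', 'wb') as f:
    f.write(response.content)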

[version 1.0.17] 2021-05-05
This is the final version for now. Development will be paused for a while.

[version 1.0.16] 2021-04-12
This version integrates the regridding functionality. Users can set regridding parameters in the URL query for the Harmony API: outputCrs, scaleSize (xres/yres in output coordinates), scaleExtent (xmin,ymin,xmax,ymax in output coordinates), and width/height (columns/rows of the output file). scaleSize and width/height cannot be used in the same request.
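
The sketch below illustrates these regridding parameters on a Harmony coverages request. It is not taken from this repository; the endpoint, collection concept ID, granule ID, and path suffix are assumed placeholders, and, per the note above, scaleSize is passed instead of width/height.

import requests

# Hypothetical IDs and endpoint for illustration only.
base_url = 'https://harmony.uat.earthdata.nasa.gov/'
collection_id = 'C0000000000-EXAMPLE'
url = (base_url + collection_id
       + '/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset')

params = {
    'granuleID': 'G0000000000-EXAMPLE',  # hypothetical granule
    'outputCrs': 'EPSG:4326',            # target output CRS
    'scaleExtent': '-180,-90,180,90',    # xmin,ymin,xmax,ymax in output coordinates
    'scaleSize': '0.1,0.1',              # xres,yres; omit if using width/height instead
}
response = requests.get(url, params=params)
with open('regridded_output.nc', 'wb') as f:
    f.write(response.content)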

13 changes: 7 additions & 6 deletions Dockerfile
@@ -1,16 +1,17 @@
From osgeo/gdal:ubuntu-full-3.2.0
FROM osgeo/gdal:ubuntu-full-3.3.1

RUN ln -sf /usr/bin/python3 /usr/bin/python && apt-get update && apt-get install -y python3-pip nco && pip3 install boto3
RUN ln -sf /usr/bin/python3 /usr/bin/python \
&& apt-get update \
&& apt-get install -y python3-pip nco \
&& pip3 --no-cache-dir install boto3 \
&& rm -rf /var/lib/apt/lists/*

WORKDIR "/home"

# Bundle app source
COPY . .
RUN pip3 install deps/harmony-service-lib-py
RUN pip3 install -r requirements.txt
RUN pip3 install --no-cache-dir -r requirements.txt

# To run locally during dev, build the image and run, e.g.:
# docker run --rm -it -e ENV=dev -v $(pwd):/home harmony/gdal --harmony-action invoke --harmony-input "$(cat ../harmony/example/service-operation.json)"
# Or if also working on harmony-service-lib-py in a peered directory:
# docker run --rm -it -e ENV=dev -v $(pwd):/home -v $(dirname $(pwd))/harmony-service-lib-py:/home/deps/harmony harmony/gdal --harmony-action invoke --harmony-input "$(cat ../harmony/example/service-operation.json)"
ENTRYPOINT ["python3", "-m", "gdal_subsetter"]
123 changes: 119 additions & 4 deletions Notebooks/gdal_subsetter_regression/GDAL_Subsetter_Regression.ipynb
@@ -42,7 +42,7 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
"scrolled": false
},
"outputs": [],
"source": [
@@ -151,8 +151,123 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## Regression tests for the S1 InSAR (Beta) collection:\n",
"### Not currently working in SIT or UAT"
"## Regression tests for the ALOS_PALSAR_LEVEL2.2 collection:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"import requests\n",
"import alos_psr_l22_info\n",
"import get_base_url_info\n",
"import get_product_info\n",
"import get_nc_info\n",
"import compare_images\n",
"import write_testfile\n",
"import get_filetype\n",
"import write_filetype_test\n",
"import pytest\n",
"import os\n",
"import cleanup\n",
"import glob\n",
"\n",
"if 'env' not in globals():\n",
" global env\n",
" env = 'uat'\n",
"collection = 'alos_l22'\n",
"base_url_info = get_base_url_info.get_base_url_info(env,collection)\n",
"alos_psr_l22 = alos_psr_l22_info.alos_psr_l22_info()\n",
"for num_requests in alos_psr_l22: \n",
" if num_requests['test_mode'] == 'standard_subsetting' or num_requests['test_mode'] == 'filetype':\n",
" harmony_url = base_url_info['base_url'] + base_url_info['collection_id'] + '/ogc-api-coverages/1.0.0/collections/' + num_requests['request_url'] + num_requests[base_url_info['granule_env_flag']]\n",
" outfile = env + num_requests['outfile']\n",
" print(\"Running request \" + num_requests['q_num'] + ': ' + harmony_url)\n",
" print(num_requests['message'])\n",
" r = requests.get(harmony_url)\n",
" elif num_requests['test_mode'] == 'shapefile':\n",
" harmony_url = base_url_info['base_url'] + base_url_info['collection_id'] + '/ogc-api-coverages/1.0.0/collections/' + num_requests['request_url']\n",
" outfile = env + num_requests['outfile']\n",
" print(\"Running request \" + num_requests['q_num'] + ': ' + harmony_url)\n",
" print(num_requests['message'])\n",
" path = num_requests['shape_path']\n",
" file = num_requests['shapefile_input']\n",
" gid = ''\n",
" if env == 'prod':\n",
" gid = num_requests['prod_gid']\n",
" elif env == 'uat' or env == 'sit':\n",
" gid = num_requests['uat_gid']\n",
" data = {'format': num_requests['outfile_format'],'granuleID': gid}\n",
" files = {\n",
" 'shapefile': (\n",
" file,\n",
" open(path + file,'rb'),\n",
" num_requests['post_type']\n",
" ),\n",
" }\n",
" r = requests.post(harmony_url, files=files, data=data)\n",
" status_code = r.status_code \n",
" if status_code == 200:\n",
" print ('Status code: ',status_code, ' Success!')\n",
" else:\n",
" print (('Status code: ',status_code, ' Something went wrong!'))\n",
" with open(outfile, 'wb') as f:\n",
" f.write(r.content)\n",
" f.close()\n",
" file_ext = outfile[-4:]\n",
" if file_ext[0:] == 'tiff':\n",
" product_info = get_product_info.get_product_info(collection, outfile)\n",
" is_identical = compare_images.compare_images(num_requests['reference_image'],outfile)\n",
" filename = 'test_alos_l22_' + num_requests['q_num'] + '.py'\n",
" #write_the_testfile = write_testfile.write_testfile(filename, collection, product_info, num_requests)\n",
" write_the_testfile = write_testfile.write_testfile(filename, collection, is_identical, product_info, num_requests)\n",
" ! pytest -v -p no:warnings\n",
" elif file_ext[2:] == 'nc':\n",
" product_info = get_nc_info.get_nc_info(collection, outfile)\n",
" is_identical = compare_images.compare_images(num_requests['reference_image'],outfile)\n",
" filename = 'test_alos_l22_nc_' + num_requests['q_num'] + '.py'\n",
" #write_the_testfile = write_testfile.write_testfile(filename, collection, product_info, num_requests)\n",
" write_the_testfile = write_testfile.write_testfile(filename, collection, is_identical, product_info, num_requests) \n",
" ! pytest -v -p no:warnings\n",
" else:\n",
" filetype = get_filetype.get_filetype(outfile)\n",
" filename = 'test_alos_l22_' + num_requests['q_num'] + '.py'\n",
" write_the_testfile = write_filetype_test.write_filetype_test(filename, collection, filetype, num_requests)\n",
" ! pytest -v -p no:warnings\n",
" # Keep the test file around for later\n",
" if os.path.exists(filename):\n",
" os.rename(filename, 'temp'+filename)\n",
" else:\n",
" print(\"The file \" + filename + \" does not exist\")\n",
" # Get rid of the product file\n",
" clean = cleanup.cleanup(outfile) \n",
" \n",
"# Rename the temp files to test files\n",
"files = glob.glob('temp*')\n",
"for f in files:\n",
" old_name = f\n",
" new_name=f.replace('temp', '')\n",
" try:\n",
" os.rename(f, new_name)\n",
" except OSError as e:\n",
" print(\"Error: %s : %s\" % (f, e.strerror))\n",
"# Run all the tests one more time to get the cumulative result\n",
"! pytest -v -p no:warnings\n",
"# Get rid of the test files\n",
"files = glob.glob('test*')\n",
"for f in files:\n",
" clean = cleanup.cleanup(f)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Regression tests for the S1 InSAR (Beta) collection:"
]
},
{
@@ -246,7 +246,7 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false
"scrolled": true
},
"outputs": [],
"source": [
