diff --git a/compass/meta.yaml b/compass/meta.yaml index cc37d4716..4fe58e1f9 100644 --- a/compass/meta.yaml +++ b/compass/meta.yaml @@ -1,5 +1,5 @@ {% set name = "compass" %} -{% set version = "0.1.8" %} +{% set version = "0.1.9" %} {% set build = 0 %} {% if mpi == "nompi" %} @@ -30,16 +30,12 @@ build: requirements: run: - python - - geometric_features 0.1.10 - - mpas_tools 0.0.10 + - geometric_features 0.1.11 + - mpas_tools 0.0.11 - jigsaw 0.9.12 - jigsawpy 0.2.1 - metis - - pyflann - - scikit-image - - cartopy - cartopy_offlinedata - - pyamg - ffmpeg - {{ mpi }} # [mpi != 'nompi'] - esmf * {{ mpi_prefix }}_* diff --git a/conda_package/docs/api.rst b/conda_package/docs/api.rst index 08044cee9..1619cf9d6 100644 --- a/conda_package/docs/api.rst +++ b/conda_package/docs/api.rst @@ -23,8 +23,27 @@ MPAS mesh tools translate +.. currentmodule:: mpas_tools.mesh.creation -.. currentmodule:: mpas_tools.conversion +.. autosummary:: + :toctree: generated/ + + build_mesh.build_mesh + coastal_tools.coastal_refined_mesh + inject_bathymetry.inject_bathymetry + inject_meshDensity.inject_meshDensity + inject_preserve_floodplain.inject_preserve_floodplain + jigsaw_driver.jigsaw_driver + mesh_definition_tools.mergeCellWidthVsLat + mesh_definition_tools.EC_CellWidthVsLat + mesh_definition_tools.RRS_CellWidthVsLat + mesh_definition_tools.AtlanticPacificGrid + mpas_to_triangle.mpas_to_triangle + open_msh.readmsh + triangle_to_netcdf.triangle_to_netcdf + jigsaw_to_netcdf.jigsaw_to_netcdf + +.. currentmodule:: mpas_tools.mesh.conversion .. autosummary:: :toctree: generated/ diff --git a/conda_package/docs/building_docs.rst b/conda_package/docs/building_docs.rst new file mode 100644 index 000000000..706ebb9dd --- /dev/null +++ b/conda_package/docs/building_docs.rst @@ -0,0 +1,26 @@ +.. 
_dev_building_docs: + +************************** +Building the Documentation +************************** + +To make a local test build of the documentation, it is easiest to follow the +:ref:`dev_testing_changes` procedure for how to make a local build of the +``mpas_tools`` package. Then, you need to set up a conda environment with the +test build and some other required packages: + +.. code-block:: + + $ conda create -y -n test_mpas_tools_docs --use-local mpas_tools sphinx mock \ + sphinx_rtd_theme + $ conda activate test_mpas_tools_docs + +Then, to build the documentation, run: + +.. code-block:: + + $ export DOCS_VERSION="test" + $ cd conda_package/docs + $ make html + +Then, you can view the documentation by opening ``_build/html/index.html``. diff --git a/conda_package/docs/define_base_mesh.py b/conda_package/docs/define_base_mesh.py new file mode 100755 index 000000000..a9aa150fb --- /dev/null +++ b/conda_package/docs/define_base_mesh.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +""" +% Create cell width array for this mesh on a regular latitude-longitude grid. +% Outputs: +% cellWidth - m x n array, entries are desired cell width in km +% lat - latitude, vector of length m, with entries between -90 and 90, degrees +% lon - longitude, vector of length n, with entries between -180 and 180, degrees +""" +import numpy as np + + +def cellWidthVsLatLon(): + + ddeg = 10 + constantCellWidth = 240 + + lat = np.arange(-90, 90.01, ddeg) + lon = np.arange(-180, 180.01, ddeg) + + cellWidth = constantCellWidth * np.ones((lat.size, lon.size)) + return cellWidth, lon, lat diff --git a/conda_package/docs/index.rst b/conda_package/docs/index.rst index adea42f23..ac7fbd2fc 100644 --- a/conda_package/docs/index.rst +++ b/conda_package/docs/index.rst @@ -15,11 +15,24 @@ ocean and land-ice test cases, the `MPAS-Analysis `_ package for analyzing simulations, and in other MPAS-related workflows. +User's Guide +============ + +.. 
toctree:: + :maxdepth: 2 + + mesh_creation + mesh_conversion + Developer's Guide ================= .. toctree:: :maxdepth: 2 + making_changes + testing_changes + building_docs + api Indices and tables diff --git a/conda_package/docs/making_changes.rst b/conda_package/docs/making_changes.rst new file mode 100644 index 000000000..a59f5acd0 --- /dev/null +++ b/conda_package/docs/making_changes.rst @@ -0,0 +1,83 @@ +.. _dev_making_changes: + +**************************** +Making Changes to mpas_tools +**************************** + +New python functions and modules (``.py`` files) can be added within the +``conda_package/mpas_tools``. These will automatically be part of the +``mpas_tools`` package. New directories with python modules should include an +``__init__.py`` file (which can be empty) to indicate that they are also part of +the package. + +Entry Points +============ + +The best way to add new "scripts" to the package is to add a function without +any arguments somewhere in the package, and then to add it as an "entry point" +both in ``conda_package/setup.py`` and ``conda_package/recipe/meta.yaml``. + +As an example, the entry point ``planar_hex`` is defined in ``setup.py`` as: + +.. code-block:: python + + setup(name='mpas_tools', + ... + entry_points={'console_scripts': + ['planar_hex = mpas_tools.planar_hex:main', + ... + +and in ``meta.yaml`` as: + +.. code-block:: + + build: + number: 0 + entry_points: + - planar_hex = mpas_tools.planar_hex:main + +When the package is installed in a conda environment, a stub script +``planar_hex`` will be in the user's path that will call the function ``main()`` +in the module ``mpas_tools.planar_hex``: + +.. code-block:: python + + def main(): + + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('--nx', dest='nx', type=int, required=True, + help='Cells in x direction') + ... 
+ + args = parser.parse_args() + + make_planar_hex_mesh(args.nx, args.ny, args.dc, + args.nonperiodic_x, args.nonperiodic_y, + args.outFileName) + +As you can see, the function pointed to by the entry point can be used to parse +command-line arguments, just as a "normal" python script would do. + +By convention, entry points do not typically include the ``.py`` extension. + +Dependencies +============ + +If your changes introduce new dependencies, these need to be added to the recipe +for the conda package in ``conda_package/recipe/meta.yaml``. + +Add these changes to the end of the ``run`` section of ``requirements``: + +.. code-block:: + + requirements: + ... + run: + - python + - netcdf4 + ... + - affine + +These requirements *must* be on the ``conda-forge`` anaconda channel. If you +need help with this, please contact the developers. + diff --git a/conda_package/docs/mesh_conversion.rst b/conda_package/docs/mesh_conversion.rst new file mode 100644 index 000000000..883aef6ed --- /dev/null +++ b/conda_package/docs/mesh_conversion.rst @@ -0,0 +1,200 @@ +.. _mesh_conversion: + +*************** +Mesh Conversion +*************** + +Mesh Converter +============== + +The command-line tool ``MpasMeshConverter.x`` and the corresponding wrapper +function :py:func:`mpas_tools.mesh.conversion.convert` are used to convert a +dataset describing cell locations, vertex locations, and connectivity between +cells and vertices into a valid MPAS mesh following the `MPAS mesh specification +`_. + +Example call to the command-line tool: + +.. code-block:: + + $ planar_hex --nx 4 --ny 4 --dc 10e3 -o base_mesh.nc + $ MpasMeshConverter.x base_mesh.nc mesh.nc + +This example uses the ``planar_hex`` tool to generate a small, doubly periodic +MPAS mesh with 10-km cells, then uses the MPAS mesh converter to make sure the +resulting mesh adheres to the mesh specification. 
``MpasMeshConverter.x`` takes +two arguments, the input and output mesh files, and will prompt the user for +file names if these arguments are not supplied. + +The same example in a python script can be accomplished with: + +.. code-block:: python + + from mpas_tools.planar_hex import make_planar_hex_mesh + from mpas_tools.mesh.conversion import convert + from mpas_tools.io import write_netcdf + + ds = make_planar_hex_mesh(nx=4, ny=4, dc=10e3, nonperiodic_x=False, + nonperiodic_y=False) + ds = convert(ds) + write_netcdf(ds, 'mesh.nc') + +Regardless of which of these methods is used, the input mesh must define the +following dimensions, variables and global attributes (not the dimension sizes +are merely examples from the mesh generated in the previous examples): + +.. code-block:: + + netcdf mesh { + dimensions: + nCells = 16 ; + nVertices = 32 ; + vertexDegree = 3 ; + variables: + double xCell(nCells) ; + double yCell(nCells) ; + double zCell(nCells) ; + double xVertex(nVertices) ; + double yVertex(nVertices) ; + double zVertex(nVertices) ; + int cellsOnVertex(nVertices, vertexDegree) ; + double meshDensity(nCells) ; + + // global attributes: + :on_a_sphere = "NO" ; + :sphere_radius = 0. ; + :is_periodic = "YES" ; + +The variable ``meshDensity`` is required for historical reasons and is passed +unchanged to the resulting mesh. It can contain all ones (or zeros), the +resolution of the mesh in kilometers, or whatever field the user wishes. + +The following global attributes are optional but will be passed on to the +resulting mesh: + +.. code-block:: + + // global attributes: + :x_period = 40000. ; + :y_period = 34641.0161513775 ; + :history = "Tue May 26 20:58:10 2020: /home/xylar/miniconda3/envs/mpas/bin/planar_hex --nx 4 --ny 4 --dc 10e3 -o base_mesh.nc" ; + +The ``file_id`` global attribute is also optional and is preserved in the +resulting mesh as ``parent_id``. A new ``file_id`` (a random hash tag) is +generated by the mesh converter. 
+ +The resulting dataset has all the dimensions and variables required for an MPAS +mesh. + +The mesh converter also generates a file called ``graph.info`` that is used to +create graph partitions from tools like `Metis +`_. This file is not stored by +default when the python ``cull`` function is used but can be specified with +the ``graphInfoFileName`` argument. The python function also takes a ``logger`` +that can be used to capture the output that would otherwise go to the screen +via ``stdout`` and ``stderr``. + +Cell Culler +=========== + +The command-line tool ``MpasCellCuller.x`` and the corresponding wrapper +function :py:func:`mpas_tools.mesh.conversion.cull` are used to cull cells from +a mesh based on the ``cullCell`` field in the input dataset and/or the provided +masks. The contents of the ``cullCell`` field is merge with the mask(s) from a +masking dataset and the inverse of the mask(s) from an inverse-masking dataset. +Then, a preserve-masking dataset is used to determine where cells should *not* +be culled. + +Example call to the command-line tool, assuming you start with a spherical mesh +called ``base_mesh.nc`` (not the doubly periodic planar mesh from the examples +above): + +.. code-block:: + + $ merge_features -c natural_earth -b region -n "Land Coverage" -o land.geojson + $ MpasMaskCreator.x base_mesh.nc land.nc -f land.geojson + $ MpasCellCuller.x base_mesh.nc culled_mesh.nc -m land.nc + +This example uses the ``merge_features`` tool from the ``geometric_features`` +conda package to get a geojson file containing land coverage. Then, it uses +the mask creator (see the next section) to create a mask on the MPAS mesh that +is one inside this region and zero outside. Finally, it culls the base mesh +to only those cells where the mask is zero (i.e. the mask indicates which cells +are to be removed). + +The same example in a python script can be accomplished with: + +.. 
code-block:: python + + import xarray + from geometric_features import GeometricFeatures + from mpas_tools.mesh.conversion import mask, cull + + gf = GeometricFeatures() + + fcLandCoverage = gf.read(componentName='natural_earth', objectType='region', + featureNames=['Land Coverage']) + + dsBaseMesh = xarray.open_dataset('base_mesh.nc') + dsLandMask = mask(dsBaseMesh, fcMask=fcLandCoverage) + dsCulledMesh = conversion.cull(dsBaseMesh, dsMask=dsLandMask) + write_netcdf(dsCulledMesh, 'culled_mesh.nc') + +Here is the full usage of ``MpasCellCuller.x``: + +.. code-block:: + + MpasCellCuller.x [input_name] [output_name] [[-m/-i/-p] masks_name] [-c] + + input_name: + This argument specifies the input MPAS mesh. + output_name: + This argument specifies the output culled MPAS mesh. + If not specified, it defaults to culled_mesh.nc, but + it is required if additional arguments are specified. + -m/-i/-p: + These arguments control how a set of masks is used when + culling a mesh. + The -m argument applies a mask to cull based on (i.e. + where the mask is 1, the mesh will be culled). + The -i argument applies the inverse mask to cull based + on (i.e. where the mask is 0, the mesh will be + culled). + The -p argument forces any marked cells to not be + culled. + If this argument is specified, the masks_name argument + is required + -c: + Output the mapping from old to new mesh (cellMap) in + cellMapForward.txt, + and output the reverse mapping from new to old mesh in + cellMapBackward.txt. + +Mask Creator +============ + +The command-line tool ``MpasMaskCreator.x`` and the corresponding wrapper +function :py:func:`mpas_tools.mesh.conversion.mask` are used to create a set of +region masks either from mask features or from seed points to be used to flood +fill a contiguous block of cells. + +Examples usage of the mask creator can be found above under the Cell Culler. + +Here is the full usage of ``MpasMaskCreator.x``: + +.. 
code-block:: + + MpasMaskCreator.x in_file out_file [ [-f/-s] file.geojson ] [--positive_lon] + in_file: This argument defines the input file that masks will be created for. + out_file: This argument defines the file that masks will be written to. + -s file.geojson: This argument pair defines a set of points (from the geojson point definition) + that will be used as seed points in a flood fill algorithm. This is useful when trying to remove isolated cells from a mesh. + -f file.geojson: This argument pair defines a set of geojson features (regions, transects, or points) + that will be converted into masks / lists. + --positive_lon: It is unlikely that you want this argument. In rare cases when using a non-standard geojson + file where the longitude ranges from 0 to 360 degrees (with the prime meridian at 0 degrees), use this flag. + If this flag is not set, the longitude range is -180-180 with 0 degrees being the prime meridian, which is the + case for standard geojson files including all features from the geometric_features repo. + The fact that longitudes in the input MPAS mesh range from 0 to 360 is not relevant to this flag, + as latitude and longitude are recomputed internally from Cartesian coordinates. + Whether this flag is passed in or not, any longitudes written are in the 0-360 range. diff --git a/conda_package/docs/mesh_creation.rst b/conda_package/docs/mesh_creation.rst new file mode 100644 index 000000000..4fd444967 --- /dev/null +++ b/conda_package/docs/mesh_creation.rst @@ -0,0 +1,118 @@ +.. _mesh_creation: + +************* +Mesh Creation +************* + +Building a Mesh +=============== + +The :py:func:`mpas_tools.mesh.creation.build_mesh.build_mesh` function is used +to create an MPAS mesh using the `JIGSAW `_ +and `JIGSAW-Python `_ packages. + +The user must define a local python module ``define_base_mesh`` that provides a +function that returns a 2D array ``cellWidth`` of cell sizes in kilometers. 
+ +If the mesh is on a sphere, this function is called ``cellWidthVsLatLon()`` +and also returns 1D ``lon`` and ``lat`` arrays. + +If the mesh is planar, the function is called ``cellWidthVsXY()`` and returns 4 +arrays in addition to ``cellWidth``: 1D ``x`` and ``y`` arrays defining planar +coordinates in meters; as well as ``geom_points``, list of point coordinates for +bounding polygon for the planar mesh; and ``geom_edges``, list of edges between +points in ``geom_points`` that define the bounding polygon. + +The result is an MPAS mesh file ``base_mesh.nc`` as well as several intermediate +files: ``mesh.log``, ``mesh-HFUN.msh``, ``mesh.jig``, ``mesh-MESH.msh``, +``mesh.msh``, and ``mesh_triangles.nc``. + +The :py:func:`mpas_tools.viz.paraview_extractor.extract_vtk` function is used +to produce a VTK file in the ``base_mesh_vtk`` directory that can be viewed in +`ParaView `_. + +Optionally, a field, ``cellSeedMask``, can be added to the mesh file that can +later be used to preserve a "flood plain" of positive elevations in the MPAS mesh. +See :py:func:`mpas_tools.mesh.creation.inject_preserve_floodplain.inject_preserve_floodplain`. + +Optionally, a field, ``bottomDepthObserved``, can be added to the mesh file +with bathymetry data from one of two topography files: ``earth_relief_15s.nc`` +or ``topo.msh``. If bathymetry should be added to the mesh, a local link with +one of these file names must exist. See +:py:func:`mpas_tools.mesh.creation.inject_bathymetry.inject_bathymetry`. + +A simple example of ``define_base_mesh.py`` for a spherical mesh with constant, +240-km resolution is: + +.. code-block:: python + + import numpy as np + + + def cellWidthVsLatLon(): + """ + Create cell width array for this mesh on a regular latitude-longitude grid. 
+ + Returns + ------- + cellWidth : numpy.ndarray + m x n array of cell width in km + lon : numpy.ndarray + longitude in degrees (length n and between -180 and 180) + lat : numpy.ndarray + longitude in degrees (length m and between -90 and 90) + """ + + ddeg = 10 + constantCellWidth = 240 + + lat = np.arange(-90, 90.01, ddeg) + lon = np.arange(-180, 180.01, ddeg) + + cellWidth = constantCellWidth * np.ones((lat.size, lon.size)) + return cellWidth, lon, lat + +With this module defined locally, a mesh can be generated either with the +command-line tool ``build_mesh``: + +.. code-block:: + + $ build_mesh + +or by calling py:func:`mpas_tools.mesh.creation.build_mesh.build_mesh`: + +.. code-block:: python + + from mpas_tools.mesh.creation.build_mesh import build_mesh + + + build_mesh() + +The full usage details of the command-line tool are: + +.. code-block:: + + $ build_mesh --help + + usage: build_mesh [-h] [--preserve_floodplain] + [--floodplain_elevation FLOODPLAIN_ELEVATION] + [--inject_bathymetry] [--geometry GEOMETRY] + [--plot_cellWidth] + + optional arguments: + -h, --help show this help message and exit + --preserve_floodplain + Whether a flood plain (bathymetry above z = 0) should + be preserved in the mesh + --floodplain_elevation FLOODPLAIN_ELEVATION + The elevation in meters to which the flood plain is + preserved, default is 20 m + --inject_bathymetry Whether one of the default bathymetry datasets, + earth_relief_15s.nc or topo.msh, should be added to + the MPAS mesh + --geometry GEOMETRY Whether the mesh is on a sphere or a plane, default is + a sphere + --plot_cellWidth Whether to produce a plot of cellWidth + + + diff --git a/conda_package/docs/testing_changes.rst b/conda_package/docs/testing_changes.rst new file mode 100644 index 000000000..c648023bf --- /dev/null +++ b/conda_package/docs/testing_changes.rst @@ -0,0 +1,109 @@ +.. 
_dev_testing_changes: + +***************************** +Testing Changes to mpas_tools +***************************** + +There are a few different ways to test the ``mpas_tools`` package. Typically, +the quickest turn-around between making changes and seeing their results are +going to be seen if you can test the code straight out of the git repo. This +approach works for calling functions from the package within a python script +but doesn't give you easy access to the "entry points"(see +:ref:`dev_making_changes`). To more fully test the package, you will need to +build the package locally, install it into a new conda environment, and test +your code within that environment. + +Testing from the git repo +========================= + +If you are testing a simple python script that accesses ``mpas_tools``, you can +make a symlink in the same directory as your python script to ``mpas_tools`` +within ``conda_package``. Python should search the local path before looking +elsewhere so this should work even if a previous version ``mpas_tools`` is +already installed in the conda environment you are using. + +Testing the conda package +========================= + +Updating the Version +******************** + +As part of your testing, you should update the version of ``mpas_tools``. This +should be done both in ``conda_package/mpas_tools/__init__.py``: + +.. code-block:: python + + __version_info__ = (0, 0, 11) + __version__ = '.'.join(str(vi) for vi in __version_info__) + +Increment ``__version_info__`` (major, minor or micro version, depending on +what makes sense). + +The version in the conda recipe (``conda_package/recipe/meta.yaml``) needs to +match: + +.. code-block:: + + {% set name = "mpas_tools" %} + {% set version = "0.0.11" %} + +It is also a good idea to add the new version to the :ref:`versions`. The new +links won't be valid until a new release is made and Travis CI has generated +the associated documentation. 
Eventually, it should be possible to do this +automatically but that has not yet been implemented. + +Building the package +******************** + +To build the conda package, you will need to install conda-build into your base +conda environment. (Basic instructions on how to install Miniconda or Anaconda +are beyond the scope of this documentation.) + +.. code-block:: + + $ conda config --add channels conda-forge + $ conda config --set channel_priority strict + $ conda install -n base conda-build + +To build the package, make sure you are in the base of the repo and run: + +.. code-block:: + + $ rm -rf ~/miniconda3/conda-bld + $ conda build conda_package/recipe + +The first is to make sure you don't have existing packages already built that +would get used in your building and testing instead of the versions from +``conda-forge``. If your conda setup is installed somewhere other than +``~/miniconda3``, use the appropriate path. + +Installing the package +********************** + +To make a new test environment to try out scripts, other python packages or +other workflows that use the tools, run: + +.. code-block:: + + $ conda create -n test_mpas_tools --use-local python=3.8 mpas_tools + +You can name the environment whatever if useful to you. Activate the +environment with: + +.. code-block:: + + $ conda activate test_mpas_tools + +You should now find that ``mpas_tools`` can be imported in python codes and the +various scripts and entry points are available in the path. + +Removing the test environment +***************************** + +If you're done with testing, you can remove the test environment + +.. code-block:: + + $ conda deactivate + $ conda remove --all -n test_mpas_tools + diff --git a/conda_package/docs/versions.rst b/conda_package/docs/versions.rst index 2770e0b95..f3f9be704 100644 --- a/conda_package/docs/versions.rst +++ b/conda_package/docs/versions.rst @@ -1,3 +1,5 @@ +.. 
_versions: + Versions ======== @@ -5,7 +7,10 @@ Versions Documentation On GitHub ================ =============== `stable`_ `master`_ +`v0.0.11`_ `0.0.11`_ ================ =============== .. _`stable`: ../stable/index.html .. _`master`: https://github.com/MPAS-Dev/MPAS-Tools/tree/master +.. _`v0.0.11`: ../0.0.11/index.html +.. _`0.0.11`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/0.0.11 \ No newline at end of file diff --git a/conda_package/mpas_tools/__init__.py b/conda_package/mpas_tools/__init__.py index dd6323666..714773297 100644 --- a/conda_package/mpas_tools/__init__.py +++ b/conda_package/mpas_tools/__init__.py @@ -1,2 +1,2 @@ -__version_info__ = (0, 0, 10) +__version_info__ = (0, 0, 11) __version__ = '.'.join(str(vi) for vi in __version_info__) diff --git a/conda_package/mpas_tools/conversion.py b/conda_package/mpas_tools/conversion.py index 3c188fe56..e07a7478e 100644 --- a/conda_package/mpas_tools/conversion.py +++ b/conda_package/mpas_tools/conversion.py @@ -1,236 +1 @@ -from __future__ import absolute_import, division, print_function, \ - unicode_literals - -import os -import xarray -import subprocess -from backports.tempfile import TemporaryDirectory -import shutil - -from mpas_tools.io import write_netcdf - - -def convert(dsIn, graphInfoFileName=None, logger=None): - ''' - Use ``MpasMeshConverter.x`` to convert an input mesh to a valid MPAS - mesh that is fully compliant with the MPAS mesh specification. - https://mpas-dev.github.io/files/documents/MPAS-MeshSpec.pdf - - Parameters - ---------- - dsIn : ``xarray.Dataset`` - A data set to convert - - graphInfoFileName : str, optional - A file path (relative or absolute) where the graph file (typically - ``graph.info`` should be written out. By default, ``graph.info`` is - not saved. 
- - logger : ``logging.Logger``, optional - A logger for the output if not stdout - - Returns - ------- - dsOut : ``xarray.Dataset`` - The MPAS mesh - ''' - - with TemporaryDirectory() as tempdir: - inFileName = '{}/mesh_in.nc'.format(tempdir) - write_netcdf(dsIn, inFileName) - - outFileName = '{}/mesh_out.nc'.format(tempdir) - - if graphInfoFileName is not None: - graphInfoFileName = os.path.abspath(graphInfoFileName) - - # go into the directory of the output file so the graph.info file ends - # up in the same place - owd = os.getcwd() - outDir = os.path.dirname(outFileName) - os.chdir(outDir) - _call_subprocess(['MpasMeshConverter.x', inFileName, outFileName], - logger) - os.chdir(owd) - - dsOut = xarray.open_dataset(outFileName) - dsOut.load() - - if graphInfoFileName is not None: - shutil.copyfile('{}/graph.info'.format(outDir), - graphInfoFileName) - - return dsOut - - -def cull(dsIn, dsMask=None, dsInverse=None, dsPreserve=None, - graphInfoFileName=None, logger=None): - ''' - Use ``MpasCellCuller.x`` to cull cells from a mesh based on the - ``cullCell`` field in the input file or DataSet and/or the provided masks. - ``cullCell``, dsMask and dsInverse are merged together so that the final - mask is the union of these 3. The preserve mask is then used to determine - where cells should *not* be culled. 
- - Parameters - ---------- - dsIn : ``xarray.Dataset`` - A data set to cull, possibly with a ``cullCell`` field set to one where - cells should be removed - - dsMask : ``xarray.Dataset`` or list, optional - A data set (or data sets) with region masks that are 1 where cells - should be culled - - dsInverse : ``xarray.Dataset`` or list, optional - A data set (or data sets) with region masks that are 0 where cells - should be culled - - dsPreserve : ``xarray.Dataset`` or list, optional - A data set (or data sets) with region masks that are 1 where cells - should *not* be culled - - graphInfoFileName : str, optional - A file path (relative or absolute) where the graph file (typically - ``culled_graph.info`` should be written out. By default, - ``culled_graph.info`` is not saved. - - logger : ``logging.Logger``, optional - A logger for the output if not stdout - - Returns - ------- - dsOut : ``xarray.Dataset`` - The culled mesh - - ''' - - with TemporaryDirectory() as tempdir: - inFileName = '{}/ds_in.nc'.format(tempdir) - write_netcdf(dsIn, inFileName) - outFileName = '{}/ds_out.nc'.format(tempdir) - - args = ['MpasCellCuller.x', inFileName, outFileName] - - if dsMask is not None: - if not isinstance(dsMask, list): - dsMask = [dsMask] - for index, ds in enumerate(dsMask): - fileName = '{}/mask{}.nc'.format(tempdir, index) - write_netcdf(ds, fileName) - args.extend(['-m', fileName]) - - if dsInverse is not None: - if not isinstance(dsInverse, list): - dsInverse = [dsInverse] - for index, ds in enumerate(dsInverse): - fileName = '{}/inverse{}.nc'.format(tempdir, index) - write_netcdf(ds, fileName) - args.extend(['-i', fileName]) - - if dsPreserve is not None: - if not isinstance(dsPreserve, list): - dsPreserve = [dsPreserve] - for index, ds in enumerate(dsPreserve): - fileName = '{}/preserve{}.nc'.format(tempdir, index) - write_netcdf(ds, fileName) - args.extend(['-p', fileName]) - - # go into the directory of the output file so the graph.info file ends - # up in the 
same place - - if graphInfoFileName is not None: - graphInfoFileName = os.path.abspath(graphInfoFileName) - - owd = os.getcwd() - outDir = os.path.dirname(outFileName) - os.chdir(outDir) - _call_subprocess(args, logger) - os.chdir(owd) - - dsOut = xarray.open_dataset(outFileName) - dsOut.load() - - if graphInfoFileName is not None: - shutil.copyfile('{}/culled_graph.info'.format(outDir), - graphInfoFileName) - - return dsOut - - -def mask(dsMesh, fcMask=None, fcSeed=None, positiveLon=False, logger=None): - ''' - Use ``MpasMaskCreator.x`` to create a set of region masks either from - mask feature collecitons or from seed points to be used to flood fill - - Parameters - ---------- - dsMesh : ``xarray.Dataset``, optional - An MPAS mesh on which the masks should be created - - fcMask : ``geometric_features.FeatureCollection``, optional - A feature collection containing features to use to create the mask - - fcSeed : ``geometric_features.FeatureCollection``, optional - A feature collection with points to use a seeds for a flood fill that - will create a mask of all cells connected to the seed points - - logger : ``logging.Logger``, optional - A logger for the output if not stdout - - Returns - ------- - dsMask : ``xarray.Dataset`` - The masks - - ''' - - with TemporaryDirectory() as tempdir: - inFileName = '{}/mesh_in.nc'.format(tempdir) - write_netcdf(dsMesh, inFileName) - outFileName = '{}/mesh_out.nc'.format(tempdir) - - args = ['MpasMaskCreator.x', inFileName, outFileName] - - if fcMask is not None: - fileName = '{}/mask.geojson'.format(tempdir) - fcMask.to_geojson(fileName) - args.extend(['-f', fileName]) - - if fcSeed is not None: - fileName = '{}/seed.geojson'.format(tempdir) - fcSeed.to_geojson(fileName) - args.extend(['-s', fileName]) - - if positiveLon: - args.append('--positive_lon') - - _call_subprocess(args, logger) - - dsOut = xarray.open_dataset(outFileName) - dsOut.load() - - return dsOut - - -def _call_subprocess(args, logger): - """Call the given 
subprocess and send the output to the logger""" - if logger is None: - subprocess.check_call(args) - else: - process = subprocess.Popen(args, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - stdout, stderr = process.communicate() - - if stdout: - stdout = stdout.decode('utf-8') - for line in stdout.split('\n'): - logger.info(line) - if stderr: - stderr = stderr.decode('utf-8') - for line in stderr.split('\n'): - logger.error(line) - - if process.returncode != 0: - raise subprocess.CalledProcessError(process.returncode, - ' '.join(args)) +from mpas_tools.mesh.conversion import convert, cull, mask diff --git a/conda_package/mpas_tools/mesh/__init__.py b/conda_package/mpas_tools/mesh/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/conda_package/mpas_tools/mesh/conversion.py b/conda_package/mpas_tools/mesh/conversion.py new file mode 100644 index 000000000..3c188fe56 --- /dev/null +++ b/conda_package/mpas_tools/mesh/conversion.py @@ -0,0 +1,236 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import os +import xarray +import subprocess +from backports.tempfile import TemporaryDirectory +import shutil + +from mpas_tools.io import write_netcdf + + +def convert(dsIn, graphInfoFileName=None, logger=None): + ''' + Use ``MpasMeshConverter.x`` to convert an input mesh to a valid MPAS + mesh that is fully compliant with the MPAS mesh specification. + https://mpas-dev.github.io/files/documents/MPAS-MeshSpec.pdf + + Parameters + ---------- + dsIn : ``xarray.Dataset`` + A data set to convert + + graphInfoFileName : str, optional + A file path (relative or absolute) where the graph file (typically + ``graph.info`` should be written out. By default, ``graph.info`` is + not saved. 
+ + logger : ``logging.Logger``, optional + A logger for the output if not stdout + + Returns + ------- + dsOut : ``xarray.Dataset`` + The MPAS mesh + ''' + + with TemporaryDirectory() as tempdir: + inFileName = '{}/mesh_in.nc'.format(tempdir) + write_netcdf(dsIn, inFileName) + + outFileName = '{}/mesh_out.nc'.format(tempdir) + + if graphInfoFileName is not None: + graphInfoFileName = os.path.abspath(graphInfoFileName) + + # go into the directory of the output file so the graph.info file ends + # up in the same place + owd = os.getcwd() + outDir = os.path.dirname(outFileName) + os.chdir(outDir) + _call_subprocess(['MpasMeshConverter.x', inFileName, outFileName], + logger) + os.chdir(owd) + + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + + if graphInfoFileName is not None: + shutil.copyfile('{}/graph.info'.format(outDir), + graphInfoFileName) + + return dsOut + + +def cull(dsIn, dsMask=None, dsInverse=None, dsPreserve=None, + graphInfoFileName=None, logger=None): + ''' + Use ``MpasCellCuller.x`` to cull cells from a mesh based on the + ``cullCell`` field in the input file or DataSet and/or the provided masks. + ``cullCell``, dsMask and dsInverse are merged together so that the final + mask is the union of these 3. The preserve mask is then used to determine + where cells should *not* be culled. 
+ + Parameters + ---------- + dsIn : ``xarray.Dataset`` + A data set to cull, possibly with a ``cullCell`` field set to one where + cells should be removed + + dsMask : ``xarray.Dataset`` or list, optional + A data set (or data sets) with region masks that are 1 where cells + should be culled + + dsInverse : ``xarray.Dataset`` or list, optional + A data set (or data sets) with region masks that are 0 where cells + should be culled + + dsPreserve : ``xarray.Dataset`` or list, optional + A data set (or data sets) with region masks that are 1 where cells + should *not* be culled + + graphInfoFileName : str, optional + A file path (relative or absolute) where the graph file (typically + ``culled_graph.info`` should be written out. By default, + ``culled_graph.info`` is not saved. + + logger : ``logging.Logger``, optional + A logger for the output if not stdout + + Returns + ------- + dsOut : ``xarray.Dataset`` + The culled mesh + + ''' + + with TemporaryDirectory() as tempdir: + inFileName = '{}/ds_in.nc'.format(tempdir) + write_netcdf(dsIn, inFileName) + outFileName = '{}/ds_out.nc'.format(tempdir) + + args = ['MpasCellCuller.x', inFileName, outFileName] + + if dsMask is not None: + if not isinstance(dsMask, list): + dsMask = [dsMask] + for index, ds in enumerate(dsMask): + fileName = '{}/mask{}.nc'.format(tempdir, index) + write_netcdf(ds, fileName) + args.extend(['-m', fileName]) + + if dsInverse is not None: + if not isinstance(dsInverse, list): + dsInverse = [dsInverse] + for index, ds in enumerate(dsInverse): + fileName = '{}/inverse{}.nc'.format(tempdir, index) + write_netcdf(ds, fileName) + args.extend(['-i', fileName]) + + if dsPreserve is not None: + if not isinstance(dsPreserve, list): + dsPreserve = [dsPreserve] + for index, ds in enumerate(dsPreserve): + fileName = '{}/preserve{}.nc'.format(tempdir, index) + write_netcdf(ds, fileName) + args.extend(['-p', fileName]) + + # go into the directory of the output file so the graph.info file ends + # up in the 
same place + + if graphInfoFileName is not None: + graphInfoFileName = os.path.abspath(graphInfoFileName) + + owd = os.getcwd() + outDir = os.path.dirname(outFileName) + os.chdir(outDir) + _call_subprocess(args, logger) + os.chdir(owd) + + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + + if graphInfoFileName is not None: + shutil.copyfile('{}/culled_graph.info'.format(outDir), + graphInfoFileName) + + return dsOut + + +def mask(dsMesh, fcMask=None, fcSeed=None, positiveLon=False, logger=None): + ''' + Use ``MpasMaskCreator.x`` to create a set of region masks either from + mask feature collecitons or from seed points to be used to flood fill + + Parameters + ---------- + dsMesh : ``xarray.Dataset``, optional + An MPAS mesh on which the masks should be created + + fcMask : ``geometric_features.FeatureCollection``, optional + A feature collection containing features to use to create the mask + + fcSeed : ``geometric_features.FeatureCollection``, optional + A feature collection with points to use a seeds for a flood fill that + will create a mask of all cells connected to the seed points + + logger : ``logging.Logger``, optional + A logger for the output if not stdout + + Returns + ------- + dsMask : ``xarray.Dataset`` + The masks + + ''' + + with TemporaryDirectory() as tempdir: + inFileName = '{}/mesh_in.nc'.format(tempdir) + write_netcdf(dsMesh, inFileName) + outFileName = '{}/mesh_out.nc'.format(tempdir) + + args = ['MpasMaskCreator.x', inFileName, outFileName] + + if fcMask is not None: + fileName = '{}/mask.geojson'.format(tempdir) + fcMask.to_geojson(fileName) + args.extend(['-f', fileName]) + + if fcSeed is not None: + fileName = '{}/seed.geojson'.format(tempdir) + fcSeed.to_geojson(fileName) + args.extend(['-s', fileName]) + + if positiveLon: + args.append('--positive_lon') + + _call_subprocess(args, logger) + + dsOut = xarray.open_dataset(outFileName) + dsOut.load() + + return dsOut + + +def _call_subprocess(args, logger): + """Call the given 
subprocess and send the output to the logger""" + if logger is None: + subprocess.check_call(args) + else: + process = subprocess.Popen(args, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + + if stdout: + stdout = stdout.decode('utf-8') + for line in stdout.split('\n'): + logger.info(line) + if stderr: + stderr = stderr.decode('utf-8') + for line in stderr.split('\n'): + logger.error(line) + + if process.returncode != 0: + raise subprocess.CalledProcessError(process.returncode, + ' '.join(args)) diff --git a/conda_package/mpas_tools/mesh/creation/__init__.py b/conda_package/mpas_tools/mesh/creation/__init__.py new file mode 100644 index 000000000..5653559e7 --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/__init__.py @@ -0,0 +1,5 @@ +from mpas_tools.mesh.creation.triangle_to_netcdf import triangle_to_netcdf +from mpas_tools.mesh.creation.jigsaw_to_netcdf import jigsaw_to_netcdf +from mpas_tools.mesh.creation.inject_meshDensity import inject_meshDensity +from mpas_tools.mesh.creation.mpas_to_triangle import mpas_to_triangle +from mpas_tools.mesh.creation.open_msh import readmsh diff --git a/conda_package/mpas_tools/mesh/creation/build_mesh.py b/conda_package/mpas_tools/mesh/creation/build_mesh.py new file mode 100644 index 000000000..0e4ffa9e0 --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/build_mesh.py @@ -0,0 +1,213 @@ +""" +This script performs the first step of initializing the global ocean. This +includes: +Step 1. Build cellWidth array as function of latitude and longitude +Step 2. Build mesh using JIGSAW +Step 3. Convert triangles from jigsaw format to netcdf +Step 4. Convert from triangles to MPAS mesh +Step 5. 
Create vtk file for visualization +""" + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import xarray +import argparse +import matplotlib +matplotlib.use('Agg') +import matplotlib.pyplot as plt +import cartopy.crs as ccrs +import cartopy + +from mpas_tools.mesh.conversion import convert +from mpas_tools.io import write_netcdf +from mpas_tools.viz.paraview_extractor import extract_vtk + +from mpas_tools.mesh.creation.jigsaw_driver import jigsaw_driver +from mpas_tools.mesh.creation.jigsaw_to_netcdf import jigsaw_to_netcdf +from mpas_tools.mesh.creation.inject_bathymetry import inject_bathymetry +from mpas_tools.mesh.creation.inject_meshDensity import inject_meshDensity +from mpas_tools.mesh.creation.inject_preserve_floodplain import \ + inject_preserve_floodplain +from mpas_tools.viz.colormaps import register_sci_viz_colormaps + +import sys +import os +# add the current working directory to the system path +sys.path.append(os.getcwd()) +import define_base_mesh +# okay, now we don't want to get anything else from CWD +del sys.path[-1] + +def build_mesh( + preserve_floodplain=False, + floodplain_elevation=20.0, + do_inject_bathymetry=False, + geometry='sphere', + plot_cellWidth=True): + """ + Build an MPAS mesh using JIGSAW with the given cell sizes as a function of + latitude and longitude (on a sphere) or x and y (on a plane). + + The user must define a local python module ``define_base_mesh`` that + provides a function that returns a 2D array ``cellWidth`` of cell sizes in + kilometers. + + If ``geometry = 'sphere'``, this function is called ``cellWidthVsLatLon()`` + and also returns 1D ``lon`` and ``lat`` arrays. 
+ + If ``geometry = 'plane'`` (or any value other than `'sphere'``), the + function is called ``cellWidthVsXY()`` and returns 4 arrays in addition to + ``cellWidth``: 1D ``x`` and ``y`` arrays defining planar coordinates in + meters; as well as ``geom_points``, list of point coordinates for bounding + polygon for the planar mesh; and ``geom_edges``, list of edges between + points in ``geom_points`` that define the bounding polygon. + + The result is ``base_mesh.nc`` as well as several intermediate files: + ``mesh.log``, ``mesh-HFUN.msh``, ``mesh.jig``, ``mesh-MESH.msh``, + ``mesh.msh``, and ``mesh_triangles.nc``. + + The ``extract_vtk()`` function is used to produce a VTK file in the + ``base_mesh_vtk`` directory that can be viewed in ParaVeiw. + + Parameters + ---------- + preserve_floodplain : bool, optional + Whether a flood plain (bathymetry above z = 0) should be preserved in + the mesh. If so, a field ``cellSeedMask`` is added to the MPAS mesh + indicating positive elevations that should be preserved. + + floodplain_elevation : float, optional + The elevation in meters to which the flood plain is preserved. + + do_inject_bathymetry : bool, optional + Whether one of the default bathymetry datasets, ``earth_relief_15s.nc`` + or ``topo.msh``, should be added to the MPAS mesh in the field + ``bottomDepthObserved``. If so, a local link to one of these file names + must exist. + + geometry : {'sphere', 'plane'}, optional + Whether the mesh is spherical or planar + + plot_cellWidth : bool, optional + If ``geometry = 'sphere'``, whether to produce a plot of ``cellWidth``. + If so, it will be written to ``cellWidthGlobal.png``. + """ + + if geometry == 'sphere': + on_sphere = True + else: + on_sphere = False + + print('Step 1. 
Build cellWidth array as function of horizontal coordinates') + if on_sphere: + cellWidth, lon, lat = define_base_mesh.cellWidthVsLatLon() + da = xarray.DataArray(cellWidth, + dims=['lat', 'lon'], + coords={'lat': lat, 'lon': lon}, + name='cellWidth') + cw_filename = 'cellWidthVsLatLon.nc' + da.to_netcdf(cw_filename) + plot_cellWidth = True + if plot_cellWidth: + register_sci_viz_colormaps() + fig = plt.figure(figsize=[16.0, 8.0]) + ax = plt.axes(projection=ccrs.PlateCarree()) + ax.set_global() + im = ax.imshow(cellWidth, origin='lower', + transform=ccrs.PlateCarree(), + extent=[-180, 180, -90, 90], cmap='3Wbgy5', + zorder=0) + ax.add_feature(cartopy.feature.LAND, edgecolor='black', zorder=1) + gl = ax.gridlines( + crs=ccrs.PlateCarree(), + draw_labels=True, + linewidth=1, + color='gray', + alpha=0.5, + linestyle='-', zorder=2) + gl.top_labels = False + gl.right_labels = False + plt.title( + 'Grid cell size, km, min: {:.1f} max: {:.1f}'.format( + cellWidth.min(),cellWidth.max())) + plt.colorbar(im, shrink=.60) + fig.canvas.draw() + plt.tight_layout() + plt.savefig('cellWidthGlobal.png', bbox_inches='tight') + plt.close() + + else: + cellWidth, x, y, geom_points, geom_edges = define_base_mesh.cellWidthVsXY() + da = xarray.DataArray(cellWidth, + dims=['y', 'x'], + coords={'y': y, 'x': x}, + name='cellWidth') + cw_filename = 'cellWidthVsXY.nc' + da.to_netcdf(cw_filename) + + print('Step 2. Generate mesh with JIGSAW') + if on_sphere: + jigsaw_driver(cellWidth, lon, lat) + else: + jigsaw_driver( + cellWidth, + x, + y, + on_sphere=False, + geom_points=geom_points, + geom_edges=geom_edges) + + print('Step 3. Convert triangles from jigsaw format to netcdf') + jigsaw_to_netcdf(msh_filename='mesh-MESH.msh', + output_name='mesh_triangles.nc', on_sphere=on_sphere) + + print('Step 4. Convert from triangles to MPAS mesh') + write_netcdf(convert(xarray.open_dataset('mesh_triangles.nc')), + 'base_mesh.nc') + + print('Step 5. 
Inject correct meshDensity variable into base mesh file') + inject_meshDensity(cw_filename=cw_filename, + mesh_filename='base_mesh.nc', on_sphere=on_sphere) + + if do_inject_bathymetry: + print('Step 6. Injecting bathymetry') + inject_bathymetry(mesh_file='base_mesh.nc') + + if preserve_floodplain: + print('Step 7. Injecting flag to preserve floodplain') + inject_preserve_floodplain(mesh_file='base_mesh.nc', + floodplain_elevation=floodplain_elevation) + + print('Step 8. Create vtk file for visualization') + extract_vtk(ignore_time=True, lonlat=True, dimension_list=['maxEdges='], + variable_list=['allOnCells'], filename_pattern='base_mesh.nc', + out_dir='base_mesh_vtk') + + print("***********************************************") + print("** The global mesh file is base_mesh.nc **") + print("***********************************************") + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--preserve_floodplain', action='store_true', + help='Whether a flood plain (bathymetry above z = 0) ' + 'should be preserved in the mesh') + parser.add_argument('--floodplain_elevation', action='store', + type=float, default=20.0, + help='The elevation in meters to which the flood plain ' + 'is preserved, default is 20 m') + parser.add_argument('--inject_bathymetry', action='store_true', + help='Whether one of the default bathymetry datasets, ' + 'earth_relief_15s.nc or topo.msh, should be added ' + 'to the MPAS mesh') + parser.add_argument('--geometry', default='sphere', + help='Whether the mesh is on a sphere or a plane, ' + 'default is a sphere') + parser.add_argument('--plot_cellWidth', action='store_true', + help='Whether to produce a plot of cellWidth') + cl_args = parser.parse_args() + build_mesh(cl_args.preserve_floodplain, cl_args.floodplain_elevation, + cl_args.inject_bathymetry, cl_args.geometry, + cl_args.plot_cellWidth) diff --git a/conda_package/mpas_tools/mesh/creation/coastal_tools.py 
b/conda_package/mpas_tools/mesh/creation/coastal_tools.py new file mode 100644 index 000000000..7fbaceebe --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/coastal_tools.py @@ -0,0 +1,1148 @@ +#!/usr/bin/env python +''' +name: coastal_tools +authors: Steven Brus + +last modified: 07/09/2018 + +''' +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy as np +import pyflann +from skimage import measure +from netCDF4 import Dataset +import matplotlib.pyplot as plt +from scipy import spatial, io +import timeit +import os +import cartopy +import cartopy.crs as ccrs +import cartopy.feature as cfeature +from rasterio.features import rasterize +from affine import Affine +import shapely.geometry +from geometric_features.plot import subdivide_geom +import mpas_tools.mesh.creation.mesh_definition_tools as mdt +plt.switch_backend('agg') +cartopy.config['pre_existing_data_dir'] = \ + os.getenv('CARTOPY_DIR', cartopy.config.get('pre_existing_data_dir')) + +# Constants +km = 1000.0 +deg2rad = np.pi / 180.0 +rad2deg = 180.0 / np.pi + +call_count = 0 + +########################################################################## +# Bounding box declarations (for coastal refinements) +########################################################################## + +# --------------- +# Region boxes +# --------------- + +# Bays and estuaries +Delaware_Bay = {"include": [np.array([-75.61903, -74.22, 37.8, 40.312747])], + "exclude": []} +Galveston_Bay = {"include": [np.array([-95.45, -94.4, 29, 30])], + "exclude": []} + +# Regions +Delaware_Region = {"include": [np.array([-77, -69.8, 35.5, 41])], + "exclude": []} + +# Coastlines +US_East_Coast = {"include": [np.array([-81.7, -62.3, 25.1, 44.50])], # East FL to Bay of Fundy + "exclude": [np.array([-66.0, -64.0, 31.5, 33.0]), # Bermuda + np.array([-79.75, -70.0, 20.0, 28.5]), # Bahamas + np.array([-65.15, -62.43, 43.0, 45.55]), # Gulf of St. 
Lawence + np.array([-66.65, -62.43, 43.0, 45.0])]} # '' + +US_Gulf_Coast = {"include": [np.array([-98.0, -80.0, 24.0, 31.0]), # West FL to NE Mexico + np.array([-98.5, -95.5, 20.0, 24.0]), # East Mexico + np.array([-91.0, -86.0, 20.0, 22.0]) # Yucatan + ], + "exclude": []} + +Caribbean = {"include": [np.array([-89.85, -59.73, 9.35, 20.86]), + ], + "exclude": []} + +US_West_Coast = {"include": [np.array([-127.0, -116.0, 32.5, 49.0]), # California + np.array([-117.5, -108.0, 22.8, 32.5]) # Baja and West Mexico + ], + "exclude": [np.array([-116.5, -115.0, 32.8, 33.8]), # Salton Sea + np.array([-120.5, -116.5, 35.5, 40.5]), # Lake Tahoe, etc. + np.array([[-111.89, 21.24], # Baja + [-107.17, 22.48], + [-113.94, 30.77], + [-119.44, 33.09]])]} + +Hawaii = {"include": [np.array([-161.0, -154.0, 18.0, 23.0])], + "exclude": []} + +Alaska = {"include": [np.array([-170.0, -141.0, 49.0, 63.0]), + np.array([-141.0, -129.5, 49.0, 61.0]), # Southeast + np.array([-129.5, -121.0, 49.0, 55.0]) # Connects AK to CA + ], + "exclude": [np.array([-171.0, -151.79, 49.54, 58.83])]} # Aleutian Islands + +Bering_Sea_E = {"include": [np.array([-180.0, -168.5, 56.00, 64.0])], + "exclude": []} + +Bering_Sea_W = {"include": [np.array([161.90, 180.0, 57.85, 66.63])], + "exclude": []} + +Aleutian_Islands_E = {"include": [np.array([-180.0, -151.79, 49.54, 58.83])], + "exclude": [np.array([-173.16, -164.37, 55.81, 57.55])]} + +Aleutian_Islands_W = {"include": [np.array([164.9, 180.0, 50.77, 56.31])], + "exclude": [np.array([178.5, 179.5, 51.25, 51.75])]} + +Greenland = {"include":[np.array([-81.5,-12.5,60,85])], + "exclude":[np.array([[-87.6,58.7], + [-51.9,56.6], + [-68.9,75.5], + [-107.0,73.3]]), + np.array([[-119.0,74.5], + [-92.7,75.9], + [-52.84,83.25], + [-100.8,84.0]]), + np.array([[-101.3,68.5], + [-82.4,72.3], + [-68.7,81.24], + [-117.29,77.75]]), + np.array([-25.0,-10.0,62.5,67.5])]} +Atlantic = {"include": [np.array([-78.5, -77.5, 23.5, 25.25])], # Bahamas, use with large transition start 
to fill Atlantic + "exclude": []} + +# Combined coastlines +CONUS = {"include": [], "exclude": []} +CONUS["include"].extend(US_East_Coast["include"]) +CONUS["include"].extend(US_Gulf_Coast["include"]) +CONUS["include"].extend(US_West_Coast["include"]) +CONUS["exclude"].extend(US_East_Coast["exclude"]) +CONUS["exclude"].extend(US_West_Coast["exclude"]) + +Continental_US = {"include": [], "exclude": []} +Continental_US["include"].extend(CONUS["include"]) +Continental_US["include"].extend(Alaska["include"]) +Continental_US["exclude"].extend(CONUS["exclude"]) + +# ---------------- +# Plotting boxes +# ---------------- + +Western_Atlantic = np.array([-98.186645, -59.832744, 7.791301, 45.942453]) +Contiguous_US = np.array([-132.0, -59.832744, 7.791301, 51.0]) +North_America = np.array([-175.0, -60.0, 7.5, 72.0]) +Delaware = np.array([-77, -69.8, 35.5, 41]) +Entire_Globe = np.array([-180, 180, -90, 90]) + +# ----------------- +# Restrict Boxes +# ----------------- + +Empty = {"include": [], + "exclude": []} + +Delaware_restrict = {"include": [np.array([[-75.853, 39.732], + [-74.939, 36.678], + [-71.519, 40.156], + [-75.153, 40.077]]), + np.array([[-76.024, 37.188], + [-75.214, 36.756], + [-74.512, 37.925], + [-75.274, 38.318]])], + "exclude": []} + +Gulf_restrict = {"include": [np.array([[-85.04, 13.80], + [-76.90, 16.60], + [-86.24, 36.80], + [-105.55, 22.63]])], + "exclude": []} + +Caribbean_restrict = {"include": [np.array([[-76.39, 4.55], + [-53.22, 4.29], + [-53.22, 38.94], + [-94.99, 18.47]])], + "exclude": [np.array([[-80.72, 1.66], + [-73.70, 3.03], + [-78.94, 9.33], + [-84.98, 7.67]]), + np.array([[-100.18, 13.76], + [-82.93, 6.51], + [-85.08, 13.74], + [-95.86, 18.04]])]} + +East_Coast_restrict = {"include": [], + "exclude": [np.array([[-72.0, 46.69], + [-61.74, 45.48], + [-44.07, 49.49], + [-63.47, 53.76]])]} +Bering_Sea_restrict = {"include": [], + "exclude": [np.array([[143.46, 51.79], + [155.65, 50.13], + [166.23, 63.78], + [148.63, 62.30]]), + 
np.array([[154.36, 68.22], + [-173.80, 65.94], + [-161.81, 72.02], + [163.64, 73.70]])]} + +Atlantic_restrict = {"include": [np.array([[-86.39, 13.67], + [-24.44, 21.32], + [-50.09, 55.98], + [-105.58, 23.61]]), + np.array([[-76.39, 4.55], + [-30.74, -2.66], + [-30.83, 43.95], + [-94.99, 18.47]])], + "exclude": [np.array([[-80.72, 1.66], + [-73.70, 3.03], + [-78.94, 9.33], + [-84.98, 7.67]]), + np.array([[-100.18, 13.76], + [-82.93, 6.51], + [-85.08, 13.74], + [-95.86, 18.04]])]} + +########################################################################## +# User-defined inputs +########################################################################## + +default_params = { + + # Path to bathymetry data and name of file + "nc_file": "./earth_relief_15s.nc", + "nc_vars": ["lon","lat","z"], + + # Bounding box of coastal refinement region + "region_box": Continental_US, + "origin": np.array([-100, 40]), + "restrict_box": Empty, + + # Coastline extraction parameters + "z_contour": 0.0, + "n_longest": 10, + "smooth_coastline": 0, + "point_list": None, + + # Global mesh parameters + "grd_box": Entire_Globe, + "ddeg": .1, + # 'EC' (defaults to 60to30), 'QU' (uses dx_max_global), 'RRS' (uses dx_max_global and dx_min_global) + "mesh_type": 'EC', + "dx_max_global": 30.0 * km, + "dx_min_global": 10.0 * km, + + # Coastal mesh parameters + "dx_min_coastal": 10.0 * km, + "trans_width": 600.0 * km, + "trans_start": 400.0 * km, + + # Bounding box of plotting region + "plot_box": North_America, + + # Options + "nn_search": "flann", + "plot_option": True + +} + +########################################################################## +# Functions +########################################################################## + + +def coastal_refined_mesh(params, cell_width=None, lon_grd=None, lat_grd=None): # {{{ + + coastal_refined_mesh.counter += 1 + call_count = coastal_refined_mesh.counter + + if cell_width is None: + # Create the background cell width array + lon_grd, lat_grd, 
def create_background_mesh(grd_box, ddeg, mesh_type, dx_min, dx_max,  # {{{
                           plot_option=False, plot_box=[], call=None):
    """
    Create a regular lon/lat background grid and assign a cell width to each
    grid point based on the requested global mesh type.

    Parameters
    ----------
    grd_box : numpy.ndarray
        Bounding box ``[lon_min, lon_max, lat_min, lat_max]`` of the grid
    ddeg : float
        Grid spacing in degrees
    mesh_type : {'QU', 'EC', 'RRS'}
        Background resolution type: quasi-uniform (uses ``dx_max``),
        eddy-closure, or Rossby-radius scaling (uses ``dx_max`` and ``dx_min``)
    dx_min : float
        The minimum cell width in meters (used by 'RRS' meshes)
    dx_max : float
        The maximum cell width in meters (used by 'QU' and 'RRS' meshes)
    plot_option : bool, optional
        Whether to plot the background cell width and save it to .png files
    plot_box : list, optional
        The bounding box of the plot (read-only; only used when
        ``plot_option`` is ``True``)
    call : int, optional
        A counter used to name the plot files

    Returns
    -------
    lon_grd, lat_grd : numpy.ndarray
        1D arrays of longitude and latitude in degrees
    cell_width : numpy.ndarray
        2D array of cell widths in meters
    """

    print("Create background mesh")
    print("------------------------")

    # Create cell width background grid
    lat_grd = np.arange(grd_box[2], grd_box[3], ddeg)
    lon_grd = np.arange(grd_box[0], grd_box[1], ddeg)
    nx_grd = lon_grd.size
    ny_grd = lat_grd.size
    print("   Background grid dimensions:", ny_grd, nx_grd)

    # Assign background grid cell width values
    if mesh_type == 'QU':
        cell_width_lat = dx_max / km * np.ones(lat_grd.size)
    elif mesh_type == 'EC':
        cell_width_lat = mdt.EC_CellWidthVsLat(lat_grd)
    elif mesh_type == 'RRS':
        cell_width_lat = mdt.RRS_CellWidthVsLat(lat_grd, dx_max / km,
                                                dx_min / km)
    else:
        # fixed: an unrecognized mesh_type used to fall through and raise a
        # confusing NameError on cell_width_lat below
        raise ValueError("Bad mesh_type: expected 'QU', 'EC' or 'RRS', got "
                         '{}'.format(mesh_type))
    cell_width = np.tile(cell_width_lat, (nx_grd, 1)).T * km

    # Plot background cell width
    if plot_option:
        print("   Plotting background cell width")

        plt.figure()
        plt.plot(lat_grd, cell_width_lat)
        plt.savefig('bckgrnd_grid_cell_width_vs_lat' + str(call) + '.png')

        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
        plt.contourf(lon_grd, lat_grd, cell_width,
                     transform=ccrs.PlateCarree())
        plot_coarse_coast(ax, plot_box)
        plt.colorbar()
        plt.savefig(
            'bckgnd_grid_cell_width' +
            str(call) +
            '.png',
            bbox_inches='tight')
        plt.close()
        print("   Done")

    return (lon_grd, lat_grd, cell_width)  # }}}

##############################################################


def extract_coastlines(nc_file, nc_vars, region_box, z_contour=0,
                       n_longest=10, point_list=None,  # {{{
                       plot_option=False, plot_box=[], call=None):
    """
    Extract coastline contours from a bathymetry/topography data set within
    the given region boxes.

    Returns a single 2D array of (lon, lat) coastline points, with individual
    coastlines separated by rows of NaN.
    """

    print("Extract coastlines")
    print("------------------")

    # Open NetCDF file and read coordinates
    nc_fid = Dataset(nc_file, "r")
    lon = nc_fid.variables[nc_vars[0]][:]
    lat = nc_fid.variables[nc_vars[1]][:]
    bathy_data = nc_fid.variables[nc_vars[2]]

    # Get coastlines for refined region
    coastline_list = []
    for i, box in enumerate(region_box["include"]):

        # Find coordinates and data inside bounding box
        xb, rect = get_convex_hull_coordinates(box)
        lon_region, lat_region, z_region = get_data_inside_box(
            lon, lat, bathy_data, xb)
        z_data = np.zeros(z_region.shape)
        z_data.fill(np.nan)
        idx = get_indices_inside_quad(lon_region, lat_region, box)
        z_data[idx] = z_region[idx]
        print("   Regional bathymetry data shape:", z_region.shape)

        # Find coastline contours
        print("   Extracting coastline " + str(i + 1) + "/" +
              str(len(region_box["include"])))
        contours = measure.find_contours(z_data, z_contour)

        # Keep only n_longest coastlines and those not within exclude areas
        contours.sort(key=len, reverse=True)
        for c in contours[:n_longest]:
            # Convert from pixel to lon,lat
            c[:, 0] = (xb[3] - xb[2]) / float(len(lat_region)) * c[:, 0] \
                + xb[2]
            c[:, 1] = (xb[1] - xb[0]) / float(len(lon_region)) * c[:, 1] \
                + xb[0]
            c = np.fliplr(c)

            exclude = False
            for area in region_box["exclude"]:
                # Determine coastline coordinates in exclude area
                idx = get_indices_inside_quad(
                    c[:, 0], c[:, 1], area, grid=False)

                # Exclude coastlines that are entirely contained in exclude
                # area
                if idx.size == c.shape[0]:
                    exclude = True
                    break
                elif idx.size != 0:
                    c = np.delete(c, idx, axis=0)

            # Keep coastlines not entirely contained in exclude areas
            if not exclude:
                # a NaN row separates this coastline from the next
                cpad = np.vstack((c, [np.nan, np.nan]))
                coastline_list.append(cpad)

    print("   Done")

    # Add in user-specified points
    if point_list:
        for i, points in enumerate(point_list):
            cpad = np.vstack((points, [np.nan, np.nan]))
            coastline_list.append(cpad)

    # Combine coastlines
    coastlines = np.concatenate(coastline_list)

    if plot_option:
        print("   Plotting coastlines")

        # Find coordinates and data inside plotting box
        lon_plot, lat_plot, z_plot = get_data_inside_box(
            lon, lat, bathy_data, plot_box)

        # Plot bathymetry data, coastlines and region boxes
        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
        levels = np.linspace(np.amin(z_plot), np.amax(z_plot), 100)
        ds = 100  # Downsample
        dsx = np.arange(0, lon_plot.size, ds)  # bathy data
        dsy = np.arange(0, lat_plot.size, ds)  # to speed up
        dsxy = np.ix_(dsy, dsx)  # plotting
        plt.contourf(lon_plot[dsx], lat_plot[dsy], z_plot[dsxy],
                     levels=levels, transform=ccrs.PlateCarree())
        plot_coarse_coast(ax, plot_box)
        plt.plot(coastlines[:, 0], coastlines[:, 1], color='white')
        for box in region_box["include"]:
            plot_region_box(box, 'b')
        for box in region_box["exclude"]:
            plot_region_box(box, 'r')
        plt.colorbar()
        plt.axis('equal')
        plt.savefig(
            'bathy_coastlines' +
            str(call) +
            '.png',
            bbox_inches='tight')
        plt.close()
        print("   Done")

    return coastlines  # }}}

##############################################################


def distance_to_coast(coastlines, lon_grd, lat_grd, origin, nn_search,
                      smooth_window,  # {{{
                      plot_option=False, plot_box=[], call=None):
    """
    Compute the distance in meters from each background grid point to the
    nearest coastline point, using either a scipy kd-tree or pyflann
    nearest-neighbor search.

    NOTE(review): the ``origin`` parameter is unused in this function body --
    confirm before removing it from the signature.
    """

    print("Distance to coast")
    print("-----------------")

    # Remove NaN values separating coastlines
    coast_pts = coastlines[np.isfinite(coastlines).all(axis=1)]

    # Smooth coast points if necessary
    if smooth_window > 1:
        coast_pts[:, 0], coast_pts[:, 1] = smooth_coastline(
            coast_pts[:, 0], coast_pts[:, 1], smooth_window)

    # Convert coastline points to x,y,z and create kd-tree
    npts = coast_pts.shape[0]
    coast_pts_xyz = np.zeros((npts, 3))
    coast_pts_xyz[:, 0], coast_pts_xyz[:, 1], coast_pts_xyz[:, 2] = \
        lonlat2xyz(coast_pts[:, 0], coast_pts[:, 1])
    if nn_search == "kdtree":
        tree = spatial.KDTree(coast_pts_xyz)
    elif nn_search == "flann":
        flann = pyflann.FLANN()
        flann.build_index(
            coast_pts_xyz,
            algorithm='kdtree',
            target_precision=1.0,
            random_seed=0)

    # Convert background grid coordinates to x,y,z and put in a nx_grd x 3
    # array for kd-tree query
    Lon_grd, Lat_grd = np.meshgrid(lon_grd, lat_grd)
    X_grd, Y_grd, Z_grd = lonlat2xyz(Lon_grd, Lat_grd)
    pts = np.vstack([X_grd.ravel(), Y_grd.ravel(), Z_grd.ravel()]).T

    # Find distances of background grid coordinates to the coast
    print("   Finding distance")
    start = timeit.default_timer()
    if nn_search == "kdtree":
        d, idx = tree.query(pts)
    elif nn_search == "flann":
        # flann returns squared distances
        idx, d = flann.nn_index(pts, checks=2000, random_seed=0)
        d = np.sqrt(d)
    end = timeit.default_timer()
    print("   Done")
    print("   " + str(end - start) + " seconds")

    # Make distance array that corresponds with cell_width array
    D = np.reshape(d, Lon_grd.shape)

    if plot_option:
        print("   Plotting distance to coast")

        # Find coordinates and data inside plotting box
        lon_plot, lat_plot, D_plot = get_data_inside_box(
            lon_grd, lat_grd, D, plot_box)

        # Plot distance to coast
        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
        D_plot = D_plot / km
        levels = np.linspace(np.amin(D_plot), np.amax(D_plot), 10)
        plt.contourf(lon_plot, lat_plot, D_plot, levels=levels,
                     transform=ccrs.PlateCarree())
        plot_coarse_coast(ax, plot_box)
        plt.plot(coastlines[:, 0], coastlines[:, 1], color='white')
        plt.grid(
            xdata=lon_plot,
            ydata=lat_plot,
            c='k',
            ls='-',
            lw=0.1,
            alpha=0.5)
        plt.colorbar()
        plt.axis('equal')
        plt.savefig('distance' + str(call) + '.png', bbox_inches='tight')
        plt.close()
        print("   Done")

    return D  # }}}

##############################################################


def signed_distance_from_geojson(fc, lon_grd, lat_grd, max_length=None):  # {{{
    """
    Get the distance for each point on a lon/lat grid from the closest point
    on the boundary of the geojson regions.

    Parameters
    ----------
    fc : geometric_features.FeatureCollection
        The regions to be rasterized
    lon_grd : numpy.ndarray
        A 1D array of evenly spaced longitude values
    lat_grd : numpy.ndarray
        A 1D array of evenly spaced latitude values
    max_length : float, optional
        The maximum distance (in degrees) between points on the boundary of the
        geojson region.  If the boundary is too coarse, it will be subdivided.

    Returns
    -------
    signed_distance : numpy.ndarray
        A 2D field of distances (negative inside the region, positive outside)
        to the shape boundary
    """
    distance = distance_from_geojson(fc, lon_grd, lat_grd,
                                     nn_search='flann', max_length=max_length)

    mask = mask_from_geojson(fc, lon_grd, lat_grd)

    # flip the sign of the distance where the mask is 1 (inside the region)
    signed_distance = (-2.0 * mask + 1.0) * distance
    return signed_distance


def mask_from_geojson(fc, lon_grd, lat_grd):  # {{{
    """
    Make a rasterized mask on a lon/lat grid from shapes (geojson multipolygon
    data).

    Parameters
    ----------
    fc : geometric_features.FeatureCollection
        The regions to be rasterized
    lon_grd : numpy.ndarray
        A 1D array of evenly spaced longitude values
    lat_grd : numpy.ndarray
        A 1D array of evenly spaced latitude values

    Returns
    -------
    mask : numpy.ndarray
        A 2D mask with the shapes rasterized (0.0 outside, 1.0 inside)
    """

    print("Mask from geojson")
    print("-----------------")

    nlon = len(lon_grd)
    nlat = len(lat_grd)

    dlon = (lon_grd[-1] - lon_grd[0]) / (nlon - 1)
    dlat = (lat_grd[-1] - lat_grd[0]) / (nlat - 1)

    transform = Affine(dlon, 0.0, lon_grd[0],
                       0.0, dlat, lat_grd[0])

    shapes = []
    for feature in fc.features:
        # a list of feature geometries and mask values (always 1.0)
        shape = shapely.geometry.shape(feature['geometry'])
        # expand a bit to make sure we hit the edges of the domain
        shape = shape.buffer(dlat)
        shapes.append((shapely.geometry.mapping(shape), 1.0))

    mask = rasterize(shapes, out_shape=(nlat, nlon), transform=transform)
    if np.abs(lon_grd[0] - (-180.)) < 1e-3 and \
            np.abs(lon_grd[-1] - (180.)) < 1e-3:
        # the extra column at the periodic boundary needs to be copied
        print('   fixing periodic boundary')
        mask[:, -1] = mask[:, 0]
    return mask  # }}}
+ + Returns + ------- + distance : numpy.ndarray + A 2D field of distances to the shape boundary + """ + print("Distance from geojson") + print("---------------------") + + print(" Finding region boundaries") + boundary_lon = [] + boundary_lat = [] + for feature in fc.features: + # get the boundary of each shape + shape = shapely.geometry.shape(feature['geometry']).boundary + if max_length is not None: + # subdivide the shape if it's too coarse + geom_type = shape.geom_type + shape = subdivide_geom(shape, geom_type, max_length) + x, y = shape.coords.xy + boundary_lon.extend(x) + boundary_lat.extend(y) + + boundary_lon = np.array(boundary_lon) + boundary_lat = np.array(boundary_lat) + + # Remove point at +/- 180 lon and +/- 90 lat because these are "fake". + # Need a little buffer (0.01 degrees) to avoid misses due to rounding. + mask = np.logical_not(np.logical_or( + np.logical_or(boundary_lon <= -179.99, boundary_lon >= 179.99), + np.logical_or(boundary_lat <= -89.99, boundary_lat >= 89.99))) + + boundary_lon = boundary_lon[mask] + boundary_lat = boundary_lat[mask] + + print(" Mean boundary latitude: {0:.2f}".format(np.mean(boundary_lat))) + + # Convert coastline points to x,y,z and create kd-tree + npoints = len(boundary_lon) + boundary_xyz = np.zeros((npoints, 3)) + boundary_xyz[:, 0], boundary_xyz[:, 1], boundary_xyz[:, 2] = \ + lonlat2xyz(boundary_lon, boundary_lat) + flann = None + tree = None + if nn_search == "kdtree": + tree = spatial.KDTree(boundary_xyz) + elif nn_search == "flann": + flann = pyflann.FLANN() + flann.build_index( + boundary_xyz, + algorithm='kdtree', + target_precision=1.0, + random_seed=0) + else: + raise ValueError('Bad nn_search: expected kdtree or flann, got ' + '{}'.format(nn_search)) + + # Convert backgound grid coordinates to x,y,z and put in a nx_grd x 3 array + # for kd-tree query + Lon_grd, Lat_grd = np.meshgrid(lon_grd, lat_grd) + X_grd, Y_grd, Z_grd = lonlat2xyz(Lon_grd, Lat_grd) + pts = np.vstack([X_grd.ravel(), Y_grd.ravel(), 
Z_grd.ravel()]).T + + # Find distances of background grid coordinates to the coast + print(" Finding distance") + start = timeit.default_timer() + distance = None + if nn_search == "kdtree": + distance, _ = tree.query(pts) + elif nn_search == "flann": + _, distance = flann.nn_index(pts, checks=2000, random_seed=0) + distance = np.sqrt(distance) + end = timeit.default_timer() + print(" Done") + print(" {0:.0f} seconds".format(end-start)) + + # Make distance array that corresponds with cell_width array + distance = np.reshape(distance, Lon_grd.shape) + + return distance # }}} + + +def compute_cell_width(D, cell_width, lon, lat, dx_min, trans_start, trans_width, restrict_box, # {{{ + plot_option=False, plot_box=[], lon_grd=[], lat_grd=[], coastlines=[], call=None): + + print("Compute cell width") + print("------------------") + + # Compute cell width based on distance + print(" Computing cell width") + backgnd_weight = .5 * \ + (np.tanh((D - trans_start - .5 * trans_width) / (.2 * trans_width)) + 1.0) + dist_weight = 1.0 - backgnd_weight + ## Use later for depth and slope dependent resolution + ##hres = np.maximum(dx_min*bathy_grd/20,dx_min) + ##hres = np.minimum(hres,dx_max) + #hw = np.zeros(Lon_grd.shape) + dx_max + #hw[ocn_idx] = np.sqrt(9.81*bathy_grd[ocn_idx])*12.42*3600/25 + #hs = .20*1/dbathy_grd + #h = np.fmin(hs,hw) + #h = np.fmin(h,dx_max) + #h = np.fmax(dx_min,h) + + cell_width_old = np.copy(cell_width) + + # Apply cell width function + if len(restrict_box["include"]) > 0: + # Only apply inside include regions + for box in restrict_box["include"]: + idx = get_indices_inside_quad(lon, lat, box) + cell_width[idx] = (dx_min*dist_weight[idx] + + np.multiply(cell_width_old[idx], backgnd_weight[idx])) + else: + # Apply everywhere + cell_width = (dx_min*dist_weight + + np.multiply(cell_width_old, backgnd_weight)) + + # Don't applt cell width function in exclude regions (revert to previous values) + if len(restrict_box["exclude"]) > 0: + for box in 
restrict_box["exclude"]: + idx = get_indices_inside_quad(lon, lat, box) + cell_width[idx] = cell_width_old[idx] + print(" Done") + + if plot_option: + print(" Plotting cell width") + + # Find coordinates and data inside plotting box + lon_plot, lat_plot, cell_width_plot = get_data_inside_box( + lon_grd, lat_grd, cell_width / km, plot_box) + + # Plot cell width + fig = plt.figure() + ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree()) + levels = np.linspace(np.amin(cell_width_plot), + np.amax(cell_width_plot), 100) + plt.contourf(lon_plot, lat_plot, cell_width_plot, levels=levels, + transform=ccrs.PlateCarree()) + plot_coarse_coast(ax, plot_box) + plt.plot(coastlines[:, 0], coastlines[:, 1], color='white') + if restrict_box: + for box in restrict_box["include"]: + plot_region_box(box, 'b') + for box in restrict_box["exclude"]: + plot_region_box(box, 'r') + plt.colorbar() + plt.axis('equal') + plt.savefig('cell_width' + str(call) + '.png', bbox_inches='tight') + plt.close() + + # Plot cell width transistion functions + ts = trans_start/km + tw = trans_width/km + d = np.linspace(0,2*(ts+tw),1000) + bw = .5*(np.tanh((d-ts-.5*tw)/(.2*tw))+1) + dw = 1-bw + plt.figure() + plt.plot(d,bw) + plt.plot(d,dw) + plt.legend(('background','coastal region')) + plt.plot([ts,ts],[0.0,1.0],'k-') + plt.plot([ts+tw,ts+tw],[0.0,1.0],'k-') + plt.tight_layout() + plt.xlabel('distance (km)') + plt.ylabel('weight') + plt.savefig('trans_func'+str(call)+'.png',bbox_inches='tight') + plt.close() + print(" Done") + + return cell_width # }}} + +############################################################## + + +def save_matfile(cell_width, lon, lat): + + io.savemat('cellWidthVsLatLon.mat', + mdict={ + 'cellWidth': cell_width, + 'lon': lon, + 'lat': lat}) + +############################################################## + + +def CPP_projection(lon, lat, origin): + + R = 6378206.4 + origin = origin * deg2rad + x = R * (lon * deg2rad - origin[0]) * np.cos(origin[1]) + y = R * lat * deg2rad 
+ + return x, y + +############################################################## + +def lonlat2xyz(lon,lat): + + R = 6378206.4 + x = R*np.multiply(np.cos(lon*deg2rad),np.cos(lat*deg2rad)) + y = R*np.multiply(np.sin(lon*deg2rad),np.cos(lat*deg2rad)) + z = R*np.sin(lat*deg2rad) + + return x,y,z + +############################################################## + + +def smooth_coastline(x, y, window): + xs = np.copy(x) + ys = np.copy(y) + offset = (window-1)/2 + for pt in range(offset-1, x.size-offset): + xs[pt] = np.mean(x[pt-offset:pt+offset]) + ys[pt] = np.mean(y[pt-offset:pt+offset]) + return xs, ys +############################################################## + + +def get_data_inside_box(lon, lat, data, box, idx=False): + + # Find indicies of coordinates inside bounding box + lon_idx, = np.where((lon >= box[0]) & (lon <= box[1])) + lat_idx, = np.where((lat >= box[2]) & (lat <= box[3])) + + # Get indicies inside bounding box + lon_region = lon[lon_idx] + lat_region = lat[lat_idx] + latlon_idx = np.ix_(lat_idx, lon_idx) + + # Return data inside bounding box + if idx == False: + + try: # Numpy indexing + z_region = data[latlon_idx] + except: # NetCDF indexing + z_region = data[lat_idx, lon_idx] + + return (lon_region, lat_region, z_region) + + # Return indicies of data inside bounding box + elif idx == True: + + return latlon_idx + +############################################################## + + +def get_indices_inside_quad(lon, lat, box, grid=True): + + wrap = flag_wrap(box) + + lon_adj = np.copy(lon) + if wrap: + idx = np.where((lon_adj >= -180.0) & (lon_adj <= -90.0)) + lon_adj[idx] = lon_adj[idx] + 360.0 + + if grid: + # Create vectors of all coordinates + Lon_grd, Lat_grd = np.meshgrid(lon_adj, lat) + X = Lon_grd.ravel() + Y = Lat_grd.ravel() + else: + X = lon_adj + Y = lat + + xb,rect = get_convex_hull_coordinates(box) + + # Find indices of coordinates in convex hull of quad region + idxx = np.where((X >= xb[0]) & (X <= xb[1])) + idxy = np.where((Y >= 
xb[2]) & (Y <= xb[3])) + idx_ch = np.intersect1d(idxx, idxy) + idx = np.copy(idx_ch) + + if rect == True: + if grid == True: + idx = np.unravel_index(idx, Lon_grd.shape) + return idx + + # Initialize the local coordinate vectors to be outside unit square + R = 0.0 * X - 10.0 + S = 0.0 * Y - 10.0 + + # Initialize the coordinate vectors for points inside convex hull of quad region + r = 0.0 * R[idx] + s = 0.0 * S[idx] + x = X[idx] + y = Y[idx] + + # Map all coordinates in convex hull of quad region to unit square + # by solving inverse transformaion with Newton's method + tol = 1e-8 + maxit = 25 + for it in range(0, maxit): + + # Compute shape fuctions + phi1 = np.multiply((1.0 - r), (1.0 - s)) + phi2 = np.multiply((1.0 + r), (1.0 - s)) + phi3 = np.multiply((1.0 + r), (1.0 + s)) + phi4 = np.multiply((1.0 - r), (1.0 + s)) + + # Compute functions that are being solved + f1 = .25 * (phi1 * box[0, 0] + phi2 * box[1, 0] + + phi3 * box[2, 0] + phi4 * box[3, 0]) - x + f2 = .25 * (phi1 * box[0, 1] + phi2 * box[1, 1] + + phi3 * box[2, 1] + phi4 * box[3, 1]) - y + + # Compute Jacobian + df1ds = .25 * ((r - 1.0) * box[0, 0] - (1.0 + r) * box[1, 0] + (1.0 + r) * box[2, 0] + (1.0 - r) * box[3, 0]) + df1dr = .25 * ((s - 1.0) * box[0, 0] + (1.0 - s) * box[1, 0] + (1.0 + s) * box[2, 0] - (1.0 + s) * box[3, 0]) + df2ds = .25 * ((r - 1.0) * box[0, 1] - (1.0 + r) * box[1, 1] + (1.0 + r) * box[2, 1] + (1.0 - r) * box[3, 1]) + df2dr = .25 * ((s - 1.0) * box[0, 1] + (1.0 - s) * box[1, 1] + (1.0 + s) * box[2, 1] - (1.0 + s) * box[3, 1]) + + # Inverse of 2x2 matrix + det_recip = np.multiply(df1dr, df2ds) - np.multiply(df2dr, df1ds) + det_recip = 1.0 / det_recip + dr = np.multiply(det_recip, np.multiply(df2ds, f1) - np.multiply(df1ds, f2)) + ds = np.multiply(det_recip, -np.multiply(df2dr, f1) + np.multiply(df1dr, f2)) + + # Apply Newton's method + rnew = r - dr + snew = s - ds + + # Find converged values + err = R[idx] - rnew + idxr = np.where(np.absolute(err) < tol) + err = S[idx] - snew + 
idxs = np.where(np.absolute(err) < tol) + idx_conv = np.intersect1d(idxr, idxs) + + # Update solution + R[idx] = rnew + S[idx] = snew + + # Find indicies of unconverged values + idx = np.delete(idx, idx_conv) + # print("Iteration: ", it, "unconverged values: ", idx.size) + + # Terminate once all values are converged + if idx.size == 0: + break + + # Initialize to unconverged values for next iteration + r = R[idx] + s = S[idx] + x = X[idx] + y = Y[idx] + + # Find any remaining unconverged values + if grid == True: + idx_nc = np.unravel_index(idx, Lon_grd.shape) + else: + idx_nc = np.copy(idx) + + # Find indicies of coordinates inside quad region + lon_idx, = np.where((R >= -1.0) & (R <= 1.0)) + lat_idx, = np.where((S >= -1.0) & (S <= 1.0)) + idx = np.intersect1d(lon_idx, lat_idx) + if grid == True: + idx = np.unravel_index(idx, Lon_grd.shape) + + ## Plot values inside quad region + # plt.figure() + # plt.plot(X,Y,'.') + # if grid == True: + # plt.plot(Lon_grd[idx],Lat_grd[idx],'.') + # plt.plot(Lon_grd[idx_nc],Lat_grd[idx_nc],'.') + # else: + # plt.plot(lon[idx],lat[idx],'.') + # plt.plot(lon[idx_nc],lat[idx_nc],'.') + # plt.plot(box[:,0],box[:,1],'o') + # plt.savefig("restrict_box.png") + + return idx + +############################################################## + +def get_convex_hull_coordinates(box): + + wrap = flag_wrap(box) + + xb = np.zeros(4) + if box.size == 4: + if wrap: + for i in range(2): + if box[i] >= -180.0 and box[i] <= -90.0: + box[i] = box[i] + 360.0 + + xb[0] = box[0] + xb[1] = box[1] + xb[2] = box[2] + xb[3] = box[3] + rect = True + else: + if wrap: + for i in range(4): + if box[i, 0] >= -180.0 and box[i, 0] <= -90.0: + box[i, 0] = box[i, 0] + 360.0 + + xb[0] = np.amin(box[:, 0]) + xb[1] = np.amax(box[:, 0]) + xb[2] = np.amin(box[:, 1]) + xb[3] = np.amax(box[:, 1]) + rect = False + + return xb,rect + +############################################################## + +def flag_wrap(box): + + wrap = False + if box.size == 4: + if box[0] > 0.0 
and box[1] < 0.0: + wrap = True + else: + if np.any(box[:,0] > 0.0) and np.any(box[:,0] < 0.0): + wrap = True + + return wrap + +############################################################## + +def plot_coarse_coast(ax, plot_box): + + ax.set_extent(plot_box, crs=ccrs.PlateCarree()) + ax.add_feature(cfeature.COASTLINE) + +############################################################## + + +def plot_region_box(box, color): + ls = color + '-' + if box.size == 4: + plt.plot([box[0], box[1]], [box[2], box[2]], ls) + plt.plot([box[1], box[1]], [box[2], box[3]], ls) + plt.plot([box[1], box[0]], [box[3], box[3]], ls) + plt.plot([box[0], box[0]], [box[3], box[2]], ls) + else: + plt.plot([box[0, 0], box[1, 0]], [box[0, 1], box[1, 1]], ls) + plt.plot([box[1, 0], box[2, 0]], [box[1, 1], box[2, 1]], ls) + plt.plot([box[2, 0], box[3, 0]], [box[2, 1], box[3, 1]], ls) + plt.plot([box[3, 0], box[0, 0]], [box[3, 1], box[0, 1]], ls) + + +########################################################################## +# Incorporate later for depth and slope dependent resolution +########################################################################## + + +## +## Interpolate bathymetry onto background grid +#Lon_grd = Lon_grd*deg2rad +#Lat_grd = Lat_grd*deg2rad +#bathy = inject_bathymetry.interpolate_bathymetry(data_path+nc_file,Lon_grd.ravel(),Lat_grd.ravel()) +#bathy_grd = -1.0*np.reshape(bathy,(ny_grd,nx_grd)) +#ocn_idx = np.where(bathy_grd > 0) +# +# if plot_option: +# plt.figure() +# levels = np.linspace(0,11000,100) +# plt.contourf(lon_grd,lat_grd,bathy_grd,levels=levels) +# plot_coarse_coast(plot_box) +# plt.colorbar() +# plt.axis('equal') +# plt.savefig('bckgnd_grid_bathy.png',bbox_inches='tight') + +## Interpolate bathymetry gradient onto background grid +#dbathy = inject_bathymetry.interpolate_bathymetry(data_path+nc_file,Lon_grd.ravel(),Lat_grd.ravel(),grad=True) +#dbathy = np.reshape(dbathy,(ny_grd,nx_grd)) +#dbathy_grd = np.zeros((ny_grd,nx_grd)) +#dbathy_grd[ocn_idx] = 
dbathy[ocn_idx] +# +# if plot_option: +# plt.figure() +# plt.contourf(lon_grd,lat_grd,1/dbathy_grd) +# plot_coarse_coast(plot_box) +# plt.colorbar() +# plt.axis('equal') +# plt.savefig('bckgnd_grid_bathy_grad.png',bbox_inches='tight') diff --git a/conda_package/mpas_tools/mesh/creation/inject_bathymetry.py b/conda_package/mpas_tools/mesh/creation/inject_bathymetry.py new file mode 100644 index 000000000..5aa73d284 --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/inject_bathymetry.py @@ -0,0 +1,120 @@ +# Simple script to inject bathymetry onto a mesh +# Phillip Wolfram, 01/19/2018 + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +from mpas_tools.mesh.creation.open_msh import readmsh +import numpy as np +from scipy import interpolate +import netCDF4 as nc4 +import timeit +import os +import sys + + +def inject_bathymetry(mesh_file): + # Open NetCDF mesh file and read mesh points + nc_mesh = nc4.Dataset(mesh_file, 'r+') + lon_mesh = np.mod( + nc_mesh.variables['lonCell'][:] + np.pi, + 2 * np.pi) - np.pi + lat_mesh = nc_mesh.variables['latCell'][:] + + # Interpolate bathymetry on to mesh points + if os.path.isfile("earth_relief_15s.nc"): + bathymetry = interpolate_SRTM(lon_mesh, lat_mesh) + elif os.path.isfile("topo.msh"): + bathymetry = interpolate_topomsh(lon_mesh, lat_mesh) + else: + print("Bathymetry data file not found") + raise SystemExit(0) + + # Create new NetCDF variables in mesh file, if necessary + nc_vars = nc_mesh.variables.keys() + if 'bottomDepthObserved' not in nc_vars: + nc_mesh.createVariable('bottomDepthObserved', 'f8', ('nCells')) + + # Write to mesh file + nc_mesh.variables['bottomDepthObserved'][:] = bathymetry + nc_mesh.close() + + +def interpolate_SRTM(lon_pts, lat_pts): + + # Open NetCDF data file and read cooordintes + nc_data = nc4.Dataset("earth_relief_15s.nc", "r") + lon_data = np.deg2rad(nc_data.variables['lon'][:]) + lat_data = np.deg2rad(nc_data.variables['lat'][:]) + + # Setup 
interpolation boxes (for large bathymetry datasets) + n = 15 + xbox = np.deg2rad(np.linspace(-180, 180, n)) + ybox = np.deg2rad(np.linspace(-90, 90, n)) + dx = xbox[1] - xbox[0] + dy = ybox[1] - ybox[0] + boxes = [] + for i in range(n - 1): + for j in range(n - 1): + boxes.append(np.asarray( + [xbox[i], xbox[i + 1], ybox[j], ybox[j + 1]])) + + # Initialize bathymetry + bathymetry = np.zeros(np.shape(lon_pts)) + bathymetry.fill(np.nan) + + # Interpolate inside each box + start = timeit.default_timer() + for i, box in enumerate(boxes): + print(i + 1, "/", len(boxes)) + + # Get data inside box (plus a small overlap region) + overlap = 0.1 + lon_idx, = np.where( + (lon_data >= box[0] - overlap * dx) & (lon_data <= box[1] + overlap * dx)) + lat_idx, = np.where( + (lat_data >= box[2] - overlap * dy) & (lat_data <= box[3] + overlap * dy)) + xdata = lon_data[lon_idx] + ydata = lat_data[lat_idx] + zdata = nc_data.variables['z'][lat_idx, lon_idx] + + # Get points inside box + lon_idx, = np.where((lon_pts >= box[0]) & (lon_pts <= box[1])) + lat_idx, = np.where((lat_pts >= box[2]) & (lat_pts <= box[3])) + idx = np.intersect1d(lon_idx, lat_idx) + xpts = lon_pts[idx] + ypts = lat_pts[idx] + xy_pts = np.vstack((xpts, ypts)).T + + # Interpolate bathymetry onto points + bathy = interpolate.RegularGridInterpolator( + (xdata, ydata), zdata.T, bounds_error=False, fill_value=np.nan) + bathy_int = bathy(xy_pts) + bathymetry[idx] = bathy_int + + end = timeit.default_timer() + print(end - start, " seconds") + + return bathymetry + + +def interpolate_topomsh(lon_pts, lat_pts): + + topo = readmsh('topo.msh') + xpos = np.deg2rad(topo['COORD1']) + ypos = np.deg2rad(topo['COORD2']) + zlev = np.reshape(topo['VALUE'], (len(ypos), len(xpos))) + + Y, X = np.meshgrid(ypos, xpos) + + bathy = interpolate.LinearNDInterpolator( + np.vstack((X.ravel(), Y.ravel())).T, zlev.ravel()) + bathymetry = bathy(np.vstack((lon_pts, lat_pts)).T) + + return bathymetry + + +def main(): + # Open NetCDF mesh file and 
read mesh points + mesh_file = sys.argv[1] + inject_bathymetry(mesh_file) diff --git a/conda_package/mpas_tools/mesh/creation/inject_meshDensity.py b/conda_package/mpas_tools/mesh/creation/inject_meshDensity.py new file mode 100644 index 000000000..5224e36d3 --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/inject_meshDensity.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python +# Simple script to inject mesh density onto a mesh +# example usage: +# ./inject_meshDensity.py cellWidthVsLatLon.nc base_mesh.nc +# where: +# cellWidthVsLatLon.nc is a netcdf file with cellWidth +# base_mesh.nc is the mpas netcdf file where meshDensity is added +# Mark Petersen, 7/24/2018 + +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy as np +from scipy import interpolate +import netCDF4 as nc4 +import sys + + +def inject_meshDensity(cw_filename, mesh_filename, on_sphere=True): + print('Read cell width field from nc file regular grid...') + ds = nc4.Dataset(cw_filename,'r') + cellWidth = ds.variables['cellWidth'][:] + if on_sphere: + lon = ds.variables['lon'][:] + lat = ds.variables['lat'][:] + else: + x = ds.variables['x'][:] + y = ds.variables['y'][:] + ds.close() + + if on_sphere: + # Add extra column in longitude to interpolate over the Date Line + cellWidth = np.concatenate( + (cellWidth, cellWidth[:, 0:1]), axis=1) + LonPos = np.deg2rad(np.concatenate( + (lon.T, lon.T[0:1] + 360))) + LatPos = np.deg2rad(lat.T) + # set max lat position to be exactly at North Pole to avoid interpolation + # errors + LatPos[np.argmax(LatPos)] = np.pi / 2.0 + minCellWidth = cellWidth.min() + meshDensityVsXY = (minCellWidth / cellWidth)**4 + print(' minimum cell width in grid definition: {0:.0f} km'.format(minCellWidth/1000.)) + print(' maximum cell width in grid definition: {0:.0f} km'.format(cellWidth.max()/1000.)) + + if on_sphere: + X, Y = np.meshgrid(LonPos, LatPos) + else: + X, Y = np.meshgrid(x, y) + + print('Open unstructured MPAS mesh 
file...') + ds = nc4.Dataset(mesh_filename, 'r+') + meshDensity = ds.variables['meshDensity'] + + print('Preparing interpolation of meshDensity from native coordinates to mesh...') + meshDensityInterp = interpolate.LinearNDInterpolator( + np.vstack((X.ravel(), Y.ravel())).T, meshDensityVsXY.ravel()) + + print('Interpolating and writing meshDensity...') + if on_sphere: + meshDensity[:] = meshDensityInterp( + np.vstack( + (np.mod( + ds.variables['lonCell'][:] + + np.pi, + 2 * + np.pi) - + np.pi, + ds.variables['latCell'][:])).T) + else: + meshDensity[:] = meshDensityInterp( + np.vstack( + (ds.variables['xCell'][:], + ds.variables['yCell'][:]) + ).T) + + ds.close() + + +if __name__ == "__main__": + + inject_meshDensity(cw_filename=sys.argv[1], mesh_filename=sys.argv[2]) diff --git a/conda_package/mpas_tools/mesh/creation/inject_preserve_floodplain.py b/conda_package/mpas_tools/mesh/creation/inject_preserve_floodplain.py new file mode 100644 index 000000000..0e08d3dad --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/inject_preserve_floodplain.py @@ -0,0 +1,28 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import netCDF4 as nc4 +import argparse + + +def inject_preserve_floodplain(mesh_file, floodplain_elevation): + + nc_mesh = nc4.Dataset(mesh_file, 'r+') + nc_vars = nc_mesh.variables.keys() + + if 'cellSeedMask' not in nc_vars: + nc_mesh.createVariable('cellSeedMask', 'i', ('nCells')) + nc_mesh.variables['cellSeedMask'][:] = \ + nc_mesh.variables['bottomDepthObserved'][:] < floodplain_elevation + + nc_mesh.close() + + +def main(): + + parser = argparse.ArgumentParser() + parser.add_argument('mesh_file', action='store', type=str) + parser.add_argument('floodplain_elevation', action='store', type=float) + cl_args = parser.parse_args() + + inject_preserve_floodplain(cl_args.mesh_file, cl_args.floodplain_elevation) diff --git a/conda_package/mpas_tools/mesh/creation/jigsaw_driver.py 
b/conda_package/mpas_tools/mesh/creation/jigsaw_driver.py new file mode 100644 index 000000000..0960b27e1 --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/jigsaw_driver.py @@ -0,0 +1,74 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy +import jigsawpy + + +def jigsaw_driver(cellWidth, x, y, on_sphere=True, geom_points=None, geom_edges=None): + ''' + A function for building a jigsaw mesh + + Parameters + ---------- + cellWidth : ndarray + The size of each cell in the resulting mesh as a function of space + + x, y : ndarray + The x and y coordinates of each point in the cellWidth array (lon and lat for spherical mesh) + + on_sphere : logical + Whether this mesh is spherical or planar + + geom_points : list of point coordinates for bounding polygon for planar mesh + + geom_edges : list of edges between points in geom_points that define the bounding polygon + + ''' + # Authors + # ------- + # Mark Petersen, Phillip Wolfram, Xylar Asay-Davis + + # setup files for JIGSAW + opts = jigsawpy.jigsaw_jig_t() + opts.geom_file = 'mesh.msh' + opts.jcfg_file = 'mesh.jig' + opts.mesh_file = 'mesh-MESH.msh' + opts.hfun_file = 'mesh-HFUN.msh' + + # save HFUN data to file + hmat = jigsawpy.jigsaw_msh_t() + if on_sphere: + hmat.mshID = 'ELLIPSOID-GRID' + hmat.xgrid = numpy.radians(x) + hmat.ygrid = numpy.radians(y) + else: + hmat.mshID = 'EUCLIDEAN-GRID' + hmat.xgrid = x + hmat.ygrid = y + hmat.value = cellWidth + jigsawpy.savemsh(opts.hfun_file, hmat) + + # define JIGSAW geometry + geom = jigsawpy.jigsaw_msh_t() + if on_sphere: + geom.mshID = 'ELLIPSOID-MESH' + geom.radii = 6371.*numpy.ones(3, float) + else: + geom.mshID = 'EUCLIDEAN-MESH' + geom.vert2 = geom_points + geom.edge2 = geom_edges + #geom.edge2.index = geom_edges + print (geom_points) + jigsawpy.savemsh(opts.geom_file, geom) + + # build mesh via JIGSAW! 
+ mesh = jigsawpy.jigsaw_msh_t() + opts.hfun_scal = 'absolute' + opts.hfun_hmax = float("inf") + opts.hfun_hmin = 0.0 + opts.mesh_dims = +2 # 2-dim. simplexes + opts.optm_qlim = 0.9375 + opts.verbosity = +1 + + jigsawpy.cmd.jigsaw(opts) diff --git a/conda_package/mpas_tools/mesh/creation/jigsaw_to_netcdf.py b/conda_package/mpas_tools/mesh/creation/jigsaw_to_netcdf.py new file mode 100644 index 000000000..ccd09cf14 --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/jigsaw_to_netcdf.py @@ -0,0 +1,148 @@ +from __future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy as np + +from netCDF4 import Dataset as NetCDFFile +from mpas_tools.mesh.creation.open_msh import readmsh +from mpas_tools.mesh.creation.util import circumcenter + +import argparse + + +def jigsaw_to_netcdf(msh_filename, output_name, on_sphere): + """ + Converts mesh data defined in triangle format to NetCDF + + Parameters + ---------- + msh_filename : str + A JIGSAW mesh file name + output_name: str + The name of the output file + on_sphere : bool + Whether the mesh is spherical or planar + """ + # Authors: Phillip J. 
Wolfram, Matthew Hoffman and Xylar Asay-Davis + + grid = NetCDFFile(output_name, 'w', format='NETCDF3_CLASSIC') + + # Get dimensions + # Get nCells + msh = readmsh(msh_filename) + nCells = msh['POINT'].shape[0] + + # Get vertexDegree and nVertices + vertexDegree = 3 # always triangles with JIGSAW output + nVertices = msh['TRIA3'].shape[0] + + if vertexDegree != 3: + ValueError("This script can only compute vertices with triangular " + "dual meshes currently.") + + grid.createDimension('nCells', nCells) + grid.createDimension('nVertices', nVertices) + grid.createDimension('vertexDegree', vertexDegree) + + # Create cell variables and sphere_radius + sphere_radius = 6371000 + xCell_full = msh['POINT'][:, 0] + yCell_full = msh['POINT'][:, 1] + zCell_full = msh['POINT'][:, 2] + for cells in [xCell_full, yCell_full, zCell_full]: + assert cells.shape[0] == nCells, 'Number of anticipated nodes is' \ + ' not correct!' + if on_sphere: + grid.on_a_sphere = "YES" + grid.sphere_radius = sphere_radius + else: + grid.on_a_sphere = "NO" + grid.sphere_radius = 0.0 + + # Create cellsOnVertex + cellsOnVertex_full = msh['TRIA3'][:, :3] + 1 + assert cellsOnVertex_full.shape == (nVertices, vertexDegree), \ + 'cellsOnVertex_full is not the right shape!' 
+ + # Create vertex variables + xVertex_full = np.zeros((nVertices,)) + yVertex_full = np.zeros((nVertices,)) + zVertex_full = np.zeros((nVertices,)) + + for iVertex in np.arange(0, nVertices): + cell1 = cellsOnVertex_full[iVertex, 0] + cell2 = cellsOnVertex_full[iVertex, 1] + cell3 = cellsOnVertex_full[iVertex, 2] + + x1 = xCell_full[cell1 - 1] + y1 = yCell_full[cell1 - 1] + z1 = zCell_full[cell1 - 1] + x2 = xCell_full[cell2 - 1] + y2 = yCell_full[cell2 - 1] + z2 = zCell_full[cell2 - 1] + x3 = xCell_full[cell3 - 1] + y3 = yCell_full[cell3 - 1] + z3 = zCell_full[cell3 - 1] + + pv = circumcenter(on_sphere, x1, y1, z1, x2, y2, z2, x3, y3, z3) + xVertex_full[iVertex] = pv.x + yVertex_full[iVertex] = pv.y + zVertex_full[iVertex] = pv.z + + meshDensity_full = grid.createVariable( + 'meshDensity', 'f8', ('nCells',)) + + for iCell in np.arange(0, nCells): + meshDensity_full[iCell] = 1.0 + + del meshDensity_full + + var = grid.createVariable('xCell', 'f8', ('nCells',)) + var[:] = xCell_full + var = grid.createVariable('yCell', 'f8', ('nCells',)) + var[:] = yCell_full + var = grid.createVariable('zCell', 'f8', ('nCells',)) + var[:] = zCell_full + var = grid.createVariable('xVertex', 'f8', ('nVertices',)) + var[:] = xVertex_full + var = grid.createVariable('yVertex', 'f8', ('nVertices',)) + var[:] = yVertex_full + var = grid.createVariable('zVertex', 'f8', ('nVertices',)) + var[:] = zVertex_full + var = grid.createVariable( + 'cellsOnVertex', 'i4', ('nVertices', 'vertexDegree',)) + var[:] = cellsOnVertex_full + + grid.sync() + grid.close() + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument( + "-m", + "--msh", + dest="msh", + required=True, + help="input .msh file generated by JIGSAW.", + metavar="FILE") + parser.add_argument( + "-o", + "--output", + dest="output", + default="grid.nc", + help="output file name.", + metavar="FILE") + parser.add_argument( + "-s", + "--spherical", + 
dest="spherical",
+        action="store_true",
+        default=False,
+        help="Determines if the input/output should be spherical or not.")
+
+    options = parser.parse_args()
+
+    jigsaw_to_netcdf(options.msh, options.output, options.spherical)
diff --git a/conda_package/mpas_tools/mesh/creation/mesh_definition_tools.py b/conda_package/mpas_tools/mesh/creation/mesh_definition_tools.py
new file mode 100644
index 000000000..67da09609
--- /dev/null
+++ b/conda_package/mpas_tools/mesh/creation/mesh_definition_tools.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python
+'''
+name: mesh_definition_tools
+
+These functions are tools used to define the cellWidth variable on
+regular lat/lon grids. The cellWidth variable is a jigsaw input that
+defines the mesh.
+'''
+from __future__ import absolute_import, division, print_function, \
+    unicode_literals
+
+import numpy as np
+
+
+##########################################################################
+# Functions
+##########################################################################
+
+
+def mergeCellWidthVsLat(
+        lat,
+        cellWidthInSouth,
+        cellWidthInNorth,
+        latTransition,
+        latWidthTransition):
+    '''
+    mergeCellWidthVsLat: combine two cell width distributions using a tanh function.
+    This is intended as part of the workflow to make an MPAS global mesh.
+ + Syntax: cellWidthOut = mergeCellWidthVsLat(lat, cellWidthInSouth, cellWidthInNorth, latTransition, latWidthTransition) + + Inputs: + lat - vector of length n, with entries between -90 and 90, degrees + cellWidthInSouth - vector of length n, first distribution + cellWidthInNorth - vector of length n, second distribution + + Optional inputs: + latTransition = 0 # lat to change from cellWidthInSouth to cellWidthInNorth, degrees + latWidthTransition = 0 # width of lat transition, degrees + + Outputs: + cellWidthOut - vector of length n, entries are cell width as a function of lat + ''' + # Assign defaults + # latTransition = 0 # lat to change from cellWidthInSouth to cellWidthInNorth, degrees + # latWidthTransition = 0 # width of lat transition, degrees + + cellWidthOut = np.ones(lat.size) + if latWidthTransition == 0: + for j in range(lat.size): + if lat[j] < latTransition: + cellWidthOut[j] = cellWidthInSouth[j] + else: + cellWidthOut[j] = cellWidthInNorth[j] + else: + for j in range(lat.size): + weightNorth = 0.5 * \ + (np.tanh((lat[j] - latTransition) / latWidthTransition) + 1.0) + weightSouth = 1.0 - weightNorth + cellWidthOut[j] = weightSouth * cellWidthInSouth[j] + \ + weightNorth * cellWidthInNorth[j] + + return cellWidthOut + + +def EC_CellWidthVsLat(lat, cellWidthEq=30.0, cellWidthMidLat=60.0, + cellWidthPole=35.0, latPosEq=15.0, latPosPole=73.0, + latTransition=40.0, latWidthEq=6.0, latWidthPole=9.0): + """ + Create Eddy Closure spacing as a function of lat. This is intended as part + of the workflow to make an MPAS global mesh. 
+ + Parameters + ---------- + lat : numpy.ndarray + vector of length n, with entries between -90 and 90, degrees + + cellWidthEq : float, optional + Cell width in km at the equator + + cellWidthMidLat : float, optional + Cell width in km at mid latitudes + + cellWidthPole : float, optional + Cell width in km at the poles + + latPosEq : float, optional + Latitude in degrees of center of the equatorial transition region + + latPosPole : float, optional + Latitude in degrees of center of the polar transition region + + latTransition : float, optional + Latitude in degrees of the change from equatorial to polar function + + latWidthEq : float, optional + Width in degrees latitude of the equatorial transition region + + latWidthPole : float, optional + Width in degrees latitude of the polar transition region + + Returns + ------- + + cellWidthOut : numpy.ndarray + 1D array of same length as ``lat`` with entries that are cell width as + a function of lat + + Examples + -------- + Default + + >>> EC60to30 = EC_CellWidthVsLat(lat) + + Half the default resolution: + + >>> EC120to60 = EC_CellWidthVsLat(lat, cellWidthEq=60., cellWidthMidLat=120., cellWidthPole=70.) 
+ """ + + minCellWidth = min(cellWidthEq, min(cellWidthMidLat, cellWidthPole)) + densityEq = (minCellWidth / cellWidthEq)**4 + densityMidLat = (minCellWidth / cellWidthMidLat)**4 + densityPole = (minCellWidth / cellWidthPole)**4 + densityEqToMid = ((densityEq - densityMidLat) * (1.0 + np.tanh( + (latPosEq - np.abs(lat)) / latWidthEq)) / 2.0) + densityMidLat + densityMidToPole = ((densityMidLat - densityPole) * (1.0 + np.tanh( + (latPosPole - np.abs(lat)) / latWidthPole)) / 2.0) + densityPole + mask = np.abs(lat) < latTransition + densityEC = np.array(densityMidToPole) + densityEC[mask] = densityEqToMid[mask] + cellWidthOut = minCellWidth / densityEC**0.25 + + return cellWidthOut + + +def RRS_CellWidthVsLat(lat, cellWidthEq, cellWidthPole): + ''' + RRS_CellWidthVsLat - Create Rossby Radius Scaling as a function of lat. + This is inted as part of the workflow to make an MPAS global mesh. + + Syntax: cellWidthOut = RRS_CellWidthVsLat(lat, cellWidthEq, cellWidthPole) + + Inputs: + lat - vector of length n, with entries between -90 and 90, degrees + cellWidthEq - Cell width at the equator, km + cellWidthPole - Cell width at the poles, km + + Outputs: + RRS_CellWidth - vector of length n, entries are cell width as a function of lat + + Example: + RRS18to6 = RRS_CellWidthVsLat(lat,18,6) + ''' + + # ratio between high and low resolution + gamma = (cellWidthPole / cellWidthEq)**4.0 + + densityRRS = (1.0 - gamma) * \ + np.power(np.sin(np.deg2rad(np.absolute(lat))), 4.0) + gamma + cellWidthOut = cellWidthPole / np.power(densityRRS, 0.25) + return cellWidthOut + + +def AtlanticPacificGrid(lat, lon, cellWidthInAtlantic, cellWidthInPacific): + ''' + AtlanticPacificGrid: combine two cell width distributions using a tanh function. 
+ + Inputs: + lon - vector of length m, with entries between -180, 180, degrees + lat - vector of length n, with entries between -90, 90, degrees + cellWidthInAtlantic - vector of length n, cell width in Atlantic as a function of lon, km + cellWidthInPacific - vector of length n, cell width in Pacific as a function of lon, km + + Optional inputs: + + Outputs: + cellWidthOut - m by n array, grid cell width on globe, km + ''' + cellWidthOut = np.zeros((lat.size, lon.size)) + for i in range(lon.size): + for j in range(lat.size): + # set to Pacific mask as default + cellWidthOut[j, i] = cellWidthInPacific[j] + # test if in Atlantic Basin: + if lat[j] > 65.0: + if (lon[i] > -150.0) & (lon[i] < 170.0): + cellWidthOut[j, i] = cellWidthInAtlantic[j] + elif lat[j] > 20.0: + if (lon[i] > -100.0) & (lon[i] < 35.0): + cellWidthOut[j, i] = cellWidthInAtlantic[j] + elif lat[j] > 0.0: + if (lon[i] > -2.0 * lat[j] - 60.0) & (lon[i] < 35.0): + cellWidthOut[j, i] = cellWidthInAtlantic[j] + else: + if (lon[i] > -60.0) & (lon[i] < 20.0): + cellWidthOut[j, i] = cellWidthInAtlantic[j] + return cellWidthOut + +############################################################## diff --git a/conda_package/mpas_tools/mesh/creation/mpas_to_triangle.py b/conda_package/mpas_tools/mesh/creation/mpas_to_triangle.py new file mode 100644 index 000000000..246f49fc7 --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/mpas_to_triangle.py @@ -0,0 +1,105 @@ +''' +Script to convert from MPAS netCDF format to the Triangle format: +https://www.cs.cmu.edu/~quake/triangle.node.html +https://www.cs.cmu.edu/~quake/triangle.ele.html + +Only works for planar meshes. 
+'''
+from __future__ import absolute_import, division, print_function, \
+    unicode_literals
+
+import sys
+from netCDF4 import Dataset as NetCDFFile
+from optparse import OptionParser
+
+
+def mpas_to_triangle(mpasfile, trifile):
+
+    fin = NetCDFFile(mpasfile, 'r')
+    if fin.on_a_sphere == "YES":
+        sys.exit("ERROR: This script only works for planar meshes!")
+
+    if len(fin.dimensions['vertexDegree']) != 3:
+        sys.exit("ERROR: This script only works for vertexDegree of 3!")
+
+    nCells = len(fin.dimensions['nCells'])
+    nVertices = len(fin.dimensions['nVertices'])
+
+    xCell = fin.variables['xCell'][:]
+    yCell = fin.variables['yCell'][:]
+    ConC = fin.variables['cellsOnCell'][:]
+    nConC = fin.variables['nEdgesOnCell'][:]
+    ConV = fin.variables['cellsOnVertex'][:]
+
+    # create node file
+    fnode = open(trifile + ".node", 'w')
+    # write node file header: First line: <# of vertices> <# of attributes> <# of boundary markers (0 or 1)>
+    fnode.write("{:d} 2 0 1\n".format(nCells))
+    # Remaining lines: [attributes] [boundary marker]
+    for i in range(nCells):
+        if ConC[i, 0:nConC[i]].min() == 0:
+            isBdy = 1
+        else:
+            isBdy = 0
+        fnode.write(
+            "{:d} {:f} {:f} {:d}\n".format(
+                i + 1,
+                xCell[i],
+                yCell[i],
+                isBdy))
+    fnode.write("# Generated from MPAS file: {}\n".format(mpasfile))
+    fnode.close()
+
+    # create ele file
+    fele = open(trifile + ".ele", "w")
+
+    # calculate number of non-degenerate triangles
+    numtri = 0
+    for i in range(nVertices):
+        if ConV[i, :].min() > 0:
+            numtri += 1
+
+    # write ele file header: First line: <# of triangles> <# of attributes>
+    fele.write("{:d} 3 0\n".format(numtri))
+    # Remaining lines: ...
 [attributes]
+    cnt = 0
+    for i in range(nVertices):
+        # write non-degenerate triangles only
+        if ConV[i, :].min() > 0:
+            cnt += 1
+            fele.write("{:d} {:d} {:d} {:d}\n".format(
+                cnt, ConV[i, 0], ConV[i, 1], ConV[i, 2]))
+    fele.write("# Generated from MPAS file: {}\n".format(mpasfile))
+    fele.close()
+
+    fin.close()
+    print("Conversion complete.")
+
+
+def main():
+    parser = OptionParser()
+    parser.add_option(
+        "-m",
+        "--mpas",
+        dest="mpasfile",
+        help="input MPAS netCDF file.",
+        metavar="FILE")
+    parser.add_option(
+        "-t",
+        "--triangle",
+        dest="trifile",
+        help="output file name template to be in triangle format (FILE.1.node,"
+        " FILE.1.ele).",
+        metavar="FILE")
+
+    options, args = parser.parse_args()
+
+    if not options.mpasfile:
+        parser.error("An input MPAS file is required.")
+
+    if not options.trifile:
+        parser.error("An output Triangle format file name is required.")
+
+    mpas_to_triangle(mpasfile=options.mpasfile, trifile=options.trifile)
diff --git a/conda_package/mpas_tools/mesh/creation/open_msh.py b/conda_package/mpas_tools/mesh/creation/open_msh.py
new file mode 100644
index 000000000..2e5db01cb
--- /dev/null
+++ b/conda_package/mpas_tools/mesh/creation/open_msh.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+"""
+
+Utility functions to read and manipulate JIGSAW meshes.
+
+Phillip J. Wolfram
+04/06/2017
+"""
+from __future__ import absolute_import, division, print_function, \
+    unicode_literals
+
+import numpy as np
+
+
+def readmsh(fname):
+    """
+    Reads JIGSAW msh structure and produces a dictionary with values.
+
+    Phillip J. 
Wolfram + 09/22/2017 + """ + + dataset = {} + datavals = {} + datavals['HEADER'] = ';' + datavals['ARRAY'] = None + with open(fname) as f: + line = f.readline() + while line: + if line[0] == '#': + datavals['HEADER'] += line[1:] + ';' + line = f.readline() + continue + if '=' in line: + datavals, dataset = _store_datavals(datavals, dataset) + if 'COORD' in line: + name = 'COORD' + line.split('=')[1][0] + datavals[name] = line.split(';')[-1] + else: + vals = line.split('=') + value = vals[1] if ';' in vals[1] else int(vals[1]) + datavals[vals[0]] = value + line = f.readline() + continue + + # just numbers + arrayvals = np.asarray(line.split(';'), dtype='f8') + if datavals['ARRAY'] is None: + datavals['ARRAY'] = [arrayvals] + else: + datavals['ARRAY'].append(arrayvals) + line = f.readline() + continue + datavals, dataset = _store_datavals(datavals, dataset) + + return dataset + + +def _store_datavals(datavals, dataset): # {{{ + + if datavals['ARRAY'] is not None: + # remove empty data + if np.all(datavals['ARRAY'] == np.array(None, dtype='object')): + datavals.pop('ARRAY') + for key in [aval for aval in datavals.keys() + if aval in ['HEADER', 'MSHID', 'NDIMS']]: + if key in dataset: + dataset[key] += datavals[key] + else: + dataset[key] = datavals[key] + datavals.pop(key) + entryname = [aval for aval in datavals.keys() if aval not in [ + 'ARRAY']] + + if 'TRI' in entryname[0]: + dtype = 'i' + else: + dtype = 'f8' + datavals['ARRAY'] = np.asarray(datavals['ARRAY'], dtype=dtype) + + # decided to throw away "index" from msh because it isn't truly a + # real number + dataset[entryname[0]] = datavals['ARRAY'] + datavals = {} + datavals['ARRAY'] = None + + return datavals, dataset # }}} diff --git a/conda_package/mpas_tools/mesh/creation/triangle_to_netcdf.py b/conda_package/mpas_tools/mesh/creation/triangle_to_netcdf.py new file mode 100644 index 000000000..48238f8b8 --- /dev/null +++ b/conda_package/mpas_tools/mesh/creation/triangle_to_netcdf.py @@ -0,0 +1,175 @@ +from 
__future__ import absolute_import, division, print_function, \ + unicode_literals + +import numpy as np + +from netCDF4 import Dataset as NetCDFFile +from mpas_tools.mesh.creation.util import circumcenter + +import argparse + + +def triangle_to_netcdf(node, ele, output_name): + """ + Converts mesh data defined in triangle format to NetCDF + + Parameters + ---------- + node : str + A node file name + ele : str + An element file name + output_name: str + The name of the output file + """ + # Authors: Phillip J. Wolfram, Matthew Hoffman and Xylar Asay-Davis + on_sphere = False + grid = NetCDFFile(output_name, 'w', format='NETCDF3_CLASSIC') + + # Get dimensions + # Get nCells + cell_info = open(node, 'r') + nCells = -1 # There is one header line + for block in iter(lambda: cell_info.readline(), ""): + if block.startswith("#"): + continue # skip comment lines + nCells = nCells + 1 + cell_info.close() + + # Get vertexDegree and nVertices + cov_info = open(ele, 'r') + vertexDegree = 3 # always triangles with Triangle! 
+
+    nVertices = -1  # There is one header line
+    for block in iter(lambda: cov_info.readline(), ""):
+        if block.startswith("#"):
+            continue  # skip comment lines
+        nVertices = nVertices + 1
+    cov_info.close()
+
+    if vertexDegree != 3:
+        raise ValueError("This script can only compute vertices with triangular "
+                         "dual meshes currently.")
+
+    grid.createDimension('nCells', nCells)
+    grid.createDimension('nVertices', nVertices)
+    grid.createDimension('vertexDegree', vertexDegree)
+
+    # Create cell variables and sphere_radius
+    xCell_full = np.zeros((nCells,))
+    yCell_full = np.zeros((nCells,))
+    zCell_full = np.zeros((nCells,))
+
+    cell_info = open(node, 'r')
+    cell_info.readline()  # read header
+    i = 0
+    for block in iter(lambda: cell_info.readline(), ""):
+        block_arr = block.split()
+        if block_arr[0] == "#":
+            continue  # skip comment lines
+        xCell_full[i] = float(block_arr[1])
+        yCell_full[i] = float(block_arr[2])
+        zCell_full[i] = 0.0  # z-position is always 0.0 in a planar mesh
+        i = i + 1
+    cell_info.close()
+
+    grid.on_a_sphere = "NO"
+    grid.sphere_radius = 0.0
+
+    cellsOnVertex_full = np.zeros(
+        (nVertices, vertexDegree), dtype=np.int32)
+
+    cov_info = open(ele, 'r')
+    cov_info.readline()  # read header
+    iVertex = 0
+    for block in iter(lambda: cov_info.readline(), ""):
+        block_arr = block.split()
+        if block_arr[0] == "#":
+            continue  # skip comment lines
+        cellsOnVertex_full[iVertex, :] = int(-1)
+        # skip the first column, which is the triangle number, and then
+        # only get the next 3 columns
+        for j in np.arange(0, 3):
+            cellsOnVertex_full[iVertex, j] = int(block_arr[j + 1])
+
+        iVertex = iVertex + 1
+
+    cov_info.close()
+
+    # Create vertex variables
+    xVertex_full = np.zeros((nVertices,))
+    yVertex_full = np.zeros((nVertices,))
+    zVertex_full = np.zeros((nVertices,))
+
+    for iVertex in np.arange(0, nVertices):
+        cell1 = cellsOnVertex_full[iVertex, 0]
+        cell2 = cellsOnVertex_full[iVertex, 1]
+        cell3 = cellsOnVertex_full[iVertex, 2]
+
+        x1 = xCell_full[cell1 - 1]
+        y1 = 
yCell_full[cell1 - 1] + z1 = zCell_full[cell1 - 1] + x2 = xCell_full[cell2 - 1] + y2 = yCell_full[cell2 - 1] + z2 = zCell_full[cell2 - 1] + x3 = xCell_full[cell3 - 1] + y3 = yCell_full[cell3 - 1] + z3 = zCell_full[cell3 - 1] + + pv = circumcenter(on_sphere, x1, y1, z1, x2, y2, z2, x3, y3, z3) + xVertex_full[iVertex] = pv.x + yVertex_full[iVertex] = pv.y + zVertex_full[iVertex] = pv.z + + meshDensity_full = grid.createVariable( + 'meshDensity', 'f8', ('nCells',)) + + meshDensity_full[0:nCells] = 1.0 + + var = grid.createVariable('xCell', 'f8', ('nCells',)) + var[:] = xCell_full + var = grid.createVariable('yCell', 'f8', ('nCells',)) + var[:] = yCell_full + var = grid.createVariable('zCell', 'f8', ('nCells',)) + var[:] = zCell_full + var = grid.createVariable('xVertex', 'f8', ('nVertices',)) + var[:] = xVertex_full + var = grid.createVariable('yVertex', 'f8', ('nVertices',)) + var[:] = yVertex_full + var = grid.createVariable('zVertex', 'f8', ('nVertices',)) + var[:] = zVertex_full + var = grid.createVariable( + 'cellsOnVertex', 'i4', ('nVertices', 'vertexDegree',)) + var[:] = cellsOnVertex_full + + grid.sync() + grid.close() + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument( + "-n", + "--node", + dest="node", + required=True, + help="input .node file generated by Triangle.", + metavar="FILE") + parser.add_argument( + "-e", + "--ele", + dest="ele", + required=True, + help="input .ele file generated by Triangle.", + metavar="FILE") + parser.add_argument( + "-o", + "--output", + dest="output", + default="grid.nc", + help="output file name.", + metavar="FILE") + options = parser.parse_args() + + triangle_to_netcdf(options.node, options.ele, options.output) diff --git a/conda_package/mpas_tools/mesh/creation/util.py b/conda_package/mpas_tools/mesh/creation/util.py new file mode 100644 index 000000000..b9948127a --- /dev/null +++ 
b/conda_package/mpas_tools/mesh/creation/util.py @@ -0,0 +1,39 @@ +import collections + +point = collections.namedtuple('Point', ['x', 'y', 'z']) + + +def circumcenter(on_sphere, x1, y1, z1, x2, y2, z2, x3, y3, z3): # {{{ + p1 = point(x1, y1, z1) + p2 = point(x2, y2, z2) + p3 = point(x3, y3, z3) + if on_sphere: + a = (p2.x - p3.x)**2 + (p2.y - p3.y)**2 + (p2.z - p3.z)**2 + b = (p3.x - p1.x)**2 + (p3.y - p1.y)**2 + (p3.z - p1.z)**2 + c = (p1.x - p2.x)**2 + (p1.y - p2.y)**2 + (p1.z - p2.z)**2 + + pbc = a * (-a + b + c) + apc = b * (a - b + c) + abp = c * (a + b - c) + + xv = (pbc * p1.x + apc * p2.x + abp * p3.x) / (pbc + apc + abp) + yv = (pbc * p1.y + apc * p2.y + abp * p3.y) / (pbc + apc + abp) + zv = (pbc * p1.z + apc * p2.z + abp * p3.z) / (pbc + apc + abp) + else: + d = 2 * (p1.x * (p2.y - p3.y) + p2.x * + (p3.y - p1.y) + p3.x * (p1.y - p2.y)) + + xv = ((p1.x**2 + p1.y**2) * (p2.y - p3.y) + (p2.x**2 + p2.y**2) + * (p3.y - p1.y) + (p3.x**2 + p3.y**2) * (p1.y - p2.y)) / d + yv = ((p1.x**2 + p1.y**2) * (p3.x - p2.x) + (p2.x**2 + p2.y**2) + * (p1.x - p3.x) + (p3.x**2 + p3.y**2) * (p2.x - p1.x)) / d + zv = 0.0 + + # Optional method to use barycenter instead. 
+ # xv = p1.x + p2.x + p3.x + # xv = xv / 3.0 + # yv = p1.y + p2.y + p3.y + # yv = yv / 3.0 + return point(xv, yv, zv) + +# }}} \ No newline at end of file diff --git a/conda_package/mpas_tools/ocean/moc.py b/conda_package/mpas_tools/ocean/moc.py index 39bff7ff1..4e0fd8db5 100755 --- a/conda_package/mpas_tools/ocean/moc.py +++ b/conda_package/mpas_tools/ocean/moc.py @@ -10,7 +10,7 @@ import shapely.geometry from geometric_features import GeometricFeatures, FeatureCollection -import mpas_tools.conversion +import mpas_tools.mesh.conversion from mpas_tools.io import write_netcdf @@ -57,8 +57,8 @@ def make_moc_basins_and_transects(gf, mesh_filename, fcMOC.to_geojson(geojson_filename) dsMesh = xarray.open_dataset(mesh_filename) - dsMasks = mpas_tools.conversion.mask(dsMesh=dsMesh, fcMask=fcMOC, - logger=logger) + dsMasks = mpas_tools.mesh.conversion.mask(dsMesh=dsMesh, fcMask=fcMOC, + logger=logger) if mask_filename is not None: write_netcdf(dsMasks, mask_filename) diff --git a/conda_package/mpas_tools/tests/define_base_mesh.py b/conda_package/mpas_tools/tests/define_base_mesh.py new file mode 100755 index 000000000..a9aa150fb --- /dev/null +++ b/conda_package/mpas_tools/tests/define_base_mesh.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +""" +% Create cell width array for this mesh on a regular latitude-longitude grid. 
+% Outputs: +% cellWidth - m x n array, entries are desired cell width in km +% lat - latitude, vector of length m, with entries between -90 and 90, degrees +% lon - longitude, vector of length n, with entries between -180 and 180, degrees +""" +import numpy as np + + +def cellWidthVsLatLon(): + + ddeg = 10 + constantCellWidth = 240 + + lat = np.arange(-90, 90.01, ddeg) + lon = np.arange(-180, 180.01, ddeg) + + cellWidth = constantCellWidth * np.ones((lat.size, lon.size)) + return cellWidth, lon, lat diff --git a/conda_package/mpas_tools/tests/test_conversion.py b/conda_package/mpas_tools/tests/test_conversion.py index 4cce4d55a..23ff97a99 100755 --- a/conda_package/mpas_tools/tests/test_conversion.py +++ b/conda_package/mpas_tools/tests/test_conversion.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -from mpas_tools.conversion import convert, cull, mask +from mpas_tools.mesh.conversion import convert, cull, mask from mpas_tools.io import write_netcdf import matplotlib matplotlib.use('Agg') diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/3Wbgy5.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/3Wbgy5.xml new file mode 100644 index 000000000..ea7bde2fe --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/3Wbgy5.xml @@ -0,0 +1,55 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/3wave-yellow-grey-blue.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/3wave-yellow-grey-blue.xml new file mode 100644 index 000000000..74142ef5a --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/3wave-yellow-grey-blue.xml @@ -0,0 +1 @@ +
\ No newline at end of file diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/4wave-grey-red-green-mgreen.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/4wave-grey-red-green-mgreen.xml new file mode 100644 index 000000000..894fc5d18 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/4wave-grey-red-green-mgreen.xml @@ -0,0 +1,71 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/5wave-yellow-brown-blue.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/5wave-yellow-brown-blue.xml new file mode 100644 index 000000000..3c9623fc6 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/5wave-yellow-brown-blue.xml @@ -0,0 +1 @@ +
diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/Publications.bib b/conda_package/mpas_tools/viz/SciVisColorColormaps/Publications.bib new file mode 100644 index 000000000..ef1a7e3bf --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/Publications.bib @@ -0,0 +1,31 @@ +@article{FSamsele, +title = {ColorMoves: Real-time Interactive Colormap Construction for Scientific Visualization}, +author = {Francesca Samsel and Sebastion Klaassen and David Rogers}, +url = {http://doi.ieeecomputersociety.org/10.1109/MCG.2018.011461525}, +year = {2018}, +date = {2018-01-01}, +abstract = {The visualization of scientific data is both a science and an art, in which many tools are used to explore, discover and communicate the information within the data. This process is increasingly difficult, as the size and complexity of data is constantly advancing. Color is a potent tool in scientific data visualization, and has been well studied. However, color’s full potential for communication and discovery remains untapped. Effective use of color requires a depth of understanding and experience employing color and color relationships, in combination with tools to translate that knowledge into scientific visualization workflows. In this paper, we present ColorMoves, an interactive tool that promotes exploration of scientific data through artist-driven color methods in a unique and transformative way. 
We discuss the power of contrast in scientific visualization, the design of the ColorMoves tool, and the tool’s application in several science domains.}, +note = {LA-UR-17-29913}, +keywords = {color, colormaps, interactive design, scientific visualization}, +pubstate = {published}, +tppubtype = {article} +} + +@inproceedings{info:lanl-repo/lareport/LA-UR-17-22224, +title = {Intuitive Colormaps for Environmental Visualization}, +author = {Francesca Samsel and Terece Turton and Phillip Wolfram and Roxana Bujack}, +editor = {Karsten Rink and Ariane Middel and Dirk Zeckzer and Roxana Bujack}, +url = {http://datascience.dsscale.org/wp-content/uploads/sites/3/2017/08/IntuitiveColormapsforEnvironmentalVisualization.pdf}, +doi = {10.2312/envirvis.20171105}, +isbn = {978-3-03868-040-6}, +year = {2017}, +date = {2017-03-16}, +booktitle = {Workshop on Visualisation in Environmental Sciences (EnvirVis)}, +publisher = {The Eurographics Association}, +abstract = {Visualizations benefit from the use of intuitive colors, enabling an observer to make use of more automatic, subconscious channels. In this paper, we apply the concept of intuitive color to the generation of thematic colormaps for the environmental sciences. In particular, we provide custom sets of colormaps for water, atmosphere, land, and vegetation. 
These have been integrated into the online tool: ColorMoves: The Environment to enable the environmental scientist to tailor them precisely to the data and tasks in a simple drag-and-drop workflow.}, +howpublished = {EnvirVis ; 2017-06-12 - 2017-06-13 ; Barcelona, Spain}, +note = {LA-UR-17-22224}, +keywords = {colormaps, environmental sciences}, +pubstate = {published}, +tppubtype = {inproceedings} +} diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-1.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-1.xml new file mode 100644 index 000000000..806e75095 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-1.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-3.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-3.xml new file mode 100644 index 000000000..2b54397f1 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-3.xml @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-6.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-6.xml new file mode 100644 index 000000000..a19dc4c9a --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-6.xml @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-8.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-8.xml new file mode 100644 index 000000000..141f01684 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-8.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-orange-div.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-orange-div.xml new file mode 100644 index 000000000..921f52050 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/blue-orange-div.xml 
@@ -0,0 +1 @@ +
\ No newline at end of file diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/brown-2.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/brown-2.xml new file mode 100644 index 000000000..629d57114 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/brown-2.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/brown-5.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/brown-5.xml new file mode 100644 index 000000000..4e67b00d7 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/brown-5.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/brown-8.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/brown-8.xml new file mode 100644 index 000000000..ad4307c9f --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/brown-8.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/green-1.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/green-1.xml new file mode 100644 index 000000000..670c9a6a1 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/green-1.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/green-4.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/green-4.xml new file mode 100644 index 000000000..d784351d2 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/green-4.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/green-7.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/green-7.xml new file mode 100644 index 000000000..f4f3472c4 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/green-7.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git 
a/conda_package/mpas_tools/viz/SciVisColorColormaps/green-8.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/green-8.xml new file mode 100644 index 000000000..58d130eb7 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/green-8.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/orange-5.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/orange-5.xml new file mode 100644 index 000000000..ed0f31e86 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/orange-5.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/orange-6.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/orange-6.xml new file mode 100644 index 000000000..9947d2a2f --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/orange-6.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/orange-green-blue-gray.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/orange-green-blue-gray.xml new file mode 100644 index 000000000..d1c242383 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/orange-green-blue-gray.xml @@ -0,0 +1,74 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/purple-7.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/purple-7.xml new file mode 100644 index 000000000..6b07e44ed --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/purple-7.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/purple-8.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/purple-8.xml new file mode 100644 index 
000000000..df15a669e --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/purple-8.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/red-1.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/red-1.xml new file mode 100644 index 000000000..09f2a8ab7 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/red-1.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/red-3.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/red-3.xml new file mode 100644 index 000000000..739059aad --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/red-3.xml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/red-4.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/red-4.xml new file mode 100644 index 000000000..fc0110da9 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/red-4.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/yellow-1.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/yellow-1.xml new file mode 100644 index 000000000..9f773a4bc --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/yellow-1.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/SciVisColorColormaps/yellow-7.xml b/conda_package/mpas_tools/viz/SciVisColorColormaps/yellow-7.xml new file mode 100644 index 000000000..93aee2908 --- /dev/null +++ b/conda_package/mpas_tools/viz/SciVisColorColormaps/yellow-7.xml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/conda_package/mpas_tools/viz/colormaps.py b/conda_package/mpas_tools/viz/colormaps.py new file mode 100644 index 000000000..52c5797df --- /dev/null +++ 
b/conda_package/mpas_tools/viz/colormaps.py @@ -0,0 +1,47 @@ +import xml.etree.ElementTree as ET +import pkg_resources +from matplotlib.colors import LinearSegmentedColormap +import matplotlib.pyplot as plt + + +def register_sci_viz_colormaps(): + """Register all SciVisColor colormaps with matplotlib""" + + for mapName in ['3wave-yellow-grey-blue', '3Wbgy5', + '4wave-grey-red-green-mgreen', '5wave-yellow-brown-blue', + 'blue-1', 'blue-3', 'blue-6', 'blue-8', 'blue-orange-div', + 'brown-2', 'brown-5', 'brown-8', 'green-1', 'green-4', + 'green-7', 'green-8', 'orange-5', 'orange-6', + 'orange-green-blue-gray', 'purple-7', 'purple-8', 'red-1', + 'red-3', 'red-4', 'yellow-1', 'yellow-7']: + + xmlFile = pkg_resources.resource_filename( + __name__, 'SciVisColorColormaps/{}.xml'.format(mapName)) + _read_xml_colormap(xmlFile, mapName) + + +def _read_xml_colormap(xmlFile, mapName): + """Read in an XML colormap""" + + xml = ET.parse(xmlFile) + + root = xml.getroot() + colormap = root.findall('ColorMap') + if len(colormap) > 0: + colormap = colormap[0] + colorDict = {'red': [], 'green': [], 'blue': []} + for point in colormap.findall('Point'): + x = float(point.get('x')) + color = [float(point.get('r')), float(point.get('g')), + float(point.get('b'))] + colorDict['red'].append((x, color[0], color[0])) + colorDict['green'].append((x, color[1], color[1])) + colorDict['blue'].append((x, color[2], color[2])) + cmap = LinearSegmentedColormap(mapName, colorDict, 256) + + _register_colormap_and_reverse(mapName, cmap) + + +def _register_colormap_and_reverse(mapName, cmap): + plt.register_cmap(mapName, cmap) + plt.register_cmap('{}_r'.format(mapName), cmap.reversed()) diff --git a/conda_package/recipe/conda_build_config.yaml b/conda_package/recipe/conda_build_config.yaml index a024c5f34..0717ff576 100644 --- a/conda_package/recipe/conda_build_config.yaml +++ b/conda_package/recipe/conda_build_config.yaml @@ -5,6 +5,6 @@ channel_targets: - conda-forge main python: - - 3.7 - 3.6 - - 2.7 
+ - 3.7 + - 3.8 diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml index 6de9c2b62..783036220 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -1,5 +1,5 @@ {% set name = "mpas_tools" %} -{% set version = "0.0.10" %} +{% set version = "0.0.11" %} package: name: '{{ name|lower }}' @@ -15,6 +15,12 @@ build: - translate_planar_grid = mpas_tools.translate:main - merge_grids = mpas_tools.merge_grids:main - split_grids = mpas_tools.split_grids:main + - build_mesh = mpas_tools.mesh.creation.build_mesh:main + - inject_bathymetry = mpas_tools.mesh.creation.inject_bathymetry:main + - inject_preserve_floodplain = mpas_tools.mesh.creation.inject_preserve_floodplain:main + - mpas_to_triangle = mpas_tools.mesh.creation.mpas_to_triangle:main + - triangle_to_netcdf = mpas_tools.mesh.creation.triangle_to_netcdf:main + - jigsaw_to_netcdf = mpas_tools.mesh.creation.jigsaw_to_netcdf:main requirements: build: @@ -41,6 +47,14 @@ requirements: - pyevtk - future - backports.tempfile + - jigsaw >=0.9.12 + - jigsawpy >=0.2.1 + - pyflann + - scikit-image + - cartopy + - pyamg + - rasterio + - affine test: requires: @@ -51,6 +65,12 @@ test: - mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc - mesh_tools/mesh_conversion_tools/test/land_mask_final.nc - conda_package/mpas_tools/tests/* + imports: + - mpas_tools + - mpas_tools.mesh.conversion + - mpas_tools.mesh.creation + - mpas_tools.viz + - mpas_tools.conversion commands: - planar_hex --nx=10 --ny=20 --dc=1000. --outFileName='periodic_mesh_10x20_1km.nc' - translate_planar_grid -f 'periodic_mesh_10x20_1km.nc' -x 1000. -y 2000. @@ -77,6 +97,14 @@ test: - paraview_vtk_field_extractor.py -f mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc -v latCell,lonCell --ignore_time -o vtk_test - split_grids --help - merge_grids --help + # define_base_mesh.py must exist locally for build_mesh to work + - cp conda_package/mpas_tools/tests/define_base_mesh.py . 
+ - build_mesh --help + - inject_bathymetry mesh_tools/mesh_conversion_tools/test/mesh.QU.1920km.151026.nc + - inject_preserve_floodplain --help + - mpas_to_triangle --help + - triangle_to_netcdf --help + - jigsaw_to_netcdf --help about: home: https://github.com/MPAS-Dev/MPAS-Tools/ diff --git a/conda_package/setup.py b/conda_package/setup.py index 82b57b3d9..fb5ab3aa4 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -41,7 +41,7 @@ 'Topic :: Scientific/Engineering', ], packages=find_packages(), - package_data={}, + package_data={'mpas_tools.viz': ['SciVisColorColormaps/*.xml']}, scripts=['mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py', 'mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py', 'mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py', @@ -59,4 +59,10 @@ ['planar_hex = mpas_tools.planar_hex:main', 'translate_planar_grid = mpas_tools.translate:main', 'merge_grids = mpas_tools.merge_grids:main', - 'split_grids = mpas_tools.split_grids:main']}) + 'split_grids = mpas_tools.split_grids:main', + 'build_mesh = mpas_tools.mesh.creation.build_mesh:main', + 'inject_bathymetry = mpas_tools.mesh.creation.inject_bathymetry:main', + 'inject_preserve_floodplain = mpas_tools.mesh.creation.inject_preserve_floodplain:main', + 'mpas_to_triangle = mpas_tools.mesh.creation.mpas_to_triangle:main', + 'triangle_to_netcdf = mpas_tools.mesh.creation.triangle_to_netcdf:main', + 'jigsaw_to_netcdf = mpas_tools.mesh.creation.jigsaw_to_netcdf:main']})