8000 Add conservative remapping of observational datasets onto Greenland Ice Sheet mesh by trhille · Pull Request #803 · MPAS-Dev/compass · GitHub
[go: up one dir, main page]
More Web Proxy on the site http://driver.im/
Skip to content

Add conservative remapping of observational datasets onto Greenland Ice Sheet mesh #803

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
131 changes: 131 additions & 0 deletions compass/landice/tests/greenland/mesh.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
from os.path import exists
from shutil import copyfile

import netCDF4
import numpy as np
from mpas_tools.logging import check_call
from mpas_tools.scrip.from_mpas import scrip_from_mpas

from compass.landice.mesh import (
build_cell_width,
build_mali_mesh,
Expand Down Expand Up @@ -50,6 +58,10 @@ def run(self):
logger = self.logger
mesh_name = 'GIS.nc'
section_name = 'mesh'
config = self.config
section = config[section_name]
data_path = section.get('data_path')
nProcs = section.get('nProcs')

logger.info('calling build_cell_width')
cell_width, x1, y1, geom_points, geom_edges, floodMask = \
Expand All @@ -64,9 +76,112 @@ def run(self):
gridded_dataset='greenland_1km_2024_01_29.epsg3413.icesheetonly.nc', # noqa
projection='gis-gimp', geojson_file=None)

# Create scrip files if they don't already exist
if exists(data_path + '/BedMachineGreenland-v5.scrip.nc'):
logger.info('BedMachine script file exists;'
' skipping file creation')
else:
logger.info('creating scrip file for BedMachine dataset')
args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
'-i', data_path + '/BedMachineGreenland-v5_edits_floodFill_extrap.nc', # noqa
'-s', data_path + '/BedMachineGreenland-v5.scrip.nc',
'-p', 'gis-gimp', '-r', '2']
check_call(args, logger=logger)
if exists(data_path + '/greenland_vel_mosaic500.scrip.nc'):
logger.info('Measures script file exists; skipping file creation')
else:
logger.info('creating scrip file for 2006-2010 velocity dataset')
args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
'-i', data_path + '/greenland_vel_mosaic500_extrap.nc',
'-s', data_path + '/greenland_vel_mosaic500.scrip.nc',
'-p', 'gis-gimp', '-r', '2']
check_call(args, logger=logger)

logger.info('calling set_lat_lon_fields_in_planar_grid.py')
args = ['set_lat_lon_fields_in_planar_grid.py', '-f',
'GIS.nc', '-p', 'gis-gimp']
check_call(args, logger=logger)

logger.info('creating scrip file for destination mesh')
scrip_from_mpas('GIS.nc', 'GIS.scrip.nc')
args = ['create_SCRIP_file_from_MPAS_mesh.py',
'-m', 'GIS.nc',
'-s', 'GIS.scrip.nc']
check_call(args, logger=logger)

# Create weight files from datasets to mesh
if exists('BedMachine_to_MPAS_weights.nc'):
logger.info('BedMachine_to_MPAS_weights.nc exists; skipping')
else:
logger.info('generating gridded dataset -> MPAS weights')
args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen', '--source',
data_path + 'BedMachineGreenland-v5.scrip.nc',
'--destination',
'GIS.scrip.nc',
'--weight', 'BedMachine_to_MPAS_weights.nc',
'--method', 'conserve',
"-i", "-64bit_offset",
"--dst_regional", "--src_regional", '--netcdf4']
check_call(args, logger=logger)

if exists('measures_to_MPAS_weights.nc'):
logger.info('measures_to_MPAS_weights.nc exists; skipping')
else:
logger.info('generating gridded dataset -> MPAS weights')
args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen', '--source',
data_path + 'greenland_vel_mosaic500.scrip.nc',
'--destination',
'GIS.scrip.nc',
'--weight', 'measures_to_MPAS_weights.nc',
'--method', 'conserve',
"-i", "-64bit_offset", '--netcdf4',
"--dst_regional", "--src_regional", '--ignore_unmapped']
check_call(args, logger=logger)

# interpolate fields from BedMachine and Measures
# Using conservative remapping
logger.info('calling interpolate_to_mpasli_grid.py')
args = ['interpolate_to_mpasli_grid.py', '-s',
data_path + '/BedMachineGreenland-v5_edits_floodFill_extrap.nc', # noqa
'-d', 'GIS.nc', '-m', 'e',
'-w', 'BedMachine_to_MPAS_weights.nc']
check_call(args, logger=logger)

logger.info('calling interpolate_to_mpasli_grid.py')
args = ['interpolate_to_mpasli_grid.py', '-s',
data_path + '/greenland_vel_mosaic500_extrap.nc',
'-d', 'GIS.nc', '-m', 'e',
'-w', 'measures_to_MPAS_weights.nc',
'-v', 'observedSurfaceVelocityX',
'observedSurfaceVelocityY',
'observedSurfaceVelocityUncertainty']
check_call(args, logger=logger)

logger.info('Marking domain boundaries dirichlet')
args = ['mark_domain_boundaries_dirichlet.py',
'-f', 'GIS.nc']
check_call(args, logger=logger)

logger.info('creating graph.info')
make_graph_file(mesh_filename=mesh_name,
graph_filename='graph.info')
# Create a backup in case clean-up goes awry
copyfile('GIS.nc', 'GIS_backup.nc')

# Clean up: trim to iceMask and set large velocity
# uncertainties where appropriate.
data = netCDF4.Dataset('GIS.nc', 'r+')
data.set_auto_mask(False)
data.variables['thickness'][:] *= (data.variables['iceMask'][:] > 1.5)

mask = np.logical_or(
np.isnan(
data.variables['observedSurfaceVelocityUncertainty'][:]),
data.variables['thickness'][:] < 1.0)
mask = np.logical_or(
mask,
data.variables['observedSurfaceVelocityUncertainty'][:] == 0.0)
data.variables['observedSurfaceVelocityUncertainty'][0, mask[0, :]] = 1.0 # noqa

# create region masks
mask_filename = f'{mesh_name[:-3]}_regionMasks.nc'
Expand All @@ -80,3 +195,19 @@ def run(self):
'southGreenland',
'southWestGreenland',
'westCentralGreenland'])

# Ensure basalHeatFlux is positive
data.variables['basalHeatFlux'][:] = np.abs(
data.variables['basalHeatFlux'][:])
# Ensure reasonable dHdt values
dHdt = data.variables["observedThicknessTendency"][:]
dHdtErr = data.variables["observedThicknessTendencyUncertainty"][:]
# Arbitrary 5% uncertainty; improve this later
dHdtErr = np.abs(dHdt) * 0.05
# large uncertainty where data is missing
dHdtErr[np.abs(dHdt) > 1.0] = 1.0
dHdt[np.abs(dHdt) > 1.0] = 0.0 # Remove ridiculous values
data.variables["observedThicknessTendency"][:] = dHdt
data.variables["observedThicknessTendencyUncertainty"][:] = dHdtErr

data.close()
6 changes: 6 additions & 0 deletions compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
# config options for high_res_mesh test case
[mesh]

# path to directory containing BedMachine and Measures datasets
data_path = /global/cfs/cdirs/fanssie/standard_datasets/GIS_datasets/

# number of levels in the mesh
levels = 10

Expand All @@ -17,6 +20,9 @@ y_max = None
# to cull based on distance from margin.
cull_distance = 10.0

# number of processors to use for ESMF_RegridWeightGen
nProcs = 128

# mesh density parameters
# minimum cell spacing (meters)
min_spac = 3.e3
Expand Down
12 changes: 12 additions & 0 deletions docs/developers_guide/landice/test_groups/greenland.rst
Original file line number Diff line number Diff line change
Expand Up @@ -89,3 +89,15 @@ mesh_gen
The :py:class:`compass.landice.tests.greenland.mesh_gen.MeshGen`
calls the :py:class:`compass.landice.tests.greenland.mesh.Mesh` to create
the variable resolution Greenland Ice Sheet mesh.

The mesh generation is based around 1- and 2-km reference datasets, which
are updated in a number of ways directly from high resolution source
data. In the future, a complete workflow from source datasets may
replace this hybrid method.

Once the mesh is created, scrip files and the associated weights files
are created for the mesh and observational data sets. Then, ice geometry and
velocity observations are conservatively remapped from BedMachine v5 and
MEaSUREs 2006-2010 data sets. Finally, there is some cleanup to set large
velocity uncertainties outside the ice mask, check the sign of the basal heat
flux, and set reasonable values for dH/dt and its uncertainty.
4 changes: 2 additions & 2 deletions docs/users_guide/landice/test_groups/antarctica.rst
Original file line number Diff line number Diff line change
Expand Up @@ -96,8 +96,8 @@ attributes, extrapolating fields to avoid interpolation ramps at ice margins,
updating mask values, and raising the bed topography at Lake Vostok to ensure
a flat ice surface there.

Those data files and processing scripts currently live here on Chicoma:
``/usr/projects/climate/trhille/data``.
Those data files and processing scripts currently live here on Perlmutter:
``/global/cfs/cdirs/fanssie/standard_datasets/AIS_datasets``.
Eventually that pre-processing could be integrated into a new step in COMPASS,
or the processed data files could be added to the server on Anvil and downloaded
as needed. However, until then, this test case provides a reproducible workflow
Expand Down
101 changes: 72 additions & 29 deletions docs/users_guide/landice/test_groups/greenland.rst
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,9 @@
greenland
=========

The ``landice/greenland`` test group runs tests with a coarse (20-km)
The ``landice/greenland`` test group is used to run tests with a coarse (20-km)
`Greenland mesh <https://web.lcrc.anl.gov/public/e3sm/mpas_standalonedata/mpas-albany-landice/gis20km.210608.nc>`_
and to create customized variable resolution meshes.

.. figure:: images/gis_speed.png
:width: 742 px
Expand Down Expand Up @@ -32,34 +33,58 @@ The other test cases do not use config options.

.. code-block:: cfg

[mesh]

# number of levels in the mesh
levels = 10

# distance from ice margin to cull (km).
# Set to a value <= 0 if you do not want
# to cull based on distance from margin.
cull_distance = 10.0

# mesh density parameters
# minimum cell spacing (meters)
min_spac = 3.e3
# maximum cell spacing (meters)
max_spac = 30.e3
# log10 of max speed for cell spacing
high_log_speed = 2.5
# log10 of min speed for cell spacing
low_log_speed = 0.75
# distance at which cell spacing = max_spac
high_dist = 1.e5
# distance within which cell spacing = min_spac
low_dist = 5.e4

# mesh density functions
use_speed = True
use_dist_to_grounding_line = False
use_dist_to_edge = True
# config options for high_res_mesh test case
[mesh]

# path to directory containing BedMachine and Measures datasets
data_path = /global/cfs/cdirs/fanssie/standard_datasets/GIS_datasets/

# number of levels in the mesh
levels = 10

# Bounds of GIS mesh. If any bound is set
# to None, the mesh will use the full extent
# of the gridded dataset.
x_min = None
x_max = None
y_min = None
y_max = None

# distance from ice margin to cull (km).
# Set to a value <= 0 if you do not want
# to cull based on distance from margin.
cull_distance = 10.0

# number of processors to use for ESMF_RegridWeightGen
nProcs = 128

# mesh density parameters
# minimum cell spacing (meters)
min_spac = 3.e3
# maximum cell spacing (meters)
max_spac = 30.e3
# log10 of max speed for cell spacing
high_log_speed = 2.5
# log10 of min speed for cell spacing
low_log_speed = 0.75
# distance at which cell spacing = max_spac
high_dist = 1.e5
# distance within which cell spacing = min_spac
low_dist = 1.e4
# distance at which bed topography has no effect
high_dist_bed = 1.e5
# distance within which bed topography has maximum effect
low_dist_bed = 7.5e4
# Bed elev beneath which cell spacing is minimized
low_bed = 50.0
# Bed elev above which cell spacing is maximized
high_bed = 100.0

# mesh density functions
use_speed = True
use_dist_to_grounding_line = False
use_dist_to_edge = True
use_bed = True

smoke_test
----------
Expand Down Expand Up @@ -93,3 +118,21 @@ on the the config options listed above. This will not be the same as the
pre-generated 20 km mesh used in the other three test cases because it uses
a newer version of Jigsaw. Note that the basal friction optimization is
performed separately and is not part of this test case.

The test case performs interpolation of observational data from gridded
datasets to the Greenland mesh. This takes care of the peculiarities of
the current gridded compilation dataset (greenland_1km_2024_01_29.epsg3413.icesheetonly.nc),
as well as using conservative remapping directly from the high-resolution
BedMachine v5 and MEaSUREs 2006-2010 velocity datasets. There is a fairly
heavy degree of pre-processing done to get the BedMachine and MEaSUREs
datasets ready to be used here. The pre-processing includes renaming
variables, setting reasonable _FillValue and missing_value attributes,
extrapolating fields to avoid interpolation ramps at ice margins,
and updating mask values.

Those data files and processing scripts currently live here on Perlmutter:
``/global/cfs/cdirs/fanssie/standard_datasets/GIS_datasets/``. Eventually
that pre-processing could be integrated into a new step in COMPASS, or
the processed data files could be added to the server on Anvil and downloaded
as needed. However, until then, this test case provides a reproducible workflow
for setting up Greenland meshes at varying resolutions.
Loading
0