From e0c3149e87cf2f7b4d812370086c763f3fb9a816 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 5 Aug 2024 14:26:52 -0500 Subject: [PATCH 01/41] Fix units for CF compliance --- compass/ocean/tests/global_ocean/init/remap_ice_shelf_melt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compass/ocean/tests/global_ocean/init/remap_ice_shelf_melt.py b/compass/ocean/tests/global_ocean/init/remap_ice_shelf_melt.py index 4980b81acf..fc010135d3 100644 --- a/compass/ocean/tests/global_ocean/init/remap_ice_shelf_melt.py +++ b/compass/ocean/tests/global_ocean/init/remap_ice_shelf_melt.py @@ -261,10 +261,10 @@ def remap_paolo(in_filename, base_mesh_filename, culled_mesh_filename, field = 'dataLandIceFreshwaterFlux' ds_remap[field] = area_ratio * sphere_fwf - ds_remap[field].attrs['units'] = 'kg m^-2 s^-1' + ds_remap[field].attrs['units'] = 'kg m-2 s-1' field = 'dataLandIceHeatFlux' ds_remap[field] = area_ratio * ds_remap[field] - ds_remap[field].attrs['units'] = 'W m^-2' + ds_remap[field].attrs['units'] = 'W m-2' mpas_flux = (ds_remap.dataLandIceFreshwaterFlux * mpas_area_cell).sum().values From dd199861da814998881a6cd91d8ae502bf4f1ce9 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 1 Jul 2024 08:59:27 -0500 Subject: [PATCH 02/41] Add a step for normalizing data iceberg and ice-shelf fluxes This is needed for an Antarctic balance approach in which all Antarctic solid runoff is converted into iceberg and ice-shelf melt with the patterns from the Merino et al. (2020) and Paolo et al. (2023) datasets. --- .../global_ocean/files_for_e3sm/__init__.py | 10 +- .../normalize_iceberg_ice_shelf_melt.py | 125 ++++++++++++++++++ .../files_for_e3sm/remap_ice_shelf_melt.py | 4 +- 3 files changed, 136 insertions(+), 3 deletions(-) create mode 100644 compass/ocean/tests/global_ocean/files_for_e3sm/normalize_iceberg_ice_shelf_melt.py diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py b/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py index afa6ead955..fd220a1d16 100644 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py @@ -10,6 +10,9 @@ from compass.ocean.tests.global_ocean.files_for_e3sm.e3sm_to_cmip_maps import ( E3smToCmipMaps, ) +from compass.ocean.tests.global_ocean.files_for_e3sm.normalize_iceberg_ice_shelf_melt import ( # noqa: E501 + NormalizeIcebergIceShelfMelt, +) from compass.ocean.tests.global_ocean.files_for_e3sm.ocean_graph_partition import ( # noqa: E501 OceanGraphPartition, ) @@ -113,12 +116,15 @@ def __init__(self, test_group, mesh=None, init=None, self.add_step(DiagnosticMaps(test_case=self)) self.add_step(DiagnosticMasks(test_case=self)) + self.add_step(RemapIcebergClimatology( + test_case=self)) + self.add_step(RemapIceShelfMelt(test_case=self, init=init)) - self.add_step(RemapSeaSurfaceSalinityRestoring( + self.add_step(NormalizeIcebergIceShelfMelt( test_case=self)) - self.add_step(RemapIcebergClimatology( + self.add_step(RemapSeaSurfaceSalinityRestoring( test_case=self)) self.add_step(RemapTidalMixing(test_case=self)) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/normalize_iceberg_ice_shelf_melt.py b/compass/ocean/tests/global_ocean/files_for_e3sm/normalize_iceberg_ice_shelf_melt.py new file mode 100644 index 0000000000..654ff9848a --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/normalize_iceberg_ice_shelf_melt.py @@ -0,0 +1,125 @@ +import os + +import numpy as np +import xarray as xr +from mpas_tools.io 
import write_netcdf
+
+from compass.io import symlink
+from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import (  # noqa: E501
+    FilesForE3SMStep,
+)
+
+
+class NormalizeIcebergIceShelfMelt(FilesForE3SMStep):
+    """
+    A step for normalizing data iceberg and ice-shelf melt rates on the
+    MPAS grid to a total flux of 1.0 and staging them in ``assembled_files``
+    """
+    def __init__(self, test_case):
+        """
+        Create a new step
+
+        Parameters
+        ----------
+        test_case : compass.TestCase
+            The test case this step belongs to
+        """
+        super().__init__(test_case, name='normalize_iceberg_ice_shelf_melt',
+                         ntasks=1, min_tasks=1)
+
+        filename = 'Iceberg_Climatology_Merino_MPAS.nc'
+        subdir = 'remap_iceberg_climatology'
+        self.add_input_file(
+            filename=filename,
+            target=f'../{subdir}/{filename}')
+
+        filename = 'prescribed_ismf_paolo2023.nc'
+        subdir = 'remap_ice_shelf_melt'
+        self.add_input_file(
+            filename=filename,
+            target=f'../{subdir}/{filename}')
+
+    def setup(self):
+        """
+        setup input files based on config options
+        """
+        super().setup()
+        if self.with_ice_shelf_cavities:
+            self.add_output_file(filename='dib_merino_2020_normalized.nc')
+            self.add_output_file(filename='dismf_paolo2023_normalized.nc')
+
+    def run(self):
+        """
+        Run this step of the test case
+        """
+        super().run()
+
+        if not self.with_ice_shelf_cavities:
+            return
+
+        logger = self.logger
+
+        suffix = f'{self.mesh_short_name}.{self.creation_date}'
+
+        ds_dib = xr.open_dataset('Iceberg_Climatology_Merino_MPAS.nc')
+        ds_dismf = xr.open_dataset('prescribed_ismf_paolo2023.nc')
+
+        ds_mesh = xr.open_dataset('restart.nc')
+
+        area_cell = ds_mesh.areaCell
+
+        days_in_month = np.array(
+            [31., 28., 31., 30., 31., 30., 31., 31., 30., 31., 30., 31.])
+
+        weights = xr.DataArray(data=days_in_month / 365.,
+                               dims=('Time',))
+
+        total_dib_flux = (ds_dib.bergFreshwaterFluxData * weights *
+                          area_cell).sum()
+
+        total_dismf_flux = (ds_dismf.dataLandIceFreshwaterFlux *
+                            area_cell).sum()
+
+        total_flux = total_dib_flux + total_dismf_flux
+
+        logger.info(f'total_dib_flux: {total_dib_flux:.1f}')
+        logger.info(f'total_dismf_flux: {total_dismf_flux:.1f}')
+        logger.info(f'total_flux: {total_flux:.1f}')
+        logger.info('')
+
+        for var in ['bergFreshwaterFluxData']:
+            ds_dib[var] = ds_dib[var] / total_flux
+
+        write_netcdf(ds_dib, 'dib_merino_2020_normalized.nc')
+
+        for var in ['dataLandIceFreshwaterFlux', 'dataLandIceHeatFlux']:
+            ds_dismf[var] = ds_dismf[var] / total_flux
+
+        write_netcdf(ds_dismf, 'dismf_paolo2023_normalized.nc')
+
+        norm_total_dib_flux = (ds_dib.bergFreshwaterFluxData * weights *
+                               area_cell).sum()
+
+        norm_total_dismf_flux = (ds_dismf.dataLandIceFreshwaterFlux *
+                                 area_cell).sum()
+
+        norm_total_flux = norm_total_dib_flux + norm_total_dismf_flux
+
+        logger.info(f'norm_total_dib_flux: {norm_total_dib_flux:.3f}')
+        logger.info(f'norm_total_dismf_flux: {norm_total_dismf_flux:.3f}')
+        logger.info(f'norm_total_flux: {norm_total_flux:.3f}')
+        logger.info('')
+
+        prefix = 'Iceberg_Climatology_Merino_normalized'
+        dest_filename = f'{prefix}.{suffix}.nc'
+
+        symlink(
+            os.path.abspath('dib_merino_2020_normalized.nc'),
+            f'{self.ocean_inputdata_dir}/{dest_filename}')
+
+        prefix = 'prescribed_ismf_paolo2023_normalized'
+        dest_filename = f'{prefix}.{suffix}.nc'
+
+        symlink(
+            os.path.abspath('dismf_paolo2023_normalized.nc'),
+            f'{self.ocean_inputdata_dir}/{dest_filename}')
diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/remap_ice_shelf_melt.py
b/compass/ocean/tests/global_ocean/files_for_e3sm/remap_ice_shelf_melt.py index c49cc941ab..fe5e1b09cd 100644 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/remap_ice_shelf_melt.py +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/remap_ice_shelf_melt.py @@ -40,7 +40,9 @@ def setup(self): setup input files based on config options """ super().setup() - if not self.with_ice_shelf_cavities: + if self.init is not None: + # we don't need any files, since we already did this remapping + # during init return filename = 'prescribed_ismf_paolo2023.nc' From 8b35df7f3969eeaab4b07e5fca0be906f83b0948 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Jul 2024 04:46:03 -0500 Subject: [PATCH 03/41] Add area integrated, annual mean fluxes to DIB and DISMF Instead of normalizing the fluxes, simply add the totals as a new field to each file so they can be normalized later in code. --- .../global_ocean/files_for_e3sm/__init__.py | 20 ++---- ...py => add_total_iceberg_ice_shelf_melt.py} | 67 +++++++++++-------- .../files_for_e3sm/remap_ice_shelf_melt.py | 51 +++++--------- .../remap_iceberg_climatology.py | 20 +++--- 4 files changed, 69 insertions(+), 89 deletions(-) rename compass/ocean/tests/global_ocean/files_for_e3sm/{normalize_iceberg_ice_shelf_melt.py => add_total_iceberg_ice_shelf_melt.py} (59%) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py b/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py index fd220a1d16..6ffc8384ed 100644 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py @@ -1,6 +1,9 @@ import os from compass.io import package_path, symlink +from compass.ocean.tests.global_ocean.files_for_e3sm.add_total_iceberg_ice_shelf_melt import ( # noqa: E501 + AddTotalIcebergIceShelfMelt, +) from compass.ocean.tests.global_ocean.files_for_e3sm.diagnostic_maps import ( DiagnosticMaps, ) @@ -10,9 +13,6 @@ from compass.ocean.tests.global_ocean.files_for_e3sm.e3sm_to_cmip_maps import ( E3smToCmipMaps, ) -from compass.ocean.tests.global_ocean.files_for_e3sm.normalize_iceberg_ice_shelf_melt import ( # noqa: E501 - NormalizeIcebergIceShelfMelt, -) from compass.ocean.tests.global_ocean.files_for_e3sm.ocean_graph_partition import ( # noqa: E501 OceanGraphPartition, ) @@ -115,18 +115,10 @@ def __init__(self, test_group, mesh=None, init=None, self.add_step(E3smToCmipMaps(test_case=self)) self.add_step(DiagnosticMaps(test_case=self)) self.add_step(DiagnosticMasks(test_case=self)) - - self.add_step(RemapIcebergClimatology( - test_case=self)) - + self.add_step(RemapIcebergClimatology(test_case=self)) self.add_step(RemapIceShelfMelt(test_case=self, init=init)) - - self.add_step(NormalizeIcebergIceShelfMelt( - test_case=self)) - - self.add_step(RemapSeaSurfaceSalinityRestoring( - test_case=self)) - + self.add_step(AddTotalIcebergIceShelfMelt(test_case=self)) + self.add_step(RemapSeaSurfaceSalinityRestoring(test_case=self)) self.add_step(RemapTidalMixing(test_case=self)) if mesh is not None and init is not None: diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/normalize_iceberg_ice_shelf_melt.py b/compass/ocean/tests/global_ocean/files_for_e3sm/add_total_iceberg_ice_shelf_melt.py similarity index 59% rename from compass/ocean/tests/global_ocean/files_for_e3sm/normalize_iceberg_ice_shelf_melt.py rename to compass/ocean/tests/global_ocean/files_for_e3sm/add_total_iceberg_ice_shelf_melt.py index 654ff9848a..8d4a6da7f4 100644 --- 
a/compass/ocean/tests/global_ocean/files_for_e3sm/normalize_iceberg_ice_shelf_melt.py
+++ b/compass/ocean/tests/global_ocean/files_for_e3sm/add_total_iceberg_ice_shelf_melt.py
@@ -10,10 +10,11 @@
 )
 
 
-class NormalizeIcebergIceShelfMelt(FilesForE3SMStep):
+class AddTotalIcebergIceShelfMelt(FilesForE3SMStep):
     """
-    A step for normalizing data iceberg and ice-shelf melt rates on the
-    MPAS grid to a total flux of 1.0 and staging them in ``assembled_files``
+    A step for adding the total data iceberg and ice-shelf melt rates
+    to the data iceberg and ice-shelf melt files and staging them in
+    ``assembled_files``
     """
     def __init__(self, test_case):
         """
@@ -24,7 +25,7 @@ def __init__(self, test_case):
         test_case : compass.TestCase
             The test case this step belongs to
         """
-        super().__init__(test_case, name='normalize_iceberg_ice_shelf_melt',
+        super().__init__(test_case, name='add_total_iceberg_ice_shelf_melt',
                          ntasks=1, min_tasks=1)
 
         filename = 'Iceberg_Climatology_Merino_MPAS.nc'
@@ -41,12 +42,14 @@ def __init__(self, test_case):
 
     def setup(self):
         """
-        setup input files based on config options
+        setup output files based on config options
         """
         super().setup()
         if self.with_ice_shelf_cavities:
-            self.add_output_file(filename='dib_merino_2020_normalized.nc')
-            self.add_output_file(filename='dismf_paolo2023_normalized.nc')
+            self.add_output_file(
+                filename='Iceberg_Climatology_Merino_MPAS_with_totals.nc')
+            self.add_output_file(
+                filename='prescribed_ismf_paolo2023_with_totals.nc')
 
     def run(self):
         """
@@ -59,11 +62,8 @@ def run(self):
 
         logger = self.logger
 
-        suffix = f'{self.mesh_short_name}.{self.creation_date}'
-
         ds_dib = xr.open_dataset('Iceberg_Climatology_Merino_MPAS.nc')
         ds_dismf = xr.open_dataset('prescribed_ismf_paolo2023.nc')
-
         ds_mesh = xr.open_dataset('restart.nc')
 
         area_cell = ds_mesh.areaCell
@@ -87,39 +87,48 @@ def run(self):
         logger.info(f'total_flux: {total_flux:.1f}')
         logger.info('')
 
-        for var in ['bergFreshwaterFluxData']:
-            ds_dib[var] = ds_dib[var] / total_flux
+        for ds in [ds_dib, ds_dismf]:
+            ntime = ds.sizes['Time']
+            field = 'areaIntegAnnMeanDataIcebergFreshwaterFlux'
+            ds[field] = (('Time',), np.ones(ntime) * total_dib_flux.values)
+            ds[field].attrs['units'] = 'kg s-1'
+            field = 'areaIntegAnnMeanDataIceShelfFreshwaterFlux'
+            ds[field] = (('Time',), np.ones(ntime) * total_dismf_flux.values)
+            ds[field].attrs['units'] = 'kg s-1'
+            field = 'areaIntegAnnMeanDataIcebergIceShelfFreshwaterFlux'
+            ds[field] = (('Time',), np.ones(ntime) * total_flux.values)
+            ds[field].attrs['units'] = 'kg s-1'
 
-        write_netcdf(ds_dib, 'dib_merino_2020_normalized.nc')
+        dib_filename = 'Iceberg_Climatology_Merino_MPAS_with_totals.nc'
+        write_netcdf(ds_dib, dib_filename)
 
-        for var in ['dataLandIceFreshwaterFlux', 'dataLandIceHeatFlux']:
-            ds_dismf[var] = ds_dismf[var] / total_flux
-
-        write_netcdf(ds_dismf, 'dismf_paolo2023_normalized.nc')
+        dismf_filename = 'prescribed_ismf_paolo2023_with_totals.nc'
+        write_netcdf(ds_dismf, dismf_filename)
 
         norm_total_dib_flux = (ds_dib.bergFreshwaterFluxData * weights *
-                               area_cell).sum()
+                               area_cell / total_flux).sum()
 
         norm_total_dismf_flux = (ds_dismf.dataLandIceFreshwaterFlux *
-                                 area_cell).sum()
+                                 area_cell / total_flux).sum()
 
         norm_total_flux = norm_total_dib_flux + norm_total_dismf_flux
 
-        logger.info(f'norm_total_dib_flux: {norm_total_dib_flux:.3f}')
-        logger.info(f'norm_total_dismf_flux: {norm_total_dismf_flux:.3f}')
-        logger.info(f'norm_total_flux: {norm_total_flux:.3f}')
+        logger.info(f'norm_total_dib_flux: {norm_total_dib_flux:.16f}')
+
logger.info(f'norm_total_dismf_flux: {norm_total_dismf_flux:.16f}') + logger.info(f'norm_total_flux: {norm_total_flux:.16f}') + logger.info(f'1 - norm_total_flux: {1 - norm_total_flux:.16g}') logger.info('') - prefix = 'Iceberg_Climatology_Merino_normalized' + prefix = 'Iceberg_Climatology_Merino' + suffix = f'{self.mesh_short_name}.{self.creation_date}' dest_filename = f'{prefix}.{suffix}.nc' - symlink( - os.path.abspath('dib_merino_2020_normalized.nc'), - f'{self.ocean_inputdata_dir}/{dest_filename}') + os.path.abspath(dib_filename), + f'{self.seaice_inputdata_dir}/{dest_filename}') - prefix = 'prescribed_ismf_paolo2023_normalized' + prefix = 'prescribed_ismf_paolo2023' + suffix = f'{self.mesh_short_name}.{self.creation_date}' dest_filename = f'{prefix}.{suffix}.nc' - symlink( - os.path.abspath('dismf_paolo2023_normalized.nc'), + os.path.abspath(dismf_filename), f'{self.ocean_inputdata_dir}/{dest_filename}') diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/remap_ice_shelf_melt.py b/compass/ocean/tests/global_ocean/files_for_e3sm/remap_ice_shelf_melt.py index fe5e1b09cd..ee8c47a204 100644 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/remap_ice_shelf_melt.py +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/remap_ice_shelf_melt.py @@ -1,6 +1,3 @@ -import os - -from compass.io import symlink from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import ( # noqa: E501 FilesForE3SMStep, ) @@ -37,13 +34,9 @@ def __init__(self, test_case, init): def setup(self): """ - setup input files based on config options + setup input and output files based on config options """ super().setup() - if self.init is not None: - # we don't need any files, since we already did this remapping - # during init - return filename = 'prescribed_ismf_paolo2023.nc' @@ -72,34 +65,24 @@ def run(self): """ super().run() - if not self.with_ice_shelf_cavities: + if not self.with_ice_shelf_cavities or self.init is not None: return - prefix = 'prescribed_ismf_paolo2023' - suffix = f'{self.mesh_short_name}.{self.creation_date}' - - remapped_filename = f'{prefix}.nc' - dest_filename = f'{prefix}.{suffix}.nc' - - if self.init is None: - logger = self.logger - config = self.config - ntasks = self.ntasks - in_filename = 'Paolo_2023_ANT_G1920V01_IceShelfMelt.nc' - - parallel_executable = config.get('parallel', 'parallel_executable') + logger = self.logger + config = self.config + ntasks = self.ntasks + in_filename = 'Paolo_2023_ANT_G1920V01_IceShelfMelt.nc' + remapped_filename = 'prescribed_ismf_paolo2023.nc' - base_mesh_filename = 'base_mesh.nc' - culled_mesh_filename = 'initial_state.nc' - mesh_name = self.mesh_short_name - land_ice_mask_filename = 'initial_state.nc' + parallel_executable = config.get('parallel', 'parallel_executable') - remap_paolo(in_filename, base_mesh_filename, - culled_mesh_filename, mesh_name, - land_ice_mask_filename, remapped_filename, - logger=logger, mpi_tasks=ntasks, - parallel_executable=parallel_executable) + base_mesh_filename = 'base_mesh.nc' + culled_mesh_filename = 'initial_state.nc' + mesh_name = self.mesh_short_name + land_ice_mask_filename = 'initial_state.nc' - symlink( - os.path.abspath(remapped_filename), - f'{self.ocean_inputdata_dir}/{dest_filename}') + remap_paolo(in_filename, base_mesh_filename, + culled_mesh_filename, mesh_name, + land_ice_mask_filename, remapped_filename, + logger=logger, mpi_tasks=ntasks, + parallel_executable=parallel_executable) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/remap_iceberg_climatology.py 
b/compass/ocean/tests/global_ocean/files_for_e3sm/remap_iceberg_climatology.py index 24d4532b4d..952ab13569 100644 --- a/compass/ocean/tests/global_ocean/files_for_e3sm/remap_iceberg_climatology.py +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/remap_iceberg_climatology.py @@ -5,7 +5,6 @@ from mpas_tools.io import write_netcdf from pyremap import LatLonGridDescriptor, MpasCellMeshDescriptor, Remapper -from compass.io import symlink from compass.ocean.tests.global_ocean.files_for_e3sm.files_for_e3sm_step import ( # noqa: E501 FilesForE3SMStep, ) @@ -34,7 +33,13 @@ def __init__(self, test_case): target='Iceberg_Interannual_Merino.nc', database='initial_condition_database') - self.add_output_file(filename='Iceberg_Climatology_Merino_MPAS.nc') + def setup(self): + """ + setup output files based on config options + """ + super().setup() + if self.with_ice_shelf_cavities: + self.add_output_file(filename='Iceberg_Climatology_Merino_MPAS.nc') def run(self): """ @@ -48,12 +53,7 @@ def run(self): ntasks = self.ntasks in_filename = 'Iceberg_Interannual_Merino.nc' - - prefix = 'Iceberg_Climatology_Merino' - suffix = f'{self.mesh_short_name}.{self.creation_date}' - - remapped_filename = f'{prefix}_MPAS.nc' - dest_filename = f'{prefix}.{suffix}.nc' + remapped_filename = 'Iceberg_Climatology_Merino_MPAS.nc' parallel_executable = config.get('parallel', 'parallel_executable') @@ -69,10 +69,6 @@ def run(self): logger=logger, mpi_tasks=ntasks, parallel_executable=parallel_executable) - symlink( - os.path.abspath(remapped_filename), - f'{self.seaice_inputdata_dir}/{dest_filename}') - def remap_iceberg_climo(in_filename, mesh_filename, mesh_name, land_ice_mask_filename, out_filename, logger, From 2c85a1c96b7474c273c6762dbd26abaf1bcfba26 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Jan 2024 05:32:33 -0600 Subject: [PATCH 04/41] Update to SOwISC12to30E3r3 Switch SO12to60 to SO12to30 with a base resolution that is now uniformly 30 km. 
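
For reference, the refinement works by blending a uniform 30-km background
into a 12-km Southern Ocean region using a tanh weight on the signed distance
from the high-res region. A minimal sketch of the blend implemented in
build_cell_width_lat_lon() in the diff below (the standalone function and
variable names in this sketch are illustrative, not part of the patch):

    import numpy as np

    def blend_cell_width(signed_distance, dx_min=12., dx_max=30.,
                         trans_start=500e3, trans_width=1600e3):
        # weight is ~0 inside the Southern Ocean region and ~1 far outside,
        # so cell width transitions smoothly from 12 km to 30 km over a
        # distance equivalent to about 20 degrees of latitude
        weights = 0.5 * (1 + np.tanh((signed_distance - trans_start) /
                                     trans_width))
        return dx_min * (1 - weights) + dx_max * weights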
--- compass/ocean/suites/so12to30.txt | 5 + compass/ocean/suites/so12to60.txt | 5 - compass/ocean/suites/sowisc12to30.txt | 5 + compass/ocean/suites/sowisc12to60.txt | 5 - compass/ocean/tests/global_ocean/__init__.py | 2 +- .../ocean/tests/global_ocean/mesh/__init__.py | 6 +- .../global_ocean/mesh/so12to30/__init__.py | 70 +++++++++++ .../dynamic_adjustment.yaml | 0 .../high_res_region.geojson | 2 +- .../namelist.split_explicit_ab2 | 0 .../so12to60.cfg => so12to30/so12to30.cfg} | 11 +- .../global_ocean/mesh/so12to60/__init__.py | 118 ------------------ .../mesh/so12to60/atlantic.geojson | 97 -------------- 13 files changed, 89 insertions(+), 237 deletions(-) create mode 100644 compass/ocean/suites/so12to30.txt delete mode 100644 compass/ocean/suites/so12to60.txt create mode 100644 compass/ocean/suites/sowisc12to30.txt delete mode 100644 compass/ocean/suites/sowisc12to60.txt create mode 100644 compass/ocean/tests/global_ocean/mesh/so12to30/__init__.py rename compass/ocean/tests/global_ocean/mesh/{so12to60 => so12to30}/dynamic_adjustment.yaml (100%) rename compass/ocean/tests/global_ocean/mesh/{so12to60 => so12to30}/high_res_region.geojson (96%) rename compass/ocean/tests/global_ocean/mesh/{so12to60 => so12to30}/namelist.split_explicit_ab2 (100%) rename compass/ocean/tests/global_ocean/mesh/{so12to60/so12to60.cfg => so12to30/so12to30.cfg} (78%) delete mode 100644 compass/ocean/tests/global_ocean/mesh/so12to60/__init__.py delete mode 100644 compass/ocean/tests/global_ocean/mesh/so12to60/atlantic.geojson diff --git a/compass/ocean/suites/so12to30.txt b/compass/ocean/suites/so12to30.txt new file mode 100644 index 0000000000..d9fb27d8c0 --- /dev/null +++ b/compass/ocean/suites/so12to30.txt @@ -0,0 +1,5 @@ +ocean/global_ocean/SO12to30/mesh +ocean/global_ocean/SO12to30/WOA23/init +ocean/global_ocean/SO12to30/WOA23/performance_test +ocean/global_ocean/SO12to30/WOA23/dynamic_adjustment +ocean/global_ocean/SO12to30/WOA23/files_for_e3sm diff --git a/compass/ocean/suites/so12to60.txt b/compass/ocean/suites/so12to60.txt deleted file mode 100644 index 42c0943056..0000000000 --- a/compass/ocean/suites/so12to60.txt +++ /dev/null @@ -1,5 +0,0 @@ -ocean/global_ocean/SO12to60/mesh -ocean/global_ocean/SO12to60/WOA23/init -ocean/global_ocean/SO12to60/WOA23/performance_test -ocean/global_ocean/SO12to60/WOA23/dynamic_adjustment -ocean/global_ocean/SO12to60/WOA23/files_for_e3sm diff --git a/compass/ocean/suites/sowisc12to30.txt b/compass/ocean/suites/sowisc12to30.txt new file mode 100644 index 0000000000..377f440bb6 --- /dev/null +++ b/compass/ocean/suites/sowisc12to30.txt @@ -0,0 +1,5 @@ +ocean/global_ocean/SOwISC12to30/mesh +ocean/global_ocean/SOwISC12to30/WOA23/init +ocean/global_ocean/SOwISC12to30/WOA23/performance_test +ocean/global_ocean/SOwISC12to30/WOA23/dynamic_adjustment +ocean/global_ocean/SOwISC12to30/WOA23/files_for_e3sm diff --git a/compass/ocean/suites/sowisc12to60.txt b/compass/ocean/suites/sowisc12to60.txt deleted file mode 100644 index 6d638a1859..0000000000 --- a/compass/ocean/suites/sowisc12to60.txt +++ /dev/null @@ -1,5 +0,0 @@ -ocean/global_ocean/SOwISC12to60/mesh -ocean/global_ocean/SOwISC12to60/WOA23/init -ocean/global_ocean/SOwISC12to60/WOA23/performance_test -ocean/global_ocean/SOwISC12to60/WOA23/dynamic_adjustment -ocean/global_ocean/SOwISC12to60/WOA23/files_for_e3sm diff --git a/compass/ocean/tests/global_ocean/__init__.py b/compass/ocean/tests/global_ocean/__init__.py index 3d266a9cc9..69f3e96669 100644 --- a/compass/ocean/tests/global_ocean/__init__.py +++ 
b/compass/ocean/tests/global_ocean/__init__.py
@@ -44,7 +44,7 @@ def __init__(self, mpas_core):
 
         self._add_tests(mesh_names=['ARRM10to60', 'ARRMwISC10to60'])
 
-        self._add_tests(mesh_names=['SO12to60', 'SOwISC12to60'])
+        self._add_tests(mesh_names=['SO12to30', 'SOwISC12to30'])
 
         self._add_tests(mesh_names=['WC14', 'WCwISC14'])
 
diff --git a/compass/ocean/tests/global_ocean/mesh/__init__.py b/compass/ocean/tests/global_ocean/mesh/__init__.py
index 280bf49b9d..e88a96d612 100644
--- a/compass/ocean/tests/global_ocean/mesh/__init__.py
+++ b/compass/ocean/tests/global_ocean/mesh/__init__.py
@@ -16,7 +16,7 @@
     QUMeshFromConfigStep,
 )
 from compass.ocean.tests.global_ocean.mesh.rrs6to18 import RRS6to18BaseMesh
-from compass.ocean.tests.global_ocean.mesh.so12to60 import SO12to60BaseMesh
+from compass.ocean.tests.global_ocean.mesh.so12to30 import SO12to30BaseMesh
 from compass.ocean.tests.global_ocean.mesh.wc14 import WC14BaseMesh
 from compass.ocean.tests.global_ocean.metadata import (
     get_author_and_email_from_git,
@@ -98,8 +98,8 @@ def __init__(self, test_group, mesh_name, high_res_topography):
             base_mesh_step = ARRM10to60BaseMesh(self, name=name, subdir=subdir)
         elif mesh_name in ['RRS6to18', 'RRSwISC6to18']:
             base_mesh_step = RRS6to18BaseMesh(self, name=name, subdir=subdir)
-        elif mesh_name in ['SO12to60', 'SOwISC12to60']:
-            base_mesh_step = SO12to60BaseMesh(self, name=name, subdir=subdir)
+        elif mesh_name in ['SO12to30', 'SOwISC12to30']:
+            base_mesh_step = SO12to30BaseMesh(self, name=name, subdir=subdir)
         elif mesh_name in ['FRIS01to60', 'FRISwISC01to60']:
             base_mesh_step = FRIS01to60BaseMesh(self, name=name, subdir=subdir)
         elif mesh_name in ['FRIS02to60', 'FRISwISC02to60']:
diff --git a/compass/ocean/tests/global_ocean/mesh/so12to30/__init__.py b/compass/ocean/tests/global_ocean/mesh/so12to30/__init__.py
new file mode 100644
index 0000000000..2f05894fb4
--- /dev/null
+++ b/compass/ocean/tests/global_ocean/mesh/so12to30/__init__.py
@@ -0,0 +1,70 @@
+import numpy as np
+from geometric_features import read_feature_collection
+from mpas_tools.cime.constants import constants
+from mpas_tools.mesh.creation.signed_distance import (
+    signed_distance_from_geojson,
+)
+
+from compass.mesh import QuasiUniformSphericalMeshStep
+
+
+class SO12to30BaseMesh(QuasiUniformSphericalMeshStep):
+    """
+    A step for creating SO12to30 meshes
+    """
+    def setup(self):
+        """
+        Add some input files
+        """
+
+        self.add_input_file(filename='high_res_region.geojson',
+                            package=self.__module__)
+
+        super().setup()
+
+    def build_cell_width_lat_lon(self):
+        """
+        Create cell width array for this mesh on a regular latitude-longitude
+        grid
+
+        Returns
+        -------
+        cell_width : numpy.array
+            m x n array of cell width in km
+
+        lon : numpy.array
+            longitude in degrees (length n and between -180 and 180)
+
+        lat : numpy.array
+            latitude in degrees (length m and between -90 and 90)
+        """
+
+        dlon = 0.1
+        dlat = dlon
+        earth_radius = constants['SHR_CONST_REARTH']
+        nlon = int(360. / dlon) + 1
+        nlat = int(180. / dlat) + 1
+        lon = np.linspace(-180., 180., nlon)
+        lat = np.linspace(-90., 90., nlat)
+
+        # start with a uniform 30 km background resolution
+        dx_max = 30.
+        cell_width = dx_max * np.ones((nlat, nlon))
+
+        fc = read_feature_collection('high_res_region.geojson')
+
+        so_signed_distance = signed_distance_from_geojson(fc, lon, lat,
+                                                          earth_radius,
+                                                          max_length=0.25)
+
+        # Equivalent to 20 degrees latitude
+        trans_width = 1600e3
+        trans_start = 500e3
+        dx_min = 12.
+ + weights = 0.5 * (1 + np.tanh((so_signed_distance - trans_start) / + trans_width)) + + cell_width = dx_min * (1 - weights) + cell_width * weights + + return cell_width, lon, lat diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/dynamic_adjustment.yaml b/compass/ocean/tests/global_ocean/mesh/so12to30/dynamic_adjustment.yaml similarity index 100% rename from compass/ocean/tests/global_ocean/mesh/so12to60/dynamic_adjustment.yaml rename to compass/ocean/tests/global_ocean/mesh/so12to30/dynamic_adjustment.yaml diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/high_res_region.geojson b/compass/ocean/tests/global_ocean/mesh/so12to30/high_res_region.geojson similarity index 96% rename from compass/ocean/tests/global_ocean/mesh/so12to60/high_res_region.geojson rename to compass/ocean/tests/global_ocean/mesh/so12to30/high_res_region.geojson index a536ebaba9..4b8b7202c6 100644 --- a/compass/ocean/tests/global_ocean/mesh/so12to60/high_res_region.geojson +++ b/compass/ocean/tests/global_ocean/mesh/so12to30/high_res_region.geojson @@ -4,7 +4,7 @@ { "type": "Feature", "properties": { - "name": "SO12to60 high res region", + "name": "SO12to30 high res region", "component": "ocean", "object": "region", "author": "Xylar Asay-Davis" diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/namelist.split_explicit_ab2 b/compass/ocean/tests/global_ocean/mesh/so12to30/namelist.split_explicit_ab2 similarity index 100% rename from compass/ocean/tests/global_ocean/mesh/so12to60/namelist.split_explicit_ab2 rename to compass/ocean/tests/global_ocean/mesh/so12to30/namelist.split_explicit_ab2 diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/so12to60.cfg b/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg similarity index 78% rename from compass/ocean/tests/global_ocean/mesh/so12to60/so12to60.cfg rename to compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg index 541b71e917..d0bdfa0351 100644 --- a/compass/ocean/tests/global_ocean/mesh/so12to60/so12to60.cfg +++ b/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg @@ -33,23 +33,20 @@ prefix = SO # a description of the mesh and initial condition mesh_description = MPAS Southern Ocean regionally refined mesh for E3SM version ${e3sm_version} with enhanced resolution (${min_res} km) around - Antarctica, 45-km resolution in the mid southern latitudes, - 30-km resolution in a 15-degree band around the equator, 60-km - resolution in northern mid latitudes, 30 km in the north - Atlantic and 35 km in the Arctic. This mesh has <<>> + Antarctica and ${max_res} km elsewhere. This mesh has <<>> vertical levels and includes cavities under the ice shelves around Antarctica. 
# E3SM version that the mesh is intended for e3sm_version = 3 # The revision number of the mesh, which should be incremented each time the # mesh is revised -mesh_revision = 1 +mesh_revision = 3 # the minimum (finest) resolution in the mesh min_res = 12 # the maximum (coarsest) resolution in the mesh, can be the same as min_res -max_res = 60 +max_res = 30 # The URL of the pull request documenting the creation of the mesh -pull_request = https://github.com/MPAS-Dev/compass/pull/669 +pull_request = https://github.com/MPAS-Dev/compass/pull/807 # config options related to initial condition and diagnostics support files diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/__init__.py b/compass/ocean/tests/global_ocean/mesh/so12to60/__init__.py deleted file mode 100644 index c7edd16a57..0000000000 --- a/compass/ocean/tests/global_ocean/mesh/so12to60/__init__.py +++ /dev/null @@ -1,118 +0,0 @@ -import mpas_tools.mesh.creation.mesh_definition_tools as mdt -import numpy as np -from geometric_features import read_feature_collection -from mpas_tools.cime.constants import constants -from mpas_tools.mesh.creation.signed_distance import ( - signed_distance_from_geojson, -) - -from compass.mesh import QuasiUniformSphericalMeshStep - - -class SO12to60BaseMesh(QuasiUniformSphericalMeshStep): - """ - A step for creating SO12to60 meshes - """ - def setup(self): - """ - Add some input files - """ - - self.add_input_file(filename='atlantic.geojson', - package=self.__module__) - - self.add_input_file(filename='high_res_region.geojson', - package=self.__module__) - - super().setup() - - def build_cell_width_lat_lon(self): - """ - Create cell width array for this mesh on a regular latitude-longitude - grid - - Returns - ------- - cellWidth : numpy.array - m x n array of cell width in km - - lon : numpy.array - longitude in degrees (length n and between -180 and 180) - - lat : numpy.array - longitude in degrees (length m and between -90 and 90) - """ - - dlon = 0.1 - dlat = dlon - earth_radius = constants['SHR_CONST_REARTH'] - nlon = int(360. / dlon) + 1 - nlat = int(180. / dlat) + 1 - lon = np.linspace(-180., 180., nlon) - lat = np.linspace(-90., 90., nlat) - - cellWidthSouth = mdt.EC_CellWidthVsLat(lat, cellWidthEq=30., - cellWidthMidLat=45., - cellWidthPole=45., - latPosEq=7.5, latWidthEq=3.0) - - cellWidthNorth = mdt.EC_CellWidthVsLat(lat, cellWidthEq=30., - cellWidthMidLat=60., - cellWidthPole=35., - latPosEq=7.5, latWidthEq=3.0) - - # Transition at Equator - latTransition = 0.0 - latWidthTransition = 2.5 - cellWidthVsLat = mdt.mergeCellWidthVsLat( - lat, - cellWidthSouth, - cellWidthNorth, - latTransition, - latWidthTransition) - - _, cellWidth = np.meshgrid(lon, cellWidthVsLat) - - cellWidthAtlantic = mdt.EC_CellWidthVsLat(lat, cellWidthEq=30., - cellWidthMidLat=30., - cellWidthPole=35., - latPosEq=7.5, latWidthEq=3.0) - - cellWidthAtlantic = mdt.mergeCellWidthVsLat( - lat, - cellWidthSouth, - cellWidthAtlantic, - latTransition, - latWidthTransition) - - _, cellWidthAtlantic = np.meshgrid(lon, cellWidthAtlantic) - - fc = read_feature_collection('atlantic.geojson') - - atlantic_signed_distance = signed_distance_from_geojson( - fc, lon, lat, earth_radius, max_length=0.25) - - trans_width = 400e3 - trans_start = 0. 
- weights = 0.5 * (1 + np.tanh((atlantic_signed_distance - trans_start) / - trans_width)) - - cellWidth = cellWidthAtlantic * (1 - weights) + cellWidth * weights - - fc = read_feature_collection('high_res_region.geojson') - - so_signed_distance = signed_distance_from_geojson(fc, lon, lat, - earth_radius, - max_length=0.25) - - # Equivalent to 20 degrees latitude - trans_width = 1600e3 - trans_start = 500e3 - dx_min = 12. - - weights = 0.5 * (1 + np.tanh((so_signed_distance - trans_start) / - trans_width)) - - cellWidth = dx_min * (1 - weights) + cellWidth * weights - - return cellWidth, lon, lat diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/atlantic.geojson b/compass/ocean/tests/global_ocean/mesh/so12to60/atlantic.geojson deleted file mode 100644 index 1df1af372d..0000000000 --- a/compass/ocean/tests/global_ocean/mesh/so12to60/atlantic.geojson +++ /dev/null @@ -1,97 +0,0 @@ -{ - "type": "FeatureCollection", - "features": [ - { - "type": "Feature", - "properties": { - "name": "Atlantic region", - "component": "ocean", - "object": "region", - "author": "Xylar Asay-Davis" - }, - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -97.3828125, - 85.05112877979998 - ], - [ - -102.3046875, - 40.17887331434696 - ], - [ - -102.3046875, - 23.241346102386135 - ], - [ - -93.1640625, - 15.623036831528264 - ], - [ - -85.78125, - 13.581920900545844 - ], - [ - -83.583984375, - 9.535748998133627 - ], - [ - -81.2109375, - 8.059229627200192 - ], - [ - -79.013671875, - 9.795677582829743 - ], - [ - -75.9375, - 5.61598581915534 - ], - [ - -77.6953125, - 0 - ], - [ - 16.171875, - 0 - ], - [ - 27.773437499999996, - 26.745610382199022 - ], - [ - 37.96875, - 32.24997445586331 - ], - [ - 39.7265625, - 39.36827914916014 - ], - [ - 32.6953125, - 53.9560855309879 - ], - [ - 37.6171875, - 61.438767493682825 - ], - [ - 25.664062500000004, - 68.26938680456564 - ], - [ - 24.609375, - 85.05112877979998 - ], - [ - -97.3828125, - 85.05112877979998 - ] - ] - ] - } - } - ] -} \ No newline at end of file From 074c7ea3cdfe4339648b5cf9c6ba104d7749bc2b Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Jan 2024 07:58:37 -0600 Subject: [PATCH 05/41] Update the docs for SO12to30 --- docs/developers_guide/ocean/api.rst | 4 +-- .../ocean/test_groups/global_ocean.rst | 34 ++++++++----------- docs/developers_guide/organization.rst | 2 +- docs/tutorials/dev_add_rrm.rst | 4 +-- .../ocean/test_groups/global_ocean.rst | 17 +++++----- docs/users_guide/quick_start.rst | 4 +-- docs/users_guide/test_suites.rst | 4 +-- 7 files changed, 32 insertions(+), 37 deletions(-) diff --git a/docs/developers_guide/ocean/api.rst b/docs/developers_guide/ocean/api.rst index bd94c3cac1..d7f0d3e05d 100644 --- a/docs/developers_guide/ocean/api.rst +++ b/docs/developers_guide/ocean/api.rst @@ -295,8 +295,8 @@ test cases and steps mesh.rrs6to18.RRS6to18BaseMesh mesh.rrs6to18.RRS6to18BaseMesh.build_cell_width_lat_lon - mesh.so12to60.SO12to60BaseMesh - mesh.so12to60.SO12to60BaseMesh.build_cell_width_lat_lon + mesh.so12to30.SO12to30BaseMesh + mesh.so12to30.SO12to30BaseMesh.build_cell_width_lat_lon mesh.wc14.WC14BaseMesh mesh.wc14.WC14BaseMesh.build_cell_width_lat_lon diff --git a/docs/developers_guide/ocean/test_groups/global_ocean.rst b/docs/developers_guide/ocean/test_groups/global_ocean.rst index 2dd28ffc74..dccd5abdd7 100644 --- a/docs/developers_guide/ocean/test_groups/global_ocean.rst +++ b/docs/developers_guide/ocean/test_groups/global_ocean.rst @@ -791,20 +791,18 @@ the higher surface resolution of the 80-layer RRS vertical 
coordinate.
 
     config_rx1_min_layer_thickness = 0.1
 
-.. _dev_ocean_global_ocean_sowisc12to60:
+.. _dev_ocean_global_ocean_sowisc12to30:
 
-SO12to60 and SOwISC12to60
+SO12to30 and SOwISC12to30
 +++++++++++++++++++++++++
 
-The ``SO12to60`` and ``SOwISC12to60`` meshes are Southern Ocean regionally
-refined meshes with 12-km resolution around the Southern Ocean and Antarctica,
-45-km at southern mid-latitudes, 30-km at the equator and in the North
-Atlantic, 60-km resolution in the North Pacific, and 35-km resolution in the
-Arctic.
+The ``SO12to30`` and ``SOwISC12to30`` meshes are Southern Ocean regionally
+refined meshes with 12-km resolution around the Southern Ocean and Antarctica
+and 30-km resolution elsewhere.
 
 The class
-:py:class:`compass.ocean.tests.global_ocean.mesh.so12to60.SO12to60BaseMesh` defines
-the resolution for the meshes. The ``compass.ocean.tests.global_ocean.mesh.so12to60``
+:py:class:`compass.ocean.tests.global_ocean.mesh.so12to30.SO12to30BaseMesh` defines
+the resolution for the meshes. The ``compass.ocean.tests.global_ocean.mesh.so12to30``
 module includes namelist options appropriate for forward simulations with
 split-explicit (but not RK4) time integration on these meshes. These set the
 time step and default run duration for short runs with these meshes.
@@ -847,24 +845,21 @@ The default config options for these meshes are:
     prefix = SO
 
     # a description of the mesh and initial condition
     mesh_description = MPAS Southern Ocean regionally refined mesh for E3SM version
-                       ${e3sm_version} with enhanced resolution (${min_res} km) around
-                       Antarctica, 45-km resolution in the mid southern latitudes,
-                       30-km resolution in a 15-degree band around the equator, 60-km
-                       resolution in northern mid latitudes, 30 km in the north
-                       Atlantic and 35 km in the Arctic. This mesh has <<>>
-                       vertical levels and includes cavities under the ice shelves
-                       around Antarctica.
+                       ${e3sm_version} with enhanced resolution (${min_res} km) around
+                       Antarctica and 30 km elsewhere. This mesh has <<>>
+                       vertical levels and includes cavities under the ice shelves
+                       around Antarctica.
 
     # E3SM version that the mesh is intended for
     e3sm_version = 3
     # The revision number of the mesh, which should be incremented each time the
     # mesh is revised
-    mesh_revision = 1
+    mesh_revision = 2
    # the minimum (finest) resolution in the mesh
     min_res = 12
     # the maximum (coarsest) resolution in the mesh, can be the same as min_res
-    max_res = 60
+    max_res = 30
     # The URL of the pull request documenting the creation of the mesh
-    pull_request = https://github.com/MPAS-Dev/compass/pull/460
+    pull_request = https://github.com/MPAS-Dev/compass/pull/752
 
     # config options related to initial condition and diagnostics support files
@@ -874,6 +869,7 @@ The default config options for these meshes are:
     # CMIP6 grid resolution
     cmip6_grid_res = 180x360
 
+
 The vertical grid is an ``index_tanh_dz`` profile (see
 :ref:`dev_ocean_framework_vertical`) with 64 vertical levels ranging in
 thickness from 10 to 250 m.
diff --git a/docs/developers_guide/organization.rst b/docs/developers_guide/organization.rst
index f4e478bdd1..c41c421f5a 100644
--- a/docs/developers_guide/organization.rst
+++ b/docs/developers_guide/organization.rst
@@ -1675,7 +1675,7 @@ should have cached outputs, the suffix ``c`` can be added to the test number:
 
     compass setup -n 90c 91c 92 ...
In this example, test cases 90 and 91 (``mesh`` and ``init`` test cases from -the ``SOwISC12to60`` global ocean mesh, in this case) are set up with cached +the ``SOwISC12to30`` global ocean mesh, in this case) are set up with cached outputs in all steps and 92 (``performance_test``) is not. This approach is efficient but does not provide any control of which steps use cached outputs and which do not. diff --git a/docs/tutorials/dev_add_rrm.rst b/docs/tutorials/dev_add_rrm.rst index 840666ab14..701c6e9e50 100644 --- a/docs/tutorials/dev_add_rrm.rst +++ b/docs/tutorials/dev_add_rrm.rst @@ -7,7 +7,7 @@ This tutorial presents a step-by-step guide to adding a new mesh to the ``global_ocean`` test group in ``compass`` (see the :ref:`glossary` for definitions of some relevant terms). In this tutorial, I will add a new mesh called YAM ("yet another mesh") that is based on techniques used to build -the existing :ref:`dev_ocean_global_ocean_sowisc12to60` as well as +the existing :ref:`dev_ocean_global_ocean_sowisc12to30` as well as :ref:`dev_ocean_global_ocean_wc14` meshes. .. _dev_tutorial_add_rrm_getting_started: @@ -164,7 +164,7 @@ Next, we need to add this mesh to the list of known meshes: ... - from compass.ocean.tests.global_ocean.mesh.so12to60 import SO12to60BaseMesh + from compass.ocean.tests.global_ocean.mesh.so12to30 import SO12to30BaseMesh from compass.ocean.tests.global_ocean.mesh.wc14 import WC14BaseMesh from compass.ocean.tests.global_ocean.mesh.yam10to60 import YAM10to60BaseMesh from compass.ocean.tests.global_ocean.metadata import ( diff --git a/docs/users_guide/ocean/test_groups/global_ocean.rst b/docs/users_guide/ocean/test_groups/global_ocean.rst index b86063163d..7ca1785b9d 100644 --- a/docs/users_guide/ocean/test_groups/global_ocean.rst +++ b/docs/users_guide/ocean/test_groups/global_ocean.rst @@ -460,24 +460,23 @@ ice-shelf cavities around Antarctica, whereas the RRS6to18 mesh does not. :width: 500 px :align: center -.. _global_ocean_mesh_sowisc12to60: +.. _global_ocean_mesh_sowisc12to30: -SO12to60 and SOwISC12to60 +SO12to30 and SOwISC12to30 ^^^^^^^^^^^^^^^^^^^^^^^^^ -The Southern Ocean 12- to 60-km mesh with ice-shelf cavities (SOwISC12to60), +The Southern Ocean 12- to 30-km mesh with ice-shelf cavities (SOwISC12to30), sometimes called the Southern Ocean regionally refined mesh (SORRM), is the main simulation mesh for the `E3SM v2 Cryosphere Science Campaign `_ and E3SM v3 Polar Processes, Sea-Level Rise, and Coastal Impacts Campaign. -The SO12to60 is the same mesh but without ice-shelf cavities. -The mesh has 12 km resolution around Antarctica, tapering to 45 km in mid -Southern latitudes, 30 km at the equator and in the North Atlantic, 60 km -in the North Pacific, and 35 km in the Arctic. The mesh includes the -:ref:`global_ocean_ice_shelf_cavities` around Antarctica in the ocean domain. +The SO12to30 is the same mesh but without ice-shelf cavities. +The mesh has 12-km resolution around Antarctica and 30-km resolution elsewhere. +The mesh includes the :ref:`global_ocean_ice_shelf_cavities` around Antarctica +in the ocean domain. -.. image:: images/sowisc12to60.png +.. 
image:: images/sowisc12to30.png
     :width: 500 px
     :align: center
 
diff --git a/docs/users_guide/quick_start.rst b/docs/users_guide/quick_start.rst
index 2f875ad713..ed3f5f01fc 100644
--- a/docs/users_guide/quick_start.rst
+++ b/docs/users_guide/quick_start.rst
@@ -428,8 +428,8 @@ The output is:
     -c ocean -t qu240_for_e3sm
     -c ocean -t quwisc240
     -c ocean -t quwisc240_for_e3sm
-    -c ocean -t so12to60
-    -c ocean -t sowisc12to60
+    -c ocean -t so12to30
+    -c ocean -t sowisc12to30
     -c ocean -t wc14
     -c ocean -t wcwisc14
     -c ocean -t wetdry
diff --git a/docs/users_guide/test_suites.rst b/docs/users_guide/test_suites.rst
index bea5dd7582..94943129bb 100644
--- a/docs/users_guide/test_suites.rst
+++ b/docs/users_guide/test_suites.rst
@@ -41,8 +41,8 @@ the current set of available test suites is:
     -c ocean -t qu240_for_e3sm
     -c ocean -t quwisc240
     -c ocean -t quwisc240_for_e3sm
-    -c ocean -t so12to60
-    -c ocean -t sowisc12to60
+    -c ocean -t so12to30
+    -c ocean -t sowisc12to30
     -c ocean -t wc14
     -c ocean -t wcwisc14
     -c ocean -t wetdry

From 028fe16d42f7f0b65de972ea11663acc12151436 Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Wed, 10 Apr 2024 06:02:40 -0500
Subject: [PATCH 06/41] Switch to 80 vertical levels

---
 .../global_ocean/mesh/so12to30/so12to30.cfg   | 33 +++++++++++++++----
 1 file changed, 26 insertions(+), 7 deletions(-)

diff --git a/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg b/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg
index d0bdfa0351..d7450979bb 100644
--- a/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg
+++ b/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg
@@ -2,28 +2,47 @@
 [vertical_grid]
 
 # the type of vertical grid
-grid_type = index_tanh_dz
+grid_type = tanh_dz
 
 # Number of vertical levels
-vert_levels = 64
+vert_levels = 80
 
 # Depth of the bottom of the ocean
 bottom_depth = 5500.0
 
 # The minimum layer thickness
-min_layer_thickness = 10.0
+min_layer_thickness = 2.0
 
 # The maximum layer thickness
-max_layer_thickness = 250.0
+max_layer_thickness = 150.0
 
-# The characteristic number of levels over which the transition between
-# the min and max occurs
-transition_levels = 28
+# Options related to adjusting the sea-surface height or land-ice pressure
+# below ice shelves so they are dynamically consistent with one another
+[ssh_adjustment]
+
+# the number of iterations of ssh adjustment to perform
+iterations = 4
 
 # options for global ocean testcases
 [global_ocean]
 
+## config options related to the initial_state step
+
+# minimum number of vertical levels, both in the open ocean and in ice-shelf
+# cavities
+min_levels = 5
+cavity_min_levels = ${min_levels}
+
+# minimum thickness of layers in ice-shelf cavities at the beginning and end
+# of iterative ssh init
+cavity_min_layer_thickness_initial = 2.0
+cavity_min_layer_thickness_final = 1.0
+
+# Maximum allowed Haney number for configurations with ice-shelf cavities
+rx1_max = 20.0
+
+
 # the approximate number of cells in the mesh
 approx_cell_count = 570000

From 0e3ed26bbff9ca8107f5487a837044066f9a2da Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Tue, 21 May 2024 08:39:36 -0500
Subject: [PATCH 07/41] Add namelist options for haney-number vert. coord.
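
For context: the Haney number rx1 measures how steeply layer interfaces tilt
across an edge relative to the local layer thickness, and the config_rx1_*
namelist options below control the iterative smoothing that keeps it under
rx1_max. A minimal sketch of the quantity itself, assuming z1 and z2 hold
interface heights (positive up, as in MPAS-Ocean) at the two cells sharing
an edge; this helper is illustrative, not part of the patch:

    def haney_number(z1, z2, k):
        # tilt of the top and bottom interfaces of layer k across the edge,
        # relative to the combined layer thickness at the two cells; large
        # values indicate steep, error-prone terrain-following layers
        tilt = abs(z1[k] - z2[k]) + abs(z1[k + 1] - z2[k + 1])
        thickness = (z1[k] - z1[k + 1]) + (z2[k] - z2[k + 1])
        return tilt / thickness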
---
 .../ocean/tests/global_ocean/mesh/so12to30/namelist.init | 8 ++++++++
 1 file changed, 8 insertions(+)
 create mode 100644 compass/ocean/tests/global_ocean/mesh/so12to30/namelist.init

diff --git a/compass/ocean/tests/global_ocean/mesh/so12to30/namelist.init b/compass/ocean/tests/global_ocean/mesh/so12to30/namelist.init
new file mode 100644
index 0000000000..16185cf866
--- /dev/null
+++ b/compass/ocean/tests/global_ocean/mesh/so12to30/namelist.init
@@ -0,0 +1,8 @@
+config_rx1_inner_iter_count = 20
+config_rx1_horiz_smooth_weight = 1.0
+config_rx1_vert_smooth_weight = 1.0
+config_rx1_slope_weight = 1e-2
+config_rx1_zstar_weight = 1.0
+config_rx1_min_levels = 5
+config_rx1_min_layer_thickness = 2.0
+config_global_ocean_minimum_depth = 10.0

From 23c773b21e1e00b73703b9fc498f467a533e2c1d Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Thu, 11 Jul 2024 01:30:10 -0500
Subject: [PATCH 08/41] Increase min cavity layer thickness to 2 m

---
 compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg b/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg
index d7450979bb..3f7551ae34 100644
--- a/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg
+++ b/compass/ocean/tests/global_ocean/mesh/so12to30/so12to30.cfg
@@ -37,7 +37,7 @@ cavity_min_levels = ${min_levels}
 # minimum thickness of layers in ice-shelf cavities at the beginning and end
 # of iterative ssh init
 cavity_min_layer_thickness_initial = 2.0
-cavity_min_layer_thickness_final = 1.0
+cavity_min_layer_thickness_final = 2.0
 
 # Maximum allowed Haney number for configurations with ice-shelf cavities
 rx1_max = 20.0

From 84244688dd8152cf7ea2f484c1f4b7a01d276eb6 Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Thu, 11 Jul 2024 01:56:51 -0500
Subject: [PATCH 09/41] Add water-column thickness to init plots

When we plot histograms of the initial condition, we now also plot
histograms of the water-column thickness in the open ocean and in
cavities.
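
A sketch of the quantity the new panels show, following the conventions in
the diff below, where ds is the opened initial-state dataset:

    # bottomDepth is positive down in MPAS-Ocean, so the water-column
    # thickness is ssh minus the (negative) seafloor elevation
    wc_thickness = ds.ssh - (-ds.bottomDepth)
    # landIceMask == 1 marks ice-shelf cavities, 0 marks the open ocean
    open_ocean_wc = wc_thickness.where(ds.landIceMask == 0)
    cavity_wc = wc_thickness.where(ds.landIceMask == 1)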
--- compass/ocean/plot.py | 38 +++++++++++++++++++++++++++++--------- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/compass/ocean/plot.py b/compass/ocean/plot.py index 21b9abd75e..ae5eb91408 100644 --- a/compass/ocean/plot.py +++ b/compass/ocean/plot.py @@ -29,7 +29,7 @@ def plot_initial_state(input_file_name='initial_state.nc', nVertLevels = ds.sizes['nVertLevels'] fig = plt.figure() - fig.set_size_inches(16.0, 12.0) + fig.set_size_inches(16.0, 16.0) plt.clf() print('plotting histograms of the initial condition') @@ -43,7 +43,7 @@ def plot_initial_state(input_file_name='initial_state.nc', 'number layers: {}\n\n'.format(nVertLevels) + \ ' min val max val variable name\n' - plt.subplot(3, 3, 2) + plt.subplot(4, 3, 2) varName = 'maxLevelCell' var = ds[varName] maxLevelCell = var.values - 1 @@ -53,7 +53,7 @@ def plot_initial_state(input_file_name='initial_state.nc', txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, var.max().values, varName) - plt.subplot(3, 3, 3) + plt.subplot(4, 3, 3) varName = 'bottomDepth' var = ds[varName] xarray.plot.hist(var, bins=nVertLevels - 4) @@ -75,7 +75,7 @@ def plot_initial_state(input_file_name='initial_state.nc', cellMask = xarray.DataArray(data=cellMask, dims=('nCells', 'nVertLevels')) edgeMask = xarray.DataArray(data=edgeMask, dims=('nEdges', 'nVertLevels')) - plt.subplot(3, 3, 4) + plt.subplot(4, 3, 4) varName = 'temperature' var = ds[varName].isel(Time=0).where(cellMask) xarray.plot.hist(var, bins=100, log=True) @@ -84,7 +84,7 @@ def plot_initial_state(input_file_name='initial_state.nc', txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, var.max().values, varName) - plt.subplot(3, 3, 5) + plt.subplot(4, 3, 5) varName = 'salinity' var = ds[varName].isel(Time=0).where(cellMask) xarray.plot.hist(var, bins=100, log=True) @@ -92,7 +92,7 @@ def plot_initial_state(input_file_name='initial_state.nc', txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, var.max().values, varName) - plt.subplot(3, 3, 6) + plt.subplot(4, 3, 6) varName = 'layerThickness' var = ds[varName].isel(Time=0).where(cellMask) xarray.plot.hist(var, bins=100, log=True) @@ -100,7 +100,7 @@ def plot_initial_state(input_file_name='initial_state.nc', txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, var.max().values, varName) - plt.subplot(3, 3, 7) + plt.subplot(4, 3, 7) varName = 'rx1Edge' var = ds[varName].isel(Time=0).where(edgeMask) maxRx1Edge = var.max().values @@ -110,7 +110,7 @@ def plot_initial_state(input_file_name='initial_state.nc', txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, var.max().values, varName) - plt.subplot(3, 3, 8) + plt.subplot(4, 3, 8) varName = 'areaCell' var = ds[varName] xarray.plot.hist(1e-6 * var, bins=100, log=True) @@ -119,7 +119,7 @@ def plot_initial_state(input_file_name='initial_state.nc', txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, var.max().values, varName) - plt.subplot(3, 3, 9) + plt.subplot(4, 3, 9) varName = 'dcEdge' var = ds[varName] xarray.plot.hist(1e-3 * var, bins=100, log=True) @@ -128,6 +128,26 @@ def plot_initial_state(input_file_name='initial_state.nc', txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, var.max().values, varName) + plt.subplot(4, 3, 10) + var = ds.ssh - (-ds.bottomDepth) + if 'landIceMask' in ds: + mask = ds.landIceMask == 0 + var = var.where(mask) + xarray.plot.hist(var, bins=100, log=True) + plt.ylabel('frequency') + plt.xlabel(r'open ocean water-column thickness (m)') + txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, + 
var.max().values, 'open ocean wc')
+
+    if 'landIceMask' in ds:
+        plt.subplot(4, 3, 11)
+        var = (ds.ssh - (-ds.bottomDepth)).where(ds.landIceMask == 1)
+        xarray.plot.hist(var, bins=100, log=True)
+        plt.ylabel('frequency')
+        plt.xlabel(r'ice-shelf cavity water-column thickness (m)')
+        txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values,
+                                              var.max().values, 'cavity wc')
+
     font = FontProperties()
     font.set_family('monospace')
     font.set_size(12)

From 3412c81f8247095659e1ed980a51a2aab7b692ef Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Thu, 1 Aug 2024 14:40:50 -0500
Subject: [PATCH 10/41] Enforce minimum column thickness in initial_state

In cavities, we thicken the water column to make sure it has the minimum
thickness. The hope is that this makes the vertical coordinate converge
more quickly and smoothly to its final result.
---
 .../tests/global_ocean/init/initial_state.py | 41 +++++++++++++++----
 1 file changed, 34 insertions(+), 7 deletions(-)

diff --git a/compass/ocean/tests/global_ocean/init/initial_state.py b/compass/ocean/tests/global_ocean/init/initial_state.py
index 71f47e94c6..5b6e511bd7 100644
--- a/compass/ocean/tests/global_ocean/init/initial_state.py
+++ b/compass/ocean/tests/global_ocean/init/initial_state.py
@@ -1,6 +1,7 @@
 import os
 from importlib.resources import contents, read_text
 
+import numpy as np
 import xarray as xr
 from jinja2 import Template
 from mpas_tools.io import write_netcdf
@@ -194,7 +195,7 @@ def run(self):
         """
         config = self.config
         section = config['global_ocean']
-        self._smooth_topography()
+        topo_filename = self._smooth_topography()
 
         interfaces = generate_1d_grid(config=config)
 
@@ -223,8 +224,16 @@ def run(self):
             namelist['config_rx1_min_layer_thickness'] = \
                 f'{cavity_min_layer_thickness}'
 
+            min_water_column_thickness = \
+                cavity_min_layer_thickness * cavity_min_levels
+
+            topo_filename = self._dig_cavity_bed_elevation(
+                topo_filename, min_water_column_thickness)
+
         self.update_namelist_at_runtime(namelist)
 
+        symlink(target=topo_filename, link_name='topography.nc')
+
         update_pio = config.getboolean('global_ocean', 'init_update_pio')
         run_model(self, update_pio=update_pio)
 
@@ -250,10 +259,8 @@ def _smooth_topography(self):
         section = config['global_ocean']
         num_passes = section.getint('topo_smooth_num_passes')
         if num_passes == 0:
-            # just symlink the culled topography to be the topography used for
-            # the initial condition
-            symlink(target='topography_culled.nc', link_name='topography.nc')
-            return
+            # just return the culled topography file name
+            return 'topography_culled.nc'
 
         distance_limit = section.getfloat('topo_smooth_distance_limit')
         std_deviation = section.getfloat('topo_smooth_std_deviation')
@@ -274,7 +281,8 @@ def _smooth_topography(self):
         check_call(args=['ocean_smooth_topo_before_init'],
                    logger=self.logger)
 
-        with (xr.open_dataset('topography_culled.nc') as ds_topo):
+        out_filename = 'topography_smoothed.nc'
+        with xr.open_dataset('topography_culled.nc') as ds_topo:
             with xr.open_dataset('topography_orig_and_smooth.nc') as ds_smooth:
                 for field in ['bed_elevation', 'landIceDraftObserved',
                               'landIceThkObserved']:
@@ -282,4 +290,23 @@ def _smooth_topography(self):
                     ds_topo[field] = ds_smooth[f'{field}New']
                     ds_topo[field].attrs = attrs
 
-        write_netcdf(ds_topo, 'topography.nc')
+        write_netcdf(ds_topo, out_filename)
+        return out_filename
+
+    def _dig_cavity_bed_elevation(self, in_filename,
+                                  min_water_column_thickness):
+        """ Dig bed elevation to preserve minimum water-column thickness """
+
+        out_filename = 'topography_dig_bed.nc'
+        with
xr.open_dataset(in_filename) as ds_topo: + bed = ds_topo.bed_elevation + attrs = bed.attrs + draft = ds_topo.landIceDraftObserved + max_bed = draft - min_water_column_thickness + mask = np.logical_or(draft == 0., bed < max_bed) + bed = xr.where(mask, bed, max_bed) + ds_topo['bed_elevation'] = bed + ds_topo['bed_elevation'].attrs = attrs + + write_netcdf(ds_topo, out_filename) + return out_filename From 197417ff2f268183d673b00b8cc7fa3556b79e20 Mon Sep 17 00:00:00 2001 From: Andrew Nolan Date: Thu, 12 Sep 2024 10:03:13 -0600 Subject: [PATCH 11/41] Add reservation option to `job` config section for chicoma --- compass/job/__init__.py | 8 +++++++- compass/job/job_script.template | 3 +++ compass/machines/chicoma-cpu.cfg | 3 +++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/compass/job/__init__.py b/compass/job/__init__.py index 54a0421235..21ec57f582 100644 --- a/compass/job/__init__.py +++ b/compass/job/__init__.py @@ -82,6 +82,11 @@ def write_job_script(config, machine, target_cores, min_cores, work_dir, else: constraint = '' + if config.has_option('job', 'reservation'): + reservation = config.get('job', 'reservation') + else: + reservation = '' + job_name = config.get('job', 'job_name') if job_name == '<<>>': if suite == '': @@ -96,7 +101,8 @@ def write_job_script(config, machine, target_cores, min_cores, work_dir, text = template.render(job_name=job_name, account=account, nodes=f'{nodes}', wall_time=wall_time, qos=qos, partition=partition, constraint=constraint, - suite=suite, pre_run_commands=pre_run_commands, + reservation=reservation, suite=suite, + pre_run_commands=pre_run_commands, post_run_commands=post_run_commands) text = _clean_up_whitespace(text) if suite == '': diff --git a/compass/job/job_script.template b/compass/job/job_script.template index 09030e2a6a..37a384a280 100644 --- a/compass/job/job_script.template +++ b/compass/job/job_script.template @@ -10,6 +10,9 @@ {% if qos != '' -%} #SBATCH --qos={{ qos }} {%- endif %} +{% if reservation != '' -%} +#SBATCH --reservation={{ reservation }} +{%- endif %} {% if partition != '' -%} #SBATCH --partition={{ partition }} {%- endif %} diff --git a/compass/machines/chicoma-cpu.cfg b/compass/machines/chicoma-cpu.cfg index 34211cf53f..73e75d3ffe 100644 --- a/compass/machines/chicoma-cpu.cfg +++ b/compass/machines/chicoma-cpu.cfg @@ -51,5 +51,8 @@ threads_per_core = 1 # The job partition to use partition = standard +# The job reservation to use (needed for debug jobs) +reservation = + # The job quality of service (QOS) to use qos = standard From 5fab51373c71e6ff228dfa88e2cf7b92d1ad12f7 Mon Sep 17 00:00:00 2001 From: Andrew Nolan Date: Thu, 12 Sep 2024 10:52:36 -0600 Subject: [PATCH 12/41] Update docs for using chicoma debug reservation --- docs/developers_guide/machines/chicoma.rst | 19 +++++++++++++++++++ docs/users_guide/machines/chicoma.rst | 7 +++++-- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/docs/developers_guide/machines/chicoma.rst b/docs/developers_guide/machines/chicoma.rst index 8bc14cfeec..69d4c7139e 100644 --- a/docs/developers_guide/machines/chicoma.rst +++ b/docs/developers_guide/machines/chicoma.rst @@ -19,3 +19,22 @@ Then, you can build the MPAS model with .. code-block:: bash make [DEBUG=true] gnu-cray + +debug jobs +~~~~~~~~~~ + +In order to run jobs in the debug queue, you will need to use: + +.. 
+
+    # Config options related to creating a job script
+    [job]
+
+    # The job partition to use
+    partition = debug
+
+    # The job reservation to use (needed for debug jobs)
+    reservation = debug
+
+    # The job quality of service (QOS) to use
+    qos =
diff --git a/docs/users_guide/machines/chicoma.rst b/docs/users_guide/machines/chicoma.rst
index aa379af53c..3bc304db3c 100644
--- a/docs/users_guide/machines/chicoma.rst
+++ b/docs/users_guide/machines/chicoma.rst
@@ -138,9 +138,12 @@ when setting up test cases or a test suite:
     # The job partition to use
     partition = standard
 
+    # The job reservation to use (needed for debug jobs)
+    reservation =
+
     # The job quality of service (QOS) to use
     qos = standard
-
+
 Additionally, some relevant config options come from the
 `mache <https://github.com/E3SM-Project/mache/>`_ package:
@@ -213,4 +216,4 @@ To build the MPAS model with
 
 .. code-block:: bash
 
-    make [DEBUG=true] [OPENMP=true] [ALBANY=true] gnu-cray
\ No newline at end of file
+    make [DEBUG=true] [OPENMP=true] [ALBANY=true] gnu-cray

From 79cfe7c1ed0193e3123905c00282270c8717a795 Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Tue, 2 Apr 2024 15:09:10 -0700
Subject: [PATCH 13/41] Unify `compass.landice.mesh.interp...` functions into
 a single func.

Added a single function that should incorporate all the functionality of the
existing `compass.landice.mesh.interp_ais_bedmachine()` and
`compass.landice.mesh.interp_ais_measures()` functions, but that is general
enough to work for both GIS and AIS, as well as for MEASURES and BedMachine
datasets.
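For example, the new private helper `__guess_scrip_name()` derives the scrip
file name by trimming the source file name at a version tag (`_v02_`, `_v2_`,
etc.) when one is present. A sketch of the intended behavior:

    __guess_scrip_name(
        'BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc')
    # -> 'BedMachineAntarctica_2020-07-15_v02.scrip.nc'

    # no version tag matches here, so only the extension is stripped
    __guess_scrip_name('greenland_vel_mosaic500_extrap.nc')
    # -> 'greenland_vel_mosaic500_extrap.scrip.nc'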
---
 compass/landice/mesh.py | 68 +++++++++++++++++++++++++++------------
 1 file changed, 49 insertions(+), 19 deletions(-)

diff --git a/compass/landice/mesh.py b/compass/landice/mesh.py
index 33fcc9931f..b685c5ef92 100644
--- a/compass/landice/mesh.py
+++ b/compass/landice/mesh.py
@@ -1,4 +1,5 @@
 import os
+import re
 import sys
 import time
 from shutil import copyfile
@@ -1002,15 +1003,15 @@ def preprocess_ais_data(self, source_gridded_dataset,
     return preprocessed_gridded_dataset
 
 
-def interp_ais_bedmachine(self, data_path, mali_scrip, nProcs, dest_file):
+def interp_gridded2mali(self, source_file, mali_scrip, nProcs, dest_file, proj,
+                        variables="all"):
     """
-    Interpolates BedMachine thickness and bedTopography dataset
-    to a MALI mesh
+    Interpolate gridded dataset (e.g. MEASURES, BedMachine) onto a MALI mesh
 
     Parameters
     ----------
-    data_path : str
-        path to AIS datasets, including BedMachine
+    source_file : str
+        filepath to the source gridded dataset to be interpolated
 
     mali_scrip : str
         name of scrip file corresponding to destination MALI mesh
@@ -1020,19 +1021,45 @@
     dest_file: str
         MALI input file to which data should be remapped
+
+    proj: str
+        projection of the source dataset ...
+
+    variables: str or list of strings
+
     """
+    def __guess_scrip_name(filename):
+
+        # try searching for string followed by a version number
+        match = re.search(r'(^.*[_-]v\d*[_-])+', filename)
+
+        if match:
+            # slice string to end of match minus one to leave off final _ or -
+            base_fn = filename[:match.end() - 1]
+        else:
+            # no matches were found, just use the filename (minus extension)
+            base_fn = os.path.splitext(filename)[0]
+
+        return f"{base_fn}.scrip.nc"
+
     logger = self.logger
 
-    logger.info('creating scrip file for BedMachine dataset')
+    source_scrip = __guess_scrip_name(os.path.basename(source_file))
+    weights_filename = "gridded_to_MPAS_weights.nc"
+
+    if variables != "all":
+        # make sure this is a list
+
+        # if list, then join the list making it a space seprated list for cli
+        variables = " ".join(variables)
+
+    logger.info('creating scrip file for source dataset')
     # Note: writing scrip file to workdir
     args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
-            '-i',
-            os.path.join(data_path,
-                         'BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc'),  # noqa
-            '-s',
-            'BedMachineAntarctica_2020-07-15_v02.scrip.nc',
-            '-p', 'ais-bedmap2',
+            '-i', source_file,
+            '-s', source_scrip,
+            '-p', proj,
             '-r', '2']
     check_call(args, logger=logger)
 
@@ -1041,10 +1068,9 @@
     # 2 nodes is too few. I have not tested anything in between.
     logger.info('generating gridded dataset -> MPAS weights')
     args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen',
-            '--source',
-            'BedMachineAntarctica_2020-07-15_v02.scrip.nc',
+            '--source', source_scrip,
             '--destination', mali_scrip,
-            '--weight', 'BedMachine_to_MPAS_weights.nc',
+            '--weight', weights_filename,
             '--method', 'conserve',
             "--netcdf4",
             "--dst_regional", "--src_regional", '--ignore_unmapped']
     check_call(args, logger=logger)
 
     # Perform actual interpolation using the weights
     logger.info('calling interpolate_to_mpasli_grid.py')
-    args = ['interpolate_to_mpasli_grid.py', '-s',
-            os.path.join(data_path,
-                         'BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc'),  # noqa
+    args = ['interpolate_to_mpasli_grid.py',
+            '-s', source_file,
             '-d', dest_file,
             '-m', 'e',
-            '-w', 'BedMachine_to_MPAS_weights.nc']
+            '-w', weights_filename,
+            '-v', variables]
+
     check_call(args, logger=logger)
 
+    # should I delete the weights file, since that could cause namespace
+    # conflicts when multiple interpolations are done?
+
 
 def interp_ais_measures(self, data_path, mali_scrip, nProcs, dest_file):
     """

From 8aedc7fbbf280d21ab0f107e95040e05d718a044 Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Tue, 2 Apr 2024 15:12:23 -0700
Subject: [PATCH 14/41] Add new `mesh_gen` config options that allow
 abstraction of interp funcs

---
 .../landice/tests/antarctica/mesh_gen/mesh_gen.cfg | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/compass/landice/tests/antarctica/mesh_gen/mesh_gen.cfg b/compass/landice/tests/antarctica/mesh_gen/mesh_gen.cfg
index 3ce52b0ee7..74cbae28c0 100644
--- a/compass/landice/tests/antarctica/mesh_gen/mesh_gen.cfg
+++ b/compass/landice/tests/antarctica/mesh_gen/mesh_gen.cfg
@@ -52,5 +52,17 @@ use_bed = False
 # (default value is for Perlmutter)
 data_path = /global/cfs/cdirs/fanssie/standard_datasets/AIS_datasets
 
+# filename of the BedMachine thickness and bedTopography dataset
+# (default value is for Perlmutter)
+bedmachine_filename = BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc
+
+# filename of the MEASURES ice velocity dataset
+# (default value is for Perlmutter)
+measures_filename = antarctica_ice_velocity_450m_v2_edits_extrap.nc
+
+# projection of the source datasets, according to the dictionary keys in
+# create_SCRIP_file_from_planar_rectangular_grid.py from MPAS_Tools
+src_proj = ais-bedmap2
+
 # number of processors to use for ESMF_RegridWeightGen
 nProcs = 128

From d3ce601215d034ce8b934817baa28ae48fa13814 Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Wed, 3 Apr 2024 13:36:07 -0700
Subject: [PATCH 15/41] Clean up unified interpolation method and use in
 antarctic testcase.

---
 compass/landice/mesh.py                  | 60 ------------------------
 compass/landice/tests/antarctica/mesh.py | 36 +++++++++-----
 2 files changed, 24 insertions(+), 72 deletions(-)

diff --git a/compass/landice/mesh.py b/compass/landice/mesh.py
index b685c5ef92..2f9712eb12 100644
--- a/compass/landice/mesh.py
+++ b/compass/landice/mesh.py
@@ -1091,66 +1091,6 @@ def __guess_scrip_name(filename):
     # conflicts when multiple interpolations are done?
 
 
-def interp_ais_measures(self, data_path, mali_scrip, nProcs, dest_file):
-    """
-    Interpolates MEASURES ice velocity dataset
-    to a MALI mesh
-
-    Parameters
-    ----------
-    data_path : str
-        path to AIS datasets, including BedMachine
-
-    mali_scrip : str
-        name of scrip file corresponding to destination MALI mesh
-
-    nProcs : int
-        number of processors to use for generating remapping weights
-
-    dest_file: str
-        MALI input file to which data should be remapped
-    """
-
-    logger = self.logger
-
-    logger.info('creating scrip file for velocity dataset')
-    # Note: writing scrip file to workdir
-    args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
-            '-i',
-            os.path.join(data_path,
-                         'antarctica_ice_velocity_450m_v2_edits_extrap.nc'),
-            '-s',
-            'antarctica_ice_velocity_450m_v2.scrip.nc',
-            '-p', 'ais-bedmap2',
-            '-r', '2']
-    check_call(args, logger=logger)
-
-    # Generate remapping weights
-    logger.info('generating gridded dataset -> MPAS weights')
-    args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen',
-            '--source',
-            'antarctica_ice_velocity_450m_v2.scrip.nc',
-            '--destination', mali_scrip,
-            '--weight', 'measures_to_MPAS_weights.nc',
-            '--method', 'conserve',
-            "--netcdf4",
-            "--dst_regional", "--src_regional", '--ignore_unmapped']
-    check_call(args, logger=logger)
-
-    logger.info('calling interpolate_to_mpasli_grid.py')
-    args = ['interpolate_to_mpasli_grid.py',
-            '-s',
-            os.path.join(data_path,
-                         'antarctica_ice_velocity_450m_v2_edits_extrap.nc'),
-            '-d', dest_file,
-            '-m', 'e',
-            '-w', 'measures_to_MPAS_weights.nc',
-            '-v', 'observedSurfaceVelocityX',
-            'observedSurfaceVelocityY',
-            'observedSurfaceVelocityUncertainty']
-    check_call(args, logger=logger)
-
-
 def clean_up_after_interp(fname):
     """
     Perform some final clean up steps after interpolation
diff --git a/compass/landice/tests/antarctica/mesh.py b/compass/landice/tests/antarctica/mesh.py
index 609bb7275c..f314ea6e66 100644
--- a/compass/landice/tests/antarctica/mesh.py
+++ b/compass/landice/tests/antarctica/mesh.py
@@ -9,8 +9,7 @@
     build_cell_width,
     build_mali_mesh,
     clean_up_after_interp,
-    interp_ais_bedmachine,
-    interp_ais_measures,
+    interp_gridded2mali,
     make_region_masks,
     preprocess_ais_data,
 )
@@ -61,19 +60,26 @@ def run(self):
         """
         logger = self.logger
         config = self.config
+
         section_ais = config['antarctica']
-        data_path = section_ais.get('data_path')
+
         nProcs = section_ais.get('nProcs')
+        src_proj = section_ais.get("src_proj")
+        data_path = section_ais.get('data_path')
+        measures_filename = section_ais.get("measures_filename")
+        bedmachine_filename = section_ais.get("bedmachine_filename")
+
+        measures_dataset = os.path.join(data_path, measures_filename)
+        bedmachine_dataset = os.path.join(data_path, bedmachine_filename)
 
         section_name = 'mesh'
 
+        # do we want to add this to the config file?
         source_gridded_dataset = 'antarctica_8km_2024_01_29.nc'
 
-        bedmachine_path = os.path.join(
-            data_path,
-            'BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc')  # noqa
         bm_updated_gridded_dataset = add_bedmachine_thk_to_ais_gridded_data(
-            self, source_gridded_dataset, bedmachine_path)
+            self, source_gridded_dataset, bedmachine_dataset)
+
         logger.info('calling build_cell_width')
         cell_width, x1, y1, geom_points, geom_edges, floodFillMask = \
             build_cell_width(
@@ -91,7 +97,7 @@ def run(self):
             self, cell_width, x1, y1, geom_points, geom_edges,
             mesh_name=self.mesh_filename, section_name=section_name,
             gridded_dataset=bm_updated_gridded_dataset,
-            projection='ais-bedmap2', geojson_file=None)
+            projection=src_proj, geojson_file=None)
 
         # Now that we have base mesh with standard interpolation
         # perform advanced interpolation for specific fields
@@ -131,10 +137,16 @@ def run(self):
 
         # Now perform bespoke interpolation of geometry and velocity data
         # from their respective sources
-        interp_ais_bedmachine(self, data_path, dst_scrip_file, nProcs,
-                              self.mesh_filename)
-        interp_ais_measures(self, data_path, dst_scrip_file, nProcs,
-                            self.mesh_filename)
+        interp_gridded2mali(self, bedmachine_dataset, dst_scrip_file, nProcs,
+                            self.mesh_filename, src_proj, variables="all")
+
+        # only interpolate a subset of MEASURES variables onto the MALI mesh
+        measures_vars = ['observedSurfaceVelocityX',
+                         'observedSurfaceVelocityY',
+                         'observedSurfaceVelocityUncertainty']
+        interp_gridded2mali(self, measures_dataset, dst_scrip_file, nProcs,
+                            self.mesh_filename, src_proj,
+                            variables=measures_vars)
 
         # perform some final cleanup details
         clean_up_after_interp(self.mesh_filename)

From d9ab5b1167f9d7b3abbcbba4293926a73a0a4be0 Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Tue, 16 Apr 2024 12:05:21 -0700
Subject: [PATCH 16/41] Use unified interpolation and framework functions for
 GIS mesh_gen.
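With the unified function, the GIS step in the diff below reduces to the same
two-call pattern used for AIS in the previous commit (sketch only;
`bedmachine_dataset`, `measures_dataset`, `dst_scrip_file` and `src_proj` are
set up earlier in the step's `run()` method):

    # remap all variables from the BedMachine dataset
    interp_gridded2mali(self, bedmachine_dataset, dst_scrip_file, nProcs,
                        self.mesh_filename, src_proj, variables="all")

    # remap only the velocity variables from the MEASURES dataset
    measures_vars = ['observedSurfaceVelocityX',
                     'observedSurfaceVelocityY',
                     'observedSurfaceVelocityUncertainty']
    interp_gridded2mali(self, measures_dataset, dst_scrip_file, nProcs,
                        self.mesh_filename, src_proj, variables=measures_vars)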
---
 compass/landice/tests/greenland/mesh.py       | 177 ++++++------------
 .../tests/greenland/mesh_gen/mesh_gen.cfg     |  23 ++-
 2 files changed, 77 insertions(+), 123 deletions(-)

diff --git a/compass/landice/tests/greenland/mesh.py b/compass/landice/tests/greenland/mesh.py
index 37b66aeb96..80668d6a84 100644
--- a/compass/landice/tests/greenland/mesh.py
+++ b/compass/landice/tests/greenland/mesh.py
@@ -1,14 +1,14 @@
-from os.path import exists
-from shutil import copyfile
+import os
 
 import netCDF4
 import numpy as np
-from mpas_tools.logging import check_call
 from mpas_tools.scrip.from_mpas import scrip_from_mpas
 
 from compass.landice.mesh import (
     build_cell_width,
     build_mali_mesh,
+    clean_up_after_interp,
+    interp_gridded2mali,
     make_region_masks,
 )
 from compass.model import make_graph_file
@@ -21,8 +21,8 @@ class Mesh(Step):
 
     Attributes
     ----------
-    mesh_type : str
-        The resolution or mesh type of the test case
+    mesh_filename : str
+        File name of the MALI mesh
     """
     def __init__(self, test_case):
         """
@@ -33,14 +33,17 @@ def __init__(self, test_case):
         test_case : compass.TestCase
             The test case this step belongs to
 
-        mesh_type : str
-            The resolution or mesh type of the test case
         """
         super().__init__(test_case=test_case, name='mesh', cpus_per_task=128,
                          min_cpus_per_task=1)
 
+        # output files
+        self.mesh_filename = 'GIS.nc'
         self.add_output_file(filename='graph.info')
-        self.add_output_file(filename='GIS.nc')
+        self.add_output_file(filename=self.mesh_filename)
+        self.add_output_file(filename=f'{self.mesh_filename[:-3]}_'
+                             f'regionMasks.nc')
+        # input files
         self.add_input_file(
             filename='greenland_1km_2024_01_29.epsg3413.icesheetonly.nc',
             target='greenland_1km_2024_01_29.epsg3413.icesheetonly.nc',
@@ -56,132 +59,63 @@ def run(self):
         Run this step of the test case
         """
         logger = self.logger
-        mesh_name = 'GIS.nc'
-        section_name = 'mesh'
         config = self.config
-        section = config[section_name]
-        data_path = section.get('data_path')
-        nProcs = section.get('nProcs')
+
+        section_gis = config['greenland']
+
+        nProcs = section_gis.get('nProcs')
+        src_proj = section_gis.get("src_proj")
+        data_path = section_gis.get('data_path')
+        measures_filename = section_gis.get("measures_filename")
+        bedmachine_filename = section_gis.get("bedmachine_filename")
+
+        measures_dataset = os.path.join(data_path, measures_filename)
+        bedmachine_dataset = os.path.join(data_path, bedmachine_filename)
+
+        section_name = 'mesh'
+
+        source_gridded_dataset_1km = 'greenland_1km_2024_01_29.epsg3413.icesheetonly.nc'  # noqa: E501
+        source_gridded_dataset_2km = 'greenland_2km_2024_01_29.epsg3413.nc'
 
         logger.info('calling build_cell_width')
         cell_width, x1, y1, geom_points, geom_edges, floodMask = \
             build_cell_width(
                 self, section_name=section_name,
-                gridded_dataset='greenland_2km_2024_01_29.epsg3413.nc',
+                gridded_dataset=source_gridded_dataset_2km,
                 flood_fill_start=[100, 700])
 
+        # Now build the base mesh and perform the standard interpolation
         build_mali_mesh(
             self, cell_width, x1, y1, geom_points, geom_edges,
-            mesh_name=mesh_name, section_name=section_name,
-            gridded_dataset='greenland_1km_2024_01_29.epsg3413.icesheetonly.nc',  # noqa
-            projection='gis-gimp', geojson_file=None)
-
-        # Create scrip files if they don't already exist
-        if exists(data_path + '/BedMachineGreenland-v5.scrip.nc'):
-            logger.info('BedMachine script file exists;'
-                        ' skipping file creation')
-        else:
-            logger.info('creating scrip file for BedMachine dataset')
-            args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
-                    '-i', data_path +
-                    '/BedMachineGreenland-v5_edits_floodFill_extrap.nc',  # noqa
-                    '-s', data_path + '/BedMachineGreenland-v5.scrip.nc',
-                    '-p', 'gis-gimp', '-r', '2']
-            check_call(args, logger=logger)
-        if exists(data_path + '/greenland_vel_mosaic500.scrip.nc'):
-            logger.info('Measures script file exists; skipping file creation')
-        else:
-            logger.info('creating scrip file for 2006-2010 velocity dataset')
-            args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
-                    '-i', data_path + '/greenland_vel_mosaic500_extrap.nc',
-                    '-s', data_path + '/greenland_vel_mosaic500.scrip.nc',
-                    '-p', 'gis-gimp', '-r', '2']
-            check_call(args, logger=logger)
-
-        logger.info('calling set_lat_lon_fields_in_planar_grid.py')
-        args = ['set_lat_lon_fields_in_planar_grid.py', '-f',
-                'GIS.nc', '-p', 'gis-gimp']
-        check_call(args, logger=logger)
-
+        # Create scrip file for the newly generated mesh
         logger.info('creating scrip file for destination mesh')
-        scrip_from_mpas('GIS.nc', 'GIS.scrip.nc')
-        args = ['create_SCRIP_file_from_MPAS_mesh.py',
-                '-m', 'GIS.nc',
-                '-s', 'GIS.scrip.nc']
-        check_call(args, logger=logger)
-
-        # Create weight files from datasets to mesh
-        if exists('BedMachine_to_MPAS_weights.nc'):
-            logger.info('BedMachine_to_MPAS_weights.nc exists; skipping')
-        else:
-            logger.info('generating gridded dataset -> MPAS weights')
-            args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen', '--source',
-                    data_path + 'BedMachineGreenland-v5.scrip.nc',
-                    '--destination',
-                    'GIS.scrip.nc',
-                    '--weight', 'BedMachine_to_MPAS_weights.nc',
-                    '--method', 'conserve',
-                    "-i", "-64bit_offset",
-                    "--dst_regional", "--src_regional", '--netcdf4']
-            check_call(args, logger=logger)
-
-        if exists('measures_to_MPAS_weights.nc'):
-            logger.info('measures_to_MPAS_weights.nc exists; skipping')
-        else:
-            logger.info('generating gridded dataset -> MPAS weights')
-            args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen', '--source',
-                    data_path + 'greenland_vel_mosaic500.scrip.nc',
-                    '--destination',
-                    'GIS.scrip.nc',
-                    '--weight', 'measures_to_MPAS_weights.nc',
-                    '--method', 'conserve',
-                    "-i", "-64bit_offset", '--netcdf4',
-                    "--dst_regional", "--src_regional", '--ignore_unmapped']
-            check_call(args, logger=logger)
-
-        # interpolate fields from BedMachine and Measures
-        # Using conservative remapping
-        logger.info('calling interpolate_to_mpasli_grid.py')
-        args = ['interpolate_to_mpasli_grid.py', '-s',
-                data_path + '/BedMachineGreenland-v5_edits_floodFill_extrap.nc',  # noqa
-                '-d', 'GIS.nc', '-m', 'e',
-                '-w', 'BedMachine_to_MPAS_weights.nc']
-        check_call(args, logger=logger)
-
-        logger.info('calling interpolate_to_mpasli_grid.py')
-        args = ['interpolate_to_mpasli_grid.py', '-s',
-                data_path + '/greenland_vel_mosaic500_extrap.nc',
-                '-d', 'GIS.nc', '-m', 'e',
-                '-w', 'measures_to_MPAS_weights.nc',
-                '-v', 'observedSurfaceVelocityX',
-                'observedSurfaceVelocityY',
-                'observedSurfaceVelocityUncertainty']
-        check_call(args, logger=logger)
-
-        logger.info('Marking domain boundaries dirichlet')
-        args = ['mark_domain_boundaries_dirichlet.py',
-                '-f', 'GIS.nc']
-        check_call(args, logger=logger)
-
+            mesh_name=self.mesh_filename, section_name=section_name,
+            gridded_dataset=source_gridded_dataset_1km,
+            projection=src_proj, geojson_file=None)
+
+        dst_scrip_file = f"{self.mesh_filename.split('.')[:-1][0]}_scrip.nc"
+        scrip_from_mpas(self.mesh_filename, dst_scrip_file)
+
+        # Now perform bespoke interpolation of geometry and velocity data
+        # from their respective sources
+        interp_gridded2mali(self, bedmachine_dataset, dst_scrip_file, nProcs,
+                            self.mesh_filename, src_proj, variables="all")
+
+        # only interpolate a subset of MEASURES variables onto the MALI mesh
+        measures_vars = ['observedSurfaceVelocityX',
+                         'observedSurfaceVelocityY',
+                         'observedSurfaceVelocityUncertainty']
+        interp_gridded2mali(self, measures_dataset, dst_scrip_file, nProcs,
+                            self.mesh_filename, src_proj,
+                            variables=measures_vars)
+
+        # perform some final cleanup details
+        clean_up_after_interp(self.mesh_filename)
+
+        # create graph file
         logger.info('creating graph.info')
-        make_graph_file(mesh_filename=mesh_name,
+        make_graph_file(mesh_filename=self.mesh_filename,
                         graph_filename='graph.info')
 
-        # Create a backup in case clean-up goes awry
-        copyfile('GIS.nc', 'GIS_backup.nc')
-
-        # Clean up: trim to iceMask and set large velocity
-        # uncertainties where appropriate.
-        data = netCDF4.Dataset('GIS.nc', 'r+')
-        data.set_auto_mask(False)
-        data.variables['thickness'][:] *= (data.variables['iceMask'][:] > 1.5)
-
-        mask = np.logical_or(
-            np.isnan(
-                data.variables['observedSurfaceVelocityUncertainty'][:]),
-            data.variables['thickness'][:] < 1.0)
-        mask = np.logical_or(
-            mask,
-            data.variables['observedSurfaceVelocityUncertainty'][:] == 0.0)
-        data.variables['observedSurfaceVelocityUncertainty'][0, mask[0, :]] = 1.0  # noqa
 
         # create region masks
         mask_filename = f'{mesh_name[:-3]}_ismip6_regionMasks.nc'
@@ -203,6 +137,9 @@ def run(self):
                                 'westCentralGreenland'],
                           all_tags=False)
 
+        # is there a way to encompass this in an existing framework function?
+        data = netCDF4.Dataset('GIS.nc', 'r+')
+        data.set_auto_mask(False)
         # Ensure basalHeatFlux is positive
         data.variables['basalHeatFlux'][:] = np.abs(
             data.variables['basalHeatFlux'][:])
diff --git a/compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg b/compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg
index 77ca7e42b5..a586ea4099 100644
--- a/compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg
+++ b/compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg
@@ -1,9 +1,6 @@
 # config options for high_res_mesh test case
 [mesh]
 
-# path to directory containing BedMachine and Measures datasets
-data_path = /global/cfs/cdirs/fanssie/standard_datasets/GIS_datasets/
-
 # number of levels in the mesh
 levels = 10
 
@@ -50,3 +47,23 @@ use_speed = True
 use_dist_to_grounding_line = False
 use_dist_to_edge = True
 use_bed = True
+
+[greenland]
+# path to directory containing BedMachine and Measures datasets
+# (default value is for Perlmutter)
+data_path = /global/cfs/cdirs/fanssie/standard_datasets/GIS_datasets/
+
+# filename of the BedMachine thickness and bedTopography dataset
+# (default value is for Perlmutter)
+bedmachine_filename = BedMachineGreenland-v5_edits_floodFill_extrap.nc
+
+# filename of the MEASURES ice velocity dataset
+# (default value is for Perlmutter)
+measures_filename = greenland_vel_mosaic500_extrap.nc
+
+# projection of the source datasets, according to the dictionary keys in
+# create_SCRIP_file_from_planar_rectangular_grid.py from MPAS_Tools
+src_proj = gis-gimp
+
+# number of processors to use for ESMF_RegridWeightGen
+nProcs = 128

From 0771a66f0ca7e93b3516e48e1d56c10aa78cdd8f Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Thu, 12 Sep 2024 08:51:55 -0600
Subject: [PATCH 17/41] update GIS region mask names

---
 compass/landice/tests/greenland/mesh.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/compass/landice/tests/greenland/mesh.py b/compass/landice/tests/greenland/mesh.py
index 80668d6a84..c85d8646d3 100644
--- a/compass/landice/tests/greenland/mesh.py
+++ b/compass/landice/tests/greenland/mesh.py
@@ -41,8 +41,10 @@ def __init__(self, test_case):
         self.mesh_filename = 'GIS.nc'
         self.add_output_file(filename='graph.info')
         self.add_output_file(filename=self.mesh_filename)
-        self.add_output_file(filename=f'{self.mesh_filename[:-3]}_'
-                             f'regionMasks.nc')
+        self.add_output_file(
+            filename=f'{self.mesh_filename[:-3]}_ismip6_regionMasks.nc')
+        self.add_output_file(
+            filename=f'{self.mesh_filename[:-3]}_zwally_regionMasks.nc')
         # input files
         self.add_input_file(
             filename='greenland_1km_2024_01_29.epsg3413.icesheetonly.nc',
@@ -118,14 +120,14 @@ def run(self):
                         graph_filename='graph.info')
 
         # create region masks
-        mask_filename = f'{mesh_name[:-3]}_ismip6_regionMasks.nc'
-        make_region_masks(self, mesh_name, mask_filename,
+        mask_filename = f'{self.mesh_filename[:-3]}_ismip6_regionMasks.nc'
+        make_region_masks(self, self.mesh_filename, mask_filename,
                           self.cpus_per_task,
                           tags=["Greenland", "ISMIP6", "Shelf"],
                           component='ocean')
 
-        mask_filename = f'{mesh_name[:-3]}_zwally_regionMasks.nc'
-        make_region_masks(self, mesh_name, mask_filename,
+        mask_filename = f'{self.mesh_filename[:-3]}_zwally_regionMasks.nc'
+        make_region_masks(self, self.mesh_filename, mask_filename,
                           self.cpus_per_task,
                           tags=['eastCentralGreenland',
                                 'northEastGreenland',

From 2a8c32848fd3026f1466541162254c2a67f321ae Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Thu, 12 Sep 2024 11:56:59 -0600
Subject: [PATCH 18/41] Reorder and better comment Antarctic `mesh_gen`
 testcase.

---
 compass/landice/mesh.py                  |  4 ++-
 compass/landice/tests/antarctica/mesh.py | 33 ++++++++++++------------
 2 files changed, 20 insertions(+), 17 deletions(-)

diff --git a/compass/landice/mesh.py b/compass/landice/mesh.py
index 2f9712eb12..9b0c2c43b1 100644
--- a/compass/landice/mesh.py
+++ b/compass/landice/mesh.py
@@ -442,7 +442,7 @@ def get_dist_to_edge_and_gl(self, thk, topg, x, y,
     elif window_size < min(high_dist, high_dist_bed):
         logger.info('WARNING: window_size was set to a value smaller'
                     ' than high_dist and/or high_dist_bed. Resetting'
-                    f' window_size to {max(high_dist, high_dist_bed)},'
+                    f' window_size to {max(high_dist, high_dist_bed)}, '
                     ' which is max(high_dist, high_dist_bed)')
         window_size = max(high_dist, high_dist_bed)
 
@@ -969,6 +969,8 @@ def preprocess_ais_data(self, source_gridded_dataset,
             tic = time.perf_counter()
             logger.info(f"Beginning interpolation for {field}")
+            # NOTE: Do not need to evaluate the extrapolator at all gridcells.
+            # Only needed for ice-free gricells, since it's NN extrapolation
             data.variables[field][0, :] = interp(xGrid, yGrid)
             toc = time.perf_counter()
             logger.info(f"Interpolation completed in {toc - tic} seconds")
diff --git a/compass/landice/tests/antarctica/mesh.py b/compass/landice/tests/antarctica/mesh.py
index f314ea6e66..e1cc93e63e 100644
--- a/compass/landice/tests/antarctica/mesh.py
+++ b/compass/landice/tests/antarctica/mesh.py
@@ -43,10 +43,10 @@ def __init__(self, test_case):
         self.mesh_filename = 'Antarctica.nc'
         self.add_output_file(filename='graph.info')
         self.add_output_file(filename=self.mesh_filename)
-        self.add_output_file(filename=f'{self.mesh_filename[:-3]}_'
-                             f'imbie_regionMasks.nc')
-        self.add_output_file(filename=f'{self.mesh_filename[:-3]}_'
-                             f'ismip6_regionMasks.nc')
+        self.add_output_file(
+            filename=f'{self.mesh_filename[:-3]}_imbie_regionMasks.nc')
+        self.add_output_file(
+            filename=f'{self.mesh_filename[:-3]}_ismip6_regionMasks.nc')
         self.add_input_file(
             filename='antarctica_8km_2024_01_29.nc',
             target='antarctica_8km_2024_01_29.nc',
@@ -86,12 +86,6 @@ def run(self):
                 self, section_name=section_name,
                 gridded_dataset=bm_updated_gridded_dataset)
 
-        # Preprocess the gridded AIS source datasets to work
-        # with the rest of the workflow
-        logger.info('calling preprocess_ais_data')
-        preprocessed_gridded_dataset = preprocess_ais_data(
-            self, bm_updated_gridded_dataset, floodFillMask)
-
         # Now build the base mesh and perform the standard interpolation
         build_mali_mesh(
             self, cell_width, x1, y1, geom_points, geom_edges,
@@ -113,12 +107,19 @@ def run(self):
         data.variables['iceMask'][:] = 0.
         data.close()
 
-        # interpolate fields from composite dataset
-        # Note: this was already done in build_mali_mesh() using
-        # bilinear interpolation. Redoing it here again is likely
-        # not needed. Also, it should be assessed if bilinear or
-        # barycentric used here is preferred for this application.
-        # Current thinking is they are both equally appropriate.
+        # Preprocess the gridded AIS source datasets to work
+        # with the rest of the workflow
+        logger.info('calling preprocess_ais_data')
+        preprocessed_gridded_dataset = preprocess_ais_data(
+            self, bm_updated_gridded_dataset, floodFillMask)
+
+        # interpolate fields from *preprocessed* composite dataset
+        # NOTE: while this has already been done in `build_mali_mesh()`
+        # we are using an updated version of the gridded dataset here,
+        # which has had unit conversion and extrapolation done.
+        # Also, it should be assessed if bilinear or
+        # barycentric used here is preferred for this application.
+        # Current thinking is they are both equally appropriate.
         logger.info('calling interpolate_to_mpasli_grid.py')
         args = ['interpolate_to_mpasli_grid.py',
                 '-s', preprocessed_gridded_dataset,

From 6a0cee8495ca68489c6ec865881a0e095d4e32e8 Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Thu, 12 Sep 2024 13:51:22 -0600
Subject: [PATCH 19/41] Switch to `xarray` for postprocessing the mesh

---
 compass/landice/tests/greenland/mesh.py | 31 +++++++++++++------------
 1 file changed, 16 insertions(+), 15 deletions(-)

diff --git a/compass/landice/tests/greenland/mesh.py b/compass/landice/tests/greenland/mesh.py
index c85d8646d3..217934443d 100644
--- a/compass/landice/tests/greenland/mesh.py
+++ b/compass/landice/tests/greenland/mesh.py
@@ -1,7 +1,7 @@
 import os
 
-import netCDF4
 import numpy as np
+import xarray as xr
 from mpas_tools.scrip.from_mpas import scrip_from_mpas
 
 from compass.landice.mesh import (
@@ -139,21 +139,22 @@ def run(self):
                                 'westCentralGreenland'],
                           all_tags=False)
 
-        # is there a way to encompass this in an existing framework function?
-        data = netCDF4.Dataset('GIS.nc', 'r+')
-        data.set_auto_mask(False)
+        # Do some final validation of the mesh
+        ds = xr.open_dataset(self.mesh_filename)
         # Ensure basalHeatFlux is positive
-        data.variables['basalHeatFlux'][:] = np.abs(
-            data.variables['basalHeatFlux'][:])
+        ds["basalHeatFlux"] = np.abs(ds.basalHeatFlux)
         # Ensure reasonable dHdt values
-        dHdt = data.variables["observedThicknessTendency"][:]
-        dHdtErr = data.variables["observedThicknessTendencyUncertainty"][:]
+        dHdt = ds["observedThicknessTendency"]
         # Arbitrary 5% uncertainty; improve this later
         dHdtErr = np.abs(dHdt) * 0.05
-        # large uncertainty where data is missing
-        dHdtErr[np.abs(dHdt) > 1.0] = 1.0
-        dHdt[np.abs(dHdt) > 1.0] = 0.0  # Remove ridiculous values
-        data.variables["observedThicknessTendency"][:] = dHdt
-        data.variables["observedThicknessTendencyUncertainty"][:] = dHdtErr
-
-        data.close()
+        # Use threshold of |dHdt| > 1.0 to determine invalid data
+        mask = np.abs(dHdt) > 1.0
+        # Assign 100% uncertainty where data is missing
+        dHdtErr = dHdtErr.where(~mask, 1.0)
+        # Remove ridiculous values
+        dHdt = dHdt.where(~mask, 0.0)
+        # Put the updated fields back in the dataset
+        ds["observedThicknessTendency"] = dHdt
+        ds["observedThicknessTendencyUncertainty"] = dHdtErr
+        # Write the data to disk
+        ds.to_netcdf(self.mesh_filename, 'a')

From b2816442e93dfede723fe35425036d01a3ebcc34 Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Fri, 13 Sep 2024 14:41:46 -0600
Subject: [PATCH 20/41] Switch to list concatenation for variables
 specification.

Add the list of requested variables to the `args` list by concatenating.
Previously I was manually creating the space-separated list (i.e. a string)
when a list of variables was passed to the gridded interpolator function.
This space-separated string was not correctly parsed by the
`mpas_tools.logging.check_call` function, resulting in the variables not
being interpolated.
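In other words, each variable name must arrive as its own entry in the
argument list handed to the subprocess. A sketch of the difference:

    variables = ['observedSurfaceVelocityX', 'observedSurfaceVelocityY']

    # broken: one space-separated string becomes a single argv entry, so the
    # CLI sees one (invalid) variable name
    args = ['interpolate_to_mpasli_grid.py', '-v', ' '.join(variables)]

    # fixed: list concatenation keeps each variable name a separate argv entry
    args = ['interpolate_to_mpasli_grid.py', '-v'] + variables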
---
 compass/landice/mesh.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/compass/landice/mesh.py b/compass/landice/mesh.py
index 9b0c2c43b1..07bc4833b1 100644
--- a/compass/landice/mesh.py
+++ b/compass/landice/mesh.py
@@ -1050,11 +1050,12 @@ def __guess_scrip_name(filename):
     source_scrip = __guess_scrip_name(os.path.basename(source_file))
     weights_filename = "gridded_to_MPAS_weights.nc"
 
-    if variables != "all":
-        # make sure this is a list
-
-        # if list, then join the list making it a space seprated list for cli
-        variables = " ".join(variables)
+    # make sure variables is a list, encompasses the variables="all" case
+    if isinstance(variables, str):
+        variables = [variables]
+    if not isinstance(variables, list):
+        raise TypeError("Argument 'variables' is of incorrect type; must be"
+                        " either the string 'all' or a list of strings")
 
     logger.info('creating scrip file for source dataset')
     # Note: writing scrip file to workdir
@@ -1085,7 +1086,7 @@ def __guess_scrip_name(filename):
             '-d', dest_file,
             '-m', 'e',
             '-w', weights_filename,
-            '-v', variables]
+            '-v'] + variables
 
     check_call(args, logger=logger)

From bdae36278dd6312c9d46d6069b1b7e26507efc2c Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Fri, 13 Sep 2024 14:41:23 -0700
Subject: [PATCH 21/41] Add documentation for unified framework and testcases

---
 docs/developers_guide/landice/framework.rst        | 11 ++++------
 .../landice/test_groups/antarctica.rst             | 14 ++++++++++++-
 .../landice/test_groups/greenland.rst              | 20 +++++++++++++++++++
 3 files changed, 37 insertions(+), 8 deletions(-)

diff --git a/docs/developers_guide/landice/framework.rst b/docs/developers_guide/landice/framework.rst
index 02123c5b22..daafe9b50d 100644
--- a/docs/developers_guide/landice/framework.rst
+++ b/docs/developers_guide/landice/framework.rst
@@ -56,13 +56,10 @@ clean up steps after interpolation for the AIS mesh case.
 :py:func:`compass.landice.mesh.gridded_flood_fill()` applies a flood-fill
 algorithm to the gridded dataset in order to separate the ice sheet from
 peripheral ice.
 
-:py:func:`compass.landice.mesh.interp_ais_bedmachine()` interpolates BedMachine
-thickness and bedTopography dataset to a MALI mesh, accounting for masking of
-the ice extent to avoid interpolation ramps.
-
-:py:func:`compass.landice.mesh.interp_ais_interp_ais_measures()` interpolates
-MEASURES ice velocity dataset to a MALI mesh, accounting for masking at the ice
-edge and extrapolation.
+:py:func:`compass.landice.mesh.interp_gridded2mali()` interpolates gridded data
+(e.g. BedMachine thickness or MEASURES ice velocity) to a MALI mesh, accounting
+for masking of the ice extent to avoid interpolation ramps. This function works
+for either the Antarctic or Greenland ice sheets.
 
 :py:func:`compass.landice.mesh.preprocess_ais_data()` performs adjustments to
 gridded AIS datasets needed for rest of compass workflow to utilize them.
diff --git a/docs/users_guide/landice/test_groups/antarctica.rst b/docs/users_guide/landice/test_groups/antarctica.rst
index f0a0629e80..cef797f9ee 100644
--- a/docs/users_guide/landice/test_groups/antarctica.rst
+++ b/docs/users_guide/landice/test_groups/antarctica.rst
@@ -73,7 +73,19 @@ the mesh generation options are adjusted through the config file.
     # path to directory containing BedMachine and Measures datasets
     # (default value is for Perlmutter)
     data_path = /global/cfs/cdirs/fanssie/standard_datasets/AIS_datasets
-
+
+    # filename of the BedMachine thickness and bedTopography dataset
+    # (default value is for Perlmutter)
+    bedmachine_filename = BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc
+
+    # filename of the MEASURES ice velocity dataset
+    # (default value is for Perlmutter)
+    measures_filename = antarctica_ice_velocity_450m_v2_edits_extrap.nc
+
+    # projection of the source datasets, according to the dictionary keys in
+    # create_SCRIP_file_from_planar_rectangular_grid.py from MPAS_Tools
+    src_proj = ais-bedmap2
+
     # number of processors to use for ESMF_RegridWeightGen
     nProcs = 128
 
diff --git a/docs/users_guide/landice/test_groups/greenland.rst b/docs/users_guide/landice/test_groups/greenland.rst
index 902baa0161..146119b909 100644
--- a/docs/users_guide/landice/test_groups/greenland.rst
+++ b/docs/users_guide/landice/test_groups/greenland.rst
@@ -86,6 +86,26 @@ The other test cases do not use config options.
     use_dist_to_edge = True
     use_bed = True
 
+    [greenland]
+    # path to directory containing BedMachine and Measures datasets
+    # (default value is for Perlmutter)
+    data_path = /global/cfs/cdirs/fanssie/standard_datasets/GIS_datasets/
+
+    # filename of the BedMachine thickness and bedTopography dataset
+    # (default value is for Perlmutter)
+    bedmachine_filename = BedMachineGreenland-v5_edits_floodFill_extrap.nc
+
+    # filename of the MEASURES ice velocity dataset
+    # (default value is for Perlmutter)
+    measures_filename = greenland_vel_mosaic500_extrap.nc
+
+    # projection of the source datasets, according to the dictionary keys in
+    # create_SCRIP_file_from_planar_rectangular_grid.py from MPAS_Tools
+    src_proj = gis-gimp
+
+    # number of processors to use for ESMF_RegridWeightGen
+    nProcs = 128
+
 smoke_test
 ----------

From 626f11a6a97ea5a8f3d681c58d39156d193b71de Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Fri, 13 Sep 2024 15:06:40 -0700
Subject: [PATCH 22/41] Update framework API documentation

---
 docs/developers_guide/landice/api.rst | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/docs/developers_guide/landice/api.rst b/docs/developers_guide/landice/api.rst
index 456dce84ae..ae9736f688 100644
--- a/docs/developers_guide/landice/api.rst
+++ b/docs/developers_guide/landice/api.rst
@@ -492,8 +492,7 @@ Landice Framework
       mesh.add_bedmachine_thk_to_ais_gridded_data
       mesh.clean_up_after_interp
       mesh.gridded_flood_fill
-      mesh.interp_ais_bedmachine
-      mesh.interp_ais_measures
+      mesh.interp_gridded2mali
       mesh.mpas_flood_fill
       mesh.preprocess_ais_data
       mesh.set_rectangular_geom_points_and_edges

From 3ec3a7b4115583a9a3cf2f666e41513649f52709 Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Thu, 25 Jul 2024 06:41:20 -0700
Subject: [PATCH 23/41] Update Compass to v1.5.0-alpha.1

---
 compass/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/compass/version.py b/compass/version.py
index 91e460c183..1b2e6af6b7 100644
--- a/compass/version.py
+++ b/compass/version.py
@@ -1 +1 @@
-__version__ = '1.4.0-alpha.7'
+__version__ = '1.5.0-alpha.1'

From ec3e55d6e3a2a7693c1504fede0ad5ffc220bb71 Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Thu, 25 Jul 2024 06:42:56 -0700
Subject: [PATCH 24/41] Update mache to v1.25.0

---
 conda/compass_env/spec-file.template | 2 +-
 conda/configure_compass_env.py       | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/conda/compass_env/spec-file.template b/conda/compass_env/spec-file.template
index e7ae69b96d..05142ac1f0 100644
--- a/conda/compass_env/spec-file.template
+++ b/conda/compass_env/spec-file.template
@@ -16,7 +16,7 @@ ipython
 jupyter
 lxml
 {% if include_mache %}
-mache=1.23.0
+mache=1.25.0
 {% endif %}
 matplotlib-base >=3.9.1
 metis
diff --git a/conda/configure_compass_env.py b/conda/configure_compass_env.py
index 6ad866196b..9db9c2c518 100755
--- a/conda/configure_compass_env.py
+++ b/conda/configure_compass_env.py
@@ -100,7 +100,7 @@ def main():
     if local_mache:
         mache = ''
     else:
-        mache = '"mache=1.23.0"'
+        mache = '"mache=1.25.0"'
 
     setup_install_env(env_name, activate_base, args.use_local, logger,
                       args.recreate, conda_base, mache)

From 25d74f0595ffdb97a5c41a5b9651213ab28fe35d Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Mon, 1 Jul 2024 04:21:45 -0700
Subject: [PATCH 25/41] Add pm-gpu

---
 compass/machines/pm-gpu.cfg | 44 +++++++++++++++++++++++++++++++++++++
 conda/albany_supported.txt  |  1 +
 conda/unsupported.txt       |  3 ++-
 3 files changed, 47 insertions(+), 1 deletion(-)
 create mode 100644 compass/machines/pm-gpu.cfg

diff --git a/compass/machines/pm-gpu.cfg b/compass/machines/pm-gpu.cfg
new file mode 100644
index 0000000000..b82faf5189
--- /dev/null
+++ b/compass/machines/pm-gpu.cfg
@@ -0,0 +1,44 @@
+
+# The paths section describes paths that are used within the ocean core test
+# cases.
+[paths]
+
+# A shared root directory where MPAS standalone data can be found
+database_root = /global/cfs/cdirs/e3sm/mpas_standalonedata
+
+# the path to the base conda environment where compass environments have
+# been created
+compass_envs = /global/common/software/e3sm/compass/pm-gpu/base
+
+
+# Options related to deploying a compass conda environment on supported
+# machines
+[deploy]
+
+# the compiler set to use for system libraries and MPAS builds
+compiler = gnugpu
+
+# the system MPI library to use for gnugpu compiler
+mpi_gnugpu = mpich
+
+# the system MPI library to use for nvidiagpu compiler
+mpi_nvidiagpu = mpich
+
+# the base path for spack environments used by compass
+spack = /global/cfs/cdirs/e3sm/software/compass/pm-gpu/spack
+
+# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and
+# pnetcdf as E3SM (spack modules are used otherwise)
+use_e3sm_hdf5_netcdf = True
+
+
+# The parallel section describes options related to running jobs in parallel.
+# Most options in this section come from mache so here we just add or override
+# some defaults
+[parallel]
+
+# cores per node on the machine
+cores_per_node = 64
+
+# threads per core (set to 1 because trying to hyperthread seems to be causing
+# hanging on perlmutter)
+threads_per_core = 1
diff --git a/conda/albany_supported.txt b/conda/albany_supported.txt
index 12ce2817af..d969e7a20a 100644
--- a/conda/albany_supported.txt
+++ b/conda/albany_supported.txt
@@ -3,4 +3,5 @@
 chicoma-cpu, gnu, mpich
 chrysalis, gnu, openmpi
 pm-cpu, gnu, mpich
+pm-gpu, gnugpu, mpich
 morpheus, gnu, openmpi
diff --git a/conda/unsupported.txt b/conda/unsupported.txt
index 5014d526ea..cfce62076d 100644
--- a/conda/unsupported.txt
+++ b/conda/unsupported.txt
@@ -15,7 +15,8 @@ compy, pgi, mvapich2
 pm-cpu, nvidia, mpich
 pm-cpu, aocc, mpich
 pm-cpu, amdclang, mpich
-
+pm-gpu, gnu, mpich
+pm-gpu, nvidia, mpich
 
 # compiles but tests unreliable (errors or hanging),
 # see https://github.com/MPAS-Dev/compass/issues/336

From ebd5953c13a094c8bc6853807c1a128616a6ad38 Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Thu, 25 Jul 2024 06:52:24 -0700
Subject: [PATCH 26/41] Add `gpus-per-node` to job scripts and resources
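With this change, a machine config file can set, for example (hypothetical
value, shown only for illustration):

    [parallel]
    # the number of GPUs per compute node
    gpus_per_node = 4

and the rendered job script will then contain a matching
`#SBATCH --gpus-per-node=4` line.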
---
 compass/job/__init__.py         | 9 ++++++++-
 compass/job/job_script.template | 3 +++
 compass/parallel.py             | 5 +++++
 3 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/compass/job/__init__.py b/compass/job/__init__.py
index 21ec57f582..151d951cb2 100644
--- a/compass/job/__init__.py
+++ b/compass/job/__init__.py
@@ -93,6 +93,12 @@ def write_job_script(config, machine, target_cores, min_cores, work_dir,
             job_name = 'compass'
         else:
             job_name = f'compass_{suite}'
+
+    if config.has_option('parallel', 'gpus_per_node'):
+        gpus_per_node = config.get('parallel', 'gpus_per_node')
+    else:
+        gpus_per_node = ''
+
     wall_time = config.get('job', 'wall_time')
 
     template = Template(resources.read_text(
@@ -101,7 +107,8 @@ def write_job_script(config, machine, target_cores, min_cores, work_dir,
     text = template.render(job_name=job_name, account=account,
                            nodes=f'{nodes}', wall_time=wall_time, qos=qos,
                            partition=partition, constraint=constraint,
-                           reservation=reservation, suite=suite,
+                           reservation=reservation,
+                           gpus_per_node=gpus_per_node, suite=suite,
                            pre_run_commands=pre_run_commands,
                            post_run_commands=post_run_commands)
     text = _clean_up_whitespace(text)
diff --git a/compass/job/job_script.template b/compass/job/job_script.template
index 37a384a280..fec4d599df 100644
--- a/compass/job/job_script.template
+++ b/compass/job/job_script.template
@@ -19,6 +19,9 @@
 {% if constraint != '' -%}
 #SBATCH --constraint={{ constraint }}
 {%- endif %}
+{% if gpus_per_node != '' -%}
+#SBATCH --gpus-per-node={{ gpus_per_node }}
+{%- endif %}
 
 source load_compass_env.sh
 {{ pre_run_commands }}
diff --git a/compass/parallel.py b/compass/parallel.py
index cf49c930ab..08d0e7d804 100644
--- a/compass/parallel.py
+++ b/compass/parallel.py
@@ -66,6 +66,11 @@ def get_available_parallel_resources(config):
         cores_per_node=cores_per_node,
         mpi_allowed=mpi_allowed
     )
+
+    if config.has_option('parallel', 'gpus_per_node'):
+        available_resources['gpus_per_node'] = \
+            config.getint('parallel', 'gpus_per_node')
+
     return available_resources

From b04ee856d37a8b263da468d44cb778ef390fc438 Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Mon, 5 Aug 2024 06:24:54 -0700
Subject: [PATCH 27/41] Fix local mache install

---
 conda/bootstrap.py             | 5 +++--
 conda/configure_compass_env.py | 1 +
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/conda/bootstrap.py b/conda/bootstrap.py
index c113fb0e59..33228645dc 100755
--- a/conda/bootstrap.py
+++ b/conda/bootstrap.py
@@ -1082,8 +1082,9 @@ def main():  # noqa: C901
             print('Install local mache\n')
             commands = f'source {conda_base}/etc/profile.d/conda.sh && ' \
                        f'conda activate {conda_env_name} && ' \
-                       'cd ../build_mache/mache && ' \
-                       'python -m pip install --no-deps .'
+                       f'cd ../build_mache/mache && ' \
+                       f'conda install -y --file spec-file.txt && ' \
+                       f'python -m pip install --no-deps .'
 
             check_call(commands, logger=logger)
 
             previous_conda_env = conda_env_name
diff --git a/conda/configure_compass_env.py b/conda/configure_compass_env.py
index 9db9c2c518..bb2eb51f8f 100755
--- a/conda/configure_compass_env.py
+++ b/conda/configure_compass_env.py
@@ -114,6 +114,7 @@ def main():
                    f'git clone -b {args.mache_branch} ' \
                    f'git@github.com:{args.mache_fork}.git mache && ' \
                    f'cd mache && ' \
+                   f'conda install -y --file spec-file.txt && ' \
                    f'python -m pip install --no-deps .'
 
         check_call(commands, logger=logger)

From 077b42bcf670113f5c399fced3bdb772e4c4a54f Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Mon, 5 Aug 2024 06:25:23 -0700
Subject: [PATCH 28/41] Add cuda to Albany and Trilinos builds for GPU
 machines

---
 conda/bootstrap.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/conda/bootstrap.py b/conda/bootstrap.py
index 33228645dc..f746194ac6 100755
--- a/conda/bootstrap.py
+++ b/conda/bootstrap.py
@@ -476,6 +476,9 @@ def build_spack_env(config, update_spack, machine, compiler, mpi,  # noqa: C901
     scorpio = config.get('deploy', 'scorpio')
     parallelio = config.get('deploy', 'parallelio')
 
+    # for now, we'll assume Cuda is needed anytime GPUs are present
+    with_cuda = config.has_option('parallel', 'gpus_per_node')
+
     if config.has_option('deploy', 'spack_mirror'):
         spack_mirror = config.get('deploy', 'spack_mirror')
     else:
@@ -536,8 +539,12 @@ def build_spack_env(config, update_spack, machine, compiler, mpi,  # noqa: C901
                 f'@{parallelio}+pnetcdf~timing"')
 
     if albany != 'None':
-        specs.append(f'"trilinos-for-albany@{albany}"')
-        specs.append(f'"albany@{albany}+mpas~py+unit_tests"')
+        if with_cuda:
+            cuda = '+cuda+uvm'
+        else:
+            cuda = ''
+        specs.append(f'"trilinos-for-albany@{albany}{cuda}"')
+        specs.append(f'"albany@{albany}+mpas~py+unit_tests{cuda}"')
 
     yaml_template = f'{spack_template_path}/{machine}_{compiler}_{mpi}.yaml'
     if not os.path.exists(yaml_template):

From 27b9975075677458486d52da79bb7ffb3e4fe98c Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Tue, 13 Aug 2024 08:27:16 -0700
Subject: [PATCH 29/41] Add +sfad variant to albany spack build with cuda

---
 conda/bootstrap.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/conda/bootstrap.py b/conda/bootstrap.py
index f746194ac6..ff2f742096 100755
--- a/conda/bootstrap.py
+++ b/conda/bootstrap.py
@@ -540,11 +540,13 @@ def build_spack_env(config, update_spack, machine, compiler, mpi,  # noqa: C901
 
     if albany != 'None':
         if with_cuda:
-            cuda = '+cuda+uvm'
+            albany_cuda = '+cuda+uvm+sfad sfadsize=12'
+            trilinos_cuda = '+cuda+uvm'
         else:
-            cuda = ''
-        specs.append(f'"trilinos-for-albany@{albany}{cuda}"')
-        specs.append(f'"albany@{albany}+mpas~py+unit_tests{cuda}"')
+            albany_cuda = ''
+            trilinos_cuda = ''
+        specs.append(f'"trilinos-for-albany@{albany}{trilinos_cuda}"')
+        specs.append(f'"albany@{albany}+mpas~py+unit_tests{albany_cuda}"')
 
     yaml_template = f'{spack_template_path}/{machine}_{compiler}_{mpi}.yaml'
     if not os.path.exists(yaml_template):
From 34aa147047eb483e6ce68c4216830b2b1b3ede3f Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Tue, 10 Sep 2024 02:08:26 -0700
Subject: [PATCH 30/41] Update conda and spack deps

---
 conda/compass_env/spec-file.template | 4 ++--
 conda/default.cfg                    | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/conda/compass_env/spec-file.template b/conda/compass_env/spec-file.template
index 05142ac1f0..ee6e0e7b11 100644
--- a/conda/compass_env/spec-file.template
+++ b/conda/compass_env/spec-file.template
@@ -49,8 +49,8 @@ cmake
 cxx-compiler
 fortran-compiler
 libnetcdf=4.9.2={{ mpi_prefix }}_*
-libpnetcdf=1.12.3={{ mpi_prefix }}_*
-parallelio=2.6.2={{ mpi_prefix }}_*
+libpnetcdf=1.13.0={{ mpi_prefix }}_*
+parallelio=2.6.3={{ mpi_prefix }}_*
 m4
 make
 {{ mpi }}
diff --git a/conda/default.cfg b/conda/default.cfg
index 83e13ee115..6cae25589b 100644
--- a/conda/default.cfg
+++ b/conda/default.cfg
@@ -29,9 +29,9 @@ lapack = 3.9.1
 metis = 5.1.0
 moab = 5.5.1
 netcdf_c = 4.9.2
-netcdf_fortran = 4.6.0
+netcdf_fortran = 4.6.1
 petsc = 3.19.1
-pnetcdf = 1.12.3
-scorpio = 1.6.3
-# parallelio = 2.6.2
+pnetcdf = 1.13.0
+scorpio = 1.6.5
+# parallelio = 2.6.3
 parallelio = None

From b11239500e40bfc3f35638aa7de4fc91dcdb7ceb Mon Sep 17 00:00:00 2001
From: Althea Denlinger
Date: Tue, 17 Sep 2024 15:33:44 -0700
Subject: [PATCH 31/41] Add flags to `pip install` in quick start

---
 docs/developers_guide/quick_start.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/developers_guide/quick_start.rst b/docs/developers_guide/quick_start.rst
index d48ad1cec2..2397e84f30 100644
--- a/docs/developers_guide/quick_start.rst
+++ b/docs/developers_guide/quick_start.rst
@@ -382,7 +382,7 @@ switch branches.
 
 .. code-block:: bash
 
-    python -m pip install -e .
+    python -m pip install --no-deps --no-build-isolation -e .
 
 The activation script will do this automatically when you source it in the
 root directory of your compass branch.  The activation script will also
@@ -446,7 +446,7 @@ to the current directory.
 
 .. code-block:: bash
 
-    python -m pip install -e .
+    python -m pip install --no-deps --no-build-isolation -e .
 
 This will be substantially faster than rerunning
 ``./conda/configure_compass_env.py ...`` but at the risk that dependencies are

From 3d67afb794057b07e42939df7e94ba8626f51a3f Mon Sep 17 00:00:00 2001
From: Althea Denlinger
Date: Mon, 23 Sep 2024 09:35:42 -0700
Subject: [PATCH 32/41] Update `pip install` flags in conda files

---
 conda/bootstrap.py             | 11 ++++++-----
 conda/configure_compass_env.py |  2 +-
 2 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/conda/bootstrap.py b/conda/bootstrap.py
index ff2f742096..0e0d5403ec 100755
--- a/conda/bootstrap.py
+++ b/conda/bootstrap.py
@@ -336,7 +336,7 @@ def build_conda_env(env_type, recreate, mpi, conda_mpi, version,
                 f'git submodule update --init alphaBetaLab &&' \
                 f'cd {source_path}/alphaBetaLab&& ' \
                 f'conda install -y --file dev-spec.txt && ' \
-                f'python -m pip install --no-deps -e .'
+                f'python -m pip install --no-deps --no-build-isolation -e .'
             check_call(commands, logger=logger)
 
     if recreate or update_jigsaw:
@@ -348,7 +348,7 @@ def build_conda_env(env_type, recreate, mpi, conda_mpi, version,
             f'{activate_env} && ' \
             f'cd {source_path} && ' \
             f'rm -rf compass.egg-info && ' \
-            f'python -m pip install --no-deps -e .'
+            f'python -m pip install --no-deps --no-build-isolation -e .'
         check_call(commands, logger=logger)
 
     print('Installing pre-commit\n')
@@ -402,7 +402,7 @@ def build_jigsaw(activate_env, source_path, env_path, logger):
     commands = \
         f'{activate_env} && ' \
         f'cd {source_path}/jigsaw-python && ' \
-        f'python -m pip install --no-deps -e . && ' \
+        f'python -m pip install --no-deps --no-build-isolation -e . && ' \
        f'cp jigsawpy/_bin/* ${{CONDA_PREFIX}}/bin'
     check_call(commands, logger=logger)
 
@@ -688,7 +688,8 @@ def write_load_compass(template_path, activ_path, conda_base, env_type,
         mkdir -p conda/logs
         echo Reinstalling compass package in edit mode...
         rm -rf compass.egg-info
-        python -m pip install --no-deps -e . &> conda/logs/install_compass.log
+        python -m pip install --no-deps --no-build-isolation -e . \\
+            &> conda/logs/install_compass.log
         echo Done.
         echo
     fi
@@ -1093,7 +1094,7 @@ def main():  # noqa: C901
                        f'conda activate {conda_env_name} && ' \
                        f'cd ../build_mache/mache && ' \
                        f'conda install -y --file spec-file.txt && ' \
-                       f'python -m pip install --no-deps .'
+                       f'python -m pip install --no-deps --no-build-isolation .'
 
             check_call(commands, logger=logger)
 
             previous_conda_env = conda_env_name
diff --git a/conda/configure_compass_env.py b/conda/configure_compass_env.py
index bb2eb51f8f..233ac8d0f3 100755
--- a/conda/configure_compass_env.py
+++ b/conda/configure_compass_env.py
@@ -115,7 +115,7 @@ def main():
                    f'git@github.com:{args.mache_fork}.git mache && ' \
                    f'cd mache && ' \
                    f'conda install -y --file spec-file.txt && ' \
-                   f'python -m pip install --no-deps .'
+                   f'python -m pip install --no-deps --no-build-isolation .'
 
         check_call(commands, logger=logger)

From d32d67e61ddb29ec759d875a26a3a53ade5d8c43 Mon Sep 17 00:00:00 2001
From: Althea Denlinger
Date: Mon, 23 Sep 2024 10:18:47 -0700
Subject: [PATCH 33/41] Satisfy linter

---
 conda/bootstrap.py             | 108 +++++++++++++++++++--------------
 conda/configure_compass_env.py |  29 +++++----
 2 files changed, 78 insertions(+), 59 deletions(-)

diff --git a/conda/bootstrap.py b/conda/bootstrap.py
index 0e0d5403ec..1e42a3b5df 100755
--- a/conda/bootstrap.py
+++ b/conda/bootstrap.py
@@ -312,8 +312,9 @@ def build_conda_env(env_type, recreate, mpi, conda_mpi, version,
         # conda packages don't like dashes
         version_conda = version.replace('-', '')
         packages = f'{packages} "compass={version_conda}={mpi_prefix}_*"'
-        commands = f'{activate_base} && ' \
-                   f'conda create -y -n {env_name} {channels} {packages}'
+        commands = \
+            f'{activate_base} && ' \
+            f'conda create -y -n {env_name} {channels} {packages}'
         check_call(commands, logger=logger)
     else:
         if env_type == 'dev':
@@ -408,7 +409,7 @@ def build_jigsaw(activate_env, source_path, env_path, logger):
 
     t1 = time.time()
     total = int(t1 - t0 + 0.5)
-    message = f'JIGSAW install took {total:.1f} s.'
+    message = f'JIGSAW install took {total:.1f} s.'  # noqa: E231
     if logger is None:
         print(message)
     else:
@@ -424,11 +425,12 @@ def get_env_vars(machine, compiler, mpilib):
         env_vars = 'export MPAS_EXTERNAL_LIBS=""\n'
 
     if 'intel' in compiler and machine == 'anvil':
-        env_vars = f'{env_vars}' \
-                   f'export I_MPI_CC=icc\n' \
-                   f'export I_MPI_CXX=icpc\n' \
-                   f'export I_MPI_F77=ifort\n' \
-                   f'export I_MPI_F90=ifort\n'
+        env_vars = \
+            f'{env_vars}' \
+            f'export I_MPI_CC=icc\n' \
+            f'export I_MPI_CXX=icpc\n' \
+            f'export I_MPI_F77=ifort\n' \
+            f'export I_MPI_F90=ifort\n'
 
     if machine.startswith('conda'):
         # we're using parallelio so we don't have ADIOS support
@@ -442,9 +444,10 @@ def get_env_vars(machine, compiler, mpilib):
             f'export MPAS_EXTERNAL_LIBS="${{MPAS_EXTERNAL_LIBS}} -lgomp"\n'
 
     if mpilib == 'mvapich':
-        env_vars = f'{env_vars}' \
-                   f'export MV2_ENABLE_AFFINITY=0\n' \
-                   f'export MV2_SHOW_CPU_BINDING=1\n'
+        env_vars = \
+            f'{env_vars}' \
+            f'export MV2_ENABLE_AFFINITY=0\n' \
+            f'export MV2_SHOW_CPU_BINDING=1\n'
 
     if machine.startswith('chicoma') or machine.startswith('pm'):
         env_vars = \
@@ -525,8 +528,9 @@ def build_spack_env(config, update_spack, machine, compiler, mpi,  # noqa: C901
             f'"scorpio'
             f'@{scorpio}+pnetcdf~timing+internal-timing~tools+malloc"')
         # make sure scorpio, not esmf, libraries are linked
-        lib_path = f'{spack_branch_base}/var/spack/environments/' \
-                   f'{spack_env}/.spack-env/view/lib'
+        lib_path = \
+            f'{spack_branch_base}/var/spack/environments/' \
+            f'{spack_env}/.spack-env/view/lib'
         scorpio_lib_path = '$(spack find --format "{prefix}" scorpio)'
         custom_spack = \
             f'{custom_spack}' \
@@ -572,8 +576,9 @@ def build_spack_env(config, update_spack, machine, compiler, mpi,  # noqa: C901
             custom_spack=custom_spack, spack_mirror=spack_mirror)
 
     # remove ESMC/ESMF include files that interfere with MPAS time keeping
-    include_path = f'{spack_branch_base}/var/spack/environments/' \
-                   f'{spack_env}/.spack-env/view/include'
+    include_path = \
+        f'{spack_branch_base}/var/spack/environments/' \
+        f'{spack_env}/.spack-env/view/include'
     for prefix in ['ESMC', 'esmf']:
         files = glob.glob(os.path.join(include_path, f'{prefix}*'))
         for filename in files:
@@ -587,10 +592,12 @@ def build_spack_env(config, update_spack, machine, compiler, mpi,  # noqa: C901
         include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf,
         yaml_template=yaml_template)
 
-    spack_view = f'{spack_branch_base}/var/spack/environments/' \
-                 f'{spack_env}/.spack-env/view'
+    spack_view = \
+        f'{spack_branch_base}/var/spack/environments/' \
+        f'{spack_env}/.spack-env/view'
 
-    env_vars = f'{env_vars}' \
-               f'export PIO={spack_view}\n'
+    env_vars = \
+        f'{env_vars}' \
+        f'export PIO={spack_view}\n'
     if parallelio != 'None':
         env_vars = \
             f'{env_vars}' \
@@ -619,14 +626,16 @@ def build_spack_env(config, update_spack, machine, compiler, mpi,  # noqa: C901
             f'${{ALBANY_LINK_LIBS}} {stdcxx} {mpicxx}"\n'
 
     if lapack != 'None':
-        env_vars = f'{env_vars}' \
-                   f'export LAPACK={spack_view}\n' \
-                   f'export USE_LAPACK=true\n'
+        env_vars = \
+            f'{env_vars}' \
+            f'export LAPACK={spack_view}\n' \
+            f'export USE_LAPACK=true\n'
 
     if petsc != 'None':
-        env_vars = f'{env_vars}' \
-                   f'export PETSC={spack_view}\n' \
-                   f'export USE_PETSC=true\n'
+        env_vars = \
+            f'{env_vars}' \
+            f'export PETSC={spack_view}\n' \
+            f'export USE_PETSC=true\n'
 
     return spack_branch_base, spack_script, env_vars
 
@@ -636,7 +645,7 @@ def set_ld_library_path(spack_branch_base, spack_env, logger):
         f'source {spack_branch_base}/share/spack/setup-env.sh && ' \
        f'spack env activate {spack_env} && ' \
        f'spack config add modules:prefix_inspections:lib:[LD_LIBRARY_PATH] && ' \
-        f'spack config add modules:prefix_inspections:lib64:[LD_LIBRARY_PATH]'  # noqa: E501
+        f'spack config add modules:prefix_inspections:lib64:[LD_LIBRARY_PATH]'  # noqa: E501,E231
     check_call(commands, logger=logger)
 
@@ -658,21 +667,26 @@ def write_load_compass(template_path, activ_path, conda_base, env_type,
     script_filename = '{}/{}{}.sh'.format(activ_path, prefix, activ_suffix)
 
     if not env_only:
-        env_vars = f'{env_vars}\n' \
-                   f'export USE_PIO2=true'
+        env_vars = \
+            f'{env_vars}\n' \
+            f'export USE_PIO2=true'
     if without_openmp:
-        env_vars = f'{env_vars}\n' \
-                   f'export OPENMP=false'
+        env_vars = \
+            f'{env_vars}\n' \
+            f'export OPENMP=false'
     else:
-        env_vars = f'{env_vars}\n' \
-                   f'export OPENMP=true'
+        env_vars = \
+            f'{env_vars}\n' \
+            f'export OPENMP=true'
 
-    env_vars = f'{env_vars}\n' \
-               f'export HDF5_USE_FILE_LOCKING=FALSE\n' \
-               f'export LOAD_COMPASS_ENV={script_filename}'
+    env_vars = \
+        f'{env_vars}\n' \
+        f'export HDF5_USE_FILE_LOCKING=FALSE\n' \
+        f'export LOAD_COMPASS_ENV={script_filename}'
     if machine is not None and not machine.startswith('conda'):
-        env_vars = f'{env_vars}\n' \
-                   f'export COMPASS_MACHINE={machine}'
+        env_vars = \
+            f'{env_vars}\n' \
+            f'export COMPASS_MACHINE={machine}'
 
     filename = f'{template_path}/load_compass.template'
     with open(filename, 'r') as f:
@@ -1090,11 +1104,12 @@ def main():  # noqa: C901
 
         if local_mache:
             print('Install local mache\n')
-            commands = f'source {conda_base}/etc/profile.d/conda.sh && ' \
-                       f'conda activate {conda_env_name} && ' \
-                       f'cd ../build_mache/mache && ' \
-                       f'conda install -y --file spec-file.txt && ' \
-                       f'python -m pip install --no-deps --no-build-isolation .'
+            commands = \
+                f'source {conda_base}/etc/profile.d/conda.sh && ' \
+                f'conda activate {conda_env_name} && ' \
+                f'cd ../build_mache/mache && ' \
+                f'conda install -y --file spec-file.txt && ' \
+                f'python -m pip install --no-deps --no-build-isolation .'
            check_call(commands, logger=logger)

            previous_conda_env = conda_env_name

@@ -1110,10 +1125,11 @@ def main():  # noqa: C901
                 config, args.update_spack, machine, compiler, mpi, spack_env,
                 spack_base, spack_template_path, env_vars, args.tmpdir,
                 logger)
-            spack_script = f'echo Loading Spack environment...\n' \
-                           f'{spack_script}\n' \
-                           f'echo Done.\n' \
-                           f'echo\n'
+            spack_script = \
+                f'echo Loading Spack environment...\n' \
+                f'{spack_script}\n' \
+                f'echo Done.\n' \
+                f'echo\n'
         else:
             env_vars = \
                 f'{env_vars}' \
diff --git a/conda/configure_compass_env.py b/conda/configure_compass_env.py
index 233ac8d0f3..e2db4855b5 100755
--- a/conda/configure_compass_env.py
+++ b/conda/configure_compass_env.py
@@ -30,8 +30,9 @@ def bootstrap(activate_install_env, source_path, local_conda_build):

     print('Creating the compass conda environment\n')
     bootstrap_command = f'{source_path}/conda/bootstrap.py'
-    command = f'{activate_install_env} && ' \
-              f'{bootstrap_command} {" ".join(sys.argv[1:])}'
+    command = \
+        f'{activate_install_env} && ' \
+        f'{bootstrap_command} {" ".join(sys.argv[1:])}'
     if local_conda_build is not None:
         command = f'{command} --local_conda_build {local_conda_build}'
     check_call(command)
@@ -51,8 +52,9 @@ def setup_install_env(env_name, activate_base, use_local, logger, recreate,
             f'conda create -y -n {env_name} {channels} {packages}'
     else:
         print('Updating conda environment for installing compass\n')
-        commands = f'{activate_base} && ' \
-                   f'conda install -y -n {env_name} {channels} {packages}'
+        commands = \
+            f'{activate_base} && ' \
+            f'conda install -y -n {env_name} {channels} {packages}'

     check_call(commands, logger=logger)

@@ -107,15 +109,16 @@ def main():

     if local_mache:
         print('Clone and install local mache\n')
-        commands = f'{activate_install_env} && ' \
-                   f'rm -rf conda/build_mache && ' \
-                   f'mkdir -p conda/build_mache && ' \
-                   f'cd conda/build_mache && ' \
-                   f'git clone -b {args.mache_branch} ' \
-                   f'git@github.com:{args.mache_fork}.git mache && ' \
-                   f'cd mache && ' \
-                   f'conda install -y --file spec-file.txt && ' \
-                   f'python -m pip install --no-deps --no-build-isolation .'
+        commands = \
+            f'{activate_install_env} && ' \
+            f'rm -rf conda/build_mache && ' \
+            f'mkdir -p conda/build_mache && ' \
+            f'cd conda/build_mache && ' \
+            f'git clone -b {args.mache_branch} ' \
+            f'git@github.com:{args.mache_fork}.git mache && ' \
+            f'cd mache && ' \
+            f'conda install -y --file spec-file.txt && ' \
+            f'python -m pip install --no-deps --no-build-isolation .'  # noqa: E231,E501

         check_call(commands, logger=logger)


From 433897776e1391cfea6d78646e16f94c3c1abcb7 Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Tue, 1 Oct 2024 11:50:45 -0700
Subject: [PATCH 34/41] Add `sort_mesh` call to `build_mali_mesh` framework
 function.

Sorting the global indices will improve the memory performance of
`cellsOnCell` and similar connectivity arrays. Additionally, sorting will
help with PIO decomposition and ice-sheet-coupler communication.
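For orientation, a minimal sketch of the resulting pipeline (the imports and
call order are taken from the diff below; the `logger` arguments are omitted
and the file names are the ones `build_mali_mesh` already uses, so this
snippet is illustrative rather than part of the patch):

    import xarray
    from mpas_tools.io import write_netcdf
    from mpas_tools.mesh.conversion import convert, cull
    from mpas_tools.mesh.creation.sort_mesh import sort_mesh

    # cull unwanted cells, rebuild a valid MPAS mesh, then reorder the
    # global indices so that neighbors in cellsOnCell are close in memory
    ds_mesh = xarray.open_dataset('culled.nc')
    ds_mesh = cull(ds_mesh)
    ds_mesh = convert(ds_mesh)
    ds_mesh = sort_mesh(ds_mesh)
    write_netcdf(ds_mesh, 'dehorned.nc')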
---
 compass/landice/mesh.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/compass/landice/mesh.py b/compass/landice/mesh.py
index 07bc4833b1..4086ff035f 100644
--- a/compass/landice/mesh.py
+++ b/compass/landice/mesh.py
@@ -13,6 +13,7 @@
 from mpas_tools.logging import check_call
 from mpas_tools.mesh.conversion import convert, cull
 from mpas_tools.mesh.creation import build_planar_mesh
+from mpas_tools.mesh.creation.sort_mesh import sort_mesh
 from netCDF4 import Dataset
 from scipy.interpolate import NearestNDInterpolator, interpn

@@ -745,10 +746,11 @@ def build_mali_mesh(self, cell_width, x1, y1, geom_points,

     check_call(args, logger=logger)

-    logger.info('culling and converting')
+    logger.info('culling, converting, and sorting')
     dsMesh = xarray.open_dataset('culled.nc')
     dsMesh = cull(dsMesh, logger=logger)
     dsMesh = convert(dsMesh, logger=logger)
+    dsMesh = sort_mesh(dsMesh)
     write_netcdf(dsMesh, 'dehorned.nc')

     args = ['create_landice_grid_from_generic_MPAS_grid.py', '-i',

From ed7b85ea38b7aa423481c0fcde3823011e2e6e8f Mon Sep 17 00:00:00 2001
From: Andrew Nolan <32367657+andrewdnolan@users.noreply.github.com>
Date: Tue, 1 Oct 2024 12:55:41 -0600
Subject: [PATCH 35/41] Apply suggestions from code review

Co-authored-by: Trevor Hillebrand
---
 compass/landice/mesh.py                               | 10 ++++------
 compass/landice/tests/antarctica/mesh.py              |  4 ++--
 compass/landice/tests/greenland/mesh.py               |  4 ++--
 compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg |  2 +-
 docs/developers_guide/landice/framework.rst           |  4 ++--
 5 files changed, 11 insertions(+), 13 deletions(-)

diff --git a/compass/landice/mesh.py b/compass/landice/mesh.py
index 4086ff035f..00271b49bb 100644
--- a/compass/landice/mesh.py
+++ b/compass/landice/mesh.py
@@ -444,7 +444,7 @@ def get_dist_to_edge_and_gl(self, thk, topg, x, y,
             logger.info('WARNING: window_size was set to a value smaller'
                         ' than high_dist and/or high_dist_bed. Resetting'
                         f' window_size to {max(high_dist, high_dist_bed)}, '
-                        ' which is max(high_dist, high_dist_bed)')
+                        ' which is max(high_dist, high_dist_bed)')
             window_size = max(high_dist, high_dist_bed)

     dx = x[1] - x[0]  # assumed constant and equal in x and y
@@ -971,8 +971,8 @@ def preprocess_ais_data(self, source_gridded_dataset,
         tic = time.perf_counter()
         logger.info(f"Beginning interpolation for {field}")
-        # NOTE: Do not need to evaluate the extrapolator at all gridcells.
-        # Only needed for ice-free gricells, since it's NN extrapolation
+        # NOTE: Do not need to evaluate the extrapolator at all grid cells.
+        # Only needed for ice-free grid cells, since it's NN extrapolation
         data.variables[field][0, :] = interp(xGrid, yGrid)
         toc = time.perf_counter()
         logger.info(f"Interpolation completed in {toc - tic} seconds")
@@ -1027,7 +1027,7 @@ def interp_gridded2mali(self, source_file, mali_scrip, nProcs, dest_file, proj,
         MALI input file to which data should be remapped

     proj: str
-        projection of the source dataset ...
+        projection of the source dataset

     variables: str or list of strings

@@ -1069,8 +1069,6 @@ def __guess_scrip_name(filename):

     check_call(args, logger=logger)

     # Generate remapping weights
-    # Testing shows 5 badger/grizzly nodes works well.
-    # 2 nodes is too few. I have not tested anything in between.
     logger.info('generating gridded dataset -> MPAS weights')
     args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen',
             '--source', source_scrip,
diff --git a/compass/landice/tests/antarctica/mesh.py b/compass/landice/tests/antarctica/mesh.py
index e1cc93e63e..fcff251304 100644
--- a/compass/landice/tests/antarctica/mesh.py
+++ b/compass/landice/tests/antarctica/mesh.py
@@ -74,7 +74,7 @@ def run(self):

         section_name = 'mesh'

-        # do we want to add this to the config file?
+        # TODO: do we want to add this to the config file?
         source_gridded_dataset = 'antarctica_8km_2024_01_29.nc'

         bm_updated_gridded_dataset = add_bedmachine_thk_to_ais_gridded_data(
@@ -141,7 +141,7 @@ def run(self):
         interp_gridded2mali(self, bedmachine_dataset, dst_scrip_file, nProcs,
                             self.mesh_filename, src_proj, variables="all")

-        # only interpolate a subset of MEASURES varibles onto the MALI mesh
+        # only interpolate a subset of MEaSUREs variables onto the MALI mesh
         measures_vars = ['observedSurfaceVelocityX',
                          'observedSurfaceVelocityY',
                          'observedSurfaceVelocityUncertainty']
diff --git a/compass/landice/tests/greenland/mesh.py b/compass/landice/tests/greenland/mesh.py
index 217934443d..9742398a4c 100644
--- a/compass/landice/tests/greenland/mesh.py
+++ b/compass/landice/tests/greenland/mesh.py
@@ -103,7 +103,7 @@ def run(self):
         interp_gridded2mali(self, bedmachine_dataset, dst_scrip_file, nProcs,
                             self.mesh_filename, src_proj, variables="all")

-        # only interpolate a subset of MEASURES varibles onto the MALI mesh
+        # only interpolate a subset of MEaSUREs variables onto the MALI mesh
         measures_vars = ['observedSurfaceVelocityX',
                          'observedSurfaceVelocityY',
                          'observedSurfaceVelocityUncertainty']
@@ -149,7 +149,7 @@ def run(self):
         dHdtErr = np.abs(dHdt) * 0.05
         # Use threshold of |dHdt| > 1.0 to determine invalid data
         mask = np.abs(dHdt) > 1.0
-        # Assign 100% uncertainty where data is missing
+        # Assign very large uncertainty where data is missing
         dHdtErr = dHdtErr.where(~mask, 1.0)
         # Remove ridiculous values
         dHdt = dHdt.where(~mask, 0.0)
diff --git a/compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg b/compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg
index a586ea4099..a285a1cb7c 100644
--- a/compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg
+++ b/compass/landice/tests/greenland/mesh_gen/mesh_gen.cfg
@@ -57,7 +57,7 @@ data_path = /global/cfs/cdirs/fanssie/standard_datasets/GIS_datasets/
 # (default value is for Perlmutter)
 bedmachine_filename = BedMachineGreenland-v5_edits_floodFill_extrap.nc

-# filename of the MEASURES ice velocity dataset
+# filename of the MEaSUREs ice velocity dataset
 # (default value is for Perlmutter)
 measures_filename = greenland_vel_mosaic500_extrap.nc

diff --git a/docs/developers_guide/landice/framework.rst b/docs/developers_guide/landice/framework.rst
index daafe9b50d..e3fddd6e17 100644
--- a/docs/developers_guide/landice/framework.rst
+++ b/docs/developers_guide/landice/framework.rst
@@ -57,9 +57,9 @@ clean up steps after interpolation for the AIS mesh case.
 to the gridded dataset in order to separate the ice sheet from peripheral ice.

 :py:func:`compass.landice.mesh.interp_gridded2mali()` interpolates gridded data
-(e.g. BedMachine thickness or MEASURES ice velocity) to a MALI mesh, accounting
+(e.g. BedMachine thickness or MEaSUREs ice velocity) to a MALI mesh, accounting
 for masking of the ice extent to avoid interpolation ramps. This functions works
-for either the Antarctic or Greenland Icesheets.
+for both Antarctica and Greenland.
 :py:func:`compass.landice.mesh.preprocess_ais_data()` performs adjustments to
 gridded AIS datasets needed for rest of compass workflow to utilize them.

From 490e606e5d0e58b51c24b460fdaeafe86bd783a7 Mon Sep 17 00:00:00 2001
From: Andrew Nolan
Date: Tue, 1 Oct 2024 12:02:32 -0700
Subject: [PATCH 36/41] Apply more suggestions/edits from code review.

---
 compass/landice/mesh.py | 13 +++++--------
 1 file changed, 5 insertions(+), 8 deletions(-)

diff --git a/compass/landice/mesh.py b/compass/landice/mesh.py
index 00271b49bb..0427e47556 100644
--- a/compass/landice/mesh.py
+++ b/compass/landice/mesh.py
@@ -443,7 +443,7 @@ def get_dist_to_edge_and_gl(self, thk, topg, x, y,
         elif window_size < min(high_dist, high_dist_bed):
             logger.info('WARNING: window_size was set to a value smaller'
                         ' than high_dist and/or high_dist_bed. Resetting'
-                        f' window_size to {max(high_dist, high_dist_bed)}, '
+                        f' window_size to {max(high_dist, high_dist_bed)},'
                         ' which is max(high_dist, high_dist_bed)')
             window_size = max(high_dist, high_dist_bed)

@@ -972,7 +972,7 @@ def preprocess_ais_data(self, source_gridded_dataset,
         tic = time.perf_counter()
         logger.info(f"Beginning interpolation for {field}")
         # NOTE: Do not need to evaluate the extrapolator at all grid cells.
-        # Only needed for ice-free grid cells, since it's NN extrapolation
+        # Only needed for ice-free grid cells, since it is NN extrapolation
         data.variables[field][0, :] = interp(xGrid, yGrid)
         toc = time.perf_counter()
         logger.info(f"Interpolation completed in {toc - tic} seconds")
@@ -1029,8 +1029,8 @@ def interp_gridded2mali(self, source_file, mali_scrip, nProcs, dest_file, proj,
         proj: str
             projection of the source dataset

-        variables: str or list of strings
-
+        variables: "all" or list of strings
+            either the string "all" or a list of strings
     """

     def __guess_scrip_name(filename):
@@ -1057,7 +1057,7 @@ def __guess_scrip_name(filename):
             variables = [variables]
         if not isinstance(variables, list):
             raise TypeError("Arugment 'variables' is of incorrect type, must"
-                            " either the string 'all' or a list string")
+                            " either the string 'all' or a list of strings")

     logger.info('creating scrip file for source dataset')
     # Note: writing scrip file to workdir
@@ -1090,9 +1090,6 @@ def __guess_scrip_name(filename):

     check_call(args, logger=logger)

-    # should I delted the weights file, since that could cause namespace
-    # conflicts when multiple interpolations are done?
-

 def clean_up_after_interp(fname):
     """

From 70d0e8d1a4571bc650b6dda8422179967a6519dd Mon Sep 17 00:00:00 2001
From: Althea Denlinger
Date: Tue, 1 Oct 2024 13:33:53 -0700
Subject: [PATCH 37/41] Update pre-commit dependencies

---
 .pre-commit-config.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2305081d39..42734632d7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -4,14 +4,14 @@ fail_fast: true

 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.6.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer

   # Can run individually with `pre-commit run isort --all-files`
   - repo: https://github.com/PyCQA/isort
-    rev: 5.12.0
+    rev: 5.13.2
     hooks:
       - id: isort

   # Need to use flake8 GitHub mirror due to CentOS git issue with GitLab
   # https://github.com/pre-commit/pre-commit/issues/1206
   - repo: https://github.com/pycqa/flake8
-    rev: 6.0.0
+    rev: 7.1.1
     hooks:
       - id: flake8
         args: ["--config=setup.cfg"]
         additional_dependencies: [flake8-isort]

From 50074f467496bbf71d86b1e826f114755af39f2e Mon Sep 17 00:00:00 2001
From: Althea Denlinger
Date: Tue, 1 Oct 2024 13:37:59 -0700
Subject: [PATCH 38/41] Add monthly CI update

---
 .pre-commit-config.yaml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 42734632d7..06ec144118 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,3 +24,8 @@ repos:
       - id: flake8
         args: ["--config=setup.cfg"]
         additional_dependencies: [flake8-isort]
+
+# https://pre-commit.ci/#configuration
+ci:
+  autofix_prs: false
+  autoupdate_schedule: monthly

From a0d8ab8f5f313b7ec35874cd8e11be45e08cc9b4 Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Wed, 9 Oct 2024 16:03:39 -0500
Subject: [PATCH 39/41] Put low res topography options in a config file

This is much less confusing than having them hard-coded deep inside of
global ocean.
---
 compass/ocean/mesh/low_res_topography.cfg     | 16 ++++++++++++++++
 .../ocean/tests/global_ocean/mesh/__init__.py | 14 ++++----------
 2 files changed, 20 insertions(+), 10 deletions(-)
 create mode 100644 compass/ocean/mesh/low_res_topography.cfg

diff --git a/compass/ocean/mesh/low_res_topography.cfg b/compass/ocean/mesh/low_res_topography.cfg
new file mode 100644
index 0000000000..c3bf08c611
--- /dev/null
+++ b/compass/ocean/mesh/low_res_topography.cfg
@@ -0,0 +1,16 @@
+# config options related to remapping topography to an MPAS-Ocean mesh
+[remap_topography]
+
+# the name of the topography file in the bathymetry database
+topo_filename = BedMachineAntarctica_v2_and_GEBCO_2022_0.05_degree_20220729.nc
+
+# variable names in topo_filename
+bathy_frac_var = ocean_mask
+
+# the description to include in metadata
+description = Bathymetry is from GEBCO 2022, combined with BedMachine
+    Antarctica v2 around Antarctica.
+
+# the target and minimum number of MPI tasks to use in remapping
+ntasks = 64
+min_tasks = 4
diff --git a/compass/ocean/tests/global_ocean/mesh/__init__.py b/compass/ocean/tests/global_ocean/mesh/__init__.py
index e88a96d612..2d082c3b83 100644
--- a/compass/ocean/tests/global_ocean/mesh/__init__.py
+++ b/compass/ocean/tests/global_ocean/mesh/__init__.py
@@ -137,20 +137,14 @@ def configure(self, config=None):
         if config is None:
             config = self.config
         config.add_from_package('compass.mesh', 'mesh.cfg', exception=True)
-        # a description of the bathymetry
         if 'remap_topography' in self.steps:
             config.add_from_package('compass.ocean.mesh',
                                     'remap_topography.cfg',
                                     exception=True)
-            if not self.high_res_topography:
-                filename = 'BedMachineAntarctica_v2_and_GEBCO_2022_0.05_degree_20220729.nc'  # noqa: E501
-                description = 'Bathymetry is from GEBCO 2022, combined with ' \
-                              'BedMachine Antarctica v2 around Antarctica.'
-                config.set('remap_topography', 'topo_filename', filename)
-                config.set('remap_topography', 'bathy_frac_var', 'ocean_mask')
-                config.set('remap_topography', 'description', description)
-                config.set('remap_topography', 'ntasks', '64')
-                config.set('remap_topography', 'min_tasks', '4')
+            if not self.high_res_topography:
+                config.add_from_package('compass.ocean.mesh',
+                                        'low_res_topography.cfg',
+                                        exception=True)

         if self.mesh_name.startswith('Kuroshio'):
             # add the config options for all kuroshio meshes

From 1167c3cd3e5ad4098504e6d660e75af5f8679f07 Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Fri, 25 Oct 2024 23:06:34 -0500
Subject: [PATCH 40/41] Update E3SM-Project submodule

This merge updates the E3SM-Project submodule from
[727ad81](https://github.com/E3SM-Project/E3SM/tree/727ad81) to
[1442143](https://github.com/E3SM-Project/E3SM/tree/1442143).

This update includes the following MPAS-Ocean and MPAS-Frameworks PRs (check
mark indicates bit-for-bit with previous PR in the list):

- [ ] (ocn) https://github.com/E3SM-Project/E3SM/pull/6509
- [ ] (ocn) https://github.com/E3SM-Project/E3SM/pull/6508
- [ ] (fwk) https://github.com/E3SM-Project/E3SM/pull/6575
- [ ] (ocn) https://github.com/E3SM-Project/E3SM/pull/6590
- [ ] (fwk) https://github.com/E3SM-Project/E3SM/pull/6643
- [ ] (ocn) https://github.com/E3SM-Project/E3SM/pull/6656
- [ ] (ocn) https://github.com/E3SM-Project/E3SM/pull/6672
- [ ] (ocn) https://github.com/E3SM-Project/E3SM/pull/6659
- [ ] (ocn) https://github.com/E3SM-Project/E3SM/pull/6497
- [ ] (ocn) https://github.com/E3SM-Project/E3SM/pull/6485
- [ ] (ocn) https://github.com/E3SM-Project/E3SM/pull/6566
---
 E3SM-Project | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/E3SM-Project b/E3SM-Project
index 727ad813f7..1442143a95 160000
--- a/E3SM-Project
+++ b/E3SM-Project
@@ -1 +1 @@
-Subproject commit 727ad813f70a6c9801b31121ff1930a65e24a767
+Subproject commit 1442143a9519fbd66fdea4f3f2e67aba17ad49e5

From e360cc70e3a9a6fa31ad1ff0c9c1a382afe4ba2d Mon Sep 17 00:00:00 2001
From: Matthew Hoffman
Date: Mon, 28 Oct 2024 14:59:54 -0700
Subject: [PATCH 41/41] Update MALI-Dev submodule

---
 MALI-Dev | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/MALI-Dev b/MALI-Dev
index 86f24750e4..07e3abfe7d 160000
--- a/MALI-Dev
+++ b/MALI-Dev
@@ -1 +1 @@
-Subproject commit 86f24750e4e2b6fa5410430d53e475ebaf76c339
+Subproject commit 07e3abfe7db663a2a83f6e4e32739946d7ebd317
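As a usage footnote to PATCH 39/41 above: a hedged sketch of how the packaged
config options would typically be read back in a compass step. The getter
calls are the standard compass config-parser API; the surrounding step code is
illustrative only and not part of this series:

    # assumes low_res_topography.cfg has already been added via
    # config.add_from_package(), as in the configure() method patched above
    config = self.config
    topo_filename = config.get('remap_topography', 'topo_filename')
    bathy_frac_var = config.get('remap_topography', 'bathy_frac_var')
    description = config.get('remap_topography', 'description')
    ntasks = config.getint('remap_topography', 'ntasks')
    min_tasks = config.getint('remap_topography', 'min_tasks')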