Skip to content

Commit

Permalink
Merge pull request #817 from andrewdnolan/landice/interpolation_unifi…
Browse files Browse the repository at this point in the history
…cation

Landice interpolation unification
  • Loading branch information
trhille authored Oct 2, 2024
2 parents 2aa611e + 490e606 commit 1279a40
Show file tree
Hide file tree
Showing 9 changed files with 234 additions and 254 deletions.
124 changes: 47 additions & 77 deletions compass/landice/mesh.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import os
import re
import sys
import time
from shutil import copyfile
Expand All @@ -12,6 +13,7 @@
from mpas_tools.logging import check_call
from mpas_tools.mesh.conversion import convert, cull
from mpas_tools.mesh.creation import build_planar_mesh
from mpas_tools.mesh.creation.sort_mesh import sort_mesh
from netCDF4 import Dataset
from scipy.interpolate import NearestNDInterpolator, interpn

Expand Down Expand Up @@ -442,7 +444,7 @@ def get_dist_to_edge_and_gl(self, thk, topg, x, y,
logger.info('WARNING: window_size was set to a value smaller'
' than high_dist and/or high_dist_bed. Resetting'
f' window_size to {max(high_dist, high_dist_bed)},'
' which is max(high_dist, high_dist_bed)')
' which is max(high_dist, high_dist_bed)')
window_size = max(high_dist, high_dist_bed)

dx = x[1] - x[0] # assumed constant and equal in x and y
Expand Down Expand Up @@ -744,10 +746,11 @@ def build_mali_mesh(self, cell_width, x1, y1, geom_points,

check_call(args, logger=logger)

logger.info('culling and converting')
logger.info('culling, converting, and sorting')
dsMesh = xarray.open_dataset('culled.nc')
dsMesh = cull(dsMesh, logger=logger)
dsMesh = convert(dsMesh, logger=logger)
dsMesh = sort_mesh(dsMesh)
write_netcdf(dsMesh, 'dehorned.nc')

args = ['create_landice_grid_from_generic_MPAS_grid.py', '-i',
Expand Down Expand Up @@ -968,6 +971,8 @@ def preprocess_ais_data(self, source_gridded_dataset,

tic = time.perf_counter()
logger.info(f"Beginning interpolation for {field}")
# NOTE: Do not need to evaluate the extrapolator at all grid cells.
# Only needed for ice-free grid cells, since this is NN extrapolation
data.variables[field][0, :] = interp(xGrid, yGrid)
toc = time.perf_counter()
logger.info(f"Interpolation completed in {toc - tic} seconds")
Expand Down Expand Up @@ -1002,15 +1007,15 @@ def preprocess_ais_data(self, source_gridded_dataset,
return preprocessed_gridded_dataset


def interp_ais_bedmachine(self, data_path, mali_scrip, nProcs, dest_file):
def interp_gridded2mali(self, source_file, mali_scrip, nProcs, dest_file, proj,
variables="all"):
"""
Interpolates BedMachine thickness and bedTopography dataset
to a MALI mesh
Interpolate gridded dataset (e.g. MEASURES, BedMachine) onto a MALI mesh
Parameters
----------
data_path : str
path to AIS datasets, including BedMachine
source_file : str
filepath to the source gridded dataset to be interpolated
mali_scrip : str
name of scrip file corresponding to destination MALI mesh
Expand All @@ -1020,104 +1025,69 @@ def interp_ais_bedmachine(self, data_path, mali_scrip, nProcs, dest_file):
dest_file: str
MALI input file to which data should be remapped
"""

logger = self.logger

logger.info('creating scrip file for BedMachine dataset')
# Note: writing scrip file to workdir
args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
'-i',
os.path.join(data_path,
'BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc'), # noqa
'-s',
'BedMachineAntarctica_2020-07-15_v02.scrip.nc',
'-p', 'ais-bedmap2',
'-r', '2']
check_call(args, logger=logger)

# Generate remapping weights
# Testing shows 5 badger/grizzly nodes works well.
# 2 nodes is too few. I have not tested anything in between.
logger.info('generating gridded dataset -> MPAS weights')
args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen',
'--source',
'BedMachineAntarctica_2020-07-15_v02.scrip.nc',
'--destination', mali_scrip,
'--weight', 'BedMachine_to_MPAS_weights.nc',
'--method', 'conserve',
"--netcdf4",
"--dst_regional", "--src_regional", '--ignore_unmapped']
check_call(args, logger=logger)

# Perform actual interpolation using the weights
logger.info('calling interpolate_to_mpasli_grid.py')
args = ['interpolate_to_mpasli_grid.py', '-s',
os.path.join(data_path,
'BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc'), # noqa
'-d', dest_file,
'-m', 'e',
'-w', 'BedMachine_to_MPAS_weights.nc']
check_call(args, logger=logger)
proj: str
projection of the source dataset
def interp_ais_measures(self, data_path, mali_scrip, nProcs, dest_file):
variables: "all" or list of strings
either the string "all" or a list of strings
"""
Interpolates MEASURES ice velocity dataset
to a MALI mesh

Parameters
----------
data_path : str
path to AIS datasets, including BedMachine
def __guess_scrip_name(filename):

mali_scrip : str
name of scrip file corresponding to destination MALI mesh
# try searching for string followed by a version number
match = re.search(r'(^.*[_-]v\d*[_-])+', filename)

nProcs : int
number of processors to use for generating remapping weights
if match:
# slice string to end of match minus one to leave off the final _ or -
base_fn = filename[:match.end() - 1]
else:
# no matches were found, just use the filename (minus extension)
base_fn = os.path.splitext(filename)[0]

dest_file: str
MALI input file to which data should be remapped
"""
return f"{base_fn}.scrip.nc"

logger = self.logger

logger.info('creating scrip file for velocity dataset')
source_scrip = __guess_scrip_name(os.path.basename(source_file))
weights_filename = "gridded_to_MPAS_weights.nc"

# make sure variables is a list, encompasses the variables="all" case
if isinstance(variables, str):
variables = [variables]
if not isinstance(variables, list):
raise TypeError("Argument 'variables' is of incorrect type, must be"
" either the string 'all' or a list of strings")

logger.info('creating scrip file for source dataset')
# Note: writing scrip file to workdir
args = ['create_SCRIP_file_from_planar_rectangular_grid.py',
'-i',
os.path.join(data_path,
'antarctica_ice_velocity_450m_v2_edits_extrap.nc'),
'-s',
'antarctica_ice_velocity_450m_v2.scrip.nc',
'-p', 'ais-bedmap2',
'-i', source_file,
'-s', source_scrip,
'-p', proj,
'-r', '2']
check_call(args, logger=logger)

# Generate remapping weights
logger.info('generating gridded dataset -> MPAS weights')
args = ['srun', '-n', nProcs, 'ESMF_RegridWeightGen',
'--source',
'antarctica_ice_velocity_450m_v2.scrip.nc',
'--source', source_scrip,
'--destination', mali_scrip,
'--weight', 'measures_to_MPAS_weights.nc',
'--weight', weights_filename,
'--method', 'conserve',
"--netcdf4",
"--dst_regional", "--src_regional", '--ignore_unmapped']
check_call(args, logger=logger)

# Perform actual interpolation using the weights
logger.info('calling interpolate_to_mpasli_grid.py')
args = ['interpolate_to_mpasli_grid.py',
'-s',
os.path.join(data_path,
'antarctica_ice_velocity_450m_v2_edits_extrap.nc'),
'-s', source_file,
'-d', dest_file,
'-m', 'e',
'-w', 'measures_to_MPAS_weights.nc',
'-v', 'observedSurfaceVelocityX',
'observedSurfaceVelocityY',
'observedSurfaceVelocityUncertainty']
'-w', weights_filename,
'-v'] + variables

check_call(args, logger=logger)


Expand Down
69 changes: 41 additions & 28 deletions compass/landice/tests/antarctica/mesh.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,7 @@
build_cell_width,
build_mali_mesh,
clean_up_after_interp,
interp_ais_bedmachine,
interp_ais_measures,
interp_gridded2mali,
make_region_masks,
preprocess_ais_data,
)
Expand Down Expand Up @@ -44,10 +43,10 @@ def __init__(self, test_case):
self.mesh_filename = 'Antarctica.nc'
self.add_output_file(filename='graph.info')
self.add_output_file(filename=self.mesh_filename)
self.add_output_file(filename=f'{self.mesh_filename[:-3]}_'
f'imbie_regionMasks.nc')
self.add_output_file(filename=f'{self.mesh_filename[:-3]}_'
f'ismip6_regionMasks.nc')
self.add_output_file(
filename=f'{self.mesh_filename[:-3]}_imbie_regionMasks.nc')
self.add_output_file(
filename=f'{self.mesh_filename[:-3]}_ismip6_regionMasks.nc')
self.add_input_file(
filename='antarctica_8km_2024_01_29.nc',
target='antarctica_8km_2024_01_29.nc',
Expand All @@ -61,37 +60,38 @@ def run(self):
"""
logger = self.logger
config = self.config

section_ais = config['antarctica']
data_path = section_ais.get('data_path')

nProcs = section_ais.get('nProcs')
src_proj = section_ais.get("src_proj")
data_path = section_ais.get('data_path')
measures_filename = section_ais.get("measures_filename")
bedmachine_filename = section_ais.get("bedmachine_filename")

measures_dataset = os.path.join(data_path, measures_filename)
bedmachine_dataset = os.path.join(data_path, bedmachine_filename)

section_name = 'mesh'

# TODO: do we want to add this to the config file?
source_gridded_dataset = 'antarctica_8km_2024_01_29.nc'
bedmachine_path = os.path.join(
data_path,
'BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc') # noqa

bm_updated_gridded_dataset = add_bedmachine_thk_to_ais_gridded_data(
self, source_gridded_dataset, bedmachine_path)
self, source_gridded_dataset, bedmachine_dataset)

logger.info('calling build_cell_width')
cell_width, x1, y1, geom_points, geom_edges, floodFillMask = \
build_cell_width(
self, section_name=section_name,
gridded_dataset=bm_updated_gridded_dataset)

# Preprocess the gridded AIS source datasets to work
# with the rest of the workflow
logger.info('calling preprocess_ais_data')
preprocessed_gridded_dataset = preprocess_ais_data(
self, bm_updated_gridded_dataset, floodFillMask)

# Now build the base mesh and perform the standard interpolation
build_mali_mesh(
self, cell_width, x1, y1, geom_points, geom_edges,
mesh_name=self.mesh_filename, section_name=section_name,
gridded_dataset=bm_updated_gridded_dataset,
projection='ais-bedmap2', geojson_file=None)
projection=src_proj, geojson_file=None)

# Now that we have base mesh with standard interpolation
# perform advanced interpolation for specific fields
Expand All @@ -107,12 +107,19 @@ def run(self):
data.variables['iceMask'][:] = 0.
data.close()

# interpolate fields from composite dataset
# Note: this was already done in build_mali_mesh() using
# bilinear interpolation. Redoing it here again is likely
# not needed. Also, it should be assessed if bilinear or
# barycentric used here is preferred for this application.
# Current thinking is they are both equally appropriate.
# Preprocess the gridded AIS source datasets to work
# with the rest of the workflow
logger.info('calling preprocess_ais_data')
preprocessed_gridded_dataset = preprocess_ais_data(
self, bm_updated_gridded_dataset, floodFillMask)

# interpolate fields from *preprocessed* composite dataset
# NOTE: while this has already been done in `build_mali_mesh()`
# we are using an updated version of the gridded dataset here,
# which has had unit conversion and extrapolation done.
# Also, it should be assessed if bilinear or
# barycentric used here is preferred for this application.
# Current thinking is they are both equally appropriate.
logger.info('calling interpolate_to_mpasli_grid.py')
args = ['interpolate_to_mpasli_grid.py', '-s',
preprocessed_gridded_dataset,
Expand All @@ -131,10 +138,16 @@ def run(self):

# Now perform bespoke interpolation of geometry and velocity data
# from their respective sources
interp_ais_bedmachine(self, data_path, dst_scrip_file, nProcs,
self.mesh_filename)
interp_ais_measures(self, data_path, dst_scrip_file, nProcs,
self.mesh_filename)
interp_gridded2mali(self, bedmachine_dataset, dst_scrip_file, nProcs,
self.mesh_filename, src_proj, variables="all")

# only interpolate a subset of MEaSUREs variables onto the MALI mesh
measures_vars = ['observedSurfaceVelocityX',
'observedSurfaceVelocityY',
'observedSurfaceVelocityUncertainty']
interp_gridded2mali(self, measures_dataset, dst_scrip_file, nProcs,
self.mesh_filename, src_proj,
variables=measures_vars)

# perform some final cleanup details
clean_up_after_interp(self.mesh_filename)
Expand Down
12 changes: 12 additions & 0 deletions compass/landice/tests/antarctica/mesh_gen/mesh_gen.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -52,5 +52,17 @@ use_bed = False
# (default value is for Perlmutter)
data_path = /global/cfs/cdirs/fanssie/standard_datasets/AIS_datasets

# filename of the BedMachine thickness and bedTopography dataset
# (default value is for Perlmutter)
bedmachine_filename = BedMachineAntarctica_2020-07-15_v02_edits_floodFill_extrap_fillVostok.nc

# filename of the MEASURES ice velocity dataset
# (default value is for Perlmutter)
measures_filename = antarctica_ice_velocity_450m_v2_edits_extrap.nc

# projection of the source datasets, given as one of the dictionary keys
# recognized by create_SCRIP_file_from_planar_rectangular_grid.py from MPAS_Tools
src_proj = ais-bedmap2

# number of processors to use for ESMF_RegridWeightGen
nProcs = 128
Loading

0 comments on commit 1279a40

Please sign in to comment.