From f85ba527f8433447c511cf281ca954e463446c3c Mon Sep 17 00:00:00 2001 From: Ben Mather Date: Tue, 7 Nov 2023 14:03:31 +1100 Subject: [PATCH 1/8] compression with zst and significant digits quantization --- gplately/grids.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/gplately/grids.py b/gplately/grids.py index adf34e4a..caa25d3b 100644 --- a/gplately/grids.py +++ b/gplately/grids.py @@ -206,6 +206,10 @@ def find_label(keys, labels): cdf_lon = cdf[key_lon][:] cdf_lat = cdf[key_lat][:] + fill_value = cdf[key_z].missing_value + + cdf_grid[np.isclose(cdf_grid, fill_value, rtol=0.1)] = np.nan + if realign: # realign longitudes to -180/180 dateline cdf_grid_z, cdf_lon, cdf_lat = realign_grid(cdf_grid, cdf_lon, cdf_lat) @@ -243,7 +247,7 @@ def find_label(keys, labels): else: return cdf_grid_z -def write_netcdf_grid(filename, grid, extent=[-180,180,-90,90]): +def write_netcdf_grid(filename, grid, extent=[-180,180,-90,90], significant_digits=None, fill_value=np.nan): """ Write geological data contained in a `grid` to a netCDF4 grid with a specified `filename`. 
Notes @@ -288,8 +292,8 @@ def write_netcdf_grid(filename, grid, extent=[-180,180,-90,90]): cdf.title = "Grid produced by gplately" cdf.createDimension('lon', lon_grid.size) cdf.createDimension('lat', lat_grid.size) - cdf_lon = cdf.createVariable('lon', lon_grid.dtype, ('lon',), zlib=True) - cdf_lat = cdf.createVariable('lat', lat_grid.dtype, ('lat',), zlib=True) + cdf_lon = cdf.createVariable('lon', lon_grid.dtype, ('lon',), compression='zstd', complevel=9) + cdf_lat = cdf.createVariable('lat', lat_grid.dtype, ('lat',), compression='zstd', complevel=9) cdf_lon[:] = lon_grid cdf_lat[:] = lat_grid @@ -301,10 +305,18 @@ def write_netcdf_grid(filename, grid, extent=[-180,180,-90,90]): cdf_lat.standard_name = 'lat' cdf_lat.actual_range = [lat_grid[0], lat_grid[-1]] - cdf_data = cdf.createVariable('z', grid.dtype, ('lat','lon'), zlib=True) + if significant_digits: + cdf_data = cdf.createVariable('z', grid.dtype, ('lat','lon'), compression='zstd', complevel=9, + significant_digits=int(significant_digits), + quantize_mode='GranularBitRound') + + else: + cdf_data = cdf.createVariable('z', grid.dtype, ('lat','lon'), compression='zstd', complevel=9) + # netCDF4 uses the missing_value attribute as the default _FillValue # without this, _FillValue defaults to 9.969209968386869e+36 - cdf_data.missing_value = np.nan + cdf_data.missing_value = fill_value + cdf_data.standard_name = 'z' #Ensure pygmt registers min and max z values properly cdf_data.actual_range = [np.nanmin(grid), np.nanmax(grid)] From d2e8339ed0d58567e50fb3f424e0b235b2a5b793 Mon Sep 17 00:00:00 2001 From: Ben Mather Date: Tue, 7 Nov 2023 14:29:58 +1100 Subject: [PATCH 2/8] read netcdf files even when there is no missing_values attribute --- gplately/grids.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gplately/grids.py b/gplately/grids.py index caa25d3b..b2d857b8 100644 --- a/gplately/grids.py +++ b/gplately/grids.py @@ -206,9 +206,9 @@ def find_label(keys, labels): cdf_lon = 
cdf[key_lon][:] cdf_lat = cdf[key_lat][:] - fill_value = cdf[key_z].missing_value - - cdf_grid[np.isclose(cdf_grid, fill_value, rtol=0.1)] = np.nan + if hasattr(cdf[key_z], 'missing_value'): + fill_value = cdf[key_z].missing_value + cdf_grid[np.isclose(cdf_grid, fill_value, rtol=0.1)] = np.nan if realign: # realign longitudes to -180/180 dateline From 8f8102a0a3b40ac9c469633f771aa5106fde6f56 Mon Sep 17 00:00:00 2001 From: Ben Mather Date: Fri, 13 Sep 2024 21:39:53 +1000 Subject: [PATCH 3/8] update write_netcdf_grid --- gplately/grids.py | 41 +++++++++++++++++++++++++++++++---------- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/gplately/grids.py b/gplately/grids.py index b2d857b8..fad72a54 100644 --- a/gplately/grids.py +++ b/gplately/grids.py @@ -247,7 +247,7 @@ def find_label(keys, labels): else: return cdf_grid_z -def write_netcdf_grid(filename, grid, extent=[-180,180,-90,90], significant_digits=None, fill_value=np.nan): +def write_netcdf_grid(filename, grid, extent="global", significant_digits=None, fill_value=np.nan, **kwargs): """ Write geological data contained in a `grid` to a netCDF4 grid with a specified `filename`. Notes @@ -272,7 +272,7 @@ def write_netcdf_grid(filename, grid, extent=[-180,180,-90,90], significant_digi An ndarray grid containing data to be written into a `netCDF` (.nc) file. Note: Rows correspond to the data's latitudes, while the columns correspond to the data's longitudes. - extent : 1D numpy array, default=[-180,180,-90,90] + extent : list, default=[-180,180,-90,90] Four elements that specify the [min lon, max lon, min lat, max lat] to constrain the lat and lon variables of the netCDF grid to. If no extents are supplied, full global extent `[-180, 180, -90, 90]` is assumed. @@ -282,18 +282,25 @@ def write_netcdf_grid(filename, grid, extent=[-180,180,-90,90], significant_digi A netCDF grid will be saved to the path specified in `filename`. 
""" import netCDF4 + + if extent == 'global': + extent = [-180, 180, -90, 90] + else: + assert len(extent) == 4, "specify the [min lon, max lon, min lat, max lat]" nrows, ncols = np.shape(grid) lon_grid = np.linspace(extent[0], extent[1], ncols) lat_grid = np.linspace(extent[2], extent[3], nrows) + + data_kwds = {'compression': 'zlib', 'complevel': 9} with netCDF4.Dataset(filename, 'w', driver=None) as cdf: cdf.title = "Grid produced by gplately" cdf.createDimension('lon', lon_grid.size) cdf.createDimension('lat', lat_grid.size) - cdf_lon = cdf.createVariable('lon', lon_grid.dtype, ('lon',), compression='zstd', complevel=9) - cdf_lat = cdf.createVariable('lat', lat_grid.dtype, ('lat',), compression='zstd', complevel=9) + cdf_lon = cdf.createVariable('lon', lon_grid.dtype, ('lon',), **data_kwds) + cdf_lat = cdf.createVariable('lat', lat_grid.dtype, ('lat',), **data_kwds) cdf_lon[:] = lon_grid cdf_lat[:] = lat_grid @@ -305,22 +312,36 @@ def write_netcdf_grid(filename, grid, extent=[-180,180,-90,90], significant_digi cdf_lat.standard_name = 'lat' cdf_lat.actual_range = [lat_grid[0], lat_grid[-1]] + # create container variable for CRS: lon/lat WGS84 datum + crso = cdf.createVariable('crs','i4') + crso.long_name = 'Lon/Lat Coords in WGS84' + crso.grid_mapping_name='latitude_longitude' + crso.longitude_of_prime_meridian = 0.0 + crso.semi_major_axis = 6378137.0 + crso.inverse_flattening = 298.257223563 + crso.spatial_ref = """GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]""" + + # add more keyword arguments for quantizing data if significant_digits: - cdf_data = cdf.createVariable('z', grid.dtype, ('lat','lon'), compression='zstd', complevel=9, - significant_digits=int(significant_digits), - quantize_mode='GranularBitRound') + data_kwds['significant_digits'] = 
int(significant_digits) + data_kwds['quantize_mode'] = 'GranularBitRound' - else: - cdf_data = cdf.createVariable('z', grid.dtype, ('lat','lon'), compression='zstd', complevel=9) + cdf_data = cdf.createVariable('z', grid.dtype, ('lat','lon'), **data_kwds) # netCDF4 uses the missing_value attribute as the default _FillValue # without this, _FillValue defaults to 9.969209968386869e+36 cdf_data.missing_value = fill_value cdf_data.standard_name = 'z' - #Ensure pygmt registers min and max z values properly + + cdf_data.add_offset = 0.0 + cdf_data.grid_mapping = 'crs' + cdf_data.set_auto_maskandscale(False) + + # ensure min and max z values are properly registered cdf_data.actual_range = [np.nanmin(grid), np.nanmax(grid)] + # write data cdf_data[:,:] = grid From 97fcfa3daa396f5426ddf9e1f42e3dfa230f474c Mon Sep 17 00:00:00 2001 From: Ben Mather Date: Mon, 16 Sep 2024 11:50:48 +1000 Subject: [PATCH 4/8] add default significant digits (2) and descriptive title --- gplately/grids.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/gplately/grids.py b/gplately/grids.py index 30843b96..30cea1e7 100644 --- a/gplately/grids.py +++ b/gplately/grids.py @@ -314,6 +314,7 @@ def write_netcdf_grid(filename, grid, extent="global", significant_digits=None, A netCDF grid will be saved to the path specified in `filename`. 
""" import netCDF4 + from gplately import __version__ as _version if extent == 'global': extent = [-180, 180, -90, 90] @@ -328,7 +329,7 @@ def write_netcdf_grid(filename, grid, extent="global", significant_digits=None, data_kwds = {'compression': 'zlib', 'complevel': 9} with netCDF4.Dataset(filename, 'w', driver=None) as cdf: - cdf.title = "Grid produced by gplately" + cdf.title = "Grid produced by gplately " + str(_version) cdf.createDimension('lon', lon_grid.size) cdf.createDimension('lat', lat_grid.size) cdf_lon = cdf.createVariable('lon', lon_grid.dtype, ('lon',), **data_kwds) @@ -355,7 +356,8 @@ def write_netcdf_grid(filename, grid, extent="global", significant_digits=None, # add more keyword arguments for quantizing data if significant_digits: - data_kwds['significant_digits'] = int(significant_digits) + # significant_digits needs to be >= 2 so that NaNs are preserved + data_kwds['significant_digits'] = max(2, int(significant_digits)) data_kwds['quantize_mode'] = 'GranularBitRound' cdf_data = cdf.createVariable('z', grid.dtype, ('lat','lon'), **data_kwds) From 78485e3e545696b47d9db85713c074970b3e41a7 Mon Sep 17 00:00:00 2001 From: Ben Mather Date: Mon, 16 Sep 2024 15:50:17 +1000 Subject: [PATCH 5/8] add support for boolean grids --- gplately/grids.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/gplately/grids.py b/gplately/grids.py index 30cea1e7..631dcf4d 100644 --- a/gplately/grids.py +++ b/gplately/grids.py @@ -360,11 +360,18 @@ def write_netcdf_grid(filename, grid, extent="global", significant_digits=None, data_kwds['significant_digits'] = max(2, int(significant_digits)) data_kwds['quantize_mode'] = 'GranularBitRound' + # boolean arrays need to be converted to integers + # no such thing as a mask on a boolean array + if grid.dtype is np.dtype(bool): + grid = grid.astype('i1') + fill_value = None + cdf_data = cdf.createVariable('z', grid.dtype, ('lat','lon'), **data_kwds) # netCDF4 uses the missing_value attribute as the 
default _FillValue # without this, _FillValue defaults to 9.969209968386869e+36 - cdf_data.missing_value = fill_value + if fill_value is not None: + cdf_data.missing_value = fill_value cdf_data.standard_name = 'z' From 91008d10b9116c15f4783ac6b9697a011c0b6f70 Mon Sep 17 00:00:00 2001 From: Ben Mather Date: Mon, 16 Sep 2024 15:50:45 +1000 Subject: [PATCH 6/8] update age gridding workflow to spit out optimised netcdf grids --- gplately/oceans.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/gplately/oceans.py b/gplately/oceans.py index d1dec4e7..3e3c81cd 100644 --- a/gplately/oceans.py +++ b/gplately/oceans.py @@ -781,8 +781,9 @@ def _create_continental_mask(self, time_array): grids.write_netcdf_grid( self.continent_mask_filepath.format(time), - final_grid, + final_grid.astype('i1'), extent=[-180, 180, -90, 90], + fill_value=None, ) logger.info(f"Finished building a continental mask at {time} Ma!") @@ -809,8 +810,9 @@ def _build_continental_mask(self, time: float, overwrite=False): grids.write_netcdf_grid( self.continent_mask_filepath.format(time), - final_grid, + final_grid.astype('i1'), extent=[-180, 180, -90, 90], + fill_value=None, ) logger.info(f"Finished building a continental mask at {time} Ma!") @@ -1490,7 +1492,11 @@ def _save_netcdf_file( grid_output = os.path.join(output_dir, grid_basename) if unmasked: - grids.write_netcdf_grid(grid_output_unmasked, Z, extent=extent) + grids.write_netcdf_grid( + grid_output_unmasked, + Z, + extent=extent, + significant_digits=2) # Identify regions in the grid in the continental mask cont_mask = grids.Raster(data=continent_mask_filename.format(time)) @@ -1512,6 +1518,7 @@ def _save_netcdf_file( grid_output, Z, extent=extent, + significant_digits=2, ) logger.info(f"Save {name} netCDF grid at {time:0.2f} Ma completed!") @@ -1578,7 +1585,10 @@ def _lat_lon_z_to_netCDF_time( grid_output = os.path.join(output_dir, grid_basename) if unmasked: - 
grids.write_netcdf_grid(grid_output_unmasked, Z, extent=extent) + grids.write_netcdf_grid(grid_output_unmasked, + Z, + extent=extent, + significant_digits=2) # Identify regions in the grid in the continental mask cont_mask = grids.Raster(data=continent_mask_filename.format(time)) @@ -1603,6 +1613,7 @@ def _lat_lon_z_to_netCDF_time( grid_output, Z, extent=extent, + significant_digits=2, ) logger.info(f"{zval_name} netCDF grids for {time:0.2f} Ma complete!") @@ -1717,8 +1728,9 @@ def continent_contouring_buffer_and_gap_distance_radians(time, contoured_contine ) grids.write_netcdf_grid( continent_mask_filepath.format(time), - continent_mask.astype("float"), + continent_mask.astype("i1"), extent=[-180, 180, -90, 90], + fill_value=None, ) logger.info( f"Finished building a continental mask at {time} Ma! (continent_contouring)" From d02915be264d56eea6519ce16f8b9235a9c46f70 Mon Sep 17 00:00:00 2001 From: Ben Mather Date: Tue, 17 Sep 2024 10:28:35 +1000 Subject: [PATCH 7/8] add resize parameter for reading netcdf grids --- gplately/grids.py | 100 +++++++++++++++++++++++++++++++++++----------- 1 file changed, 77 insertions(+), 23 deletions(-) diff --git a/gplately/grids.py b/gplately/grids.py index 631dcf4d..a8791821 100644 --- a/gplately/grids.py +++ b/gplately/grids.py @@ -142,7 +142,7 @@ def realign_grid(array, lons, lats): return array, lons, lats -def read_netcdf_grid(filename, return_grids=False, realign=False, resample=None): +def read_netcdf_grid(filename, return_grids=False, realign=False, resample=None, resize=None): """Read a `netCDF` (.nc) grid from a given `filename` and return its data as a `MaskedArray`. @@ -174,6 +174,10 @@ def read_netcdf_grid(filename, return_grids=False, realign=False, resample=None) If passed as `resample = (spacingX, spacingY)`, the given `netCDF` grid is resampled with these x and y resolutions. 
+ resize : tuple, optional, default=None + If passed as `resize = (resX, resY)`, the given `netCDF` grid is resized + to the number of columns (resX) and rows (resY). + Returns ------- grid_z : MaskedArray @@ -237,10 +241,18 @@ def find_label(keys, labels): cdf_lon = cdf[key_lon][:] cdf_lat = cdf[key_lat][:] - if hasattr(cdf[key_z], 'missing_value'): + # fill missing values + if hasattr(cdf[key_z], 'missing_value') and np.issubdtype(cdf_grid.dtype, np.floating): fill_value = cdf[key_z].missing_value cdf_grid[np.isclose(cdf_grid, fill_value, rtol=0.1)] = np.nan + # convert to boolean array + if np.issubdtype(cdf_grid.dtype, np.integer): + unique_grid = np.unique(cdf_grid) + if len(unique_grid) == 2: + if (unique_grid == [0,1]).all(): + cdf_grid = cdf_grid.astype(bool) + if realign: # realign longitudes to -180/180 dateline cdf_grid_z, cdf_lon, cdf_lat = realign_grid(cdf_grid, cdf_lon, cdf_lat) @@ -250,25 +262,58 @@ def find_label(keys, labels): # resample if resample is not None: spacingX, spacingY = resample - lon_grid = np.arange(cdf_lon.min(), cdf_lon.max() + spacingX, spacingX) - lat_grid = np.arange(cdf_lat.min(), cdf_lat.max() + spacingY, spacingY) - lonq, latq = np.meshgrid(lon_grid, lat_grid) - original_extent = ( - cdf_lon[0], - cdf_lon[-1], - cdf_lat[0], - cdf_lat[-1], - ) - cdf_grid_z = sample_grid( - lonq, - latq, - cdf_grid_z, - method="nearest", - extent=original_extent, - return_indices=False, - ) - cdf_lon = lon_grid - cdf_lat = lat_grid + + # don't resample if already the same resolution + dX = np.diff(cdf_lon).mean() + dY = np.diff(cdf_lat).mean() + + if spacingX != dX or spacingY != dY: + lon_grid = np.arange(cdf_lon.min(), cdf_lon.max() + spacingX, spacingX) + lat_grid = np.arange(cdf_lat.min(), cdf_lat.max() + spacingY, spacingY) + lonq, latq = np.meshgrid(lon_grid, lat_grid) + original_extent = ( + cdf_lon[0], + cdf_lon[-1], + cdf_lat[0], + cdf_lat[-1], + ) + cdf_grid_z = sample_grid( + lonq, + latq, + cdf_grid_z, + method="nearest", + 
extent=original_extent, + return_indices=False, + ) + cdf_lon = lon_grid + cdf_lat = lat_grid + + # resize + if resize is not None: + resX, resY = resize + + # don't resize if already the same shape + if resX != cdf_grid_z.shape[1] or resY != cdf_grid_z.shape[0]: + original_extent = ( + cdf_lon[0], + cdf_lon[-1], + cdf_lat[0], + cdf_lat[-1], + ) + lon_grid = np.linspace(original_extent[0], original_extent[1], resX) + lat_grid = np.linspace(original_extent[2], original_extent[3], resY) + lonq, latq = np.meshgrid(lon_grid, lat_grid) + + cdf_grid_z = sample_grid( + lonq, + latq, + cdf_grid_z, + method="nearest", + extent=original_extent, + return_indices=False, + ) + cdf_lon = lon_grid + cdf_lat = lat_grid # Fix grids with 9e36 as the fill value for nan. # cdf_grid_z.fill_value = float('nan') @@ -1501,6 +1546,7 @@ def __init__( extent="global", realign=False, resample=None, + resize=None, time=0.0, origin=None, **kwargs, @@ -1530,6 +1576,10 @@ def __init__( Optionally resample grid, pass spacing in X and Y direction as a 2-tuple e.g. resample=(spacingX, spacingY). + resize : 2-tuple, optional + Optionally resize grid to X-columns, Y-rows as a + 2-tuple e.g. resize=(resX, resY). + time : float, default: 0.0 The time step represented by the raster data. Used for raster reconstruction. 
@@ -1600,6 +1650,7 @@ def __init__( return_grids=True, realign=realign, resample=resample, + resize=resize, ) self._lons = lons self._lats = lats @@ -1621,6 +1672,9 @@ def __init__( if (not isinstance(data, str)) and (resample is not None): self.resample(*resample, inplace=True) + if (not isinstance(data, str)) and (resize is not None): + self.resize(*resize, inplace=True) + @property def time(self): """The time step represented by the raster data.""" @@ -1963,10 +2017,10 @@ def fill_NaNs(self, inplace=False, return_array=False): else: return Raster(data, self.plate_reconstruction, self.extent, self.time) - def save_to_netcdf4(self, filename): + def save_to_netcdf4(self, filename, significant_digits=None, fill_value=np.nan): """Saves the grid attributed to the `Raster` object to the given `filename` (including the ".nc" extension) in netCDF4 format.""" - write_netcdf_grid(str(filename), self.data, self.extent) + write_netcdf_grid(str(filename), self.data, self.extent, significant_digits, fill_value) def reconstruct( self, From b635a23964ac0b2a499b7048c6851683c082b809 Mon Sep 17 00:00:00 2001 From: Ben Mather Date: Tue, 17 Sep 2024 10:29:04 +1000 Subject: [PATCH 8/8] refactor writing masked and unmasked grids --- gplately/oceans.py | 38 ++++++++++++++------------------------ 1 file changed, 14 insertions(+), 24 deletions(-) diff --git a/gplately/oceans.py b/gplately/oceans.py index 3e3c81cd..f3aaaf5f 100644 --- a/gplately/oceans.py +++ b/gplately/oceans.py @@ -1480,6 +1480,7 @@ def _save_netcdf_file( # Interpolate lons, lats and zvals over a regular grid using nearest neighbour interpolation Z = tools.griddata_sphere((lons, lats), zdata, (X, Y), method="nearest") + Z = Z.astype('f4') unmasked_basename = f"{name}_grid_unmasked_{time:0.2f}_Ma.nc" grid_basename = f"{name}_grid_{time:0.2f}_Ma.nc" @@ -1496,29 +1497,24 @@ def _save_netcdf_file( grid_output_unmasked, Z, extent=extent, - significant_digits=2) + significant_digits=2, + fill_value=None) # Identify regions in 
the grid in the continental mask - cont_mask = grids.Raster(data=continent_mask_filename.format(time)) - # We need the continental mask to match the number of nodes # in the uniform grid defined above. This is important if we # pass our own continental mask to SeafloorGrid - if cont_mask.shape[1] != resX: - cont_mask.resize(resX, resY, inplace=True) + cont_mask = grids.read_netcdf_grid(continent_mask_filename.format(time), resize=(resX,resY)) # Use the continental mask - Z = np.ma.array( - grids.Raster(data=Z.astype("float")).data.data, - mask=cont_mask.data.data, - fill_value=np.nan, - ) + Z[cont_mask] = np.nan grids.write_netcdf_grid( grid_output, Z, extent=extent, significant_digits=2, + fill_value=np.nan, ) logger.info(f"Save {name} netCDF grid at {time:0.2f} Ma completed!") @@ -1573,6 +1569,7 @@ def _lat_lon_z_to_netCDF_time( # Interpolate lons, lats and zvals over a regular grid using nearest neighbour interpolation Z = tools.griddata_sphere((lons, lats), zdata, (X, Y), method="nearest") + Z = Z.astype('f4') # float32 precision unmasked_basename = f"{zval_name}_grid_unmasked_{time:0.2f}Ma.nc" grid_basename = f"{zval_name}_grid_{time:0.2f}Ma.nc" @@ -1585,35 +1582,28 @@ def _lat_lon_z_to_netCDF_time( grid_output = os.path.join(output_dir, grid_basename) if unmasked: - grids.write_netcdf_grid(grid_output_unmasked, + grids.write_netcdf_grid( + grid_output_unmasked, Z, extent=extent, - significant_digits=2) + significant_digits=2, + fill_value=None) # Identify regions in the grid in the continental mask - cont_mask = grids.Raster(data=continent_mask_filename.format(time)) - # We need the continental mask to match the number of nodes # in the uniform grid defined above. 
This is important if we # pass our own continental mask to SeafloorGrid - if cont_mask.shape[1] != resX: - cont_mask.resize(resX, resY, inplace=True) + cont_mask = grids.read_netcdf_grid(continent_mask_filename.format(time), resize=(resX,resY)) # Use the continental mask - Z = np.ma.array( - grids.Raster(data=Z.astype("float")).data.data, - mask=cont_mask.data.data, - fill_value=np.nan, - ) - - # grd = cont_mask.interpolate(X, Y) > 0.5 - # Z[grd] = np.nan + Z[cont_mask] = np.nan grids.write_netcdf_grid( grid_output, Z, extent=extent, significant_digits=2, + fill_value=np.nan, ) logger.info(f"{zval_name} netCDF grids for {time:0.2f} Ma complete!")