Apply autoformatting
schlunma committed Sep 27, 2024
1 parent 815eac2 commit 22feacf
Showing 20 changed files with 2,520 additions and 2,257 deletions.
191 changes: 112 additions & 79 deletions esmvalcore/_main.py

Large diffs are not rendered by default.

516 changes: 291 additions & 225 deletions esmvalcore/_recipe/recipe.py

Large diffs are not rendered by default.

103 changes: 55 additions & 48 deletions esmvalcore/cmor/_fixes/icon/_base_fixes.py
@@ -1,4 +1,5 @@
"""Fix base classes for ICON on-the-fly CMORizer."""

from __future__ import annotations

import logging
@@ -27,10 +28,10 @@
class IconFix(NativeDatasetFix):
"""Base class for all ICON fixes."""

CACHE_DIR = Path.home() / '.esmvaltool' / 'cache'
CACHE_DIR = Path.home() / ".esmvaltool" / "cache"
CACHE_VALIDITY = 7 * 24 * 60 * 60 # [s]; = 1 week
TIMEOUT = 5 * 60 # [s]; = 5 min
GRID_FILE_ATTR = 'grid_file_uri'
GRID_FILE_ATTR = "grid_file_uri"

def __init__(self, *args, **kwargs):
"""Initialize ICON fix."""
@@ -65,7 +66,8 @@ def _create_mesh(self, cube: Cube) -> MeshXY:
# 'vertex_of_cell'; since UGRID expects a different dimension ordering
# we transpose the cube here)
vertex_of_cell = horizontal_grid.extract_cube(
NameConstraint(var_name='vertex_of_cell'))
NameConstraint(var_name="vertex_of_cell")
)
vertex_of_cell.transpose()

# Extract start index used to name nodes from the horizontal grid
@@ -74,8 +76,8 @@ def _create_mesh(self, cube: Cube) -> MeshXY:

# Extract face coordinates from cube (in ICON jargon called 'cell
# latitude' and 'cell longitude')
face_lat = cube.coord('latitude')
face_lon = cube.coord('longitude')
face_lat = cube.coord("latitude")
face_lon = cube.coord("longitude")

# Extract node coordinates from horizontal grid
(node_lat, node_lon) = self._get_node_coords(horizontal_grid)
@@ -87,11 +89,11 @@ def _create_mesh(self, cube: Cube) -> MeshXY:

# Latitude: there might be slight numerical differences (-> check that
# the differences are very small before fixing it)
close_kwargs = {'rtol': 1e-3, 'atol': 1e-5}
close_kwargs = {"rtol": 1e-3, "atol": 1e-5}
if not np.allclose(
face_lat.bounds,
node_lat.points[conn_node_inds],
**close_kwargs, # type: ignore
face_lat.bounds,
node_lat.points[conn_node_inds],
**close_kwargs, # type: ignore
):
logger.warning(
"Latitude bounds of the face coordinate ('clat_vertices' in "
@@ -127,15 +129,15 @@ def _create_mesh(self, cube: Cube) -> MeshXY:
# Create mesh
connectivity = Connectivity(
indices=vertex_of_cell.data,
cf_role='face_node_connectivity',
cf_role="face_node_connectivity",
start_index=start_index,
location_axis=0,
)
mesh = MeshXY(
topology_dimension=2,
node_coords_and_axes=[(node_lat, 'y'), (node_lon, 'x')],
node_coords_and_axes=[(node_lat, "y"), (node_lon, "x")],
connectivities=[connectivity],
face_coords_and_axes=[(face_lat, 'y'), (face_lon, 'x')],
face_coords_and_axes=[(face_lat, "y"), (face_lon, "x")],
)

return mesh
@@ -146,7 +148,7 @@ def _get_grid_url(self, cube):
raise ValueError(
f"Cube does not contain the attribute '{self.GRID_FILE_ATTR}' "
f"necessary to download the ICON horizontal grid file:\n"
f"{cube}")
f"{cube}"
)
grid_url = cube.attributes[self.GRID_FILE_ATTR]
parsed_url = urlparse(grid_url)
grid_name = Path(parsed_url.path).name
@@ -162,21 +165,22 @@ def _get_node_coords(self, horizontal_grid):
"""
dual_area_cube = horizontal_grid.extract_cube(
NameConstraint(var_name='dual_area'))
node_lat = dual_area_cube.coord(var_name='vlat')
node_lon = dual_area_cube.coord(var_name='vlon')
NameConstraint(var_name="dual_area")
)
node_lat = dual_area_cube.coord(var_name="vlat")
node_lon = dual_area_cube.coord(var_name="vlon")

# Fix metadata
node_lat.bounds = None
node_lon.bounds = None
node_lat.var_name = 'nlat'
node_lon.var_name = 'nlon'
node_lat.standard_name = 'latitude'
node_lon.standard_name = 'longitude'
node_lat.long_name = 'node latitude'
node_lon.long_name = 'node longitude'
node_lat.convert_units('degrees_north')
node_lon.convert_units('degrees_east')
node_lat.var_name = "nlat"
node_lon.var_name = "nlon"
node_lat.standard_name = "latitude"
node_lon.standard_name = "longitude"
node_lat.long_name = "node latitude"
node_lon.long_name = "node longitude"
node_lat.convert_units("degrees_north")
node_lon.convert_units("degrees_east")

# Convert longitude to [0, 360]
self._set_range_in_0_360(node_lon)
@@ -186,10 +190,10 @@ def _get_node_coords(self, horizontal_grid):
def _get_path_from_facet(self, facet, description=None):
"""Try to get path from facet."""
if description is None:
description = 'File'
description = "File"
path = Path(os.path.expandvars(self.extra_facets[facet])).expanduser()
if not path.is_file():
new_path = self.session['auxiliary_data_dir'] / path
new_path = self.session["auxiliary_data_dir"] / path
if not new_path.is_file():
raise FileNotFoundError(
f"{description} '{path}' given by facet '{facet}' does "
@@ -238,8 +242,8 @@ def add_additional_cubes(self, cubes):
"""
facets_to_consider = [
'zg_file',
'zghalf_file',
"zg_file",
"zghalf_file",
]
for facet in facets_to_consider:
if self.extra_facets.get(facet) is None:
@@ -254,7 +258,7 @@ def add_additional_cubes(self, cubes):
def _get_grid_from_facet(self):
"""Get horizontal grid from user-defined facet `horizontal_grid`."""
grid_path = self._get_path_from_facet(
'horizontal_grid', 'Horizontal grid file'
"horizontal_grid", "Horizontal grid file"
)
grid_name = grid_path.name

@@ -297,7 +301,7 @@ def _get_grid_from_cube_attr(self, cube: Cube) -> Cube:
def _get_grid_from_rootpath(self, grid_name: str) -> CubeList | None:
"""Try to get grid from the ICON rootpath."""
glob_patterns: list[Path] = []
for data_source in _get_data_sources('ICON'):
for data_source in _get_data_sources("ICON"):
glob_patterns.extend(
data_source.get_glob_patterns(**self.extra_facets)
)
@@ -334,8 +338,10 @@ def _get_downloaded_grid(self, grid_url: str, grid_name: str) -> CubeList:
logger.debug("Using cached ICON grid file '%s'", grid_path)
valid_cache = True
else:
logger.debug("Existing cached ICON grid file '%s' is outdated",
grid_path)
logger.debug(
"Existing cached ICON grid file '%s' is outdated",
grid_path,
)

# File is not present in cache or too old -> download it
if not valid_cache:
@@ -347,12 +353,12 @@ def _get_downloaded_grid(self, grid_url: str, grid_name: str) -> CubeList:
tmp_path,
)
with requests.get(
grid_url,
stream=True,
timeout=self.TIMEOUT,
grid_url,
stream=True,
timeout=self.TIMEOUT,
) as response:
response.raise_for_status()
with tmp_path.open('wb') as file:
with tmp_path.open("wb") as file:
copyfileobj(response.raw, file)
shutil.move(tmp_path, grid_path)
logger.info(
@@ -403,7 +409,7 @@ def get_horizontal_grid(self, cube):
file.
"""
if self.extra_facets.get('horizontal_grid') is not None:
if self.extra_facets.get("horizontal_grid") is not None:
grid = self._get_grid_from_facet()
else:
grid = self._get_grid_from_cube_attr(cube)
@@ -444,9 +450,9 @@ def get_mesh(self, cube):
"""
# If specified by the user, use `horizontal_grid` facet to determine
# grid name; otherwise, use the `grid_file_uri` attribute of the cube
if self.extra_facets.get('horizontal_grid') is not None:
if self.extra_facets.get("horizontal_grid") is not None:
grid_path = self._get_path_from_facet(
'horizontal_grid', 'Horizontal grid file'
"horizontal_grid", "Horizontal grid file"
)
grid_name = grid_path.name
else:
@@ -474,32 +480,33 @@ def _get_start_index(horizontal_grid):
"""
vertex_index = horizontal_grid.extract_cube(
NameConstraint(var_name='vertex_index'))
NameConstraint(var_name="vertex_index")
)
return np.int32(np.min(vertex_index.data))

@staticmethod
def _load_cubes(path: Path | str) -> CubeList:
"""Load cubes and ignore certain warnings."""
with warnings.catch_warnings():
warnings.filterwarnings(
'ignore',
"ignore",
message="Ignoring netCDF variable .* invalid units .*",
category=UserWarning,
module='iris',
module="iris",
) # iris < 3.8
warnings.filterwarnings(
'ignore',
"ignore",
message="Ignoring invalid units .* on netCDF variable .*",
category=UserWarning,
module='iris',
module="iris",
) # iris >= 3.8
warnings.filterwarnings(
'ignore',
"ignore",
message="Failed to create 'height' dimension coordinate: The "
"'height' DimCoord bounds array must be strictly "
"monotonic.",
"'height' DimCoord bounds array must be strictly "
"monotonic.",
category=UserWarning,
module='iris',
module="iris",
)
cubes = iris.load(path)
return cubes
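The hunks above in _get_downloaded_grid only re-wrap its download logic; the behaviour is unchanged: a cached grid file is reused while it is younger than CACHE_VALIDITY, otherwise it is streamed into a temporary file and then moved into the cache. A minimal standalone sketch of that pattern follows, using only requests and the standard library; the cache directory, file naming, and constants here are simplified stand-ins rather than the exact ESMValCore values.

import shutil
import time
from pathlib import Path

import requests

# Hypothetical stand-ins for the class constants used by IconFix
CACHE_DIR = Path.home() / ".cache" / "icon_grids"
CACHE_VALIDITY = 7 * 24 * 60 * 60  # [s]; reuse cached files for one week
TIMEOUT = 5 * 60  # [s]; give up on the download after 5 minutes


def get_cached_file(url: str) -> Path:
    """Return a local copy of url, downloading it only if the cached copy is stale."""
    CACHE_DIR.mkdir(parents=True, exist_ok=True)
    target = CACHE_DIR / Path(url).name

    # Reuse the cached file if it exists and is younger than CACHE_VALIDITY
    if target.exists() and time.time() - target.stat().st_mtime < CACHE_VALIDITY:
        return target

    # Stream the download into a temporary file first, then move it into the
    # cache, so an interrupted download never leaves a truncated file behind
    tmp_path = target.with_suffix(target.suffix + ".part")
    with requests.get(url, stream=True, timeout=TIMEOUT) as response:
        response.raise_for_status()
        with tmp_path.open("wb") as file:
            shutil.copyfileobj(response.raw, file)
    shutil.move(tmp_path, target)
    return target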
6 changes: 3 additions & 3 deletions esmvalcore/config/__init__.py
@@ -16,7 +16,7 @@
from ._config_object import CFG, Config, Session

__all__ = (
'CFG',
'Config',
'Session',
"CFG",
"Config",
"Session",
)
46 changes: 25 additions & 21 deletions esmvalcore/config/_config.py
@@ -1,4 +1,5 @@
"""Functions dealing with config-developer.yml and extra facets."""

from __future__ import annotations

import collections.abc
@@ -43,7 +44,7 @@ def _load_extra_facets(project, extra_facets_dir):
config_file_paths = config_path.glob(f"{project.lower()}-*.yml")
for config_file_path in sorted(config_file_paths):
logger.debug("Loading extra facets from %s", config_file_path)
with config_file_path.open(encoding='utf-8') as config_file:
with config_file_path.open(encoding="utf-8") as config_file:
config_piece = yaml.safe_load(config_file)
if config_piece:
_deep_update(config, config_piece)
Expand All @@ -54,7 +55,7 @@ def get_extra_facets(dataset, extra_facets_dir):
"""Read files with additional variable information ("extra facets")."""
extra_facets_dir = tuple(extra_facets_dir)
project_details = _load_extra_facets(
dataset.facets['project'],
dataset.facets["project"],
extra_facets_dir,
)

Expand All @@ -76,10 +77,11 @@ def pattern_filter(patterns, name):
return [pat for pat in patterns if fnmatch.fnmatchcase(name, pat)]

extra_facets = {}
for dataset_ in pattern_filter(project_details, dataset['dataset']):
for mip_ in pattern_filter(project_details[dataset_], dataset['mip']):
for var in pattern_filter(project_details[dataset_][mip_],
dataset['short_name']):
for dataset_ in pattern_filter(project_details, dataset["dataset"]):
for mip_ in pattern_filter(project_details[dataset_], dataset["mip"]):
for var in pattern_filter(
project_details[dataset_][mip_], dataset["short_name"]
):
facets = project_details[dataset_][mip_][var]
extra_facets.update(facets)

Expand All @@ -88,23 +90,25 @@ def pattern_filter(patterns, name):

def load_config_developer(cfg_file):
"""Read the developer's configuration file."""
with open(cfg_file, 'r', encoding='utf-8') as file:
with open(cfg_file, "r", encoding="utf-8") as file:
cfg = yaml.safe_load(file)

if 'obs4mips' in cfg:
if "obs4mips" in cfg:
logger.warning(
"Correcting capitalization, project 'obs4mips'"
" should be written as 'obs4MIPs' in %s", cfg_file)
cfg['obs4MIPs'] = cfg.pop('obs4mips')
" should be written as 'obs4MIPs' in %s",
cfg_file,
)
cfg["obs4MIPs"] = cfg.pop("obs4mips")

for project, settings in cfg.items():
for site, drs in settings.get('input_dir', {}).items():
for site, drs in settings.get("input_dir", {}).items():
# Since v2.8, 'version' can be used instead of 'latestversion'
if isinstance(drs, list):
drs = [d.replace('{latestversion}', '{version}') for d in drs]
drs = [d.replace("{latestversion}", "{version}") for d in drs]
else:
drs = drs.replace('{latestversion}', '{version}')
settings['input_dir'][site] = drs
drs = drs.replace("{latestversion}", "{version}")
settings["input_dir"][site] = drs
CFG[project] = settings

read_cmor_tables(cfg_file)
@@ -119,8 +123,8 @@ def get_project_config(project):

def get_institutes(variable):
"""Return the institutes given the dataset name in CMIP6."""
dataset = variable['dataset']
project = variable['project']
dataset = variable["dataset"]
project = variable["project"]
try:
return CMOR_TABLES[project].institutes[dataset]
except (KeyError, AttributeError):
@@ -129,9 +133,9 @@ def get_institutes(variable):

def get_activity(variable):
"""Return the activity given the experiment name in CMIP6."""
project = variable['project']
project = variable["project"]
try:
exp = variable['exp']
exp = variable["exp"]
if isinstance(exp, list):
return [CMOR_TABLES[project].activities[value][0] for value in exp]
return CMOR_TABLES[project].activities[exp][0]
@@ -144,8 +148,8 @@ def get_ignored_warnings(project: FacetValue, step: str) -> None | list:
if project not in CFG:
return None
project_cfg = CFG[project]
if 'ignore_warnings' not in project_cfg:
if "ignore_warnings" not in project_cfg:
return None
if step not in project_cfg['ignore_warnings']:
if step not in project_cfg["ignore_warnings"]:
return None
return project_cfg['ignore_warnings'][step]
return project_cfg["ignore_warnings"][step]