diff --git a/.circleci/config.yml b/.circleci/config.yml index 975a1ae50a..4be031b9a3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -2,8 +2,8 @@ version: 2.1 orbs: - coverage-reporter: codacy/coverage-reporter@13.16.5 - codecov: codecov/codecov@3.2.5 + coverage-reporter: codacy/coverage-reporter@14.0.0 + codecov: codecov/codecov@4.1.0 commands: check_changes: @@ -28,7 +28,6 @@ commands: mkdir -p test-reports . /opt/conda/etc/profile.d/conda.sh conda activate esmvaltool - flake8 -j 4 pytest -n 4 --junitxml=test-reports/report.xml esmvaltool version - store_test_results: @@ -48,8 +47,15 @@ commands: test_installation_from_source: parameters: extra: + description: pip "extra"s to install type: string + default: "test" flags: + description: pip install flags + type: string + default: "" + upstream_packages: + description: List of packages that will be installed with pip. type: string default: "" steps: @@ -74,20 +80,25 @@ commands: - r-yaml - ncl " >> environment.yml + # Installation of development version of packages requires compilers + if [[ "<< parameters.upstream_packages >>" ]]; then + echo " - compilers" >> environment.yml + fi # Install . /opt/conda/etc/profile.d/conda.sh mkdir /logs - mamba env create >> /logs/conda.txt 2>&1 + mamba env create |& tee /logs/conda.txt + git stash # Restore repository state to get clean version number. conda activate esmvaltool - pip install << parameters.flags >> ".[<<parameters.extra>>]" > /logs/install.txt 2>&1 + pip install << parameters.flags >> ".[<<parameters.extra>>]" << parameters.upstream_packages >> |& tee /logs/install.txt - run: name: Log versions command: | . /opt/conda/etc/profile.d/conda.sh conda activate esmvaltool - dpkg -l > /logs/versions.txt - conda env export > /logs/environment.yml - pip freeze > /logs/requirements.txt + dpkg -l | tee /logs/versions.txt + conda env export | tee /logs/environment.yml + pip freeze | tee /logs/requirements.txt - test_and_report - save_cache: key: install-<< parameters.extra >>-{{ .Branch }}-{{ checksum "cache_key.txt" }} @@ -116,7 +127,7 @@ jobs: .
/opt/conda/etc/profile.d/conda.sh mkdir /logs conda activate esmvaltool - pip install .[test] > /logs/install.txt 2>&1 + pip install .[test] |& tee -a /logs/install.txt - test_and_report - save_cache: key: test-{{ .Branch }}-{{ checksum "cache_key.txt" }} @@ -141,8 +152,7 @@ jobs: - image: condaforge/mambaforge resource_class: large steps: - - test_installation_from_source: - extra: test + - test_installation_from_source test_installation_from_source_develop_mode: # Test development installation @@ -154,6 +164,25 @@ jobs: extra: develop flags: "--editable" + test_with_upstream_developments: + # Test with development versions of upstream packages + docker: + - image: condaforge/mambaforge + resource_class: large + steps: + - test_installation_from_source: + upstream_packages: >- + git+https://github.com/esgf/esgf-pyclient + git+https://github.com/euro-cordex/py-cordex + git+https://github.com/SciTools/cartopy + git+https://github.com/SciTools/cf-units + git+https://github.com/SciTools/iris + git+https://github.com/SciTools/iris-grib + git+https://github.com/SciTools/nc-time-axis + git+https://github.com/SciTools-incubator/iris-esmf-regrid + git+https://github.com/SciTools-incubator/python-stratify + git+https://github.com/Toblerity/Fiona + test_installation_from_conda: # Test conda package installation working_directory: /esmvaltool @@ -167,14 +196,13 @@ jobs: set -x # Install prerequisites mkdir /logs - # conda update -y conda > /logs/conda.txt 2>&1 # Create and activate conda environment mamba create -y --name esmvaltool 'python=3.11' set +x; conda activate esmvaltool; set -x # Install mamba install -y esmvalcore # Log versions - conda env export > /logs/environment.yml + conda env export | tee /logs/environment.yml # Test installation esmvaltool version @@ -194,9 +222,9 @@ jobs: conda activate esmvaltool pip install .[doc] # Log versions - dpkg -l > /logs/versions.txt - conda env export > /logs/environment.yml - pip freeze > /logs/requirements.txt + dpkg -l | tee /logs/versions.txt + conda env export | tee /logs/environment.yml + pip freeze | tee /logs/requirements.txt # Test building documentation MPLBACKEND=Agg sphinx-build -W doc doc/build - store_artifacts: @@ -206,8 +234,9 @@ workflows: commit: jobs: - run_tests - - test_installation_from_source_test_mode - test_installation_from_source_develop_mode + - test_installation_from_source_test_mode + - test_with_upstream_developments nightly: triggers: @@ -218,8 +247,9 @@ workflows: only: - main jobs: - - run_tests - - test_installation_from_source_test_mode - build_documentation - - test_installation_from_source_develop_mode + - run_tests - test_installation_from_conda + - test_installation_from_source_develop_mode + - test_installation_from_source_test_mode + - test_with_upstream_developments diff --git a/.editorconfig b/.editorconfig index 97c8ef6e5a..ddab414f89 100644 --- a/.editorconfig +++ b/.editorconfig @@ -27,4 +27,3 @@ indent_size = 2 [*.{md,Rmd}] trim_trailing_whitespace = false - diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 1c8122dcbe..1e5f05f0eb 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,3 +1 @@ -esmvalcore/cmor @jvegasbsc .github/workflows @valeriupredoi - diff --git a/.github/workflows/create-condalock-file.yml b/.github/workflows/create-condalock-file.yml index 1d425591e5..5e1eaec889 100644 --- a/.github/workflows/create-condalock-file.yml +++ b/.github/workflows/create-condalock-file.yml @@ -71,8 +71,6 @@ jobs: run: | esmvaltool --help esmvaltool version - - name: Run flake8 - run: flake8 
- name: Run pytests run: pytest -n 2 -m "not installation" # Automated PR diff --git a/.github/workflows/install-from-conda.yml b/.github/workflows/install-from-conda.yml index 951a91328a..88e78619ea 100644 --- a/.github/workflows/install-from-conda.yml +++ b/.github/workflows/install-from-conda.yml @@ -39,7 +39,7 @@ jobs: runs-on: "ubuntu-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] # fail-fast set to False allows all other tests # in the workflow to run regardless of any fail fail-fast: false @@ -75,7 +75,7 @@ jobs: runs-on: "macos-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] architecture: ["x64"] # need to force Intel, arm64 builds have issues fail-fast: false name: OSX Python ${{ matrix.python-version }} diff --git a/.github/workflows/install-from-condalock-file.yml b/.github/workflows/install-from-condalock-file.yml index 3838387fa8..44a7839b55 100644 --- a/.github/workflows/install-from-condalock-file.yml +++ b/.github/workflows/install-from-condalock-file.yml @@ -29,7 +29,7 @@ jobs: runs-on: "ubuntu-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] fail-fast: false name: Linux Python ${{ matrix.python-version }} steps: @@ -51,7 +51,6 @@ jobs: - run: pip install -e .[develop] - run: esmvaltool --help - run: esmvaltool version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/version.txt - - run: flake8 - run: pytest -n 2 -m "not installation" - name: Upload artifacts if: ${{ always() }} # upload artifacts even if fail diff --git a/.github/workflows/install-from-pypi.yml b/.github/workflows/install-from-pypi.yml index cd49eed089..1e326c89eb 100644 --- a/.github/workflows/install-from-pypi.yml +++ b/.github/workflows/install-from-pypi.yml @@ -39,7 +39,7 @@ jobs: runs-on: "ubuntu-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] # fail-fast set to False allows all other tests # in the workflow to run regardless of any fail fail-fast: false @@ -77,7 +77,7 @@ jobs: runs-on: "macos-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] architecture: ["x64"] # need to force Intel, arm64 builds have issues fail-fast: false name: OSX Python ${{ matrix.python-version }} diff --git a/.github/workflows/install-from-source.yml b/.github/workflows/install-from-source.yml index a4aa9b8e01..7cb8c7d629 100644 --- a/.github/workflows/install-from-source.yml +++ b/.github/workflows/install-from-source.yml @@ -37,7 +37,7 @@ jobs: runs-on: "ubuntu-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] fail-fast: false name: Linux Python ${{ matrix.python-version }} steps: @@ -74,7 +74,7 @@ jobs: runs-on: "macos-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] architecture: ["x64"] # need to force Intel, arm64 builds have issues fail-fast: false name: OSX Python ${{ matrix.python-version }} diff --git a/.github/workflows/run-tests-monitor.yml b/.github/workflows/run-tests-monitor.yml index 561516a3e2..160bdd2850 100644 --- a/.github/workflows/run-tests-monitor.yml +++ b/.github/workflows/run-tests-monitor.yml @@ -22,7 +22,7 @@ jobs: runs-on: "ubuntu-latest" strategy: matrix: - python-version: ["3.9", 
"3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] fail-fast: false name: Linux Python ${{ matrix.python-version }} steps: @@ -55,7 +55,7 @@ jobs: runs-on: "macos-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] architecture: ["x64"] # need to force Intel, arm64 builds have issues fail-fast: false name: OSX Python ${{ matrix.python-version }} diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 9cf1d6308b..73e15c100c 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -39,7 +39,7 @@ jobs: runs-on: "ubuntu-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] fail-fast: false name: Linux Python ${{ matrix.python-version }} steps: @@ -60,7 +60,9 @@ jobs: - run: conda list - run: pip install -e .[develop] 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/install.txt - run: conda list - - run: flake8 + - run: | + pre-commit install + pre-commit run -a - run: pytest -n 2 -m "not installation" 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/test_report.txt - name: Upload artifacts if: ${{ always() }} # upload artifacts even if fail @@ -73,7 +75,7 @@ jobs: runs-on: "macos-latest" strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] architecture: ["x64"] # need to force Intel, arm64 builds have issues fail-fast: false name: OSX Python ${{ matrix.python-version }} @@ -97,7 +99,9 @@ jobs: - run: conda list - run: pip install -e .[develop] 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/install.txt - run: conda list - - run: flake8 + - run: | + pre-commit install + pre-commit run -a - run: pytest -n 2 -m "not installation" 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/test_report.txt - name: Upload artifacts if: ${{ always() }} # upload artifacts even if fail diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2b01ae61c0..6dac9fed16 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,14 +1,19 @@ # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks --- +ci: + autofix_prs: false + exclude: | (?x) ^doc/conf.py| ^esmvalcore/cmor/tables/| - ^esmvalcore/preprocessor/ne_masks/ + ^esmvalcore/preprocessor/ne_masks/| + ^esmvalcore/preprocessor/shapefiles/ + repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.6.0 hooks: - id: check-added-large-files - id: check-ast @@ -19,33 +24,22 @@ repos: - id: trailing-whitespace args: [--markdown-linebreak-ext=md] - repo: https://github.com/adrienverge/yamllint - rev: 'v1.31.0' + rev: 'v1.35.1' hooks: - id: yamllint - repo: https://github.com/codespell-project/codespell - rev: 'v2.2.4' + rev: 'v2.3.0' hooks: - id: codespell - - repo: https://github.com/PyCQA/isort - rev: '5.12.0' + additional_dependencies: [tomli] # required for Python 3.10 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.6.8" hooks: - - id: isort - - repo: https://github.com/pre-commit/mirrors-yapf - rev: 'v0.32.0' - hooks: - - id: yapf - additional_dependencies: - - 'toml' - - repo: https://github.com/myint/docformatter - rev: 'v1.6.5' - hooks: - - id: docformatter - - repo: https://github.com/pycqa/flake8 - rev: '6.0.0' - hooks: - - id: flake8 + - id: ruff + args: [--fix] + - id: ruff-format - repo: 
https://github.com/pre-commit/mirrors-mypy - rev: 'v1.2.0' + rev: 'v1.11.2' hooks: - id: mypy additional_dependencies: diff --git a/.prospector.yml b/.prospector.yml index f1272ec938..51508847c2 100644 --- a/.prospector.yml +++ b/.prospector.yml @@ -10,16 +10,21 @@ test-warnings: true member-warnings: false pyroma: - run: true + run: true pep8: - full: true + full: true + # ignore rules that conflict with ruff formatter + # E203: https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#slices + # E501: https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules + # W503: https://pycodestyle.pycqa.org/en/latest/intro.html#error-codes + disable: ['E203', 'E501', 'W503'] mypy: run: true pep257: - # disable rules that are allowed by the numpy convention - # see https://github.com/PyCQA/pydocstyle/blob/master/src/pydocstyle/violations.py - # and http://pydocstyle.readthedocs.io/en/latest/error_codes.html - disable: ['D107', 'D203', 'D212', 'D213', 'D402', 'D413', 'D416'] + # disable rules that are allowed by the numpy convention + # see https://github.com/PyCQA/pydocstyle/blob/master/src/pydocstyle/violations.py + # and http://pydocstyle.readthedocs.io/en/latest/error_codes.html + disable: ['D107', 'D203', 'D212', 'D213', 'D402', 'D413', 'D416'] diff --git a/.yamllint b/.yamllint new file mode 100644 index 0000000000..5192dee89a --- /dev/null +++ b/.yamllint @@ -0,0 +1,9 @@ +--- + +extends: default + +rules: + line-length: + level: warning + max: 120 + octal-values: enable diff --git a/.zenodo.json b/.zenodo.json index bb00d5471d..f0997c6467 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -204,6 +204,11 @@ { "affiliation": "DLR, Germany", "name": "Cammarano, Diego" + }, + { + "affiliation": "ACCESS-NRI, Australia", + "name": "Yousong, Zeng", + "orcid": "0000-0002-8385-5367" } ], "description": "ESMValCore: A community tool for pre-processing data from Earth system models in CMIP and running analysis scripts.", diff --git a/CITATION.cff b/CITATION.cff index 562e044ecb..4d3da6e4c7 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -208,13 +208,18 @@ authors: affiliation: "DLR, Germany" family-names: Cammarano given-names: Diego + - + affiliation: "ACCESS-NRI, Australia" + family-names: Yousong + given-names: Zeng + orcid: "https://orcid.org/0000-0002-8385-5367" cff-version: 1.2.0 -date-released: 2024-05-08 +date-released: 2024-07-03 doi: "10.5281/zenodo.3387139" license: "Apache-2.0" message: "If you use this software, please cite it using these metadata." repository-code: "https://github.com/ESMValGroup/ESMValCore/" title: ESMValCore -version: "v2.11.0rc1" +version: "v2.11.0" ... diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 76efeb2eca..26e110735f 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -6,7 +6,7 @@ In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, -level of experience, education, socio-economic status, nationality, personal +level of experience, education, socioeconomic status, nationality, personal appearance, race, religion, or sexual identity and orientation. 
## Our Standards diff --git a/NOTICE b/NOTICE index 5bf9f5d0cd..5e413cd5ba 100644 --- a/NOTICE +++ b/NOTICE @@ -50,5 +50,3 @@ In addition to using the Software, we encourage the community to join the Softwa To join the ESMValTool Development Team, please contact Dr. Birgit Hassler (birgit.hassler@dlr.de) and Dr. Axel Lauer (axel.lauer@dlr.de). ========================================== - - diff --git a/README.md b/README.md index b256b0be40..f3e5c693bf 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ [![Anaconda-Server Badge](https://img.shields.io/conda/vn/conda-forge/ESMValCore?color=blue&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/esmvalcore) [![Github Actions Test](https://github.com/ESMValGroup/ESMValCore/actions/workflows/run-tests.yml/badge.svg)](https://github.com/ESMValGroup/ESMValCore/actions/workflows/run-tests.yml) -![esmvaltoollogo](https://raw.githubusercontent.com/ESMValGroup/ESMValCore/main/doc/figures/ESMValTool-logo-2.png) +![esmvaltoollogo](https://raw.githubusercontent.com/ESMValGroup/ESMValCore/main/doc/figures/ESMValTool-logo-2-glow.png) ESMValCore: core functionalities for the ESMValTool, a community diagnostic and performance metrics tool for routine evaluation of Earth System Models diff --git a/conda-linux-64.lock b/conda-linux-64.lock index 3888d61b99..74039e033f 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -1,66 +1,65 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 69fbe724ec4bb47187d3f262df186f510282059d1357920abda019f36fa711f5 +# input_hash: 41b16d5b0336bab6afebd9904d78b10ab3eb64f882e3c0e4b72f7c58a2e4e4b4 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.2.2-hbcca054_0.conda#2f4327a1cbe7f022401b236e915a5fef +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_2.conda#cbbe59391138ea5ad3658c76912e147f -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_1.conda#33b7851c39c25da14f6a233a8ccbeeca -https://conda.anaconda.org/conda-forge/linux-64/libboost-headers-1.85.0-ha770c72_1.conda#012455a6eddcbf487ef0ddd1715f0b80 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-hc0a3c3a_7.conda#53ebd4c833fa01cb2c6353e99f905406 -https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.2-ha770c72_0.conda#8c924f0b7f3e064b1c954a08e7c32fba +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda#b80f2f396ca2c28b8c14c437a4ed1e74 +https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.4-ha770c72_0.conda#61c94057aaa5ae6145137ce1fddb2c04 https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda#d8d7293c5b37f39b2ac32940621c6592 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-4_cp312.conda#dccc2d142812964fcc6abdc97b672dff 
-https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda#161081fc7cec0bfda0d86d7cb595f8d8 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda#0424ae29b104430108f5218a66db7260 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda#8bfdead4e0fff0383ae4c9c50d0531bd https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h77fa898_7.conda#abf3fec87c2563697defa759dec3d639 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda#23c255b008c4f2ae008f81edcabaca89 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h77fa898_7.conda#72ec1b1b04c4d15d4204ece1ecea5978 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.19-h4ab18f5_0.conda#c6dedd5eab2236f4abb59ade9fb7fd44 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.28.1-hd590300_0.conda#dcde58ff9a1f30b0037a2315d1846d1f -https://conda.anaconda.org/conda-forge/linux-64/fmt-10.2.1-h00ab1b0_0.conda#35ef8bc24bd34074ebae3c943d551728 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda#002ef4463dd1e2b44a94a4ace468f5d2 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda#1b53af320b24547ce0fb8196d2604542 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda#59f4c43bb1b5ef1c71946ff2cbf59524 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda#1efc0ad219877a73ef977af7dbb51f17 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda#10a0cef64b784d6ab6da50ebca4e984d +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda#9dbb9699ea467983ba8a4ba89b08b066 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda#4d638782050ab6faa27275bed57e9b4e +https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024b-hb9d3cd8_0.conda#db124840386e1f842f93372897d1b857 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda#f301eb944d297fc879c441fffe461d8a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda#5e08c385a1b8a79b52012b74653bbb99 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda#bfe6623096906d2502c78ccdbfc3bc7a +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda#eadcc12bedac44f13223a2909c0e5bcc +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda#0d3c60291342c0c025db231353376dfb +https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda#6595440079bed734b113de44ffd3cd0a https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.1-h59595ed_0.conda#8c0f4f71f5a59ceb0c6fa9f51501066d 
-https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-he1b5a44_1004.tar.bz2#cddaf2c63ea4a5901cf09524c490ecdc https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda#3bf7b9fd5a7136126e0234db4b87c8b6 -https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda#f87c7b7c2cb45f323ffbce941c78ab7c -https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff -https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h7ab15ed_0.conda#9961b1f100c3b6852bd97c9233d06979 +https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h1220068_1.conda#f8f0f0c4338bad5c34a4e9e11460481d https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 -https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_h59595ed_0.conda#682bdbe046a68f749769b492f3625c5c -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda#aec6c91c7371c26392a06708a73c70e5 -https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2#c965a5aa0d5c1c37ffc62dff36e28400 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.20-hd590300_0.conda#8e88f9389f1165d7c0936fe40d9a9a79 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.21-h4bc722e_0.conda#36ce76665bf67f5aac36be7a0d21b7f3 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda#e7ba12deb7020dd080c6c70e7b6f6a3d +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-hca663fb_7.conda#c0bd771f09a326fdcd95a60b617795bf +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda#591e631bc1ae62c64f2ab4f66178c097 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 -https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.18-h36c2ea0_1.tar.bz2#c3788462a6fbddafdb413a9f9053e58d +https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda#a587892d3c13b6621a6091be690dbca2 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda#bd2598399a70bb86d8218e95548d735e +https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2#ede4266dc02e875fe1ea77b25dd43747 
https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h4ab18f5_6.conda#27329162c0dc732bcf67a4e0cd488125 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda#57d7dc60e9325e3de37ff8dffd18e814 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda#ec7398d21e2651e0dcb0044d03b9a339 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda#fcea371545eda051b6deafb24889fc69 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.0-h4ab18f5_3.conda#12ea6d0d4ed54530eaed18e4835c1f7c -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 -https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.0-hdb0a2a9_1.conda#843bbb8ace1d64ac50d64639ff38b014 -https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024a-h3f72095_0.conda#32146e34aaec3745a08b6f49af3f41b0 -https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.2-h7b32b05_0.conda#daf6322364fe6fc46c515d4d3d0051c2 +https://conda.anaconda.org/conda-forge/linux-64/xorg-inputproto-2.3.2-h7f98852_1002.tar.bz2#bcd1b3396ec6960cbc1d2855a9e60b2b https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda#b462a33c0be1421532f28bfe8f4a7514 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 @@ -71,67 +70,71 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.14-h88a6e22_1.conda#7ed63b0e816dd1635903506ef5d2c079 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.18-h83b837d_6.conda#3e572eacd0ce99a59e1bb9c260ad5b20 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.16-h83b837d_2.conda#f40c698b4ea90f7fedd187c6639c818b -https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h83b837d_6.conda#7995cb937bdac5913c8904fed6b3729d -https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.2-h59595ed_0.conda#53fb86322bdb89496d7579fe3f02fd61 -https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.0-hed5481d_0.conda#a9ea19c48e11754899299f8123070f4e 
+https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-hc2627b9_9.conda#b1ba84c5cb2e6fe5f5cd1101097a4592 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda#c63b5e52939e795ba8d26e35d767a843 +https://conda.anaconda.org/conda-forge/linux-64/fmt-11.0.2-h434a139_0.conda#995f7e13598497691c1dc476d889bc04 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.2-he02047a_1.conda#aab9195bc018b82dc77a84584b36cce9 +https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-he1b5a44_1004.tar.bz2#cddaf2c63ea4a5901cf09524c490ecdc +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda#f87c7b7c2cb45f323ffbce941c78ab7c https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda#f07002e225d7a60a694d42a7bf5ff53f -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda#5fc11c6020d421960607d821310fcd4d +https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b189310083baabfb622af68fd9d3ae3 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f +https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_he02047a_1.conda#c48fc56ec03229f294176923c3265c05 +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa +https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2#c965a5aa0d5c1c37ffc62dff36e28400 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_7.conda#1b84f26d9f4f6026e179e7805d5a15cd -https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-h01aab08_1018.conda#3eb5f16bcc8a02892199aa63555c731f +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda#16cec94c5992d7f42ae3f9fa8b25df8d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda#700ac6ea6d53d5510591c4344d5c989a https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.43-h2797004_0.conda#009981dd9cfcaa4dbfa25ffaed86bcae -https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-h08a7969_0.conda#6945825cebd2aeb16af4c69d97c32c13 -https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda#41c69fba59d495e8cf5ffda48a607e35 -https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h8917695_15.conda#20c3c14bc491f30daecaa6f73e2223ae -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.45.3-h2797004_0.conda#b3316cbe90249da4f8e84cd66e1cc55b +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda#36f79405ab16bf271edb55b213836dac https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe -https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 -https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-hc051c1a_0.conda#5d801a4906adc712d480afc362623b59 
+https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda#d0ed81c4591775b70384f4cc78e05cd1 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda#3601598f0db0470af28985e3e7ad0158 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b -https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.43-hcad00b1_0.conda#8292dea9e022d9610a11fce5e0896ed8 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 +https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/s2n-1.4.15-he19d79f_0.conda#4c7cc3fa1d2c5a63f9e2b1e2980a1672 -https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.13.0-hd2e6256_0.conda#18f9348f064632785d54dbd1db9344bb +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda#6b7dcc7349efd123d493d2dbe85a045f https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 +https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 +https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2#65ad6e1eb4aed2b0611855aff05e04f6 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h4ab18f5_6.conda#559d338a4234c2ad6e676f460a093e67 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda#9653f1bf3766164d0e65fa723cabbc54 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.8-h21d4f22_5.conda#f9dd6e8a46f55f49eae5380d3b922b71 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-hc2324a3_1.conda#11d76bee958b1989bd1ac6ee7372ea6d -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda#39f910d205726805a958da408ca194ba +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h235a6dd_1.conda#c05358e3a231195f7f0b3f592078bb0c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.8-h5e77a74_2.conda#b75afaaf2a4ea0e1137ecb35262b8ed4 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda#cd95826dbd331ed1be26bdf401432844 -https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda#32ddb97f897740641d8d46a829ce1704 
-https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.2-hf974151_0.conda#72724f6a78ecb15559396966226d5838 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_h413a1c8_0.conda#a356024784da6dfd4683dc5ecf45b155 -https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.19.0-hb90f79a_1.conda#8cdb7d41faa0260875ba92414c487e2d -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h1dd3fc0_3.conda#66f03896ffbe1a110ffda05c7a856504 -https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda#e71f31f8cfb0a91439f2086fc8aa0461 -https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.5-h0ab5242_0.conda#557396140c71eba588e96d597e0c61aa -https://conda.anaconda.org/conda-forge/linux-64/nss-3.100-hca3bf56_0.conda#949c4a82290ee58b3c970cef4bcfd4ad -https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.1-h17fec99_1.conda#3bf65f0d8e7322a1cfe8b670fa35ec81 -https://conda.anaconda.org/conda-forge/linux-64/python-3.12.3-hab00c5b_0_cpython.conda#2540b74d304f71d3e89c81209db4db84 -https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda#8f70e36268dea8eb666ef14c29bd3cda -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.45.3-h2c6b66d_0.conda#be7d70f2db41b674733667bdd69bd000 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.9-h8ee46fc_0.conda#077b6e8ad6a3ddb741fce2496dd01bec -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda#def531a3ac77b7fb8c21d17bb5d0badb +https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda#ff862eebdfeb2fd048ae9dc92510baca +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda#b0143a3e98136a680b728fdf9b42a258 +https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda#e8c7620cc49de0c6a2349b6dd6e39beb +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda#ae05ece66d3924ac3d48b4aa3fa96cec +https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-h08a7969_0.conda#6945825cebd2aeb16af4c69d97c32c13 +https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda#41c69fba59d495e8cf5ffda48a607e35 +https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda#3d9f3a2e5d7213c34997e4464d2f938c +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h46a8edc_4.conda#a7e3a62981350e232e0e7345b5aea580 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda#08a9265c637230c37cb1be4a6cad4536 +https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda#4474532a312b2245c5c77f1176989b46 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.104-hd34e28f_0.conda#0664e59f6937a660eba9f3d2f9123fa8 +https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda#9c56c4df45f6571b13111d8df2448692 +https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.14.1-hed91bc2_1.conda#909188c8979846bac8e586908cf1ca6a +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.1-h9eae976_0.conda#b2b3e737da0ae347e16ef1970a5d3f14 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.9-hb711507_1.conda#4a6d410296d7e39f00bacdee7df046e9 
+https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_0.conda#7d78a232029458d0077ede6cda30ed0c https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 -https://conda.anaconda.org/conda-forge/linux-64/astroid-3.2.2-py312h7900ff3_0.conda#2acaebd0ea2aeaf38e996bda79545c0b +https://conda.anaconda.org/conda-forge/linux-64/astroid-3.2.4-py312h7900ff3_0.conda#06f82f9dc65aa8ebd14bf3cf2b34dad2 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 -https://conda.anaconda.org/conda-forge/noarch/attrs-23.2.0-pyh71513ae_0.conda#5e4c0743c70186509d1412e03c2d8dfa -https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.2-ha47c788_12.conda#8420d8e495a1468f593128e5fbf6748a -https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.1-h29d6fba_17.conda#c20a29ff47043ba1ec24f45dc68930bf -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda#f27a24d46e3ea7b70a1f98e50c62508f -https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h30efb56_1.conda#45801a89533d3336a365284d93298e36 -https://conda.anaconda.org/conda-forge/noarch/certifi-2024.2.2-pyhd8ed1ab_0.conda#0876280e409658fc6f9e75d035960333 +https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda#6732fa52eb8e66e5afeb32db8701a791 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.29-h03582ad_1.conda#6d23dd1c1742112d5fe9f529da7afea9 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-h01636a3_19.conda#8ec16206ccaaf74ee5830ffeba436ebc +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda#b0b867af6fc74b2a0aa206da29c0f3cf +https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda#12f7d00853807b0531775e9be891cb11 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca @@ -139,71 +142,75 @@ https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.con https://conda.anaconda.org/conda-forge/noarch/codespell-2.3.0-pyhd8ed1ab_0.conda#6e67fa19bedafa7eb7d6ea91de53e03d https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.10-py312h30efb56_0.conda#b119273bff37284cbcb9281c1e85e67d +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py312h2ec8cdc_2.conda#399d49ab187d0ac77fff457f276d5101 https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2#43afe5ab04e35e17ba28649471dd7364 https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2#961b3a227b437d82ad7054484cfa71b2 https://conda.anaconda.org/conda-forge/noarch/dill-0.3.8-pyhd8ed1ab_0.conda#78745f157d56877a2c6e7b386f66f3e2 https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda#db16c66b759a64dc5183d69cc3745a52 https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda#e8cd5d629f65bdf0f3bb312cde14659e 
https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2#3cf04868fee0a029769bd41f4b2fbf2d -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_2.conda#8d652ea2ee8eaee02ed8dc820bc794aa +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 -https://conda.anaconda.org/conda-forge/noarch/executing-2.0.1-pyhd8ed1ab_0.conda#e16be50e378d8a4533b989035b196ab8 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.14.0-pyhd8ed1ab_0.conda#831d85ae0acfba31b8efd0f0d07da736 +https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda#d0441db20c827c11721889a241df1220 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda#ec288789b07ae3be555046e099798a56 +https://conda.anaconda.org/conda-forge/noarch/findlibs-0.0.5-pyhd8ed1ab_0.conda#8f325f63020af6f7acbe2c4cb4c920db https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda#12e6988845706b2cfbc3bc35c9a61a95 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.5.0-pyhff2d567_0.conda#d73e9932511ef7670b2cc0ebd9dfbd30 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda#ace4329fbff4c69ab0309db6da182987 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2#6b1f32359fc5d2ab7b491d0029bfffeb https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe +https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2#914d6646c4dbb1fd3ff539830a12fd71 https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyhd8ed1ab_6.conda#2ed1fe4b9079da97c44cfe9c2e5078fd -https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda#c0cc1420498b17414d8617d0b9f506ca +https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2#9f765cbfab6870c8435b9eefecd7a1f4 +https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda#99e164522f6bdf23c177c8d9ae63f975 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda#ff7ca04134ee8dde1d7cf491a78ef7c7 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py312h8572e83_1.conda#c1e71f2bc05d8e8e033aefac2c490d05 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda#444266743652a4f1538145e9362f6d3b https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-22_linux64_openblas.conda#1a2a0cd3153464fee6646f3dd6dad9b8 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.8.0-hca28451_0.conda#f21c27f076a07907e70c49bb57bd0f20 -https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda#8dabe607748cb3d7002ad73cd06f1325 
-https://conda.anaconda.org/conda-forge/linux-64/libpq-16.3-ha72fbe1_0.conda#bac737ae28b79cfbafd515258d97d29e -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.4.0-h2c329e2_0.conda#80030debaa84cfc31755d53742df3ca6 +https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda#32ddb97f897740641d8d46a829ce1704 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-23_linux64_openblas.conda#96c8450a40aa2b9733073a9460de972c +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda#7da1d242ca3591e174a3c7d82230d3c0 +https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_1.conda#7e3173fd1299939a02ebf9ec32aa77c4 +https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda#e71f31f8cfb0a91439f2086fc8aa0461 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/lxml-5.2.2-py312hb90d8a5_0.conda#da3e0a20f8eb75072ad036198c37be61 -https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312h03f37cb_0.conda#8465027beab0db69006f744be7fb3fb6 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h98912ed_0.conda#6ff0b9582da2d4a74a1f9ae1f9ce2af6 +https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hb3f7f12_1.conda#b99d90ef4e77acdab74828f79705a919 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h66e93f0_1.conda#80b79ce0d3dc127e96002dfdcec0a2a5 https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_0.tar.bz2#34fc335fc50eef0b5ea708f2b5f54e0c https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda#5cbee699846772cc939bef23a0d524ed -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.8-py312h2492b07_0.conda#0df463266eaaa1b8a35f8fd26368c1a1 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda#5c9b020a3f86799cdc6115e55df06146 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda#4eccaeba205f0aed9ac3a9ea58568ca3 https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda#d335fd5704b46f4efb89a6774e81aef0 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda#7f2e286780f072ed750df46dc2631138 -https://conda.anaconda.org/conda-forge/noarch/packaging-24.0-pyhd8ed1ab_0.conda#248f521b64ce055e7feae3105e7abeb8 +https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda#1e6c10f7d749a490612404efeb179eb8 +https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda#cbe1bb1f21567018ce595d9c2be0f0db https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2#457c2c8c08e54905d6954e79cb5b5db9 https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda#81534b420deb77da8833f2289b8d47ac https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda#17064acba08d3686f1135b5ec1b32b12 https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2#415f0ebb6198cc2801c73438a9fb5761 https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda#405678b942f2481cecdb3e010f4925d9 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.2-pyhd8ed1ab_0.conda#6f6cf28bf8e021933869bae3f84b8fc9 
+https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda#e1a2dfcd5695f0744f1bcd3bbfe02523 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.8-py312h98912ed_0.conda#3facaca6cc0f7988df3250efccd32da3 +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_1.conda#76706c73e315d21bede804514a39bccf https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2#359eeb6536da0e687af562ed265ec263 -https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.2-pyhd8ed1ab_0.tar.bz2#6784285c7e55cb7212efabc79e4c2883 -https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.11.1-pyhd8ed1ab_0.conda#29ff12b36df16bb66fdccd4206aaebfb +https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda#0f051f09d992e0d08941706ad519ee0e +https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.12.1-pyhd8ed1ab_0.conda#72453e39709f38d0494d096bb5f678b7 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 https://conda.anaconda.org/conda-forge/noarch/pyflakes-3.2.0-pyhd8ed1ab_0.conda#0cf7fef6aa123df28adb21a590065e3d https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.2-pyhd8ed1ab_0.conda#b9a4dacf97241704529131a0dfc0494f +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda#4d91352a50949d049cf9714c8563d433 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 -https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.19.1-pyhd8ed1ab_0.conda#4d3ceee3af4b0f9a1f48f57176bf8625 +https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda#b98d2018c01ce9980c03ee2850690fab https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda#98206ea9954216ee7540f0c773f2104d -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py312h98912ed_0.conda#a8f9739e0ada2320148c92ddd608864f +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py312h66e93f0_1.conda#39aed2afe4d0cf76ab3d6b09eecdbea7 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py312h98912ed_1.conda#e3fd78d8d490af1d84763b9fe3f2e552 -https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.18.1-py312h4413252_0.conda#73da42918aaeb87d5618f82e2ac18d1f -https://conda.anaconda.org/conda-forge/noarch/setuptools-70.0.0-pyhd8ed1ab_0.conda#c8ddb4f34a208df4dd42509a0f6a1c89 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda#549e5930e768548a89c23f595dac5a95 +https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda#8f70e36268dea8eb666ef14c29bd3cda +https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.conda#9ae193ac9c1ead5024d5a4ee0024e9a6 +https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda#f0b618d7673d1b2464f600b34d912f6f https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 
https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -213,47 +220,48 @@ https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04e https://conda.anaconda.org/conda-forge/noarch/termcolor-2.4.0-pyhd8ed1ab_0.conda#a5033708ad9283907c3b1bc1f90d0d0d https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 -https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.12.5-pyha770c72_0.conda#e5dde5caf905e9d95895e05f94967e14 +https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_0.conda#0062a5f3347733f67b0f33ca48cc21dd https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda#2fcb582444635e2c402e8569bb94e039 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4-py312h98912ed_0.conda#e8332e534dca8c5c12c8352e0a23501c +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda#af648b62462794649066366af4ecd5b0 https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda#3df84416a021220d8b5700c613af2dc5 -https://conda.anaconda.org/conda-forge/noarch/types-pyyaml-6.0.12.20240311-pyhd8ed1ab_0.conda#df5d4b66033ecb54c7a4040627215529 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.11.0-pyha770c72_0.conda#6ef2fc37559256cf682d8b3375e89b80 -https://conda.anaconda.org/conda-forge/linux-64/ujson-5.10.0-py312h7070661_0.conda#dd19f5820a3fd57aea70aaf88e6dd191 -https://conda.anaconda.org/conda-forge/noarch/untokenize-0.1.1-py_0.tar.bz2#1447ead40f2a01733a9c8dfc32988375 +https://conda.anaconda.org/conda-forge/noarch/types-pyyaml-6.0.12.20240808-pyhd8ed1ab_0.conda#49b9901257a3b3fb213d6482ecaf9f03 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 +https://conda.anaconda.org/conda-forge/linux-64/ujson-5.10.0-py312h2ec8cdc_1.conda#96226f62dddc63226472b7477d783967 +https://conda.anaconda.org/conda-forge/noarch/untokenize-0.1.1-pyhd8ed1ab_1.conda#6042b782b893029aa40335782584a092 https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda#68f0738df502a14213624b288c60c9ad https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda#daf5160ff9cde3a468556965329085b9 -https://conda.anaconda.org/conda-forge/noarch/webob-1.8.7-pyhd8ed1ab_0.tar.bz2#a8192f3585f341ea66c60c189580ac67 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.43.0-pyhd8ed1ab_1.conda#0b5293a157c2b5cd513dd1b03d8d3aae +https://conda.anaconda.org/conda-forge/noarch/webob-1.8.8-pyhd8ed1ab_0.conda#ae69b699c308c3bd20388219764235b0 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2#e9a21aa4d5e3e5f1aed71e8cefd46b6a https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 -https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.4.0-pyhd8ed1ab_0.conda#93dffc47dadbe36a1a644f3f50d4979d 
-https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h75354e8_4.conda#03cc8d9838ad9dd0060ab532e81ccb21 +https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda#156c91e778c1d4d57b709f8c5333fd06 +https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-ha4adb4c_5.conda#e8372041ebb377237db9d0d24c7b5962 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a -https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda#74a4befb4b38897e19a107693e49da20 +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda#5f25798dcefd8252ce5f9dc494d5f571 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.22-h96bc93b_2.conda#de2b7c9aa9b279cca5542134b7a2b86a -https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.4-h759edc4_4.conda#8ced661d9dcece8698922fd8a73b6511 -https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.11.1-h91d86a7_1.conda#2dbab1d281b7e1da05eee544cbdc8af6 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-h191b246_2.conda#f8f40355dac7a75313d9c10de91330e7 +https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda#debd1677c2fea41eb2233a260f48a298 https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda#0ed9d7c0e9afa7c025807a9a8136ea3e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py312hf06ca03_0.conda#56b0ca764ce23cc54f3f7e2a7b970f6d -https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.0-hbdc6101_1.conda#0ba5a427a51923dcdfe1121115ac8293 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-hebfffa5_3.conda#fceaedf1cdbcb02df9699a0d9b005292 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda#a861504bbea4161a9170b85d4d2be840 +https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda#1b7a01fd02d11efe0eb5a676842a7b7d https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2#4fd2c6b53934bd7d96d1f3fdaf99b79f https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2#a29b7c141d6b2de4bb67788a5f107734 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.5.3-py312h9a8786e_0.conda#f01930d0afe8ac5f8062c98e6b8d1fd0 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py312h66e93f0_1.conda#5dc6e358ee0af388564bd0eba635cf9e https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py312h98912ed_0.conda#a4fbffb84a54767266c69e3699078a00 https://conda.anaconda.org/conda-forge/noarch/docformatter-1.7.5-pyhd8ed1ab_0.conda#3a941b6083e945aa87e739a9b85c82e9 https://conda.anaconda.org/conda-forge/noarch/fire-0.6.0-pyhd8ed1ab_0.conda#e9ed10aa8fa1dd6782940b95c942a6ae 
-https://conda.anaconda.org/conda-forge/noarch/flake8-7.0.0-pyhd8ed1ab_0.conda#15bc58c860fc0a9abc26ec902df35252 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.0-py312h9a8786e_0.conda#8490346e9d5efd7a6869582aa0c95b25 +https://conda.anaconda.org/conda-forge/noarch/flake8-7.1.1-pyhd8ed1ab_0.conda#a25e5df6b26be3c2d64be307c1ef0b37 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py312h66e93f0_1.conda#7abb7d39d482ac3b8e27e6c0fff3b168 https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda#358c17429c97883b2cb9ab5f64bc161b -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_102.conda#d8cb3688b92e891e1e5f613517a50ca8 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.1.0-pyha770c72_0.conda#0896606848b2dc5cebdf111b6543aa04 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.0-pyhd8ed1ab_0.conda#c5d3907ad8bd7bf557521a1833cf7e6d +https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2#b748fbf7060927a6e82df7cb5ee8f097 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda#7e1729554e209627636a0f6fabcdd115 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.4.0-pyha770c72_0.conda#6e3dbc422d3749ad72659243d6ac8b2b +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda#c808991d29b9838fb4d96ce8267ec9ec https://conda.anaconda.org/conda-forge/noarch/isodate-0.6.1-pyhd8ed1ab_0.tar.bz2#4a62c93c1b5c0b920508ae3fd285eaf5 https://conda.anaconda.org/conda-forge/noarch/isort-5.13.2-pyhd8ed1ab_0.conda#1d25ed2b95b92b026aaa795eabec8d91 https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda#81a3be0b2023e1ea8555781f0ad904a2 @@ -261,135 +269,158 @@ https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.7.2-py312h7900ff3_0.conda#eee5a2e3465220ed87196bbb5665f420 https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda#afcd1b53bcac8844540358e33f33d28f https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2#8d67904973263afd2985ba56aa2d6bb4 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-22_linux64_openblas.conda#4b31699e0ec5de64d5896e580389c9a1 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda#cfebc557e54905dadc355c0e9f003004 -https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.24.0-h2736e30_0.conda#34aeee3fa7fca5dc21fad3ac6f4f0ab2 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-22_linux64_openblas.conda#b083767b6c877e24ee597d93b87ab838 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-23_linux64_openblas.conda#eede29b40efa878cbe5bdcb767e97310 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 +https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f +https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda#8dabe607748cb3d7002ad73cd06f1325 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-23_linux64_openblas.conda#2af0879961951987e464722fd00ec1e0 +https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py312he28fd5a_1.conda#4bc1e0dda9208b8934333d878dde4996 https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda#779345c95648be40d22aaa89de7d4254 
-https://conda.anaconda.org/conda-forge/linux-64/mypy-1.10.0-py312h9a8786e_0.conda#644f1c538810d1f250ef0fc844f3822d +https://conda.anaconda.org/conda-forge/linux-64/mypy-1.11.2-py312h66e93f0_0.conda#ea315027e648236653f27d3d1ae893f6 https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_1.tar.bz2#2f59daeb14581d41b1e2dda0895933b2 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda#629f3203c99b32e0988910c93e77f3b6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.3.0-py312hdcec9eb_0.conda#425bb325f970e57a047ac57c4586489d -https://conda.anaconda.org/conda-forge/noarch/pip-24.0-pyhd8ed1ab_0.conda#f586ac1e56c8638b64f9c8122a7b8a67 -https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.3-h8e811e2_0.conda#e4d52462da124ed3792472f95a36fc2a -https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.0-h1d62c97_1.conda#113f894e5019db2e2705645ee3bcf91a -https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.42-pyha770c72_0.conda#0bf64bf10eee21f46ac83c161917fa86 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.4.0-py312h287a98d_0.conda#59ea71eed98aee0bebbbdd3b118167c7 +https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda#6c78fbb8ddfd64bcb55b5cbafd2d2c43 +https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_1.conda#1aaec5dbae29b3f0a2c20eeb84e9e38a +https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.1-h54d7996_1.conda#e479d1991c725e1a355f33c0e40dbc66 +https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.47-pyha770c72_0.conda#1247c861065d227781231950e14fe817 https://conda.anaconda.org/conda-forge/noarch/pydocstyle-6.3.0-pyhd8ed1ab_0.conda#7e23a61a7fbaedfef6eb0e1ac775c8e5 -https://conda.anaconda.org/conda-forge/noarch/pytest-8.2.1-pyhd8ed1ab_0.conda#e4418e8bdbaa8eea28e047531e6763c8 +https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c -https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.0.3-py312h8fd38d8_0.conda#27efa6d21e98bcab4585a6b913df7625 +https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_2.conda#44f46ddfdd01d242d2fff2d69a0d7cba https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda#0fc8b52192a8898627c3efae1003e9f6 https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda#8662629d9a05f9cff364e31ca106c1ac -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.11.0-hd8ed1ab_0.conda#471e3988f8ca5e9eb3ce6be7eac3bcee +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_0.tar.bz2#7c4076e494f0efe76705154ac9302ba6 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.1-pyhd8ed1ab_0.conda#08807a87fa7af10754d46f63b368e016 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.2-pyhd8ed1ab_0.conda#7d36e7a485ea2f5829408813bdbbfb38 
-https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-hac6953d_0.conda#63b80ca78d29380fe69e69412dcbe4ac -https://conda.anaconda.org/conda-forge/noarch/yamale-5.0.0-pyhca7485f_0.conda#53c70f2dd94c690afd8606de7e952fe7 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda#14c15fa7def506fe7d1a0e3abdc212d6 +https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h666cd97_1.conda#97e8ef960a53cf08f2c4ceec8cf9e10d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.7.10-h4bc722e_1.conda#749baebe7e2ff3360630e069175e528b +https://conda.anaconda.org/conda-forge/noarch/yamale-5.2.1-pyhca7485f_0.conda#c089f90a086b6214c5606368d0d3bad0 https://conda.anaconda.org/conda-forge/noarch/yamllint-1.35.1-pyhd8ed1ab_0.conda#a1240b99a7ccd953879dc63111823986 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.5.9-h594631b_3.conda#47490db1dcddfb1c355251fc427746a6 -https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.6.0-hf1915f5_1.conda#fd11ea65ceb397f9587b1d88a4329d73 -https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.5.0-h94269e2_4.conda#f364272cb4c2f4ce2341067107b82865 -https://conda.anaconda.org/conda-forge/noarch/cattrs-23.2.3-pyhd8ed1ab_0.conda#91fc4700dcce4a46d439900a132fe4e5 -https://conda.anaconda.org/conda-forge/linux-64/cryptography-42.0.7-py312hbcc2302_0.conda#7bc0e1aae21b2e82d03959931f4294f0 -https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-h928be8b_0.conda#c0f2468661b7cae54a7a1ff11926e372 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.5.0-hfac3d4d_0.conda#f5126317dd0ce0ba26945e411ecc6960 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.1.0-hd8ed1ab_0.conda#6ef2b72d291b39e479d7694efa2b2b98 +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h29c84ef_4.conda#81674a3f6a59966a9ffaaaf063c8c331 +https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.8.0-hd126650_2.conda#36df3cf05459de5d0a41c77c4329634b +https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.7.0-h10ac4d7_1.conda#ab6d507ad16dbe2157920451d662e4a1 +https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.0-pyhd8ed1ab_0.conda#1e5ac693650d3312e6421e766a5abadd +https://conda.anaconda.org/conda-forge/linux-64/cryptography-43.0.1-py312hda17c39_0.conda#1b673277378cb4c80a061a4c6f453b6d +https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda#84ec3f5b46f3076be49f2cf3f1cfbf02 +https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-hf7fa9e8_2.conda#1d6bdc6b2c62c8cc90c67b50142d7b7f +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.4.0-hd8ed1ab_0.conda#01b7411c765c3d863dcc920207f258bd https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda#a0e4efb5f35786a05af4809a2fb1f855 -https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hee9dde6_1.conda#c5b7b29e2b66107553d0366538257a51 -https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.24.0-h3d9a0c8_0.conda#a731371833a7b1ab3a87be0fe7e6235a -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h9612171_113.conda#b2414908e43c442ddc68e6148774a304 -https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h5539517_6.conda#1ee26233875c04444bdb2e5a838b5634 
+https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hf8d3e68_2.conda#ffe68c611ae0ccfda4e7a605195e22b3 +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.28.0-h26d7fe4_0.conda#2c51703b4d775f8943c08a361788131b +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h135f659_114.conda#a908e463c710bd6b10a9eaa89fdf003c +https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h15fa968_9.conda#4957a903bd6a68cc2e53e47476f9c6f4 https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py312heda63a1_0.conda#d8285bea2a350f63fab23bf460221f3f -https://conda.anaconda.org/conda-forge/linux-64/poppler-24.04.0-hb6cd0d7_0.conda#d19eed746748f1d44b575662f2bcfe95 +https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda#0854b9ff0cc10a1f6f67b0f352b8e75a https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_2.tar.bz2#2099b86a7399c44c0c61cdb6de6915ba -https://conda.anaconda.org/conda-forge/noarch/pylint-3.2.2-pyhd8ed1ab_0.conda#6621f1cfd6f9669482be177467ebe919 -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312hb591178_6.conda#e5a041de2ecaee7e02c40cf82afa132e +https://conda.anaconda.org/conda-forge/noarch/pylint-3.2.7-pyhd8ed1ab_0.conda#ea8954e8d1d2c8a56f615be494323cb3 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda#173afeb0d112c854fd1a9fcac4b5cce3 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda#c54c0107057d67ddf077751339ec2c63 -https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.3-pyhd8ed1ab_0.conda#1dbdf019d740419852c4a7803fff49d9 +https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.4-pyhd8ed1ab_0.conda#638cfd3bf6904125e868176d89c2ae0b https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_0.conda#52b91ecba854d55b28ad916a8b10da24 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e https://conda.anaconda.org/conda-forge/noarch/pytest-mypy-0.10.3-pyhd8ed1ab_0.conda#37fd5d5468805e017650a395ca779f38 -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 https://conda.anaconda.org/conda-forge/noarch/rdflib-7.0.0-pyhd8ed1ab_0.conda#44d14ef95495b3d4438f28998e0296a9 -https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda#e7df0fdd404616638df5ece6e69ba7af -https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240523-pyhd8ed1ab_0.conda#fbfe8eaee3a9ccbd55ac49170cf2c063 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h8572e83_4.conda#52c9e25ee0a32485a102eeecdb7eef52 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 https://conda.anaconda.org/conda-forge/noarch/yapf-0.40.1-pyhd8ed1ab_0.conda#f269942e802d5e148632143d4c37acc9 -https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.26.9-he3a8b3b_0.conda#fbe6a256dd70a505730e7c461cd37a35 -https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.10.0-h00ab1b0_1.conda#1e63d3866554a4d2e3d1cba5f21a2841 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py312h085067d_1.conda#b121b9dd4935f63959eb35cc6c36973b 
-https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.1-py312h8572e83_0.conda#12c6a831ef734f0b2dd4caff514cbb7f -https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.5.2-pyhd8ed1ab_0.conda#1a57a819915e1c169b74933720b138f2 -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.36-pyhd8ed1ab_0.conda#ba68cb5105760379432cebc82b45af40 -https://conda.anaconda.org/conda-forge/noarch/ipython-8.25.0-pyh707e725_0.conda#98466a37c08f3bdbb500786271859517 -https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.22.0-pyhd8ed1ab_0.conda#b9661a4b1200d6bc7d8a4cdafdc91468 +https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda#5dc18b385893b7991a3bbeb135ad7c3e +https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.12.0-hd2e3451_0.conda#61f1c193452f0daa582f39634627ea33 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_1.conda#6b9f9141c247bdd61a2d6d37e0a8b530 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda#f80cc5989f445f23b1622d6c455896d9 +https://conda.anaconda.org/conda-forge/noarch/ipython-8.27.0-pyh707e725_0.conda#0ed09f0c0f62f50b4b7dd2744af13629 +https://conda.anaconda.org/conda-forge/linux-64/jasper-4.2.4-h536e39c_0.conda#9518ab7016cf4564778aef08b6bd8792 +https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda#da304c192ad59975202859b367d0f6a2 https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.2-pyhd8ed1ab_0.conda#3cdbb2fa84490e5fd44c9f9806c0d292 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_hacb5139_103.conda#50f05f98d084805642d24dff910e11e8 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-h353785f_1.conda#c363d0b330b4b21b4c1b10e0981d3a99 +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.28.0-ha262f82_0.conda#9e7960f0b9ab3895ef73d92477c47dae +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h22f9119_106.conda#5b911bfe75855326bae6857451268e59 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py312hfb8ada1_0.conda#d0745ae74c2b26571b692ddde112eebb -https://conda.anaconda.org/conda-forge/linux-64/pango-1.52.2-ha41ecd1_0.conda#a658eeabf188c3040da36b0763de2bfd -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.1-pyhd8ed1ab_0.conda#d15917f33140f8d2ac9ca44db7ec8a25 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.0.0-pyhd8ed1ab_0.conda#b50aec2c744a5c493c09cce9e2e7533e +https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.2.1-pyhd8ed1ab_2.conda#85fa2fdd26d5a38792eb57bc72463f07 https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_0.conda#4d2040212307d18392a2687772b3a96d -https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.0-pyhd8ed1ab_1.conda#f351ee57bb9c3e4b2f4952980461ba1d -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.13.1-py312hc2bc53b_0.conda#864b2399a9c998e17d1a9a4e0c601285 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.4-py312ha5b4d35_1.conda#1248b799f811d8ea215de88f53ae7ffc 
-https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.329-hba8bd5f_3.conda#720494d9f06b4aff1270cffb7acc7920 -https://conda.anaconda.org/conda-forge/noarch/bokeh-3.4.1-pyhd8ed1ab_0.conda#0f8e0831bbf38d83973438ce9af9af9a -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py312h085067d_5.conda#b40cdf87aee69ccf162022579cb99afb -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.5.2-pyhd8ed1ab_0.conda#2fa6807bd19e5cdc77fe1b6a42c86228 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h280cfa0_4.conda#410f86e58e880dcc7b0e910a8e89c05c -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.0-hadf69e7_1.conda#0e2b5bd9533043b41f9482ae9e2c16b5 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.4-py312h20ab3a6_2.conda#fbfe798f83f0d66410903ad8f40d5283 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h7d485d2_0.conda#7418a22e73008356d9aba99d93dfeeee +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda#5be02e05e1adaa42826cc6800ce399bc +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda#e804c43f58255e977093a2298e442bb8 +https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.11.0-h325d260_1.conda#11d926d1f4a75a1b03d1c053ca20424b +https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda#38d785787ec83d0431b3855328395113 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py312hc0a28a1_6.conda#fa4853d25b6fbfef5eb7b3e1b5616dd5 +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 +https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.37.0-h8bb6dbc_0.conda#977e230843c4747236c5972a69b1121d +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.6.1-nompi_h4441c20_3.conda#1afc1e85414e228916732df2b8c5d93b +https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_1.conda#740a4aafba1bdf3afa5d1e1aed919521 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_1.conda#d156bb989645b02eb3271e294dc5f4af +https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_1.conda#c5607874642370f104ab12f084244717 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_1.conda#63cda7b902bc6efd1906a17c7ac3fcc7 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_1.conda#42a2fd4d036167c60334952c0505ffda +https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_1.conda#1b418348a1e0f090864c0bf837db0dfa +https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_1.conda#ab4a54efb504269e95f74a7e3bb82326 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h151b34b_1.conda#6dc6cade5a71d9a353bca7dd0abf491c +https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h151b34b_1.conda#adc9eda4a8f5d124635949e25e981bb1 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_1.conda#ec9b5886293e9a5574bd11325e591a1c +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.3-h9564881_0.conda#a7045ed6fb8b68ef7be002ce615e3bf6 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312h854627b_0.conda#a57b0ae7c0aac603839a4e83a3e997d6 
https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_0.conda#bcdbeb2b693eba886583a907840c6421 https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda#0b57b5368ab7fc7cdc9e3511fa867214 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.5-nompi_py312h39d4375_101.conda#9033de6c10fd3396990890d8d8a6ac4e -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.7.1-pyha770c72_0.conda#724bc4489c1174fc8e3233b0624fa51f -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc7c0aa3_1.conda#81789bd582b389e77ff709f6b27300fb -https://conda.anaconda.org/conda-forge/noarch/xarray-2024.5.0-pyhd8ed1ab_0.conda#e839fd0ae78a368c930f0b1feafa6736 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.1-nompi_py312h21d6d8e_102.conda#9049ba34261ce7106220711d313fcf61 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda#004cff3a7f6fafb0a041fb575de85185 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc0a28a1_3.conda#81bbcb20ea4a53b05a8cf51f31496038 +https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 +https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.25.0-h86fa3b2_11.conda#1d20992085ca324beac5db83afbab651 +https://conda.anaconda.org/conda-forge/noarch/types-requests-2.32.0.20240907-pyhd8ed1ab_0.conda#9f907bdcfc41daad16bee14d959b18aa +https://conda.anaconda.org/conda-forge/noarch/xarray-2024.7.0-pyhd8ed1ab_0.conda#a7d4ff4bf1502eaba3fbbaeba66969ec https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.23.0-py312h1d6d2e6_1.conda#6392d3ce615ab0f32bc39b07f8f4c300 -https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.9.1-pyhd8ed1ab_0.conda#9b1ddfcc35856fbd69e193b945b7d7f2 -https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.8.5-pyhd8ed1ab_0.conda#abfb434fb6654f83d740428863ec85a8 -https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhca7485f_3.conda#1d43833138d38ad8324700ce45a7099a -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-11.0.0-hc68bbd7_0.conda#52a531ef95358086a56086c45d97ab75 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-16.1.0-hcb6531f_6_cpu.conda#0df3fc2a8d63b1cc49973c5a679ec438 +https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.9.4-pyhd8ed1ab_0.conda#c8b6a3126f659e311d3b5c61be254d95 +https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.9.0-pyhd8ed1ab_0.conda#a201de7d36907f2355426e019168d337 +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.6.1-pyhc1e730c_0.conda#25a9661177fd68bfdb4314fd658e5c3b +https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda#dc379f362829d5df5ce6722565110029 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_1.conda#76704f973c67df65adce022f4cfb57b5 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_1.conda#cdeb3c147ff20a1c9114e6b1ca8772bb +https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-ha8d0372_1.conda#99186faa63e7e920f3a6e5a553ec543a https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda#15b51397e0fe8ea7d7da60d83eb76ebc https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 
-https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.23.0-hfa691db_2.conda#f9cd15d6c7deeeb5b60d65fac59b18bc -https://conda.anaconda.org/conda-forge/noarch/iris-3.9.0-pyha770c72_0.conda#efaf150eb009f04efa58f1401c767192 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-16.1.0-hac33072_6_cpu.conda#38b1161e2f8c72095f64ea35ee1294c5 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.0-h77540a9_5.conda#3703d3fb83229c78852cba9c37482caa -https://conda.anaconda.org/conda-forge/linux-64/libparquet-16.1.0-h6a7eafb_6_cpu.conda#87f676c6cb33f8e1956948ee216fa3a1 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_0.conda#43d9cd74e3950ab09cbddf36f1706b9f -https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.7.0-pyhd8ed1ab_0.conda#7ad60b498674a9bff3ba8f3fb335e4f0 -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-16.1.0-py312h5429d62_1_cpu.conda#cee0cddfaedfd3657f429318207e5816 -https://conda.anaconda.org/conda-forge/linux-64/pydot-2.0.0-py312h7900ff3_0.conda#a3628f9a0ca8573314dc6f94f40a0419 -https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.0-py312h86af8fa_5.conda#fa187303265629d7229baaa6b44d333d -https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-16.1.0-hac33072_6_cpu.conda#2e9430df8ffd645a5bc7edffb252c3de -https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_0.conda#391934bd1a79990c23df1d1809ddc821 +https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed +https://conda.anaconda.org/conda-forge/linux-64/python-eccodes-2.37.0-py312hc0a28a1_0.conda#476b0357e207e10d2b7b13ed82156e6d +https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_0.conda#c6089540fed51a9a829aa19590fa925b +https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhca7485f_3.conda#1d43833138d38ad8324700ce45a7099a +https://conda.anaconda.org/conda-forge/noarch/iris-3.10.0-pyha770c72_1.conda#b7212cd8247ce909631fdcb77015914a +https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda#b654d072b8d5da807495e49b28a0b884 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_1.conda#2b67e1429e481ef57404f1481446f6c6 +https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda#49c60a8dc089d8127b9368e9eb6c1a77 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda#e2d2abb421c13456a9a9f80272fdf543 +https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.8.0-pyhd8ed1ab_0.conda#fba377622e74ee0bbeb8ccae9fa593d3 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h9cafe31_1_cpu.conda#235827b9c93850cafdd2d5ab359893f9 +https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.1-py312h7900ff3_0.conda#d09da44e1f00d5f99adadd5aa6244c2f +https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.0-py312h5aa26c2_0.conda#eb359fdc8e1fe9efc70ed82f540e8128 +https://conda.anaconda.org/conda-forge/noarch/iris-grib-0.20.0-pyhd8ed1ab_1.conda#d8dced41fc56982c81190ba0eb10c3de +https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda#cd2c36e8865b158b82f61c6aac28b7e1 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_1.conda#37cec2cf68f4c09563d8bc833791096b https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 
-https://conda.anaconda.org/conda-forge/linux-64/fiona-1.9.6-py312h32ad294_3.conda#6da62c5c06a6416e0130220e4f418bb0 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-16.1.0-h7e0c224_6_cpu.conda#81fea801c4bb126509e784cbd2ca4d17 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_0.conda#c9d64b8a7ee8e6bdbf0e7d8aa7f39601 -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-16.1.0-py312h8da182e_1.conda#2d8b51007ba9ec982067ecfc74315c3a -https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.2-pyhd8ed1ab_0.conda#34db694d2afc672094f1a74af51cb44e +https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hf54134d_13_cpu.conda#46f41533959eee8826c09e55976b8c06 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_1.conda#ab83e3b9ca2b111d8f332e9dc8b2170f +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_1.conda#7e8ddbd44fb99ba376b09c4e9e61e509 +https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda#b77166a6032a2b8e52b3fee90d62ea4d https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda#ccc06e6ef2064ae129fab3286299abda -https://conda.anaconda.org/conda-forge/noarch/dask-2024.5.2-pyhd8ed1ab_0.conda#6bb2c18b838161e550a30de0fdff6993 -https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.10.0-pyhd8ed1ab_0.conda#a5ccce1a87da81d6c690cd11ae0687a2 -https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.6-pyhd8ed1ab_0.tar.bz2#4409dd7e06a62c3b2aa9e96782c49c6d -https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.4-pyhd8ed1ab_0.conda#9dc80eaeff56fb67dbf4f871b81bc13a -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.3-pyhd8ed1ab_0.conda#55e445f4fcb07f2471fb0e1102d36488 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.8-pyhd8ed1ab_0.conda#611a35a27914fac3aa37611a6fe40bb5 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.6-pyhd8ed1ab_0.conda#d7e4954df0d3aea2eacc7835ad12671d -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.5-pyhd8ed1ab_0.conda#7e1e7437273682ada2ed5e9e9714b140 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.7-pyhd8ed1ab_0.conda#26acae54b06f178681bfb551760f5dd1 -https://conda.anaconda.org/conda-forge/noarch/sphinx-7.3.7-pyhd8ed1ab_0.conda#7b1465205e28d75d2c0e1a868ee00a67 +https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda#3adbad9b363bd0163ef2ac59f095cc13 +https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.11.0-pyhd8ed1ab_0.conda#b30cbc09f81d9dbaf8b74f2c8eacddc5 +https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.13-pyhd8ed1ab_0.conda#b2f4f2f3923646802215b040e63d042e +https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.5-pyhd8ed1ab_0.conda#b808b8a0494c5cca76200c73e260a060 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda#9075bd8c033f0257122300db914e49c9 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda#b3bcc38c471ebb738854f52a36059b48 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda#e25640d692c02e8acfff0372f547e940 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 
+https://conda.anaconda.org/conda-forge/noarch/sphinx-8.0.2-pyhd8ed1ab_0.conda#625004bdab1b171dfd1e29ebb30c40dd https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e # pip antlr4-python3-runtime @ https://files.pythonhosted.org/packages/29/14/8ac135ec7cc9db3f768e2d032776718c6b23f74e63543f0974b4873500b2/antlr4-python3-runtime-4.7.2.tar.gz#sha256=168cdcec8fb9152e84a87ca6fd261b3d54c8f6358f42ab3b813b14a7193bb50b -# pip scitools-iris @ https://files.pythonhosted.org/packages/2a/4f/9929f74388dbc6334c3342ff5f45a862322ad1789e32d6ab292d99d81f51/scitools_iris-3.9.0-py3-none-any.whl#sha256=1a9acfb872b50554de269d403a79e01e169c9973d4ec30a46b824c8ec710a818 +# pip scitools-iris @ https://files.pythonhosted.org/packages/13/f9/492f73d8cb5cc6a4552448e2583690e918d8ed0c7dad661fb118340ab127/scitools_iris-3.10.0-py3-none-any.whl#sha256=01f99d9cabde69536f21ca31213e5497e1c7d62cd7222e06bfa05885318c9169 # pip esmvaltool-sample-data @ https://files.pythonhosted.org/packages/58/fa/4ecc84665e0ed04c8c4c797405c19c12900bdba6438ab2f5541bf8aa1d42/ESMValTool_sample_data-0.0.3-py3-none-any.whl#sha256=81f0f02182eacb3b639cb207abae5ac469c6dd83fb6dfe6d2430c69723d85461 diff --git a/doc/api/esmvalcore.dataset.rst b/doc/api/esmvalcore.dataset.rst index b48e7f20fb..083e517537 100644 --- a/doc/api/esmvalcore.dataset.rst +++ b/doc/api/esmvalcore.dataset.rst @@ -1,5 +1,5 @@ Dataset -======= +======== .. automodule:: esmvalcore.dataset :no-show-inheritance: diff --git a/doc/api/esmvalcore.experimental.recipe.rst b/doc/api/esmvalcore.experimental.recipe.rst index 80f3fb3310..4234db317e 100644 --- a/doc/api/esmvalcore.experimental.recipe.rst +++ b/doc/api/esmvalcore.experimental.recipe.rst @@ -1,7 +1,7 @@ .. _api_recipe: Recipes -======= +======== This section describes the :py:mod:`~esmvalcore.experimental.recipe` submodule of the API (:py:mod:`esmvalcore.experimental`). diff --git a/doc/changelog.rst b/doc/changelog.rst index e3e4b518d9..68c4fe1792 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -9,21 +9,66 @@ v2.11.0 ------- Highlights -TODO: add highlights +- Performance improvements have been made to many preprocessors: + + - Preprocessors :func:`esmvalcore.preprocessor.mask_landsea`, + :func:`esmvalcore.preprocessor.mask_landseaice`, + :func:`esmvalcore.preprocessor.mask_glaciated`, and + :func:`esmvalcore.preprocessor.extract_levels` are now lazy + +- Several new preprocessors have been added: + + - :func:`esmvalcore.preprocessor.local_solar_time` + - :func:`esmvalcore.preprocessor.distance_metrics` + - :func:`esmvalcore.preprocessor.histogram` + +- NEW TREND: First-time release manager shout-outs! + + - This is the first ESMValTool release managed by the Met Office! We want to + shout this out - and encourage all future first-time release managers to + shout out too - to celebrate the growing, thriving ESMValTool community. This release includes Backwards incompatible changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -TODO: add examples of how to deal with these changes - - Allow contiguous representation of extracted regions (:pull:`2230`) by :user:`rebeccaherman1` + - The preprocessor function :func:`esmvalcore.preprocessor.extract_region` + no longer automatically maps the extracted :class:`iris.cube.Cube` to the + 0-360 degrees longitude domain. If you need this behaviour, use + ``cube.intersection(longitude=(0., 360.))`` in your Python code after + extracting the region, as illustrated in the sketch below. It is not + possible to restore the previous behaviour from a recipe.
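+
+    For example, a minimal sketch of how to restore the previous behaviour in
+    Python (``extracted_region.nc`` is a hypothetical name for a file written
+    by the preprocessor):
+
+    .. code-block:: python
+
+       import iris
+
+       # Load the extracted region from a preprocessor output file.
+       cube = iris.load_cube("extracted_region.nc")
+       # Map the longitude coordinate back to the 0-360 degrees domain.
+       cube = cube.intersection(longitude=(0.0, 360.0))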
+ +- Use ``iris.FUTURE.save_split_attrs = True`` to remove iris warning (:pull:`2398`) by :user:`schlunma` + + - Since `v3.8.0`_, Iris explicitly distinguishes between local and global + netCDF attributes. ESMValCore adopted this behavior with v2.11.0. With + this change, attributes are written as local attributes by default, unless + they already existed as global attributes or belong to a special list of + global attributes (in which case attributes are written as global + attributes). See :class:`iris.cube.CubeAttrsDict` for details. + +.. _v3.8.0: https://scitools-iris.readthedocs.io/en/stable/whatsnew/3.8.html#v3-8-29-feb-2024 + Deprecations ~~~~~~~~~~~~ - Refactor regridding (:pull:`2231`) by :user:`schlunma` + + - This PR deprecated two regridding schemes, which will be removed with + ESMValCore v2.13.0: + + - ``unstructured_nearest``: Please use the scheme ``nearest`` instead. + This is an exact replacement for data on unstructured grids. ESMValCore + is now able to determine the most suitable regridding scheme based on + the input data. + - ``linear_extrapolate``: Please use a generic scheme with + ``reference: iris.analysis:Linear`` and + ``extrapolation_mode: extrapolate`` instead. + - Allow deprecated regridding scheme ``linear_extrapolate`` in recipe checks (:pull:`2324`) by :user:`schlunma` - Allow deprecated regridding scheme ``unstructured_nearest`` in recipe checks (:pull:`2336`) by :user:`schlunma` @@ -33,6 +78,9 @@ Bug fixes - Do not overwrite facets from recipe with CMOR table facets for derived variables (:pull:`2255`) by :user:`bouweandela` - Fix error message in variable definition check (:pull:`2313`) by :user:`enekomartinmartinez` - Unify dtype handling of preprocessors (:pull:`2393`) by :user:`schlunma` +- Fix bug in ``_rechunk_aux_factory_dependencies`` (:pull:`2428`) by :user:`ehogan` +- Avoid loading entire files into memory when downloading from ESGF (:pull:`2434`) by :user:`bouweandela` +- Preserve cube attribute global vs local when concatenating (:pull:`2449`) by :user:`bouweandela` CMOR standard ~~~~~~~~~~~~~ @@ -55,6 +103,7 @@ Computational performance improvements - Cache regridding weights if possible (:pull:`2344`) by :user:`schlunma` - Implement lazy area weights (:pull:`2354`) by :user:`schlunma` - Avoid large chunks in :func:`esmvalcore.preprocessor.climate_statistics` preprocessor function with `period='full'` (:pull:`2404`) by :user:`bouweandela` +- Load data only once for ESMPy regridders (:pull:`2418`) by :user:`bouweandela` Documentation ~~~~~~~~~~~~~ @@ -84,10 +133,11 @@ Fixes for datasets Installation ~~~~~~~~~~~~ -- Updated iris pin to ``iris>=3.6.1`` (:pull:`2286`) by :user:`schlunma` - Pin pandas yet again avoid new ``2.2.1`` as well (:pull:`2353`) by :user:`valeriupredoi` - Update Iris pin to avoid using versions with memory issues (:pull:`2408`) by :user:`chrisbillowsMO` - Pin esmpy <8.6.0 (:pull:`2402`) by :user:`valeriupredoi` +- Pin numpy<2.0.0 to avoid pulling 2.0.0rcX (:pull:`2415`) by :user:`valeriupredoi` +- Add support for Python=3.12 (:pull:`2228`) by :user:`valeriupredoi` Preprocessor ~~~~~~~~~~~~ @@ -132,12 +182,11 @@ Improvements - Handle warnings about invalid units for iris>=3.8 (:pull:`2378`) by :user:`schlunma` - Added note on how to access ``index.html`` on remote server (:pull:`2276`) by :user:`schlunma` - Remove custom fix for concatenation of aux factories now that bug in iris is solved (:pull:`2392`) by :user:`schlunma` -- Use ``iris.FUTURE.save_split_attrs = True`` to remove iris warning (:pull:`2398`) by 
:user:`schlunma` - Ignored iris warnings about global attributes (:pull:`2400`) by :user:`schlunma` +- Add native6, OBS6 and RAWOBS rootpaths to metoffice config-user.yml template, and remove temporary dir (:pull:`2432`) by :user:`alistairsellar` .. _changelog-v2-10-0: - v2.10.0 ------- Highlights @@ -975,7 +1024,7 @@ Highlights ~~~~~~~~~~ - ESMValCore now has the ability to automatically download missing data from ESGF. For details, see :ref:`Data Retrieval`. -- ESMValCore now also can resume an earlier run. This is useful to re-use expensive preprocessor results. For details, see :ref:`Running`. +- ESMValCore can now also resume an earlier run. This is useful to reuse expensive preprocessor results. For details, see :ref:`Running`. This release includes diff --git a/doc/conf.py b/doc/conf.py index 3f443ced03..7e0b4b988d 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -81,11 +81,16 @@ # Show type hints in function signature AND docstring autodoc_typehints = 'both' +# See https://github.com/sphinx-doc/sphinx/issues/12589 +suppress_warnings = [ + 'autosummary.import_cycle', +] + # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = {'.rst': 'restructuredtext'} # The encoding of source files. # source_encoding = 'utf-8-sig' @@ -165,8 +170,13 @@ # Short synopsis of said issue: as of now, left/right keys take one # to the previous/next page instead of scrolling horizontally; this # should be fixed upstream, then we can set again navigation with keys True -html_theme_options = {"navigation_with_keys": False} - +html_theme_options = { + "navigation_with_keys": False, + "logo": { + "image_light": "figures/ESMValTool-logo-2.png", + "image_dark": "figures/ESMValTool-logo-2-dark.png", + }, +} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] @@ -189,7 +199,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path: list = [] +html_static_path: list = ["figures/ESMValTool-logo-2-dark.png"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory.
These files are copied @@ -433,20 +443,21 @@ # Configuration for intersphinx intersphinx_mapping = { - 'cf_units': ('https://cf-units.readthedocs.io/en/latest/', None), + 'cf_units': ('https://cf-units.readthedocs.io/en/stable/', None), 'cftime': ('https://unidata.github.io/cftime/', None), 'esmvalcore': (f'https://docs.esmvaltool.org/projects/ESMValCore/en/{rtd_version}/', None), 'esmvaltool': (f'https://docs.esmvaltool.org/en/{rtd_version}/', None), + 'esmpy': ('https://earthsystemmodeling.org/esmpy_doc/release/latest/html/', + None), 'dask': ('https://docs.dask.org/en/stable/', None), 'distributed': ('https://distributed.dask.org/en/stable/', None), - 'iris': ('https://scitools-iris.readthedocs.io/en/latest/', None), - 'iris-esmf-regrid': ('https://iris-esmf-regrid.readthedocs.io/en/latest', - None), + 'iris': ('https://scitools-iris.readthedocs.io/en/stable/', None), + 'esmf_regrid': ('https://iris-esmf-regrid.readthedocs.io/en/stable/', None), 'matplotlib': ('https://matplotlib.org/stable/', None), 'numpy': ('https://numpy.org/doc/stable/', None), - 'pyesgf': ('https://esgf-pyclient.readthedocs.io/en/latest/', None), + 'pyesgf': ('https://esgf-pyclient.readthedocs.io/en/stable/', None), 'python': ('https://docs.python.org/3/', None), 'scipy': ('https://docs.scipy.org/doc/scipy/', None), } diff --git a/doc/contributing.rst b/doc/contributing.rst index 814ab79263..ee47974e90 100644 --- a/doc/contributing.rst +++ b/doc/contributing.rst @@ -101,7 +101,7 @@ Please keep the following considerations in mind when programming: code. - If you find yourself copy-pasting a piece of code and making minor changes to every copy, instead put the repeated bit of code in a function that you can - re-use, and provide the changed bits as function arguments. + reuse, and provide the changed bits as function arguments. - Be careful when changing existing unit tests to make your new feature work. You might be breaking existing features if you have to change existing tests. @@ -210,6 +210,14 @@ This includes checks for invalid syntax and formatting errors. automatically just before you commit your code. It knows which tool to run for each filetype, and therefore provides a convenient way to check your code. +Install the pre-commit hooks by running + +.. code-block:: bash + + pre-commit install + +to make sure your code is formatted correctly and does not contain mistakes +whenever you commit some changes. Python ~~~~~~ @@ -229,20 +237,22 @@ the repository is cloned, e.g. ``cd ESMValCore``, and run `prospector `_ -to automatically check for bugs and formatting mistakes and +In addition to prospector, we use `ruff `_ +to automatically format the code and to check for certain bugs and `mypy `_ for checking that `type hints `_ are correct. Note that `type hints`_ are completely optional, but if you do choose to add them, they should be correct (see the sketch below). +Both `ruff`_ and `mypy`_ are automatically run by pre-commit. When you make a pull request, adherence to the Python development best practices is checked in two ways: -#. As part of the unit tests, flake8_ and mypy_ are run by - `CircleCI `_, - see the section on Tests_ for more information. +#. A check that the code is formatted using the pre-commit hooks and does + not contain any mistakes that can be found by analyzing the code without + running it is performed by + `pre-commit.ci `_. #. `Codacy `_ is a service that runs prospector (and other code quality tools) on changed files and reports the results.
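+
+As an illustrative sketch (not taken from the ESMValCore code base), a small
+function with correct `type hints`_ that these tools accept could look like
+this:
+
+.. code-block:: python
+
+   def weighted_mean(values: list[float], weights: list[float]) -> float:
+       """Return the weighted mean of ``values``."""
+       # mypy checks that callers pass lists of floats and that the result
+       # is used as a float, matching these annotations.
+       total = sum(v * w for v, w in zip(values, weights))
+       return total / sum(weights)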
@@ -259,42 +269,25 @@ If you suspect prospector or Codacy may be wrong, please ask the Note that running prospector locally will give you quicker and sometimes more accurate results than waiting for Codacy. -Most formatting issues in Python code can be fixed automatically by -running the commands +Formatting issues in Python code can be fixed automatically by running the +command :: - isort some_file.py - -to sort the imports in `the standard way `__ -using `isort `__ and - -:: - - yapf -i some_file.py - -to add/remove whitespace as required by the standard using `yapf `__, - -:: - - docformatter -i some_file.py - -to run `docformatter `__ which helps -formatting the docstrings (such as line length, spaces). + pre-commit run --all YAML ~~~~ -Please use `yamllint `_ to check that your -YAML files do not contain mistakes. -``yamllint`` checks for valid syntax, common mistakes like key repetition and -cosmetic problems such as line length, trailing spaces, wrong indentation, etc. +We use `yamllint `_ to check that YAML files +do not contain mistakes. This is automatically run by pre-commit. Any text file ~~~~~~~~~~~~~ A generic tool to check for common spelling mistakes is `codespell `__. +This is automatically run by pre-commit. .. _documentation: @@ -379,13 +372,13 @@ the individual checks. To build the documentation on your own computer, go to the directory where the repository was cloned and run -:: +.. code-block:: bash sphinx-build doc doc/build or -:: +.. code-block:: bash sphinx-build -Ea doc doc/build @@ -393,7 +386,8 @@ to build it from scratch. Make sure that your newly added documentation builds without warnings or errors and looks correctly formatted. -CircleCI_ will build the documentation with the command: +`CircleCI `_ +will build the documentation with the command: .. code-block:: bash @@ -720,7 +714,7 @@ If the Codacy check keeps failing, please run prospector locally. If necessary, ask the pull request author to do the same and to address the reported issues. See the section on code_quality_ for more information. -Never merge a pull request with failing CircleCI or readthedocs checks. +Never merge a pull request with failing pre-commit, CircleCI, or readthedocs checks. .. _how-to-make-a-release: @@ -743,15 +737,15 @@ Perform the steps listed below with two persons, to reduce the risk of error. `PyPI `__, and `readthedocs `__. -The release of ESMValCore is tied to the release of ESMValTool. +The release of ESMValCore is tied to the release of ESMValTool. The detailed steps can be found in the ESMValTool :ref:`documentation `. -To start the procedure, ESMValCore gets released as a +To start the procedure, ESMValCore gets released as a release candidate to test the recipes in ESMValTool. If bugs are found -during the testing phase of the release candidate, make as many release -candidates for ESMValCore as needed in order to fix them. +during the testing phase of the release candidate, make as many release +candidates for ESMValCore as needed in order to fix them. -To make a new release of the package, be it a release candidate or the final release, +To make a new release of the package, be it a release candidate or the final release, follow these steps: 1. Check that all tests and builds work @@ -795,13 +789,13 @@ Use the script to create a draft of the release notes. This script uses the titles and labels of merged pull requests since the previous release. -Open a discussion to allow members of the development team to nominate pull -requests as highlights.
Add the most voted pull requests as highlights at the -beginning of changelog. After the highlights section, list any backward -incompatible changes that the release may include. The +Open a discussion to allow members of the development team to nominate pull +requests as highlights. Add the most voted pull requests as highlights at the +beginning of the changelog. After the highlights section, list any backward +incompatible changes that the release may include. The :ref:`backward compatibility policy` -lists the information that should be provided by the developer of any backward -incompatible change. Make sure to also list any deprecations that the release +lists the information that should be provided by the developer of any backward +incompatible change. Make sure to also list any deprecations that the release may include, as well as a brief description on how to upgrade a deprecated feature. Review the results, and if anything needs changing, change it on GitHub and re-run the script until the changelog looks acceptable. diff --git a/doc/figures/ESMValTool-logo-2-dark.png b/doc/figures/ESMValTool-logo-2-dark.png new file mode 100644 index 0000000000..e120b2e731 Binary files /dev/null and b/doc/figures/ESMValTool-logo-2-dark.png differ diff --git a/doc/figures/ESMValTool-logo-2-glow.png b/doc/figures/ESMValTool-logo-2-glow.png new file mode 100644 index 0000000000..14aef201ee Binary files /dev/null and b/doc/figures/ESMValTool-logo-2-glow.png differ diff --git a/doc/figures/ESMValTool-logo-2.png b/doc/figures/ESMValTool-logo-2.png index e876219038..aaaa3578a5 100644 Binary files a/doc/figures/ESMValTool-logo-2.png and b/doc/figures/ESMValTool-logo-2.png differ diff --git a/doc/gensidebar.py b/doc/gensidebar.py index 01f8b3e839..a4a0cae2ee 100644 --- a/doc/gensidebar.py +++ b/doc/gensidebar.py @@ -38,7 +38,7 @@ def _toctree(): def _endl(): lines.append("") - def _write(project, desc, link, mapping=conf['intersphinx_mapping']): + def _write(project, desc, link, mapping=conf["intersphinx_mapping"]): if project != conf_api: if do_gen: args = desc, mapping[project][0], link diff --git a/doc/index.rst b/doc/index.rst index 3026d4b564..e4bb98a9d2 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -1,6 +1,27 @@ Welcome to ESMValTool's documentation! ====================================== +To get a first impression of what ESMValTool and ESMValCore can do for you, +have a look at our blog posts +`Analysis-ready climate data with ESMValCore `_ +and +`ESMValTool: Recipes for solid climate science `_. + +A tutorial is available at https://tutorial.esmvaltool.org. + +A series of video lectures has been created by `ACCESS-NRI `_. +While these are tailored for ACCESS users, they are still very informative. + +.. raw:: html + + + +| + +For more detailed information, the documentation is available below. + +Get in touch! Contact information is available :ref:`here `. + .. include:: _sidebar.rst.inc Indices and tables @@ -8,4 +29,3 @@ Indices and tables * :ref:`genindex` * :ref:`search` - diff --git a/doc/quickstart/configure.rst b/doc/quickstart/configure.rst index cb5c733404..37e6efd230 100644 --- a/doc/quickstart/configure.rst +++ b/doc/quickstart/configure.rst @@ -440,71 +440,6 @@ corresponding command line arguments ``--search_esgf=when_missing`` or tool by pressing the ``Ctrl`` and ``C`` keys on your keyboard simultaneously several times, edit the recipe so it contains fewer datasets and try again. -For downloading some files, you may need to log in to be able to download the -data.
- -See the -`ESGF user guide `_ -for instructions on how to create an ESGF OpenID account if you do not have -one yet. -Note that the OpenID account consists of 3 components instead of the usual -two, in addition a username and password you also need the hostname of the -provider of the ID; for example -`esgf-data.dkrz.de `_. -Even though the account is issued by a particular host, the same OpenID -account can be used to download data from all hosts in the ESGF. - -Next, configure your system so the ``esmvaltool`` can use your credentials. -This can be done using the keyring_ package or they can be stored in a -:ref:`configuration file `. - -.. _keyring: - -Storing credentials in keyring ------------------------------- -First install the keyring package. Note that this requires a supported -backend that may not be available on compute clusters, see the -`keyring documentation `__ for more -information. - -.. code-block:: bash - - pip install keyring - -Next, set your username and password by running the commands: - -.. code-block:: bash - - keyring set ESGF hostname - keyring set ESGF username - keyring set ESGF password - -for example, if you created an account on the host `esgf-data.dkrz.de`_ with username -'cookiemonster' and password 'Welcome01', run the command - -.. code-block:: bash - - keyring set ESGF hostname - -this will display the text - -.. code-block:: bash - - Password for 'hostname' in 'ESGF': - -type ``esgf-data.dkrz.de`` (the characters will not be shown) and press ``Enter``. -Repeat the same procedure with ``keyring set ESGF username``, type ``cookiemonster`` -and press ``Enter`` and ``keyring set ESGF password``, type ``Welcome01`` and -press ``Enter``. - -To check that you entered your credentials correctly, run: - -.. code-block:: bash - - keyring get ESGF hostname - keyring get ESGF username - keyring get ESGF password - .. _config_esgf_pyclient: Configuration file @@ -514,49 +449,6 @@ An optional configuration file can be created for configuring how the tool uses to find and download data. The name of this file is ``~/.esmvaltool/esgf-pyclient.yml``. -Logon -````` -In the ``logon`` section you can provide arguments that will be passed on to -:py:meth:`pyesgf.logon.LogonManager.logon`. -For example, you can store the hostname, username, and password or your OpenID -account in the file like this: - -.. code-block:: yaml - - logon: - hostname: "your-hostname" - username: "your-username" - password: "your-password" - -for example - -.. code-block:: yaml - - logon: - hostname: "esgf-data.dkrz.de" - username: "cookiemonster" - password: "Welcome01" - -if you created an account on the host `esgf-data.dkrz.de`_ with username -'cookiemonster' and password 'Welcome01'. -Alternatively, you can configure an interactive log in: - -.. code-block:: yaml - - logon: - interactive: true - -Note that storing your password in plain text in the configuration -file is less secure. -On shared systems, make sure the permissions of the file are set so -only you and administrators can read it, i.e. - -.. code-block:: bash - - ls -l ~/.esmvaltool/esgf-pyclient.yml - -shows permissions ``-rw-------``. - Search `````` Any arguments to :py:obj:`pyesgf.search.connection.SearchConnection` can diff --git a/doc/quickstart/find_data.rst b/doc/quickstart/find_data.rst index 5692a9f0b1..e9077884f2 100644 --- a/doc/quickstart/find_data.rst +++ b/doc/quickstart/find_data.rst @@ -398,32 +398,17 @@ ESMValCore can automatically make native ICON data `UGRID loading the data. 
The UGRID conventions provide a standardized format to store data on
 unstructured grids, which is required by many software packages or tools to
-work correctly.
+work correctly and specifically by Iris to interpret the grid as a
+:ref:`mesh `.
 An example is the horizontal regridding of native ICON data to a regular grid.
-While the built-in :ref:`nearest scheme ` can handle
-unstructured grids not in UGRID format, using more complex
-regridding algorithms (for example provided by the
-:doc:`iris-esmf-regrid:index` package through :ref:`generic regridding
-schemes`) requires the input data in UGRID format.
-The following code snippet provides a preprocessor that regrids native ICON
-data to a 1°x1° grid using `ESMF's first-order conservative regridding
-algorithm `__:
-
-.. code-block:: yaml
-
-    preprocessors:
-      regrid_icon:
-        regrid:
-          target_grid: 1x1
-          scheme:
-            reference: esmf_regrid.schemes:ESMFAreaWeighted
-
+While the :ref:`default regridding schemes ` `linear` and
+`nearest` can handle unstructured grids (i.e., not UGRID-compliant) and
+meshes (i.e., UGRID-compliant), the `area_weighted` scheme requires the
+input data to be in UGRID format.
 This automatic UGRIDization is enabled by default, but can be switched off
 with the facet ``ugrid: false`` in the recipe or the extra facets (see below).
-This is useful for diagnostics that do not support input data in UGRID format
-(yet) like the :ref:`Psyplot diagnostic ` or
-if you want to use the built-in :ref:`nearest scheme `
-regridding scheme.
+This is useful for diagnostics that act on the native ICON grid and do not
+support input data in UGRID format (yet), like the
+:ref:`Psyplot diagnostic `.
 
 For 3D ICON variables, ESMValCore tries to add the pressure level information
 (from the variables `pfull` and `phalf`) and/or altitude information (from the
@@ -564,6 +549,64 @@ explained in :ref:`extra_facets`, and which content is
 :download:`available here `.
 These multi-variable files must also undergo some data selection.
 
+.. _read_access-esm:
+
+ACCESS-ESM
+^^^^^^^^^^
+
+ESMValTool can read native `ACCESS-ESM `__
+model output.
+
+.. warning::
+
+   This is the first version of the ACCESS-ESM CMORizer for ESMValCore.
+   Currently supported variables: ``pr``, ``ps``, ``psl``, ``rlds``, ``tas``,
+   ``ta``, ``va``, ``ua``, ``zg``, ``hus``, ``clt``, ``rsus``, ``rlus``.
+
+The default naming conventions for input directories and files for ACCESS
+output are:
+
+* input directories: ``{institute}/{sub_dataset}/{exp}/{modeling_realm}/netCDF``
+* input files: ``{sub_dataset}.{special_attr}-*.nc``
+
+.. hint::
+
+   We only provide one default `input_dir` since this is how ACCESS-ESM native
+   data was stored on NCI. Users can modify this path in the
+   :ref:`config-developer` to match their local file structure.
+
+
+Thus, example dataset entries could look like this:
+
+.. code-block:: yaml
+
+    datasets:
+      - {project: ACCESS, mip: Amon, dataset: ACCESS_ESM1_5,
+         sub_dataset: HI-CN-05, exp: history, modeling_realm: atm,
+         special_attr: pa, start_year: 1986, end_year: 1986}
+
+
+Similar to any other fix, the ACCESS-ESM fix allows the use of :ref:`extra
+facets`.
+By default, the file :download:`access-mappings.yml
+` is used for that
+purpose.
+For some variables, extra facets are necessary; otherwise ESMValCore cannot
+read them properly.
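+
+For example, an extra facet can also be set per dataset directly in the
+recipe by adding it to the dataset entry. A minimal sketch (the ``raw_name``
+value shown here is hypothetical; see ``access-mappings.yml`` for the actual
+variable names):
+
+.. code-block:: yaml
+
+    datasets:
+      - {project: ACCESS, mip: Amon, dataset: ACCESS_ESM1_5,
+         sub_dataset: HI-CN-05, exp: history, modeling_realm: atm,
+         special_attr: pa, raw_name: fld_s03i236,
+         start_year: 1986, end_year: 1986}
+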
+Supported keys for extra facets are:
+
+==================== ====================================== =================================
+Key                  Description                            Default value if not specified
+==================== ====================================== =================================
+``raw_name``         Variable name of the variable in the   CMOR variable name of the
+                     raw input file                         corresponding variable
+``modeling_realm``   Realm attribute; possible values       No default (needs to be
+                     include `atm`, `ice` and `oce`         specified in extra facets or
+                                                            recipe if default DRS is used)
+``special_attr``     A special attribute in the filename    No default
+                     of `ACCESS-ESM` raw data; it is
+                     related to the frequency of the raw
+                     data
+``sub_dataset``      Part of the ACCESS-ESM raw dataset     No default
+                     root; needs to be specified if you
+                     want to use the CMORizer
+==================== ====================================== =================================
 
 .. _data-retrieval:
 
@@ -802,7 +845,7 @@ about this since we can point the user to the specific functionality
 `here `_
 but we will underline that the initial loading is done by adhering to the CF
 Conventions that `iris` operates by as well (see `CF Conventions Document
 `_ and the search
-page for CF `standard names `_).
+page for CF `standard names `_).
 
 Data concatenation from multiple sources
 ========================================
diff --git a/doc/quickstart/install.rst b/doc/quickstart/install.rst
index 4c9305dec7..0a821a0df9 100644
--- a/doc/quickstart/install.rst
+++ b/doc/quickstart/install.rst
@@ -22,7 +22,7 @@ and install ESMValCore into it with a single command:
 
 .. code-block:: bash
 
-    conda create --name esmvalcore -c conda-forge esmvalcore 'python=3.10'
+    conda create --name esmvalcore -c conda-forge esmvalcore
 
 Don't forget to activate the newly created environment after the installation:
 
@@ -195,13 +195,15 @@ To install from source for development, follow these instructions.
   e.g. ``pip install --trusted-host=pypi.python.org --trusted-host=pypi.org
   --trusted-host=files.pythonhosted.org -e .[develop]``
 
 - Test that your installation was successful by running ``esmvaltool -h``.
+- Install the :ref:`esmvaltool:pre-commit` hooks by running:
+  ``pre-commit install``.
 
 Pre-installed versions on HPC clusters / other servers
 ------------------------------------------------------
 
 If you would like to use pre-installed versions on HPC clusters (currently
 CEDA-JASMIN and DKRZ-Levante),
-and other servers (currently Met Office Linux estate), please have a look at
+and other servers (currently Met Office Linux estate), please have a look at
 :ref:`these instructions `.
diff --git a/doc/quickstart/run.rst b/doc/quickstart/run.rst
index ebde6d4075..fec474f290 100644
--- a/doc/quickstart/run.rst
+++ b/doc/quickstart/run.rst
@@ -79,7 +79,7 @@ This feature is available for projects that are hosted on the ESGF, i.e.
 CMIP3, CMIP5, CMIP6, CORDEX, and obs4MIPs.
 
 To control the strictness of the CMOR checker and the checks during concatenation
-on auxiliary coordinates, supplementary variables, and derived coordinates,
+on auxiliary coordinates, supplementary variables, and derived coordinates,
 use the flag ``--check_level``:
 
 .. code:: bash
@@ -93,14 +93,14 @@ Possible values are:
 - `default`: fail if there are any errors.
 - `strict`: fail if there are any warnings.
 
-To re-use pre-processed files from a previous run of the same recipe, you can
+To reuse pre-processed files from a previous run of the same recipe, you can
 use
 
 ..
code:: bash esmvaltool run recipe_example.yml --resume_from ~/esmvaltool_output/recipe_python_20210930_123907 -Multiple directories can be specified for re-use, make sure to quote them: +Multiple directories can be specified for reuse, make sure to quote them: .. code:: bash diff --git a/doc/recipe/index.rst b/doc/recipe/index.rst index 98c3f6c237..bdb57e2336 100644 --- a/doc/recipe/index.rst +++ b/doc/recipe/index.rst @@ -8,4 +8,3 @@ The recipe format Overview Preprocessor - \ No newline at end of file diff --git a/doc/recipe/preprocessor.rst b/doc/recipe/preprocessor.rst index dd5e61b36b..ddd9d2b472 100644 --- a/doc/recipe/preprocessor.rst +++ b/doc/recipe/preprocessor.rst @@ -611,7 +611,7 @@ See also :func:`esmvalcore.preprocessor.weighting_landsea_fraction`. .. _masking: Masking -======= +======== Introduction to masking ----------------------- @@ -890,15 +890,15 @@ The arguments are defined below: Regridding (interpolation, extrapolation) schemes ------------------------------------------------- -ESMValCore has a number of built-in regridding schemes, which are presented in -:ref:`built-in regridding schemes`. Additionally, it is also possible to use -third party regridding schemes designed for use with :doc:`Iris -`. This is explained in :ref:`generic regridding schemes`. +ESMValCore provides three default regridding schemes, which are presented in +:ref:`default regridding schemes`. Additionally, it is also possible to use +third party regridding schemes designed for use with :meth:`iris.cube.Cube.regrid`. +This is explained in :ref:`generic regridding schemes`. Grid types ~~~~~~~~~~ -In ESMValCore, we distinguish between three grid types (note that these might +In ESMValCore, we distinguish between various grid types (note that these might differ from other definitions): * **Regular grid**: A rectilinear grid with 1D latitude and 1D longitude @@ -907,30 +907,34 @@ differ from other definitions): longitude coordinates with common dimensions. * **Unstructured grid**: A grid with 1D latitude and 1D longitude coordinates with common dimensions (i.e., a simple list of points). +* **Mesh**: A mesh as supported by Iris and described in :ref:`iris:ugrid`. -.. _built-in regridding schemes: +.. _default regridding schemes: -Built-in regridding schemes -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Default regridding schemes +~~~~~~~~~~~~~~~~~~~~~~~~~~ * ``linear``: Bilinear regridding. For source data on a regular grid, uses :obj:`~iris.analysis.Linear` with `extrapolation_mode='mask'`. - For source data on an irregular grid, uses - :class:`~esmvalcore.preprocessor.regrid_schemes.ESMPyLinear`. + For source and/or target data on an irregular grid or mesh, uses + :class:`~esmvalcore.preprocessor.regrid_schemes.IrisESMFRegrid` with + `method='bilinear'`. For source data on an unstructured grid, uses :class:`~esmvalcore.preprocessor.regrid_schemes.UnstructuredLinear`. * ``nearest``: Nearest-neighbor regridding. For source data on a regular grid, uses :obj:`~iris.analysis.Nearest` with `extrapolation_mode='mask'`. - For source data on an irregular grid, uses - :class:`~esmvalcore.preprocessor.regrid_schemes.ESMPyNearest`. + For source and/or target data on an irregular grid or mesh, uses + :class:`~esmvalcore.preprocessor.regrid_schemes.IrisESMFRegrid` with + `method='nearest'`. For source data on an unstructured grid, uses :class:`~esmvalcore.preprocessor.regrid_schemes.UnstructuredNearest`. * ``area_weighted``: First-order conservative (area-weighted) regridding. 
For source data on a regular grid, uses :obj:`~iris.analysis.AreaWeighted`.
-  For source data on an irregular grid, uses
-  :class:`~esmvalcore.preprocessor.regrid_schemes.ESMPyAreaWeighted`.
+  For source and/or target data on an irregular grid or mesh, uses
+  :class:`~esmvalcore.preprocessor.regrid_schemes.IrisESMFRegrid` with
+  `method='conservative'`.
   Source data on an unstructured grid is not supported.
 
 .. _generic regridding schemes:
 
@@ -950,7 +954,9 @@ afforded by the built-in schemes described above.
 
 To facilitate this, the :func:`~esmvalcore.preprocessor.regrid` preprocessor
 allows the use of any scheme designed for Iris. The scheme must be installed
-and importable. To use this feature, the ``scheme`` key passed to the
+and importable. Several such schemes are provided by :mod:`iris.analysis` and
+:mod:`esmvalcore.preprocessor.regrid_schemes`.
+To use this feature, the ``scheme`` key passed to the
 preprocessor must be a dictionary instead of a simple string that contains all
 necessary information. That includes a ``reference`` to the desired scheme
 itself, as well as any arguments that should be passed through to the
@@ -996,10 +1002,13 @@ module, the second refers to the scheme, i.e. some callable that will be called
 with the remaining entries of the ``scheme`` dictionary passed as keyword
 arguments.
 
-One package that aims to capitalize on the :ref:`support for unstructured grids
-introduced in Iris 3.2 ` is :doc:`iris-esmf-regrid:index`.
+One package that aims to capitalize on the :ref:`support for meshes
+introduced in Iris 3.2 ` is :doc:`esmf_regrid:index`.
 It aims to provide lazy regridding for structured regular and irregular grids,
-as well as unstructured grids.
+as well as meshes. However, it is recommended to use these schemes through
+the :obj:`esmvalcore.preprocessor.regrid_schemes.IrisESMFRegrid` wrapper,
+as that provides more efficient handling of masks.
+
 An example of its usage in a preprocessor is:
 
 .. code-block:: yaml
@@ -1009,8 +1018,11 @@ An example of its usage in a preprocessor is:
       regrid:
         target_grid: 2.5x2.5
         scheme:
-          reference: esmf_regrid.schemes:ESMFAreaWeighted
+          reference: esmvalcore.preprocessor.regrid_schemes:IrisESMFRegrid
+          method: conservative
           mdtol: 0.7
+          use_src_mask: true
+          collapse_src_mask_along: ZT
 
 Additionally, the use of generic schemes that take source and target grid cubes as
 arguments is also supported. The call function for such schemes must be defined as
@@ -1018,7 +1030,7 @@ arguments is also supported. The call function for such schemes must be defined
 
 The `regrid` module will automatically pass the source and target grid cubes
 as inputs of the scheme.
 An example of this usage is the
 :func:`~esmf_regrid.schemes.regrid_rectilinear_to_rectilinear`
-scheme available in :doc:`iris-esmf-regrid:index`:
+scheme available in :doc:`esmf_regrid:index`:
 
 .. code-block:: yaml
 
@@ -2410,7 +2422,7 @@ See also :func:`esmvalcore.preprocessor.linear_trend_stderr`.
 
 .. _detrend:
 
 Detrend
-=======
+========
 
 ESMValCore also supports detrending along any dimension using the preprocessor
 function 'detrend'.
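+
+For example, a preprocessor that removes a linear trend along the time
+dimension could be written as follows (a minimal sketch; the ``dimension``
+and ``method`` values shown are assumed to be the defaults):
+
+.. code-block:: yaml
+
+    preprocessors:
+      detrend_time:
+        detrend:
+          dimension: time
+          method: linear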
diff --git a/environment.yml b/environment.yml index 6405b6ca57..eaf317965f 100644 --- a/environment.yml +++ b/environment.yml @@ -8,7 +8,7 @@ dependencies: - cartopy - cf-units - cftime - - dask + - dask !=2024.8.0 # github.com/ESMValGroup/ESMValCore/issues/2503 - dask-jobqueue - distributed - esgf-pyclient >=0.3.1 @@ -18,9 +18,9 @@ dependencies: - fire - geopy - humanfriendly - - importlib_metadata # required for Python < 3.10 - - iris >=3.9.0 - - iris-esmf-regrid >=0.10.0 # github.com/SciTools-incubator/iris-esmf-regrid/pull/342 + - iris >=3.10.0 + - iris-esmf-regrid >=0.11.0 + - iris-grib - isodate - jinja2 - libnetcdf !=4.9.1 # to avoid hdf5 warnings @@ -29,14 +29,14 @@ dependencies: - netcdf4 - numpy !=1.24.3,<2.0.0 # avoid pulling 2.0.0rcX - packaging - - pandas !=2.2.0,!=2.2.1,!=2.2.2 # github.com/ESMValGroup/ESMValCore/pull/2305 and #2349 + - pandas - pillow - pip !=21.3 - prov - psutil - py-cordex - pybtex - - python >=3.9 + - python >=3.10 - python-stratify >=0.3 - pyyaml - requests @@ -50,29 +50,17 @@ dependencies: - sphinx >=6.1.3 - pydata-sphinx-theme # Python packages needed for testing - - mypy >=0.990 - pytest >=3.9,!=6.0.0rc1,!=6.0.0 - pytest-cov >=2.10.1 - pytest-env - pytest-html !=2.1.0 - pytest-metadata >=1.5.1 - pytest-mock - - pytest-mypy - pytest-xdist # Not on conda-forge - ESMValTool_sample_data==0.0.3 - # Still for testing, MyPy library stubs - - types-requests - - types-PyYAML # Python packages needed for installing in development mode - - codespell - - docformatter - - isort - pre-commit - pylint - - flake8 >= 7 - - pydocstyle # Not on conda forge - vprof - - yamllint - - yapf - pip: - - ESMValTool_sample_data + - ESMValTool_sample_data diff --git a/esmvalcore/__init__.py b/esmvalcore/__init__.py index 4d1bb19255..29fe7e2bb9 100644 --- a/esmvalcore/__init__.py +++ b/esmvalcore/__init__.py @@ -1,4 +1,5 @@ """ESMValTool core package.""" + import logging import os @@ -8,9 +9,9 @@ logger.addHandler(logging.NullHandler()) __all__ = [ - '__version__', - 'cmor', - 'preprocessor', + "__version__", + "cmor", + "preprocessor", ] diff --git a/esmvalcore/_citation.py b/esmvalcore/_citation.py index 510aa8cb42..fd6c733bcc 100644 --- a/esmvalcore/_citation.py +++ b/esmvalcore/_citation.py @@ -1,4 +1,5 @@ """Citation module.""" + import logging import os import re @@ -11,7 +12,7 @@ logger = logging.getLogger(__name__) -CMIP6_URL_STEM = 'https://cera-www.dkrz.de/WDCC/ui/cerasearch' +CMIP6_URL_STEM = "https://cera-www.dkrz.de/WDCC/ui/cerasearch" # The technical overview paper should always be cited ESMVALTOOL_PAPER = ( @@ -26,7 +27,7 @@ "\tpages = {1179--1199},\n" "\tauthor = {Mattia Righi and Bouwe Andela and Veronika Eyring " "and Axel Lauer and Valeriu Predoi and Manuel Schlund " - "and Javier Vegas-Regidor and Lisa Bock and Bj\"{o}rn Br\"{o}tz " + 'and Javier Vegas-Regidor and Lisa Bock and Bj"{o}rn Br"{o}tz ' "and Lee de Mora and Faruk Diblen and Laura Dreyer " "and Niels Drost and Paul Earnshaw and Birgit Hassler " "and Nikolay Koldunov and Bill Little and Saskia Loosveldt Tomas " @@ -34,7 +35,8 @@ "\ttitle = {Earth System Model Evaluation Tool (ESMValTool) v2.0 " "-- technical overview},\n" "\tjournal = {Geoscientific Model Development}\n" - "}\n") + "}\n" +) def _write_citation_files(filename, provenance): @@ -56,22 +58,22 @@ def _write_citation_files(filename, provenance): for item in provenance.records: # get cmip6 data citation info - cmip6_data = 'CMIP6' in item.get_attribute('attribute:mip_era') + cmip6_data = "CMIP6" in item.get_attribute("attribute:mip_era") if 
cmip6_data: url_prefix = _make_url_prefix(item.attributes) cmip6_info_urls.add(_make_info_url(url_prefix)) cmip6_json_urls.add(_make_json_url(url_prefix)) # get other citation info - references = item.get_attribute('attribute:references') + references = item.get_attribute("attribute:references") if not references: # ESMValTool CMORization scripts use 'reference' (without final s) - references = item.get_attribute('attribute:reference') + references = item.get_attribute("attribute:reference") if references: - if item.identifier.namespace.prefix == 'recipe': + if item.identifier.namespace.prefix == "recipe": # get recipe citation tags tags.update(references) - elif item.get_attribute('attribute:script_file'): + elif item.get_attribute("attribute:script_file"): # get diagnostics citation tags tags.update(references) elif not cmip6_data: @@ -101,9 +103,10 @@ def _save_citation_bibtex(product_name, tags, json_urls): entries.add(cmip_citation) citation_entries.extend(sorted(entries)) - with open(f'{product_name}_citation.bibtex', - 'w', encoding='utf-8') as file: - file.write('\n'.join(citation_entries)) + with open( + f"{product_name}_citation.bibtex", "w", encoding="utf-8" + ) as file: + file.write("\n".join(citation_entries)) def _save_citation_info_txt(product_name, info_urls, other_info): @@ -114,22 +117,26 @@ def _save_citation_info_txt(product_name, info_urls, other_info): lines.append( "Follow the links below to find more information about CMIP6 data:" ) - lines.extend(f'- {url}' for url in sorted(info_urls)) + lines.extend(f"- {url}" for url in sorted(info_urls)) # Save any references from the 'references' and 'reference' NetCDF global # attributes. if other_info: if lines: - lines.append('') - lines.append("Additional data citation information was found, for " - "which no entry is available in the bibtex file:") - lines.extend('- ' + str(t).replace('\n', ' ') - for t in sorted(other_info)) + lines.append("") + lines.append( + "Additional data citation information was found, for " + "which no entry is available in the bibtex file:" + ) + lines.extend( + "- " + str(t).replace("\n", " ") for t in sorted(other_info) + ) if lines: - with open(f'{product_name}_data_citation_info.txt', - 'w', encoding='utf-8') as file: - file.write('\n'.join(lines) + '\n') + with open( + f"{product_name}_data_citation_info.txt", "w", encoding="utf-8" + ) as file: + file.write("\n".join(lines) + "\n") def _extract_tags(tags): @@ -139,46 +146,48 @@ def _extract_tags(tags): For example, a single entry in the list `tags` could be the string "['acknow_project', 'acknow_author']". 
""" - pattern = re.compile(r'[\w-]+') + pattern = re.compile(r"[\w-]+") return set(pattern.findall(str(tags))) def _get_response(url): """Return information from CMIP6 Data Citation service in json format.""" json_data = None - if url.lower().startswith('https'): + if url.lower().startswith("https"): try: response = requests.get(url, timeout=5) if response.status_code == 200: json_data = response.json() else: - logger.warning('Error in the CMIP6 citation link: %s', url) + logger.warning("Error in the CMIP6 citation link: %s", url) except IOError: - logger.info('No network connection, ' - 'unable to retrieve CMIP6 citation information') + logger.info( + "No network connection, " + "unable to retrieve CMIP6 citation information" + ) return json_data def _json_to_bibtex(data): """Make a bibtex entry from CMIP6 Data Citation json data.""" - url = 'url not found' - title = data.get('titles', ['title not found'])[0] - publisher = data.get('publisher', 'publisher not found') - year = data.get('publicationYear', 'publicationYear not found') - authors = 'creators not found' - doi = 'doi not found' - - if 'creators' in data: + url = "url not found" + title = data.get("titles", ["title not found"])[0] + publisher = data.get("publisher", "publisher not found") + year = data.get("publicationYear", "publicationYear not found") + authors = "creators not found" + doi = "doi not found" + + if "creators" in data: author_list = [ - item.get('creatorName', '') for item in data['creators'] + item.get("creatorName", "") for item in data["creators"] ] - authors = ' and '.join(author_list) + authors = " and ".join(author_list) if not authors: - authors = 'creators not found' + authors = "creators not found" - if 'identifier' in data: - doi = data['identifier'].get('id', 'doi not found') - url = f'https://doi.org/{doi}' + if "identifier" in data: + doi = data["identifier"].get("id", "doi not found") + url = f"https://doi.org/{doi}" bibtex_entry = textwrap.dedent(f""" @misc{{{url}, @@ -196,14 +205,16 @@ def _json_to_bibtex(data): @lru_cache(maxsize=1024) def _collect_bibtex_citation(tag): """Collect information from bibtex files.""" - bibtex_file = DIAGNOSTICS.references / f'{tag}.bibtex' + bibtex_file = DIAGNOSTICS.references / f"{tag}.bibtex" if bibtex_file.is_file(): - entry = bibtex_file.read_text(encoding='utf-8') + entry = bibtex_file.read_text(encoding="utf-8") else: - entry = '' + entry = "" logger.warning( "The reference file %s does not exist, citation information " - "incomplete.", bibtex_file) + "incomplete.", + bibtex_file, + ) return entry @@ -214,7 +225,7 @@ def _collect_cmip_citation(json_url): if json_data: bibtex_entry = _json_to_bibtex(json_data) else: - bibtex_entry = '' + bibtex_entry = "" return bibtex_entry @@ -222,26 +233,26 @@ def _make_url_prefix(attribute): """Make url prefix based on CMIP6 Data Citation Service.""" # the order of keys is important localpart = { - 'mip_era': '', - 'activity_id': '', - 'institution_id': '', - 'source_id': '', - 'experiment_id': '', + "mip_era": "", + "activity_id": "", + "institution_id": "", + "source_id": "", + "experiment_id": "", } for key, value in attribute: if key.localpart in localpart: localpart[key.localpart] = value - url_prefix = '.'.join(localpart.values()) + url_prefix = ".".join(localpart.values()) return url_prefix def _make_json_url(url_prefix): """Make json url based on CMIP6 Data Citation Service.""" - json_url = f'{CMIP6_URL_STEM}/cerarest/exportcmip6?input={url_prefix}' + json_url = 
f"{CMIP6_URL_STEM}/cerarest/exportcmip6?input={url_prefix}" return json_url def _make_info_url(url_prefix): """Make info url based on CMIP6 Data Citation Service.""" - info_url = f'{CMIP6_URL_STEM}/cmip6?input={url_prefix}' + info_url = f"{CMIP6_URL_STEM}/cmip6?input={url_prefix}" return info_url diff --git a/esmvalcore/_main.py b/esmvalcore/_main.py index 055bd04212..32b692e070 100755 --- a/esmvalcore/_main.py +++ b/esmvalcore/_main.py @@ -25,7 +25,8 @@ For further help, please read the documentation at http://docs.esmvaltool.org. Have fun! -""" # noqa: line-too-long pylint: disable=line-too-long +""" + # pylint: disable=import-outside-toplevel import logging import os @@ -42,7 +43,8 @@ # set up logging logger = logging.getLogger(__name__) -HEADER = r""" +HEADER = ( + r""" ______________________________________________________________________ _____ ____ __ ____ __ _ _____ _ | ____/ ___|| \/ \ \ / /_ _| |_ _|__ ___ | | @@ -51,7 +53,9 @@ |_____|____/|_| |_| \_/ \__,_|_| |_|\___/ \___/|_| ______________________________________________________________________ -""" + __doc__ +""" + + __doc__ +) def parse_resume(resume, recipe): @@ -59,17 +63,19 @@ def parse_resume(resume, recipe): if not resume: return [] if isinstance(resume, str): - resume = resume.split(' ') + resume = resume.split(" ") for i, resume_dir in enumerate(resume): resume[i] = Path(os.path.expandvars(resume_dir)).expanduser() # Sanity check resume directories: - current_recipe = recipe.read_text(encoding='utf-8') + current_recipe = recipe.read_text(encoding="utf-8") for resume_dir in resume: - resume_recipe = resume_dir / 'run' / recipe.name - if current_recipe != resume_recipe.read_text(encoding='utf-8'): - raise ValueError(f'Only identical recipes can be resumed, but ' - f'{resume_recipe} is different from {recipe}') + resume_recipe = resume_dir / "run" / recipe.name + if current_recipe != resume_recipe.read_text(encoding="utf-8"): + raise ValueError( + f"Only identical recipes can be resumed, but " + f"{resume_recipe} is different from {recipe}" + ) return resume @@ -80,17 +86,21 @@ def process_recipe(recipe_file: Path, session): from esmvalcore._recipe.recipe import read_recipe_file from esmvalcore.config._dask import check_distributed_config + if not recipe_file.is_file(): import errno - raise OSError(errno.ENOENT, "Specified recipe file does not exist", - recipe_file) + + raise OSError( + errno.ENOENT, "Specified recipe file does not exist", recipe_file + ) timestamp1 = datetime.datetime.utcnow() timestamp_format = "%Y-%m-%d %H:%M:%S" logger.info( "Starting the Earth System Model Evaluation Tool at time: %s UTC", - timestamp1.strftime(timestamp_format)) + timestamp1.strftime(timestamp_format), + ) logger.info(70 * "-") logger.info("RECIPE = %s", recipe_file) @@ -100,25 +110,29 @@ def process_recipe(recipe_file: Path, session): logger.info("PLOTDIR = %s", session.plot_dir) logger.info(70 * "-") - n_processes = session['max_parallel_tasks'] or os.cpu_count() + n_processes = session["max_parallel_tasks"] or os.cpu_count() logger.info("Running tasks using at most %s processes", n_processes) logger.info( "If your system hangs during execution, it may not have enough " - "memory for keeping this number of tasks in memory.") - logger.info("If you experience memory problems, try reducing " - "'max_parallel_tasks' in your user configuration file.") + "memory for keeping this number of tasks in memory." + ) + logger.info( + "If you experience memory problems, try reducing " + "'max_parallel_tasks' in your user configuration file." 
+ ) check_distributed_config() - if session['compress_netcdf']: + if session["compress_netcdf"]: logger.warning( "You have enabled NetCDF compression. Accessing .nc files can be " "much slower than expected if your access pattern does not match " "their internal pattern. Make sure to specify the expected " "access pattern in the recipe as a parameter to the 'save' " "preprocessor function. If the problem persists, try disabling " - "NetCDF compression.") + "NetCDF compression." + ) # copy recipe to run_dir for future reference shutil.copy2(recipe_file, session.run_dir) @@ -132,11 +146,12 @@ def process_recipe(recipe_file: Path, session): timestamp2 = datetime.datetime.utcnow() logger.info( "Ending the Earth System Model Evaluation Tool at time: %s UTC", - timestamp2.strftime(timestamp_format)) + timestamp2.strftime(timestamp_format), + ) logger.info("Time for running the recipe was: %s", timestamp2 - timestamp1) -class Config(): +class Config: """Manage ESMValTool's configuration. This group contains utilities to manage ESMValTool configuration @@ -148,25 +163,26 @@ def _copy_config_file(filename, overwrite, path): import shutil from .config._logging import configure_logging - configure_logging(console_log_level='info') + + configure_logging(console_log_level="info") if not path: - path = os.path.join(os.path.expanduser('~/.esmvaltool'), filename) + path = os.path.join(os.path.expanduser("~/.esmvaltool"), filename) if os.path.isfile(path): if overwrite: - logger.info('Overwriting file %s.', path) + logger.info("Overwriting file %s.", path) else: - logger.info('Copy aborted. File %s already exists.', path) + logger.info("Copy aborted. File %s already exists.", path) return target_folder = os.path.dirname(path) if not os.path.isdir(target_folder): - logger.info('Creating folder %s', target_folder) + logger.info("Creating folder %s", target_folder) os.makedirs(target_folder) conf_file = os.path.join(os.path.dirname(__file__), filename) - logger.info('Copying file %s to path %s.', conf_file, path) + logger.info("Copying file %s to path %s.", conf_file, path) shutil.copy2(conf_file, path) - logger.info('Copy finished.') + logger.info("Copy finished.") @classmethod def get_config_user(cls, overwrite=False, path=None): @@ -183,7 +199,7 @@ def get_config_user(cls, overwrite=False, path=None): If not provided, the file will be copied to .esmvaltool in the user's home. """ - cls._copy_config_file('config-user.yml', overwrite, path) + cls._copy_config_file("config-user.yml", overwrite, path) @classmethod def get_config_developer(cls, overwrite=False, path=None): @@ -200,10 +216,10 @@ def get_config_developer(cls, overwrite=False, path=None): If not provided, the file will be copied to .esmvaltool in the user's home. """ - cls._copy_config_file('config-developer.yml', overwrite, path) + cls._copy_config_file("config-developer.yml", overwrite, path) -class Recipes(): +class Recipes: """List, show and retrieve installed recipes. 
This group contains utilities to explore and manage the recipes available @@ -221,18 +237,19 @@ def list(): """ from .config._diagnostics import DIAGNOSTICS from .config._logging import configure_logging - configure_logging(console_log_level='info') + + configure_logging(console_log_level="info") recipes_folder = DIAGNOSTICS.recipes logger.info("Showing recipes installed in %s", recipes_folder) - print('# Installed recipes') + print("# Installed recipes") for root, _, files in sorted(os.walk(recipes_folder)): root = os.path.relpath(root, recipes_folder) - if root == '.': - root = '' + if root == ".": + root = "" if root: print(f"\n# {root.replace(os.sep, ' - ').title()}") for filename in sorted(files): - if filename.endswith('.yml'): + if filename.endswith(".yml"): print(os.path.join(root, filename)) @staticmethod @@ -250,15 +267,18 @@ def get(recipe): from .config._diagnostics import DIAGNOSTICS from .config._logging import configure_logging - configure_logging(console_log_level='info') + from .exceptions import RecipeError + + configure_logging(console_log_level="info") installed_recipe = DIAGNOSTICS.recipes / recipe if not installed_recipe.exists(): - ValueError( - f'Recipe {recipe} not found. To list all available recipes, ' - 'execute "esmvaltool list"') - logger.info('Copying installed recipe to the current folder...') + raise RecipeError( + f"Recipe {recipe} not found. To list all available recipes, " + 'execute "esmvaltool list"' + ) + logger.info("Copying installed recipe to the current folder...") shutil.copy(installed_recipe, Path(recipe).name) - logger.info('Recipe %s successfully copied', recipe) + logger.info("Recipe %s successfully copied", recipe) @staticmethod def show(recipe): @@ -273,19 +293,22 @@ def show(recipe): """ from .config._diagnostics import DIAGNOSTICS from .config._logging import configure_logging - configure_logging(console_log_level='info') + from .exceptions import RecipeError + + configure_logging(console_log_level="info") installed_recipe = DIAGNOSTICS.recipes / recipe if not installed_recipe.exists(): - ValueError( - f'Recipe {recipe} not found. To list all available recipes, ' - 'execute "esmvaltool list"') - msg = f'Recipe {recipe}' + raise RecipeError( + f"Recipe {recipe} not found. To list all available recipes, " + 'execute "esmvaltool list"' + ) + msg = f"Recipe {recipe}" logger.info(msg) - logger.info('=' * len(msg)) - print(installed_recipe.read_text(encoding='utf-8')) + logger.info("=" * len(msg)) + print(installed_recipe.read_text(encoding="utf-8")) -class ESMValTool(): +class ESMValTool: """A community tool for routine evaluation of Earth system models. The Earth System Model Evaluation Tool (ESMValTool) is a community @@ -304,17 +327,21 @@ def __init__(self): self.config = Config() self.recipes = Recipes() self._extra_packages = {} - esmvaltool_commands = entry_points(group='esmvaltool_commands') + esmvaltool_commands = entry_points(group="esmvaltool_commands") if not esmvaltool_commands: - print("Running esmvaltool executable from ESMValCore. " - "No other command line utilities are available " - "until ESMValTool is installed.") + print( + "Running esmvaltool executable from ESMValCore. " + "No other command line utilities are available " + "until ESMValTool is installed." 
+ ) for entry_point in esmvaltool_commands: - self._extra_packages[entry_point.dist.name] = \ + self._extra_packages[entry_point.dist.name] = ( entry_point.dist.version + ) if hasattr(self, entry_point.name): - logger.error('Registered command %s already exists', - entry_point.name) + logger.error( + "Registered command %s already exists", entry_point.name + ) continue self.__setattr__(entry_point.name, entry_point.load()()) @@ -326,21 +353,24 @@ def version(self): command. """ from . import __version__ - print(f'ESMValCore: {__version__}') + + print(f"ESMValCore: {__version__}") for project, version in self._extra_packages.items(): - print(f'{project}: {version}') - - def run(self, - recipe, - config_file=None, - resume_from=None, - max_datasets=None, - max_years=None, - skip_nonexistent=None, - search_esgf=None, - diagnostics=None, - check_level=None, - **kwargs): + print(f"{project}: {version}") + + def run( + self, + recipe, + config_file=None, + resume_from=None, + max_datasets=None, + max_years=None, + skip_nonexistent=None, + search_esgf=None, + diagnostics=None, + check_level=None, + **kwargs, + ): """Execute an ESMValTool recipe. `esmvaltool run` executes the given recipe. To see a list of available @@ -396,18 +426,18 @@ def run(self, session = CFG.start_session(recipe.stem) if check_level is not None: - session['check_level'] = check_level + session["check_level"] = check_level if diagnostics is not None: - session['diagnostics'] = diagnostics + session["diagnostics"] = diagnostics if max_datasets is not None: - session['max_datasets'] = max_datasets + session["max_datasets"] = max_datasets if max_years is not None: - session['max_years'] = max_years + session["max_years"] = max_years if search_esgf is not None: - session['search_esgf'] = search_esgf + session["search_esgf"] = search_esgf if skip_nonexistent is not None: - session['skip_nonexistent'] = skip_nonexistent - session['resume_from'] = parse_resume(resume_from, recipe) + session["skip_nonexistent"] = skip_nonexistent + session["resume_from"] = parse_resume(resume_from, recipe) session.update(kwargs) self._run(recipe, session) @@ -431,7 +461,8 @@ def _create_session_dir(session): raise RecipeError( f"Output directory '{session.session_dir}' already exists and" - " unable to find alternative, aborting to prevent data loss.") + " unable to find alternative, aborting to prevent data loss." 
+ ) def _run(self, recipe: Path, session) -> None: """Run `recipe` using `session`.""" @@ -440,23 +471,22 @@ def _run(self, recipe: Path, session) -> None: # configure logging from .config._logging import configure_logging - log_files = configure_logging(output_dir=session.run_dir, - console_log_level=session['log_level']) - self._log_header(session['config_file'], log_files) - if session['search_esgf'] != 'never': - from .esgf._logon import logon - logon() + log_files = configure_logging( + output_dir=session.run_dir, console_log_level=session["log_level"] + ) + self._log_header(session["config_file"], log_files) # configure resource logger and run program from ._task import resource_usage_logger - resource_log = session.run_dir / 'resource_usage.txt' + + resource_log = session.run_dir / "resource_usage.txt" with resource_usage_logger(pid=os.getpid(), filename=resource_log): process_recipe(recipe_file=recipe, session=session) self._clean_preproc(session) - if session.cmor_log.read_text(encoding='utf-8'): + if session.cmor_log.read_text(encoding="utf-8"): logger.warning( "Input data is not (fully) CMOR-compliant, see %s for details", session.cmor_log, @@ -468,8 +498,10 @@ def _run(self, recipe: Path, session) -> None: def _clean_preproc(session): import shutil - if (not session['save_intermediary_cubes'] and - session._fixed_file_dir.exists()): + if ( + not session["save_intermediary_cubes"] + and session._fixed_file_dir.exists() + ): logger.debug( "Removing `preproc/fixed_files` directory containing fixed " "data" @@ -481,7 +513,7 @@ def _clean_preproc(session): ) shutil.rmtree(session._fixed_file_dir) - if session['remove_preproc_dir'] and session.preproc_dir.exists(): + if session["remove_preproc_dir"] and session.preproc_dir.exists(): logger.info( "Removing `preproc` directory containing preprocessed data" ) @@ -495,6 +527,7 @@ def _clean_preproc(session): @staticmethod def _get_recipe(recipe) -> Path: from esmvalcore.config._diagnostics import DIAGNOSTICS + if not os.path.isfile(recipe): installed_recipe = DIAGNOSTICS.recipes / recipe if os.path.isfile(installed_recipe): @@ -504,21 +537,20 @@ def _get_recipe(recipe) -> Path: def _log_header(self, config_file, log_files): from . import __version__ + logger.info(HEADER) - logger.info('Package versions') - logger.info('----------------') - logger.info('ESMValCore: %s', __version__) + logger.info("Package versions") + logger.info("----------------") + logger.info("ESMValCore: %s", __version__) for project, version in self._extra_packages.items(): - logger.info('%s: %s', project, version) - logger.info('----------------') + logger.info("%s: %s", project, version) + logger.info("----------------") logger.info("Using config file %s", config_file) logger.info("Writing program log files to:\n%s", "\n".join(log_files)) def run(): """Run the `esmvaltool` program, logging any exceptions.""" - import sys - from .exceptions import RecipeError # Workaround to avoid using more for the output @@ -545,7 +577,8 @@ def display(lines, out): logger.exception( "Program terminated abnormally, see stack trace " "below for more information:", - exc_info=True) + exc_info=True, + ) logger.info( "\n" "If you have a question or need help, please start a new " @@ -557,5 +590,6 @@ def display(lines, out): "\n" "To make it easier to find out what the problem is, please " "consider attaching the files run/recipe_*.yml and " - "run/main_log_debug.txt from the output directory.") + "run/main_log_debug.txt from the output directory." 
+ ) sys.exit(1) diff --git a/esmvalcore/_provenance.py b/esmvalcore/_provenance.py index 7ab27ceb46..25ad81f5ba 100644 --- a/esmvalcore/_provenance.py +++ b/esmvalcore/_provenance.py @@ -1,4 +1,5 @@ """Provenance module.""" + import copy import logging import os @@ -13,7 +14,7 @@ logger = logging.getLogger(__name__) -ESMVALTOOL_URI_PREFIX = 'https://www.esmvaltool.org/' +ESMVALTOOL_URI_PREFIX = "https://www.esmvaltool.org/" def create_namespace(provenance, namespace): @@ -24,11 +25,12 @@ def create_namespace(provenance, namespace): def get_esmvaltool_provenance(): """Create an esmvaltool run activity.""" provenance = ProvDocument() - namespace = 'software' + namespace = "software" create_namespace(provenance, namespace) attributes = {} # TODO: add dependencies with versions here - activity = provenance.activity(namespace + ':esmvaltool==' + __version__, - other_attributes=attributes) + activity = provenance.activity( + namespace + ":esmvaltool==" + __version__, other_attributes=attributes + ) return activity @@ -38,27 +40,27 @@ def get_esmvaltool_provenance(): def attribute_to_authors(entity, authors): """Attribute entity to authors.""" - namespace = 'author' + namespace = "author" create_namespace(entity.bundle, namespace) for author in authors: if isinstance(author, str): # This happens if the config-references.yml file is not available - author = {'name': author} + author = {"name": author} agent = entity.bundle.agent( - namespace + ':' + author['name'], - {'attribute:' + k: author[k] - for k in author if k != 'name'}) + namespace + ":" + author["name"], + {"attribute:" + k: author[k] for k in author if k != "name"}, + ) entity.wasAttributedTo(agent) def attribute_to_projects(entity, projects): """Attribute entity to projects.""" - namespace = 'project' + namespace = "project" create_namespace(entity.bundle, namespace) for project in projects: - agent = entity.bundle.agent(namespace + ':' + project) + agent = entity.bundle.agent(namespace + ":" + project) entity.wasAttributedTo(agent) @@ -66,17 +68,19 @@ def get_recipe_provenance(documentation, filename): """Create a provenance entity describing a recipe.""" provenance = ProvDocument() - for namespace in ('recipe', 'attribute'): + for namespace in ("recipe", "attribute"): create_namespace(provenance, namespace) entity = provenance.entity( - 'recipe:{}'.format(filename), { - 'attribute:description': documentation.get('description', ''), - 'attribute:references': str(documentation.get('references', [])), - }) + "recipe:{}".format(filename), + { + "attribute:description": documentation.get("description", ""), + "attribute:references": str(documentation.get("references", [])), + }, + ) - attribute_to_authors(entity, documentation.get('authors', [])) - attribute_to_projects(entity, documentation.get('projects', [])) + attribute_to_authors(entity, documentation.get("authors", [])) + attribute_to_projects(entity, documentation.get("projects", [])) return entity @@ -84,9 +88,9 @@ def get_recipe_provenance(documentation, filename): def get_task_provenance(task, recipe_entity): """Create a provenance activity describing a task.""" provenance = ProvDocument() - create_namespace(provenance, 'task') + create_namespace(provenance, "task") - activity = provenance.activity('task:' + task.name) + activity = provenance.activity("task:" + task.name) trigger = recipe_entity provenance.update(recipe_entity.bundle) @@ -103,11 +107,9 @@ def get_task_provenance(task, recipe_entity): class TrackedFile: """File with provenance tracking.""" - def __init__(self, - 
filename, - attributes=None, - ancestors=None, - prov_filename=None): + def __init__( + self, filename, attributes=None, ancestors=None, prov_filename=None + ): """Create an instance of a file with provenance tracking. Arguments @@ -146,11 +148,11 @@ def __repr__(self): def __eq__(self, other): """Check if `other` equals `self`.""" - return hasattr(other, 'filename') and self.filename == other.filename + return hasattr(other, "filename") and self.filename == other.filename def __lt__(self, other): """Check if `other` should be sorted before `self`.""" - return hasattr(other, 'filename') and self.filename < other.filename + return hasattr(other, "filename") and self.filename < other.filename def __hash__(self): """Return a unique hash for the file.""" @@ -174,7 +176,7 @@ def filename(self): @property def provenance_file(self): """Filename of provenance.""" - return os.path.splitext(self.filename)[0] + '_provenance.xml' + return os.path.splitext(self.filename)[0] + "_provenance.xml" def initialize_provenance(self, activity): """Initialize the provenance document. @@ -185,7 +187,8 @@ def initialize_provenance(self, activity): """ if self.provenance is not None: raise ValueError( - "Provenance of {} already initialized".format(self)) + "Provenance of {} already initialized".format(self) + ) self.provenance = ProvDocument() self._initialize_namespaces() self._initialize_activity(activity) @@ -194,7 +197,7 @@ def initialize_provenance(self, activity): def _initialize_namespaces(self): """Initialize the namespaces.""" - for namespace in ('file', 'attribute', 'preprocessor', 'task'): + for namespace in ("file", "attribute", "preprocessor", "task"): create_namespace(self.provenance, namespace) def _initialize_activity(self, activity): @@ -206,20 +209,21 @@ def _initialize_entity(self): """Initialize the entity representing the file.""" if self.attributes is None: self.attributes = {} - with Dataset(self.filename, 'r') as dataset: + with Dataset(self.filename, "r") as dataset: for attr in dataset.ncattrs(): self.attributes[attr] = dataset.getncattr(attr) attributes = { - 'attribute:' + str(k).replace(' ', '_'): str(v) + "attribute:" + str(k).replace(" ", "_"): str(v) for k, v in self.attributes.items() - if k not in ('authors', 'projects') + if k not in ("authors", "projects") } - self.entity = self.provenance.entity(f'file:{self.filename}', - attributes) + self.entity = self.provenance.entity( + f"file:{self.filename}", attributes + ) - attribute_to_authors(self.entity, self.attributes.get('authors', [])) - attribute_to_projects(self.entity, self.attributes.get('projects', [])) + attribute_to_authors(self.entity, self.attributes.get("authors", [])) + attribute_to_projects(self.entity, self.attributes.get("projects", [])) def _initialize_ancestors(self, activity): """Register ancestor files for provenance tracking.""" @@ -245,15 +249,15 @@ def wasderivedfrom(self, other): def _select_for_include(self): attributes = { - 'software': "Created with ESMValTool v{}".format(__version__), + "software": "Created with ESMValTool v{}".format(__version__), } - if 'caption' in self.attributes: - attributes['caption'] = self.attributes['caption'] + if "caption" in self.attributes: + attributes["caption"] = self.attributes["caption"] return attributes @staticmethod def _include_provenance_nc(filename, attributes): - with Dataset(filename, 'a') as dataset: + with Dataset(filename, "a") as dataset: for key, value in attributes.items(): setattr(dataset, key, value) @@ -261,8 +265,8 @@ def 
_include_provenance_nc(filename, attributes): def _include_provenance_png(filename, attributes): pnginfo = PngInfo() exif_tags = { - 'caption': 'ImageDescription', - 'software': 'Software', + "caption": "ImageDescription", + "software": "Software", } for key, value in attributes.items(): pnginfo.add_text(exif_tags.get(key, key), value, zip=True) @@ -274,8 +278,8 @@ def _include_provenance(self): attributes = self._select_for_include() # Attach provenance to supported file types - ext = os.path.splitext(self.filename)[1].lstrip('.').lower() - write = getattr(self, '_include_provenance_' + ext, None) + ext = os.path.splitext(self.filename)[1].lstrip(".").lower() + write = getattr(self, "_include_provenance_" + ext, None) if write: write(self.filename, attributes) @@ -286,17 +290,18 @@ def save_provenance(self): namespaces=self.provenance.namespaces, ) self._include_provenance() - with open(self.provenance_file, 'wb') as file: + with open(self.provenance_file, "wb") as file: # Create file with correct permissions before saving. - self.provenance.serialize(file, format='xml') + self.provenance.serialize(file, format="xml") self.activity = None self.entity = None self.provenance = None def restore_provenance(self): """Import provenance information from a previously saved file.""" - self.provenance = ProvDocument.deserialize(self.provenance_file, - format='xml') + self.provenance = ProvDocument.deserialize( + self.provenance_file, format="xml" + ) entity_uri = f"{ESMVALTOOL_URI_PREFIX}file{self.prov_filename}" self.entity = self.provenance.get_record(entity_uri)[0] # Find the associated activity diff --git a/esmvalcore/_recipe/_io.py b/esmvalcore/_recipe/_io.py index 937793a5fd..46fef8ceb3 100644 --- a/esmvalcore/_recipe/_io.py +++ b/esmvalcore/_recipe/_io.py @@ -1,4 +1,5 @@ """Functions for reading recipes.""" + from __future__ import annotations import os.path @@ -27,12 +28,13 @@ def _load_recipe(recipe: Path | str | dict[str, Any] | None) -> dict[str, Any]: """Load a recipe from a file, string, dict, or create a new recipe.""" if recipe is None: recipe = { - 'diagnostics': {}, + "diagnostics": {}, } - if isinstance(recipe, Path) or (isinstance(recipe, str) - and os.path.exists(recipe)): - recipe = Path(recipe).read_text(encoding='utf-8') + if isinstance(recipe, Path) or ( + isinstance(recipe, str) and os.path.exists(recipe) + ): + recipe = Path(recipe).read_text(encoding="utf-8") if isinstance(recipe, str): recipe = yaml.safe_load(recipe) diff --git a/esmvalcore/_recipe/check.py b/esmvalcore/_recipe/check.py index ac7b7f5bec..4e4fa6d2b9 100644 --- a/esmvalcore/_recipe/check.py +++ b/esmvalcore/_recipe/check.py @@ -1,4 +1,5 @@ """Module with functions to check a recipe.""" + from __future__ import annotations import inspect @@ -34,30 +35,36 @@ def ncl_version(): """Check the NCL version.""" - ncl = which('ncl') + ncl = which("ncl") if not ncl: - raise RecipeError("Recipe contains NCL scripts, but cannot find " - "an NCL installation.") + raise RecipeError( + "Recipe contains NCL scripts, but cannot find " + "an NCL installation." 
+        )
     try:
-        cmd = [ncl, '-V']
+        cmd = [ncl, "-V"]
         version = subprocess.check_output(cmd, universal_newlines=True)
     except subprocess.CalledProcessError:
-        logger.error("Failed to execute '%s'", ' '.join(' '.join(cmd)))
-        raise RecipeError("Recipe contains NCL scripts, but your NCL "
-                          "installation appears to be broken.")
+        logger.error("Failed to execute '%s'", " ".join(cmd))
+        raise RecipeError(
+            "Recipe contains NCL scripts, but your NCL "
+            "installation appears to be broken."
+        )
 
     version = version.strip()
     logger.info("Found NCL version %s", version)
 
-    major, minor = (int(i) for i in version.split('.')[:2])
+    major, minor = (int(i) for i in version.split(".")[:2])
     if major < 6 or (major == 6 and minor < 4):
-        raise RecipeError("NCL version 6.4 or higher is required to run "
-                          "a recipe containing NCL scripts.")
+        raise RecipeError(
+            "NCL version 6.4 or higher is required to run "
+            "a recipe containing NCL scripts."
+        )
 
 
 def recipe_with_schema(filename):
     """Check if the recipe content matches schema."""
-    schema_file = os.path.join(os.path.dirname(__file__), 'recipe_schema.yml')
+    schema_file = os.path.join(os.path.dirname(__file__), "recipe_schema.yml")
     logger.debug("Checking recipe against schema %s", schema_file)
     recipe = yamale.make_data(filename)
     schema = yamale.make_schema(schema_file)
@@ -67,13 +74,14 @@ def recipe_with_schema(filename):
 def diagnostics(diags):
     """Check diagnostics in recipe."""
     if diags is None:
-        raise RecipeError('The given recipe does not have any diagnostic.')
+        raise RecipeError("The given recipe does not have any diagnostic.")
     for name, diagnostic in diags.items():
-        if 'scripts' not in diagnostic:
+        if "scripts" not in diagnostic:
             raise RecipeError(
-                f"Missing scripts section in diagnostic '{name}'.")
-        variable_names = tuple(diagnostic.get('variables', {}))
-        scripts = diagnostic.get('scripts')
+                f"Missing scripts section in diagnostic '{name}'."
+            )
+        variable_names = tuple(diagnostic.get("variables", {}))
+        scripts = diagnostic.get("scripts")
         if scripts is None:
             scripts = {}
         for script_name, script in scripts.items():
@@ -81,11 +89,13 @@ def diagnostics(diags):
                 raise RecipeError(
                     f"Invalid script name '{script_name}' encountered "
                     f"in diagnostic '{name}': scripts cannot have the "
-                    "same name as variables.")
-            if not script.get('script'):
+                    "same name as variables."
+                )
+            if not script.get("script"):
                 raise RecipeError(
                     f"No script defined for script '{script_name}' in "
-                    f"diagnostic '{name}'.")
+                    f"diagnostic '{name}'."
+                )
 
 
 def duplicate_datasets(
@@ -98,13 +108,15 @@ def duplicate_datasets(
         raise RecipeError(
             "You have not specified any dataset or additional_dataset "
             f"groups for variable '{variable_group}' in diagnostic "
-            f"'{diagnostic}'.")
+            f"'{diagnostic}'."
+        )
     checked_datasets_ = []
     for dataset in datasets:
         if dataset in checked_datasets_:
             raise RecipeError(
                 f"Duplicate dataset\n{pformat(dataset)}\nfor variable "
-                f"'{variable_group}' in diagnostic '{diagnostic}'.")
+                f"'{variable_group}' in diagnostic '{diagnostic}'."
+            )
         checked_datasets_.append(dataset)
 
 
@@ -112,7 +124,7 @@ def variable(
     var: dict[str, Any],
     required_keys: Iterable[str],
     diagnostic: str,
-    variable_group: str
+    variable_group: str,
 ) -> None:
     """Check variables as derived from recipe."""
     required = set(required_keys)
     if missing:
         raise RecipeError(
             f"Missing keys {missing} in\n{pformat(var)}\nfor variable "
-            f"'{variable_group}' in diagnostic '{diagnostic}'.")
+            f"'{variable_group}' in diagnostic '{diagnostic}'."
+ ) def _log_data_availability_errors(dataset): @@ -131,9 +144,9 @@ def _log_data_availability_errors(dataset): logger.error("No input files found for %s", dataset) if patterns: if len(patterns) == 1: - msg = f': {patterns[0]}' + msg = f": {patterns[0]}" else: - msg = '\n{}'.format('\n'.join(str(p) for p in patterns)) + msg = "\n{}".format("\n".join(str(p) for p in patterns)) logger.error("Looked for files matching%s", msg) logger.error("Set 'log_level' to 'debug' to get more information") @@ -175,10 +188,10 @@ def data_availability(dataset, log=True): if not input_files: raise InputFilesNotFound(f"Missing data for {dataset.summary(True)}") - if 'timerange' not in facets: + if "timerange" not in facets: return - start_date, end_date = _parse_period(facets['timerange']) + start_date, end_date = _parse_period(facets["timerange"]) start_year = int(start_date[0:4]) end_year = int(end_date[0:4]) required_years = set(range(start_year, end_year + 1, 1)) @@ -194,32 +207,35 @@ def data_availability(dataset, log=True): raise InputFilesNotFound( "No input data available for years {} in files:\n{}".format( - missing_txt, "\n".join(str(f) for f in input_files))) + missing_txt, "\n".join(str(f) for f in input_files) + ) + ) def preprocessor_supplementaries(dataset, settings): """Check that the required supplementary variables have been added.""" steps = [step for step in settings if step in PREPROCESSOR_SUPPLEMENTARIES] - supplementaries = {d.facets['short_name'] for d in dataset.supplementaries} + supplementaries = {d.facets["short_name"] for d in dataset.supplementaries} for step in steps: ancs = PREPROCESSOR_SUPPLEMENTARIES[step] - for short_name in ancs['variables']: + for short_name in ancs["variables"]: if short_name in supplementaries: break else: - if ancs['required'] == "require_at_least_one": + if ancs["required"] == "require_at_least_one": raise RecipeError( f"Preprocessor function {step} requires that at least " f"one supplementary variable of {ancs['variables']} is " - f"defined in the recipe for {dataset}.") - if ancs['required'] == "prefer_at_least_one": + f"defined in the recipe for {dataset}." 
+ ) + if ancs["required"] == "prefer_at_least_one": logger.warning( "Preprocessor function %s works best when at least " "one supplementary variable of %s is defined in the " "recipe for %s.", step, - ancs['variables'], + ancs["variables"], dataset, ) @@ -239,26 +255,30 @@ def tasks_valid(tasks): def check_for_temporal_preprocs(profile): """Check for temporal operations on fx variables.""" temp_preprocs = [ - preproc for preproc in profile + preproc + for preproc in profile if profile[preproc] and preproc in TIME_PREPROCESSORS ] if temp_preprocs: raise RecipeError( "Time coordinate preprocessor step(s) {} not permitted on fx " - "vars, please remove them from recipe".format(temp_preprocs)) + "vars, please remove them from recipe".format(temp_preprocs) + ) def extract_shape(settings): """Check that `extract_shape` arguments are valid.""" - shapefile = settings.get('shapefile', '') + shapefile = settings.get("shapefile", "") if not os.path.exists(shapefile): - raise RecipeError("In preprocessor function `extract_shape`: " - f"Unable to find 'shapefile: {shapefile}'") + raise RecipeError( + "In preprocessor function `extract_shape`: " + f"Unable to find 'shapefile: {shapefile}'" + ) valid = { - 'method': {'contains', 'representative'}, - 'crop': {True, False}, - 'decomposed': {True, False}, + "method": {"contains", "representative"}, + "crop": {True, False}, + "decomposed": {True, False}, } for key in valid: value = settings.get(key) @@ -266,17 +286,19 @@ def extract_shape(settings): raise RecipeError( f"In preprocessor function `extract_shape`: Invalid value " f"'{value}' for argument '{key}', choose from " - "{}".format(', '.join(f"'{k}'".lower() for k in valid[key]))) + "{}".format(", ".join(f"'{k}'".lower() for k in valid[key])) + ) def _verify_span_value(span): """Raise error if span argument cannot be verified.""" - valid_names = ('overlap', 'full') + valid_names = ("overlap", "full") if span not in valid_names: raise RecipeError( "Invalid value encountered for `span` in preprocessor " f"`multi_model_statistics`. Valid values are {valid_names}." - f"Got {span}.") + f"Got {span}." + ) def _verify_groupby(groupby): @@ -285,7 +307,8 @@ def _verify_groupby(groupby): raise RecipeError( "Invalid value encountered for `groupby` in preprocessor " "`multi_model_statistics`.`groupby` must be defined as a " - f"list. Got {groupby}.") + f"list. Got {groupby}." + ) def _verify_keep_input_datasets(keep_input_datasets): @@ -293,7 +316,8 @@ def _verify_keep_input_datasets(keep_input_datasets): raise RecipeError( f"Invalid value encountered for `keep_input_datasets`." f"Must be defined as a boolean (true or false). " - f"Got {keep_input_datasets}.") + f"Got {keep_input_datasets}." + ) def _verify_ignore_scalar_coords(ignore_scalar_coords): @@ -301,41 +325,44 @@ def _verify_ignore_scalar_coords(ignore_scalar_coords): raise RecipeError( f"Invalid value encountered for `ignore_scalar_coords`." f"Must be defined as a boolean (true or false). Got " - f"{ignore_scalar_coords}.") + f"{ignore_scalar_coords}." 
+ ) def multimodel_statistics_preproc(settings): """Check that the multi-model settings are valid.""" - span = settings.get('span', None) # optional, default: overlap + span = settings.get("span", None) # optional, default: overlap if span: _verify_span_value(span) - groupby = settings.get('groupby', None) # optional, default: None + groupby = settings.get("groupby", None) # optional, default: None if groupby: _verify_groupby(groupby) - keep_input_datasets = settings.get('keep_input_datasets', True) + keep_input_datasets = settings.get("keep_input_datasets", True) _verify_keep_input_datasets(keep_input_datasets) - ignore_scalar_coords = settings.get('ignore_scalar_coords', False) + ignore_scalar_coords = settings.get("ignore_scalar_coords", False) _verify_ignore_scalar_coords(ignore_scalar_coords) def ensemble_statistics_preproc(settings): """Check that the ensemble settings are valid.""" - span = settings.get('span', 'overlap') # optional, default: overlap + span = settings.get("span", "overlap") # optional, default: overlap if span: _verify_span_value(span) - ignore_scalar_coords = settings.get('ignore_scalar_coords', False) + ignore_scalar_coords = settings.get("ignore_scalar_coords", False) _verify_ignore_scalar_coords(ignore_scalar_coords) def _check_delimiter(timerange): if len(timerange) != 2: - raise RecipeError("Invalid value encountered for `timerange`. " - "Valid values must be separated by `/`. " - f"Got {timerange} instead.") + raise RecipeError( + "Invalid value encountered for `timerange`. " + "Valid values must be separated by `/`. " + f"Got {timerange} instead." + ) def _check_duration_periods(timerange): @@ -349,13 +376,15 @@ def _check_duration_periods(timerange): except ValueError: pass else: - raise RecipeError("Invalid value encountered for `timerange`. " - "Cannot set both the beginning and the end " - "as duration periods.") + raise RecipeError( + "Invalid value encountered for `timerange`. " + "Cannot set both the beginning and the end " + "as duration periods." + ) def _check_format_years(date): - if date != '*' and not date.startswith('P'): + if date != "*" and not date.startswith("P"): if len(date) < 4: date = date.zfill(4) return date @@ -368,18 +397,20 @@ def _check_timerange_values(date, timerange): try: isodate.parse_duration(date) except ValueError as exc: - if date != '*': - raise RecipeError("Invalid value encountered for `timerange`. " - "Valid value must follow ISO 8601 standard " - "for dates and duration periods, or be " - "set to '*' to load available years. " - f"Got {timerange} instead.") from exc + if date != "*": + raise RecipeError( + "Invalid value encountered for `timerange`. " + "Valid value must follow ISO 8601 standard " + "for dates and duration periods, or be " + "set to '*' to load available years. " + f"Got {timerange} instead." + ) from exc def valid_time_selection(timerange): """Check that `timerange` tag is well defined.""" - if timerange != '*': - timerange = timerange.split('/') + if timerange != "*": + timerange = timerange.split("/") _check_delimiter(timerange) _check_duration_periods(timerange) for date in timerange: @@ -393,7 +424,8 @@ def differing_timeranges(timeranges, required_vars): raise ValueError( f"Differing timeranges with values {timeranges} " f"found for required variables {required_vars}. " - "Set `timerange` to a common value.") + "Set `timerange` to a common value." 
+ ) def _check_literal( @@ -401,7 +433,7 @@ def _check_literal( *, step: str, option: str, - allowed_values: tuple[str], + allowed_values: tuple[None | str, ...], ) -> None: """Check that an option for a preprocessor has a valid value.""" if step not in settings: @@ -416,32 +448,32 @@ def _check_literal( bias_type = partial( _check_literal, - step='bias', - option='bias_type', - allowed_values=('absolute', 'relative'), + step="bias", + option="bias_type", + allowed_values=("absolute", "relative"), ) metric_type = partial( _check_literal, - step='distance_metric', - option='metric', + step="distance_metric", + option="metric", allowed_values=( - 'rmse', - 'weighted_rmse', - 'pearsonr', - 'weighted_pearsonr', - 'emd', - 'weighted_emd', + "rmse", + "weighted_rmse", + "pearsonr", + "weighted_pearsonr", + "emd", + "weighted_emd", ), ) resample_hours = partial( _check_literal, - step='resample_hours', - option='interpolate', - allowed_values=(None, 'nearest', 'linear'), + step="resample_hours", + option="interpolate", + allowed_values=(None, "nearest", "linear"), ) @@ -469,35 +501,35 @@ def _check_ref_attributes(products: set, *, step: str, attr_name: str) -> None: f"products\n{pformat(products_str)},\nfound " f"{len(reference_products):d}{ref_products_str}Please also " f"ensure that the reference dataset is not excluded with the " - f"'exclude' option") + f"'exclude' option" + ) reference_for_bias_preproc = partial( - _check_ref_attributes, step='bias', attr_name='reference_for_bias' + _check_ref_attributes, step="bias", attr_name="reference_for_bias" ) reference_for_distance_metric_preproc = partial( _check_ref_attributes, - step='distance_metric', - attr_name='reference_for_metric', + step="distance_metric", + attr_name="reference_for_metric", ) def statistics_preprocessors(settings: dict) -> None: """Check options of statistics preprocessors.""" mm_stats = ( - 'multi_model_statistics', - 'ensemble_statistics', + "multi_model_statistics", + "ensemble_statistics", ) - for (step, step_settings) in settings.items(): - + for step, step_settings in settings.items(): # For multi-model statistics, we need to check each entry of statistics if step in mm_stats: _check_mm_stat(step, step_settings) # For other statistics, check optional kwargs for operator - elif '_statistics' in step: + elif "_statistics" in step: _check_regular_stat(step, step_settings) @@ -508,7 +540,7 @@ def _check_regular_stat(step, step_settings): # Some preprocessors like climate_statistics use default 'mean' for # operator. If 'operator' is missing for those preprocessors with no # default, this will be detected in PreprocessorFile.check() later. - operator = step_settings.pop('operator', 'mean') + operator = step_settings.pop("operator", "mean") # If preprocessor does not exist, do nothing here; this will be detected in # PreprocessorFile.check() later. 
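
Editorial note between hunks: the `bias_type`, `metric_type`, and `resample_hours` checks defined earlier in this file are `functools.partial` bindings of the shared `_check_literal` validator. A minimal, self-contained sketch of that pattern, assuming a settings dict that maps step names to their options (the names and error text below are illustrative, not part of this diff):

from functools import partial


def _check_choice(settings, *, step, option, allowed_values):
    """Raise if a preprocessor option is not one of the allowed literals."""
    if step not in settings:
        return  # Preprocessor step not used; nothing to validate.
    value = settings[step].get(option)
    if value not in allowed_values:
        raise ValueError(
            f"Expected one of {allowed_values} for option '{option}' of "
            f"preprocessor function '{step}', got '{value}'"
        )


# One validator per option, mirroring how bias_type et al. are built above.
check_bias_type = partial(
    _check_choice,
    step="bias",
    option="bias_type",
    allowed_values=("absolute", "relative"),
)

check_bias_type({"bias": {"bias_type": "relative"}})  # OK, passes silently

Binding the step/option pairs with `partial` keeps each validator a plain callable that the recipe checker can apply uniformly to the settings dict.
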
@@ -519,8 +551,10 @@ def _check_regular_stat(step, step_settings): # Ignore other preprocessor arguments, e.g., 'hours' for hourly_statistics other_args = [ - n for (n, p) in inspect.signature(preproc_func).parameters.items() if - p.kind in ( + n + for (n, p) in inspect.signature(preproc_func).parameters.items() + if p.kind + in ( inspect.Parameter.POSITIONAL_ONLY, inspect.Parameter.POSITIONAL_OR_KEYWORD, ) @@ -531,14 +565,12 @@ def _check_regular_stat(step, step_settings): try: get_iris_aggregator(operator, **operator_kwargs) except ValueError as exc: - raise RecipeError( - f"Invalid options for {step}: {exc}" - ) + raise RecipeError(f"Invalid options for {step}: {exc}") def _check_mm_stat(step, step_settings): """Check multi-model statistic step.""" - statistics = step_settings.get('statistics', []) + statistics = step_settings.get("statistics", []) for stat in statistics: try: (operator, kwargs) = _get_operator_and_kwargs(stat) @@ -547,35 +579,33 @@ def _check_mm_stat(step, step_settings): try: get_iris_aggregator(operator, **kwargs) except ValueError as exc: - raise RecipeError( - f"Invalid options for {step}: {exc}" - ) + raise RecipeError(f"Invalid options for {step}: {exc}") def regridding_schemes(settings: dict): """Check :obj:`str` regridding schemes.""" - if 'regrid' not in settings: + if "regrid" not in settings: return # Note: If 'scheme' is missing, this will be detected in # PreprocessorFile.check() later - scheme = settings['regrid'].get('scheme') + scheme = settings["regrid"].get("scheme") # Check built-in regridding schemes (given as str) if isinstance(scheme, str): - scheme = settings['regrid']['scheme'] + scheme = settings["regrid"]["scheme"] # Also allow deprecated 'linear_extrapolate' and 'unstructured_nearest' # schemes (the corresponding deprecation warnings will be raised in the # regrid() preprocessor) TODO: Remove in v2.13.0 - if scheme in ('linear_extrapolate', 'unstructured_nearest'): + if scheme in ("linear_extrapolate", "unstructured_nearest"): return allowed_regridding_schemes = list( set( - list(HORIZONTAL_SCHEMES_IRREGULAR) + - list(HORIZONTAL_SCHEMES_REGULAR) + - list(HORIZONTAL_SCHEMES_UNSTRUCTURED) + list(HORIZONTAL_SCHEMES_IRREGULAR) + + list(HORIZONTAL_SCHEMES_REGULAR) + + list(HORIZONTAL_SCHEMES_UNSTRUCTURED) ) ) if scheme not in allowed_regridding_schemes: diff --git a/esmvalcore/_recipe/from_datasets.py b/esmvalcore/_recipe/from_datasets.py index 8bd33fd5e9..60384c8026 100644 --- a/esmvalcore/_recipe/from_datasets.py +++ b/esmvalcore/_recipe/from_datasets.py @@ -1,4 +1,5 @@ """Functions for creating/updating a recipe with `Dataset`s.""" + from __future__ import annotations import itertools @@ -28,55 +29,58 @@ def _datasets_to_raw_recipe(datasets: Iterable[Dataset]) -> Recipe: diagnostics: dict[str, dict[str, Any]] = {} for dataset in datasets: - diagnostic_name: str = dataset.facets['diagnostic'] # type: ignore + diagnostic_name: str = dataset.facets["diagnostic"] # type: ignore if diagnostic_name not in diagnostics: - diagnostics[diagnostic_name] = {'variables': {}} - variables = diagnostics[diagnostic_name]['variables'] - if 'variable_group' in dataset.facets: - variable_group = dataset.facets['variable_group'] + diagnostics[diagnostic_name] = {"variables": {}} + variables = diagnostics[diagnostic_name]["variables"] + if "variable_group" in dataset.facets: + variable_group = dataset.facets["variable_group"] else: - variable_group = dataset.facets['short_name'] + variable_group = dataset.facets["short_name"] if variable_group not in variables: - 
variables[variable_group] = {'additional_datasets': []} + variables[variable_group] = {"additional_datasets": []} facets: dict[str, Any] = dataset.minimal_facets - facets.pop('diagnostic', None) - if facets['short_name'] == variable_group: - facets.pop('short_name') + facets.pop("diagnostic", None) + if facets["short_name"] == variable_group: + facets.pop("short_name") if dataset.supplementaries: - facets['supplementary_variables'] = [] + facets["supplementary_variables"] = [] for supplementary in dataset.supplementaries: anc_facets = {} for key, value in supplementary.minimal_facets.items(): if facets.get(key) != value: anc_facets[key] = value - facets['supplementary_variables'].append(anc_facets) - variables[variable_group]['additional_datasets'].append(facets) + facets["supplementary_variables"].append(anc_facets) + variables[variable_group]["additional_datasets"].append(facets) - recipe = {'diagnostics': diagnostics} + recipe = {"diagnostics": diagnostics} return recipe def _datasets_to_recipe(datasets: Iterable[Dataset]) -> Recipe: """Convert datasets to a condensed recipe dict.""" for dataset in datasets: - if 'diagnostic' not in dataset.facets: - raise RecipeError(f"'diagnostic' facet missing from {dataset}," - "unable to convert to recipe.") + if "diagnostic" not in dataset.facets: + raise RecipeError( + f"'diagnostic' facet missing from {dataset}," + "unable to convert to recipe." + ) recipe = _datasets_to_raw_recipe(datasets) - diagnostics = recipe['diagnostics'].values() + diagnostics = recipe["diagnostics"].values() # Group ensemble members for diagnostic in diagnostics: - for variable in diagnostic['variables'].values(): - variable['additional_datasets'] = _group_ensemble_members( - variable['additional_datasets']) + for variable in diagnostic["variables"].values(): + variable["additional_datasets"] = _group_ensemble_members( + variable["additional_datasets"] + ) # Move identical facets from dataset to variable for diagnostic in diagnostics: - diagnostic['variables'] = { + diagnostic["variables"] = { variable_group: _group_identical_facets(variable) - for variable_group, variable in diagnostic['variables'].items() + for variable_group, variable in diagnostic["variables"].items() } # Deduplicate by moving datasets up from variable to diagnostic to recipe @@ -88,11 +92,11 @@ def _datasets_to_recipe(datasets: Iterable[Dataset]) -> Recipe: def _move_datasets_up(recipe: Recipe) -> Recipe: """Move datasets from variable to diagnostic to recipe.""" # Move `additional_datasets` from variable to diagnostic level - for diagnostic in recipe['diagnostics'].values(): - _move_one_level_up(diagnostic, 'variables', 'additional_datasets') + for diagnostic in recipe["diagnostics"].values(): + _move_one_level_up(diagnostic, "variables", "additional_datasets") # Move `additional_datasets` from diagnostic to `datasets` at recipe level - _move_one_level_up(recipe, 'diagnostics', 'datasets') + _move_one_level_up(recipe, "diagnostics", "datasets") return recipe @@ -117,8 +121,7 @@ def _move_one_level_up(base: dict, level: str, target: str): dataset_mapping = {} for name, group in groups.items(): dataset_mapping[name] = { - _to_frozen(ds): ds - for ds in group['additional_datasets'] + _to_frozen(ds): ds for ds in group["additional_datasets"] } # Set datasets that are common to all groups @@ -135,28 +138,30 @@ def _move_one_level_up(base: dict, level: str, target: str): group = groups[name] var_datasets = set(datasets) - common_datasets if var_datasets: - group['additional_datasets'] = [ + 
group["additional_datasets"] = [ v for k, v in datasets.items() if k in var_datasets ] else: - group.pop('additional_datasets') + group.pop("additional_datasets") def _group_identical_facets(variable: Mapping[str, Any]) -> Recipe: """Move identical facets from datasets to variable.""" result = dict(variable) - dataset_facets = result.pop('additional_datasets') + dataset_facets = result.pop("additional_datasets") variable_keys = [ - k for k, v in dataset_facets[0].items() - if k != 'dataset' # keep at least one key in every dataset + k + for k, v in dataset_facets[0].items() + if k != "dataset" # keep at least one key in every dataset and all((k, v) in d.items() for d in dataset_facets[1:]) ] result.update( - (k, v) for k, v in dataset_facets[0].items() if k in variable_keys) - result['additional_datasets'] = [{ - k: v - for k, v in d.items() if k not in variable_keys - } for d in dataset_facets] + (k, v) for k, v in dataset_facets[0].items() if k in variable_keys + ) + result["additional_datasets"] = [ + {k: v for k, v in d.items() if k not in variable_keys} + for d in dataset_facets + ] return result @@ -169,20 +174,21 @@ def _group_ensemble_members(dataset_facets: Iterable[Facets]) -> list[Facets]: def grouper(facets): return sorted( - (f, str(v)) for f, v in facets.items() if f != 'ensemble') + (f, str(v)) for f, v in facets.items() if f != "ensemble" + ) result = [] dataset_facets = sorted(dataset_facets, key=grouper) for _, group_iter in itertools.groupby(dataset_facets, key=grouper): group = list(group_iter) - ensembles = [f['ensemble'] for f in group if 'ensemble' in f] + ensembles = [f["ensemble"] for f in group if "ensemble" in f] group_facets = group[0] if not ensembles: result.append(dict(group_facets)) else: for ensemble in _group_ensemble_names(ensembles): facets = dict(group_facets) - facets['ensemble'] = ensemble + facets["ensemble"] = ensemble result.append(facets) return result @@ -204,7 +210,7 @@ def _group_ensemble_names(ensemble_names: Iterable[str]) -> list[str]: ]. """ ensemble_tuples = [ - tuple(int(i) for i in re.findall(r'\d+', ens)) + tuple(int(i) for i in re.findall(r"\d+", ens)) for ens in ensemble_names ] @@ -212,8 +218,8 @@ def _group_ensemble_names(ensemble_names: Iterable[str]) -> list[str]: groups = [] for ensemble_range in ensemble_ranges: - txt = '' - for name, value in zip('ripf', ensemble_range): + txt = "" + for name, value in zip("ripf", ensemble_range): txt += name if value[0] == value[1]: txt += f"{value[0]}" @@ -225,8 +231,8 @@ def _group_ensemble_names(ensemble_names: Iterable[str]) -> list[str]: def _create_ensemble_ranges( - ensembles: Sequence[tuple[int, - ...]], ) -> list[tuple[tuple[int, int], ...]]: + ensembles: Sequence[tuple[int, ...]], +) -> list[tuple[tuple[int, int], ...]]: """Create ranges from tuples. 
Examples @@ -246,19 +252,19 @@ def _create_ensemble_ranges( """ def order(i, ens): - prefix, suffix = ens[:i], ens[i + 1:] + prefix, suffix = ens[:i], ens[i + 1 :] return (prefix, suffix, ens[i]) def grouper(i, ens): - prefix, suffix = ens[:i], ens[i + 1:] + prefix, suffix = ens[:i], ens[i + 1 :] return (prefix, suffix) for i in range(len(ensembles[0])): grouped_ensembles = [] ensembles = sorted(ensembles, key=partial(order, i)) - for (prefix, - suffix), ibunch in itertools.groupby(ensembles, - key=partial(grouper, i)): + for (prefix, suffix), ibunch in itertools.groupby( + ensembles, key=partial(grouper, i) + ): bunch = list(ibunch) prev = bunch[0][i] groups = [[prev]] @@ -272,7 +278,7 @@ def grouper(i, ens): groups[-1].append(prev) result = [] for group in groups: - item = prefix + (tuple(group), ) + suffix + item = prefix + (tuple(group),) + suffix result.append(item) grouped_ensembles.extend(result) @@ -284,22 +290,21 @@ def grouper(i, ens): def _clean_recipe(recipe: Recipe, diagnostics: list[str]) -> Recipe: """Clean up the input recipe.""" # Format description nicer - if 'documentation' in recipe: - doc = recipe['documentation'] - for key in ['title', 'description']: + if "documentation" in recipe: + doc = recipe["documentation"] + for key in ["title", "description"]: if key in doc: doc[key] = doc[key].strip() # Filter out unused diagnostics - recipe['diagnostics'] = { - k: v - for k, v in recipe['diagnostics'].items() if k in diagnostics + recipe["diagnostics"] = { + k: v for k, v in recipe["diagnostics"].items() if k in diagnostics } # Remove legacy supplementary definitions form the recipe nested_delete( - recipe.get('preprocessors', {}), - 'fx_variables', + recipe.get("preprocessors", {}), + "fx_variables", in_place=True, ) @@ -339,26 +344,26 @@ def datasets_to_recipe( """ recipe = _load_recipe(recipe) dataset_recipe = _datasets_to_recipe(datasets) - _clean_recipe(recipe, diagnostics=dataset_recipe['diagnostics']) + _clean_recipe(recipe, diagnostics=dataset_recipe["diagnostics"]) # Remove dataset sections from recipe - recipe.pop('datasets', None) - nested_delete(recipe, 'additional_datasets', in_place=True) + recipe.pop("datasets", None) + nested_delete(recipe, "additional_datasets", in_place=True) # Update datasets section - if 'datasets' in dataset_recipe: - recipe['datasets'] = dataset_recipe['datasets'] + if "datasets" in dataset_recipe: + recipe["datasets"] = dataset_recipe["datasets"] - for diag, dataset_diagnostic in dataset_recipe['diagnostics'].items(): - if diag not in recipe['diagnostics']: - recipe['diagnostics'][diag] = {} - diagnostic = recipe['diagnostics'][diag] + for diag, dataset_diagnostic in dataset_recipe["diagnostics"].items(): + if diag not in recipe["diagnostics"]: + recipe["diagnostics"][diag] = {} + diagnostic = recipe["diagnostics"][diag] # Update diagnostic level datasets - if 'additional_datasets' in dataset_diagnostic: - additional_datasets = dataset_diagnostic['additional_datasets'] - diagnostic['additional_datasets'] = additional_datasets + if "additional_datasets" in dataset_diagnostic: + additional_datasets = dataset_diagnostic["additional_datasets"] + diagnostic["additional_datasets"] = additional_datasets # Update variable level datasets - if 'variables' in dataset_diagnostic: - diagnostic['variables'] = dataset_diagnostic['variables'] + if "variables" in dataset_diagnostic: + diagnostic["variables"] = dataset_diagnostic["variables"] return recipe diff --git a/esmvalcore/_recipe/recipe.py b/esmvalcore/_recipe/recipe.py index 
1f2eb63488..06bb2fd1a4 100644 --- a/esmvalcore/_recipe/recipe.py +++ b/esmvalcore/_recipe/recipe.py @@ -1,4 +1,5 @@ """Recipe parser.""" + from __future__ import annotations import fnmatch @@ -67,7 +68,7 @@ def read_recipe_file(filename: Path, session): """Read a recipe from file.""" check.recipe_with_schema(filename) - with open(filename, 'r', encoding='utf-8') as file: + with open(filename, "r", encoding="utf-8") as file: raw_recipe = yaml.safe_load(file) return Recipe(raw_recipe, session, recipe_file=filename) @@ -75,17 +76,18 @@ def read_recipe_file(filename: Path, session): def _special_name_to_dataset(facets, special_name): """Convert special names to dataset names.""" - if special_name in ('reference_dataset', 'alternative_dataset'): + if special_name in ("reference_dataset", "alternative_dataset"): if special_name not in facets: raise RecipeError( "Preprocessor '{preproc}' uses '{name}', but '{name}' is not " "defined for variable '{variable_group}' of diagnostic " "'{diagnostic}'.".format( - preproc=facets['preprocessor'], + preproc=facets["preprocessor"], name=special_name, - variable_group=facets['variable_group'], - diagnostic=facets['diagnostic'], - )) + variable_group=facets["variable_group"], + diagnostic=facets["diagnostic"], + ) + ) special_name = facets[special_name] return special_name @@ -93,53 +95,56 @@ def _special_name_to_dataset(facets, special_name): def _update_target_levels(dataset, datasets, settings): """Replace the target levels dataset name with a filename if needed.""" - levels = settings.get('extract_levels', {}).get('levels') + levels = settings.get("extract_levels", {}).get("levels") if not levels: return levels = _special_name_to_dataset(dataset.facets, levels) # If levels is a dataset name, replace it by a dict with a 'dataset' entry - if any(levels == d.facets['dataset'] for d in datasets): - settings['extract_levels']['levels'] = {'dataset': levels} - levels = settings['extract_levels']['levels'] + if any(levels == d.facets["dataset"] for d in datasets): + settings["extract_levels"]["levels"] = {"dataset": levels} + levels = settings["extract_levels"]["levels"] if not isinstance(levels, dict): return - if 'cmor_table' in levels and 'coordinate' in levels: - settings['extract_levels']['levels'] = get_cmor_levels( - levels['cmor_table'], levels['coordinate']) - elif 'dataset' in levels: - dataset_name = levels['dataset'] - if dataset.facets['dataset'] == dataset_name: - del settings['extract_levels'] + if "cmor_table" in levels and "coordinate" in levels: + settings["extract_levels"]["levels"] = get_cmor_levels( + levels["cmor_table"], levels["coordinate"] + ) + elif "dataset" in levels: + dataset_name = levels["dataset"] + if dataset.facets["dataset"] == dataset_name: + del settings["extract_levels"] else: target_ds = _select_dataset(dataset_name, datasets) representative_ds = _representative_datasets(target_ds)[0] check.data_availability(representative_ds) - settings['extract_levels']['levels'] = get_reference_levels( - representative_ds) + settings["extract_levels"]["levels"] = get_reference_levels( + representative_ds + ) def _update_target_grid(dataset, datasets, settings): """Replace the target grid dataset name with a filename if needed.""" - grid = settings.get('regrid', {}).get('target_grid') + grid = settings.get("regrid", {}).get("target_grid") if not grid: return grid = _special_name_to_dataset(dataset.facets, grid) - if dataset.facets['dataset'] == grid: - del settings['regrid'] - elif any(grid == d.facets['dataset'] for d in datasets): + 
if dataset.facets["dataset"] == grid: + del settings["regrid"] + elif any(grid == d.facets["dataset"] for d in datasets): representative_ds = _representative_datasets( - _select_dataset(grid, datasets))[0] + _select_dataset(grid, datasets) + )[0] check.data_availability(representative_ds) - settings['regrid']['target_grid'] = representative_ds + settings["regrid"]["target_grid"] = representative_ds else: # Check that MxN grid spec is correct - target_grid = settings['regrid']['target_grid'] + target_grid = settings["regrid"]["target_grid"] if isinstance(target_grid, str): parse_cell_spec(target_grid) # Check that cdo spec is correct @@ -149,47 +154,49 @@ def _update_target_grid(dataset, datasets, settings): def _update_regrid_time(dataset: Dataset, settings: dict) -> None: """Input data frequency automatically for regrid_time preprocessor.""" - if 'regrid_time' not in settings: + if "regrid_time" not in settings: return - if 'frequency' not in settings['regrid_time']: - settings['regrid_time']['frequency'] = dataset.facets['frequency'] + if "frequency" not in settings["regrid_time"]: + settings["regrid_time"]["frequency"] = dataset.facets["frequency"] def _select_dataset(dataset_name, datasets): for dataset in datasets: - if dataset.facets['dataset'] == dataset_name: + if dataset.facets["dataset"] == dataset_name: return dataset - diagnostic = datasets[0].facets['diagnostic'] - variable_group = datasets[0].facets['variable_group'] + diagnostic = datasets[0].facets["diagnostic"] + variable_group = datasets[0].facets["variable_group"] raise RecipeError( f"Unable to find dataset '{dataset_name}' in the list of datasets" - f"for variable '{variable_group}' of diagnostic '{diagnostic}'.") + f"for variable '{variable_group}' of diagnostic '{diagnostic}'." 
+ ) def _limit_datasets(datasets, profile): """Try to limit the number of datasets to max_datasets.""" - max_datasets = datasets[0].session['max_datasets'] + max_datasets = datasets[0].session["max_datasets"] if not max_datasets: return datasets logger.info("Limiting the number of datasets to %s", max_datasets) required_datasets = [ - (profile.get('extract_levels') or {}).get('levels'), - (profile.get('regrid') or {}).get('target_grid'), - datasets[0].facets.get('reference_dataset'), - datasets[0].facets.get('alternative_dataset'), + (profile.get("extract_levels") or {}).get("levels"), + (profile.get("regrid") or {}).get("target_grid"), + datasets[0].facets.get("reference_dataset"), + datasets[0].facets.get("alternative_dataset"), ] - limited = [d for d in datasets if d.facets['dataset'] in required_datasets] + limited = [d for d in datasets if d.facets["dataset"] in required_datasets] for dataset in datasets: if len(limited) >= max_datasets: break if dataset not in limited: limited.append(dataset) - logger.info("Only considering %s", - ', '.join(d.facets['alias'] for d in limited)) + logger.info( + "Only considering %s", ", ".join(d.facets["alias"] for d in limited) + ) return limited @@ -202,20 +209,20 @@ def _get_default_settings(dataset): settings = {} if _derive_needed(dataset): - settings['derive'] = { - 'short_name': facets['short_name'], - 'standard_name': facets['standard_name'], - 'long_name': facets['long_name'], - 'units': facets['units'], + settings["derive"] = { + "short_name": facets["short_name"], + "standard_name": facets["standard_name"], + "long_name": facets["long_name"], + "units": facets["units"], } # Strip supplementary variables before saving - settings['remove_supplementary_variables'] = {} + settings["remove_supplementary_variables"] = {} # Configure saving cubes to file - settings['save'] = {'compress': session['compress_netcdf']} - if facets['short_name'] != facets['original_short_name']: - settings['save']['alias'] = facets['short_name'] + settings["save"] = {"compress": session["compress_netcdf"]} + if facets["short_name"] != facets["original_short_name"]: + settings["save"]["alias"] = facets["short_name"] return settings @@ -224,19 +231,22 @@ def _exclude_dataset(settings, facets, step): """Exclude dataset from specific preprocessor step if requested.""" exclude = { _special_name_to_dataset(facets, dataset) - for dataset in settings[step].pop('exclude', []) + for dataset in settings[step].pop("exclude", []) } - if facets['dataset'] in exclude: + if facets["dataset"] in exclude: settings.pop(step) - logger.debug("Excluded dataset '%s' from preprocessor step '%s'", - facets['dataset'], step) + logger.debug( + "Excluded dataset '%s' from preprocessor step '%s'", + facets["dataset"], + step, + ) def _update_weighting_settings(settings, facets): """Update settings for the weighting preprocessors.""" - if 'weighting_landsea_fraction' not in settings: + if "weighting_landsea_fraction" not in settings: return - _exclude_dataset(settings, facets, 'weighting_landsea_fraction') + _exclude_dataset(settings, facets, "weighting_landsea_fraction") def _add_to_download_list(dataset): @@ -244,7 +254,7 @@ def _add_to_download_list(dataset): for i, file in enumerate(dataset.files): if isinstance(file, esgf.ESGFFile): DOWNLOAD_FILES.add(file) - dataset.files[i] = file.local_file(dataset.session['download_dir']) + dataset.files[i] = file.local_file(dataset.session["download_dir"]) def _schedule_for_download(datasets): @@ -269,18 +279,20 @@ def _log_input_files(datasets: 
Iterable[Dataset]) -> None: logger.debug( "Using input files for variable %s of dataset %s:\n%s%s", - dataset.facets['short_name'], - dataset.facets['alias'].replace('_', ' '), # type: ignore + dataset.facets["short_name"], + dataset.facets["alias"].replace("_", " "), # type: ignore _get_files_str(dataset), - supplementary_files_str + supplementary_files_str, ) def _get_files_str(dataset: Dataset) -> str: """Get nice string representation of all files of a dataset.""" - return '\n'.join( - f' {f}' if f.exists() # type: ignore - else f' {f} (will be downloaded)' for f in dataset.files + return "\n".join( + f" {f}" + if f.exists() # type: ignore + else f" {f} (will be downloaded)" + for f in dataset.files ) @@ -326,20 +338,20 @@ def _get_common_attributes(products, settings): # "span" setting: if "span=overlap", the intersection of all periods is # used; if "span=full", the union is used. The default value for "span" is # "overlap". - span = settings.get('span', 'overlap') + span = settings.get("span", "overlap") for product in products: - timerange = product.attributes['timerange'] + timerange = product.attributes["timerange"] start, end = _parse_period(timerange) - if 'timerange' not in attributes: - attributes['timerange'] = _dates_to_timerange(start, end) + if "timerange" not in attributes: + attributes["timerange"] = _dates_to_timerange(start, end) else: - start_date, end_date = _parse_period(attributes['timerange']) + start_date, end_date = _parse_period(attributes["timerange"]) start_date, start = _truncate_dates(start_date, start) end_date, end = _truncate_dates(end_date, end) # If "span=overlap", always use the latest start_date and the # earliest end_date - if span == 'overlap': + if span == "overlap": start_date = max([start, start_date]) end_date = min([end, end_date]) @@ -350,12 +362,12 @@ def _get_common_attributes(products, settings): start_date = min([start, start_date]) end_date = max([end, end_date]) - attributes['timerange'] = _dates_to_timerange(start_date, end_date) + attributes["timerange"] = _dates_to_timerange(start_date, end_date) # Ensure that attributes start_year and end_year are always available - start_year, end_year = _parse_period(attributes['timerange']) - attributes['start_year'] = int(str(start_year[0:4])) - attributes['end_year'] = int(str(end_year[0:4])) + start_year, end_year = _parse_period(attributes["timerange"]) + attributes["start_year"] = int(str(start_year[0:4])) + attributes["end_year"] = int(str(end_year[0:4])) return attributes @@ -363,7 +375,7 @@ def _get_common_attributes(products, settings): def _get_downstream_settings(step, order, products): """Get downstream preprocessor settings shared between products.""" settings = {} - remaining_steps = order[order.index(step) + 1:] + remaining_steps = order[order.index(step) + 1 :] some_product = next(iter(products)) for key, value in some_product.settings.items(): if key in remaining_steps: @@ -383,12 +395,12 @@ def _update_multi_dataset_settings(facets, settings): def _get_tag(step, identifier, statistic): # Avoid . 
in filename for percentiles - statistic = statistic.replace('.', '-') + statistic = statistic.replace(".", "-") - if step == 'ensemble_statistics': - tag = 'Ensemble' + statistic.title() - elif identifier == '': - tag = 'MultiModel' + statistic.title() + if step == "ensemble_statistics": + tag = "Ensemble" + statistic.title() + elif identifier == "": + tag = "MultiModel" + statistic.title() else: tag = identifier + statistic.title() @@ -412,42 +424,46 @@ def _update_multiproduct(input_products, order, preproc_dir, step): settings = list(products)[0].settings[step] - if step == 'ensemble_statistics': + if step == "ensemble_statistics": check.ensemble_statistics_preproc(settings) - grouping = ['project', 'dataset', 'exp', 'sub_experiment'] + grouping = ["project", "dataset", "exp", "sub_experiment"] else: check.multimodel_statistics_preproc(settings) - grouping = settings.get('groupby', None) + grouping = settings.get("groupby", None) downstream_settings = _get_downstream_settings(step, order, products) relevant_settings = { - 'output_products': defaultdict(dict) + "output_products": defaultdict(dict) } # pass to ancestors output_products = set() for identifier, products in _group_products(products, by_key=grouping): common_attributes = _get_common_attributes(products, settings) - statistics = settings.get('statistics', []) + statistics = settings.get("statistics", []) for statistic in statistics: statistic_attributes = dict(common_attributes) stat_id = _get_stat_identifier(statistic) statistic_attributes[step] = _get_tag(step, identifier, stat_id) - statistic_attributes.setdefault('alias', - statistic_attributes[step]) - statistic_attributes.setdefault('dataset', - statistic_attributes[step]) - filename = _get_multiproduct_filename(statistic_attributes, - preproc_dir) + statistic_attributes.setdefault( + "alias", statistic_attributes[step] + ) + statistic_attributes.setdefault( + "dataset", statistic_attributes[step] + ) + filename = _get_multiproduct_filename( + statistic_attributes, preproc_dir + ) statistic_product = PreprocessorFile( filename=filename, attributes=statistic_attributes, settings=downstream_settings, ) # Note that ancestors is set when running the preprocessor func. 
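            # (Editorial sketch, not part of this diff.) With, e.g.,
            # identifier "CMIP6" and statistics ["mean", "std_dev"], the
            # mapping filled below would end up roughly as:
            #   relevant_settings["output_products"]["CMIP6"] == {
            #       "mean": <PreprocessorFile>,
            #       "std_dev": <PreprocessorFile>,
            #   }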
output_products.add(statistic_product) - relevant_settings['output_products'][identifier][ - stat_id] = statistic_product + relevant_settings["output_products"][identifier][stat_id] = ( + statistic_product + ) return output_products, relevant_settings @@ -462,20 +478,23 @@ def update_ancestors(ancestors, step, downstream_settings): def _update_extract_shape(settings, session): - if 'extract_shape' in settings: - shapefile = settings['extract_shape'].get('shapefile') + if "extract_shape" in settings: + shapefile = settings["extract_shape"].get("shapefile") if shapefile: shapefile = _update_shapefile_path(shapefile, session=session) - settings['extract_shape']['shapefile'] = shapefile - check.extract_shape(settings['extract_shape']) + settings["extract_shape"]["shapefile"] = shapefile + check.extract_shape(settings["extract_shape"]) def _allow_skipping(dataset: Dataset): """Allow skipping of datasets.""" - allow_skipping = all([ - dataset.session['skip_nonexistent'], - dataset.facets['dataset'] != dataset.facets.get('reference_dataset'), - ]) + allow_skipping = all( + [ + dataset.session["skip_nonexistent"], + dataset.facets["dataset"] + != dataset.facets.get("reference_dataset"), + ] + ) return allow_skipping @@ -484,14 +503,14 @@ def _set_version(dataset: Dataset, input_datasets: list[Dataset]): versions = set() for in_dataset in input_datasets: in_dataset.set_version() - if version := in_dataset.facets.get('version'): + if version := in_dataset.facets.get("version"): if isinstance(version, list): versions.update(version) else: versions.add(version) if versions: version = versions.pop() if len(versions) == 1 else sorted(versions) - dataset.set_facet('version', version) + dataset.set_facet("version", version) for supplementary_ds in dataset.supplementaries: supplementary_ds.set_version() @@ -551,8 +570,9 @@ def _get_preprocessor_products( if missing_vars: separator = "\n- " raise InputFilesNotFound( - f'Missing data for preprocessor {name}:{separator}' - f'{separator.join(sorted(missing_vars))}') + f"Missing data for preprocessor {name}:{separator}" + f"{separator.join(sorted(missing_vars))}" + ) check.reference_for_bias_preproc(products) check.reference_for_distance_metric_preproc(products) @@ -578,11 +598,12 @@ def _configure_multi_product_preprocessor( order: Sequence[str], ): """Configure preprocessing of ensemble and multimodel statistics.""" - ensemble_step = 'ensemble_statistics' - multi_model_step = 'multi_model_statistics' + ensemble_step = "ensemble_statistics" + multi_model_step = "multi_model_statistics" if ensemble_step in profile: ensemble_products, ensemble_settings = _update_multiproduct( - products, order, preproc_dir, ensemble_step) + products, order, preproc_dir, ensemble_step + ) # check for ensemble_settings to bypass tests update_ancestors( @@ -595,7 +616,8 @@ def _configure_multi_product_preprocessor( if multi_model_step in profile: multimodel_products, multimodel_settings = _update_multiproduct( - ensemble_products, order, preproc_dir, multi_model_step) + ensemble_products, order, preproc_dir, multi_model_step + ) # check for multi_model_settings to bypass tests update_ancestors( @@ -625,10 +647,10 @@ def _set_start_end_year(product: PreprocessorFile) -> None: These attributes are used by many diagnostic scripts in ESMValTool. 
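    For illustration (editorial note, not in the source): a ``timerange``
    of ``'2000/2010'`` yields ``start_year=2000`` and ``end_year=2010``.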
""" - if 'timerange' in product.attributes: - start_year, end_year = _parse_period(product.attributes['timerange']) - product.attributes['start_year'] = int(str(start_year[0:4])) - product.attributes['end_year'] = int(str(end_year[0:4])) + if "timerange" in product.attributes: + start_year, end_year = _parse_period(product.attributes["timerange"]) + product.attributes["start_year"] = int(str(start_year[0:4])) + product.attributes["end_year"] = int(str(end_year[0:4])) def _update_preproc_functions(settings, dataset, datasets, missing_vars): @@ -652,7 +674,7 @@ def _update_preproc_functions(settings, dataset, datasets, missing_vars): except RecipeError as ex: missing_vars.add(ex.message) _update_regrid_time(dataset, settings) - if dataset.facets.get('frequency') == 'fx': + if dataset.facets.get("frequency") == "fx": check.check_for_temporal_preprocs(settings) check.statistics_preprocessors(settings) check.regridding_schemes(settings) @@ -666,13 +688,17 @@ def _get_preprocessor_task(datasets, profiles, task_name): # First set up the preprocessor profile facets = datasets[0].facets session = datasets[0].session - preprocessor = facets.get('preprocessor', 'default') + preprocessor = facets.get("preprocessor", "default") if preprocessor not in profiles: raise RecipeError( f"Unknown preprocessor '{preprocessor}' in variable " - f"{facets['variable_group']} of diagnostic {facets['diagnostic']}") - logger.info("Creating preprocessor '%s' task for variable '%s'", - preprocessor, facets['variable_group']) + f"{facets['variable_group']} of diagnostic {facets['diagnostic']}" + ) + logger.info( + "Creating preprocessor '%s' task for variable '%s'", + preprocessor, + facets["variable_group"], + ) profile = deepcopy(profiles[preprocessor]) order = _extract_preprocessor_order(profile) @@ -691,24 +717,27 @@ def _get_preprocessor_task(datasets, profiles, task_name): products=products, name=task_name, order=order, - debug=session['save_intermediary_cubes'], - write_ncl_interface=session['write_ncl_interface'], + debug=session["save_intermediary_cubes"], + write_ncl_interface=session["write_ncl_interface"], ) logger.info("PreprocessingTask %s created.", task.name) - logger.debug("PreprocessingTask %s will create the files:\n%s", task.name, - '\n'.join(str(p.filename) for p in task.products)) + logger.debug( + "PreprocessingTask %s will create the files:\n%s", + task.name, + "\n".join(str(p.filename) for p in task.products), + ) return task def _extract_preprocessor_order(profile): """Extract the order of the preprocessing steps from the profile.""" - custom_order = profile.pop('custom_order', False) + custom_order = profile.pop("custom_order", False) if not custom_order: return DEFAULT_ORDER - if 'derive' not in profile: - initial_steps = INITIAL_STEPS + ('derive', ) + if "derive" not in profile: + initial_steps = INITIAL_STEPS + ("derive",) else: initial_steps = INITIAL_STEPS order = tuple(p for p in profile if p not in initial_steps + FINAL_STEPS) @@ -725,18 +754,21 @@ def __init__(self, raw_recipe, session, recipe_file: Path): USED_DATASETS.clear() self._download_files: set[esgf.ESGFFile] = set() self.session = session - self.session['write_ncl_interface'] = self._need_ncl( - raw_recipe['diagnostics']) + self.session["write_ncl_interface"] = self._need_ncl( + raw_recipe["diagnostics"] + ) self._raw_recipe = raw_recipe self._filename = Path(recipe_file.name) - self._preprocessors = raw_recipe.get('preprocessors', {}) - if 'default' not in self._preprocessors: - self._preprocessors['default'] = {} + 
self._preprocessors = raw_recipe.get("preprocessors", {}) + if "default" not in self._preprocessors: + self._preprocessors["default"] = {} self.datasets = Dataset.from_recipe(recipe_file, session) self.diagnostics = self._initialize_diagnostics( - raw_recipe['diagnostics']) + raw_recipe["diagnostics"] + ) self.entity = self._initialize_provenance( - raw_recipe.get('documentation', {})) + raw_recipe.get("documentation", {}) + ) try: self.tasks = self.initialize_tasks() except RecipeError as exc: @@ -749,29 +781,32 @@ def _log_recipe_errors(self, exc): for task in exc.failed_tasks: logger.error(task.message) - if self.session['search_esgf'] == 'never' and any( - isinstance(err, InputFilesNotFound) - for err in exc.failed_tasks): + if self.session["search_esgf"] == "never" and any( + isinstance(err, InputFilesNotFound) for err in exc.failed_tasks + ): logger.error( "Not all input files required to run the recipe could be" - " found.") + " found." + ) logger.error( "If the files are available locally, please check" " your `rootpath` and `drs` settings in your user " - "configuration file %s", self.session['config_file']) + "configuration file %s", + self.session["config_file"], + ) logger.error( "To automatically download the required files to " "`download_dir: %s`, set `search_esgf: when_missing` or " "`search_esgf: always` in %s, or run the recipe with the " "extra command line argument --search_esgf=when_missing or " "--search_esgf=always", - self.session['download_dir'], - self.session['config_file'], + self.session["download_dir"], + self.session["config_file"], ) logger.info( "Note that automatic download is only available for files" " that are hosted on the ESGF, i.e. for projects: %s, and %s", - ', '.join(list(esgf.facets.FACETS)[:-1]), + ", ".join(list(esgf.facets.FACETS)[:-1]), list(esgf.facets.FACETS)[-1], ) @@ -780,10 +815,10 @@ def _need_ncl(raw_diagnostics): if not raw_diagnostics: return False for diagnostic in raw_diagnostics.values(): - if not diagnostic.get('scripts'): + if not diagnostic.get("scripts"): continue - for script in diagnostic['scripts'].values(): - if script.get('script', '').lower().endswith('.ncl'): + for script in diagnostic["scripts"].values(): + if script.get("script", "").lower().endswith(".ncl"): logger.info("NCL script detected, checking NCL version") check.ncl_version() return True @@ -806,23 +841,25 @@ def _initialize_diagnostics(self, raw_diagnostics): for name, raw_diagnostic in raw_diagnostics.items(): diagnostic = {} - diagnostic['name'] = name - diagnostic['datasets'] = [ - ds for ds in self.datasets if ds.facets['diagnostic'] == name + diagnostic["name"] = name + diagnostic["datasets"] = [ + ds for ds in self.datasets if ds.facets["diagnostic"] == name ] - variable_names = tuple(raw_diagnostic.get('variables', {})) - diagnostic['scripts'] = self._initialize_scripts( - name, raw_diagnostic.get('scripts'), variable_names) - for key in ('themes', 'realms'): + variable_names = tuple(raw_diagnostic.get("variables", {})) + diagnostic["scripts"] = self._initialize_scripts( + name, raw_diagnostic.get("scripts"), variable_names + ) + for key in ("themes", "realms"): if key in raw_diagnostic: - for script in diagnostic['scripts'].values(): - script['settings'][key] = raw_diagnostic[key] + for script in diagnostic["scripts"].values(): + script["settings"][key] = raw_diagnostic[key] diagnostics[name] = diagnostic return diagnostics - def _initialize_scripts(self, diagnostic_name, raw_scripts, - variable_names): + def _initialize_scripts( + self, 
diagnostic_name, raw_scripts, variable_names + ): """Define script in diagnostic.""" if not raw_scripts: return {} @@ -833,37 +870,40 @@ def _initialize_scripts(self, diagnostic_name, raw_scripts, for script_name, raw_settings in raw_scripts.items(): settings = deepcopy(raw_settings) - script = settings.pop('script') + script = settings.pop("script") ancestors = [] - for id_glob in settings.pop('ancestors', variable_names): + for id_glob in settings.pop("ancestors", variable_names): if TASKSEP not in id_glob: id_glob = diagnostic_name + TASKSEP + id_glob ancestors.append(id_glob) - settings['recipe'] = self._filename - settings['version'] = __version__ - settings['script'] = script_name + settings["recipe"] = self._filename + settings["version"] = __version__ + settings["script"] = script_name # Add output dirs to settings - for dir_name in ('run_dir', 'plot_dir', 'work_dir'): + for dir_name in ("run_dir", "plot_dir", "work_dir"): settings[dir_name] = os.path.join( - getattr(self.session, dir_name), diagnostic_name, - script_name) + getattr(self.session, dir_name), + diagnostic_name, + script_name, + ) # Copy other settings - if self.session['write_ncl_interface']: - settings['exit_on_ncl_warning'] = self.session[ - 'exit_on_warning'] + if self.session["write_ncl_interface"]: + settings["exit_on_ncl_warning"] = self.session[ + "exit_on_warning" + ] for key in ( - 'output_file_type', - 'log_level', - 'profile_diagnostic', - 'auxiliary_data_dir', + "output_file_type", + "log_level", + "profile_diagnostic", + "auxiliary_data_dir", ): settings[key] = self.session[key] scripts[script_name] = { - 'script': script, - 'output_dir': settings['work_dir'], - 'settings': settings, - 'ancestors': ancestors, + "script": script, + "output_dir": settings["work_dir"], + "settings": settings, + "ancestors": ancestors, } return scripts @@ -872,27 +912,33 @@ def _resolve_diagnostic_ancestors(self, tasks): """Resolve diagnostic ancestors.""" tasks = {t.name: t for t in tasks} for diagnostic_name, diagnostic in self.diagnostics.items(): - for script_name, script_cfg in diagnostic['scripts'].items(): + for script_name, script_cfg in diagnostic["scripts"].items(): task_id = diagnostic_name + TASKSEP + script_name - if task_id in tasks and isinstance(tasks[task_id], - DiagnosticTask): - logger.debug("Linking tasks for diagnostic %s script %s", - diagnostic_name, script_name) + if task_id in tasks and isinstance( + tasks[task_id], DiagnosticTask + ): + logger.debug( + "Linking tasks for diagnostic %s script %s", + diagnostic_name, + script_name, + ) ancestors = [] - for id_glob in script_cfg['ancestors']: + for id_glob in script_cfg["ancestors"]: ancestor_ids = fnmatch.filter(tasks, id_glob) if not ancestor_ids: raise RecipeError( "Could not find any ancestors matching " - f"'{id_glob}'.") - logger.debug("Pattern %s matches %s", id_glob, - ancestor_ids) + f"'{id_glob}'." 
+ ) + logger.debug( + "Pattern %s matches %s", id_glob, ancestor_ids + ) ancestors.extend(tasks[a] for a in ancestor_ids) tasks[task_id].ancestors = ancestors def _get_tasks_to_run(self): """Get tasks filtered and add ancestors if needed.""" - tasknames_to_run = self.session['diagnostics'] + tasknames_to_run = self.session["diagnostics"] if tasknames_to_run: tasknames_to_run = set(tasknames_to_run) while self._update_with_ancestors(tasknames_to_run): @@ -907,11 +953,11 @@ def _update_with_ancestors(self, tasknames_to_run): # those tasks that match one of the patterns given by tasknames_to_run # to for diagnostic_name, diagnostic in self.diagnostics.items(): - for script_name, script_cfg in diagnostic['scripts'].items(): + for script_name, script_cfg in diagnostic["scripts"].items(): task_name = diagnostic_name + TASKSEP + script_name for pattern in tasknames_to_run: if fnmatch.fnmatch(task_name, pattern): - ancestors = script_cfg.get('ancestors', []) + ancestors = script_cfg.get("ancestors", []) if isinstance(ancestors, str): ancestors = ancestors.split() for ancestor in ancestors: @@ -929,13 +975,14 @@ def _update_with_ancestors(self, tasknames_to_run): return num_filters != len(tasknames_to_run) - def _create_diagnostic_tasks(self, diagnostic_name, diagnostic, - tasknames_to_run): + def _create_diagnostic_tasks( + self, diagnostic_name, diagnostic, tasknames_to_run + ): """Create diagnostic tasks.""" tasks = [] - if self.session['run_diagnostic']: - for script_name, script_cfg in diagnostic['scripts'].items(): + if self.session["run_diagnostic"]: + for script_name, script_cfg in diagnostic["scripts"].items(): task_name = diagnostic_name + TASKSEP + script_name # Skip diagnostic tasks if desired by the user @@ -944,29 +991,35 @@ def _create_diagnostic_tasks(self, diagnostic_name, diagnostic, if fnmatch.fnmatch(task_name, pattern): break else: - logger.info("Skipping task %s due to filter", - task_name) + logger.info( + "Skipping task %s due to filter", task_name + ) continue logger.info("Creating diagnostic task %s", task_name) task = DiagnosticTask( - script=script_cfg['script'], - output_dir=script_cfg['output_dir'], - settings=script_cfg['settings'], + script=script_cfg["script"], + output_dir=script_cfg["output_dir"], + settings=script_cfg["settings"], name=task_name, ) tasks.append(task) return tasks - def _create_preprocessor_tasks(self, diagnostic_name, diagnostic, - tasknames_to_run, any_diag_script_is_run): + def _create_preprocessor_tasks( + self, + diagnostic_name, + diagnostic, + tasknames_to_run, + any_diag_script_is_run, + ): """Create preprocessor tasks.""" tasks = [] failed_tasks = [] for variable_group, datasets in groupby( - diagnostic['datasets'], - key=lambda ds: ds.facets['variable_group']): + diagnostic["datasets"], key=lambda ds: ds.facets["variable_group"] + ): task_name = diagnostic_name + TASKSEP + variable_group # Skip preprocessor if not a single diagnostic script is run and @@ -977,21 +1030,25 @@ def _create_preprocessor_tasks(self, diagnostic_name, diagnostic, if fnmatch.fnmatch(task_name, pattern): break else: - logger.info("Skipping task %s due to filter", - task_name) + logger.info( + "Skipping task %s due to filter", task_name + ) continue # Resume previous runs if requested, else create a new task - for resume_dir in self.session['resume_from']: + for resume_dir in self.session["resume_from"]: prev_preproc_dir = Path( resume_dir, - 'preproc', + "preproc", diagnostic_name, variable_group, ) if prev_preproc_dir.exists(): - logger.info("Re-using 
preprocessed files from %s for %s", - prev_preproc_dir, task_name) + logger.info( + "Re-using preprocessed files from %s for %s", + prev_preproc_dir, + task_name, + ) preproc_dir = Path( self.session.preproc_dir, diagnostic_name, @@ -1029,9 +1086,9 @@ def _create_tasks(self): logger.info("Creating tasks for diagnostic %s", diagnostic_name) # Create diagnostic tasks - new_tasks = self._create_diagnostic_tasks(diagnostic_name, - diagnostic, - tasknames_to_run) + new_tasks = self._create_diagnostic_tasks( + diagnostic_name, diagnostic, tasknames_to_run + ) any_diag_script_is_run = bool(new_tasks) for task in new_tasks: task.priority = priority @@ -1040,8 +1097,11 @@ def _create_tasks(self): # Create preprocessor tasks new_tasks, failed = self._create_preprocessor_tasks( - diagnostic_name, diagnostic, tasknames_to_run, - any_diag_script_is_run) + diagnostic_name, + diagnostic, + tasknames_to_run, + any_diag_script_is_run, + ) failed_tasks.extend(failed) for task in new_tasks: for task0 in task.flatten(): @@ -1050,14 +1110,14 @@ def _create_tasks(self): priority += 1 if failed_tasks: - recipe_error = RecipeError('Could not create all tasks') + recipe_error = RecipeError("Could not create all tasks") recipe_error.failed_tasks.extend(failed_tasks) raise recipe_error check.tasks_valid(tasks) # Resolve diagnostic ancestors - if self.session['run_diagnostic']: + if self.session["run_diagnostic"]: self._resolve_diagnostic_ancestors(tasks) return tasks @@ -1066,8 +1126,10 @@ def initialize_tasks(self): """Define tasks in recipe.""" tasks = self._create_tasks() tasks = tasks.flatten() - logger.info("These tasks will be executed: %s", - ', '.join(t.name for t in tasks)) + logger.info( + "These tasks will be executed: %s", + ", ".join(t.name for t in tasks), + ) # Initialize task provenance for task in tasks: @@ -1081,22 +1143,24 @@ def initialize_tasks(self): def __str__(self): """Get human readable summary.""" - return '\n\n'.join(str(task) for task in self.tasks) + return "\n\n".join(str(task) for task in self.tasks) def run(self): """Run all tasks in the recipe.""" if not self.tasks: - raise RecipeError('No tasks to run!') + raise RecipeError("No tasks to run!") filled_recipe = self.write_filled_recipe() # Download required data - if self.session['search_esgf'] != 'never': - esgf.download(self._download_files, self.session['download_dir']) + if self.session["search_esgf"] != "never": + esgf.download(self._download_files, self.session["download_dir"]) - self.tasks.run(max_parallel_tasks=self.session['max_parallel_tasks']) + self.tasks.run(max_parallel_tasks=self.session["max_parallel_tasks"]) logger.info( "Wrote recipe with version numbers and wildcards " - "to:\nfile://%s", filled_recipe) + "to:\nfile://%s", + filled_recipe, + ) self.write_html_summary() def get_output(self) -> dict: @@ -1109,17 +1173,18 @@ def get_output(self) -> dict: """ output = {} - output['session'] = self.session - output['recipe_filename'] = self._filename - output['recipe_data'] = self._raw_recipe - output['task_output'] = {} + output["session"] = self.session + output["recipe_filename"] = self._filename + output["recipe_data"] = self._raw_recipe + output["task_output"] = {} for task in sorted(self.tasks.flatten(), key=lambda t: t.priority): - if self.session['remove_preproc_dir'] and isinstance( - task, PreprocessingTask): + if self.session["remove_preproc_dir"] and isinstance( + task, PreprocessingTask + ): # Skip preprocessing tasks that are deleted afterwards continue - output['task_output'][task.name] = 
task.get_product_attributes() + output["task_output"][task.name] = task.get_product_attributes() return output @@ -1127,11 +1192,13 @@ def write_filled_recipe(self): """Write copy of recipe with filled wildcards.""" recipe = datasets_to_recipe(USED_DATASETS, self._raw_recipe) filename = self.session.run_dir / f"{self._filename.stem}_filled.yml" - with filename.open('w', encoding='utf-8') as file: + with filename.open("w", encoding="utf-8") as file: yaml.safe_dump(recipe, file, sort_keys=False) logger.info( "Wrote recipe with version numbers and wildcards " - "to:\nfile://%s", filename) + "to:\nfile://%s", + filename, + ) return filename def write_html_summary(self): @@ -1141,6 +1208,7 @@ def write_html_summary(self): warnings.simplefilter("ignore") # keep RecipeOutput here to avoid circular import from esmvalcore.experimental.recipe_output import RecipeOutput + output = self.get_output() try: diff --git a/esmvalcore/_recipe/to_datasets.py b/esmvalcore/_recipe/to_datasets.py index 962d732a9d..2e814e6d8c 100644 --- a/esmvalcore/_recipe/to_datasets.py +++ b/esmvalcore/_recipe/to_datasets.py @@ -1,4 +1,5 @@ """Module that contains functions for reading the `Dataset`s from a recipe.""" + from __future__ import annotations import logging @@ -26,14 +27,14 @@ logger = logging.getLogger(__name__) _ALIAS_INFO_KEYS = ( - 'project', - 'activity', - 'driver', - 'dataset', - 'exp', - 'sub_experiment', - 'ensemble', - 'version', + "project", + "activity", + "driver", + "dataset", + "exp", + "sub_experiment", + "ensemble", + "version", ) """List of keys to be used to compose the alias, ordered by priority.""" @@ -43,7 +44,7 @@ def _facet_to_str(facet_value: FacetValue) -> str: if isinstance(facet_value, str): return facet_value if isinstance(facet_value, Iterable): - return '-'.join(str(v) for v in facet_value) + return "-".join(str(v) for v in facet_value) return str(facet_value) @@ -100,10 +101,11 @@ def _set_alias(variables): for dataset in variable: alias = tuple( _facet_to_str(dataset.facets.get(key, None)) - for key in _ALIAS_INFO_KEYS) + for key in _ALIAS_INFO_KEYS + ) datasets_info.add(alias) - if 'alias' not in dataset.facets: - dataset.facets['alias'] = alias + if "alias" not in dataset.facets: + dataset.facets["alias"] = alias alias = {} for info in datasets_info: @@ -113,15 +115,17 @@ def _set_alias(variables): _get_next_alias(alias, datasets_info, 0) for info in datasets_info: - alias[info] = '_'.join( - [str(value) for value in alias[info] if value is not None]) + alias[info] = "_".join( + [str(value) for value in alias[info] if value is not None] + ) if not alias[info]: - alias[info] = info[_ALIAS_INFO_KEYS.index('dataset')] + alias[info] = info[_ALIAS_INFO_KEYS.index("dataset")] for variable in variables: for dataset in variable: - dataset.facets['alias'] = alias.get(dataset.facets['alias'], - dataset.facets['alias']) + dataset.facets["alias"] = alias.get( + dataset.facets["alias"], dataset.facets["alias"] + ) def _get_next_alias(alias, datasets_info, i): @@ -145,10 +149,11 @@ def _get_next_alias(alias, datasets_info, i): def _check_supplementaries_valid(supplementaries: Iterable[Facets]) -> None: """Check that supplementary variables have a short_name.""" for facets in supplementaries: - if 'short_name' not in facets: + if "short_name" not in facets: raise RecipeError( "'short_name' is required for supplementary_variables " - f"entries, but missing in {facets}") + f"entries, but missing in {facets}" + ) def _merge_supplementary_dicts( @@ -163,9 +168,9 @@ def _merge_supplementary_dicts( 
_check_supplementaries_valid(ds_facets) merged = {} for facets in var_facets: - merged[facets['short_name']] = facets + merged[facets["short_name"]] = facets for facets in ds_facets: - short_name = facets['short_name'] + short_name = facets["short_name"] if short_name not in merged: merged[short_name] = {} merged[short_name].update(facets) @@ -174,18 +179,23 @@ def _merge_supplementary_dicts( def _fix_cmip5_fx_ensemble(dataset: Dataset): """Automatically correct the wrong ensemble for CMIP5 fx variables.""" - if (dataset.facets.get('project') == 'CMIP5' - and dataset.facets.get('mip') == 'fx' - and dataset.facets.get('ensemble') != 'r0i0p0' - and not dataset.files): - original_ensemble = dataset['ensemble'] + if ( + dataset.facets.get("project") == "CMIP5" + and dataset.facets.get("mip") == "fx" + and dataset.facets.get("ensemble") != "r0i0p0" + and not dataset.files + ): + original_ensemble = dataset["ensemble"] copy = dataset.copy() - copy.facets['ensemble'] = 'r0i0p0' + copy.facets["ensemble"] = "r0i0p0" if copy.files: - dataset.facets['ensemble'] = 'r0i0p0' - logger.info("Corrected wrong 'ensemble' from '%s' to '%s' for %s", - original_ensemble, dataset['ensemble'], - dataset.summary(shorten=True)) + dataset.facets["ensemble"] = "r0i0p0" + logger.info( + "Corrected wrong 'ensemble' from '%s' to '%s' for %s", + original_ensemble, + dataset["ensemble"], + dataset.summary(shorten=True), + ) dataset.find_files() @@ -197,18 +207,18 @@ def _get_supplementary_short_names( # Determine if the main variable is an ocean variable. var_facets = dict(facets) _update_cmor_facets(var_facets) - realms = var_facets.get('modeling_realm', []) + realms = var_facets.get("modeling_realm", []) if isinstance(realms, (str, Number)): realms = [str(realms)] - ocean_realms = {'ocean', 'seaIce', 'ocnBgchem'} + ocean_realms = {"ocean", "seaIce", "ocnBgchem"} is_ocean_variable = any(realm in ocean_realms for realm in realms) # Guess the best matching supplementary variable based on the realm. 
- short_names = PREPROCESSOR_SUPPLEMENTARIES[step]['variables'] - if set(short_names) == {'areacella', 'areacello'}: - short_names = ['areacello'] if is_ocean_variable else ['areacella'] - if set(short_names) == {'sftlf', 'sftof'}: - short_names = ['sftof'] if is_ocean_variable else ['sftlf'] + short_names = PREPROCESSOR_SUPPLEMENTARIES[step]["variables"] + if set(short_names) == {"areacella", "areacello"}: + short_names = ["areacello"] if is_ocean_variable else ["areacella"] + if set(short_names) == {"sftlf", "sftof"}: + short_names = ["sftof"] if is_ocean_variable else ["sftlf"] return short_names @@ -221,21 +231,21 @@ def _append_missing_supplementaries( """Append wildcard definitions for missing supplementary variables.""" steps = [step for step in settings if step in PREPROCESSOR_SUPPLEMENTARIES] - project: str = facets['project'] # type: ignore + project: str = facets["project"] # type: ignore for step in steps: for short_name in _get_supplementary_short_names(facets, step): - short_names = {f['short_name'] for f in supplementaries} + short_names = {f["short_name"] for f in supplementaries} if short_name in short_names: continue supplementary_facets: Facets = { - facet: '*' - for facet in FACETS.get(project, ['mip']) + facet: "*" + for facet in FACETS.get(project, ["mip"]) if facet not in _CMOR_KEYS } - if 'version' in facets: - supplementary_facets['version'] = '*' - supplementary_facets['short_name'] = short_name + if "version" in facets: + supplementary_facets["version"] = "*" + supplementary_facets["short_name"] = short_name supplementaries.append(supplementary_facets) @@ -249,54 +259,55 @@ def _get_dataset_facets_from_recipe( ) -> tuple[Facets, list[Facets]]: """Read the facets for a single dataset definition from the recipe.""" facets = deepcopy(recipe_variable) - facets.pop('additional_datasets', None) + facets.pop("additional_datasets", None) recipe_dataset = deepcopy(recipe_dataset) supplementaries = _merge_supplementary_dicts( - facets.pop('supplementary_variables', []), - recipe_dataset.pop('supplementary_variables', []), + facets.pop("supplementary_variables", []), + recipe_dataset.pop("supplementary_variables", []), ) facets.update(recipe_dataset) - if 'short_name' not in facets: - facets['short_name'] = variable_group + if "short_name" not in facets: + facets["short_name"] = variable_group # Flaky support for limiting the number of years in a recipe. # If we want this to work, it should actually be done based on `timerange`, # after any wildcards have been resolved. 
- if 'end_year' in facets and session['max_years']: - facets['end_year'] = min( - facets['end_year'], - facets['start_year'] + session['max_years'] - 1) + if "end_year" in facets and session["max_years"]: + facets["end_year"] = min( + facets["end_year"], facets["start_year"] + session["max_years"] - 1 + ) # Legacy: support start_year and end_year instead of timerange _replace_years_with_timerange(facets) # Legacy: support wrong capitalization of obs4MIPs - if facets['project'] == 'obs4mips': - logger.warning("Correcting capitalization, project 'obs4mips' " - "should be written as 'obs4MIPs'") - facets['project'] = 'obs4MIPs' + if facets["project"] == "obs4mips": + logger.warning( + "Correcting capitalization, project 'obs4mips' " + "should be written as 'obs4MIPs'" + ) + facets["project"] = "obs4MIPs" check.variable( facets, required_keys=( - 'short_name', - 'mip', - 'dataset', - 'project', + "short_name", + "mip", + "dataset", + "project", ), diagnostic=diagnostic_name, - variable_group=variable_group + variable_group=variable_group, ) - preprocessor = facets.get('preprocessor', 'default') + preprocessor = facets.get("preprocessor", "default") settings = profiles.get(preprocessor, {}) _append_missing_supplementaries(supplementaries, facets, settings) supplementaries = [ - facets for facets in supplementaries - if not facets.pop('skip', False) + facets for facets in supplementaries if not facets.pop("skip", False) ] return facets, supplementaries @@ -309,14 +320,16 @@ def _get_facets_from_recipe( session: Session, ) -> Iterator[tuple[Facets, list[Facets]]]: """Read the facets for the datasets of one variable from the recipe.""" - diagnostic = recipe['diagnostics'][diagnostic_name] - recipe_variable = diagnostic['variables'][variable_group] + diagnostic = recipe["diagnostics"][diagnostic_name] + recipe_variable = diagnostic["variables"][variable_group] if recipe_variable is None: recipe_variable = {} - recipe_datasets = (recipe.get('datasets', []) + - diagnostic.get('additional_datasets', []) + - recipe_variable.get('additional_datasets', [])) + recipe_datasets = ( + recipe.get("datasets", []) + + diagnostic.get("additional_datasets", []) + + recipe_variable.get("additional_datasets", []) + ) check.duplicate_datasets(recipe_datasets, diagnostic_name, variable_group) # The NCL interface requires a distinction between variable and @@ -324,7 +337,7 @@ # keep track of which keys are part of the dataset.
DATASET_KEYS.update(key for ds in recipe_datasets for key in ds) - profiles = recipe.setdefault('preprocessors', {'default': {}}) + profiles = recipe.setdefault("preprocessors", {"default": {}}) for recipe_dataset in recipe_datasets: yield _get_dataset_facets_from_recipe( @@ -345,17 +358,19 @@ def _get_datasets_for_variable( ) -> list[Dataset]: """Read the datasets from a variable definition in the recipe.""" logger.debug( - "Populating list of datasets for variable %s in " - "diagnostic %s", variable_group, diagnostic_name) + "Populating list of datasets for variable %s in diagnostic %s", + variable_group, + diagnostic_name, + ) datasets = [] idx = 0 for facets, supplementaries in _get_facets_from_recipe( - recipe, - diagnostic_name=diagnostic_name, - variable_group=variable_group, - session=session, + recipe, + diagnostic_name=diagnostic_name, + variable_group=variable_group, + session=session, ): template0 = Dataset(**facets) template0.session = session @@ -363,11 +378,11 @@ def _get_datasets_for_variable( for supplementary_facets in supplementaries: template1.add_supplementary(**supplementary_facets) for supplementary_ds in template1.supplementaries: - supplementary_ds.facets.pop('preprocessor', None) + supplementary_ds.facets.pop("preprocessor", None) for dataset in _dataset_from_files(template1): - dataset['variable_group'] = variable_group - dataset['diagnostic'] = diagnostic_name - dataset['recipe_dataset_index'] = idx # type: ignore + dataset["variable_group"] = variable_group + dataset["diagnostic"] = diagnostic_name + dataset["recipe_dataset_index"] = idx # type: ignore logger.debug("Found %s", dataset.summary(shorten=True)) datasets.append(dataset) idx += 1 @@ -383,10 +398,10 @@ def datasets_from_recipe( datasets = [] recipe = _load_recipe(recipe) - diagnostics = recipe.get('diagnostics') or {} + diagnostics = recipe.get("diagnostics") or {} for name, diagnostic in diagnostics.items(): diagnostic_datasets = [] - for variable_group in diagnostic.get('variables', {}): + for variable_group in diagnostic.get("variables", {}): variable_datasets = _get_datasets_for_variable( recipe, diagnostic_name=name, @@ -409,7 +424,9 @@ def _dataset_from_files(dataset: Dataset) -> list[Dataset]: if any(_isglob(f) for f in dataset.facets.values()): logger.debug( "Expanding dataset globs for dataset %s, " - "this may take a while..", dataset.summary(shorten=True)) + "this may take a while..", + dataset.summary(shorten=True), + ) representative_datasets = _representative_datasets(dataset) @@ -423,8 +440,9 @@ def _dataset_from_files(dataset: Dataset) -> list[Dataset]: unexpanded_globs = {} for key, value in dataset.facets.items(): if _isglob(value): - if (key in expanded_ds.facets and - not _isglob(expanded_ds[key])): + if key in expanded_ds.facets and not _isglob( + expanded_ds[key] + ): updated_facets[key] = expanded_ds.facets[key] else: unexpanded_globs[key] = value @@ -444,7 +462,7 @@ def _dataset_from_files(dataset: Dataset) -> list[Dataset]: # If globs have been expanded, only consider those datasets that contain # all necessary input variables if derivation is necessary - for (updated_facets, new_ds) in all_datasets[0]: + for updated_facets, new_ds in all_datasets[0]: other_facets = [[d[0] for d in ds] for ds in all_datasets[1:]] if all(updated_facets in facets for facets in other_facets): result.append(new_ds) @@ -452,7 +470,7 @@ def _dataset_from_files(dataset: Dataset) -> list[Dataset]: logger.debug( "Not all necessary input variables to derive '%s' are " "available for dataset %s", - 
dataset['short_name'], + dataset["short_name"], updated_facets, ) @@ -469,9 +487,9 @@ def _report_unexpanded_globs( ) -> str: """Get error message for unexpanded globs.""" msg = ( - "Unable to replace " + - ", ".join(f"{k}={v}" for k, v in unexpanded_globs.items()) + - f" by a value for\n{unexpanded_ds}" + "Unable to replace " + + ", ".join(f"{k}={v}" for k, v in unexpanded_globs.items()) + + f" by a value for\n{unexpanded_ds}" ) # Set supplementaries to [] to avoid searching for supplementary files @@ -483,20 +501,22 @@ def _report_unexpanded_globs( else: paths_msg = "" msg = ( - f"{msg}\nDo the {paths_msg}files:\n" + - "\n".join( + f"{msg}\nDo the {paths_msg}files:\n" + + "\n".join( f"{f} with facets: {f.facets}" for f in expanded_ds.files - ) + - "\nprovide the missing facet values?" + ) + + "\nprovide the missing facet values?" ) else: - timerange = expanded_ds.facets.get('timerange') + timerange = expanded_ds.facets.get("timerange") patterns = expanded_ds._file_globs msg = ( - f"{msg}\nNo files found matching:\n" + - "\n".join(str(p) for p in patterns) + ( # type:ignore + f"{msg}\nNo files found matching:\n" + + "\n".join(str(p) for p in patterns) # type: ignore[union-attr] + + ( # type:ignore f"\nwithin the requested timerange {timerange}." - if timerange else "" + if timerange + else "" ) ) @@ -505,14 +525,14 @@ def _report_unexpanded_globs( def _derive_needed(dataset: Dataset) -> bool: """Check if dataset needs to be derived from other datasets.""" - if not dataset.facets.get('derive'): + if not dataset.facets.get("derive"): return False - if dataset.facets.get('force_derivation'): + if dataset.facets.get("force_derivation"): return True - if _isglob(dataset.facets.get('timerange', '')): + if _isglob(dataset.facets.get("timerange", "")): # Our file finding routines are not able to handle globs. dataset = dataset.copy() - dataset.facets.pop('timerange') + dataset.facets.pop("timerange") copy = dataset.copy() copy.supplementaries = [] @@ -528,30 +548,32 @@ def _get_input_datasets(dataset: Dataset) -> list[Dataset]: # Configure input datasets needed to derive variable datasets = [] - required_vars = get_required(facets['short_name'], facets['project']) + required_vars = get_required(facets["short_name"], facets["project"]) # idea: add option to specify facets in list of dicts that is value of # 'derive' in the recipe and use that instead of get_required? for input_facets in required_vars: input_dataset = dataset.copy() - keep = {'alias', 'recipe_dataset_index', *dataset.minimal_facets} + keep = {"alias", "recipe_dataset_index", *dataset.minimal_facets} input_dataset.facets = { k: v for k, v in input_dataset.facets.items() if k in keep } input_dataset.facets.update(input_facets) input_dataset.augment_facets() _fix_cmip5_fx_ensemble(input_dataset) - if input_facets.get('optional') and not input_dataset.files: + if input_facets.get("optional") and not input_dataset.files: logger.info( "Skipping: no data found for %s which is marked as " - "'optional'", input_dataset) + "'optional'", + input_dataset, + ) else: datasets.append(input_dataset) # Check timeranges of available input data. 
timeranges = set() for input_dataset in datasets: - if 'timerange' in input_dataset.facets: - timeranges.add(input_dataset.facets['timerange']) + if "timerange" in input_dataset.facets: + timeranges.add(input_dataset.facets["timerange"]) check.differing_timeranges(timeranges, required_vars) return datasets diff --git a/esmvalcore/_task.py b/esmvalcore/_task.py index 2b785bc8d6..dc15718169 100644 --- a/esmvalcore/_task.py +++ b/esmvalcore/_task.py @@ -1,4 +1,5 @@ """ESMValtool task definition.""" + import abc import contextlib import datetime @@ -29,7 +30,7 @@ def path_representer(dumper, data): """For printing pathlib.Path objects in yaml files.""" - return dumper.represent_scalar('tag:yaml.org,2002:str', str(data)) + return dumper.represent_scalar("tag:yaml.org,2002:str", str(data)) yaml.representer.SafeRepresenter.add_representer(Path, path_representer) @@ -38,7 +39,7 @@ def path_representer(dumper, data): logger = logging.getLogger(__name__) DATASET_KEYS = { - 'mip', + "mip", } @@ -46,23 +47,23 @@ def _get_resource_usage(process, start_time, children=True): """Get resource usage.""" # yield header first entries = [ - 'Date and time (UTC)', - 'Real time (s)', - 'CPU time (s)', - 'CPU (%)', - 'Memory (GB)', - 'Memory (%)', - 'Disk read (GB)', - 'Disk write (GB)', + "Date and time (UTC)", + "Real time (s)", + "CPU time (s)", + "CPU (%)", + "Memory (GB)", + "Memory (%)", + "Disk read (GB)", + "Disk write (GB)", ] - fmt = '{}\t' * len(entries[:-1]) + '{}\n' - yield (fmt.format(*entries), 0.) + fmt = "{}\t" * len(entries[:-1]) + "{}\n" + yield (fmt.format(*entries), 0.0) # Compute resource usage gigabyte = float(2**30) precision = [1, 1, None, 1, None, 3, 3] cache = {} - max_memory = 0. + max_memory = 0.0 try: process.io_counters() except AttributeError: @@ -92,10 +93,16 @@ def _get_resource_usage(process, start_time, children=True): proc.cpu_percent(), proc.memory_info().rss / gigabyte, proc.memory_percent(), - (proc.io_counters().read_bytes / - gigabyte if counters_available else float('nan')), - (proc.io_counters().write_bytes / - gigabyte if counters_available else float('nan')), + ( + proc.io_counters().read_bytes / gigabyte + if counters_available + else float("nan") + ), + ( + proc.io_counters().write_bytes / gigabyte + if counters_available + else float("nan") + ), ] except (OSError, psutil.AccessDenied, psutil.NoSuchProcess): # Try again if an error occurs because some process died @@ -119,17 +126,20 @@ def _log_resource_usage(): """Write resource usage to file.""" process = psutil.Process(pid) start_time = time.time() - with open(filename, 'w', encoding='utf-8') as file: - for msg, max_mem in _get_resource_usage(process, start_time, - children): + with open(filename, "w", encoding="utf-8") as file: + for msg, max_mem in _get_resource_usage( + process, start_time, children + ): file.write(msg) time.sleep(interval) if halt.is_set(): - logger.info('Maximum memory used (estimate): %.1f GB', - max_mem) logger.info( - 'Sampled every second. It may be inaccurate if short ' - 'but high spikes in memory consumption occur.') + "Maximum memory used (estimate): %.1f GB", max_mem + ) + logger.info( + "Sampled every second. It may be inaccurate if short " + "but high spikes in memory consumption occur." 
+ ) return thread = threading.Thread(target=_log_resource_usage) @@ -141,16 +151,16 @@ def _log_resource_usage(): thread.join() -def _py2ncl(value, var_name=''): +def _py2ncl(value, var_name=""): """Format a structure of Python list/dict/etc items as NCL.""" - txt = var_name + ' = ' if var_name else '' + txt = var_name + " = " if var_name else "" if value is None: - txt += '_Missing' + txt += "_Missing" elif isinstance(value, (str, Path)): txt += '"{}"'.format(value) elif isinstance(value, (list, tuple)): if not value: - txt += '_Missing' + txt += "_Missing" else: if isinstance(value[0], numbers.Real): type_ = numbers.Real @@ -158,32 +168,34 @@ def _py2ncl(value, var_name=''): type_ = type(value[0]) if any(not isinstance(v, type_) for v in value): raise ValueError( - "NCL array cannot be mixed type: {}".format(value)) - txt += '(/{}/)'.format(', '.join(_py2ncl(v) for v in value)) + "NCL array cannot be mixed type: {}".format(value) + ) + txt += "(/{}/)".format(", ".join(_py2ncl(v) for v in value)) elif isinstance(value, dict): if not var_name: raise ValueError( - "NCL does not support nested dicts: {}".format(value)) - txt += 'True\n' + "NCL does not support nested dicts: {}".format(value) + ) + txt += "True\n" for key in value: - txt += '{}@{} = {}\n'.format(var_name, key, _py2ncl(value[key])) + txt += "{}@{} = {}\n".format(var_name, key, _py2ncl(value[key])) else: txt += str(value) return txt -def write_ncl_settings(settings, filename, mode='wt'): +def write_ncl_settings(settings, filename, mode="wt"): """Write a dictionary with generic settings to NCL file.""" logger.debug("Writing NCL configuration file %s", filename) def _ncl_type(value): """Convert some Python types to NCL types.""" typemap = { - bool: 'logical', - str: 'string', - float: 'double', - int: 'int64', - dict: 'logical', + bool: "logical", + str: "string", + float: "double", + int: "int64", + dict: "logical", } for type_ in typemap: if isinstance(value, type_): @@ -193,41 +205,50 @@ def _ncl_type(value): lines = [] # ignore some settings for NCL diagnostic - ignore_settings = ['profile_diagnostic', ] + ignore_settings = [ + "profile_diagnostic", + ] for sett in ignore_settings: settings_copy = dict(settings) - if 'diag_script_info' not in settings_copy: + if "diag_script_info" not in settings_copy: settings.pop(sett, None) else: - settings_copy['diag_script_info'].pop(sett, None) + settings_copy["diag_script_info"].pop(sett, None) for var_name, value in sorted(settings_copy.items()): if isinstance(value, (list, tuple)): # Create an NCL list that can span multiple files - lines.append('if (.not. isdefined("{var_name}")) then\n' - ' {var_name} = NewList("fifo")\n' - 'end if\n'.format(var_name=var_name)) + lines.append( + 'if (.not. 
isdefined("{var_name}")) then\n' + ' {var_name} = NewList("fifo")\n' + "end if\n".format(var_name=var_name) + ) for item in value: - lines.append('ListAppend({var_name}, new(1, {type}))\n' - 'i = ListCount({var_name}) - 1'.format( - var_name=var_name, type=_ncl_type(item))) - lines.append(_py2ncl(item, var_name + '[i]')) + lines.append( + "ListAppend({var_name}, new(1, {type}))\n" + "i = ListCount({var_name}) - 1".format( + var_name=var_name, type=_ncl_type(item) + ) + ) + lines.append(_py2ncl(item, var_name + "[i]")) else: # Create an NCL variable that overwrites previous variables - lines.append('if (isvar("{var_name}")) then\n' - ' delete({var_name})\n' - 'end if\n'.format(var_name=var_name)) + lines.append( + 'if (isvar("{var_name}")) then\n' + " delete({var_name})\n" + "end if\n".format(var_name=var_name) + ) lines.append(_py2ncl(value, var_name)) - with open(filename, mode, encoding='utf-8') as file: - file.write('\n'.join(lines)) - file.write('\n') + with open(filename, mode, encoding="utf-8") as file: + file.write("\n".join(lines)) + file.write("\n") class BaseTask: """Base class for defining task classes.""" - def __init__(self, ancestors=None, name='', products=None): + def __init__(self, ancestors=None, name="", products=None): """Initialize task.""" self.ancestors = [] if ancestors is None else ancestors self.products = set() if products is None else set(products) @@ -240,7 +261,8 @@ def initialize_provenance(self, recipe_entity): """Initialize task provenance activity.""" if self.activity is not None: raise ValueError( - "Provenance of {} already initialized".format(self)) + "Provenance of {} already initialized".format(self) + ) self.activity = get_task_provenance(self, recipe_entity) def flatten(self): @@ -258,13 +280,18 @@ def run(self, input_files=None): input_files = [] for task in self.ancestors: input_files.extend(task.run()) - logger.info("Starting task %s in process [%s]", self.name, - os.getpid()) + logger.info( + "Starting task %s in process [%s]", self.name, os.getpid() + ) start = datetime.datetime.now() self.output_files = self._run(input_files) runtime = datetime.datetime.now() - start - logger.info("Successfully completed task %s (priority %s) in %s", - self.name, self.priority, runtime) + logger.info( + "Successfully completed task %s (priority %s) in %s", + self.name, + self.priority, + runtime, + ) return self.output_files @@ -281,9 +308,14 @@ def get_product_attributes(self) -> dict: def print_ancestors(self): """Return a nicely formatted description.""" - txt = 'ancestors:\n{}'.format('\n\n'.join( - textwrap.indent(str(task), prefix=' ') - for task in self.ancestors) if self.ancestors else 'None') + txt = "ancestors:\n{}".format( + "\n\n".join( + textwrap.indent(str(task), prefix=" ") + for task in self.ancestors + ) + if self.ancestors + else "None" + ) return txt def __repr__(self): @@ -297,11 +329,11 @@ class ResumeTask(BaseTask): def __init__(self, prev_preproc_dir, preproc_dir, name): """Create a resume task.""" # Set the path to the file resulting from running this task - self._metadata_file = preproc_dir / 'metadata.yml' + self._metadata_file = preproc_dir / "metadata.yml" # Reconstruct output - prev_metadata_file = prev_preproc_dir / 'metadata.yml' - with prev_metadata_file.open('r', encoding='utf-8') as file: + prev_metadata_file = prev_preproc_dir / "metadata.yml" + with prev_metadata_file.open("r", encoding="utf-8") as file: prev_metadata = yaml.safe_load(file) products = set() @@ -309,10 +341,10 @@ def __init__(self, prev_preproc_dir, 
preproc_dir, name): # Update the filename in case the output directory was moved # since the original run filename = str(prev_preproc_dir / Path(prov_filename).name) - attributes['filename'] = filename - product = TrackedFile(filename, - attributes, - prov_filename=prov_filename) + attributes["filename"] = filename + product = TrackedFile( + filename, attributes, prov_filename=prov_filename + ) products.add(product) super().__init__(ancestors=None, name=name, products=products) @@ -323,7 +355,7 @@ def _run(self, _): # Write metadata to file self._metadata_file.parent.mkdir(parents=True) - with self._metadata_file.open('w', encoding='utf-8') as file: + with self._metadata_file.open("w", encoding="utf-8") as file: yaml.safe_dump(metadata, file) return [str(self._metadata_file)] @@ -336,7 +368,7 @@ class DiagnosticError(Exception): class DiagnosticTask(BaseTask): """Task for running a diagnostic.""" - def __init__(self, script, settings, output_dir, ancestors=None, name=''): + def __init__(self, script, settings, output_dir, ancestors=None, name=""): """Create a diagnostic task.""" super().__init__(ancestors=ancestors, name=name) self.script = script @@ -344,8 +376,8 @@ def __init__(self, script, settings, output_dir, ancestors=None, name=''): self.output_dir = output_dir self.cmd = self._initialize_cmd() self.env = self._initialize_env() - self.log = Path(settings['run_dir']) / 'log.txt' - self.resource_log = Path(settings['run_dir']) / 'resource_usage.txt' + self.log = Path(settings["run_dir"]) / "log.txt" + self.resource_log = Path(settings["run_dir"]) / "resource_usage.txt" def _initialize_cmd(self): """Create an executable command from script.""" @@ -360,32 +392,34 @@ def _initialize_cmd(self): cmd = [] interpreters = { - 'jl': 'julia', - 'ncl': 'ncl', - 'py': 'python', - 'r': 'Rscript', + "jl": "julia", + "ncl": "ncl", + "py": "python", + "r": "Rscript", } args = { - 'ncl': ['-n', '-p'], + "ncl": ["-n", "-p"], } - if self.settings['profile_diagnostic']: - profile_file = Path(self.settings['run_dir'], 'profile.json') - args['py'] = ['-m', 'vprof', '-o', str(profile_file), '-c', 'c'] + if self.settings["profile_diagnostic"]: + profile_file = Path(self.settings["run_dir"], "profile.json") + args["py"] = ["-m", "vprof", "-o", str(profile_file), "-c", "c"] ext = script_file.suffix.lower()[1:] if ext in interpreters: - if ext == 'py' and sys.executable: + if ext == "py" and sys.executable: interpreter = sys.executable else: interpreter = which(interpreters[ext]) if interpreter is None: raise DiagnosticError( - f"{err_msg}: program '{interpreters[ext]}' not installed.") + f"{err_msg}: program '{interpreters[ext]}' not installed." + ) cmd.append(interpreter) elif not os.access(script_file, os.X_OK): raise DiagnosticError( f"{err_msg}: non-executable file with unknown extension " - f"'{script_file.suffix}'.") + f"'{script_file.suffix}'." 
+ ) cmd.extend(args.get(ext, [])) cmd.append(str(script_file)) @@ -396,36 +430,38 @@ def _initialize_env(self): """Create an environment for executing script.""" ext = Path(self.script).suffix.lower() env = {} - if ext in ('.py', '.jl'): + if ext in (".py", ".jl"): # Set non-interactive matplotlib backend - env['MPLBACKEND'] = 'Agg' - if ext in ('.r', '.ncl'): + env["MPLBACKEND"] = "Agg" + if ext in (".r", ".ncl"): # Make diag_scripts path available to diagnostic script - env['diag_scripts'] = str(DIAGNOSTICS.scripts) - if ext == '.jl': + env["diag_scripts"] = str(DIAGNOSTICS.scripts) + if ext == ".jl": # Set the julia virtual environment - env['JULIA_LOAD_PATH'] = "{}:{}".format( - DIAGNOSTICS.path / 'install' / 'Julia', - os.environ.get('JULIA_LOAD_PATH', ''), + env["JULIA_LOAD_PATH"] = "{}:{}".format( + DIAGNOSTICS.path / "install" / "Julia", + os.environ.get("JULIA_LOAD_PATH", ""), ) return env def write_settings(self): """Write settings to file.""" - run_dir = Path(self.settings['run_dir']) + run_dir = Path(self.settings["run_dir"]) run_dir.mkdir(parents=True, exist_ok=True) # ignore some settings for diagnostic - ignore_settings = ['profile_diagnostic', ] + ignore_settings = [ + "profile_diagnostic", + ] for sett in ignore_settings: settings_copy = dict(self.settings) settings_copy.pop(sett, None) - filename = run_dir / 'settings.yml' - filename.write_text(yaml.safe_dump(settings_copy)) + filename = run_dir / "settings.yml" + filename.write_text(yaml.safe_dump(settings_copy, sort_keys=False)) # If running an NCL script: - if Path(self.script).suffix.lower() == '.ncl': + if Path(self.script).suffix.lower() == ".ncl": # Also write an NCL file and return the name of that instead. return self._write_ncl_settings() @@ -433,21 +469,21 @@ def _write_ncl_settings(self): """Write settings to NCL file.""" - filename = Path(self.settings['run_dir']) / 'settings.ncl' + filename = Path(self.settings["run_dir"]) / "settings.ncl" config_user_keys = { - 'run_dir', - 'plot_dir', - 'work_dir', - 'output_file_type', - 'log_level', + "run_dir", + "plot_dir", + "work_dir", + "output_file_type", + "log_level", } - settings = {'diag_script_info': {}, 'config_user_info': {}} + settings = {"diag_script_info": {}, "config_user_info": {}} for key, value in self.settings.items(): if key in config_user_keys: - settings['config_user_info'][key] = value + settings["config_user_info"][key] = value elif not isinstance(value, dict): - settings['diag_script_info'][key] = value + settings["diag_script_info"][key] = value else: settings[key] = value @@ -463,21 +499,23 @@ def _control_ncl_execution(self, process, lines): """ ignore_warnings = [ warning.strip() - for warning in self.settings.get('ignore_ncl_warnings', []) + for warning in self.settings.get("ignore_ncl_warnings", []) ] - errors = ['error:', 'fatal:'] - if self.settings['exit_on_ncl_warning']: - errors.append('warning:') + errors = ["error:", "fatal:"] + if self.settings["exit_on_ncl_warning"]: + errors.append("warning:") - msg = ("An error occurred during execution of NCL script {}, " - "see the log in {}".format(self.script, self.log)) + msg = ( + "An error occurred during execution of NCL script {}, " + "see the log in {}".format(self.script, self.log) + ) warned = False for line in lines: if line.strip() in ignore_warnings: continue - if 'warning:' in line: + if "warning:" in line: logger.warning("NCL: %s", line) warned = True for error in errors: @@ -495,26 +533,29 @@ if warned:
logger.warning( "There were warnings during the execution of NCL script %s, " - "for details, see the log %s", self.script, self.log) + "for details, see the log %s", + self.script, + self.log, + ) def _start_diagnostic_script(self, cmd, env): """Start the diagnostic script.""" logger.info("Running command %s", cmd) logger.debug("in environment\n%s", pprint.pformat(env)) - cwd = self.settings['run_dir'] + cwd = self.settings["run_dir"] logger.debug("in current working directory: %s", cwd) logger.info("Writing output to %s", self.output_dir) - logger.info("Writing plots to %s", self.settings['plot_dir']) + logger.info("Writing plots to %s", self.settings["plot_dir"]) logger.info("Writing log to %s", self.log) - rerun_msg = 'cd {}; '.format(cwd) + rerun_msg = "cd {}; ".format(cwd) if env: - rerun_msg += ' '.join('{}="{}"'.format(k, env[k]) for k in env) + rerun_msg += " ".join('{}="{}"'.format(k, env[k]) for k in env) if "vprof" in cmd: script_args = ' "' + cmd[-1] + '"' - rerun_msg += ' ' + ' '.join(cmd[:-1]) + script_args + rerun_msg += " " + " ".join(cmd[:-1]) + script_args else: - rerun_msg += ' ' + ' '.join(cmd) + rerun_msg += " " + " ".join(cmd) logger.info("To re-run this diagnostic script, run:\n%s", rerun_msg) complete_env = dict(os.environ) @@ -538,26 +579,28 @@ def _run(self, input_files): return output_files ext = Path(self.script).suffix.lower() - if ext == '.ncl': - self.settings['input_files'] = [ - f for f in input_files - if f.endswith('.ncl') or os.path.isdir(f) + if ext == ".ncl": + self.settings["input_files"] = [ + f + for f in input_files + if f.endswith(".ncl") or os.path.isdir(f) ] else: - self.settings['input_files'] = [ - f for f in input_files - if f.endswith('.yml') or os.path.isdir(f) + self.settings["input_files"] = [ + f + for f in input_files + if f.endswith(".yml") or os.path.isdir(f) ] env = dict(self.env) cmd = list(self.cmd) settings_file = self.write_settings() - if ext == '.ncl': - env['settings'] = settings_file + if ext == ".ncl": + env["settings"] = settings_file else: - if self.settings['profile_diagnostic']: + if self.settings["profile_diagnostic"]: script_file = cmd.pop() - combo_with_settings = script_file + ' ' + str(settings_file) + combo_with_settings = script_file + " " + str(settings_file) cmd.append(combo_with_settings) else: cmd.append(settings_file) @@ -566,9 +609,10 @@ def _run(self, input_files): returncode = None - with resource_usage_logger(process.pid, self.resource_log), \ - open(self.log, 'ab') as log: - last_line = [''] + with resource_usage_logger(process.pid, self.resource_log), open( + self.log, "ab" + ) as log: + last_line = [""] while returncode is None: returncode = process.poll() txt = process.stdout.read() @@ -577,9 +621,9 @@ def _run(self, input_files): # Check if an error occurred in an NCL script # Last line is treated separately to avoid missing # error messages spread out over multiple lines. - if ext == '.ncl': - txt = txt.decode(encoding='utf-8', errors='ignore') - lines = txt.split('\n') + if ext == ".ncl": + txt = txt.decode(encoding="utf-8", errors="ignore") + lines = txt.split("\n") self._control_ncl_execution(process, last_line + lines) last_line = lines[-1:] @@ -594,67 +638,79 @@ def _run(self, input_files): raise DiagnosticError( "Diagnostic script {} failed with return code {}. 
See the log " - "in {}".format(self.script, returncode, self.log)) + "in {}".format(self.script, returncode, self.log) + ) def _collect_provenance(self): """Process provenance information provided by the diagnostic script.""" - provenance_file = Path( - self.settings['run_dir']) / 'diagnostic_provenance.yml' + provenance_file = ( + Path(self.settings["run_dir"]) / "diagnostic_provenance.yml" + ) if not provenance_file.is_file(): logger.warning( "No provenance information was written to %s. Unable to " "record provenance for files created by diagnostic script %s " - "in task %s", provenance_file, self.script, self.name) + "in task %s", + provenance_file, + self.script, + self.name, + ) return logger.debug("Collecting provenance from %s", provenance_file) start = time.time() - table = yaml.safe_load(provenance_file.read_text(encoding='utf-8')) + table = yaml.safe_load(provenance_file.read_text(encoding="utf-8")) ignore = ( - 'auxiliary_data_dir', - 'exit_on_ncl_warning', - 'input_files', - 'log_level', - 'output_file_type', - 'plot_dir', - 'profile_diagnostic', - 'recipe', - 'run_dir', - 'version', - 'write_ncl_interface', - 'work_dir', + "auxiliary_data_dir", + "exit_on_ncl_warning", + "input_files", + "log_level", + "output_file_type", + "plot_dir", + "profile_diagnostic", + "recipe", + "run_dir", + "version", + "write_ncl_interface", + "work_dir", ) attrs = { - 'script_file': self.script, + "script_file": self.script, } for key in self.settings: if key not in ignore: attrs[key] = self.settings[key] ancestor_products = { - str(p.filename): p - for a in self.ancestors for p in a.products + str(p.filename): p for a in self.ancestors for p in a.products } valid = True for filename, attributes in table.items(): # copy to avoid updating other entries if file contains anchors attributes = deepcopy(attributes) - ancestor_files = attributes.pop('ancestors', []) + ancestor_files = attributes.pop("ancestors", []) if not ancestor_files: logger.warning( "No ancestor files specified for recording provenance of " - "%s, created by diagnostic script %s in task %s", filename, - self.script, self.name) + "%s, created by diagnostic script %s in task %s", + filename, + self.script, + self.name, + ) valid = False ancestors = set() if isinstance(ancestor_files, str): logger.warning( "Ancestor file(s) %s specified for recording provenance " "of %s, created by diagnostic script %s in task %s is " - "a string but should be a list of strings", ancestor_files, - filename, self.script, self.name) + "a string but should be a list of strings", + ancestor_files, + filename, + self.script, + self.name, + ) ancestor_files = [ancestor_files] for ancestor_file in ancestor_files: if ancestor_file in ancestor_products: @@ -664,8 +720,12 @@ def _collect_provenance(self): logger.warning( "Invalid ancestor file %s specified for recording " "provenance of %s, created by diagnostic script %s " - "in task %s", ancestor_file, filename, self.script, - self.name) + "in task %s", + ancestor_file, + filename, + self.script, + self.name, + ) attributes.update(deepcopy(attrs)) @@ -680,30 +740,37 @@ def _collect_provenance(self): if not valid: logger.warning( "Valid ancestor files for diagnostic script %s in task %s " - "are:\n%s", self.script, self.name, - '\n'.join(ancestor_products)) - logger.debug("Collecting provenance of task %s took %.1f seconds", - self.name, - time.time() - start) + "are:\n%s", + self.script, + self.name, + "\n".join(ancestor_products), + ) + logger.debug( + "Collecting provenance of task %s took %.1f seconds", + 
self.name, + time.time() - start, + ) def __repr__(self): """Get human readable description.""" settings_string = pprint.pformat(self.settings) - string = (f"{self.__class__.__name__}: {self.name}\n" - f"script: {self.script}\n" - f"settings:\n{settings_string}\n" - f"{self.print_ancestors()}\n") + string = ( + f"{self.__class__.__name__}: {self.name}\n" + f"script: {self.script}\n" + f"settings:\n{settings_string}\n" + f"{self.print_ancestors()}\n" + ) return string class TaskSet(set): """Container for tasks.""" - def flatten(self) -> 'TaskSet': + def flatten(self) -> "TaskSet": """Flatten the list of tasks.""" return TaskSet(t for task in self for t in task.flatten()) - def get_independent(self) -> 'TaskSet': + def get_independent(self) -> "TaskSet": """Return a set of independent tasks.""" independent_tasks = TaskSet() all_tasks = self.flatten() @@ -726,11 +793,13 @@ def run(self, max_parallel_tasks: Optional[int] = None) -> None: else: address = client.scheduler.address for task in self.flatten(): - if (isinstance(task, DiagnosticTask) - and Path(task.script).suffix.lower() == '.py'): + if ( + isinstance(task, DiagnosticTask) + and Path(task.script).suffix.lower() == ".py" + ): # Only insert the scheduler address if running a # Python script. - task.settings['scheduler_address'] = address + task.settings["scheduler_address"] = address if max_parallel_tasks == 1: self._run_sequential() @@ -757,8 +826,9 @@ def _run_parallel(self, scheduler_address, max_parallel_tasks): if max_parallel_tasks is None: max_parallel_tasks = os.cpu_count() max_parallel_tasks = min(max_parallel_tasks, n_tasks) - logger.info("Running %s tasks using %s processes", n_tasks, - max_parallel_tasks) + logger.info( + "Running %s tasks using %s processes", n_tasks, max_parallel_tasks + ) def done(task): """Assume a task is done if it not scheduled or running.""" @@ -771,8 +841,9 @@ def done(task): if len(running) >= max_parallel_tasks: break if all(done(t) for t in task.ancestors): - future = pool.apply_async(_run_task, - [task, scheduler_address]) + future = pool.apply_async( + _run_task, [task, scheduler_address] + ) running[task] = future scheduled.remove(task) @@ -792,8 +863,12 @@ def done(task): n_done = n_tasks - n_scheduled - n_running logger.info( "Progress: %s tasks running, %s tasks waiting for " - "ancestors, %s/%s done", n_running, n_scheduled, - n_done, n_tasks) + "ancestors, %s/%s done", + n_running, + n_scheduled, + n_done, + n_tasks, + ) logger.info("Successfully completed all tasks.") pool.close() diff --git a/esmvalcore/_version.py b/esmvalcore/_version.py index 42324d139c..cc288b59c4 100644 --- a/esmvalcore/_version.py +++ b/esmvalcore/_version.py @@ -1,4 +1,5 @@ """Define the ESMValCore version.""" + from importlib.metadata import PackageNotFoundError, version try: @@ -6,4 +7,5 @@ except PackageNotFoundError as exc: raise PackageNotFoundError( "ESMValCore package not found, please run `pip install -e .` before " - "importing the package.") from exc + "importing the package." 
+ ) from exc diff --git a/esmvalcore/cmor/_fixes/access/__init__.py b/esmvalcore/cmor/_fixes/access/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/esmvalcore/cmor/_fixes/access/_base_fix.py b/esmvalcore/cmor/_fixes/access/_base_fix.py new file mode 100644 index 0000000000..e34e2428d3 --- /dev/null +++ b/esmvalcore/cmor/_fixes/access/_base_fix.py @@ -0,0 +1,28 @@ +"""Fix base classes for ACCESS-ESM on-the-fly CMORizer.""" + +import logging + +from iris.cube import CubeList + +from esmvalcore.cmor._fixes.native_datasets import NativeDatasetFix + +logger = logging.getLogger(__name__) + + +class AccessFix(NativeDatasetFix): + """Common fix functions for ACCESS-ESM.""" + + def fix_coord_system(self, cube): + """Delete coord_system so that the CubeList can be merged.""" + for dim in cube.dim_coords: + if dim.coord_system is not None: + cube.coord(dim.standard_name).coord_system = None + + def get_cubes_from_multivar(self, cubes): + """Get the cubes needed for a multi-variable calculation.""" + name_list = self.extra_facets.get("raw_name", self.vardef.short_name) + + data_list = [] + for name in name_list: + data_list.append(self.get_cube(cubes, name)) + return CubeList(data_list) diff --git a/esmvalcore/cmor/_fixes/access/access_esm1_5.py b/esmvalcore/cmor/_fixes/access/access_esm1_5.py new file mode 100644 index 0000000000..6ce4226cee --- /dev/null +++ b/esmvalcore/cmor/_fixes/access/access_esm1_5.py @@ -0,0 +1,128 @@ +"""On-the-fly CMORizer for ACCESS-ESM.""" + +import logging + +from iris.cube import CubeList + +from ._base_fix import AccessFix + +logger = logging.getLogger(__name__) + + +class AllVars(AccessFix): + """Fixes for all variables.""" + + def fix_metadata(self, cubes): + """Fix metadata. + + Parameters + ---------- + cubes : iris.cube.CubeList + Input cubes. + + Returns + ------- + iris.cube.CubeList + """ + if len(cubes) == 1: + cube = cubes[0] + else: + cube = self.get_cube(cubes) + + # Fix coordinates + self.fix_scalar_coords(cube) + self.fix_var_metadata(cube) + self.fix_lon_metadata(cube) + self.fix_lat_metadata(cube) + + # Fix coordinate 'height' + if "height_0" in [var.var_name for var in cube.coords()]: + self.fix_height_metadata(cube) + # Fix coordinate 'pressure' + if "pressure" in [var.var_name for var in cube.coords()]: + self.fix_plev_metadata(cube, coord="pressure") + + # Fix coord system + self.fix_coord_system(cube) + + return CubeList([cube]) + + +class Rlus(AccessFix): + """Fixes for Rlus.""" + + def fix_rlus_data(self, cubes): + """Fix rlus data.""" + return cubes[0] - cubes[1] + cubes[2] - cubes[3] + + def fix_metadata(self, cubes): + """Fix metadata. + + Parameters + ---------- + cubes : iris.cube.CubeList + Input cubes. + + Returns + ------- + iris.cube.CubeList + """ + cubes = self.get_cubes_from_multivar(cubes) + + cube = self.fix_rlus_data(cubes) + + return CubeList([cube]) + + +class Rsus(AccessFix): + """Fixes for Rsus.""" + + def fix_rsus_data(self, cubes): + """Fix rsus data.""" + return cubes[0] - cubes[1] + + def fix_metadata(self, cubes): + """Fix metadata. + + Parameters + ---------- + cubes : iris.cube.CubeList + Input cubes. + + Returns + ------- + iris.cube.CubeList + """ + cubes = self.get_cubes_from_multivar(cubes) + + cube = self.fix_rsus_data(cubes) + + return CubeList([cube]) + + +class Tas(AccessFix): + """Fixes for Tas.""" + + def fix_metadata(self, cubes): + """Fix metadata. + + Parameters + ---------- + cubes : iris.cube.CubeList + Input cubes.
+ + Returns + ------- + iris.cube.CubeList + """ + cube = self.get_cube(cubes) + + self.fix_height_metadata(cube) + self.fix_height_value(cube) + + return CubeList([cube]) + + def fix_height_value(self, cube): + """Fix height value to make it comparable to other datasets.""" + if cube.coord("height").points[0] != 2: + cube.coord("height").points = [2] diff --git a/esmvalcore/cmor/_fixes/cesm/cesm2.py b/esmvalcore/cmor/_fixes/cesm/cesm2.py index 5b92121555..bf82ed1c8b 100644 --- a/esmvalcore/cmor/_fixes/cesm/cesm2.py +++ b/esmvalcore/cmor/_fixes/cesm/cesm2.py @@ -29,7 +29,7 @@ class AllVars(NativeDatasetFix): # Dictionary to map invalid units in the data to valid entries INVALID_UNITS = { - 'fraction': '1', + "fraction": "1", } def fix_metadata(self, cubes): @@ -68,16 +68,16 @@ def _fix_time(self, cube): """ # Only modify time points if data contains a time dimension, is monthly # data, and does not describe point measurements. - if not self.vardef.has_coord_with_standard_name('time'): + if not self.vardef.has_coord_with_standard_name("time"): return - if self.extra_facets['frequency'] != 'mon': + if self.extra_facets["frequency"] != "mon": return for cell_method in cube.cell_methods: - if 'point' in cell_method.method: + if "point" in cell_method.method: return # Fix time coordinate - time_coord = cube.coord('time') + time_coord = cube.coord("time") if time_coord.has_bounds(): time_coord.points = time_coord.core_bounds().mean(axis=-1) self.fix_regular_time(cube, coord=time_coord) diff --git a/esmvalcore/cmor/_fixes/cmip5/access1_0.py b/esmvalcore/cmor/_fixes/cmip5/access1_0.py index 4fa4da7554..acb8202a54 100644 --- a/esmvalcore/cmor/_fixes/cmip5/access1_0.py +++ b/esmvalcore/cmor/_fixes/cmip5/access1_0.py @@ -1,4 +1,5 @@ """Fixes for the ACCESS1-0 model.""" + import iris from cf_units import Unit @@ -27,14 +28,18 @@ def fix_metadata(self, cubes): """ for cube in cubes: try: - time = cube.coord('time') + time = cube.coord("time") except iris.exceptions.CoordinateNotFoundError: continue else: - if time.units.calendar == 'proleptic_gregorian': - time.convert_units(Unit("days since 1850-01-01", - calendar='proleptic_gregorian')) - time.units = Unit(time.units.name, 'gregorian') + if time.units.calendar == "proleptic_gregorian": + time.convert_units( + Unit( + "days since 1850-01-01", + calendar="proleptic_gregorian", + ) + ) + time.units = Unit(time.units.name, "gregorian") return cubes @@ -44,7 +49,7 @@ class Cl(ClFixHybridHeightCoord): def fix_metadata(self, cubes): """Remove attributes from ``vertical coordinate formula term: b(k)``. - Additionally add pressure level coordiante. + Additionally add pressure level coordinate.
Parameters ---------- @@ -58,6 +63,6 @@ def fix_metadata(self, cubes): """ cubes = super().fix_metadata(cubes) cube = self.get_cube_from_list(cubes) - coord = cube.coord(long_name='vertical coordinate formula term: b(k)') + coord = cube.coord(long_name="vertical coordinate formula term: b(k)") coord.attributes = {} return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/access1_3.py b/esmvalcore/cmor/_fixes/cmip5/access1_3.py index f026181660..193c0d87bd 100644 --- a/esmvalcore/cmor/_fixes/cmip5/access1_3.py +++ b/esmvalcore/cmor/_fixes/cmip5/access1_3.py @@ -1,11 +1,11 @@ """Fixes for ACCESS1-3 model.""" + import iris from cf_units import Unit from ..fix import Fix from .access1_0 import Cl as BaseCl - Cl = BaseCl @@ -29,12 +29,16 @@ def fix_metadata(self, cubes): """ for cube in cubes: try: - time = cube.coord('time') + time = cube.coord("time") except iris.exceptions.CoordinateNotFoundError: continue else: - if time.units.calendar == 'proleptic_gregorian': - time.convert_units(Unit("days since 1850-01-01", - calendar='proleptic_gregorian')) - time.units = Unit(time.units.name, 'gregorian') + if time.units.calendar == "proleptic_gregorian": + time.convert_units( + Unit( + "days since 1850-01-01", + calendar="proleptic_gregorian", + ) + ) + time.units = Unit(time.units.name, "gregorian") return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/bcc_csm1_1.py b/esmvalcore/cmor/_fixes/cmip5/bcc_csm1_1.py index 28593a9d21..d7921ea6cb 100644 --- a/esmvalcore/cmor/_fixes/cmip5/bcc_csm1_1.py +++ b/esmvalcore/cmor/_fixes/cmip5/bcc_csm1_1.py @@ -1,4 +1,5 @@ """Fixes for bcc-csm1-1.""" + from ..common import ClFixHybridPressureCoord, OceanFixGrid Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/bcc_csm1_1_m.py b/esmvalcore/cmor/_fixes/cmip5/bcc_csm1_1_m.py index 4794048f67..8191fce5bb 100644 --- a/esmvalcore/cmor/_fixes/cmip5/bcc_csm1_1_m.py +++ b/esmvalcore/cmor/_fixes/cmip5/bcc_csm1_1_m.py @@ -1,4 +1,5 @@ """Fixes for bcc-csm1-1-m.""" + from ..common import ClFixHybridPressureCoord, OceanFixGrid Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/bnu_esm.py b/esmvalcore/cmor/_fixes/cmip5/bnu_esm.py index e01639a306..90a8b74bb2 100644 --- a/esmvalcore/cmor/_fixes/cmip5/bnu_esm.py +++ b/esmvalcore/cmor/_fixes/cmip5/bnu_esm.py @@ -1,4 +1,5 @@ """Fixes for BNU-ESM model.""" + from cf_units import Unit from dask import array as da @@ -50,7 +51,7 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - self.get_cube_from_list(cubes).units = Unit('kg m-2 s-1') + self.get_cube_from_list(cubes).units = Unit("kg m-2 s-1") return cubes def fix_data(self, cube): @@ -94,7 +95,7 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - self.get_cube_from_list(cubes).units = Unit('1e-9') + self.get_cube_from_list(cubes).units = Unit("1e-9") return cubes def fix_data(self, cube): @@ -115,7 +116,7 @@ def fix_data(self, cube): """ metadata = cube.metadata - cube *= 29.0 / 16.0 * 1.e9 + cube *= 29.0 / 16.0 * 1.0e9 cube.metadata = metadata return cube @@ -139,7 +140,7 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - self.get_cube_from_list(cubes).units = Unit('1e-6') + self.get_cube_from_list(cubes).units = Unit("1e-6") return cubes def fix_data(self, cube): @@ -159,7 +160,7 @@ def fix_data(self, cube): """ metadata = cube.metadata - cube *= 29.0 / 44.0 * 1.e6 + cube *= 29.0 / 44.0 * 1.0e6 cube.metadata = metadata return cube @@ -184,7 +185,7 @@ def fix_data(self, cube): """ metadata = cube.metadata - cube *= 1.e6 + cube *= 1.0e6 cube.metadata = metadata return 
cube @@ -208,7 +209,7 @@ def fix_data(self, cube): iris.cube.Cube """ - data = da.ma.masked_equal(cube.core_data(), 1.e36) + data = da.ma.masked_equal(cube.core_data(), 1.0e36) return cube.copy(data) diff --git a/esmvalcore/cmor/_fixes/cmip5/canesm2.py b/esmvalcore/cmor/_fixes/cmip5/canesm2.py index b445a4dc3a..7fb48b75ec 100644 --- a/esmvalcore/cmor/_fixes/cmip5/canesm2.py +++ b/esmvalcore/cmor/_fixes/cmip5/canesm2.py @@ -1,8 +1,8 @@ """Fixes for CanESM2 model.""" + from ..common import ClFixHybridPressureCoord from ..fix import Fix - Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/ccsm4.py b/esmvalcore/cmor/_fixes/cmip5/ccsm4.py index 573d32a608..5af0aae64d 100644 --- a/esmvalcore/cmor/_fixes/cmip5/ccsm4.py +++ b/esmvalcore/cmor/_fixes/cmip5/ccsm4.py @@ -27,7 +27,7 @@ def fix_data(self, cube): ------- iris.cube.Cube """ - cube.data = da.ma.masked_equal(cube.core_data(), 1.e33) + cube.data = da.ma.masked_equal(cube.core_data(), 1.0e33) return cube @@ -55,7 +55,7 @@ def fix_metadata(self, cubes): ------- iris.cube.CubeList """ - return round_coordinates(cubes, decimals=3, coord_names=['latitude']) + return round_coordinates(cubes, decimals=3, coord_names=["latitude"]) class So(Fix): @@ -75,5 +75,5 @@ def fix_metadata(self, cubes): ------- iris.cube.CubeList """ - self.get_cube_from_list(cubes).units = '1e3' + self.get_cube_from_list(cubes).units = "1e3" return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/cesm1_bgc.py b/esmvalcore/cmor/_fixes/cmip5/cesm1_bgc.py index 2e25296b8d..ddeac554dc 100644 --- a/esmvalcore/cmor/_fixes/cmip5/cesm1_bgc.py +++ b/esmvalcore/cmor/_fixes/cmip5/cesm1_bgc.py @@ -5,7 +5,6 @@ from ..fix import Fix from .cesm1_cam5 import Cl as BaseCl - Cl = BaseCl diff --git a/esmvalcore/cmor/_fixes/cmip5/cesm1_fastchem.py b/esmvalcore/cmor/_fixes/cmip5/cesm1_fastchem.py index d5ba1f7641..319b96d261 100644 --- a/esmvalcore/cmor/_fixes/cmip5/cesm1_fastchem.py +++ b/esmvalcore/cmor/_fixes/cmip5/cesm1_fastchem.py @@ -2,5 +2,4 @@ from .cesm1_cam5 import Cl as BaseCl - Cl = BaseCl diff --git a/esmvalcore/cmor/_fixes/cmip5/cesm1_waccm.py b/esmvalcore/cmor/_fixes/cmip5/cesm1_waccm.py index 818d14e4c2..86c5cbd349 100644 --- a/esmvalcore/cmor/_fixes/cmip5/cesm1_waccm.py +++ b/esmvalcore/cmor/_fixes/cmip5/cesm1_waccm.py @@ -2,5 +2,4 @@ from .cesm1_cam5 import Cl as BaseCl - Cl = BaseCl diff --git a/esmvalcore/cmor/_fixes/cmip5/cnrm_cm5.py b/esmvalcore/cmor/_fixes/cmip5/cnrm_cm5.py index 71e6547461..5fd6848129 100644 --- a/esmvalcore/cmor/_fixes/cmip5/cnrm_cm5.py +++ b/esmvalcore/cmor/_fixes/cmip5/cnrm_cm5.py @@ -1,5 +1,5 @@ - """Fixes for CNRM-CM5 model.""" + from ..fix import Fix diff --git a/esmvalcore/cmor/_fixes/cmip5/csiro_mk3_6_0.py b/esmvalcore/cmor/_fixes/cmip5/csiro_mk3_6_0.py index 4c0a5a7564..0135def226 100644 --- a/esmvalcore/cmor/_fixes/cmip5/csiro_mk3_6_0.py +++ b/esmvalcore/cmor/_fixes/cmip5/csiro_mk3_6_0.py @@ -1,5 +1,5 @@ """Fixes for CSIRO-Mk3-6-0 model.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/ec_earth.py b/esmvalcore/cmor/_fixes/cmip5/ec_earth.py index 1ef16df970..6830092698 100644 --- a/esmvalcore/cmor/_fixes/cmip5/ec_earth.py +++ b/esmvalcore/cmor/_fixes/cmip5/ec_earth.py @@ -1,4 +1,5 @@ """Fixes for EC-Earth model.""" + import iris import numpy as np from dask import array as da @@ -11,19 +12,18 @@ class Sic(Fix): """Fixes for sic.""" def fix_data(self, cube): - """ - Fix data. + """Fix data. 
Fixes discrepancy between declared units and real units Parameters ---------- cube: iris.cube.Cube + Cube to fix. Returns ------- iris.cube.Cube - """ metadata = cube.metadata cube *= 100 @@ -35,19 +35,18 @@ class Sftlf(Fix): """Fixes for sftlf.""" def fix_data(self, cube): - """ - Fix data. + """Fix data. Fixes discrepancy between declared units and real units Parameters ---------- cube: iris.cube.Cube + Cube to fix. Returns ------- iris.cube.Cube - """ metadata = cube.metadata cube *= 100 @@ -59,19 +58,18 @@ class Tos(Fix): """Fixes for tos.""" def fix_data(self, cube): - """ - Fix tos data. + """Fix tos data. Fixes mask Parameters ---------- cube: iris.cube.Cube + Cube to fix. Returns ------- iris.cube.Cube - """ cube.data = da.ma.masked_equal(cube.core_data(), 273.15) return cube @@ -81,8 +79,7 @@ class Tas(Fix): """Fixes for tas.""" def fix_metadata(self, cubes): - """ - Fix potentially missing scalar dimension. + """Fix potentially missing scalar dimension. Parameters ---------- @@ -92,15 +89,13 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.CubeList - """ - for cube in cubes: - if not cube.coords(var_name='height'): + if not cube.coords(var_name="height"): add_scalar_height_coord(cube) - if cube.coord('time').long_name is None: - cube.coord('time').long_name = 'time' + if cube.coord("time").long_name is None: + cube.coord("time").long_name = "time" return cubes @@ -109,8 +104,7 @@ class Areacello(Fix): """Fixes for areacello.""" def fix_metadata(self, cubes): - """ - Fix potentially missing scalar dimension. + """Fix potentially missing scalar dimension. Parameters ---------- @@ -120,16 +114,19 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.CubeList - """ - areacello = cubes.extract('Areas of grid cell')[0] - lat = cubes.extract('latitude')[0] - lon = cubes.extract('longitude')[0] + areacello = cubes.extract("Areas of grid cell")[0] + lat = cubes.extract("latitude")[0] + lon = cubes.extract("longitude")[0] areacello.add_aux_coord(cube_to_aux_coord(lat), (0, 1)) areacello.add_aux_coord(cube_to_aux_coord(lon), (0, 1)) - return iris.cube.CubeList([areacello, ]) + return iris.cube.CubeList( + [ + areacello, + ] + ) class Pr(Fix): @@ -150,56 +147,19 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.CubeList - """ new_list = iris.cube.CubeList() for cube in cubes: try: - old_time = cube.coord('time') + time_coord = cube.coord("time") except iris.exceptions.CoordinateNotFoundError: new_list.append(cube) else: - if old_time.is_monotonic(): + if time_coord.is_monotonic(): new_list.append(cube) else: - time_units = old_time.units - time_data = old_time.points - # erase erroneously copy-pasted points - time_diff = np.diff(time_data) - idx_neg = np.where(time_diff <= 0.)[0] - while len(idx_neg) > 0: - time_data = np.delete(time_data, idx_neg[0] + 1) - time_diff = np.diff(time_data) - idx_neg = np.where(time_diff <= 0.)[0] - - # create the new time coord - new_time = iris.coords.DimCoord(time_data, - standard_name='time', - var_name='time', - units=time_units) - - # create a new cube with the right shape - dims = (time_data.shape[0], - cube.coord('latitude').shape[0], - cube.coord('longitude').shape[0]) - data = cube.data - new_data = np.ma.append(data[:dims[0] - 1, :, :], - data[-1, :, :]) - new_data = new_data.reshape(dims) - - tmp_cube = iris.cube.Cube( - new_data, - standard_name=cube.standard_name, - long_name=cube.long_name, - var_name=cube.var_name, - units=cube.units, - attributes=cube.attributes, - cell_methods=cube.cell_methods, - 
dim_coords_and_dims=[(new_time, 0), - (cube.coord('latitude'), 1), - (cube.coord('longitude'), 2)]) - - new_list.append(tmp_cube) + select = np.unique(time_coord.points, return_index=True)[1] + new_list.append(cube[select]) return new_list diff --git a/esmvalcore/cmor/_fixes/cmip5/fgoals_g2.py b/esmvalcore/cmor/_fixes/cmip5/fgoals_g2.py index 1c2df64db0..5a9c93f680 100644 --- a/esmvalcore/cmor/_fixes/cmip5/fgoals_g2.py +++ b/esmvalcore/cmor/_fixes/cmip5/fgoals_g2.py @@ -1,4 +1,5 @@ """Fixes for FGOALS-g2 model.""" + import iris from cf_units import Unit @@ -27,12 +28,12 @@ def fix_metadata(self, cubes): """ for cube in cubes: try: - time = cube.coord('time') + time = cube.coord("time") except iris.exceptions.CoordinateNotFoundError: pass else: time.units = Unit(time.units.name, time.units.calendar) - round_coordinates(cubes, 4, coord_names=['longitude']) + round_coordinates(cubes, 4, coord_names=["longitude"]) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/fgoals_s2.py b/esmvalcore/cmor/_fixes/cmip5/fgoals_s2.py index c82151226f..7ba73c8f0e 100644 --- a/esmvalcore/cmor/_fixes/cmip5/fgoals_s2.py +++ b/esmvalcore/cmor/_fixes/cmip5/fgoals_s2.py @@ -1,4 +1,5 @@ """Fixes for FGOALS-s2 model.""" + import iris from ..fix import Fix @@ -24,7 +25,7 @@ def fix_metadata(self, cubes): """ for cube in cubes: try: - lat_coord = cube.coord('latitude') + lat_coord = cube.coord("latitude") except iris.exceptions.CoordinateNotFoundError: continue if lat_coord.ndim != 1: diff --git a/esmvalcore/cmor/_fixes/cmip5/fio_esm.py b/esmvalcore/cmor/_fixes/cmip5/fio_esm.py index cc9ed79421..56385419b4 100644 --- a/esmvalcore/cmor/_fixes/cmip5/fio_esm.py +++ b/esmvalcore/cmor/_fixes/cmip5/fio_esm.py @@ -1,9 +1,8 @@ - """Fixes for FIO ESM model.""" + from ..fix import Fix from .cesm1_cam5 import Cl as BaseCl - Cl = BaseCl @@ -26,7 +25,7 @@ def fix_data(self, cube): """ metadata = cube.metadata - cube *= 29. / 44. * 1.e6 + cube *= 29.0 / 44.0 * 1.0e6 cube.metadata = metadata return cube @@ -50,6 +49,6 @@ def fix_data(self, cube): """ metadata = cube.metadata - cube *= 29. / 16. 
* 1.e9 + cube *= 29.0 / 16.0 * 1.0e9 cube.metadata = metadata return cube diff --git a/esmvalcore/cmor/_fixes/cmip5/gfdl_cm2p1.py b/esmvalcore/cmor/_fixes/cmip5/gfdl_cm2p1.py index f7c9ba83e9..fef947c4be 100644 --- a/esmvalcore/cmor/_fixes/cmip5/gfdl_cm2p1.py +++ b/esmvalcore/cmor/_fixes/cmip5/gfdl_cm2p1.py @@ -1,4 +1,5 @@ """Fixes for GFDL CM2p1 model.""" + from copy import deepcopy import cftime @@ -34,7 +35,7 @@ def fix_metadata(self, cubes): iris.cube.Cube """ cube = self.get_cube_from_list(cubes) - cube.units = 'm2' + cube.units = "m2" return cubes @@ -77,13 +78,10 @@ def fix_metadata(self, cubes): iris.cube.Cube """ cube = self.get_cube_from_list(cubes) - time = cube.coord('time') + time = cube.coord("time") if self._fix_required(time): times = time.units.num2date(time.points) - starts = [ - cftime.DatetimeJulian(c.year, c.month, 1) - for c in times - ] + starts = [cftime.DatetimeJulian(c.year, c.month, 1) for c in times] ends = [ cftime.DatetimeJulian(c.year, c.month + 1, 1) if c.month < 12 @@ -94,9 +92,8 @@ def fix_metadata(self, cubes): return cubes def _fix_required(self, time): - return ( - self.vardef.frequency == 'mon' and - not (time.bounds[-1, 0] < time.points[-1] < time.bounds[-1, 1]) + return self.vardef.frequency == "mon" and not ( + time.bounds[-1, 0] < time.points[-1] < time.bounds[-1, 1] ) @@ -135,5 +132,5 @@ def fix_metadata(self, cubes): iris.cube.Cube """ cube = self.get_cube_from_list(cubes) - cube.standard_name = 'sea_surface_temperature' + cube.standard_name = "sea_surface_temperature" return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/gfdl_cm3.py b/esmvalcore/cmor/_fixes/cmip5/gfdl_cm3.py index bc1b3cf9b6..f2cf4b7101 100644 --- a/esmvalcore/cmor/_fixes/cmip5/gfdl_cm3.py +++ b/esmvalcore/cmor/_fixes/cmip5/gfdl_cm3.py @@ -1,7 +1,7 @@ """Fixes for GFDL CM3 model.""" -from ..fix import Fix from ..cmip5.gfdl_esm2g import AllVars as BaseAllVars +from ..fix import Fix class AllVars(BaseAllVars): @@ -27,7 +27,7 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - cube.units = 'm2' + cube.units = "m2" return cubes @@ -74,5 +74,5 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - cube.standard_name = 'sea_surface_temperature' + cube.standard_name = "sea_surface_temperature" return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/gfdl_esm2g.py b/esmvalcore/cmor/_fixes/cmip5/gfdl_esm2g.py index 3ba4a6dd83..f3722c9d07 100644 --- a/esmvalcore/cmor/_fixes/cmip5/gfdl_esm2g.py +++ b/esmvalcore/cmor/_fixes/cmip5/gfdl_esm2g.py @@ -1,4 +1,3 @@ - """Fixes for GFDL ESM2G.""" import iris @@ -31,9 +30,9 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - _get_and_remove(cubes, 'Start time for average period') - _get_and_remove(cubes, 'End time for average period') - _get_and_remove(cubes, 'Length of average period') + _get_and_remove(cubes, "Start time for average period") + _get_and_remove(cubes, "End time for average period") + _get_and_remove(cubes, "Length of average period") return cubes @@ -56,7 +55,7 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - cube.units = 'm2' + cube.units = "m2" return cubes @@ -100,8 +99,8 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - _get_and_remove(cubes, 'Latitude of tracer (h) points') - _get_and_remove(cubes, 'Longitude of tracer (h) points') + _get_and_remove(cubes, "Latitude of tracer (h) points") + _get_and_remove(cubes, "Longitude of tracer (h) points") return cubes @@ -122,7 +121,7 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ cube 
= self.get_cube_from_list(cubes) - cube.standard_name = 'sea_ice_x_velocity' + cube.standard_name = "sea_ice_x_velocity" return cubes @@ -143,5 +142,5 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ cube = self.get_cube_from_list(cubes) - cube.standard_name = 'sea_ice_y_velocity' + cube.standard_name = "sea_ice_y_velocity" return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/gfdl_esm2m.py b/esmvalcore/cmor/_fixes/cmip5/gfdl_esm2m.py index 7722118e48..e358bf525c 100644 --- a/esmvalcore/cmor/_fixes/cmip5/gfdl_esm2m.py +++ b/esmvalcore/cmor/_fixes/cmip5/gfdl_esm2m.py @@ -1,8 +1,7 @@ - """Fixes for GFDL ESM2M.""" -from ..fix import Fix from ..cmip5.gfdl_esm2g import AllVars as BaseAllVars +from ..fix import Fix class AllVars(BaseAllVars): @@ -28,7 +27,7 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - cube.units = 'm2' + cube.units = "m2" return cubes @@ -99,5 +98,5 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - cube.standard_name = 'sea_surface_temperature' + cube.standard_name = "sea_surface_temperature" return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/giss_e2_h.py b/esmvalcore/cmor/_fixes/cmip5/giss_e2_h.py index 5ee8c8aff1..4329bbc18f 100644 --- a/esmvalcore/cmor/_fixes/cmip5/giss_e2_h.py +++ b/esmvalcore/cmor/_fixes/cmip5/giss_e2_h.py @@ -1,5 +1,5 @@ """Fixes for GISS-E2-H.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/giss_e2_r.py b/esmvalcore/cmor/_fixes/cmip5/giss_e2_r.py index 0d538e7960..945f941c45 100644 --- a/esmvalcore/cmor/_fixes/cmip5/giss_e2_r.py +++ b/esmvalcore/cmor/_fixes/cmip5/giss_e2_r.py @@ -1,5 +1,5 @@ """Fixes for GISS-E2-R.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/hadgem2_cc.py b/esmvalcore/cmor/_fixes/cmip5/hadgem2_cc.py index 3531f78f06..50dde42430 100644 --- a/esmvalcore/cmor/_fixes/cmip5/hadgem2_cc.py +++ b/esmvalcore/cmor/_fixes/cmip5/hadgem2_cc.py @@ -1,8 +1,8 @@ """Fix HadGEM2_CC.""" + from ..fix import Fix from .hadgem2_es import AllVars as BaseAllVars - AllVars = BaseAllVars @@ -22,8 +22,8 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - std = 'mole_concentration_of_dissolved_molecular_oxygen_in_sea_water' - long_name = 'Dissolved Oxygen Concentration' + std = "mole_concentration_of_dissolved_molecular_oxygen_in_sea_water" + long_name = "Dissolved Oxygen Concentration" cubes[0].long_name = long_name cubes[0].standard_name = std diff --git a/esmvalcore/cmor/_fixes/cmip5/hadgem2_es.py b/esmvalcore/cmor/_fixes/cmip5/hadgem2_es.py index f7360dae2b..b5df822c81 100644 --- a/esmvalcore/cmor/_fixes/cmip5/hadgem2_es.py +++ b/esmvalcore/cmor/_fixes/cmip5/hadgem2_es.py @@ -1,4 +1,5 @@ """Fix HadGEM2_ES.""" + import numpy as np from ..common import ClFixHybridHeightCoord @@ -22,13 +23,13 @@ def fix_metadata(self, cubes): """ for cube in cubes: - lats = cube.coords('latitude') + lats = cube.coords("latitude") if lats: - lat = cube.coord('latitude') - lat.points = np.clip(lat.core_points(), -90., 90.) + lat = cube.coord("latitude") + lat.points = np.clip(lat.core_points(), -90.0, 90.0) if not lat.has_bounds(): lat.guess_bounds() - lat.bounds = np.clip(lat.core_bounds(), -90., 90.) 
+ lat.bounds = np.clip(lat.core_bounds(), -90.0, 90.0) return cubes @@ -52,8 +53,8 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - std = 'mole_concentration_of_dissolved_molecular_oxygen_in_sea_water' - long_name = 'Dissolved Oxygen Concentration' + std = "mole_concentration_of_dissolved_molecular_oxygen_in_sea_water" + long_name = "Dissolved Oxygen Concentration" cubes[0].long_name = long_name cubes[0].standard_name = std diff --git a/esmvalcore/cmor/_fixes/cmip5/inmcm4.py b/esmvalcore/cmor/_fixes/cmip5/inmcm4.py index e89cea019d..ffc804a712 100644 --- a/esmvalcore/cmor/_fixes/cmip5/inmcm4.py +++ b/esmvalcore/cmor/_fixes/cmip5/inmcm4.py @@ -1,10 +1,10 @@ """Fixes for inmcm4 model.""" + import iris from ..common import ClFixHybridPressureCoord from ..fix import Fix - Cl = ClFixHybridPressureCoord @@ -76,8 +76,8 @@ def fix_metadata(self, cubes): """ cubes[0].standard_name = ( - 'surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_' - 'carbon_due_to_all_land_processes' + "surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_" + "carbon_due_to_all_land_processes" ) return cubes @@ -100,12 +100,13 @@ def fix_metadata(self, cubes): """ typebare = iris.coords.AuxCoord( - 'bare_ground', - standard_name='area_type', - long_name='surface type', - var_name='type', - units='1', - bounds=None) + "bare_ground", + standard_name="area_type", + long_name="surface type", + var_name="type", + units="1", + bounds=None, + ) for cube in cubes: cube.add_aux_coord(typebare) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5a_lr.py b/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5a_lr.py index 894af45317..b4f4c3984a 100644 --- a/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5a_lr.py +++ b/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5a_lr.py @@ -1,5 +1,5 @@ """Fixes for IPSL-CM5A-LR model.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5a_mr.py b/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5a_mr.py index 893b9779c2..611e4389b8 100644 --- a/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5a_mr.py +++ b/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5a_mr.py @@ -1,5 +1,5 @@ """Fixes for IPSL-CM5A-MR model.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5b_lr.py b/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5b_lr.py index 03bed332d6..f0979acdfd 100644 --- a/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5b_lr.py +++ b/esmvalcore/cmor/_fixes/cmip5/ipsl_cm5b_lr.py @@ -1,5 +1,5 @@ """Fixes for IPSL-CM5B-LR model.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/miroc5.py b/esmvalcore/cmor/_fixes/cmip5/miroc5.py index ec0e009402..6a91e6fc00 100644 --- a/esmvalcore/cmor/_fixes/cmip5/miroc5.py +++ b/esmvalcore/cmor/_fixes/cmip5/miroc5.py @@ -1,11 +1,11 @@ """Fixes for MIROC5 model.""" + from dask import array as da from ..common import ClFixHybridPressureCoord from ..fix import Fix from ..shared import round_coordinates - Cl = ClFixHybridPressureCoord @@ -111,7 +111,7 @@ def fix_data(self, cube): iris.cube.Cube """ - cube.data = da.ma.masked_equal(cube.core_data(), 0.) + cube.data = da.ma.masked_equal(cube.core_data(), 0.0) return cube @@ -161,7 +161,7 @@ def fix_data(self, cube): iris.cube.Cube """ - cube.data = da.ma.masked_equal(cube.core_data(), 0.) 
+ cube.data = da.ma.masked_equal(cube.core_data(), 0.0) return cube diff --git a/esmvalcore/cmor/_fixes/cmip5/miroc_esm.py b/esmvalcore/cmor/_fixes/cmip5/miroc_esm.py index fc94bc45cc..440cbc101b 100644 --- a/esmvalcore/cmor/_fixes/cmip5/miroc_esm.py +++ b/esmvalcore/cmor/_fixes/cmip5/miroc_esm.py @@ -55,7 +55,7 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - self.get_cube_from_list(cubes).units = '1.0e-6' + self.get_cube_from_list(cubes).units = "1.0e-6" return cubes @@ -82,34 +82,35 @@ def fix_metadata(self, cubes): for cube in cubes: # Fix air_pressure try: - old = cube.coord('AR5PL35') + old = cube.coord("AR5PL35") dims = cube.coord_dims(old) cube.remove_coord(old) plev = DimCoord.from_coord(old) - plev.var_name = 'plev' - plev.standard_name = 'air_pressure' - plev.long_name = 'pressure' + plev.var_name = "plev" + plev.standard_name = "air_pressure" + plev.long_name = "pressure" cube.add_dim_coord(plev, dims) except CoordinateNotFoundError: pass # Fix time for files that contain year < 1 (which is not allowed) - if cube.coords('time'): - expected_time_units = Unit('days since 1950-1-1 00:00:00', - calendar='gregorian') - if cube.coord('time').units != expected_time_units: + if cube.coords("time"): + expected_time_units = Unit( + "days since 1950-1-1 00:00:00", calendar="gregorian" + ) + if cube.coord("time").units != expected_time_units: continue - if not cube.coord('time').has_bounds(): + if not cube.coord("time").has_bounds(): continue # Only apply fix if there is a year < 1 in the first element # of the time bounds (-711860.5 days from 1950-01-01 is < # year 1) - if np.isclose(cube.coord('time').bounds[0][0], -711860.5): - new_points = cube.coord('time').points.copy() + 3.5 - new_bounds = cube.coord('time').bounds.copy() + 3.5 - cube.coord('time').points = new_points - cube.coord('time').bounds = new_bounds + if np.isclose(cube.coord("time").bounds[0][0], -711860.5): + new_points = cube.coord("time").points.copy() + 3.5 + new_bounds = cube.coord("time").bounds.copy() + 3.5 + cube.coord("time").points = new_points + cube.coord("time").bounds = new_bounds return cubes diff --git a/esmvalcore/cmor/_fixes/cmip5/miroc_esm_chem.py b/esmvalcore/cmor/_fixes/cmip5/miroc_esm_chem.py index 83d6e28f20..240fc9c80a 100644 --- a/esmvalcore/cmor/_fixes/cmip5/miroc_esm_chem.py +++ b/esmvalcore/cmor/_fixes/cmip5/miroc_esm_chem.py @@ -1,5 +1,5 @@ - """Fixes for MIROC ESM CHEM model.""" + from ..fix import Fix diff --git a/esmvalcore/cmor/_fixes/cmip5/mpi_esm_lr.py b/esmvalcore/cmor/_fixes/cmip5/mpi_esm_lr.py index a24aebd499..a451a05659 100644 --- a/esmvalcore/cmor/_fixes/cmip5/mpi_esm_lr.py +++ b/esmvalcore/cmor/_fixes/cmip5/mpi_esm_lr.py @@ -1,8 +1,8 @@ """Fixes for MPI-ESM-LR model.""" + from ..common import ClFixHybridPressureCoord from ..fix import Fix - Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/mpi_esm_mr.py b/esmvalcore/cmor/_fixes/cmip5/mpi_esm_mr.py index 92b1a6a06b..de8deef8bb 100644 --- a/esmvalcore/cmor/_fixes/cmip5/mpi_esm_mr.py +++ b/esmvalcore/cmor/_fixes/cmip5/mpi_esm_mr.py @@ -1,5 +1,5 @@ """Fixes for MPI-ESM-MR model.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/mpi_esm_p.py b/esmvalcore/cmor/_fixes/cmip5/mpi_esm_p.py index 303e1b0fd1..940d9838d0 100644 --- a/esmvalcore/cmor/_fixes/cmip5/mpi_esm_p.py +++ b/esmvalcore/cmor/_fixes/cmip5/mpi_esm_p.py @@ -1,5 +1,5 @@ """Fixes for MPI-ESM-P model.""" -from ..common import 
ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/mri_cgcm3.py b/esmvalcore/cmor/_fixes/cmip5/mri_cgcm3.py index a21b82eb7f..0e2d666416 100644 --- a/esmvalcore/cmor/_fixes/cmip5/mri_cgcm3.py +++ b/esmvalcore/cmor/_fixes/cmip5/mri_cgcm3.py @@ -1,10 +1,10 @@ """Fixes for MRI-CGCM3 model.""" + from dask import array as da from ..common import ClFixHybridPressureCoord from ..fix import Fix - Cl = ClFixHybridPressureCoord @@ -27,7 +27,7 @@ def fix_data(self, cube): iris.cube.Cube """ - cube.data = da.ma.masked_equal(cube.core_data(), 0.) + cube.data = da.ma.masked_equal(cube.core_data(), 0.0) return cube @@ -50,5 +50,5 @@ def fix_data(self, cube): iris.cube.Cube """ - cube.data = da.ma.masked_equal(cube.core_data(), 0.) + cube.data = da.ma.masked_equal(cube.core_data(), 0.0) return cube diff --git a/esmvalcore/cmor/_fixes/cmip5/mri_esm1.py b/esmvalcore/cmor/_fixes/cmip5/mri_esm1.py index 694e4cc33f..270dd3c252 100644 --- a/esmvalcore/cmor/_fixes/cmip5/mri_esm1.py +++ b/esmvalcore/cmor/_fixes/cmip5/mri_esm1.py @@ -1,5 +1,5 @@ - """Fixes for MRI-ESM1 model.""" + from dask import array as da from ..fix import Fix @@ -23,5 +23,5 @@ def fix_data(self, cube): iris.cube.Cube """ - cube.data = da.ma.masked_equal(cube.core_data(), 0.) + cube.data = da.ma.masked_equal(cube.core_data(), 0.0) return cube diff --git a/esmvalcore/cmor/_fixes/cmip5/noresm1_m.py b/esmvalcore/cmor/_fixes/cmip5/noresm1_m.py index 703ef054d7..9a7b9f79f7 100644 --- a/esmvalcore/cmor/_fixes/cmip5/noresm1_m.py +++ b/esmvalcore/cmor/_fixes/cmip5/noresm1_m.py @@ -1,5 +1,5 @@ """Fixes for NorESM1-M.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip5/noresm1_me.py b/esmvalcore/cmor/_fixes/cmip5/noresm1_me.py index 08fe028419..e44ad9ae1d 100644 --- a/esmvalcore/cmor/_fixes/cmip5/noresm1_me.py +++ b/esmvalcore/cmor/_fixes/cmip5/noresm1_me.py @@ -1,4 +1,5 @@ """Fixes for NorESM1-ME model.""" + from ..fix import Fix from ..shared import round_coordinates @@ -22,7 +23,7 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - return round_coordinates(cubes, 12, coord_names=['latitude']) + return round_coordinates(cubes, 12, coord_names=["latitude"]) class Tas(Fix): diff --git a/esmvalcore/cmor/_fixes/cmip6/access_cm2.py b/esmvalcore/cmor/_fixes/cmip6/access_cm2.py index 7627e9b3a4..a46f536486 100644 --- a/esmvalcore/cmor/_fixes/cmip6/access_cm2.py +++ b/esmvalcore/cmor/_fixes/cmip6/access_cm2.py @@ -1,4 +1,5 @@ """Fixes for ACCESS-CM2.""" + import iris from ..common import ClFixHybridHeightCoord @@ -22,30 +23,96 @@ def fix_metadata(self, cubes): """ for cube in cubes: try: - bcoeff = cube.coord(var_name='b') + bcoeff = cube.coord(var_name="b") # values taken from UK-ESM1-0-LL model, which uses the same # atmospheric component as ACCESS-CM2 (MetUM-HadGEM3-GA7.1, # N96 (192x144), 85 vertical levels, top = 85 km) bcoeff.points = [ - 0.997741281986237, 0.993982434272766, 0.988731920719147, - 0.982001721858978, 0.973807096481323, 0.964166879653931, - 0.953103065490723, 0.940641283988953, 0.926810503005981, - 0.911642968654633, 0.895174443721771, 0.877444267272949, - 0.858494758605957, 0.838372051715851, 0.81712543964386, - 0.7948077917099, 0.77147513628006, 0.747187197208405, - 0.722006916999817, 0.696000635623932, 0.669238269329071, - 0.641793012619019, 0.613741397857666, 0.585163474082947, - 0.556142747402191, 0.526765942573547, 
0.49712336063385, - 0.467308610677719, 0.437418729066849, 0.40755420923233, - 0.377818822860718, 0.348319888114929, 0.319168090820312, - 0.290477395057678, 0.262365132570267, 0.234952658414841, - 0.20836341381073, 0.182725623250008, 0.158169254660606, - 0.134828746318817, 0.112841464579105, 0.0923482477664948, - 0.0734933465719223, 0.0564245767891407, 0.041294027119875, - 0.028257654979825, 0.0174774676561356, 0.00912047084420919, - 0.00336169824004173, 0.000384818413294852, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0.997741281986237, + 0.993982434272766, + 0.988731920719147, + 0.982001721858978, + 0.973807096481323, + 0.964166879653931, + 0.953103065490723, + 0.940641283988953, + 0.926810503005981, + 0.911642968654633, + 0.895174443721771, + 0.877444267272949, + 0.858494758605957, + 0.838372051715851, + 0.81712543964386, + 0.7948077917099, + 0.77147513628006, + 0.747187197208405, + 0.722006916999817, + 0.696000635623932, + 0.669238269329071, + 0.641793012619019, + 0.613741397857666, + 0.585163474082947, + 0.556142747402191, + 0.526765942573547, + 0.49712336063385, + 0.467308610677719, + 0.437418729066849, + 0.40755420923233, + 0.377818822860718, + 0.348319888114929, + 0.319168090820312, + 0.290477395057678, + 0.262365132570267, + 0.234952658414841, + 0.20836341381073, + 0.182725623250008, + 0.158169254660606, + 0.134828746318817, + 0.112841464579105, + 0.0923482477664948, + 0.0734933465719223, + 0.0564245767891407, + 0.041294027119875, + 0.028257654979825, + 0.0174774676561356, + 0.00912047084420919, + 0.00336169824004173, + 0.000384818413294852, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, ] bcoeff.bounds = [ [1, 0.995860934257507], @@ -98,11 +165,41 @@ def fix_metadata(self, cubes): [0.01296216994524, 0.00588912842795253], [0.00588912842795253, 0.00150532135739923], [0.00150532135739923, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], ] except iris.exceptions.CoordinateNotFoundError: pass diff --git a/esmvalcore/cmor/_fixes/cmip6/access_esm1_5.py b/esmvalcore/cmor/_fixes/cmip6/access_esm1_5.py index 7b0497e493..3691fe6eed 100644 --- a/esmvalcore/cmor/_fixes/cmip6/access_esm1_5.py +++ b/esmvalcore/cmor/_fixes/cmip6/access_esm1_5.py @@ -1,4 +1,5 @@ """Fixes for ACCESS-ESM1-5.""" + import iris import numpy as np @@ -20,25 +21,51 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.CubeList - """ for cube in cubes: try: - bcoeff = cube.coord(var_name='b') + bcoeff = cube.coord(var_name="b") # values taken from HadGEM2-ES model (CMIP5), which uses the # same atmospheric component as ACCESS-ESM1-5 (HadGAM2, N96L38) bcoeff.points = [ - 0.99771648645401, 0.990881502628326, 0.979542553424835, - 0.9637770652771, 0.943695485591888, 0.919438362121582, - 0.891178011894226, 0.859118342399597, 0.823493480682373, - 
0.784570515155792, 0.742646217346191, 0.698050200939178, - 0.651142716407776, 0.602314412593842, 0.55198872089386, - 0.500619947910309, 0.44869339466095, 0.39672577381134, - 0.34526526927948, 0.294891387224197, 0.24621507525444, - 0.199878215789795, 0.156554222106934, 0.116947874426842, - 0.0817952379584312, 0.0518637150526047, 0.0279368180781603, - 0.0107164792716503, 0.00130179093685001, - 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0.99771648645401, + 0.990881502628326, + 0.979542553424835, + 0.9637770652771, + 0.943695485591888, + 0.919438362121582, + 0.891178011894226, + 0.859118342399597, + 0.823493480682373, + 0.784570515155792, + 0.742646217346191, + 0.698050200939178, + 0.651142716407776, + 0.602314412593842, + 0.55198872089386, + 0.500619947910309, + 0.44869339466095, + 0.39672577381134, + 0.34526526927948, + 0.294891387224197, + 0.24621507525444, + 0.199878215789795, + 0.156554222106934, + 0.116947874426842, + 0.0817952379584312, + 0.0518637150526047, + 0.0279368180781603, + 0.0107164792716503, + 0.00130179093685001, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, ] bcoeff.bounds = [ [1, 0.994296252727509], @@ -70,8 +97,15 @@ def fix_metadata(self, cubes): [0.0389823913574219, 0.0183146875351667], [0.0183146875351667, 0.00487210927531123], [0.00487210927531123, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], ] except iris.exceptions.CoordinateNotFoundError: pass @@ -80,7 +114,6 @@ def fix_metadata(self, cubes): Cli = Cl - Clw = Cl @@ -98,13 +131,14 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.Cube - """ cube = self.get_cube_from_list(cubes) - cube.coord('air_pressure').points = \ - np.round(cube.coord('air_pressure').core_points(), 0) - cube.coord('air_pressure').bounds = \ - np.round(cube.coord('air_pressure').core_bounds(), 0) + cube.coord("air_pressure").points = np.round( + cube.coord("air_pressure").core_points(), 0 + ) + cube.coord("air_pressure").bounds = np.round( + cube.coord("air_pressure").core_bounds(), 0 + ) return cubes @@ -122,11 +156,12 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.Cube - """ cube = self.get_cube_from_list(cubes) - cube.coord('air_pressure').points = \ - np.round(cube.coord('air_pressure').points, 0) - cube.coord('air_pressure').bounds = \ - np.round(cube.coord('air_pressure').bounds, 0) + cube.coord("air_pressure").points = np.round( + cube.coord("air_pressure").core_points(), 0 + ) + cube.coord("air_pressure").bounds = np.round( + cube.coord("air_pressure").core_bounds(), 0 + ) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/awi_cm_1_1_mr.py b/esmvalcore/cmor/_fixes/cmip6/awi_cm_1_1_mr.py index 56d8dac0f8..0a1c3094dc 100644 --- a/esmvalcore/cmor/_fixes/cmip6/awi_cm_1_1_mr.py +++ b/esmvalcore/cmor/_fixes/cmip6/awi_cm_1_1_mr.py @@ -19,11 +19,11 @@ def fix_metadata(self, cubes): iris.cube.Cube """ coords_longnames_to_change = { - 'latitude': 'latitude', + "latitude": "latitude", } for cube in cubes: - for (std_name, long_name) in coords_longnames_to_change.items(): + for std_name, long_name in coords_longnames_to_change.items(): coord = cube.coord(std_name) if coord.long_name != long_name: coord.long_name = long_name diff --git a/esmvalcore/cmor/_fixes/cmip6/awi_esm_1_1_lr.py b/esmvalcore/cmor/_fixes/cmip6/awi_esm_1_1_lr.py index 2c53d38fb4..af3b851213 100644 --- a/esmvalcore/cmor/_fixes/cmip6/awi_esm_1_1_lr.py +++ b/esmvalcore/cmor/_fixes/cmip6/awi_esm_1_1_lr.py @@ -1,4 +1,5 @@ """Fixes for AWI-ESM-1-1-LR 
model.""" + from ..fix import Fix @@ -17,13 +18,15 @@ def fix_metadata(self, cubes): ------- iris.cube.CubeList """ - parent_units = 'parent_time_units' - bad_value = 'days since 0000-01-01 00:00:00' + parent_units = "parent_time_units" + bad_value = "days since 0000-01-01 00:00:00" for cube in cubes: try: - if cube.attributes[parent_units] == bad_value: - cube.attributes[parent_units] = 'days since 0001-01-01 ' \ - + '00:00:00' + if parent_units in cube.attributes: + if cube.attributes[parent_units] == bad_value: + cube.attributes[parent_units] = ( + "days since 0001-01-01 00:00:00" + ) except AttributeError: pass return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/bcc_csm2_mr.py b/esmvalcore/cmor/_fixes/cmip6/bcc_csm2_mr.py index 99bc9d6289..82d0577faf 100644 --- a/esmvalcore/cmor/_fixes/cmip6/bcc_csm2_mr.py +++ b/esmvalcore/cmor/_fixes/cmip6/bcc_csm2_mr.py @@ -1,4 +1,5 @@ """Fixes for BCC-CSM2-MR model.""" + from ..common import ClFixHybridPressureCoord, OceanFixGrid Areacello = OceanFixGrid diff --git a/esmvalcore/cmor/_fixes/cmip6/bcc_esm1.py b/esmvalcore/cmor/_fixes/cmip6/bcc_esm1.py index b0b38c0e8a..522cd74ee5 100644 --- a/esmvalcore/cmor/_fixes/cmip6/bcc_esm1.py +++ b/esmvalcore/cmor/_fixes/cmip6/bcc_esm1.py @@ -1,4 +1,5 @@ """Fixes for BCC-ESM1 model.""" + from ..common import ClFixHybridPressureCoord, OceanFixGrid Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/cams_csm1_0.py b/esmvalcore/cmor/_fixes/cmip6/cams_csm1_0.py index 59f09bcd28..5b8c44b1b6 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cams_csm1_0.py +++ b/esmvalcore/cmor/_fixes/cmip6/cams_csm1_0.py @@ -1,6 +1,6 @@ """Fixes for CAMS-CSM1-0 model.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/canesm5.py b/esmvalcore/cmor/_fixes/cmip6/canesm5.py index d4cdbc95e1..073154fe79 100644 --- a/esmvalcore/cmor/_fixes/cmip6/canesm5.py +++ b/esmvalcore/cmor/_fixes/cmip6/canesm5.py @@ -1,4 +1,5 @@ """Fixes for CanESM5 model.""" + import dask.array as da from ..fix import Fix @@ -21,7 +22,7 @@ def fix_data(self, cube): """ metadata = cube.metadata - cube *= 1.e-6 + cube *= 1.0e-6 cube.metadata = metadata return cube diff --git a/esmvalcore/cmor/_fixes/cmip6/cas_esm2_0.py b/esmvalcore/cmor/_fixes/cmip6/cas_esm2_0.py index 75170627a1..b1ea055f25 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cas_esm2_0.py +++ b/esmvalcore/cmor/_fixes/cmip6/cas_esm2_0.py @@ -1,4 +1,5 @@ """Fixes for CAS-ESM2-0 model.""" + from .ciesm import Cl as BaseCl Cl = BaseCl diff --git a/esmvalcore/cmor/_fixes/cmip6/cesm2.py b/esmvalcore/cmor/_fixes/cmip6/cesm2.py index 6ded187fbb..0c5c0eed94 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cesm2.py +++ b/esmvalcore/cmor/_fixes/cmip6/cesm2.py @@ -1,4 +1,5 @@ """Fixes for CESM2 model.""" + from shutil import copyfile import numpy as np @@ -29,10 +30,11 @@ def _fix_formula_terms( output_dir, filepath, add_unique_suffix=add_unique_suffix ) copyfile(filepath, new_path) - dataset = Dataset(new_path, mode='a') - dataset.variables['lev'].formula_terms = 'p0: p0 a: a b: b ps: ps' - dataset.variables['lev'].standard_name = ( - 'atmosphere_hybrid_sigma_pressure_coordinate') + dataset = Dataset(new_path, mode="a") + dataset.variables["lev"].formula_terms = "p0: p0 a: a b: b ps: ps" + dataset.variables[ + "lev" + ].standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" dataset.close() return new_path @@ -66,9 +68,9 @@ def fix_file(self, filepath, output_dir, add_unique_suffix=False): 
new_path = self._fix_formula_terms( filepath, output_dir, add_unique_suffix=add_unique_suffix ) - dataset = Dataset(new_path, mode='a') - dataset.variables['a_bnds'][:] = dataset.variables['a_bnds'][::-1, :] - dataset.variables['b_bnds'][:] = dataset.variables['b_bnds'][::-1, :] + dataset = Dataset(new_path, mode="a") + dataset.variables["a_bnds"][:] = dataset.variables["a_bnds"][::-1, :] + dataset.variables["b_bnds"][:] = dataset.variables["b_bnds"][::-1, :] dataset.close() return new_path @@ -88,12 +90,12 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - lev_coord = cube.coord(var_name='lev') - a_coord = cube.coord(var_name='a') - b_coord = cube.coord(var_name='b') + lev_coord = cube.coord(var_name="lev") + a_coord = cube.coord(var_name="a") + b_coord = cube.coord(var_name="b") lev_coord.points = a_coord.core_points() + b_coord.core_points() lev_coord.bounds = a_coord.core_bounds() + b_coord.core_bounds() - lev_coord.units = '1' + lev_coord.units = "1" return cubes @@ -142,12 +144,13 @@ def fix_metadata(self, cubes): """ for cube in cubes: - for coord_name in ['latitude', 'longitude']: + for coord_name in ["latitude", "longitude"]: coord = cube.coord(coord_name) if not coord.has_bounds(): coord.guess_bounds() - coord.bounds = np.round(coord.core_bounds().astype(np.float64), - 4) + coord.bounds = np.round( + coord.core_bounds().astype(np.float64), 4 + ) return cubes @@ -247,9 +250,10 @@ def fix_metadata(self, cubes): cube = self.get_cube_from_list(cubes) for cube in cubes: - if cube.attributes['mipTable'] == 'Omon': - cube.coord('time').points = \ - np.round(cube.coord('time').points, 1) + if cube.attributes["mipTable"] == "Omon": + cube.coord("time").points = np.round( + cube.coord("time").points, 1 + ) return cubes @@ -270,13 +274,13 @@ def fix_metadata(self, cubes): """ for cube in cubes: - if cube.coords(axis='Z'): - z_coord = cube.coord(axis='Z') + if cube.coords(axis="Z"): + z_coord = cube.coord(axis="Z") # Only points need to be fixed, not bounds - if z_coord.units == 'cm': + if z_coord.units == "cm": z_coord.points = z_coord.core_points() / 100.0 - z_coord.units = 'm' + z_coord.units = "m" # Fix depth metadata if z_coord.standard_name is None: diff --git a/esmvalcore/cmor/_fixes/cmip6/cesm2_fv2.py b/esmvalcore/cmor/_fixes/cmip6/cesm2_fv2.py index 0783c125fe..4c55a20f03 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cesm2_fv2.py +++ b/esmvalcore/cmor/_fixes/cmip6/cesm2_fv2.py @@ -1,10 +1,10 @@ """Fixes for CESM2-FV2 model.""" + +from ..common import SiconcFixScalarCoord from .cesm2 import Cl as BaseCl from .cesm2 import Fgco2 as BaseFgco2 from .cesm2 import Omon as BaseOmon from .cesm2 import Tas as BaseTas -from ..common import SiconcFixScalarCoord - Cl = BaseCl diff --git a/esmvalcore/cmor/_fixes/cmip6/cesm2_waccm.py b/esmvalcore/cmor/_fixes/cmip6/cesm2_waccm.py index f7263a00dd..156a656b5f 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cesm2_waccm.py +++ b/esmvalcore/cmor/_fixes/cmip6/cesm2_waccm.py @@ -1,4 +1,5 @@ """Fixes for CESM2-WACCM model.""" + from netCDF4 import Dataset from ..common import SiconcFixScalarCoord @@ -41,9 +42,9 @@ def fix_file(self, filepath, output_dir, add_unique_suffix=False): new_path = self._fix_formula_terms( filepath, output_dir, add_unique_suffix=add_unique_suffix ) - dataset = Dataset(new_path, mode='a') - dataset.variables['a_bnds'][:] = dataset.variables['a_bnds'][:, ::-1] - dataset.variables['b_bnds'][:] = dataset.variables['b_bnds'][:, ::-1] + dataset = Dataset(new_path, mode="a") + dataset.variables["a_bnds"][:] = 
dataset.variables["a_bnds"][:, ::-1] + dataset.variables["b_bnds"][:] = dataset.variables["b_bnds"][:, ::-1] dataset.close() return new_path diff --git a/esmvalcore/cmor/_fixes/cmip6/cesm2_waccm_fv2.py b/esmvalcore/cmor/_fixes/cmip6/cesm2_waccm_fv2.py index 89c55b3b10..23f77fbd07 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cesm2_waccm_fv2.py +++ b/esmvalcore/cmor/_fixes/cmip6/cesm2_waccm_fv2.py @@ -1,12 +1,12 @@ """Fixes for CESM2-WACCM-FV2 model.""" -from .cesm2 import Tas as BaseTas + +from ..common import SiconcFixScalarCoord from .cesm2 import Fgco2 as BaseFgco2 from .cesm2 import Omon as BaseOmon +from .cesm2 import Tas as BaseTas from .cesm2_waccm import Cl as BaseCl from .cesm2_waccm import Cli as BaseCli from .cesm2_waccm import Clw as BaseClw -from ..common import SiconcFixScalarCoord - Cl = BaseCl diff --git a/esmvalcore/cmor/_fixes/cmip6/ciesm.py b/esmvalcore/cmor/_fixes/cmip6/ciesm.py index c344c1b611..1e3198f579 100644 --- a/esmvalcore/cmor/_fixes/cmip6/ciesm.py +++ b/esmvalcore/cmor/_fixes/cmip6/ciesm.py @@ -1,4 +1,5 @@ """Fixes for CIESM model.""" + from ..common import ClFixHybridPressureCoord from ..fix import Fix @@ -21,8 +22,8 @@ def fix_data(self, cube): iris.cube.Cube """ if cube.core_data().max() <= 1.0: - cube.units = '1' - cube.convert_units('%') + cube.units = "1" + cube.convert_units("%") return cube @@ -44,8 +45,8 @@ def fix_data(self, cube): iris.cube.Cube """ if cube.core_data().max() <= 1.0: - cube.units = '1' - cube.convert_units('%') + cube.units = "1" + cube.convert_units("%") return cube @@ -57,6 +58,6 @@ def fix_data(self, cube): The values of v20200417 are off by a factor 1000. """ - if float(cube.core_data()[:10].mean()) < 1.e-5: - cube.data = cube.core_data() * 1000. + if float(cube.core_data()[:10].mean()) < 1.0e-5: + cube.data = cube.core_data() * 1000.0 return cube diff --git a/esmvalcore/cmor/_fixes/cmip6/cmcc_cm2_sr5.py b/esmvalcore/cmor/_fixes/cmip6/cmcc_cm2_sr5.py index 00983b36a7..cdd0fa9273 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cmcc_cm2_sr5.py +++ b/esmvalcore/cmor/_fixes/cmip6/cmcc_cm2_sr5.py @@ -1,4 +1,5 @@ """Fixes for CMCC-CM2-SR5 model.""" + from ..common import ClFixHybridPressureCoord @@ -19,6 +20,6 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - ps_coord = cube.coord(var_name='ps') + ps_coord = cube.coord(var_name="ps") ps_coord.standard_name = None return super().fix_metadata(cubes) diff --git a/esmvalcore/cmor/_fixes/cmip6/cnrm_cm6_1.py b/esmvalcore/cmor/_fixes/cmip6/cnrm_cm6_1.py index da515e66c2..81d30f9e14 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cnrm_cm6_1.py +++ b/esmvalcore/cmor/_fixes/cmip6/cnrm_cm6_1.py @@ -1,12 +1,13 @@ """Fixes for CNRM-CM6-1 model.""" + import iris from ..common import ClFixHybridPressureCoord from ..fix import Fix from ..shared import ( add_aux_coords_from_cubes, + fix_ocean_depth_coord, get_bounds_cube, - fix_ocean_depth_coord ) @@ -24,25 +25,25 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.Cube - """ cube = self.get_cube_from_list(cubes) # Add auxiliary coordinate from list of cubes coords_to_add = { - 'ap': 1, - 'b': 1, - 'ps': (0, 2, 3), + "ap": 1, + "b": 1, + "ps": (0, 2, 3), } add_aux_coords_from_cubes(cube, cubes, coords_to_add) - cube.coord(var_name='ap').units = 'Pa' + cube.coord(var_name="ap").units = "Pa" # Fix vertical coordinate bounds - for coord_name in ('ap', 'b'): + for coord_name in ("ap", "b"): bounds_cube = get_bounds_cube(cubes, coord_name) - bounds = bounds_cube.data.reshape(-1, 2) - new_bounds_cube = iris.cube.Cube(bounds, - 
**bounds_cube.metadata._asdict()) + bounds = bounds_cube.core_data().reshape(-1, 2) + new_bounds_cube = iris.cube.Cube( + bounds, **bounds_cube.metadata._asdict() + ) cubes.remove(bounds_cube) cubes.append(new_bounds_cube) @@ -50,7 +51,7 @@ def fix_metadata(self, cubes): cubes = super().fix_metadata(cubes) # Fix horizontal coordinates bounds - for coord_name in ('latitude', 'longitude'): + for coord_name in ("latitude", "longitude"): cube.coord(coord_name).guess_bounds() return cubes @@ -72,8 +73,8 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - alt_40_coord = cube.coord('alt40') - alt_40_coord.standard_name = 'altitude' + alt_40_coord = cube.coord("alt40") + alt_40_coord.standard_name = "altitude" return iris.cube.CubeList([cube]) @@ -91,11 +92,10 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.CubeList - """ for cube in cubes: - if cube.coords(axis='Z'): - z_coord = cube.coord(axis='Z') + if cube.coords(axis="Z"): + z_coord = cube.coord(axis="Z") if z_coord.standard_name is None: fix_ocean_depth_coord(cube) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/cnrm_cm6_1_hr.py b/esmvalcore/cmor/_fixes/cmip6/cnrm_cm6_1_hr.py index b5db40fc11..5e2b3ca36b 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cnrm_cm6_1_hr.py +++ b/esmvalcore/cmor/_fixes/cmip6/cnrm_cm6_1_hr.py @@ -1,9 +1,9 @@ """Fixes for CNRM-CM6-1-HR model.""" + from .cnrm_cm6_1 import Cl as BaseCl from .cnrm_cm6_1 import Cli as BaseCli from .cnrm_cm6_1 import Clw as BaseClw - Cl = BaseCl diff --git a/esmvalcore/cmor/_fixes/cmip6/cnrm_esm2_1.py b/esmvalcore/cmor/_fixes/cmip6/cnrm_esm2_1.py index 838ae3836c..8eb9aafe6c 100644 --- a/esmvalcore/cmor/_fixes/cmip6/cnrm_esm2_1.py +++ b/esmvalcore/cmor/_fixes/cmip6/cnrm_esm2_1.py @@ -1,11 +1,11 @@ """Fixes for CNRM-ESM2-1 model.""" + from .cnrm_cm6_1 import Cl as BaseCl from .cnrm_cm6_1 import Clcalipso as BaseClcalipso from .cnrm_cm6_1 import Cli as BaseCli from .cnrm_cm6_1 import Clw as BaseClw from .cnrm_cm6_1 import Omon as BaseOmon - Cl = BaseCl diff --git a/esmvalcore/cmor/_fixes/cmip6/e3sm_1_0.py b/esmvalcore/cmor/_fixes/cmip6/e3sm_1_0.py index 4a01fca0fe..764b949baa 100644 --- a/esmvalcore/cmor/_fixes/cmip6/e3sm_1_0.py +++ b/esmvalcore/cmor/_fixes/cmip6/e3sm_1_0.py @@ -1,4 +1,5 @@ """Fixes for E3SM-1-0 model.""" + from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/ec_earth3.py b/esmvalcore/cmor/_fixes/cmip6/ec_earth3.py index 49bb6a66a2..1ef3b6fe9d 100644 --- a/esmvalcore/cmor/_fixes/cmip6/ec_earth3.py +++ b/esmvalcore/cmor/_fixes/cmip6/ec_earth3.py @@ -1,4 +1,5 @@ """Fixes for EC-Earth3 model.""" + import cf_units import numpy as np @@ -12,17 +13,19 @@ class AllVars(Fix): def fix_metadata(self, cubes): """Fix metadata.""" for cube in cubes: - if cube.attributes.get('variant_label', '') == 'r3i1p1f1': + if cube.attributes.get("variant_label", "") == "r3i1p1f1": round_coordinates( [cube], decimals=3, - coord_names=['latitude'], + coord_names=["latitude"], ) - if (cube.attributes.get('experiment_id', '') == 'historical' - and cube.coords('time')): - time_coord = cube.coord('time') - time_coord.units = cf_units.Unit(time_coord.units.origin, - 'proleptic_gregorian') + if cube.attributes.get( + "experiment_id", "" + ) == "historical" and cube.coords("time"): + time_coord = cube.coord("time") + time_coord.units = cf_units.Unit( + time_coord.units.origin, "proleptic_gregorian" + ) return cubes @@ -43,7 +46,7 @@ def fix_data(self, cube): ------- iris.cube.Cube """ - cube.data = 
cube.core_data() * 100. + cube.data = cube.core_data() * 100.0 return cube @@ -67,7 +70,7 @@ def fix_metadata(self, cubes): cube = self.get_cube_from_list(cubes) for cube in cubes: - latitude = cube.coord('latitude') + latitude = cube.coord("latitude") latitude.points = np.round(latitude.core_points(), 8) latitude.bounds = np.round(latitude.core_bounds(), 8) diff --git a/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg.py b/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg.py index 73bf689725..8268280ca4 100644 --- a/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg.py +++ b/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg.py @@ -1,4 +1,5 @@ """Fixes for EC-Earth3-Veg model.""" + import cf_units import numpy as np @@ -22,7 +23,7 @@ def fix_data(self, cube): ------- iris.cube.Cube """ - cube.data = cube.core_data() * 100. + cube.data = cube.core_data() * 100.0 return cube @@ -36,10 +37,11 @@ class CalendarFix(Fix): def fix_metadata(self, cubes): """Fix metadata.""" for cube in cubes: - if cube.coords('time'): - time_coord = cube.coord('time') - time_coord.units = cf_units.Unit(time_coord.units.origin, - 'proleptic_gregorian') + if cube.coords("time"): + time_coord = cube.coord("time") + time_coord.units = cf_units.Unit( + time_coord.units.origin, "proleptic_gregorian" + ) return cubes @@ -71,7 +73,7 @@ def fix_metadata(self, cubes): cube = self.get_cube_from_list(cubes) for cube in cubes: - latitude = cube.coord('latitude') + latitude = cube.coord("latitude") latitude.points = np.round(latitude.core_points(), 8) latitude.bounds = np.round(latitude.core_bounds(), 8) diff --git a/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg_lr.py b/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg_lr.py index 333b4eb98b..14e38a47f5 100644 --- a/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg_lr.py +++ b/esmvalcore/cmor/_fixes/cmip6/ec_earth3_veg_lr.py @@ -1,5 +1,5 @@ """Fixes for EC-Earth3-Veg-LR model.""" -from ..common import OceanFixGrid +from ..common import OceanFixGrid Siconc = OceanFixGrid diff --git a/esmvalcore/cmor/_fixes/cmip6/fgoals_f3_l.py b/esmvalcore/cmor/_fixes/cmip6/fgoals_f3_l.py index f6f5b7af76..3e8e400780 100644 --- a/esmvalcore/cmor/_fixes/cmip6/fgoals_f3_l.py +++ b/esmvalcore/cmor/_fixes/cmip6/fgoals_f3_l.py @@ -1,4 +1,5 @@ """Fixes for CMIP6 FGOALS-f3-L model.""" + import cftime import dask.array as da import numpy as np @@ -16,6 +17,7 @@ class AllVars(Fix): """Fixes for all vars.""" + def fix_metadata(self, cubes): """Fix parent time units. 
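# Illustrative sketch (not part of the patch itself) of the calendar swap the
# EC-Earth3 fixes above rely on: rebuilding the unit from its origin string
# replaces only the calendar label, while the numeric time points stored in
# the coordinate are left untouched, which is what makes the fix cheap.
import cf_units

old_units = cf_units.Unit("days since 1850-01-01", calendar="gregorian")
new_units = cf_units.Unit(old_units.origin, "proleptic_gregorian")
assert new_units.origin == old_units.origin
assert new_units.calendar == "proleptic_gregorian"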
@@ -31,14 +33,14 @@ def fix_metadata(self, cubes):
         iris.cube.CubeList
         """
         for cube in cubes:
-            if cube.attributes['table_id'] == 'Amon':
-                for coord in ['latitude', 'longitude']:
+            if cube.attributes["table_id"] == "Amon":
+                for coord in ["latitude", "longitude"]:
                     cube_coord = cube.coord(coord)
                     bounds = cube_coord.bounds
                     if np.any(bounds[:-1, 1] != bounds[1:, 0]):
                         cube_coord.bounds = None
                         cube_coord.guess_bounds()
-                time = cube.coord('time')
+                time = cube.coord("time")
                 if np.any(time.bounds[:-1, 1] != time.bounds[1:, 0]):
                     times = time.units.num2date(time.points)
                     starts = [
@@ -46,12 +48,14 @@
                         for c in times
                     ]
                     ends = [
-                        cftime.DatetimeNoLeap(c.year, c.month +
-                                              1, 1) if c.month < 12 else
-                        cftime.DatetimeNoLeap(c.year + 1, 1, 1) for c in times
+                        cftime.DatetimeNoLeap(c.year, c.month + 1, 1)
+                        if c.month < 12
+                        else cftime.DatetimeNoLeap(c.year + 1, 1, 1)
+                        for c in times
                     ]
-                    time.bounds = date2num(np.stack([starts, ends], -1),
-                                           time.units)
+                    time.bounds = date2num(
+                        np.stack([starts, ends], -1), time.units
+                    )
         return cubes
@@ -73,8 +77,8 @@ def fix_data(self, cube):
         iris.cube.Cube
             Fixed cube. It can be a different instance.
         """
-        if cube.units == "%" and da.max(cube.core_data()).compute() <= 1.:
-            cube.data = cube.core_data() * 100.
+        if cube.units == "%" and da.max(cube.core_data()).compute() <= 1.0:
+            cube.data = cube.core_data() * 100.0
         return cube
diff --git a/esmvalcore/cmor/_fixes/cmip6/fgoals_g3.py b/esmvalcore/cmor/_fixes/cmip6/fgoals_g3.py
index 4126ef524b..591fa54b86 100644
--- a/esmvalcore/cmor/_fixes/cmip6/fgoals_g3.py
+++ b/esmvalcore/cmor/_fixes/cmip6/fgoals_g3.py
@@ -1,4 +1,6 @@
 """Fixes for FGOALS-g3 model."""
+
+import dask.array as da
 import iris
 
 from ..common import OceanFixGrid
@@ -22,7 +24,13 @@ def fix_metadata(self, cubes):
         """Fix metadata.
 
         FGOALS-g3 data contain latitude and longitude data set to >1e30 in some
-        places.
+        places. Note that the corresponding data is all masked.
+
+        Example files:
+        v20191030/siconc_SImon_FGOALS-g3_piControl_r1i1p1f1_gn_070001-079912.nc
+        v20191030/siconc_SImon_FGOALS-g3_piControl_r1i1p1f1_gn_080001-089912.nc
+        v20200706/siconc_SImon_FGOALS-g3_ssp534-over_r1i1p1f1_gn_201501-210012
+        .nc
 
         Parameters
         ----------
@@ -32,13 +40,13 @@ def fix_metadata(self, cubes):
         Returns
         -------
         iris.cube.CubeList
-
         """
         cube = self.get_cube_from_list(cubes)
-        cube.coord('latitude').points[
-            cube.coord('latitude').points > 1000.0] = 0.0
-        cube.coord('longitude').points[
-            cube.coord('longitude').points > 1000.0] = 0.0
+        for coord_name in ["latitude", "longitude"]:
+            coord = cube.coord(coord_name)
+            bad_indices = coord.core_points() > 1000.0
+            coord.points = da.ma.where(bad_indices, 0.0, coord.core_points())
+
         return super().fix_metadata(cubes)
 
 
@@ -51,7 +59,7 @@ class Mrsos(Fix):
     def fix_metadata(self, cubes):
         """Fix metadata.
 
-        FGOALS-g3 mrsos data contains error in co-ordinate bounds.
+        FGOALS-g3 mrsos data contains an error in coordinate bounds.
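# Rough numpy analogue (illustration only; the Siconc repair above uses dask
# through da.ma.where) of how the corrupt FGOALS-g3 coordinate points are
# replaced: compute a boolean mask and assign a fresh array back, rather than
# mutating coord.points in place, which lazy arrays do not support.
import numpy as np

points = np.array([10.0, 1.0e30, -45.0])     # 1e30 marks a corrupt cell
bad_indices = points > 1000.0                # True where the point is bogus
points = np.where(bad_indices, 0.0, points)  # -> array([10., 0., -45.])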
Parameters ---------- diff --git a/esmvalcore/cmor/_fixes/cmip6/fio_esm_2_0.py b/esmvalcore/cmor/_fixes/cmip6/fio_esm_2_0.py index d6462ad922..28b56082a4 100644 --- a/esmvalcore/cmor/_fixes/cmip6/fio_esm_2_0.py +++ b/esmvalcore/cmor/_fixes/cmip6/fio_esm_2_0.py @@ -1,4 +1,5 @@ """Fixes for FIO-ESM-2-0 model.""" + import logging import numpy as np @@ -27,14 +28,14 @@ def fix_metadata(self, cubes): ------- iris.cube.CubeList """ - round_coordinates(cubes, - decimals=6, - coord_names=["longitude", "latitude"]) + round_coordinates( + cubes, decimals=6, coord_names=["longitude", "latitude"] + ) logger.warning( "Using 'area_weighted' regridder scheme in Omon variables " "for dataset %s causes discontinuities in the longitude " "coordinate.", - self.extra_facets['dataset'], + self.extra_facets["dataset"], ) return cubes @@ -90,6 +91,6 @@ def fix_data(self, cube): iris.cube.Cube """ if cube.core_data().max() <= 1.0: - cube.units = '1' - cube.convert_units('%') + cube.units = "1" + cube.convert_units("%") return cube diff --git a/esmvalcore/cmor/_fixes/cmip6/gfdl_cm4.py b/esmvalcore/cmor/_fixes/cmip6/gfdl_cm4.py index f73b16f4b8..547c76a855 100644 --- a/esmvalcore/cmor/_fixes/cmip6/gfdl_cm4.py +++ b/esmvalcore/cmor/_fixes/cmip6/gfdl_cm4.py @@ -1,4 +1,5 @@ """Fixes for GFDL-CM4 model.""" + import iris from ..common import ( @@ -30,9 +31,9 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) coords_to_add = { - 'ap': 1, - 'b': 1, - 'ps': (0, 2, 3), + "ap": 1, + "b": 1, + "ps": (0, 2, 3), } add_aux_coords_from_cubes(cube, cubes, coords_to_add) return super().fix_metadata(cubes) @@ -66,7 +67,7 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) try: - cube.coord('height').attributes.pop('description') + cube.coord("height").attributes.pop("description") except iris.exceptions.CoordinateNotFoundError: add_scalar_height_coord(cube, 2.0) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/gfdl_esm4.py b/esmvalcore/cmor/_fixes/cmip6/gfdl_esm4.py index c9788eb37c..2336c0a5c7 100644 --- a/esmvalcore/cmor/_fixes/cmip6/gfdl_esm4.py +++ b/esmvalcore/cmor/_fixes/cmip6/gfdl_esm4.py @@ -1,5 +1,6 @@ """Fixes for GFDL-ESM4 model.""" -from ..common import SiconcFixScalarCoord, OceanFixGrid + +from ..common import OceanFixGrid, SiconcFixScalarCoord from ..fix import Fix from ..shared import ( add_scalar_depth_coord, @@ -45,8 +46,8 @@ def fix_metadata(self, cubes): """ for cube in cubes: - if cube.coords(axis='Z'): - z_coord = cube.coord(axis='Z') + if cube.coords(axis="Z"): + z_coord = cube.coord(axis="Z") if z_coord.standard_name is None: fix_ocean_depth_coord(cube) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/giss_e2_1_g.py b/esmvalcore/cmor/_fixes/cmip6/giss_e2_1_g.py index f4fb27d35c..f9d159701a 100644 --- a/esmvalcore/cmor/_fixes/cmip6/giss_e2_1_g.py +++ b/esmvalcore/cmor/_fixes/cmip6/giss_e2_1_g.py @@ -1,4 +1,5 @@ """Fixes for GISS-E2-1-G model.""" + from ..common import ClFixHybridPressureCoord from ..fix import Fix @@ -19,8 +20,10 @@ def fix_metadata(self, cubes): units in the files are 'degC', but the values are in 'K'. 
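# Self-contained sketch of the heuristic above, with made-up sample values: a
# cube labelled degC whose raw values exceed 100 must really hold Kelvin, so
# it is relabelled first and only then converted properly.
import numpy as np
from iris.cube import Cube

tas = Cube(np.array([281.3, 295.7, 273.2]), units="degC")
if tas.units == "degC" and tas.core_data().ravel()[:1000].max() > 100.0:
    tas.units = "K"            # relabel only; the data values are untouched
    tas.convert_units("degC")  # -> 8.15, 22.55, 0.05 degC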
""" for cube in cubes: - if (cube.units == 'degC' - and cube.core_data().ravel()[:1000].max() > 100.): - cube.units = 'K' + if ( + cube.units == "degC" + and cube.core_data().ravel()[:1000].max() > 100.0 + ): + cube.units = "K" cube.convert_units(self.vardef.units) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/giss_e2_1_h.py b/esmvalcore/cmor/_fixes/cmip6/giss_e2_1_h.py index 054a42255f..f799aea962 100644 --- a/esmvalcore/cmor/_fixes/cmip6/giss_e2_1_h.py +++ b/esmvalcore/cmor/_fixes/cmip6/giss_e2_1_h.py @@ -1,6 +1,6 @@ """Fixes for GISS-E2-1-H model.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/hadgem3_gc31_ll.py b/esmvalcore/cmor/_fixes/cmip6/hadgem3_gc31_ll.py index 99db1e2be1..fe49f55e21 100644 --- a/esmvalcore/cmor/_fixes/cmip6/hadgem3_gc31_ll.py +++ b/esmvalcore/cmor/_fixes/cmip6/hadgem3_gc31_ll.py @@ -1,8 +1,8 @@ """Fixes for CMIP6 HadGEM-GC31-LL.""" + from ..common import ClFixHybridHeightCoord from .ukesm1_0_ll import AllVars as BaseAllVars - AllVars = BaseAllVars diff --git a/esmvalcore/cmor/_fixes/cmip6/icon_esm_lr.py b/esmvalcore/cmor/_fixes/cmip6/icon_esm_lr.py index 01657b133e..e4c63305b5 100644 --- a/esmvalcore/cmor/_fixes/cmip6/icon_esm_lr.py +++ b/esmvalcore/cmor/_fixes/cmip6/icon_esm_lr.py @@ -21,12 +21,12 @@ def fix_metadata(self, cubes): """ varnames_to_change = { - 'latitude': 'lat', - 'longitude': 'lon', + "latitude": "lat", + "longitude": "lon", } for cube in cubes: - for (std_name, var_name) in varnames_to_change.items(): + for std_name, var_name in varnames_to_change.items(): if cube.coords(std_name): cube.coord(std_name).var_name = var_name diff --git a/esmvalcore/cmor/_fixes/cmip6/iitm_esm.py b/esmvalcore/cmor/_fixes/cmip6/iitm_esm.py index 6ed2108ff7..dbb14b9d5c 100644 --- a/esmvalcore/cmor/_fixes/cmip6/iitm_esm.py +++ b/esmvalcore/cmor/_fixes/cmip6/iitm_esm.py @@ -1,4 +1,5 @@ """Fixes for IITM-ESM model.""" + import logging import numpy as np @@ -40,6 +41,6 @@ def fix_metadata(self, cubes): "Using 'area_weighted' regridder scheme in Omon variables " "for dataset %s causes discontinuities in the longitude " "coordinate.", - self.extra_facets['dataset'], + self.extra_facets["dataset"], ) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/ipsl_cm5a2_inca.py b/esmvalcore/cmor/_fixes/cmip6/ipsl_cm5a2_inca.py index 57d925e6d1..ebbb3f9dea 100644 --- a/esmvalcore/cmor/_fixes/cmip6/ipsl_cm5a2_inca.py +++ b/esmvalcore/cmor/_fixes/cmip6/ipsl_cm5a2_inca.py @@ -1,9 +1,9 @@ """Fixes for IPSL-CM5A2-INCA model.""" + from .ipsl_cm6a_lr import AllVars as BaseAllVars from .ipsl_cm6a_lr import Clcalipso as BaseClcalipso from .ipsl_cm6a_lr import Omon as BaseOmon - AllVars = BaseAllVars diff --git a/esmvalcore/cmor/_fixes/cmip6/ipsl_cm6a_lr.py b/esmvalcore/cmor/_fixes/cmip6/ipsl_cm6a_lr.py index dfd7116275..f660bff1fe 100644 --- a/esmvalcore/cmor/_fixes/cmip6/ipsl_cm6a_lr.py +++ b/esmvalcore/cmor/_fixes/cmip6/ipsl_cm6a_lr.py @@ -1,4 +1,5 @@ """Fixes for IPSL-CM6A-LR model.""" + from iris.cube import CubeList from ..fix import Fix @@ -23,10 +24,10 @@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - if cube.coords('latitude'): - cube.coord('latitude').var_name = 'lat' - if cube.coords('longitude'): - cube.coord('longitude').var_name = 'lon' + if cube.coords("latitude"): + cube.coord("latitude").var_name = "lat" + if cube.coords("longitude"): + cube.coord("longitude").var_name = "lon" return CubeList([cube]) @@ -47,10 +48,10 
@@ def fix_metadata(self, cubes): """ cube = self.get_cube_from_list(cubes) - alt_40_coord = cube.coord('height') - alt_40_coord.long_name = 'altitude' - alt_40_coord.standard_name = 'altitude' - alt_40_coord.var_name = 'alt40' + alt_40_coord = cube.coord("height") + alt_40_coord.long_name = "altitude" + alt_40_coord.standard_name = "altitude" + alt_40_coord.var_name = "alt40" return CubeList([cube]) @@ -71,8 +72,8 @@ def fix_metadata(self, cubes): """ for cube in cubes: - if cube.coords(axis='Z'): - z_coord = cube.coord(axis='Z') - if z_coord.var_name == 'olevel': + if cube.coords(axis="Z"): + z_coord = cube.coord(axis="Z") + if z_coord.var_name == "olevel": fix_ocean_depth_coord(cube) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/ipsl_cm6a_lr_inca.py b/esmvalcore/cmor/_fixes/cmip6/ipsl_cm6a_lr_inca.py index 3a8a94b8de..b871dbad03 100644 --- a/esmvalcore/cmor/_fixes/cmip6/ipsl_cm6a_lr_inca.py +++ b/esmvalcore/cmor/_fixes/cmip6/ipsl_cm6a_lr_inca.py @@ -1,9 +1,9 @@ """Fixes for IPSL-CM6A-LR-INCA model.""" + from .ipsl_cm6a_lr import AllVars as BaseAllVars from .ipsl_cm6a_lr import Clcalipso as BaseClcalipso from .ipsl_cm6a_lr import Omon as BaseOmon - AllVars = BaseAllVars diff --git a/esmvalcore/cmor/_fixes/cmip6/kace_1_0_g.py b/esmvalcore/cmor/_fixes/cmip6/kace_1_0_g.py index e4c2cc420e..d930c4ee2a 100644 --- a/esmvalcore/cmor/_fixes/cmip6/kace_1_0_g.py +++ b/esmvalcore/cmor/_fixes/cmip6/kace_1_0_g.py @@ -1,4 +1,5 @@ """Fixes for KACE-1-0-G.""" + import logging import numpy as np @@ -46,6 +47,6 @@ def fix_metadata(self, cubes): "Using 'area_weighted' regridder scheme in Omon variables " "for dataset %s causes discontinuities in the longitude " "coordinate.", - self.extra_facets['dataset'], + self.extra_facets["dataset"], ) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/kiost_esm.py b/esmvalcore/cmor/_fixes/cmip6/kiost_esm.py index e06922a0af..12bddd4e21 100644 --- a/esmvalcore/cmor/_fixes/cmip6/kiost_esm.py +++ b/esmvalcore/cmor/_fixes/cmip6/kiost_esm.py @@ -1,4 +1,5 @@ """Fixes for KIOST-ESM model.""" + from dask import array as da from ..common import SiconcFixScalarCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/mcm_ua_1_0.py b/esmvalcore/cmor/_fixes/cmip6/mcm_ua_1_0.py index a9b200cde6..7484449fcc 100644 --- a/esmvalcore/cmor/_fixes/cmip6/mcm_ua_1_0.py +++ b/esmvalcore/cmor/_fixes/cmip6/mcm_ua_1_0.py @@ -1,4 +1,5 @@ """Fixes for MCM-UA-1-0 model.""" + import iris import numpy as np from dask import array as da @@ -9,7 +10,7 @@ def strip_cube_metadata(cube): """Remove unnecessary spaces in cube metadata.""" - attributes_to_strip = ('standard_name', 'long_name') + attributes_to_strip = ("standard_name", "long_name") for attr in attributes_to_strip: if getattr(cube, attr) is not None: setattr(cube, attr, getattr(cube, attr).strip()) @@ -25,7 +26,7 @@ class AllVars(Fix): def fix_metadata(self, cubes): """Fix metadata. - Remove unnecessary spaces in metadat and rename ``var_name`` of + Remove unnecessary spaces in metadata and rename ``var_name`` of latitude and longitude and fix longitude boundary description may be wrong (lons=[0, ..., 356.25]; on_bnds=[[-1.875, 1.875], ..., [354.375, 360]]). 
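# Standalone sketch of the atmos/land longitude-bounds repair described above
# (grid values taken from the docstring): on the regular 3.75 degree grid the
# last upper bound should be 356.25 + 1.875 = 358.125; the value 360.0 found
# in the files would overlap the first cell once the coordinate is circular.
import numpy as np

lon_points = np.arange(0.0, 360.0, 3.75)  # 0.0, 3.75, ..., 356.25
lon_bnds = np.stack([lon_points - 1.875, lon_points + 1.875], axis=-1)
lon_bnds[-1, -1] = 360.0                  # the broken value in the files
if (lon_points[0] == 0.0 and lon_points[-1] == 356.25
        and lon_bnds[-1, -1] == 360.0):
    lon_bnds[-1, -1] = 358.125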
@@ -41,43 +42,47 @@ def fix_metadata(self, cubes): """ coords_to_change = { - 'latitude': 'lat', - 'longitude': 'lon', + "latitude": "lat", + "longitude": "lon", } for cube in cubes: strip_cube_metadata(cube) - for (std_name, var_name) in coords_to_change.items(): + for std_name, var_name in coords_to_change.items(): try: coord = cube.coord(std_name) except iris.exceptions.CoordinateNotFoundError: pass else: coord.var_name = var_name - time_units = cube.attributes.get('parent_time_units') + time_units = cube.attributes.get("parent_time_units") if time_units is not None: - cube.attributes['parent_time_units'] = time_units.replace( - ' (noleap)', '') + cube.attributes["parent_time_units"] = time_units.replace( + " (noleap)", "" + ) for cube in cubes: coord_names = [cor.standard_name for cor in cube.coords()] - if 'longitude' in coord_names: - lon_coord = cube.coord('longitude') + if "longitude" in coord_names: + lon_coord = cube.coord("longitude") if lon_coord.ndim == 1 and lon_coord.has_bounds(): lon_bnds = lon_coord.bounds.copy() # atmos & land - if lon_coord.points[0] == 0. and \ - lon_coord.points[-1] == 356.25 and \ - lon_bnds[-1][-1] == 360.: + if ( + lon_coord.points[0] == 0.0 + and lon_coord.points[-1] == 356.25 + and lon_bnds[-1][-1] == 360.0 + ): lon_bnds[-1][-1] = 358.125 lon_coord.bounds = lon_bnds lon_coord.circular = True # ocean & seaice if lon_coord.points[0] == -0.9375: - lon_dim = cube.coord_dims('longitude')[0] + lon_dim = cube.coord_dims("longitude")[0] cube.data = da.roll(cube.core_data(), -1, axis=lon_dim) lon_points = np.roll(lon_coord.core_points(), -1) - lon_bounds = np.roll(lon_coord.core_bounds(), -1, - axis=0) + lon_bounds = np.roll( + lon_coord.core_bounds(), -1, axis=0 + ) lon_points[-1] += 360.0 lon_bounds[-1] += 360.0 lon_coord.points = lon_points @@ -103,8 +108,8 @@ def fix_metadata(self, cubes): """ for cube in cubes: - if cube.coords(axis='Z'): - z_coord = cube.coord(axis='Z') + if cube.coords(axis="Z"): + z_coord = cube.coord(axis="Z") if z_coord.standard_name is None: fix_ocean_depth_coord(cube) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/miroc6.py b/esmvalcore/cmor/_fixes/cmip6/miroc6.py index cf1d40ca86..98dd933d9c 100644 --- a/esmvalcore/cmor/_fixes/cmip6/miroc6.py +++ b/esmvalcore/cmor/_fixes/cmip6/miroc6.py @@ -1,4 +1,5 @@ """Fixes for MIROC6 model.""" + import numpy as np from ..common import ClFixHybridPressureCoord @@ -28,12 +29,14 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ for cube in cubes: - for coord_name in ['latitude', 'longitude']: + for coord_name in ["latitude", "longitude"]: coord = cube.coord(coord_name) - coord.points = coord.core_points().astype(np.float32).astype( - np.float64) + coord.points = ( + coord.core_points().astype(np.float32).astype(np.float64) + ) if not coord.has_bounds(): coord.guess_bounds() - coord.bounds = coord.core_bounds().astype(np.float32).astype( - np.float64) + coord.bounds = ( + coord.core_bounds().astype(np.float32).astype(np.float64) + ) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/miroc_es2l.py b/esmvalcore/cmor/_fixes/cmip6/miroc_es2l.py index aa28ae13cc..d4d986f833 100644 --- a/esmvalcore/cmor/_fixes/cmip6/miroc_es2l.py +++ b/esmvalcore/cmor/_fixes/cmip6/miroc_es2l.py @@ -1,6 +1,6 @@ """Fixes for MIROC-ES2L model.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_hr.py b/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_hr.py index 
c8a9ec58bb..fc327db646 100644 --- a/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_hr.py +++ b/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_hr.py @@ -1,4 +1,5 @@ """Fixes for MPI-ESM1-2-HR model.""" + from ..common import ClFixHybridPressureCoord from ..fix import Fix from ..shared import add_scalar_height_coord, round_coordinates @@ -10,11 +11,11 @@ class AllVars(Fix): def fix_metadata(self, cubes): """Fix metadata.""" for cube in cubes: - if cube.attributes.get('variant_label', '') == 'r2i1p1f1': + if cube.attributes.get("variant_label", "") == "r2i1p1f1": round_coordinates( [cube], decimals=11, - coord_names=['latitude'], + coord_names=["latitude"], ) return cubes @@ -67,8 +68,8 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ for cube in cubes: - plev = cube.coord('air_pressure') - plev.var_name = 'plev' + plev = cube.coord("air_pressure") + plev.var_name = "plev" return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_lr.py b/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_lr.py index 0992928c97..b824e6599d 100644 --- a/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_lr.py +++ b/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_lr.py @@ -1,4 +1,5 @@ """Fixes for MPI-ESM1-2-LR model.""" + from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_xr.py b/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_xr.py index 201ffcca63..f603b9d197 100644 --- a/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_xr.py +++ b/esmvalcore/cmor/_fixes/cmip6/mpi_esm1_2_xr.py @@ -1,8 +1,8 @@ """Fixes for MPI-ESM1-2-XR model.""" -from .mpi_esm1_2_hr import Tas as BaseTas -from .mpi_esm1_2_hr import Ta as BaseFix from .mpi_esm1_2_hr import SfcWind as BaseSfcWind +from .mpi_esm1_2_hr import Ta as BaseFix +from .mpi_esm1_2_hr import Tas as BaseTas class Tas(BaseTas): diff --git a/esmvalcore/cmor/_fixes/cmip6/mpi_esm_1_2_ham.py b/esmvalcore/cmor/_fixes/cmip6/mpi_esm_1_2_ham.py index d2cc99cf2d..0862583143 100644 --- a/esmvalcore/cmor/_fixes/cmip6/mpi_esm_1_2_ham.py +++ b/esmvalcore/cmor/_fixes/cmip6/mpi_esm_1_2_ham.py @@ -1,4 +1,5 @@ """Fixes for MPI-ESM-1-2-HAM model.""" + from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/mri_esm2_0.py b/esmvalcore/cmor/_fixes/cmip6/mri_esm2_0.py index 720670b4d5..565b7dee9d 100644 --- a/esmvalcore/cmor/_fixes/cmip6/mri_esm2_0.py +++ b/esmvalcore/cmor/_fixes/cmip6/mri_esm2_0.py @@ -1,6 +1,6 @@ """Fixes for MRI-ESM2-0 model.""" -from ..common import ClFixHybridPressureCoord +from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/nesm3.py b/esmvalcore/cmor/_fixes/cmip6/nesm3.py index 1ad4af1c1d..eb529cb7d9 100644 --- a/esmvalcore/cmor/_fixes/cmip6/nesm3.py +++ b/esmvalcore/cmor/_fixes/cmip6/nesm3.py @@ -1,4 +1,5 @@ """Fixes for NESM3 model.""" + from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/noresm2_lm.py b/esmvalcore/cmor/_fixes/cmip6/noresm2_lm.py index 2a07905002..fb1581ac1d 100644 --- a/esmvalcore/cmor/_fixes/cmip6/noresm2_lm.py +++ b/esmvalcore/cmor/_fixes/cmip6/noresm2_lm.py @@ -1,4 +1,5 @@ """Fixes for NorESM2-LM model.""" + import numpy as np from ..common import ClFixHybridPressureCoord @@ -22,31 +23,34 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.CubeList - """ for cube in cubes: coord_names = [cor.standard_name for cor in cube.coords()] - if 'longitude' in coord_names: - if cube.coord('longitude').ndim == 1 and \ - 
cube.coord('longitude').has_bounds(): - lon_bnds = cube.coord('longitude').bounds.copy() - if cube.coord('longitude').points[0] == 0. and \ - lon_bnds[0][0] == 0.: + if "longitude" in coord_names: + if ( + cube.coord("longitude").ndim == 1 + and cube.coord("longitude").has_bounds() + ): + lon_bnds = cube.coord("longitude").bounds.copy() + if ( + cube.coord("longitude").points[0] == 0.0 + and lon_bnds[0][0] == 0.0 + ): lon_bnds[0][0] = -1.25 - if cube.coord('longitude').points[-1] == 357.5 and \ - lon_bnds[-1][-1] == 360.: + if ( + cube.coord("longitude").points[-1] == 357.5 + and lon_bnds[-1][-1] == 360.0 + ): lon_bnds[-1][-1] = 358.75 - cube.coord('longitude').bounds = lon_bnds + cube.coord("longitude").bounds = lon_bnds return cubes Cl = ClFixHybridPressureCoord - Cli = ClFixHybridPressureCoord - Clw = ClFixHybridPressureCoord @@ -68,12 +72,11 @@ def fix_metadata(self, cubes): Returns ------- iris.cube.CubeList - """ for cube in cubes: - latitude = cube.coord('latitude') - latitude.bounds = np.round(latitude.bounds, 4) - longitude = cube.coord('longitude') - longitude.bounds = np.round(longitude.bounds, 4) + latitude = cube.coord("latitude") + latitude.bounds = np.round(latitude.core_bounds(), 4) + longitude = cube.coord("longitude") + longitude.bounds = np.round(longitude.core_bounds(), 4) return cubes diff --git a/esmvalcore/cmor/_fixes/cmip6/noresm2_mm.py b/esmvalcore/cmor/_fixes/cmip6/noresm2_mm.py index 3a9a97faa0..39af538887 100644 --- a/esmvalcore/cmor/_fixes/cmip6/noresm2_mm.py +++ b/esmvalcore/cmor/_fixes/cmip6/noresm2_mm.py @@ -1,4 +1,5 @@ """Fixes for NorESM2-MM model.""" + from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/sam0_unicon.py b/esmvalcore/cmor/_fixes/cmip6/sam0_unicon.py index dc0aa1ccb8..6f2f67c4bc 100644 --- a/esmvalcore/cmor/_fixes/cmip6/sam0_unicon.py +++ b/esmvalcore/cmor/_fixes/cmip6/sam0_unicon.py @@ -1,8 +1,8 @@ """Fixes for SAM0-UNICON model.""" + from ..common import ClFixHybridPressureCoord from ..fix import Fix - Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/taiesm1.py b/esmvalcore/cmor/_fixes/cmip6/taiesm1.py index d92b67079d..1ff3821748 100644 --- a/esmvalcore/cmor/_fixes/cmip6/taiesm1.py +++ b/esmvalcore/cmor/_fixes/cmip6/taiesm1.py @@ -1,4 +1,5 @@ """Fixes for TaiESM1 model.""" + from ..common import ClFixHybridPressureCoord Cl = ClFixHybridPressureCoord diff --git a/esmvalcore/cmor/_fixes/cmip6/ukesm1_0_ll.py b/esmvalcore/cmor/_fixes/cmip6/ukesm1_0_ll.py index 8bef01c21f..a599131cc2 100644 --- a/esmvalcore/cmor/_fixes/cmip6/ukesm1_0_ll.py +++ b/esmvalcore/cmor/_fixes/cmip6/ukesm1_0_ll.py @@ -1,4 +1,5 @@ """Fixes for CMIP6 UKESM1-0-LL.""" + from ..common import ClFixHybridHeightCoord from ..fix import Fix @@ -19,12 +20,12 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ - parent_units = 'parent_time_units' - bad_value = 'days since 1850-01-01-00-00-00' + parent_units = "parent_time_units" + bad_value = "days since 1850-01-01-00-00-00" for cube in cubes: try: if cube.attributes[parent_units] == bad_value: - cube.attributes[parent_units] = 'days since 1850-01-01' + cube.attributes[parent_units] = "days since 1850-01-01" except AttributeError: pass return cubes diff --git a/esmvalcore/cmor/_fixes/common.py b/esmvalcore/cmor/_fixes/common.py index d68bbac538..0d65c6faa6 100644 --- a/esmvalcore/cmor/_fixes/common.py +++ b/esmvalcore/cmor/_fixes/common.py @@ -1,4 +1,5 @@ """Common fixes used for multiple datasets.""" + import logging import iris @@ -33,13 
+34,13 @@ def fix_metadata(self, cubes): cube.remove_aux_factory(aux_factory) # Fix bounds - fix_bounds(cube, cubes, ('lev', 'b')) + fix_bounds(cube, cubes, ("lev", "b")) # Add aux_factory again height_coord_factory = iris.aux_factory.HybridHeightFactory( - delta=cube.coord(var_name='lev'), - sigma=cube.coord(var_name='b'), - orography=cube.coord(var_name='orog'), + delta=cube.coord(var_name="lev"), + sigma=cube.coord(var_name="b"), + orography=cube.coord(var_name="orog"), ) cube.add_aux_factory(height_coord_factory) @@ -71,38 +72,38 @@ def fix_metadata(self, cubes): cube.remove_aux_factory(aux_factory) # Fix bounds - coords_to_fix = ['b'] + coords_to_fix = ["b"] try: - cube.coord(var_name='a') - coords_to_fix.append('a') + cube.coord(var_name="a") + coords_to_fix.append("a") except iris.exceptions.CoordinateNotFoundError: - coords_to_fix.append('ap') + coords_to_fix.append("ap") fix_bounds(cube, cubes, coords_to_fix) # Fix bounds for ap if only a is given in original file # This was originally done by iris, but it has to be repeated since # a has bounds now - ap_coord = cube.coord(var_name='ap') + ap_coord = cube.coord(var_name="ap") if not ap_coord.has_bounds(): cube.remove_coord(ap_coord) - a_coord = cube.coord(var_name='a') - p0_coord = cube.coord(var_name='p0') + a_coord = cube.coord(var_name="a") + p0_coord = cube.coord(var_name="p0") ap_coord = a_coord * p0_coord.points[0] ap_coord.units = a_coord.units * p0_coord.units - ap_coord.rename('vertical pressure') - ap_coord.var_name = 'ap' + ap_coord.rename("vertical pressure") + ap_coord.var_name = "ap" cube.add_aux_coord(ap_coord, cube.coord_dims(a_coord)) # Add aux_factory again pressure_coord_factory = iris.aux_factory.HybridPressureFactory( delta=ap_coord, - sigma=cube.coord(var_name='b'), - surface_air_pressure=cube.coord(var_name='ps'), + sigma=cube.coord(var_name="b"), + surface_air_pressure=cube.coord(var_name="ps"), ) cube.add_aux_factory(pressure_coord_factory) # Remove attributes from Surface Air Pressure coordinate - cube.coord(var_name='ps').attributes = {} + cube.coord(var_name="ps").attributes = {} return iris.cube.CubeList([cube]) @@ -128,7 +129,9 @@ def fix_metadata(self, cubes): logger.warning( "OceanFixGrid is designed to work on any data with an " "irregular ocean grid, but it was only tested on 3D (time, " - "latitude, longitude) data so far; got %dD data", cube.ndim) + "latitude, longitude) data so far; got %dD data", + cube.ndim, + ) try: cube.coord("longitude", dim_coords=False) @@ -140,34 +143,42 @@ def fix_metadata(self, cubes): # - Second dimension j -> Y-direction (= latitude) (j_dim, i_dim) = sorted( set( - cube.coord_dims(cube.coord('latitude', dim_coords=False)) + - cube.coord_dims(cube.coord('longitude', dim_coords=False)))) + cube.coord_dims(cube.coord("latitude", dim_coords=False)) + + cube.coord_dims(cube.coord("longitude", dim_coords=False)) + ) + ) try: cube.coord(dim_coords=True, dimensions=i_dim) cube.coord(dim_coords=True, dimensions=j_dim) except iris.exceptions.CoordinateNotFoundError: cube.add_dim_coord( - iris.coords.DimCoord(np.arange(cube.shape[i_dim]), - var_name="i"), i_dim) + iris.coords.DimCoord( + np.arange(cube.shape[i_dim]), var_name="i" + ), + i_dim, + ) cube.add_dim_coord( - iris.coords.DimCoord(np.arange(cube.shape[j_dim]), - var_name="j"), j_dim) + iris.coords.DimCoord( + np.arange(cube.shape[j_dim]), var_name="j" + ), + j_dim, + ) i_coord = cube.coord(dim_coords=True, dimensions=i_dim) j_coord = cube.coord(dim_coords=True, dimensions=j_dim) # Fix metadata of coordinate i - 
i_coord.var_name = 'i' + i_coord.var_name = "i" i_coord.standard_name = None - i_coord.long_name = 'cell index along first dimension' - i_coord.units = '1' + i_coord.long_name = "cell index along first dimension" + i_coord.units = "1" i_coord.circular = False # Fix metadata of coordinate j - j_coord.var_name = 'j' + j_coord.var_name = "j" j_coord.standard_name = None - j_coord.long_name = 'cell index along second dimension' - j_coord.units = '1' + j_coord.long_name = "cell index along second dimension" + j_coord.units = "1" # Fix points and bounds of index coordinates i and j for idx_coord in (i_coord, j_coord): @@ -183,24 +194,28 @@ def fix_metadata(self, cubes): # latitudes in the midpoints between the cell centers. lat_vertices = [] lon_vertices = [] - for (j, i) in [(0, 0), (0, 1), (1, 1), (1, 0)]: - (j_v, i_v) = np.meshgrid(j_coord.bounds[:, j], - i_coord.bounds[:, i], - indexing='ij') + for j, i in [(0, 0), (0, 1), (1, 1), (1, 0)]: + (j_v, i_v) = np.meshgrid( + j_coord.bounds[:, j], i_coord.bounds[:, i], indexing="ij" + ) lat_vertices.append( - map_coordinates(cube.coord('latitude').points, [j_v, i_v], - mode='nearest')) + map_coordinates( + cube.coord("latitude").points, [j_v, i_v], mode="nearest" + ) + ) lon_vertices.append( - map_coordinates(cube.coord('longitude').points, [j_v, i_v], - mode='wrap')) + map_coordinates( + cube.coord("longitude").points, [j_v, i_v], mode="wrap" + ) + ) lat_vertices = np.array(lat_vertices) lon_vertices = np.array(lon_vertices) lat_vertices = np.moveaxis(lat_vertices, 0, -1) lon_vertices = np.moveaxis(lon_vertices, 0, -1) # Copy vertices to cube - cube.coord('latitude').bounds = lat_vertices - cube.coord('longitude').bounds = lon_vertices + cube.coord("latitude").bounds = lat_vertices + cube.coord("longitude").bounds = lon_vertices return iris.cube.CubeList([cube]) diff --git a/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/aladin63.py b/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/aladin63.py index 01ae2bb942..d192e3709b 100644 --- a/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/aladin63.py +++ b/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/aladin63.py @@ -1,4 +1,5 @@ """Fixes for rcm ALADIN63 driven by CNRM-CERFACS-CNRM-CM5.""" + import numpy as np from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix @@ -23,9 +24,9 @@ def fix_metadata(self, cubes): """ for cube in cubes: add_scalar_height_coord(cube) - if cube.coord('height').points != 2.: - cube.coord('height').points = np.ma.array([2.0]) - cube.coord('time').long_name = 'time' + if cube.coord("height").points != 2.0: + cube.coord("height").points = np.ma.array([2.0]) + cube.coord("time").long_name = "time" return cubes diff --git a/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/hadrem3_ga7_05.py b/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/hadrem3_ga7_05.py index 1806f3f0a0..ad169ebb0b 100644 --- a/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/hadrem3_ga7_05.py +++ b/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/hadrem3_ga7_05.py @@ -1,6 +1,8 @@ """Fixes for rcm HadREM3-GA7-05 driven by CNRM-CERFACS-CNRM-CM5.""" + from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - MOHCHadREM3GA705 as BaseFix) + MOHCHadREM3GA705 as BaseFix, +) Tas = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/wrf381p.py b/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/wrf381p.py index 26fab6a87c..20de40753a 100644 --- a/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/wrf381p.py +++ 
b/esmvalcore/cmor/_fixes/cordex/cnrm_cerfacs_cnrm_cm5/wrf381p.py @@ -1,4 +1,5 @@ """Fixes for rcm WRF381P driven by CNRM-CERFACS-CNRM-CM5.""" + from esmvalcore.cmor._fixes.shared import add_scalar_height_coord from esmvalcore.cmor.fix import Fix diff --git a/esmvalcore/cmor/_fixes/cordex/cordex_fixes.py b/esmvalcore/cmor/_fixes/cordex/cordex_fixes.py index 73c5e53ef0..7d148f561f 100644 --- a/esmvalcore/cmor/_fixes/cordex/cordex_fixes.py +++ b/esmvalcore/cmor/_fixes/cordex/cordex_fixes.py @@ -1,4 +1,5 @@ """Fixes that are shared between datasets and drivers.""" + import logging from functools import lru_cache @@ -42,9 +43,9 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ for cube in cubes: - cube.coord('latitude').var_name = 'lat' - cube.coord('longitude').var_name = 'lon' - cube.coord('time').long_name = 'time' + cube.coord("latitude").var_name = "lat" + cube.coord("longitude").var_name = "lon" + cube.coord("time").long_name = "time" return cubes @@ -65,7 +66,7 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ for cube in cubes: - cube.coord('time').long_name = 'time' + cube.coord("time").long_name = "time" return cubes @@ -92,17 +93,19 @@ def fix_metadata(self, cubes): iris.cube.CubeList """ for cube in cubes: - time_unit = cube.coord('time').units - if time_unit.calendar == 'standard': - new_unit = time_unit.change_calendar('proleptic_gregorian') - cube.coord('time').units = new_unit + time_unit = cube.coord("time").units + if time_unit.calendar == "standard": + new_unit = time_unit.change_calendar("proleptic_gregorian") + cube.coord("time").units = new_unit for coord in cube.coords(): - if coord.dtype in ['>f8', '>f4']: + if coord.dtype in [">f8", ">f4"]: coord.points = coord.core_points().astype( - np.float64, casting='same_kind') + np.float64, casting="same_kind" + ) if coord.has_bounds(): coord.bounds = coord.core_bounds().astype( - np.float64, casting='same_kind') + np.float64, casting="same_kind" + ) return cubes @@ -115,30 +118,36 @@ def _check_grid_differences(self, old_coord, new_coord): logger.debug( "Maximum difference between original %s" "points and standard %s domain points " - "for dataset %s and driver %s is: %s.", new_coord.var_name, - self.extra_facets['domain'], self.extra_facets['dataset'], - self.extra_facets['driver'], str(diff)) + "for dataset %s and driver %s is: %s.", + new_coord.var_name, + self.extra_facets["domain"], + self.extra_facets["dataset"], + self.extra_facets["driver"], + str(diff), + ) if diff > 10e-4: raise RecipeError( "Differences between the original grid and the " - f"standardised grid are above 10e-4 {new_coord.units}.") + f"standardised grid are above 10e-4 {new_coord.units}." 
+ ) def _fix_rotated_coords(self, cube, domain, domain_info): """Fix rotated coordinates.""" - for dim_coord in ['rlat', 'rlon']: + for dim_coord in ["rlat", "rlon"]: old_coord = cube.coord(domain[dim_coord].standard_name) old_coord_dims = old_coord.cube_dims(cube) points = domain[dim_coord].data coord_system = iris.coord_systems.RotatedGeogCS( - grid_north_pole_latitude=domain_info['pollat'], - grid_north_pole_longitude=domain_info['pollon']) + grid_north_pole_latitude=domain_info["pollat"], + grid_north_pole_longitude=domain_info["pollon"], + ) new_coord = iris.coords.DimCoord( points, var_name=dim_coord, standard_name=domain[dim_coord].standard_name, long_name=domain[dim_coord].long_name, - units=Unit('degrees'), + units=Unit("degrees"), coord_system=coord_system, ) self._check_grid_differences(old_coord, new_coord) @@ -148,11 +157,11 @@ def _fix_rotated_coords(self, cube, domain, domain_info): def _fix_geographical_coords(self, cube, domain): """Fix geographical coordinates.""" - for aux_coord in ['lat', 'lon']: + for aux_coord in ["lat", "lon"]: old_coord = cube.coord(domain[aux_coord].standard_name) cube.remove_coord(old_coord) points = domain[aux_coord].data - bounds = domain[f'{aux_coord}_vertices'].data + bounds = domain[f"{aux_coord}_vertices"].data new_coord = iris.coords.AuxCoord( points, var_name=aux_coord, @@ -161,13 +170,14 @@ def _fix_geographical_coords(self, cube, domain): units=Unit(domain[aux_coord].units), bounds=bounds, ) - if aux_coord == 'lon' and new_coord.points.min() < 0.: - lon_inds = (new_coord.points < 0.) & (old_coord.points > 0.) - old_coord.points[lon_inds] = old_coord.points[lon_inds] - 360. + if aux_coord == "lon" and new_coord.points.min() < 0.0: + lon_inds = (new_coord.points < 0.0) & (old_coord.points > 0.0) + old_coord.points[lon_inds] = old_coord.points[lon_inds] - 360.0 self._check_grid_differences(old_coord, new_coord) - aux_coord_dims = (cube.coord(var_name='rlat').cube_dims(cube) + - cube.coord(var_name='rlon').cube_dims(cube)) + aux_coord_dims = cube.coord(var_name="rlat").cube_dims( + cube + ) + cube.coord(var_name="rlon").cube_dims(cube) cube.add_aux_coord(new_coord, aux_coord_dims) def fix_metadata(self, cubes): @@ -188,7 +198,7 @@ def fix_metadata(self, cubes): ------- iris.cube.CubeList """ - data_domain = self.extra_facets['domain'] + data_domain = self.extra_facets["domain"] domain = _get_domain(data_domain) domain_info = _get_domain_info(data_domain) for cube in cubes: @@ -200,11 +210,13 @@ def fix_metadata(self, cubes): logger.warning( "Support for CORDEX datasets in a Lambert Conformal " "coordinate system is ongoing. Certain preprocessor " - "functions may fail.") + "functions may fail." + ) else: raise RecipeError( f"Coordinate system {coord_system.grid_mapping_name} " "not supported in CORDEX datasets. Must be " - "rotated_latitude_longitude or lambert_conformal_conic.") + "rotated_latitude_longitude or lambert_conformal_conic." 
+ ) return cubes diff --git a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/cclm4_8_17.py b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/cclm4_8_17.py index 88d2123420..9a1b82ad5a 100644 --- a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/cclm4_8_17.py +++ b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/cclm4_8_17.py @@ -1,5 +1,7 @@ """Fixes for rcm CCLM4-8-17 driven by ICHEC-EC-EARTH.""" + from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - CLMcomCCLM4817 as BaseFix) + CLMcomCCLM4817 as BaseFix, +) AllVars = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/hadrem3_ga7_05.py b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/hadrem3_ga7_05.py index 2f816ba55e..fd9743e0af 100644 --- a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/hadrem3_ga7_05.py +++ b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/hadrem3_ga7_05.py @@ -1,6 +1,8 @@ """Fixes for rcm HadREM3-GA7-05 driven by ICHEC-EC-EARTH.""" + from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - MOHCHadREM3GA705 as BaseFix) + MOHCHadREM3GA705 as BaseFix, +) Tas = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/racmo22e.py b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/racmo22e.py index 9f4cb7a2bc..1d8a1b9176 100644 --- a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/racmo22e.py +++ b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/racmo22e.py @@ -1,5 +1,5 @@ """Fixes for rcm RACMO22E driven by ICHEC-EC-EARTH.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/rca4.py b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/rca4.py index 114715a5c9..f81010d59e 100644 --- a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/rca4.py +++ b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/rca4.py @@ -1,6 +1,6 @@ """Fixes for rcm RCA4 driven by ICHEC-EC-EARTH.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/remo2015.py b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/remo2015.py index c50d9a23e2..b00933ab56 100644 --- a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/remo2015.py +++ b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/remo2015.py @@ -1,5 +1,5 @@ """Fixes for rcm REMO2015 driven by ICHEC-EC-EARTH.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/wrf381p.py b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/wrf381p.py index 509b0d3290..3e6a954cf5 100644 --- a/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/wrf381p.py +++ b/esmvalcore/cmor/_fixes/cordex/ichec_ec_earth/wrf381p.py @@ -1,4 +1,5 @@ """Fixes for rcm WRF381P driven by ICHEC-EC-EARTH.""" + from esmvalcore.cmor._fixes.shared import add_scalar_height_coord from esmvalcore.cmor.fix import Fix diff --git a/esmvalcore/cmor/_fixes/cordex/ipsl_ipsl_cm5a_mr/wrf381p.py b/esmvalcore/cmor/_fixes/cordex/ipsl_ipsl_cm5a_mr/wrf381p.py index 86da149533..4574d38094 100644 --- a/esmvalcore/cmor/_fixes/cordex/ipsl_ipsl_cm5a_mr/wrf381p.py +++ b/esmvalcore/cmor/_fixes/cordex/ipsl_ipsl_cm5a_mr/wrf381p.py @@ -1,4 +1,5 @@ """Fixes for rcm WRF381P driven by IPSL-IPSL-CM5A-MR.""" + from esmvalcore.cmor._fixes.shared import 
add_scalar_height_coord from esmvalcore.cmor.fix import Fix diff --git a/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/cclm4_8_17.py b/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/cclm4_8_17.py index ac0460904d..635e0670e5 100644 --- a/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/cclm4_8_17.py +++ b/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/cclm4_8_17.py @@ -1,5 +1,7 @@ """Fixes for rcm CCLM4-8-17 driven by MIROC-MIROC5.""" + from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - CLMcomCCLM4817 as BaseFix) + CLMcomCCLM4817 as BaseFix, +) AllVars = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/remo2015.py b/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/remo2015.py index fbd13bdfab..c4394f53c6 100644 --- a/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/remo2015.py +++ b/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/remo2015.py @@ -1,5 +1,5 @@ """Fixes for rcm REMO2015 driven by MIROC-MIROC5.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/wrf361h.py b/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/wrf361h.py index f8a69bca9b..285cc124d2 100644 --- a/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/wrf361h.py +++ b/esmvalcore/cmor/_fixes/cordex/miroc_miroc5/wrf361h.py @@ -1,5 +1,7 @@ """Fixes for rcm WRF361H driven by MIROC-MIROC5.""" + import iris + from esmvalcore.cmor.fix import Fix @@ -24,11 +26,8 @@ def fix_metadata(self, cubes): """ fixed_cubes = iris.cube.CubeList() for cube in cubes: - height = cube.coord('height') + height = cube.coord("height") if isinstance(height, iris.coords.DimCoord): - iris.util.demote_dim_coord_to_aux_coord( - cube, - height - ) + iris.util.demote_dim_coord_to_aux_coord(cube, height) fixed_cubes.append(iris.util.squeeze(cube)) return fixed_cubes diff --git a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/hadrem3_ga7_05.py b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/hadrem3_ga7_05.py index 7964a583e0..2852e95492 100644 --- a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/hadrem3_ga7_05.py +++ b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/hadrem3_ga7_05.py @@ -1,6 +1,8 @@ """Fixes for rcm HadREM3-GA7-05 driven by MOHC-HadGEM2-ES.""" + from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - MOHCHadREM3GA705 as BaseFix) + MOHCHadREM3GA705 as BaseFix, +) Tas = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/hirham5.py b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/hirham5.py index 5dfb91f274..8a7e659c5a 100644 --- a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/hirham5.py +++ b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/hirham5.py @@ -1,4 +1,5 @@ """Fixes for rcm HIRHAM driven by MOHC-HadGEM2.""" + from esmvalcore.cmor.fix import Fix @@ -19,7 +20,7 @@ def fix_metadata(self, cubes): """ for cube in cubes: - cube.coord('latitude').attributes = {} - cube.coord('longitude').attributes = {} + cube.coord("latitude").attributes = {} + cube.coord("longitude").attributes = {} return cubes diff --git a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/rca4.py b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/rca4.py index 740711fcea..4cecccec4f 100644 --- a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/rca4.py +++ b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/rca4.py @@ -1,6 +1,6 @@ """Fixes for rcm RCA4 driven by MOHC-HadGEM2-ES.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from 
esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/remo2015.py b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/remo2015.py index cea145f2d3..446a346825 100644 --- a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/remo2015.py +++ b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/remo2015.py @@ -1,6 +1,6 @@ """Fixes for rcm REMO2015 driven by MOHC-HadGEM2.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/wrf381p.py b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/wrf381p.py index 4e4d3a316f..aec6f17d66 100644 --- a/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/wrf381p.py +++ b/esmvalcore/cmor/_fixes/cordex/mohc_hadgem2_es/wrf381p.py @@ -1,4 +1,5 @@ """Fixes for rcm WRF381P driven by MOHC-HadGEM2-ES.""" + from esmvalcore.cmor._fixes.shared import add_scalar_height_coord from esmvalcore.cmor.fix import Fix diff --git a/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/hadrem3_ga7_05.py b/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/hadrem3_ga7_05.py index 4875edfc93..c2d52052e9 100644 --- a/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/hadrem3_ga7_05.py +++ b/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/hadrem3_ga7_05.py @@ -1,6 +1,8 @@ """Fixes for rcm HadREM3-GA7-05 driven by MPI-M-MPI-ESM-LR.""" + from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - MOHCHadREM3GA705 as BaseFix) + MOHCHadREM3GA705 as BaseFix, +) Tas = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/racmo22e.py b/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/racmo22e.py index 676e0dfc47..ec4870d862 100644 --- a/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/racmo22e.py +++ b/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/racmo22e.py @@ -1,5 +1,5 @@ """Fixes for rcm RACMO22E driven by MPI-M-MPI-ESM-LR.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/regcm4_6.py b/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/regcm4_6.py index f863ed1712..4a34fb0ad7 100644 --- a/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/regcm4_6.py +++ b/esmvalcore/cmor/_fixes/cordex/mpi_m_mpi_esm_lr/regcm4_6.py @@ -1,6 +1,6 @@ """Fixes for rcm RegCM4-6 driven by MPI-M-MPI-ESM-LR.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/hadrem3_ga7_05.py b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/hadrem3_ga7_05.py index 1aa2d11b1b..97cac1a896 100644 --- a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/hadrem3_ga7_05.py +++ b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/hadrem3_ga7_05.py @@ -1,6 +1,8 @@ """Fixes for rcm HadREM3-GA7-05 driven by NCC-NorESM1-M.""" + from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - MOHCHadREM3GA705 as BaseFix) + MOHCHadREM3GA705 as BaseFix, +) Tas = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/racmo22e.py b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/racmo22e.py index e9e2e38734..63dbf9c5a3 100644 --- a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/racmo22e.py +++ b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/racmo22e.py 
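# A minimal sketch (illustration only, not part of the patch) of the aliasing
# pattern repeated in the CORDEX fix modules above and below: each module
# rebinds a shared fix class to a variable named after the CMOR short name
# (Pr, Tas, ...). get_fixes() in fix.py (further down in this diff) collects a
# module's classes case-insensitively via inspect.getmembers, so the alias is
# picked up as the fix for that variable. Toy stand-in class, not the real
# esmvalcore API:
class TimeLongName:
    """Stand-in for esmvalcore.cmor._fixes.cordex.cordex_fixes.TimeLongName."""


Pr = TimeLongName  # discovered by get_fixes() under the lower-cased key "pr"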
@@ -1,5 +1,5 @@ """Fixes for rcm RACMO22E driven by NCC-NorESM1-M.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/rca4.py b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/rca4.py index 58d14599da..b0be8a352e 100644 --- a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/rca4.py +++ b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/rca4.py @@ -1,6 +1,6 @@ """Fixes for rcm RCA4 driven by NCC-NorESM1-M.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/remo2015.py b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/remo2015.py index 9722263d70..60f5351229 100644 --- a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/remo2015.py +++ b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/remo2015.py @@ -1,5 +1,5 @@ """Fixes for rcm REMO2015 driven by NCC-NorESM1-M.""" -from esmvalcore.cmor._fixes.cordex.cordex_fixes import ( - TimeLongName as BaseFix) + +from esmvalcore.cmor._fixes.cordex.cordex_fixes import TimeLongName as BaseFix Pr = BaseFix diff --git a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/wrf381p.py b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/wrf381p.py index d373d9ddd2..1eb30af84c 100644 --- a/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/wrf381p.py +++ b/esmvalcore/cmor/_fixes/cordex/ncc_noresm1_m/wrf381p.py @@ -1,4 +1,5 @@ """Fixes for rcm WRF381P driven by NCC-NorESM1-M.""" + from esmvalcore.cmor._fixes.shared import add_scalar_height_coord from esmvalcore.cmor.fix import Fix diff --git a/esmvalcore/cmor/_fixes/emac/_base_fixes.py b/esmvalcore/cmor/_fixes/emac/_base_fixes.py index 5082f612c9..4552453588 100644 --- a/esmvalcore/cmor/_fixes/emac/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/emac/_base_fixes.py @@ -17,8 +17,9 @@ def get_cube(self, cubes, var_name=None): """Extract single cube.""" # If no var_name given, use the CMOR short_name if var_name is None: - var_name = self.extra_facets.get('raw_name', - self.vardef.short_name) + var_name = self.extra_facets.get( + "raw_name", self.vardef.short_name + ) # Convert to list if only a single var_name is given if isinstance(var_name, str): @@ -38,11 +39,8 @@ def get_cube(self, cubes, var_name=None): raise ValueError( f"No variable of {var_names} necessary for the extraction/" f"derivation the CMOR variable '{self.vardef.short_name}' is " - f"available in the input file. Hint: in case you tried to extract " - f"a 3D variable defined on pressure levels, it might be necessary " - f"to define the EMAC variable name in the recipe (e.g., " - f"'raw_name: tm1_p39_cav') if the default number of pressure " - f"levels is not available in the input file." + f"available in the input file. Please specify a valid `raw_name` " + f"in the recipe or extra facets file." 
) diff --git a/esmvalcore/cmor/_fixes/emac/emac.py b/esmvalcore/cmor/_fixes/emac/emac.py index 38ae862916..e5150f16f3 100644 --- a/esmvalcore/cmor/_fixes/emac/emac.py +++ b/esmvalcore/cmor/_fixes/emac/emac.py @@ -34,7 +34,7 @@ class AllVars(EmacFix): # Dictionary to map invalid units in the data to valid entries INVALID_UNITS = { - 'kg/m**2s': 'kg m-2 s-1', + "kg/m**2s": "kg m-2 s-1", } def fix_file(self, filepath, output_dir, add_unique_suffix=False): @@ -49,17 +49,17 @@ def fix_file(self, filepath, output_dir, add_unique_suffix=False): in the class:`iris.cube.CubeList` object returned by :mod:`iris.load`. """ - if 'alevel' not in self.vardef.dimensions: + if "alevel" not in self.vardef.dimensions: return filepath new_path = self.get_fixed_filepath( output_dir, filepath, add_unique_suffix=add_unique_suffix ) copyfile(filepath, new_path) - with Dataset(new_path, mode='a') as dataset: - if 'formula_terms' in dataset.variables['lev'].ncattrs(): - del dataset.variables['lev'].formula_terms - if 'formula_terms' in dataset.variables['ilev'].ncattrs(): - del dataset.variables['ilev'].formula_terms + with Dataset(new_path, mode="a") as dataset: + if "formula_terms" in dataset.variables["lev"].ncattrs(): + del dataset.variables["lev"].formula_terms + if "formula_terms" in dataset.variables["ilev"].ncattrs(): + del dataset.variables["ilev"].formula_terms return new_path def fix_metadata(self, cubes): @@ -72,11 +72,11 @@ def fix_metadata(self, cubes): self.fix_regular_lon(cube) # Fix regular pressure levels (considers plev19, plev39, etc.) - if self.vardef.has_coord_with_standard_name('air_pressure'): + if self.vardef.has_coord_with_standard_name("air_pressure"): self._fix_plev(cube) # Fix hybrid pressure levels - if 'alevel' in self.vardef.dimensions: + if "alevel" in self.vardef.dimensions: cube = self._fix_alevel(cube, cubes) # Fix scalar coordinates @@ -92,9 +92,9 @@ def _fix_plev(self, cube): for coord in cube.coords(): coord_type = iris.util.guess_coord_axis(coord) - if coord_type != 'Z': + if coord_type != "Z": continue - if not coord.units.is_convertible('Pa'): + if not coord.units.is_convertible("Pa"): continue self.fix_plev_metadata(cube, coord) @@ -104,59 +104,60 @@ def _fix_plev(self, cube): raise ValueError( f"Cannot find requested pressure level coordinate for variable " f"'{self.vardef.short_name}', searched for Z-coordinates with " - f"units that are convertible to Pa") + f"units that are convertible to Pa" + ) @staticmethod def _fix_alevel(cube, cubes): """Fix hybrid pressure level coordinate of cube.""" # Add coefficients for hybrid pressure level coordinate coords_to_add = { - 'hyam': 1, - 'hybm': 1, - 'aps_ave': (0, 2, 3), + "hyam": 1, + "hybm": 1, + "aps_ave": (0, 2, 3), } add_aux_coords_from_cubes(cube, cubes, coords_to_add) # Reverse entire cube along Z-axis so that index 0 is surface level # Note: This would automatically be fixed by the CMOR checker, but this # fails to fix the bounds of ap and b - cube = iris.util.reverse(cube, cube.coord(var_name='lev')) + cube = iris.util.reverse(cube, cube.coord(var_name="lev")) # Adapt metadata of coordinates - lev_coord = cube.coord(var_name='lev') - ap_coord = cube.coord(var_name='hyam') - b_coord = cube.coord(var_name='hybm') - ps_coord = cube.coord(var_name='aps_ave') - - lev_coord.var_name = 'lev' - lev_coord.standard_name = 'atmosphere_hybrid_sigma_pressure_coordinate' - lev_coord.long_name = 'hybrid sigma pressure coordinate' - lev_coord.units = '1' - lev_coord.attributes['positive'] = 'down' - - ap_coord.var_name = 'ap' + 
lev_coord = cube.coord(var_name="lev") + ap_coord = cube.coord(var_name="hyam") + b_coord = cube.coord(var_name="hybm") + ps_coord = cube.coord(var_name="aps_ave") + + lev_coord.var_name = "lev" + lev_coord.standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" + lev_coord.long_name = "hybrid sigma pressure coordinate" + lev_coord.units = "1" + lev_coord.attributes["positive"] = "down" + + ap_coord.var_name = "ap" ap_coord.standard_name = None - ap_coord.long_name = 'vertical coordinate formula term: ap(k)' + ap_coord.long_name = "vertical coordinate formula term: ap(k)" ap_coord.attributes = {} - b_coord.var_name = 'b' + b_coord.var_name = "b" b_coord.standard_name = None - b_coord.long_name = 'vertical coordinate formula term: b(k)' + b_coord.long_name = "vertical coordinate formula term: b(k)" b_coord.attributes = {} - ps_coord.var_name = 'ps' - ps_coord.standard_name = 'surface_air_pressure' - ps_coord.long_name = 'Surface Air Pressure' + ps_coord.var_name = "ps" + ps_coord.standard_name = "surface_air_pressure" + ps_coord.long_name = "Surface Air Pressure" ps_coord.attributes = {} # Add bounds for coefficients # (make sure to reverse cubes beforehand so index 0 is surface level) ap_bnds_cube = iris.util.reverse( - cubes.extract_cube(NameConstraint(var_name='hyai')), + cubes.extract_cube(NameConstraint(var_name="hyai")), 0, ) b_bnds_cube = iris.util.reverse( - cubes.extract_cube(NameConstraint(var_name='hybi')), + cubes.extract_cube(NameConstraint(var_name="hybi")), 0, ) ap_bounds = da.stack( @@ -173,17 +174,21 @@ def _fix_alevel(cube, cubes): # Convert arrays to float64 for coord in (ap_coord, b_coord, ps_coord): coord.points = coord.core_points().astype( - float, casting='same_kind') + float, casting="same_kind" + ) if coord.has_bounds(): coord.bounds = coord.core_bounds().astype( - float, casting='same_kind') + float, casting="same_kind" + ) # Fix values of lev coordinate # Note: lev = a + b with a = ap / p0 (p0 = 100000 Pa) - lev_coord.points = (ap_coord.core_points() / 100000.0 + - b_coord.core_points()) - lev_coord.bounds = (ap_coord.core_bounds() / 100000.0 + - b_coord.core_bounds()) + lev_coord.points = ( + ap_coord.core_points() / 100000.0 + b_coord.core_points() + ) + lev_coord.bounds = ( + ap_coord.core_bounds() / 100000.0 + b_coord.core_bounds() + ) # Add HybridPressureFactory pressure_coord_factory = HybridPressureFactory( @@ -201,13 +206,37 @@ class Clwvi(EmacFix): def fix_metadata(self, cubes): """Fix metadata.""" - cube = ( - self.get_cube(cubes, var_name=['xlvi_cav', 'xlvi_ave', - 'xlvi']) + - self.get_cube(cubes, var_name=['xivi_cav', 'xivi_ave', - 'xivi']) + cube = self.get_cube( + cubes, var_name=["xlvi_cav", "xlvi_ave", "xlvi"] + ) + self.get_cube(cubes, var_name=["xivi_cav", "xivi_ave", "xivi"]) + cube.var_name = self.vardef.short_name + return CubeList([cube]) + + +class Prodlnox(EmacFix): + """Fixes for ``prodlnox``.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + noxcg_cube = self.get_cube( + cubes, var_name=["NOxcg_cav", "NOxcg_ave", "NOxcg"] ) + noxic_cube = self.get_cube( + cubes, var_name=["NOxic_cav", "NOxic_ave", "NOxic"] + ) + dt_cube = self.get_cube(cubes, var_name="dt") + + cube = ( + noxcg_cube.collapsed( + ["longitude", "latitude"], iris.analysis.SUM, weights=None + ) + + noxic_cube.collapsed( + ["longitude", "latitude"], iris.analysis.SUM, weights=None + ) + ) / dt_cube + cube.units = "kg s-1" cube.var_name = self.vardef.short_name + return CubeList([cube]) @@ -227,7 +256,7 @@ def fix_metadata(self, cubes): """Fix metadata.""" 
cubes = super().fix_metadata(cubes) cube = self.get_cube(cubes) - z_coord = cube.coord(axis='Z') + z_coord = cube.coord(axis="Z") cube = cube.collapsed(z_coord, iris.analysis.SUM) return CubeList([cube]) @@ -237,12 +266,9 @@ class Pr(EmacFix): def fix_metadata(self, cubes): """Fix metadata.""" - cube = ( - self.get_cube(cubes, var_name=['aprl_cav', 'aprl_ave', - 'aprl']) + - self.get_cube(cubes, var_name=['aprc_cav', 'aprc_ave', - 'aprc']) - ) + cube = self.get_cube( + cubes, var_name=["aprl_cav", "aprl_ave", "aprl"] + ) + self.get_cube(cubes, var_name=["aprc_cav", "aprc_ave", "aprc"]) cube.var_name = self.vardef.short_name return CubeList([cube]) @@ -252,11 +278,10 @@ class Rlds(EmacFix): def fix_metadata(self, cubes): """Fix metadata.""" - cube = ( - self.get_cube(cubes, var_name=['flxtbot_cav', 'flxtbot_ave', - 'flxsbot']) - - self.get_cube(cubes, var_name=['tradsu_cav', 'tradsu_ave', - 'tradsu']) + cube = self.get_cube( + cubes, var_name=["flxtbot_cav", "flxtbot_ave", "flxsbot"] + ) - self.get_cube( + cubes, var_name=["tradsu_cav", "tradsu_ave", "tradsu"] ) cube.var_name = self.vardef.short_name return CubeList([cube]) @@ -276,11 +301,10 @@ class Rsds(EmacFix): def fix_metadata(self, cubes): """Fix metadata.""" - cube = ( - self.get_cube(cubes, var_name=['flxsbot_cav', 'flxsbot_ave', - 'flxsbot']) - - self.get_cube(cubes, var_name=['sradsu_cav', 'sradsu_ave', - 'sradsu']) + cube = self.get_cube( + cubes, var_name=["flxsbot_cav", "flxsbot_ave", "flxsbot"] + ) - self.get_cube( + cubes, var_name=["sradsu_cav", "sradsu_ave", "sradsu"] ) cube.var_name = self.vardef.short_name return CubeList([cube]) @@ -291,11 +315,10 @@ class Rsdt(EmacFix): def fix_metadata(self, cubes): """Fix metadata.""" - cube = ( - self.get_cube(cubes, var_name=['flxstop_cav', 'flxstop_ave', - 'flxstop']) - - self.get_cube(cubes, var_name=['srad0u_cav', 'srad0u_ave', - 'srad0u']) + cube = self.get_cube( + cubes, var_name=["flxstop_cav", "flxstop_ave", "flxstop"] + ) - self.get_cube( + cubes, var_name=["srad0u_cav", "srad0u_ave", "srad0u"] ) cube.var_name = self.vardef.short_name return CubeList([cube]) @@ -315,11 +338,10 @@ class Rtmt(EmacFix): def fix_metadata(self, cubes): """Fix metadata.""" - cube = ( - self.get_cube(cubes, var_name=['flxttop_cav', 'flxttop_ave', - 'flxttop']) + - self.get_cube(cubes, var_name=['flxstop_cav', 'flxstop_ave', - 'flxstop']) + cube = self.get_cube( + cubes, var_name=["flxttop_cav", "flxttop_ave", "flxttop"] + ) + self.get_cube( + cubes, var_name=["flxstop_cav", "flxstop_ave", "flxstop"] ) cube.var_name = self.vardef.short_name return CubeList([cube]) @@ -343,7 +365,7 @@ def fix_metadata(self, cubes): # Note: 1 mm = 100 DU cube = self.get_cube(cubes) cube.data = cube.core_data() / 100.0 - cube.units = 'mm' + cube.units = "mm" return CubeList([cube]) @@ -357,8 +379,8 @@ def fix_metadata(self, cubes): Z = Phi / g0 (g0 is standard acceleration of gravity) """ - g0_value = constants.value('standard acceleration of gravity') - g0_units = constants.unit('standard acceleration of gravity') + g0_value = constants.value("standard acceleration of gravity") + g0_units = constants.unit("standard acceleration of gravity") cube = self.get_cube(cubes) cube.data = cube.core_data() / g0_value @@ -376,14 +398,18 @@ class MP_BC_tot(EmacFix): # noqa: N801 def fix_metadata(self, cubes): """Fix metadata.""" cube = ( - self.get_cube(cubes, var_name=['MP_BC_ki_cav', 'MP_BC_ki_ave', - 'MP_BC_ki']) + - self.get_cube(cubes, var_name=['MP_BC_ks_cav', 'MP_BC_ks_ave', - 'MP_BC_ks']) + - self.get_cube(cubes, 
var_name=['MP_BC_as_cav', 'MP_BC_as_ave', - 'MP_BC_as']) + - self.get_cube(cubes, var_name=['MP_BC_cs_cav', 'MP_BC_cs_ave', - 'MP_BC_cs']) + self.get_cube( + cubes, var_name=["MP_BC_ki_cav", "MP_BC_ki_ave", "MP_BC_ki"] + ) + + self.get_cube( + cubes, var_name=["MP_BC_ks_cav", "MP_BC_ks_ave", "MP_BC_ks"] + ) + + self.get_cube( + cubes, var_name=["MP_BC_as_cav", "MP_BC_as_ave", "MP_BC_as"] + ) + + self.get_cube( + cubes, var_name=["MP_BC_cs_cav", "MP_BC_cs_ave", "MP_BC_cs"] + ) ) cube.var_name = self.vardef.short_name return CubeList([cube]) @@ -395,14 +421,18 @@ class MP_DU_tot(EmacFix): # noqa: N801 def fix_metadata(self, cubes): """Fix metadata.""" cube = ( - self.get_cube(cubes, var_name=['MP_DU_ai_cav', 'MP_DU_ai_ave', - 'MP_DU_ai']) + - self.get_cube(cubes, var_name=['MP_DU_as_cav', 'MP_DU_as_ave', - 'MP_DU_as']) + - self.get_cube(cubes, var_name=['MP_DU_ci_cav', 'MP_DU_ci_ave', - 'MP_DU_ci']) + - self.get_cube(cubes, var_name=['MP_DU_cs_cav', 'MP_DU_cs_ave', - 'MP_DU_cs']) + self.get_cube( + cubes, var_name=["MP_DU_ai_cav", "MP_DU_ai_ave", "MP_DU_ai"] + ) + + self.get_cube( + cubes, var_name=["MP_DU_as_cav", "MP_DU_as_ave", "MP_DU_as"] + ) + + self.get_cube( + cubes, var_name=["MP_DU_ci_cav", "MP_DU_ci_ave", "MP_DU_ci"] + ) + + self.get_cube( + cubes, var_name=["MP_DU_cs_cav", "MP_DU_cs_ave", "MP_DU_cs"] + ) ) cube.var_name = self.vardef.short_name return CubeList([cube]) @@ -415,17 +445,21 @@ def fix_metadata(self, cubes): """Fix metadata.""" cube = ( self.get_cube( - cubes, var_name=['MP_SO4mm_ns_cav', 'MP_SO4mm_ns_ave', - 'MP_SO4mm_ns']) + - self.get_cube( - cubes, var_name=['MP_SO4mm_ks_cav', 'MP_SO4mm_ks_ave', - 'MP_SO4mm_ks']) + - self.get_cube( - cubes, var_name=['MP_SO4mm_as_cav', 'MP_SO4mm_as_ave', - 'MP_SO4mm_as']) + - self.get_cube( - cubes, var_name=['MP_SO4mm_cs_cav', 'MP_SO4mm_cs_ave', - 'MP_SO4mm_cs']) + cubes, + var_name=["MP_SO4mm_ns_cav", "MP_SO4mm_ns_ave", "MP_SO4mm_ns"], + ) + + self.get_cube( + cubes, + var_name=["MP_SO4mm_ks_cav", "MP_SO4mm_ks_ave", "MP_SO4mm_ks"], + ) + + self.get_cube( + cubes, + var_name=["MP_SO4mm_as_cav", "MP_SO4mm_as_ave", "MP_SO4mm_as"], + ) + + self.get_cube( + cubes, + var_name=["MP_SO4mm_cs_cav", "MP_SO4mm_cs_ave", "MP_SO4mm_cs"], + ) ) cube.var_name = self.vardef.short_name return CubeList([cube]) @@ -437,12 +471,15 @@ class MP_SS_tot(EmacFix): # noqa: N801 def fix_metadata(self, cubes): """Fix metadata.""" cube = ( - self.get_cube(cubes, var_name=['MP_SS_ks_cav', 'MP_SS_ks_ave', - 'MP_SS_ks']) + - self.get_cube(cubes, var_name=['MP_SS_as_cav', 'MP_SS_as_ave', - 'MP_SS_as']) + - self.get_cube(cubes, var_name=['MP_SS_cs_cav', 'MP_SS_cs_ave', - 'MP_SS_cs']) + self.get_cube( + cubes, var_name=["MP_SS_ks_cav", "MP_SS_ks_ave", "MP_SS_ks"] + ) + + self.get_cube( + cubes, var_name=["MP_SS_as_cav", "MP_SS_as_ave", "MP_SS_as"] + ) + + self.get_cube( + cubes, var_name=["MP_SS_cs_cav", "MP_SS_cs_ave", "MP_SS_cs"] + ) ) cube.var_name = self.vardef.short_name return CubeList([cube]) diff --git a/esmvalcore/cmor/_fixes/fix.py b/esmvalcore/cmor/_fixes/fix.py index cf2aed42ec..5aa41f6486 100644 --- a/esmvalcore/cmor/_fixes/fix.py +++ b/esmvalcore/cmor/_fixes/fix.py @@ -1,4 +1,5 @@ """Contains the base class for dataset fixes.""" + from __future__ import annotations import importlib @@ -33,7 +34,7 @@ from esmvalcore.config import Session logger = logging.getLogger(__name__) -generic_fix_logger = logging.getLogger(f'{__name__}.genericfix') +generic_fix_logger = logging.getLogger(f"{__name__}.genericfix") class Fix: @@ -235,9 +236,9 @@ def get_fixes( """ 
vardef = get_var_info(project, mip, short_name) - project = project.replace('-', '_').lower() - dataset = dataset.replace('-', '_').lower() - short_name = short_name.replace('-', '_').lower() + project = project.replace("-", "_").lower() + dataset = dataset.replace("-", "_").lower() + short_name = short_name.replace("-", "_").lower() if extra_facets is None: extra_facets = {} @@ -245,30 +246,40 @@ def get_fixes( fixes = [] fixes_modules = [] - if project == 'cordex': - driver = extra_facets['driver'].replace('-', '_').lower() - extra_facets['dataset'] = dataset + if project == "cordex": + driver = extra_facets["driver"].replace("-", "_").lower() + extra_facets["dataset"] = dataset try: - fixes_modules.append(importlib.import_module( - f'esmvalcore.cmor._fixes.{project}.{driver}.{dataset}' - )) + fixes_modules.append( + importlib.import_module( + f"esmvalcore.cmor._fixes.{project}.{driver}.{dataset}" + ) + ) except ImportError: pass - fixes_modules.append(importlib.import_module( - 'esmvalcore.cmor._fixes.cordex.cordex_fixes')) + fixes_modules.append( + importlib.import_module( + "esmvalcore.cmor._fixes.cordex.cordex_fixes" + ) + ) else: try: - fixes_modules.append(importlib.import_module( - f'esmvalcore.cmor._fixes.{project}.{dataset}')) + fixes_modules.append( + importlib.import_module( + f"esmvalcore.cmor._fixes.{project}.{dataset}" + ) + ) except ImportError: pass for fixes_module in fixes_modules: classes = dict( - (name.lower(), value) for (name, value) in - inspect.getmembers(fixes_module, inspect.isclass) + (name.lower(), value) + for (name, value) in inspect.getmembers( + fixes_module, inspect.isclass + ) ) - for fix_name in (short_name, mip.lower(), 'allvars'): + for fix_name in (short_name, mip.lower(), "allvars"): if fix_name in classes: fixes.append( classes[fix_name]( @@ -344,7 +355,7 @@ def fix_metadata(self, cubes: Sequence[Cube]) -> CubeList: """ # Make sure the this fix also works when no extra_facets are given - if 'project' in self.extra_facets and 'dataset' in self.extra_facets: + if "project" in self.extra_facets and "dataset" in self.extra_facets: dataset_str = ( f"{self.extra_facets['project']}:" f"{self.extra_facets['dataset']}" @@ -391,7 +402,7 @@ def fix_data(self, cube: Cube) -> Cube: @staticmethod def _msg_suffix(cube: Cube) -> str: """Get prefix for log messages.""" - if 'source_file' in cube.attributes: + if "source_file" in cube.attributes: return f"\n(for file {cube.attributes['source_file']})" return f"\n(for variable {cube.var_name})" @@ -430,8 +441,8 @@ def _reverse_coord(self, cube: Cube, coord: Coord) -> tuple[Cube, Coord]: def _get_effective_units(self) -> str: """Get effective units.""" - if self.vardef.units.lower() == 'psu': - return '1' + if self.vardef.units.lower() == "psu": + return "1" return self.vardef.units def _fix_units(self, cube: Cube) -> Cube: @@ -497,9 +508,9 @@ def _fix_long_name(self, cube: Cube) -> Cube: def _fix_psu_units(self, cube: Cube) -> Cube: """Fix psu units.""" - if cube.attributes.get('invalid_units', '').lower() == 'psu': - cube.units = '1' - cube.attributes.pop('invalid_units') + if cube.attributes.get("invalid_units", "").lower() == "psu": + cube.units = "1" + cube.attributes.pop("invalid_units") self._debug_msg(cube, "Units converted from 'psu' to '1'") return cube @@ -521,7 +532,7 @@ def _fix_alternative_generic_level_coords(self, cube: Cube) -> Cube: """Fix alternative generic level coordinates.""" # Avoid overriding existing variable information cmor_var_coordinates = self.vardef.coordinates.copy() - for 
(coord_name, cmor_coord) in cmor_var_coordinates.items(): + for coord_name, cmor_coord in cmor_var_coordinates.items(): if not cmor_coord.generic_level: continue # Ignore non-generic-level coordinates if not cmor_coord.generic_lev_coords: @@ -557,9 +568,10 @@ def _fix_alternative_generic_level_coords(self, cube: Cube) -> Cube: # Search for alternative coordinates (i.e., regular level # coordinates); if none found, do nothing try: - (alternative_coord, - cube_coord) = _get_alternative_generic_lev_coord( - cube, coord_name, self.vardef.table_type + (alternative_coord, cube_coord) = ( + _get_alternative_generic_lev_coord( + cube, coord_name, self.vardef.table_type + ) ) except ValueError: # no alternatives found continue @@ -578,11 +590,13 @@ def _fix_cmip6_multidim_lat_lon_coord( cube_coord: Coord, ) -> None: """Fix CMIP6 multidimensional latitude and longitude coordinates.""" - is_cmip6_multidim_lat_lon = all([ - 'CMIP6' in self.vardef.table_type, - cube_coord.ndim > 1, - cube_coord.standard_name in ('latitude', 'longitude'), - ]) + is_cmip6_multidim_lat_lon = all( + [ + "CMIP6" in self.vardef.table_type, + cube_coord.ndim > 1, + cube_coord.standard_name in ("latitude", "longitude"), + ] + ) if is_cmip6_multidim_lat_lon: self._debug_msg( cube, @@ -674,7 +688,7 @@ def _fix_longitude_0_360( cube_coord: Coord, ) -> tuple[Cube, Coord]: """Fix longitude coordinate to be in [0, 360].""" - if not cube_coord.standard_name == 'longitude': + if not cube_coord.standard_name == "longitude": return (cube, cube_coord) points = cube_coord.core_points() @@ -696,7 +710,7 @@ def _fix_longitude_0_360( # nbounds>2 implies an irregular grid with bounds given as vertices # of the cell polygon. if cube_coord.ndim == 1 and cube_coord.nbounds in (0, 2): - lon_extent = CoordExtent(cube_coord, 0.0, 360., True, False) + lon_extent = CoordExtent(cube_coord, 0.0, 360.0, True, False) cube = cube.intersection(lon_extent) else: new_lons = cube_coord.core_points().copy() @@ -724,12 +738,14 @@ def _fix_coord_bounds( cube_coord: Coord, ) -> None: """Fix coordinate bounds.""" - if cmor_coord.must_have_bounds != 'yes' or cube_coord.has_bounds(): + if cmor_coord.must_have_bounds != "yes" or cube_coord.has_bounds(): return # Skip guessing bounds for unstructured grids if has_unstructured_grid(cube) and cube_coord.standard_name in ( - 'latitude', 'longitude'): + "latitude", + "longitude", + ): self._debug_msg( cube, "Will not guess bounds for coordinate %s of unstructured grid", @@ -762,10 +778,11 @@ def _fix_coord_direction( # Skip fix for a variety of reasons if cube_coord.ndim > 1: return (cube, cube_coord) - if cube_coord.dtype.kind == 'U': + if cube_coord.dtype.kind == "U": return (cube, cube_coord) if has_unstructured_grid(cube) and cube_coord.standard_name in ( - 'latitude', 'longitude' + "latitude", + "longitude", ): return (cube, cube_coord) if len(cube_coord.core_points()) == 1: @@ -774,10 +791,10 @@ def _fix_coord_direction( return (cube, cube_coord) # Fix coordinates with wrong direction - if cmor_coord.stored_direction == 'increasing': + if cmor_coord.stored_direction == "increasing": if cube_coord.core_points()[0] > cube_coord.core_points()[1]: (cube, cube_coord) = self._reverse_coord(cube, cube_coord) - elif cmor_coord.stored_direction == 'decreasing': + elif cmor_coord.stored_direction == "decreasing": if cube_coord.core_points()[0] < cube_coord.core_points()[1]: (cube, cube_coord) = self._reverse_coord(cube, cube_coord) @@ -789,7 +806,7 @@ def _fix_time_units(self, cube: Cube, cube_coord: Coord) -> None: old_units 
= cube_coord.units cube_coord.convert_units( Unit( - 'days since 1850-1-1 00:00:00', + "days since 1850-1-1 00:00:00", calendar=cube_coord.units.calendar, ) ) @@ -800,9 +817,9 @@ def _fix_time_units(self, cube: Cube, cube_coord: Coord) -> None: # Fix units of time-related cube attributes attrs = cube.attributes - parent_time = 'parent_time_units' + parent_time = "parent_time_units" if parent_time in attrs: - if attrs[parent_time] in 'no parent': + if attrs[parent_time] in "no parent": pass else: try: @@ -810,24 +827,26 @@ def _fix_time_units(self, cube: Cube, cube_coord: Coord) -> None: except ValueError: pass else: - attrs[parent_time] = 'days since 1850-1-1 00:00:00' + attrs[parent_time] = "days since 1850-1-1 00:00:00" - branch_parent = 'branch_time_in_parent' + branch_parent = "branch_time_in_parent" if branch_parent in attrs: attrs[branch_parent] = parent_units.convert( - attrs[branch_parent], cube_coord.units) + attrs[branch_parent], cube_coord.units + ) - branch_child = 'branch_time_in_child' + branch_child = "branch_time_in_child" if branch_child in attrs: attrs[branch_child] = old_units.convert( - attrs[branch_child], cube_coord.units) + attrs[branch_child], cube_coord.units + ) def _fix_time_bounds(self, cube: Cube, cube_coord: Coord) -> None: """Fix time bounds.""" - times = {'time', 'time1', 'time2', 'time3'} + times = {"time", "time1", "time2", "time3"} key = times.intersection(self.vardef.coordinates) - cmor = self.vardef.coordinates[' '.join(key)] - if cmor.must_have_bounds == 'yes' and not cube_coord.has_bounds(): + cmor = self.vardef.coordinates[" ".join(key)] + if cmor.must_have_bounds == "yes" and not cube_coord.has_bounds(): cube_coord.bounds = get_time_bounds(cube_coord, self.frequency) self._warning_msg( cube, @@ -838,10 +857,10 @@ def _fix_time_bounds(self, cube: Cube, cube_coord: Coord) -> None: def _fix_time_coord(self, cube: Cube) -> Cube: """Fix time coordinate.""" # Make sure to get dimensional time coordinate if possible - if cube.coords('time', dim_coords=True): - cube_coord = cube.coord('time', dim_coords=True) - elif cube.coords('time'): - cube_coord = cube.coord('time') + if cube.coords("time", dim_coords=True): + cube_coord = cube.coord("time", dim_coords=True) + elif cube.coords("time"): + cube_coord = cube.coord("time") else: return cube @@ -853,7 +872,7 @@ def _fix_time_coord(self, cube: Cube) -> Cube: self._fix_time_units(cube, cube_coord) # Remove time_origin from coordinate attributes - cube_coord.attributes.pop('time_origin', None) + cube_coord.attributes.pop("time_origin", None) # Fix time bounds self._fix_time_bounds(cube, cube_coord) @@ -881,7 +900,6 @@ def _fix_coord( def _fix_coords(self, cube: Cube) -> Cube: """Fix non-time coordinates.""" for cmor_coord in self.vardef.coordinates.values(): - # Cannot fix generic level coords with no unique CMOR information if cmor_coord.generic_level and not cmor_coord.out_name: continue @@ -892,7 +910,7 @@ def _fix_coords(self, cube: Cube) -> Cube: cube_coord = cube.coord(var_name=cmor_coord.out_name) # Fixes for time coord are done separately - if cube_coord.var_name == 'time': + if cube_coord.var_name == "time": continue # Fixes diff --git a/esmvalcore/cmor/_fixes/icon/_base_fixes.py b/esmvalcore/cmor/_fixes/icon/_base_fixes.py index 9c242ef73d..be77c9d6c8 100644 --- a/esmvalcore/cmor/_fixes/icon/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/icon/_base_fixes.py @@ -1,4 +1,5 @@ """Fix base classes for ICON on-the-fly CMORizer.""" + from __future__ import annotations import logging @@ -16,7 +17,7 @@ import 
requests
from iris import NameConstraint
from iris.cube import Cube, CubeList
-from iris.experimental.ugrid import Connectivity, Mesh
+from iris.mesh import Connectivity, MeshXY

from esmvalcore.cmor._fixes.native_datasets import NativeDatasetFix
from esmvalcore.local import _get_data_sources
@@ -27,10 +28,10 @@
class IconFix(NativeDatasetFix):
    """Base class for all ICON fixes."""

-    CACHE_DIR = Path.home() / '.esmvaltool' / 'cache'
+    CACHE_DIR = Path.home() / ".esmvaltool" / "cache"
    CACHE_VALIDITY = 7 * 24 * 60 * 60  # [s]; = 1 week
    TIMEOUT = 5 * 60  # [s]; = 5 min
-    GRID_FILE_ATTR = 'grid_file_uri'
+    GRID_FILE_ATTR = "grid_file_uri"

    def __init__(self, *args, **kwargs):
        """Initialize ICON fix."""
@@ -38,24 +39,24 @@ def __init__(self, *args, **kwargs):
        self._horizontal_grids = {}
        self._meshes = {}

-    def _create_mesh(self, cube):
+    def _create_mesh(self, cube: Cube) -> MeshXY:
        """Create mesh from horizontal grid file.

        Note
        ----
-        This functions creates a new :class:`iris.experimental.ugrid.Mesh` from
-        the ``clat`` (already present in the cube), ``clon`` (already present
-        in the cube), ``vertex_index``, ``vertex_of_cell``, ``vlat``, and
-        ``vlon`` variables of the horizontal grid file.
-
-        We do not use :func:`iris.experimental.ugrid.Mesh.from_coords` with the
-        existing latitude and longitude coordinates here because this would
-        produce lots of duplicated entries for the node coordinates. The reason
-        for this is that the node coordinates are constructed from the bounds;
-        since each node is contained 6 times in the bounds array (each node is
-        shared by 6 neighboring cells) the number of nodes is 6 times higher
-        with :func:`iris.experimental.ugrid.Mesh.from_coords` compared to using
-        the information already present in the horizontal grid file.
+        This function creates a new :class:`iris.mesh.MeshXY` from the
+        ``clat`` (already present in the cube), ``clon`` (already present in
+        the cube), ``vertex_index``, ``vertex_of_cell``, ``vlat``, and ``vlon``
+        variables of the horizontal grid file.
+
+        We do not use :func:`iris.mesh.MeshXY.from_coords` with the existing
+        latitude and longitude coordinates here because this would produce lots
+        of duplicated entries for the node coordinates. The reason for this is
+        that the node coordinates are constructed from the bounds; since each
+        node is contained 6 times in the bounds array (each node is shared by 6
+        neighboring cells) the number of nodes is 6 times higher with
+        :func:`iris.mesh.MeshXY.from_coords` compared to using the information
+        already present in the horizontal grid file.
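+
+        For illustration (cell counts assumed here, roughly matching an
+        ICON R2B04 grid): with ~20480 triangular cells and ~10242 unique
+        vertices, the grid file provides ~10242 mesh nodes, whereas
+        :func:`iris.mesh.MeshXY.from_coords` would create one node entry
+        per cell vertex, i.e. 3 * 20480 = 61440 entries.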
""" horizontal_grid = self.get_horizontal_grid(cube) @@ -65,7 +66,8 @@ def _create_mesh(self, cube): # 'vertex_of_cell'; since UGRID expects a different dimension ordering # we transpose the cube here) vertex_of_cell = horizontal_grid.extract_cube( - NameConstraint(var_name='vertex_of_cell')) + NameConstraint(var_name="vertex_of_cell") + ) vertex_of_cell.transpose() # Extract start index used to name nodes from the the horizontal grid @@ -74,8 +76,8 @@ def _create_mesh(self, cube): # Extract face coordinates from cube (in ICON jargon called 'cell # latitude' and 'cell longitude') - face_lat = cube.coord('latitude') - face_lon = cube.coord('longitude') + face_lat = cube.coord("latitude") + face_lon = cube.coord("longitude") # Extract node coordinates from horizontal grid (node_lat, node_lon) = self._get_node_coords(horizontal_grid) @@ -87,11 +89,11 @@ def _create_mesh(self, cube): # Latitude: there might be slight numerical differences (-> check that # the differences are very small before fixing it) - close_kwargs = {'rtol': 1e-3, 'atol': 1e-5} + close_kwargs = {"rtol": 1e-3, "atol": 1e-5} if not np.allclose( - face_lat.bounds, - node_lat.points[conn_node_inds], - **close_kwargs, + face_lat.bounds, + node_lat.points[conn_node_inds], + **close_kwargs, # type: ignore ): logger.warning( "Latitude bounds of the face coordinate ('clat_vertices' in " @@ -108,7 +110,7 @@ def _create_mesh(self, cube): # differ by 360°, which is also okay. face_lon_bounds_to_check = face_lon.bounds % 360 node_lon_conn_to_check = node_lon.points[conn_node_inds] % 360 - idx_notclose = ~np.isclose( + idx_notclose = ~np.isclose( # type: ignore face_lon_bounds_to_check, node_lon_conn_to_check, **close_kwargs, @@ -127,15 +129,15 @@ def _create_mesh(self, cube): # Create mesh connectivity = Connectivity( indices=vertex_of_cell.data, - cf_role='face_node_connectivity', + cf_role="face_node_connectivity", start_index=start_index, location_axis=0, ) - mesh = Mesh( + mesh = MeshXY( topology_dimension=2, - node_coords_and_axes=[(node_lat, 'y'), (node_lon, 'x')], + node_coords_and_axes=[(node_lat, "y"), (node_lon, "x")], connectivities=[connectivity], - face_coords_and_axes=[(face_lat, 'y'), (face_lon, 'x')], + face_coords_and_axes=[(face_lat, "y"), (face_lon, "x")], ) return mesh @@ -146,7 +148,8 @@ def _get_grid_url(self, cube): raise ValueError( f"Cube does not contain the attribute '{self.GRID_FILE_ATTR}' " f"necessary to download the ICON horizontal grid file:\n" - f"{cube}") + f"{cube}" + ) grid_url = cube.attributes[self.GRID_FILE_ATTR] parsed_url = urlparse(grid_url) grid_name = Path(parsed_url.path).name @@ -162,21 +165,22 @@ def _get_node_coords(self, horizontal_grid): """ dual_area_cube = horizontal_grid.extract_cube( - NameConstraint(var_name='dual_area')) - node_lat = dual_area_cube.coord(var_name='vlat') - node_lon = dual_area_cube.coord(var_name='vlon') + NameConstraint(var_name="dual_area") + ) + node_lat = dual_area_cube.coord(var_name="vlat") + node_lon = dual_area_cube.coord(var_name="vlon") # Fix metadata node_lat.bounds = None node_lon.bounds = None - node_lat.var_name = 'nlat' - node_lon.var_name = 'nlon' - node_lat.standard_name = 'latitude' - node_lon.standard_name = 'longitude' - node_lat.long_name = 'node latitude' - node_lon.long_name = 'node longitude' - node_lat.convert_units('degrees_north') - node_lon.convert_units('degrees_east') + node_lat.var_name = "nlat" + node_lon.var_name = "nlon" + node_lat.standard_name = "latitude" + node_lon.standard_name = "longitude" + node_lat.long_name = "node 
latitude" + node_lon.long_name = "node longitude" + node_lat.convert_units("degrees_north") + node_lon.convert_units("degrees_east") # Convert longitude to [0, 360] self._set_range_in_0_360(node_lon) @@ -186,10 +190,10 @@ def _get_node_coords(self, horizontal_grid): def _get_path_from_facet(self, facet, description=None): """Try to get path from facet.""" if description is None: - description = 'File' + description = "File" path = Path(os.path.expandvars(self.extra_facets[facet])).expanduser() if not path.is_file(): - new_path = self.session['auxiliary_data_dir'] / path + new_path = self.session["auxiliary_data_dir"] / path if not new_path.is_file(): raise FileNotFoundError( f"{description} '{path}' given by facet '{facet}' does " @@ -239,8 +243,8 @@ def add_additional_cubes(self, cubes): """ facets_to_consider = [ - 'zg_file', - 'zghalf_file', + "zg_file", + "zghalf_file", ] for facet in facets_to_consider: if self.extra_facets.get(facet) is None: @@ -255,7 +259,7 @@ def add_additional_cubes(self, cubes): def _get_grid_from_facet(self): """Get horizontal grid from user-defined facet `horizontal_grid`.""" grid_path = self._get_path_from_facet( - 'horizontal_grid', 'Horizontal grid file' + "horizontal_grid", "Horizontal grid file" ) grid_name = grid_path.name @@ -298,7 +302,7 @@ def _get_grid_from_cube_attr(self, cube: Cube) -> Cube: def _get_grid_from_rootpath(self, grid_name: str) -> CubeList | None: """Try to get grid from the ICON rootpath.""" glob_patterns: list[Path] = [] - for data_source in _get_data_sources('ICON'): + for data_source in _get_data_sources("ICON"): glob_patterns.extend( data_source.get_glob_patterns(**self.extra_facets) ) @@ -335,8 +339,10 @@ def _get_downloaded_grid(self, grid_url: str, grid_name: str) -> CubeList: logger.debug("Using cached ICON grid file '%s'", grid_path) valid_cache = True else: - logger.debug("Existing cached ICON grid file '%s' is outdated", - grid_path) + logger.debug( + "Existing cached ICON grid file '%s' is outdated", + grid_path, + ) # File is not present in cache or too old -> download it if not valid_cache: @@ -348,12 +354,12 @@ def _get_downloaded_grid(self, grid_url: str, grid_name: str) -> CubeList: tmp_path, ) with requests.get( - grid_url, - stream=True, - timeout=self.TIMEOUT, + grid_url, + stream=True, + timeout=self.TIMEOUT, ) as response: response.raise_for_status() - with tmp_path.open('wb') as file: + with tmp_path.open("wb") as file: copyfileobj(response.raw, file) shutil.move(tmp_path, grid_path) logger.info( @@ -404,7 +410,7 @@ def get_horizontal_grid(self, cube): file. """ - if self.extra_facets.get('horizontal_grid') is not None: + if self.extra_facets.get("horizontal_grid") is not None: grid = self._get_grid_from_facet() else: grid = self._get_grid_from_cube_attr(cube) @@ -429,8 +435,8 @@ def get_mesh(self, cube): Returns ------- - iris.experimental.ugrid.Mesh - Mesh. + iris.mesh.MeshXY + Mesh of the cube. 
Raises ------ @@ -445,9 +451,9 @@ def get_mesh(self, cube): """ # If specified by the user, use `horizontal_grid` facet to determine # grid name; otherwise, use the `grid_file_uri` attribute of the cube - if self.extra_facets.get('horizontal_grid') is not None: + if self.extra_facets.get("horizontal_grid") is not None: grid_path = self._get_path_from_facet( - 'horizontal_grid', 'Horizontal grid file' + "horizontal_grid", "Horizontal grid file" ) grid_name = grid_path.name else: @@ -475,7 +481,8 @@ def _get_start_index(horizontal_grid): """ vertex_index = horizontal_grid.extract_cube( - NameConstraint(var_name='vertex_index')) + NameConstraint(var_name="vertex_index") + ) return np.int32(np.min(vertex_index.data)) @staticmethod @@ -483,24 +490,24 @@ def _load_cubes(path: Path | str) -> CubeList: """Load cubes and ignore certain warnings.""" with warnings.catch_warnings(): warnings.filterwarnings( - 'ignore', + "ignore", message="Ignoring netCDF variable .* invalid units .*", category=UserWarning, - module='iris', + module="iris", ) # iris < 3.8 warnings.filterwarnings( - 'ignore', + "ignore", message="Ignoring invalid units .* on netCDF variable .*", category=UserWarning, - module='iris', + module="iris", ) # iris >= 3.8 warnings.filterwarnings( - 'ignore', + "ignore", message="Failed to create 'height' dimension coordinate: The " - "'height' DimCoord bounds array must be strictly " - "monotonic.", + "'height' DimCoord bounds array must be strictly " + "monotonic.", category=UserWarning, - module='iris', + module="iris", ) cubes = iris.load(path) return cubes diff --git a/esmvalcore/cmor/_fixes/icon/icon.py b/esmvalcore/cmor/_fixes/icon/icon.py index 707a47f20c..c5792019c8 100644 --- a/esmvalcore/cmor/_fixes/icon/icon.py +++ b/esmvalcore/cmor/_fixes/icon/icon.py @@ -30,38 +30,39 @@ def fix_metadata(self, cubes): cube = self.get_cube(cubes) # Fix time - if self.vardef.has_coord_with_standard_name('time'): + if self.vardef.has_coord_with_standard_name("time"): cube = self._fix_time(cube, cubes) # Fix height (note: cannot use "if 'height' in self.vardef.dimensions" # here since the name of the z-coord varies from variable to variable) - if cube.coords('height'): + if cube.coords("height"): # In case a scalar height is required, remove it here (it is added # at a later stage). The step _fix_height() is designed to fix # non-scalar height coordinates. - if (cube.coord('height').shape[0] == 1 and ( - 'height2m' in self.vardef.dimensions or - 'height10m' in self.vardef.dimensions)): + if cube.coord("height").shape[0] == 1 and ( + "height2m" in self.vardef.dimensions + or "height10m" in self.vardef.dimensions + ): # If height is a dimensional coordinate with length 1, squeeze # the cube. # Note: iris.util.squeeze is not used here since it might # accidentally squeeze other dimensions. 
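+                # Illustrative example (dimensions assumed): for a cube
+                # with dims (time, height, cell) and a height of length 1,
+                # `slices` becomes [slice(None), 0, slice(None)], so
+                # cube[tuple(slices)] drops only the height dimension.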
-            if cube.coords('height', dim_coords=True):
+            if cube.coords("height", dim_coords=True):
                slices = [slice(None)] * cube.ndim
-                slices[cube.coord_dims('height')[0]] = 0
+                slices[cube.coord_dims("height")[0]] = 0
                cube = cube[tuple(slices)]
-            cube.remove_coord('height')
+            cube.remove_coord("height")
        else:
            cube = self._fix_height(cube, cubes)

        # Fix latitude
-        if self.vardef.has_coord_with_standard_name('latitude'):
+        if self.vardef.has_coord_with_standard_name("latitude"):
            lat_idx = self._fix_lat(cube)
        else:
            lat_idx = None

        # Fix longitude
-        if self.vardef.has_coord_with_standard_name('longitude'):
+        if self.vardef.has_coord_with_standard_name("longitude"):
            lon_idx = self._fix_lon(cube)
        else:
            lon_idx = None
@@ -105,13 +106,14 @@ def _add_coord_from_grid_file(self, cube, coord_name):
        # The following dict maps from desired coordinate name in output file
        # (dict keys) to coordinate name in grid file (dict values)
        coord_names_mapping = {
-            'latitude': 'grid_latitude',
-            'longitude': 'grid_longitude',
+            "latitude": "grid_latitude",
+            "longitude": "grid_longitude",
        }
        if coord_name not in coord_names_mapping:
            raise ValueError(
                f"coord_name must be one of {list(coord_names_mapping)}, got "
-                f"'{coord_name}'")
+                f"'{coord_name}'"
+            )
        coord_name_in_grid = coord_names_mapping[coord_name]

        # Use 'cell_area' as dummy cube to extract desired coordinates
@@ -119,7 +121,8 @@
        # supported
        horizontal_grid = self.get_horizontal_grid(cube)
        grid_cube = horizontal_grid.extract_cube(
-            NameConstraint(var_name='cell_area'))
+            NameConstraint(var_name="cell_area")
+        )
        coord = grid_cube.coord(coord_name_in_grid)

        # Find index of mesh dimension (= single unnamed dimension)
@@ -128,7 +131,8 @@
            raise ValueError(
                f"Cannot determine coordinate dimension for coordinate "
                f"'{coord_name}', cube does not contain a single unnamed "
-                f"dimension:\n{cube}")
+                f"dimension:\n{cube}"
+            )
        coord_dims = ()
        for idx in range(cube.ndim):
            if not cube.coords(dimensions=idx, dim_coords=True):
@@ -145,21 +149,22 @@ def _add_time(self, cube, cubes):
        """Add time coordinate from other cube in cubes."""
        # Try to find time cube from other cubes and add it to target cube
        for other_cube in cubes:
-            if not other_cube.coords('time'):
+            if not other_cube.coords("time"):
                continue
-            time_coord = other_cube.coord('time')
+            time_coord = other_cube.coord("time")
            cube = add_leading_dim_to_cube(cube, time_coord)
            return cube
        raise ValueError(
            f"Cannot add required coordinate 'time' to variable "
            f"'{self.vardef.short_name}', cube and other cubes in file do not "
-            f"contain it")
+            f"contain it"
+        )

    def _get_z_coord(self, cubes, points_name, bounds_name=None):
        """Get z-coordinate without metadata (reversed)."""
        points_cube = iris.util.reverse(
            cubes.extract_cube(NameConstraint(var_name=points_name)),
-            'height',
+            "height",
        )
        points = points_cube.core_data()
@@ -167,7 +172,7 @@
        if bounds_name is not None:
            bounds_cube = iris.util.reverse(
                cubes.extract_cube(NameConstraint(var_name=bounds_name)),
-                'height',
+                "height",
            )
            bounds = bounds_cube.core_data()
            bounds = da.stack(
@@ -187,29 +192,29 @@ def _fix_height(self, cube, cubes):
        """Fix height coordinate of cube."""
        # Reverse entire cube along height axis so that index 0 is surface
        # level
-        cube = iris.util.reverse(cube, 'height')
+        cube = iris.util.reverse(cube, "height")

        # If possible, extract reversed air_pressure coordinate from list of
        # cubes
and add it to cube # Note: pfull/phalf have dimensions (time, height, spatial_dim) - if cubes.extract(NameConstraint(var_name='pfull')): - if cubes.extract(NameConstraint(var_name='phalf')): - phalf = 'phalf' + if cubes.extract(NameConstraint(var_name="pfull")): + if cubes.extract(NameConstraint(var_name="phalf")): + phalf = "phalf" else: phalf = None - plev_coord = self._get_z_coord(cubes, 'pfull', bounds_name=phalf) + plev_coord = self._get_z_coord(cubes, "pfull", bounds_name=phalf) self.fix_plev_metadata(cube, plev_coord) cube.add_aux_coord(plev_coord, np.arange(cube.ndim)) # If possible, extract reversed altitude coordinate from list of cubes # and add it to cube # Note: zg/zghalf have dimensions (height, spatial_dim) - if cubes.extract(NameConstraint(var_name='zg')): - if cubes.extract(NameConstraint(var_name='zghalf')): - zghalf = 'zghalf' + if cubes.extract(NameConstraint(var_name="zg")): + if cubes.extract(NameConstraint(var_name="zghalf")): + zghalf = "zghalf" else: zghalf = None - alt_coord = self._get_z_coord(cubes, 'zg', bounds_name=zghalf) + alt_coord = self._get_z_coord(cubes, "zg", bounds_name=zghalf) self.fix_alt16_metadata(cube, alt_coord) # Altitude coordinate only spans height and spatial dimensions (no @@ -217,15 +222,15 @@ def _fix_height(self, cube, cubes): cube.add_aux_coord(alt_coord, np.arange(cube.ndim)[-2:]) # Fix metadata - z_coord = cube.coord('height') - if z_coord.units.is_convertible('m'): + z_coord = cube.coord("height") + if z_coord.units.is_convertible("m"): self.fix_height_metadata(cube, z_coord) else: - z_coord.var_name = 'model_level' + z_coord.var_name = "model_level" z_coord.standard_name = None - z_coord.long_name = 'model level number' - z_coord.units = 'no unit' - z_coord.attributes['positive'] = 'up' + z_coord.long_name = "model level number" + z_coord.units = "no unit" + z_coord.attributes["positive"] = "up" z_coord.points = np.arange(len(z_coord.points)) z_coord.bounds = None @@ -233,12 +238,12 @@ def _fix_height(self, cube, cubes): def _fix_lat(self, cube): """Fix latitude coordinate of cube.""" - lat_name = self.extra_facets.get('latitude', 'latitude') + lat_name = self.extra_facets.get("latitude", "latitude") # Add latitude coordinate if not already present if not cube.coords(lat_name): try: - self._add_coord_from_grid_file(cube, 'latitude') + self._add_coord_from_grid_file(cube, "latitude") except Exception as exc: msg = "Failed to add missing latitude coordinate to cube" raise ValueError(msg) from exc @@ -250,12 +255,12 @@ def _fix_lat(self, cube): def _fix_lon(self, cube): """Fix longitude coordinate of cube.""" - lon_name = self.extra_facets.get('longitude', 'longitude') + lon_name = self.extra_facets.get("longitude", "longitude") # Add longitude coordinate if not already present if not cube.coords(lon_name): try: - self._add_coord_from_grid_file(cube, 'longitude') + self._add_coord_from_grid_file(cube, "longitude") except Exception as exc: msg = "Failed to add missing longitude coordinate to cube" raise ValueError(msg) from exc @@ -269,7 +274,7 @@ def _fix_lon(self, cube): def _fix_time(self, cube, cubes): """Fix time coordinate of cube.""" # Add time coordinate if not already present - if not cube.coords('time'): + if not cube.coords("time"): cube = self._add_time(cube, cubes) # Fix metadata @@ -277,14 +282,14 @@ def _fix_time(self, cube, cubes): # If necessary, convert invalid time units of the form "day as # %Y%m%d.%f" to CF format (e.g., "days since 1850-01-01") - if 'invalid_units' in time_coord.attributes: + if "invalid_units" in 
time_coord.attributes: self._fix_invalid_time_units(time_coord) # ICON usually reports aggregated values at the end of the time period, # e.g., for monthly output, ICON reports the month February as 1 March. # Thus, if not disabled, shift all time points back by 1/2 of the given # time period. - if self.extra_facets.get('shift_time', True): + if self.extra_facets.get("shift_time", True): self._shift_time_coord(cube, time_coord) # If not already present, try to add bounds here. Usually bounds are @@ -297,13 +302,15 @@ def _shift_time_coord(self, cube, time_coord): """Shift time points back by 1/2 of given time period (in-place).""" # Do not modify time coordinate for point measurements for cell_method in cube.cell_methods: - is_point_measurement = ('time' in cell_method.coord_names and - 'point' in cell_method.method) + is_point_measurement = ( + "time" in cell_method.coord_names + and "point" in cell_method.method + ) if is_point_measurement: logger.debug( "ICON data describes point measurements: time coordinate " "will not be shifted back by 1/2 of output interval (%s)", - self.extra_facets['frequency'], + self.extra_facets["frequency"], ) return @@ -311,11 +318,11 @@ def _shift_time_coord(self, cube, time_coord): time_coord.bounds = None # For decadal, yearly and monthly data, round datetimes to closest day - freq = self.extra_facets['frequency'] - if 'dec' in freq or 'yr' in freq or 'mon' in freq: + freq = self.extra_facets["frequency"] + if "dec" in freq or "yr" in freq or "mon" in freq: time_units = time_coord.units time_coord.convert_units( - Unit('days since 1850-01-01', calendar=time_units.calendar) + Unit("days since 1850-01-01", calendar=time_units.calendar) ) try: time_coord.points = np.around(time_coord.points) @@ -344,19 +351,19 @@ def _shift_time_coord(self, cube, time_coord): ([previous_time_point], time_coord.points) ) time_coord.points = ( - np.convolve(extended_time_points, np.ones(2), 'valid') / 2.0 + np.convolve(extended_time_points, np.ones(2), "valid") / 2.0 ) # running mean with window length 2 time_coord.bounds = np.stack( (extended_time_points[:-1], extended_time_points[1:]), axis=-1 ) logger.debug( "Shifted ICON time coordinate back by 1/2 of output interval (%s)", - self.extra_facets['frequency'], + self.extra_facets["frequency"], ) def _get_previous_timestep(self, datetime_point): """Get previous time step.""" - freq = self.extra_facets['frequency'] + freq = self.extra_facets["frequency"] year = datetime_point.year month = datetime_point.month @@ -366,12 +373,12 @@ def _get_previous_timestep(self, datetime_point): f"step for frequency '{freq}'. 
Use `shift_time=false` in the "
            f"recipe to disable this feature"
        )
-        if 'fx' in freq or 'subhr' in freq:
+        if "fx" in freq or "subhr" in freq:
            raise ValueError(invalid_freq_error_msg)

        # For decadal, yearly and monthly data, the points need to be the
        # first of the month at 00:00:00
-        if 'dec' in freq or 'yr' in freq or 'mon' in freq:
+        if "dec" in freq or "yr" in freq or "mon" in freq:
            if datetime_point != datetime(year, month, 1):
                raise ValueError(
                    f"Cannot shift time coordinate: expected first of the "
@@ -381,26 +388,26 @@

        # Decadal data
-        if 'dec' in freq:
+        if "dec" in freq:
            return datetime_point.replace(year=year - 10)

        # Yearly data
-        if 'yr' in freq:
+        if "yr" in freq:
            return datetime_point.replace(year=year - 1)

        # Monthly data
-        if 'mon' in freq:
+        if "mon" in freq:
            new_month = (month - 2) % 12 + 1
            new_year = year + (month - 2) // 12
            return datetime_point.replace(year=new_year, month=new_month)

        # Daily data
-        if 'day' in freq:
+        if "day" in freq:
            return datetime_point - timedelta(days=1)

        # Hourly data
-        if 'hr' in freq:
-            (n_hours, _, _) = freq.partition('hr')
+        if "hr" in freq:
+            (n_hours, _, _) = freq.partition("hr")
            if not n_hours:
                n_hours = 1
            return datetime_point - timedelta(hours=int(n_hours))
@@ -418,20 +425,22 @@ def _fix_mesh(self, cube, mesh_idx):
        # Add dimensional coordinate that describes the mesh dimension
        index_coord = DimCoord(
            np.arange(cube.shape[mesh_idx[0]]),
-            var_name='i',
-            long_name=('first spatial index for variables stored on an '
-                       'unstructured grid'),
-            units='1',
+            var_name="i",
+            long_name=(
+                "first spatial index for variables stored on an "
+                "unstructured grid"
+            ),
+            units="1",
        )
        cube.add_dim_coord(index_coord, mesh_idx)

        # If desired, get mesh and replace the original latitude and longitude
        # coordinates with their new mesh versions
-        if self.extra_facets.get('ugrid', True):
+        if self.extra_facets.get("ugrid", True):
            mesh = self.get_mesh(cube)
-            cube.remove_coord('latitude')
-            cube.remove_coord('longitude')
-            for mesh_coord in mesh.to_MeshCoords('face'):
+            cube.remove_coord("latitude")
+            cube.remove_coord("longitude")
+            for mesh_coord in mesh.to_MeshCoords("face"):
                cube.add_aux_coord(mesh_coord, mesh_idx)

    @staticmethod
@@ -462,15 +471,15 @@ def _fix_invalid_time_units(time_coord):
        # ICON data usually has no time bounds. To be 100% sure, we remove the
        # bounds here (they will be added at a later stage).
        time_coord.bounds = None
-        time_format = 'day as %Y%m%d.%f'
-        t_unit = time_coord.attributes.pop('invalid_units')
+        time_format = "day as %Y%m%d.%f"
+        t_unit = time_coord.attributes.pop("invalid_units")
        if t_unit != time_format:
            raise ValueError(
                f"Expected time units '{time_format}' in input file, got "
                f"'{t_unit}'"
            )
        new_t_units = Unit(
-            'days since 1850-01-01', calendar='proleptic_gregorian'
+            "days since 1850-01-01", calendar="proleptic_gregorian"
        )

        # New routine to convert time of daily and hourly data. The string %f
@@ -480,25 +489,28 @@
        # First, extract date (year, month, day) from string and convert it to
        # datetime object
-        year_month_day_str = time_str.str.extract(r'(\d*)\.?\d*', expand=False)
-        year_month_day = pd.to_datetime(year_month_day_str, format='%Y%m%d')
+        year_month_day_str = time_str.str.extract(r"(\d*)\.?\d*", expand=False)
+        year_month_day = pd.to_datetime(year_month_day_str, format="%Y%m%d")

        # Second, extract day fraction and convert it to timedelta object
        day_float_str = time_str.str.extract(
-            r'\d*(\.\d*)', expand=False
-        ).fillna('0.0')
-        day_float = pd.to_timedelta(day_float_str.astype(float), unit='D')
+            r"\d*(\.\d*)", expand=False
+        ).fillna("0.0")
+        day_float = pd.to_timedelta(day_float_str.astype(float), unit="D")

        # Finally, add date and day fraction to get final datetime and convert
        # it to correct units. Note: we also round to next second, otherwise
        # this results in times that are off by 1s (e.g., 13:59:59 instead of
-        # 14:00:00).
-        rounded_datetimes = (year_month_day + day_float).round('s')
+        # 14:00:00). We round elements individually since rounding the
+        # pd.Series object directly is broken
+        # (https://github.com/pandas-dev/pandas/issues/57002).
+        datetimes = year_month_day + day_float
+        rounded_datetimes = pd.Series(dt.round("s") for dt in datetimes)

        with warnings.catch_warnings():
            # We already fixed the deprecated code as recommended in the
            # warning, but it still shows up -> ignore it
            warnings.filterwarnings(
-                'ignore',
+                "ignore",
                message="The behavior of DatetimeProperties.to_pydatetime .*",
                category=FutureWarning,
            )
@@ -515,9 +527,8 @@ class Clwvi(IconFix):

    def fix_metadata(self, cubes):
        """Fix metadata."""
-        cube = (
-            self.get_cube(cubes, var_name='cllvi') +
-            self.get_cube(cubes, var_name='clivi')
+        cube = self.get_cube(cubes, var_name="cllvi") + self.get_cube(
+            cubes, var_name="clivi"
        )
        cube.var_name = self.vardef.short_name
        return CubeList([cube])
@@ -529,9 +540,9 @@ class Rtmt(IconFix):

    def fix_metadata(self, cubes):
        """Fix metadata."""
        cube = (
-            self.get_cube(cubes, var_name='rsdt')
-            - self.get_cube(cubes, var_name='rsut')
-            - self.get_cube(cubes, var_name='rlut')
+            self.get_cube(cubes, var_name="rsdt")
+            - self.get_cube(cubes, var_name="rsut")
+            - self.get_cube(cubes, var_name="rlut")
        )
        cube.var_name = self.vardef.short_name
        return CubeList([cube])
diff --git a/esmvalcore/cmor/_fixes/ipslcm/ipsl_cm6.py b/esmvalcore/cmor/_fixes/ipslcm/ipsl_cm6.py
index 362d950f6c..5902711a23 100644
--- a/esmvalcore/cmor/_fixes/ipslcm/ipsl_cm6.py
+++ b/esmvalcore/cmor/_fixes/ipslcm/ipsl_cm6.py
@@ -1,4 +1,5 @@
 """Fixes for IPSLCM6 TS output format."""
+
 import logging
 import subprocess
 import time
@@ -32,7 +33,8 @@ def fix_file(self, filepath, output_dir, add_unique_suffix=False):
        """
        if "_" + self.extra_facets.get(
-                "group", "non-sense") + ".nc" not in str(filepath):
+            "group", "non-sense"
+        ) + ".nc" not in str(filepath):
            # No need to filter the file
            logger.debug("Not filtering for %s", filepath)
            return filepath
@@ -76,13 +78,13 @@ def fix_metadata(self, cubes):
            # Iris does not support having more than one
            # coordinate with standard_name='time'
            for coordinate in cube.coords(dim_coords=False):
-                if coordinate.standard_name == 'time':
-                    coordinate.standard_name = ''
+                if coordinate.standard_name == "time":
+                    coordinate.standard_name = ""

            # Fix variable name for time_counter
            for coordinate in cube.coords(dim_coords=True):
-                if coordinate.var_name == 'time_counter':
-                    coordinate.var_name = 'time'
+                if coordinate.var_name == "time_counter":
+                    coordinate.var_name = "time"
coordinate.var_name == "time_counter": + coordinate.var_name = "time" positive = self.extra_facets.get("positive") if positive: diff --git a/esmvalcore/cmor/_fixes/native6/era5.py b/esmvalcore/cmor/_fixes/native6/era5.py index 6c67494aaa..aa4beae984 100644 --- a/esmvalcore/cmor/_fixes/native6/era5.py +++ b/esmvalcore/cmor/_fixes/native6/era5.py @@ -1,4 +1,5 @@ """Fixes for ERA5.""" + import datetime import logging @@ -17,58 +18,60 @@ def get_frequency(cube): """Determine time frequency of input cube.""" try: - time = cube.coord(axis='T') + time = cube.coord(axis="T") except iris.exceptions.CoordinateNotFoundError: - return 'fx' + return "fx" - time.convert_units('days since 1850-1-1 00:00:00.0') + time.convert_units("days since 1850-1-1 00:00:00.0") if len(time.points) == 1: - if cube.long_name != 'Geopotential': - raise ValueError('Unable to infer frequency of cube ' - f'with length 1 time dimension: {cube}') - return 'fx' + if cube.long_name != "Geopotential": + raise ValueError( + "Unable to infer frequency of cube " + f"with length 1 time dimension: {cube}" + ) + return "fx" interval = time.points[1] - time.points[0] if interval - 1 / 24 < 1e-4: - return 'hourly' + return "hourly" - return 'monthly' + return "monthly" def fix_hourly_time_coordinate(cube): """Shift aggregated variables 30 minutes back in time.""" - if get_frequency(cube) == 'hourly': - time = cube.coord(axis='T') + if get_frequency(cube) == "hourly": + time = cube.coord(axis="T") time.points = time.points - 1 / 48 return cube def fix_accumulated_units(cube): """Convert accumulations to fluxes.""" - if get_frequency(cube) == 'monthly': - cube.units = cube.units * 'd-1' - elif get_frequency(cube) == 'hourly': - cube.units = cube.units * 'h-1' + if get_frequency(cube) == "monthly": + cube.units = cube.units * "d-1" + elif get_frequency(cube) == "hourly": + cube.units = cube.units * "h-1" return cube def multiply_with_density(cube, density=1000): """Convert precipitatin from m to kg/m2.""" cube.data = cube.core_data() * density - cube.units *= 'kg m**-3' + cube.units *= "kg m**-3" return cube def remove_time_coordinate(cube): """Remove time coordinate for invariant parameters.""" cube = cube[0] - cube.remove_coord('time') + cube.remove_coord("time") return cube def divide_by_gravity(cube): """Convert geopotential to height.""" - cube.units = cube.units / 'm s-2' + cube.units = cube.units / "m s-2" cube.data = cube.core_data() / 9.80665 return cube @@ -80,8 +83,8 @@ def fix_metadata(self, cubes): """Fix metadata.""" for cube in cubes: # Invalid input cube units (ignored on load) were '0-1' - cube.units = '%' - cube.data = cube.core_data() * 100. + cube.units = "%" + cube.data = cube.core_data() * 100.0 return cubes @@ -93,8 +96,8 @@ def fix_metadata(self, cubes): """Fix metadata.""" for cube in cubes: # Invalid input cube units (ignored on load) were '0-1' - cube.units = '%' - cube.data = cube.core_data() * 100. 
+ cube.units = "%" + cube.data = cube.core_data() * 100.0 return cubes @@ -106,7 +109,7 @@ def fix_metadata(self, cubes): """Fix metadata.""" for cube in cubes: # Set input cube units for invalid units were ignored on load - cube.units = 'm' + cube.units = "m" fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) multiply_with_density(cube) @@ -123,7 +126,7 @@ def fix_metadata(self, cubes): """Fix metadata.""" for cube in cubes: # Set input cube units for invalid units were ignored on load - cube.units = 'm' + cube.units = "m" fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) multiply_with_density(cube) @@ -179,7 +182,7 @@ def fix_metadata(self, cubes): """Fix metadata.""" for cube in cubes: # Set input cube units for invalid units were ignored on load - cube.units = 'm' + cube.units = "m" fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) multiply_with_density(cube) @@ -206,7 +209,7 @@ def fix_metadata(self, cubes): for cube in cubes: fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) - cube.attributes['positive'] = 'down' + cube.attributes["positive"] = "down" return cubes @@ -219,7 +222,7 @@ def fix_metadata(self, cubes): for cube in cubes: fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) - cube.attributes['positive'] = 'down' + cube.attributes["positive"] = "down" return cubes @@ -232,7 +235,7 @@ def fix_metadata(self, cubes): for cube in cubes: fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) - cube.attributes['positive'] = 'up' + cube.attributes["positive"] = "up" return cubes @@ -244,7 +247,7 @@ def fix_metadata(self, cubes): """Fix metadata.""" for cube in cubes: fix_hourly_time_coordinate(cube) - cube.attributes['positive'] = 'down' + cube.attributes["positive"] = "down" return cubes @@ -257,7 +260,7 @@ def fix_metadata(self, cubes): for cube in cubes: fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) - cube.attributes['positive'] = 'down' + cube.attributes["positive"] = "down" return cubes @@ -270,7 +273,7 @@ def fix_metadata(self, cubes): for cube in cubes: fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) - cube.attributes['positive'] = 'down' + cube.attributes["positive"] = "down" return cubes @@ -283,7 +286,7 @@ def fix_metadata(self, cubes): for cube in cubes: fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) - cube.attributes['positive'] = 'up' + cube.attributes["positive"] = "up" return cubes @@ -296,7 +299,7 @@ def fix_metadata(self, cubes): for cube in cubes: fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) - cube.attributes['positive'] = 'down' + cube.attributes["positive"] = "down" return cubes @@ -309,7 +312,7 @@ def fix_metadata(self, cubes): for cube in cubes: fix_hourly_time_coordinate(cube) fix_accumulated_units(cube) - cube.attributes['positive'] = 'down' + cube.attributes["positive"] = "down" return cubes @@ -352,17 +355,17 @@ def _fix_coordinates(self, cube): # Fix coordinate increasing direction slices = [] for coord in cube.coords(): - if coord.var_name in ('latitude', 'pressure_level'): + if coord.var_name in ("latitude", "pressure_level"): slices.append(slice(None, None, -1)) else: slices.append(slice(None)) cube = cube[tuple(slices)] # Add scalar height coordinates - if 'height2m' in self.vardef.dimensions: - add_scalar_height_coord(cube, 2.) - if 'height10m' in self.vardef.dimensions: - add_scalar_height_coord(cube, 10.) 
+ if "height2m" in self.vardef.dimensions: + add_scalar_height_coord(cube, 2.0) + if "height10m" in self.vardef.dimensions: + add_scalar_height_coord(cube, 10.0) for coord_def in self.vardef.coordinates.values(): axis = coord_def.axis @@ -372,18 +375,21 @@ def _fix_coordinates(self, cube): # (https://github.com/ESMValGroup/ESMValCore/issues/1029) if axis == "" and coord_def.name == "alevel": axis = "Z" - coord_def = CMOR_TABLES['CMIP6'].coords['plev19'] + coord_def = CMOR_TABLES["CMIP6"].coords["plev19"] coord = cube.coord(axis=axis) - if axis == 'T': - coord.convert_units('days since 1850-1-1 00:00:00.0') - if axis == 'Z': + if axis == "T": + coord.convert_units("days since 1850-1-1 00:00:00.0") + if axis == "Z": coord.convert_units(coord_def.units) coord.standard_name = coord_def.standard_name coord.var_name = coord_def.out_name coord.long_name = coord_def.long_name - coord.points = coord.core_points().astype('float64') - if (not coord.has_bounds() and len(coord.core_points()) > 1 - and coord_def.must_have_bounds == "yes"): + coord.points = coord.core_points().astype("float64") + if ( + not coord.has_bounds() + and len(coord.core_points()) > 1 + and coord_def.must_have_bounds == "yes" + ): coord.guess_bounds() self._fix_monthly_time_coord(cube) @@ -393,8 +399,8 @@ def _fix_coordinates(self, cube): @staticmethod def _fix_monthly_time_coord(cube): """Set the monthly time coordinates to the middle of the month.""" - if get_frequency(cube) == 'monthly': - coord = cube.coord(axis='T') + if get_frequency(cube) == "monthly": + coord = cube.coord(axis="T") end = [] for cell in coord.cells(): month = cell.point.month + 1 @@ -424,11 +430,12 @@ def fix_metadata(self, cubes): cube = self._fix_coordinates(cube) self._fix_units(cube) - cube.data = cube.core_data().astype('float32') + cube.data = cube.core_data().astype("float32") year = datetime.datetime.now().year - cube.attributes['comment'] = ( - 'Contains modified Copernicus Climate Change ' - f'Service Information {year}') + cube.attributes["comment"] = ( + "Contains modified Copernicus Climate Change " + f"Service Information {year}" + ) fixed_cubes.append(cube) diff --git a/esmvalcore/cmor/_fixes/native6/era5_land.py b/esmvalcore/cmor/_fixes/native6/era5_land.py index 5b6fbff276..1515e8044f 100644 --- a/esmvalcore/cmor/_fixes/native6/era5_land.py +++ b/esmvalcore/cmor/_fixes/native6/era5_land.py @@ -1,11 +1,13 @@ """Fixes for ERA5-Land.""" -import logging -from esmvalcore.cmor._fixes.native6.era5 import (Pr, - Evspsbl, - Evspsblpot, - AllVars) +import logging +from esmvalcore.cmor._fixes.native6.era5 import ( + AllVars, + Evspsbl, + Evspsblpot, + Pr, +) logger = logging.getLogger(__name__) logger.info("Load classes from era5.py") diff --git a/esmvalcore/cmor/_fixes/native6/mswep.py b/esmvalcore/cmor/_fixes/native6/mswep.py index 7ed50fcdcb..904064dd4a 100644 --- a/esmvalcore/cmor/_fixes/native6/mswep.py +++ b/esmvalcore/cmor/_fixes/native6/mswep.py @@ -1,4 +1,5 @@ """Fixes for MSWEP.""" + from datetime import datetime import cf_units @@ -16,11 +17,11 @@ def fix_time_month(cube): Convert from months since 1899-12 to days since 1850 as per CMOR standard. 
""" - time_coord = cube.coord('time') + time_coord = cube.coord("time") origin = time_coord.units.origin origin_year, origin_month = [ - int(val) for val in origin.split()[2].split('-') + int(val) for val in origin.split()[2].split("-") ] dates = [] @@ -32,8 +33,8 @@ def fix_time_month(cube): t_unit = cf_units.Unit("days since 1850-01-01", calendar="standard") - cube.coord('time').points = date2num(dates, t_unit) - cube.coord('time').units = t_unit + cube.coord("time").points = date2num(dates, t_unit) + cube.coord("time").units = t_unit def fix_time_day(cube): @@ -42,14 +43,14 @@ def fix_time_day(cube): Convert from days since 1899-12-31 to days since 1850 as per CMOR standard. """ - time_coord = cube.coord('time') - time_coord.convert_units('days since 1850-1-1 00:00:00.0') + time_coord = cube.coord("time") + time_coord.convert_units("days since 1850-1-1 00:00:00.0") def fix_longitude(cube): """Fix longitude coordinate from -180:180 to 0:360.""" - lon_axis = cube.coord_dims('longitude') - lon = cube.coord(axis='X') + lon_axis = cube.coord_dims("longitude") + lon = cube.coord(axis="X") if not lon.is_monotonic(): raise ValueError("Data must be monotonic to fix longitude.") @@ -80,12 +81,12 @@ def _fix_time(self, cube): """Fix time.""" frequency = self.vardef.frequency - if frequency in ('day', '3hr'): + if frequency in ("day", "3hr"): fix_time_day(cube) - elif frequency == 'mon': + elif frequency == "mon": fix_time_month(cube) else: - raise ValueError(f'Cannot fix time for frequency: {frequency!r}') + raise ValueError(f"Cannot fix time for frequency: {frequency!r}") def _fix_units(self, cube): """Convert units from mm/[t] to kg m-2 s-1 units.""" @@ -93,22 +94,23 @@ def _fix_units(self, cube): cube.units = Unit(self.vardef.units) - if frequency in ('day', '3hr'): + if frequency in ("day", "3hr"): # divide by number of seconds in a day cube.data = cube.core_data() / (60 * 60 * 24) - elif frequency == 'mon': + elif frequency == "mon": # divide by number of seconds in a month cube.data = cube.core_data() / (60 * 60 * 24 * 30) else: - raise ValueError(f'Cannot fix units for frequency: {frequency!r}') + raise ValueError(f"Cannot fix units for frequency: {frequency!r}") def _fix_bounds(self, cube): """Add bounds to coords.""" - coord_defs = tuple(coord_def - for coord_def in self.vardef.coordinates.values()) + coord_defs = tuple( + coord_def for coord_def in self.vardef.coordinates.values() + ) for coord_def in coord_defs: - if not coord_def.must_have_bounds == 'yes': + if not coord_def.must_have_bounds == "yes": continue coord = cube.coord(axis=coord_def.axis) @@ -122,8 +124,9 @@ def _fix_names(self, cube): cube.standard_name = self.vardef.standard_name cube.long_name = self.vardef.long_name - coord_defs = tuple(coord_def - for coord_def in self.vardef.coordinates.values()) + coord_defs = tuple( + coord_def for coord_def in self.vardef.coordinates.values() + ) for coord_def in coord_defs: coord = cube.coord(axis=coord_def.axis) diff --git a/esmvalcore/cmor/_fixes/native_datasets.py b/esmvalcore/cmor/_fixes/native_datasets.py index 20cfa5590f..6b48d92f54 100644 --- a/esmvalcore/cmor/_fixes/native_datasets.py +++ b/esmvalcore/cmor/_fixes/native_datasets.py @@ -31,14 +31,14 @@ def fix_scalar_coords(self, cube): (in-place). 
""" - if 'height2m' in self.vardef.dimensions: + if "height2m" in self.vardef.dimensions: add_scalar_height_coord(cube, 2.0) - if 'height10m' in self.vardef.dimensions: + if "height10m" in self.vardef.dimensions: add_scalar_height_coord(cube, 10.0) - if 'lambda550nm' in self.vardef.dimensions: + if "lambda550nm" in self.vardef.dimensions: add_scalar_lambda550nm_coord(cube) - if 'typesi' in self.vardef.dimensions: - add_scalar_typesi_coord(cube, 'sea_ice') + if "typesi" in self.vardef.dimensions: + add_scalar_typesi_coord(cube, "sea_ice") def fix_var_metadata(self, cube): """Fix variable metadata of cube (in-place). @@ -50,7 +50,7 @@ def fix_var_metadata(self, cube): """ # Fix names - if self.vardef.standard_name == '': + if self.vardef.standard_name == "": cube.standard_name = None else: cube.standard_name = self.vardef.standard_name @@ -59,28 +59,29 @@ def fix_var_metadata(self, cube): # Fix units # (1) raw_units set in recipe or extra_facets - if 'raw_units' in self.extra_facets: - cube.units = self.extra_facets['raw_units'] - cube.attributes.pop('invalid_units', None) + if "raw_units" in self.extra_facets: + cube.units = self.extra_facets["raw_units"] + cube.attributes.pop("invalid_units", None) # (2) Try to handle other invalid units in the input files - if 'invalid_units' in cube.attributes: - invalid_units = cube.attributes.pop('invalid_units') + if "invalid_units" in cube.attributes: + invalid_units = cube.attributes.pop("invalid_units") new_units = self.INVALID_UNITS.get( invalid_units, - invalid_units.replace('**', '^'), + invalid_units.replace("**", "^"), ) try: cube.units = new_units except ValueError as exc: raise ValueError( f"Failed to fix invalid units '{invalid_units}' for " - f"variable '{self.vardef.short_name}'") from exc + f"variable '{self.vardef.short_name}'" + ) from exc cube.convert_units(self.vardef.units) # Fix attributes - if self.vardef.positive != '': - cube.attributes['positive'] = self.vardef.positive + if self.vardef.positive != "": + cube.attributes["positive"] = self.vardef.positive def get_cube(self, cubes, var_name=None): """Extract single cube from :class:`iris.cube.CubeList`. @@ -106,12 +107,14 @@ def get_cube(self, cubes, var_name=None): """ if var_name is None: - var_name = self.extra_facets.get('raw_name', - self.vardef.short_name) + var_name = self.extra_facets.get( + "raw_name", self.vardef.short_name + ) if not cubes.extract(NameConstraint(var_name=var_name)): raise ValueError( f"Variable '{var_name}' used to extract " - f"'{self.vardef.short_name}' is not available in input file") + f"'{self.vardef.short_name}' is not available in input file" + ) return cubes.extract_cube(NameConstraint(var_name=var_name)) def fix_regular_time(self, cube, coord=None, guess_bounds=True): @@ -129,7 +132,7 @@ def fix_regular_time(self, cube, coord=None, guess_bounds=True): bounds. """ - if not self.vardef.has_coord_with_standard_name('time'): + if not self.vardef.has_coord_with_standard_name("time"): return coord = self.fix_time_metadata(cube, coord) if guess_bounds: @@ -150,7 +153,7 @@ def fix_regular_lat(self, cube, coord=None, guess_bounds=True): bounds. """ - if not self.vardef.has_coord_with_standard_name('latitude'): + if not self.vardef.has_coord_with_standard_name("latitude"): return coord = self.fix_lat_metadata(cube, coord) if guess_bounds: @@ -171,7 +174,7 @@ def fix_regular_lon(self, cube, coord=None, guess_bounds=True): bounds. 
""" - if not self.vardef.has_coord_with_standard_name('longitude'): + if not self.vardef.has_coord_with_standard_name("longitude"): return coord = self.fix_lon_metadata(cube, coord) if guess_bounds: @@ -229,12 +232,12 @@ def fix_time_metadata(cube, coord=None): """ if coord is None: - coord = cube.coord('time') + coord = cube.coord("time") elif isinstance(coord, str): coord = cube.coord(coord) - coord.var_name = 'time' - coord.standard_name = 'time' - coord.long_name = 'time' + coord.var_name = "time" + coord.standard_name = "time" + coord.long_name = "time" return coord @staticmethod @@ -257,14 +260,14 @@ def fix_alt16_metadata(cube, coord=None): """ if coord is None: - coord = cube.coord('altitude') + coord = cube.coord("altitude") elif isinstance(coord, str): coord = cube.coord(coord) - coord.var_name = 'alt16' - coord.standard_name = 'altitude' - coord.long_name = 'altitude' - coord.convert_units('m') - coord.attributes['positive'] = 'up' + coord.var_name = "alt16" + coord.standard_name = "altitude" + coord.long_name = "altitude" + coord.convert_units("m") + coord.attributes["positive"] = "up" return coord @staticmethod @@ -287,14 +290,14 @@ def fix_height_metadata(cube, coord=None): """ if coord is None: - coord = cube.coord('height') + coord = cube.coord("height") elif isinstance(coord, str): coord = cube.coord(coord) - coord.var_name = 'height' - coord.standard_name = 'height' - coord.long_name = 'height' - coord.convert_units('m') - coord.attributes['positive'] = 'up' + coord.var_name = "height" + coord.standard_name = "height" + coord.long_name = "height" + coord.convert_units("m") + coord.attributes["positive"] = "up" return coord @staticmethod @@ -317,14 +320,14 @@ def fix_plev_metadata(cube, coord=None): """ if coord is None: - coord = cube.coord('air_pressure') + coord = cube.coord("air_pressure") elif isinstance(coord, str): coord = cube.coord(coord) - coord.var_name = 'plev' - coord.standard_name = 'air_pressure' - coord.long_name = 'pressure' - coord.convert_units('Pa') - coord.attributes['positive'] = 'down' + coord.var_name = "plev" + coord.standard_name = "air_pressure" + coord.long_name = "pressure" + coord.convert_units("Pa") + coord.attributes["positive"] = "down" return coord @staticmethod @@ -347,13 +350,13 @@ def fix_lat_metadata(cube, coord=None): """ if coord is None: - coord = cube.coord('latitude') + coord = cube.coord("latitude") elif isinstance(coord, str): coord = cube.coord(coord) - coord.var_name = 'lat' - coord.standard_name = 'latitude' - coord.long_name = 'latitude' - coord.convert_units('degrees_north') + coord.var_name = "lat" + coord.standard_name = "latitude" + coord.long_name = "latitude" + coord.convert_units("degrees_north") return coord @staticmethod @@ -376,11 +379,11 @@ def fix_lon_metadata(cube, coord=None): """ if coord is None: - coord = cube.coord('longitude') + coord = cube.coord("longitude") elif isinstance(coord, str): coord = cube.coord(coord) - coord.var_name = 'lon' - coord.standard_name = 'longitude' - coord.long_name = 'longitude' - coord.convert_units('degrees_east') + coord.var_name = "lon" + coord.standard_name = "longitude" + coord.long_name = "longitude" + coord.convert_units("degrees_east") return coord diff --git a/esmvalcore/cmor/_fixes/obs4mips/airs_2_0.py b/esmvalcore/cmor/_fixes/obs4mips/airs_2_0.py new file mode 100644 index 0000000000..1aee583abf --- /dev/null +++ b/esmvalcore/cmor/_fixes/obs4mips/airs_2_0.py @@ -0,0 +1,36 @@ +"""Fixes for obs4MIPs dataset AIRS-2-0.""" + +import dask.array as da + +from ..fix import 
Fix
+
+
+class Hur(Fix):
+    """Fixes for hur."""
+
+    def fix_metadata(self, cubes):
+        """Fix metadata.
+
+        Convert units from `1` to `%` and remove `valid_range` attribute.
+
+        Parameters
+        ----------
+        cubes: iris.cube.CubeList
+            Input cubes.
+
+        Returns
+        -------
+        iris.cube.CubeList
+            Fixed cubes.
+
+        """
+        for cube in cubes:
+            # Put information from valid_range into mask and remove the
+            # attribute (otherwise this will cause problems after reloading the
+            # data with different units)
+            valid_range = cube.attributes["valid_range"]
+            cube.data = da.ma.masked_outside(cube.core_data(), *valid_range)
+            cube.attributes.pop("valid_range", None)
+
+            cube.convert_units("%")
+        return cubes
diff --git a/esmvalcore/cmor/_fixes/obs4mips/airs_2_1.py b/esmvalcore/cmor/_fixes/obs4mips/airs_2_1.py
index 93ca03eaa6..7b1438eccb 100644
--- a/esmvalcore/cmor/_fixes/obs4mips/airs_2_1.py
+++ b/esmvalcore/cmor/_fixes/obs4mips/airs_2_1.py
@@ -1,4 +1,5 @@
 """Fixes for obs4MIPs dataset AIRS-2-1."""
+
 from iris.exceptions import CoordinateNotFoundError

 from ..fix import Fix
@@ -26,10 +27,10 @@ def fix_metadata(self, cubes):
        """
        for cube in cubes:
            try:
-                plev = cube.coord('air_pressure')
+                plev = cube.coord("air_pressure")
            except CoordinateNotFoundError:
                continue
            else:
                if plev.points[0] > 10000.0:
-                    plev.units = 'Pa'
+                    plev.units = "Pa"
        return cubes
diff --git a/esmvalcore/cmor/_fixes/obs4mips/ssmi.py b/esmvalcore/cmor/_fixes/obs4mips/ssmi.py
index cb150fe729..5357554e34 100644
--- a/esmvalcore/cmor/_fixes/obs4mips/ssmi.py
+++ b/esmvalcore/cmor/_fixes/obs4mips/ssmi.py
@@ -1,5 +1,5 @@
-
 """Fixes for SSMI model."""
+
 from ..fix import Fix
@@ -9,9 +9,9 @@ class Prw(Fix):
    def fix_metadata(self, cubes):
        """Fix latitude varname."""
        for cube in cubes:
-            latitude = cube.coord('latitude')
-            latitude.var_name = 'lat'
+            latitude = cube.coord("latitude")
+            latitude.var_name = "lat"

-            longitude = cube.coord('longitude')
-            longitude.var_name = 'lon'
+            longitude = cube.coord("longitude")
+            longitude.var_name = "lon"
        return cubes
diff --git a/esmvalcore/cmor/_fixes/obs4mips/ssmi_meris.py b/esmvalcore/cmor/_fixes/obs4mips/ssmi_meris.py
index 2fa26842b9..43f48e19d7 100644
--- a/esmvalcore/cmor/_fixes/obs4mips/ssmi_meris.py
+++ b/esmvalcore/cmor/_fixes/obs4mips/ssmi_meris.py
@@ -1,5 +1,5 @@
-
 """Fixes for SSMI-MERIS."""
+
 from iris.cube import CubeList

 from ..fix import Fix
diff --git a/esmvalcore/cmor/_fixes/shared.py b/esmvalcore/cmor/_fixes/shared.py
index 3ec14d05ab..c348b039f5 100644
--- a/esmvalcore/cmor/_fixes/shared.py
+++ b/esmvalcore/cmor/_fixes/shared.py
@@ -1,4 +1,5 @@
 """Shared functions for fixes."""
+
 import logging
 import os
 from datetime import datetime, timedelta
@@ -39,12 +40,13 @@ def add_aux_coords_from_cubes(cube, cubes, coord_dict):
        ``cubes`` do not contain a desired coordinate or multiple copies of
        it.
    """
-    for (coord_name, coord_dims) in coord_dict.items():
+    for coord_name, coord_dims in coord_dict.items():
        coord_cube = cubes.extract(NameConstraint(var_name=coord_name))
        if len(coord_cube) != 1:
            raise ValueError(
                f"Expected exactly one coordinate cube '{coord_name}' in "
-                f"list of cubes {cubes}, got {len(coord_cube):d}")
+                f"list of cubes {cubes}, got {len(coord_cube):d}"
+            )
        coord_cube = coord_cube[0]
        aux_coord = cube_to_aux_coord(coord_cube)
        cube.add_aux_coord(aux_coord, coord_dims)
@@ -100,10 +102,10 @@ def add_plev_from_altitude(cube):
    ValueError
        ``cube`` does not contain coordinate ``altitude``.
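+
+    Example (illustrative values, assuming the bundled US standard
+    atmosphere table): an ``altitude`` coordinate with points
+    ``[0, 1000]`` m yields an added ``air_pressure`` coordinate with
+    points of roughly ``[101325, 89875]`` Pa.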
""" - if cube.coords('altitude'): - height_coord = cube.coord('altitude') - if height_coord.units != 'm': - height_coord.convert_units('m') + if cube.coords("altitude"): + height_coord = cube.coord("altitude") + if height_coord.units != "m": + height_coord.convert_units("m") altitude_to_pressure = get_altitude_to_pressure_func() pressure_points = _map_on_filled( altitude_to_pressure, height_coord.core_points() @@ -114,17 +116,20 @@ def add_plev_from_altitude(cube): pressure_bounds = _map_on_filled( altitude_to_pressure, height_coord.core_bounds() ) - pressure_coord = iris.coords.AuxCoord(pressure_points, - bounds=pressure_bounds, - var_name='plev', - standard_name='air_pressure', - long_name='pressure', - units='Pa') + pressure_coord = iris.coords.AuxCoord( + pressure_points, + bounds=pressure_bounds, + var_name="plev", + standard_name="air_pressure", + long_name="pressure", + units="Pa", + ) cube.add_aux_coord(pressure_coord, cube.coord_dims(height_coord)) return raise ValueError( "Cannot add 'air_pressure' coordinate, 'altitude' coordinate not " - "available") + "available" + ) def add_altitude_from_plev(cube): @@ -140,10 +145,10 @@ def add_altitude_from_plev(cube): ValueError ``cube`` does not contain coordinate ``air_pressure``. """ - if cube.coords('air_pressure'): - plev_coord = cube.coord('air_pressure') - if plev_coord.units != 'Pa': - plev_coord.convert_units('Pa') + if cube.coords("air_pressure"): + plev_coord = cube.coord("air_pressure") + if plev_coord.units != "Pa": + plev_coord.convert_units("Pa") pressure_to_altitude = get_pressure_to_altitude_func() altitude_points = _map_on_filled( pressure_to_altitude, plev_coord.core_points() @@ -154,30 +159,35 @@ def add_altitude_from_plev(cube): altitude_bounds = _map_on_filled( pressure_to_altitude, plev_coord.core_bounds() ) - altitude_coord = iris.coords.AuxCoord(altitude_points, - bounds=altitude_bounds, - var_name='alt', - standard_name='altitude', - long_name='altitude', - units='m') + altitude_coord = iris.coords.AuxCoord( + altitude_points, + bounds=altitude_bounds, + var_name="alt", + standard_name="altitude", + long_name="altitude", + units="m", + ) cube.add_aux_coord(altitude_coord, cube.coord_dims(plev_coord)) return raise ValueError( "Cannot add 'altitude' coordinate, 'air_pressure' coordinate not " - "available") + "available" + ) def add_scalar_depth_coord(cube, depth=0.0): """Add scalar coordinate 'depth' with value of `depth`m.""" logger.debug("Adding depth coordinate (%sm)", depth) - depth_coord = iris.coords.AuxCoord(depth, - var_name='depth', - standard_name='depth', - long_name='depth', - units=Unit('m'), - attributes={'positive': 'down'}) + depth_coord = iris.coords.AuxCoord( + depth, + var_name="depth", + standard_name="depth", + long_name="depth", + units=Unit("m"), + attributes={"positive": "down"}, + ) try: - cube.coord('depth') + cube.coord("depth") except iris.exceptions.CoordinateNotFoundError: cube.add_aux_coord(depth_coord, ()) return cube @@ -186,14 +196,16 @@ def add_scalar_depth_coord(cube, depth=0.0): def add_scalar_height_coord(cube, height=2.0): """Add scalar coordinate 'height' with value of `height`m.""" logger.debug("Adding height coordinate (%sm)", height) - height_coord = iris.coords.AuxCoord(height, - var_name='height', - standard_name='height', - long_name='height', - units=Unit('m'), - attributes={'positive': 'up'}) + height_coord = iris.coords.AuxCoord( + height, + var_name="height", + standard_name="height", + long_name="height", + units=Unit("m"), + attributes={"positive": "up"}, + ) try: 
- cube.coord('height') + cube.coord("height") except iris.exceptions.CoordinateNotFoundError: cube.add_aux_coord(height_coord, ()) return cube @@ -204,58 +216,64 @@ def add_scalar_lambda550nm_coord(cube): logger.debug("Adding lambda550nm coordinate") lambda550nm_coord = iris.coords.AuxCoord( 550.0, - var_name='wavelength', - standard_name='radiation_wavelength', - long_name='Radiation Wavelength 550 nanometers', - units='nm', + var_name="wavelength", + standard_name="radiation_wavelength", + long_name="Radiation Wavelength 550 nanometers", + units="nm", ) try: - cube.coord('radiation_wavelength') + cube.coord("radiation_wavelength") except iris.exceptions.CoordinateNotFoundError: cube.add_aux_coord(lambda550nm_coord, ()) return cube -def add_scalar_typeland_coord(cube, value='default'): +def add_scalar_typeland_coord(cube, value="default"): """Add scalar coordinate 'typeland' with value of `value`.""" logger.debug("Adding typeland coordinate (%s)", value) - typeland_coord = iris.coords.AuxCoord(value, - var_name='type', - standard_name='area_type', - long_name='Land area type', - units=Unit('no unit')) + typeland_coord = iris.coords.AuxCoord( + value, + var_name="type", + standard_name="area_type", + long_name="Land area type", + units=Unit("no unit"), + ) try: - cube.coord('area_type') + cube.coord("area_type") except iris.exceptions.CoordinateNotFoundError: cube.add_aux_coord(typeland_coord, ()) return cube -def add_scalar_typesea_coord(cube, value='default'): +def add_scalar_typesea_coord(cube, value="default"): """Add scalar coordinate 'typesea' with value of `value`.""" logger.debug("Adding typesea coordinate (%s)", value) - typesea_coord = iris.coords.AuxCoord(value, - var_name='type', - standard_name='area_type', - long_name='Ocean area type', - units=Unit('no unit')) + typesea_coord = iris.coords.AuxCoord( + value, + var_name="type", + standard_name="area_type", + long_name="Ocean area type", + units=Unit("no unit"), + ) try: - cube.coord('area_type') + cube.coord("area_type") except iris.exceptions.CoordinateNotFoundError: cube.add_aux_coord(typesea_coord, ()) return cube -def add_scalar_typesi_coord(cube, value='sea_ice'): +def add_scalar_typesi_coord(cube, value="sea_ice"): """Add scalar coordinate 'typesi' with value of `value`.""" logger.debug("Adding typesi coordinate (%s)", value) - typesi_coord = iris.coords.AuxCoord(value, - var_name='type', - standard_name='area_type', - long_name='Sea Ice area type', - units=Unit('no unit')) + typesi_coord = iris.coords.AuxCoord( + value, + var_name="type", + standard_name="area_type", + long_name="Sea Ice area type", + units=Unit("no unit"), + ) try: - cube.coord('area_type') + cube.coord("area_type") except iris.exceptions.CoordinateNotFoundError: cube.add_aux_coord(typesi_coord, ()) return cube @@ -282,12 +300,14 @@ def get_altitude_to_pressure_func(): Function that converts altitude to air pressure. 
""" base_dir = os.path.dirname(os.path.abspath(__file__)) - source_file = os.path.join(base_dir, 'us_standard_atmosphere.csv') - data_frame = pd.read_csv(source_file, comment='#') - func = interp1d(data_frame['Altitude [m]'], - data_frame['Pressure [Pa]'], - kind='cubic', - fill_value='extrapolate') + source_file = os.path.join(base_dir, "us_standard_atmosphere.csv") + data_frame = pd.read_csv(source_file, comment="#") + func = interp1d( + data_frame["Altitude [m]"], + data_frame["Pressure [Pa]"], + kind="cubic", + fill_value="extrapolate", + ) return func @@ -314,17 +334,19 @@ def get_bounds_cube(cubes, coord_var_name): ``cubes`` do not contain the desired coordinate bounds or multiple copies of them. """ - for bounds in ('bnds', 'bounds'): - bound_var = f'{coord_var_name}_{bounds}' + for bounds in ("bnds", "bounds"): + bound_var = f"{coord_var_name}_{bounds}" cube = cubes.extract(NameConstraint(var_name=bound_var)) if len(cube) == 1: return cube[0] if len(cube) > 1: raise ValueError( - f"Multiple cubes with var_name '{bound_var}' found") + f"Multiple cubes with var_name '{bound_var}' found" + ) raise ValueError( f"No bounds for coordinate variable '{coord_var_name}' available in " - f"cubes\n{cubes}") + f"cubes\n{cubes}" + ) @lru_cache(maxsize=None) @@ -337,12 +359,14 @@ def get_pressure_to_altitude_func(): Function that converts air pressure to altitude. """ base_dir = os.path.dirname(os.path.abspath(__file__)) - source_file = os.path.join(base_dir, 'us_standard_atmosphere.csv') - data_frame = pd.read_csv(source_file, comment='#') - func = interp1d(data_frame['Pressure [Pa]'], - data_frame['Altitude [m]'], - kind='cubic', - fill_value='extrapolate') + source_file = os.path.join(base_dir, "us_standard_atmosphere.csv") + data_frame = pd.read_csv(source_file, comment="#") + func = interp1d( + data_frame["Pressure [Pa]"], + data_frame["Altitude [m]"], + kind="cubic", + fill_value="extrapolate", + ) return func @@ -421,12 +445,12 @@ def fix_ocean_depth_coord(cube): cube : iris.cube.Cube Input cube. 
""" - depth_coord = cube.coord(axis='Z') - depth_coord.standard_name = 'depth' - depth_coord.var_name = 'lev' - depth_coord.units = 'm' - depth_coord.long_name = 'ocean depth coordinate' - depth_coord.attributes = {'positive': 'down'} + depth_coord = cube.coord(axis="Z") + depth_coord.standard_name = "depth" + depth_coord.var_name = "lev" + depth_coord.units = "m" + depth_coord.long_name = "ocean depth coordinate" + depth_coord.attributes = {"positive": "down"} def get_next_month(month: int, year: int) -> tuple[int, int]: @@ -482,21 +506,21 @@ def get_time_bounds(time: Coord, freq: str) -> np.ndarray: dates = time.units.num2date(time.points) for date in dates: - if 'dec' in freq: + if "dec" in freq: min_bound = datetime(date.year - 5, 1, 1, 0, 0) max_bound = datetime(date.year + 5, 1, 1, 0, 0) - elif 'yr' in freq: + elif "yr" in freq: min_bound = datetime(date.year, 1, 1, 0, 0) max_bound = datetime(date.year + 1, 1, 1, 0, 0) - elif 'mon' in freq or freq == 'mo': + elif "mon" in freq or freq == "mo": next_month, next_year = get_next_month(date.month, date.year) min_bound = datetime(date.year, date.month, 1, 0, 0) max_bound = datetime(next_year, next_month, 1, 0, 0) - elif 'day' in freq: + elif "day" in freq: min_bound = date - timedelta(hours=12.0) max_bound = date + timedelta(hours=12.0) - elif 'hr' in freq: - (n_hours_str, _, _) = freq.partition('hr') + elif "hr" in freq: + (n_hours_str, _, _) = freq.partition("hr") if not n_hours_str: n_hours = 1 else: diff --git a/esmvalcore/cmor/_utils.py b/esmvalcore/cmor/_utils.py index be837d9c10..27ddf08bb8 100644 --- a/esmvalcore/cmor/_utils.py +++ b/esmvalcore/cmor/_utils.py @@ -1,4 +1,5 @@ """Utilities for CMOR module.""" + from __future__ import annotations import logging @@ -13,13 +14,13 @@ logger = logging.getLogger(__name__) _ALTERNATIVE_GENERIC_LEV_COORDS = { - 'alevel': { - 'CMIP5': ['alt40', 'plevs'], - 'CMIP6': ['alt16', 'plev3'], - 'obs4MIPs': ['alt16', 'plev3'], + "alevel": { + "CMIP5": ["alt40", "plevs"], + "CMIP6": ["alt16", "plev3"], + "obs4MIPs": ["alt16", "plev3"], }, - 'zlevel': { - 'CMIP3': ['pressure'], + "zlevel": { + "CMIP3": ["pressure"], }, } @@ -156,18 +157,18 @@ def _get_new_generic_level_coord( """ new_coord = generic_level_coord.generic_lev_coords[new_coord_name] new_coord.generic_level = True - new_coord.generic_lev_coords = ( - var_info.coordinates[generic_level_coord_name].generic_lev_coords - ) + new_coord.generic_lev_coords = var_info.coordinates[ + generic_level_coord_name + ].generic_lev_coords return new_coord def _get_simplified_calendar(calendar: str) -> str: """Simplify calendar.""" calendar_aliases = { - 'all_leap': '366_day', - 'noleap': '365_day', - 'gregorian': 'standard', + "all_leap": "366_day", + "noleap": "365_day", + "gregorian": "standard", } return calendar_aliases.get(calendar, calendar) @@ -186,9 +187,9 @@ def _get_single_cube( break if dataset_str is None: - dataset_str = '' + dataset_str = "" else: - dataset_str = f' in {dataset_str}' + dataset_str = f" in {dataset_str}" if not cube: raise ValueError( @@ -201,5 +202,9 @@ def _get_single_cube( "extra variables are usually metadata (cell area, latitude " "descriptions) that was not saved according to CF-conventions. 
It is " "possible that errors appear further on because of this.\nFull list " - "of cubes encountered: %s", short_name, dataset_str, cube_list) + "of cubes encountered: %s", + short_name, + dataset_str, + cube_list, + ) return cube diff --git a/esmvalcore/cmor/check.py b/esmvalcore/cmor/check.py index 09f7a6331d..342d0fff66 100644 --- a/esmvalcore/cmor/check.py +++ b/esmvalcore/cmor/check.py @@ -1,4 +1,5 @@ """Module for checking iris cubes against their CMOR definitions.""" + from __future__ import annotations import logging @@ -54,7 +55,7 @@ class CMORCheckError(Exception): """Exception raised when a cube does not pass the CMORCheck.""" -class CMORCheck(): +class CMORCheck: """Class used to check the CMOR-compliance of the data. Parameters @@ -90,20 +91,21 @@ class CMORCheck(): Expected frequency for the data. """ - _attr_msg = '{}: {} should be {}, not {}' - _does_msg = '{}: does not {}' - _is_msg = '{}: is not {}' - _vals_msg = '{}: has values {} {}' - _contain_msg = '{}: does not contain {} {}' - - def __init__(self, - cube, - var_info, - frequency=None, - fail_on_error=False, - check_level=CheckLevels.DEFAULT, - automatic_fixes=False): + _attr_msg = "{}: {} should be {}, not {}" + _does_msg = "{}: does not {}" + _is_msg = "{}: is not {}" + _vals_msg = "{}: has values {} {}" + _contain_msg = "{}: does not contain {} {}" + def __init__( + self, + cube, + var_info, + frequency=None, + fail_on_error=False, + check_level=CheckLevels.DEFAULT, + automatic_fixes=False, + ): self._cube = cube self._failerr = fail_on_error self._check_level = check_level @@ -178,7 +180,7 @@ def check_metadata(self, logger: Optional[logging.Logger] = None) -> Cube: self._check_multiple_coords_same_stdname() self._check_dim_names() self._check_coords() - if self.frequency != 'fx': + if self.frequency != "fx": self._check_time_coord() self._check_rank() @@ -239,37 +241,43 @@ def report_errors(self): If any errors were reported before calling this method. 
""" if self.has_errors(): - msg = '\n'.join([ - f'There were errors in variable {self._cube.var_name}:', - ' ' + '\n '.join(self._errors), - 'in cube:', - f'{self._cube}', - 'loaded from file ' + - self._cube.attributes.get('source_file', ''), - ]) + msg = "\n".join( + [ + f"There were errors in variable {self._cube.var_name}:", + " " + "\n ".join(self._errors), + "in cube:", + f"{self._cube}", + "loaded from file " + + self._cube.attributes.get("source_file", ""), + ] + ) raise CMORCheckError(msg) def report_warnings(self): """Report detected warnings to the given logger.""" if self.has_warnings(): - msg = '\n'.join([ - f'There were warnings in variable {self._cube.var_name}:', - ' ' + '\n '.join(self._warnings), - 'loaded from file ' + - self._cube.attributes.get('source_file', ''), - ]) + msg = "\n".join( + [ + f"There were warnings in variable {self._cube.var_name}:", + " " + "\n ".join(self._warnings), + "loaded from file " + + self._cube.attributes.get("source_file", ""), + ] + ) self._logger.warning(msg) def report_debug_messages(self): """Report detected debug messages to the given logger.""" if self.has_debug_messages(): - msg = '\n'.join([ - f'There were metadata changes in variable ' - f'{self._cube.var_name}:', - ' ' + '\n '.join(self._debug_messages), - 'loaded from file ' + - self._cube.attributes.get('source_file', ''), - ]) + msg = "\n".join( + [ + f"There were metadata changes in variable " + f"{self._cube.var_name}:", + " " + "\n ".join(self._debug_messages), + "loaded from file " + + self._cube.attributes.get("source_file", ""), + ] + ) self._logger.debug(msg) def _check_fill_value(self): @@ -285,43 +293,61 @@ def _check_var_metadata(self): # Check standard_name if self._cmor_var.standard_name: if self._cube.standard_name != self._cmor_var.standard_name: - self.report_error(self._attr_msg, self._cube.var_name, - 'standard_name', - self._cmor_var.standard_name, - self._cube.standard_name) + self.report_error( + self._attr_msg, + self._cube.var_name, + "standard_name", + self._cmor_var.standard_name, + self._cube.standard_name, + ) # Check long_name if self._cmor_var.long_name: if self._cube.long_name != self._cmor_var.long_name: - self.report_error(self._attr_msg, self._cube.var_name, - 'long_name', self._cmor_var.long_name, - self._cube.long_name) + self.report_error( + self._attr_msg, + self._cube.var_name, + "long_name", + self._cmor_var.long_name, + self._cube.long_name, + ) # Check units if self._cmor_var.units: units = self._get_effective_units() if self._cube.units != units: - self.report_error(self._attr_msg, self._cube.var_name, - 'units', self._cmor_var.units, - self._cube.units) + self.report_error( + self._attr_msg, + self._cube.var_name, + "units", + self._cmor_var.units, + self._cube.units, + ) # Check other variable attributes that match entries in cube.attributes - attrs = ('positive', ) + attrs = ("positive",) for attr in attrs: attr_value = getattr(self._cmor_var, attr) if attr_value: if attr not in self._cube.attributes: - self.report_warning('{}: attribute {} not present', - self._cube.var_name, attr) + self.report_warning( + "{}: attribute {} not present", + self._cube.var_name, + attr, + ) elif self._cube.attributes[attr] != attr_value: - self.report_error(self._attr_msg, self._cube.var_name, - attr, attr_value, - self._cube.attributes[attr]) + self.report_error( + self._attr_msg, + self._cube.var_name, + attr, + attr_value, + self._cube.attributes[attr], + ) def _get_effective_units(self): """Get effective units.""" # TODO: remove entire function in 
v2.12 - if self._cmor_var.units.lower() == 'psu': - units = '1.0' + if self._cmor_var.units.lower() == "psu": + units = "1.0" else: units = self._cmor_var.units return units @@ -344,8 +370,9 @@ def _check_rank(self): # Check number of dimension coords matches rank if self._cube.ndim != rank: - self.report_error(self._does_msg, self._cube.var_name, - 'match coordinate rank') + self.report_error( + self._does_msg, self._cube.var_name, "match coordinate rank" + ) def _check_multiple_coords_same_stdname(self): standard_names = set() @@ -353,67 +380,76 @@ def _check_multiple_coords_same_stdname(self): if coord.standard_name: if coord.standard_name in standard_names: coords = [ - c.var_name for c in self._cube.coords( - standard_name=coord.standard_name) + c.var_name + for c in self._cube.coords( + standard_name=coord.standard_name + ) ] self.report_error( - 'There are multiple coordinates with ' - f'standard_name "{coord.standard_name}": {coords}') + "There are multiple coordinates with " + f'standard_name "{coord.standard_name}": {coords}' + ) else: standard_names.add(coord.standard_name) def _check_dim_names(self): """Check dimension names.""" cmor_var_coordinates = self._cmor_var.coordinates.copy() - link = 'https://github.com/ESMValGroup/ESMValCore/discussions/1587' - for (key, coordinate) in cmor_var_coordinates.items(): + link = "https://github.com/ESMValGroup/ESMValCore/discussions/1587" + for key, coordinate in cmor_var_coordinates.items(): if coordinate.generic_level: self._check_generic_level_dim_names(key, coordinate) else: try: cube_coord = self._cube.coord(var_name=coordinate.out_name) - if (cube_coord.standard_name is None - and coordinate.standard_name == ''): + if ( + cube_coord.standard_name is None + and coordinate.standard_name == "" + ): pass elif cube_coord.standard_name != coordinate.standard_name: self.report_critical( self._attr_msg, coordinate.out_name, - 'standard_name', + "standard_name", coordinate.standard_name, cube_coord.standard_name, ) except iris.exceptions.CoordinateNotFoundError: try: coord = self._cube.coord(coordinate.standard_name) - if coord.standard_name in ['region', 'area_type']: + if coord.standard_name in ["region", "area_type"]: self.report_debug_message( - 'Coordinate {0} has var name {1} ' - 'instead of {2}. ' + "Coordinate {0} has var name {1} " + "instead of {2}. " "But that's considered OK and ignored. 
" - 'See also {3}', + "See also {3}", coordinate.name, coord.var_name, coordinate.out_name, - link + link, ) else: self.report_error( - 'Coordinate {0} has var name {1} ' - 'instead of {2}', + "Coordinate {0} has var name {1} " + "instead of {2}", coordinate.name, coord.var_name, coordinate.out_name, ) except iris.exceptions.CoordinateNotFoundError: - if coordinate.standard_name in ['time', 'latitude', - 'longitude'] or \ - coordinate.requested: - self.report_critical(self._does_msg, - coordinate.name, 'exist') + if ( + coordinate.standard_name + in ["time", "latitude", "longitude"] + or coordinate.requested + ): + self.report_critical( + self._does_msg, coordinate.name, "exist" + ) else: - self.report_error(self._does_msg, coordinate.name, - 'exist') + self.report_error( + self._does_msg, coordinate.name, "exist" + ) def _check_generic_level_dim_names(self, key, coordinate): """Check name of generic level coordinate.""" @@ -424,19 +460,23 @@ def _check_generic_level_dim_names(self, key, coordinate): if standard_name: if not out_name: self.report_error( - f'Generic level coordinate {key} has wrong var_name.') + f"Generic level coordinate {key} has wrong var_name." + ) level = _get_new_generic_level_coord( self._cmor_var, coordinate, key, name ) self._cmor_var.coordinates[key] = level - self.report_debug_message(f'Generic level coordinate {key} ' - 'will be checked against ' - f'{name} coordinate information') + self.report_debug_message( + f"Generic level coordinate {key} " + "will be checked against " + f"{name} coordinate information" + ) else: if out_name: self.report_critical( - f'Generic level coordinate {key} with out_name ' - f'{out_name} has wrong standard_name or is not set.') + f"Generic level coordinate {key} with out_name " + f"{out_name} has wrong standard_name or is not set." + ) else: self._check_alternative_dim_names(key) @@ -473,14 +513,15 @@ def _check_alternative_dim_names(self, key): values might be disabled. """ try: - (alternative_coord, - cube_coord) = _get_alternative_generic_lev_coord( - self._cube, key, self._cmor_var.table_type + (alternative_coord, cube_coord) = ( + _get_alternative_generic_lev_coord( + self._cube, key, self._cmor_var.table_type + ) ) # No valid alternative coordinate found -> critical error except ValueError: - self.report_critical(self._does_msg, key, 'exist') + self.report_critical(self._does_msg, key, "exist") return # Wrong standard_name -> error @@ -498,7 +539,8 @@ def _check_alternative_dim_names(self, key): f"Found alternative coordinate '{alternative_coord.out_name}' " f"for generic level coordinate '{key}'. Subsequent warnings about " f"levels that are not contained in '{alternative_coord.out_name}' " - f"can be safely ignored.") + f"can be safely ignored." 
+ ) self._check_coord(alternative_coord, cube_coord, cube_coord.var_name) def _check_coords(self): @@ -523,13 +565,13 @@ def _check_coords(self): def _check_coord_ranges(self, coords: list[tuple[CoordinateInfo, Coord]]): """Check coordinate value are inside valid ranges.""" - Limit = namedtuple('Limit', ['name', 'type', 'limit', 'value']) + Limit = namedtuple("Limit", ["name", "type", "limit", "value"]) limits = [] for coord_info, coord in coords: points = coord.core_points() - for limit_type in 'min', 'max': - valid = getattr(coord_info, f'valid_{limit_type}') + for limit_type in "min", "max": + valid = getattr(coord_info, f"valid_{limit_type}") if valid != "": limit = Limit( name=coord_info.out_name, @@ -541,12 +583,14 @@ def _check_coord_ranges(self, coords: list[tuple[CoordinateInfo, Coord]]): limits = dask.compute(*limits) for limit in limits: - if limit.type == 'min' and limit.value < limit.limit: - self.report_critical(self._vals_msg, limit.name, - '< valid_min =', limit.limit) - if limit.type == 'max' and limit.value > limit.limit: - self.report_critical(self._vals_msg, limit.name, - '> valid_max =', limit.limit) + if limit.type == "min" and limit.value < limit.limit: + self.report_critical( + self._vals_msg, limit.name, "< valid_min =", limit.limit + ) + if limit.type == "max" and limit.value > limit.limit: + self.report_critical( + self._vals_msg, limit.name, "> valid_max =", limit.limit + ) def _check_coords_data(self): """Check coordinate data.""" @@ -569,68 +613,78 @@ def _check_coords_data(self): ) self._check_coord_monotonicity_and_direction( - coordinate, coord, var_name) + coordinate, coord, var_name + ) def _check_coord(self, cmor, coord, var_name): """Check single coordinate.""" - if coord.var_name == 'time': + if coord.var_name == "time": return if cmor.units: if str(coord.units) != cmor.units: - self.report_critical(self._attr_msg, var_name, 'units', - cmor.units, coord.units) + self.report_critical( + self._attr_msg, var_name, "units", cmor.units, coord.units + ) self._check_coord_points(cmor, coord, var_name) def _check_coord_bounds(self, cmor, coord, var_name): - if cmor.must_have_bounds == 'yes' and not coord.has_bounds(): + if cmor.must_have_bounds == "yes" and not coord.has_bounds(): self.report_warning( - 'Coordinate {0} from var {1} does not have bounds', - coord.var_name, var_name) + "Coordinate {0} from var {1} does not have bounds", + coord.var_name, + var_name, + ) def _check_time_bounds(self, time): - times = {'time', 'time1', 'time2', 'time3'} + times = {"time", "time1", "time2", "time3"} key = times.intersection(self._cmor_var.coordinates) cmor = self._cmor_var.coordinates[" ".join(key)] - if cmor.must_have_bounds == 'yes' and not time.has_bounds(): + if cmor.must_have_bounds == "yes" and not time.has_bounds(): self.report_warning( - 'Coordinate {0} from var {1} does not have bounds', - time.var_name, self._cmor_var.short_name) + "Coordinate {0} from var {1} does not have bounds", + time.var_name, + self._cmor_var.short_name, + ) def _check_coord_monotonicity_and_direction(self, cmor, coord, var_name): """Check monotonicity and direction of coordinate.""" if coord.ndim > 1: return - if coord.dtype.kind == 'U': + if coord.dtype.kind == "U": return - if (self._unstructured_grid and - coord.standard_name in ['latitude', 'longitude']): + if self._unstructured_grid and coord.standard_name in [ + "latitude", + "longitude", + ]: self.report_debug_message( - f'Coordinate {coord.standard_name} appears to belong to ' - 'an unstructured grid. 
Skipping monotonicity and ' - 'direction tests.') + f"Coordinate {coord.standard_name} appears to belong to " + "an unstructured grid. Skipping monotonicity and " + "direction tests." + ) return if not coord.is_monotonic(): - self.report_critical(self._is_msg, var_name, 'monotonic') + self.report_critical(self._is_msg, var_name, "monotonic") if len(coord.core_points()) == 1: return if cmor.stored_direction: - if cmor.stored_direction == 'increasing': + if cmor.stored_direction == "increasing": if coord.core_points()[0] > coord.core_points()[1]: - self.report_critical(self._is_msg, var_name, 'increasing') - elif cmor.stored_direction == 'decreasing': + self.report_critical(self._is_msg, var_name, "increasing") + elif cmor.stored_direction == "decreasing": if coord.core_points()[0] < coord.core_points()[1]: - self.report_critical(self._is_msg, var_name, 'decreasing') + self.report_critical(self._is_msg, var_name, "decreasing") def _check_coord_points(self, coord_info, coord, var_name): """Check coordinate points: values, bounds and monotonicity.""" self._check_requested_values(coord, coord_info, var_name) self._check_coord_bounds(coord_info, coord, var_name) - self._check_coord_monotonicity_and_direction(coord_info, coord, - var_name) + self._check_coord_monotonicity_and_direction( + coord_info, coord, var_name + ) def _check_requested_values(self, coord, coord_info, var_name): """Check requested values.""" @@ -638,7 +692,10 @@ def _check_requested_values(self, coord, coord_info, var_name): if coord.core_points().ndim != 1: self.report_warning( "Cannot check requested values of {}D coordinate {} since " - "it is not 1D", coord.core_points().ndim, var_name) + "it is not 1D", + coord.core_points().ndim, + var_name, + ) return try: cmor_points = np.array(coord_info.requested, dtype=float) @@ -646,52 +703,60 @@ def _check_requested_values(self, coord, coord_info, var_name): cmor_points = coord_info.requested for point in cmor_points: if point not in coord.core_points(): - self.report_warning(self._contain_msg, var_name, - str(point), str(coord.units)) + self.report_warning( + self._contain_msg, + var_name, + str(point), + str(coord.units), + ) def _check_time_coord(self): """Check time coordinate.""" try: - coord = self._cube.coord('time', dim_coords=True) + coord = self._cube.coord("time", dim_coords=True) except iris.exceptions.CoordinateNotFoundError: try: - coord = self._cube.coord('time') + coord = self._cube.coord("time") except iris.exceptions.CoordinateNotFoundError: return var_name = coord.var_name if not coord.is_monotonic(): - self.report_error('Time coordinate for var {} is not monotonic', - var_name) + self.report_error( + "Time coordinate for var {} is not monotonic", var_name + ) if not coord.units.is_time_reference(): - self.report_critical(self._does_msg, var_name, - 'have time reference units') + self.report_critical( + self._does_msg, var_name, "have time reference units" + ) else: simplified_cal = _get_simplified_calendar(coord.units.calendar) attrs = self._cube.attributes - parent_time = 'parent_time_units' + parent_time = "parent_time_units" if parent_time in attrs: - if attrs[parent_time] in 'no parent': + if attrs[parent_time] in "no parent": pass else: try: cf_units.Unit(attrs[parent_time], simplified_cal) except ValueError: - self.report_warning('Attribute parent_time_units has ' - 'a wrong format and cannot be ' - 'read by cf_units. 
A fix needs to ' - 'be added to convert properly ' - 'attributes branch_time_in_parent ' - 'and branch_time_in_child.') + self.report_warning( + "Attribute parent_time_units has " + "a wrong format and cannot be " + "read by cf_units. A fix needs to " + "be added to convert properly " + "attributes branch_time_in_parent " + "and branch_time_in_child." + ) # Check frequency tol = 0.001 - intervals = {'dec': (3600, 3660), 'day': (1, 1)} + intervals = {"dec": (3600, 3660), "day": (1, 1)} freq = self.frequency - if freq.lower().endswith('pt'): + if freq.lower().endswith("pt"): freq = freq[:-2] - if freq in ['mon', 'mo']: + if freq in ["mon", "mo"]: dates = coord.units.num2date(coord.points) for i in range(len(coord.points) - 1): first = dates[i] @@ -701,44 +766,45 @@ def _check_time_coord(self): if second_month == 13: second_month = 1 second_year += 1 - if second_month != second.month or \ - second_year != second.year: - msg = '{}: Frequency {} does not match input data' + if second_month != second.month or second_year != second.year: + msg = "{}: Frequency {} does not match input data" self.report_error(msg, var_name, freq) break - elif freq == 'yr': + elif freq == "yr": dates = coord.units.num2date(coord.points) for i in range(len(coord.points) - 1): first = dates[i] second = dates[i + 1] second_month = first.month + 1 if first.year + 1 != second.year: - msg = '{}: Frequency {} does not match input data' + msg = "{}: Frequency {} does not match input data" self.report_error(msg, var_name, freq) break else: if freq in intervals: interval = intervals[freq] target_interval = (interval[0] - tol, interval[1] + tol) - elif freq.endswith('hr'): - if freq == 'hr': - freq = '1hr' + elif freq.endswith("hr"): + if freq == "hr": + freq = "1hr" frequency = freq[:-2] - if frequency == 'sub': + if frequency == "sub": frequency = 1.0 / 24 target_interval = (-tol, frequency + tol) else: frequency = float(frequency) / 24 target_interval = (frequency - tol, frequency + tol) else: - msg = '{}: Frequency {} not supported by checker' + msg = "{}: Frequency {} not supported by checker" self.report_error(msg, var_name, freq) return for i in range(len(coord.points) - 1): interval = coord.points[i + 1] - coord.points[i] - if (interval < target_interval[0] - or interval > target_interval[1]): - msg = '{}: Frequency {} does not match input data' + if ( + interval < target_interval[0] + or interval > target_interval[1] + ): + msg = "{}: Frequency {} does not match input data" self.report_error(msg, var_name, freq) break @@ -805,8 +871,9 @@ def report(self, level, message, *args): self._warnings.append(msg) else: if self._failerr: - raise CMORCheckError(msg + - '\n in cube:\n{}'.format(self._cube)) + raise CMORCheckError( + msg + "\n in cube:\n{}".format(self._cube) + ) self._errors.append(msg) def report_critical(self, message, *args): @@ -874,12 +941,14 @@ def _get_cmor_checker( var_info = get_var_info(project, mip, short_name) def _checker(cube: Cube) -> CMORCheck: - return CMORCheck(cube, - var_info, - frequency=frequency, - fail_on_error=fail_on_error, - check_level=check_level, - automatic_fixes=automatic_fixes) + return CMORCheck( + cube, + var_info, + frequency=frequency, + fail_on_error=fail_on_error, + check_level=check_level, + automatic_fixes=automatic_fixes, + ) return _checker diff --git a/esmvalcore/cmor/fix.py b/esmvalcore/cmor/fix.py index d05af5ef64..2e3209897d 100644 --- a/esmvalcore/cmor/fix.py +++ b/esmvalcore/cmor/fix.py @@ -4,6 +4,7 @@ for the given dataset. 
Therefore is recommended to apply them to all variables to be sure that all known errors are fixed. """ + from __future__ import annotations import logging @@ -76,21 +77,25 @@ def fix_file( """ # Update extra_facets with variable information given as regular arguments # to this function - extra_facets.update({ - 'short_name': short_name, - 'project': project, - 'dataset': dataset, - 'mip': mip, - 'frequency': frequency, - }) - - for fix in Fix.get_fixes(project=project, - dataset=dataset, - mip=mip, - short_name=short_name, - extra_facets=extra_facets, - session=session, - frequency=frequency): + extra_facets.update( + { + "short_name": short_name, + "project": project, + "dataset": dataset, + "mip": mip, + "frequency": frequency, + } + ) + + for fix in Fix.get_fixes( + project=project, + dataset=dataset, + mip=mip, + short_name=short_name, + extra_facets=extra_facets, + session=session, + frequency=frequency, + ): file = fix.fix_file( file, output_dir, add_unique_suffix=add_unique_suffix ) @@ -166,28 +171,32 @@ def fix_metadata( # Update extra_facets with variable information given as regular arguments # to this function - extra_facets.update({ - 'short_name': short_name, - 'project': project, - 'dataset': dataset, - 'mip': mip, - 'frequency': frequency, - }) - - fixes = Fix.get_fixes(project=project, - dataset=dataset, - mip=mip, - short_name=short_name, - extra_facets=extra_facets, - session=session, - frequency=frequency) + extra_facets.update( + { + "short_name": short_name, + "project": project, + "dataset": dataset, + "mip": mip, + "frequency": frequency, + } + ) + + fixes = Fix.get_fixes( + project=project, + dataset=dataset, + mip=mip, + short_name=short_name, + extra_facets=extra_facets, + session=session, + frequency=frequency, + ) fixed_cubes = CubeList() # Group cubes by input file and apply all fixes to each group element # (i.e., each file) individually by_file = defaultdict(list) for cube in cubes: - by_file[cube.attributes.get('source_file', '')].append(cube) + by_file[cube.attributes.get("source_file", "")].append(cube) for cube_list in by_file.values(): cube_list = CubeList(cube_list) @@ -210,7 +219,7 @@ def fix_metadata( ) cube = checker(cube).check_metadata() - cube.attributes.pop('source_file', None) + cube.attributes.pop("source_file", None) fixed_cubes.append(cube) return fixed_cubes @@ -287,21 +296,25 @@ def fix_data( # Update extra_facets with variable information given as regular arguments # to this function - extra_facets.update({ - 'short_name': short_name, - 'project': project, - 'dataset': dataset, - 'mip': mip, - 'frequency': frequency, - }) - - for fix in Fix.get_fixes(project=project, - dataset=dataset, - mip=mip, - short_name=short_name, - extra_facets=extra_facets, - session=session, - frequency=frequency): + extra_facets.update( + { + "short_name": short_name, + "project": project, + "dataset": dataset, + "mip": mip, + "frequency": frequency, + } + ) + + for fix in Fix.get_fixes( + project=project, + dataset=dataset, + mip=mip, + short_name=short_name, + extra_facets=extra_facets, + session=session, + frequency=frequency, + ): cube = fix.fix_data(cube) # Perform CMOR checks diff --git a/esmvalcore/cmor/fixes.py b/esmvalcore/cmor/fixes.py index 534aa3bd94..594b3687a1 100644 --- a/esmvalcore/cmor/fixes.py +++ b/esmvalcore/cmor/fixes.py @@ -8,8 +8,8 @@ ) __all__ = [ - 'add_altitude_from_plev', - 'add_plev_from_altitude', - 'get_time_bounds', - 'get_next_month', + "add_altitude_from_plev", + "add_plev_from_altitude", + "get_time_bounds", + "get_next_month", 
] diff --git a/esmvalcore/cmor/table.py b/esmvalcore/cmor/table.py index a0c685654a..511ad0a4f1 100644 --- a/esmvalcore/cmor/table.py +++ b/esmvalcore/cmor/table.py @@ -3,6 +3,7 @@ Read variable information from CMOR 2 and CMOR 3 tables and make it easily available for the other components of ESMValTool """ + from __future__ import annotations import copy @@ -22,26 +23,26 @@ logger = logging.getLogger(__name__) -CMORTable = Union['CMIP3Info', 'CMIP5Info', 'CMIP6Info', 'CustomInfo'] +CMORTable = Union["CMIP3Info", "CMIP5Info", "CMIP6Info", "CustomInfo"] CMOR_TABLES: dict[str, CMORTable] = {} """dict of str, obj: CMOR info objects.""" _CMOR_KEYS = ( - 'standard_name', - 'long_name', - 'units', - 'modeling_realm', - 'frequency', + "standard_name", + "long_name", + "units", + "modeling_realm", + "frequency", ) def _update_cmor_facets(facets): """Update `facets` with information from CMOR table.""" - project = facets['project'] - mip = facets['mip'] - short_name = facets['short_name'] - derive = facets.get('derive', False) + project = facets["project"] + mip = facets["mip"] + short_name = facets["short_name"] + derive = facets.get("derive", False) table = CMOR_TABLES.get(project) if table: table_entry = table.get_variable(mip, short_name, derive) @@ -50,8 +51,9 @@ def _update_cmor_facets(facets): if table_entry is None: raise RecipeError( f"Unable to load CMOR table (project) '{project}' for variable " - f"'{short_name}' with mip '{mip}'") - facets['original_short_name'] = table_entry.short_name + f"'{short_name}' with mip '{mip}'" + ) + facets["original_short_name"] = table_entry.short_name for key in _CMOR_KEYS: if key not in facets: value = getattr(table_entry, key, None) @@ -59,8 +61,10 @@ def _update_cmor_facets(facets): facets[key] = value else: logger.debug( - "Failed to add key %s to variable %s from CMOR table", key, - facets) + "Failed to add key %s to variable %s from CMOR table", + key, + facets, + ) def _get_mips(project: str, short_name: str) -> list[str]: @@ -111,8 +115,8 @@ def get_var_info( ) # CORDEX X-hourly tables define the mip as ending in 'h' instead of 'hr' - if project == 'CORDEX' and mip.endswith('hr'): - mip = mip.replace('hr', 'h') + if project == "CORDEX" and mip.endswith("hr"): + mip = mip.replace("hr", "h") return CMOR_TABLES[project].get_variable(mip, short_name) @@ -131,10 +135,11 @@ def read_cmor_tables(cfg_developer: Optional[Path] = None) -> None: If `cfg_developer` is not a Path-like object """ if cfg_developer is None: - cfg_developer = Path(__file__).parents[1] / 'config-developer.yml' + cfg_developer = Path(__file__).parents[1] / "config-developer.yml" elif not isinstance(cfg_developer, Path): - raise TypeError("cfg_developer is not a Path-like object, got ", - cfg_developer) + raise TypeError( + "cfg_developer is not a Path-like object, got ", cfg_developer + ) mtime = cfg_developer.stat().st_mtime cmor_tables = _read_cmor_tables(cfg_developer, mtime) CMOR_TABLES.clear() @@ -154,11 +159,11 @@ def _read_cmor_tables(cfg_file: Path, mtime: float) -> dict[str, CMORTable]: `lru_cache` decorator to make sure the file is read again when it is changed. 
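The mtime argument exists purely to key the cache: read_cmor_tables passes cfg_developer.stat().st_mtime, so editing the file invalidates the memoized entry. The same trick in isolation, as a sketch with a generic config reader (names here are illustrative):

from functools import lru_cache
from pathlib import Path

@lru_cache(maxsize=None)
def _read_config(path: Path, mtime: float) -> str:
    # mtime is part of the cache key: a touched or edited file produces a
    # new key and forces a real re-read, while repeated calls on an
    # unchanged file hit the cache.
    return path.read_text(encoding="utf-8")

def read_config(path: Path) -> str:
    return _read_config(path, path.stat().st_mtime)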
""" - with cfg_file.open('r', encoding='utf-8') as file: + with cfg_file.open("r", encoding="utf-8") as file: cfg_developer = yaml.safe_load(file) cwd = os.path.dirname(os.path.realpath(__file__)) - var_alt_names_file = os.path.join(cwd, 'variable_alt_names.yml') - with open(var_alt_names_file, 'r', encoding='utf-8') as yfile: + var_alt_names_file = os.path.join(cwd, "variable_alt_names.yml") + with open(var_alt_names_file, "r", encoding="utf-8") as yfile: alt_names = yaml.safe_load(yfile) cmor_tables: dict[str, CMORTable] = {} @@ -166,32 +171,33 @@ def _read_cmor_tables(cfg_file: Path, mtime: float) -> dict[str, CMORTable]: # Try to infer location for custom tables from config-developer.yml file, # if not possible, use default location custom_path = None - if 'custom' in cfg_developer: - custom_path = cfg_developer['custom'].get('cmor_path') + if "custom" in cfg_developer: + custom_path = cfg_developer["custom"].get("cmor_path") if custom_path is not None: custom_path = os.path.expandvars(os.path.expanduser(custom_path)) custom = CustomInfo(custom_path) - cmor_tables['custom'] = custom + cmor_tables["custom"] = custom install_dir = os.path.dirname(os.path.realpath(__file__)) for table in cfg_developer: - if table == 'custom': + if table == "custom": continue - cmor_tables[table] = _read_table(cfg_developer, table, install_dir, - custom, alt_names) + cmor_tables[table] = _read_table( + cfg_developer, table, install_dir, custom, alt_names + ) return cmor_tables def _read_table(cfg_developer, table, install_dir, custom, alt_names): project = cfg_developer[table] - cmor_type = project.get('cmor_type', 'CMIP5') - default_path = os.path.join(install_dir, 'tables', cmor_type.lower()) - table_path = project.get('cmor_path', default_path) + cmor_type = project.get("cmor_type", "CMIP5") + default_path = os.path.join(install_dir, "tables", cmor_type.lower()) + table_path = project.get("cmor_path", default_path) table_path = os.path.expandvars(os.path.expanduser(table_path)) - cmor_strict = project.get('cmor_strict', True) - default_table_prefix = project.get('cmor_default_table_prefix', '') + cmor_strict = project.get("cmor_strict", True) + default_table_prefix = project.get("cmor_default_table_prefix", "") - if cmor_type == 'CMIP3': + if cmor_type == "CMIP3": return CMIP3Info( table_path, default=custom, @@ -199,13 +205,12 @@ def _read_table(cfg_developer, table, install_dir, custom, alt_names): alt_names=alt_names, ) - if cmor_type == 'CMIP5': - return CMIP5Info(table_path, - default=custom, - strict=cmor_strict, - alt_names=alt_names) + if cmor_type == "CMIP5": + return CMIP5Info( + table_path, default=custom, strict=cmor_strict, alt_names=alt_names + ) - if cmor_type == 'CMIP6': + if cmor_type == "CMIP6": return CMIP6Info( table_path, default=custom, @@ -213,10 +218,10 @@ def _read_table(cfg_developer, table, install_dir, custom, alt_names): default_table_prefix=default_table_prefix, alt_names=alt_names, ) - raise ValueError(f'Unsupported CMOR type {cmor_type}') + raise ValueError(f"Unsupported CMOR type {cmor_type}") -class InfoBase(): +class InfoBase: """Base class for all table info classes. 
This uses CMOR 3 json format @@ -302,8 +307,9 @@ def get_variable( # If that didn't work either, look in default table if # cmor_strict=False or derived=True if not var_info: - var_info = self._look_in_default(derived, alt_names_list, - table_name) + var_info = self._look_in_default( + derived, alt_names_list, table_name + ) # If necessary, adapt frequency of variable (set it to the one from the # requested MIP). E.g., if the user asked for table `Amon`, but the @@ -317,7 +323,7 @@ def get_variable( def _look_in_default(self, derived, alt_names_list, table_name): """Look for variable in default table.""" var_info = None - if (not self.strict or derived): + if not self.strict or derived: for alt_names in alt_names_list: var_info = self.default.get_variable(table_name, alt_names) if var_info: @@ -327,7 +333,7 @@ def _look_in_default(self, derived, alt_names_list, table_name): def _look_in_all_tables(self, derived, alt_names_list): """Look for variable in all tables.""" var_info = None - if (not self.strict or derived): + if not self.strict or derived: for alt_names in alt_names_list: var_info = self._look_all_tables(alt_names) if var_info: @@ -339,10 +345,13 @@ def _get_alt_names_list(self, short_name): alt_names_list = [short_name] for alt_names in self.alt_names: if short_name in alt_names: - alt_names_list.extend([ - alt_name for alt_name in alt_names - if alt_name not in alt_names_list - ]) + alt_names_list.extend( + [ + alt_name + for alt_name in alt_names + if alt_name not in alt_names_list + ] + ) return alt_names_list def _update_frequency_from_mip(self, table_name, var_info): @@ -378,18 +387,19 @@ class CMIP6Info(InfoBase): found in the requested one """ - def __init__(self, - cmor_tables_path, - default=None, - alt_names=None, - strict=True, - default_table_prefix=''): - + def __init__( + self, + cmor_tables_path, + default=None, + alt_names=None, + strict=True, + default_table_prefix="", + ): super().__init__(default, alt_names, strict) cmor_tables_path = self._get_cmor_path(cmor_tables_path) - self._cmor_folder = os.path.join(cmor_tables_path, 'Tables') - if glob.glob(os.path.join(self._cmor_folder, '*_CV.json')): + self._cmor_folder = os.path.join(cmor_tables_path, "Tables") + if glob.glob(os.path.join(self._cmor_folder, "*_CV.json")): self._load_controlled_vocabulary() self.default_table_prefix = default_table_prefix @@ -397,8 +407,8 @@ def __init__(self, self.var_to_freq = {} self._load_coordinates() - for json_file in glob.glob(os.path.join(self._cmor_folder, '*.json')): - if 'CV_test' in json_file or 'grids' in json_file: + for json_file in glob.glob(os.path.join(self._cmor_folder, "*.json")): + if "CV_test" in json_file or "grids" in json_file: continue try: self._load_table(json_file) @@ -416,28 +426,29 @@ def _get_cmor_path(cmor_tables_path): if os.path.isdir(cmor_tables_path): return cmor_tables_path cwd = os.path.dirname(os.path.realpath(__file__)) - cmor_tables_path = os.path.join(cwd, 'tables', cmor_tables_path) + cmor_tables_path = os.path.join(cwd, "tables", cmor_tables_path) if os.path.isdir(cmor_tables_path): return cmor_tables_path raise ValueError( - 'CMOR tables not found in {}'.format(cmor_tables_path)) + "CMOR tables not found in {}".format(cmor_tables_path) + ) def _load_table(self, json_file): - with open(json_file, encoding='utf-8') as inf: + with open(json_file, encoding="utf-8") as inf: raw_data = json.loads(inf.read()) if not self._is_table(raw_data): return table = TableInfo() - header = raw_data['Header'] - table.name = header['table_id'].split(' 
')[-1] + header = raw_data["Header"] + table.name = header["table_id"].split(" ")[-1] self.tables[table.name] = table - generic_levels = header['generic_levels'].split() - table.frequency = header.get('frequency', '') + generic_levels = header["generic_levels"].split() + table.frequency = header.get("frequency", "") self.var_to_freq[table.name] = {} - for var_name, var_data in raw_data['variable_entry'].items(): - var = VariableInfo('CMIP6', var_name) + for var_name, var_data in raw_data["variable_entry"].items(): + var = VariableInfo("CMIP6", var_name) var.read_json(var_data, table.frequency) self._assign_dimensions(var, generic_levels) table[var_name] = var @@ -463,8 +474,10 @@ def _assign_dimensions(self, var, generic_levels): coord = self.coords[dimension] except KeyError: logger.exception( - 'Can not find dimension %s for variable %s', dimension, - var) + "Can not find dimension %s for variable %s", + dimension, + var, + ) raise var.coordinates[dimension] = coord @@ -472,33 +485,35 @@ def _assign_dimensions(self, var, generic_levels): def _load_coordinates(self): self.coords = {} for json_file in glob.glob( - os.path.join(self._cmor_folder, '*coordinate*.json')): - with open(json_file, encoding='utf-8') as inf: + os.path.join(self._cmor_folder, "*coordinate*.json") + ): + with open(json_file, encoding="utf-8") as inf: table_data = json.loads(inf.read()) - for coord_name in table_data['axis_entry'].keys(): + for coord_name in table_data["axis_entry"].keys(): coord = CoordinateInfo(coord_name) - coord.read_json(table_data['axis_entry'][coord_name]) + coord.read_json(table_data["axis_entry"][coord_name]) self.coords[coord_name] = coord def _load_controlled_vocabulary(self): self.activities = {} self.institutes = {} - for json_file in glob.glob(os.path.join(self._cmor_folder, - '*_CV.json')): - with open(json_file, encoding='utf-8') as inf: + for json_file in glob.glob( + os.path.join(self._cmor_folder, "*_CV.json") + ): + with open(json_file, encoding="utf-8") as inf: table_data = json.loads(inf.read()) try: - exps = table_data['CV']['experiment_id'] + exps = table_data["CV"]["experiment_id"] for exp_id in exps: - activity = exps[exp_id]['activity_id'][0].split(' ') + activity = exps[exp_id]["activity_id"][0].split(" ") self.activities[exp_id] = activity except (KeyError, AttributeError): pass try: - sources = table_data['CV']['source_id'] + sources = table_data["CV"]["source_id"] for source_id in sources: - institution = sources[source_id]['institution_id'] + institution = sources[source_id]["institution_id"] self.institutes[source_id] = institution except (KeyError, AttributeError): pass @@ -520,13 +535,13 @@ def get_table(self, table): try: return self.tables[table] except KeyError: - return self.tables.get(''.join((self.default_table_prefix, table))) + return self.tables.get("".join((self.default_table_prefix, table))) @staticmethod def _is_table(table_data): - if 'variable_entry' not in table_data: + if "variable_entry" not in table_data: return False - if 'Header' not in table_data: + if "Header" not in table_data: return False return True @@ -538,21 +553,30 @@ class TableInfo(dict): def __init__(self, *args, **kwargs): """Create a new TableInfo object for storing VariableInfo objects.""" super(TableInfo, self).__init__(*args, **kwargs) - self.name = '' - self.frequency = '' - self.realm = '' + self.name = "" + self.frequency = "" + self.realm = "" def __eq__(self, other): - return (self.name, self.frequency, self.realm) == \ - (other.name, other.frequency, other.realm) + return 
(self.name, self.frequency, self.realm) == ( + other.name, + other.frequency, + other.realm, + ) def __ne__(self, other): - return (self.name, self.frequency, self.realm) != \ - (other.name, other.frequency, other.realm) + return (self.name, self.frequency, self.realm) != ( + other.name, + other.frequency, + other.realm, + ) def __lt__(self, other): - return (self.name, self.frequency, self.realm) < \ - (other.name, other.frequency, other.realm) + return (self.name, self.frequency, self.realm) < ( + other.name, + other.frequency, + other.realm, + ) class JsonInfo(object): @@ -564,7 +588,7 @@ class JsonInfo(object): def __init__(self): self._json_data = {} - def _read_json_variable(self, parameter, default=''): + def _read_json_variable(self, parameter, default=""): """Read a json parameter in json_data. Parameters @@ -616,19 +640,19 @@ def __init__(self, table_type, short_name): """Modeling realm""" self.short_name = short_name """Short name""" - self.standard_name = '' + self.standard_name = "" """Standard name""" - self.long_name = '' + self.long_name = "" """Long name""" - self.units = '' + self.units = "" """Data units""" - self.valid_min = '' + self.valid_min = "" """Minimum admitted value""" - self.valid_max = '' + self.valid_max = "" """Maximum admitted value""" - self.frequency = '' + self.frequency = "" """Data frequency""" - self.positive = '' + self.positive = "" """Increasing direction""" self.dimensions = [] @@ -667,17 +691,18 @@ def read_json(self, json_data, default_freq): """ self._json_data = json_data - self.standard_name = self._read_json_variable('standard_name') - self.long_name = self._read_json_variable('long_name') - self.units = self._read_json_variable('units') - self.valid_min = self._read_json_variable('valid_min') - self.valid_max = self._read_json_variable('valid_max') - self.positive = self._read_json_variable('positive') + self.standard_name = self._read_json_variable("standard_name") + self.long_name = self._read_json_variable("long_name") + self.units = self._read_json_variable("units") + self.valid_min = self._read_json_variable("valid_min") + self.valid_max = self._read_json_variable("valid_max") + self.positive = self._read_json_variable("positive") self.modeling_realm = self._read_json_variable( - 'modeling_realm').split() - self.frequency = self._read_json_variable('frequency', default_freq) + "modeling_realm" + ).split() + self.frequency = self._read_json_variable("frequency", default_freq) - self.dimensions = self._read_json_variable('dimensions').split() + self.dimensions = self._read_json_variable("dimensions").split() def has_coord_with_standard_name(self, standard_name: str) -> bool: """Check if a coordinate with a given `standard_name` exists. 
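VariableInfo.read_json above pulls its fields straight out of a CMOR 3 table entry; the table files themselves are JSON with a Header block and a variable_entry mapping, parsed as in this reduced sketch of _load_table (the embedded JSON is a trimmed, illustrative fragment):

import json

# A trimmed, illustrative CMOR 3 table fragment.
raw_data = json.loads('''
{
  "Header": {"table_id": "Table Amon", "frequency": "mon"},
  "variable_entry": {
    "tas": {
      "standard_name": "air_temperature",
      "units": "K",
      "dimensions": "longitude latitude time height2m"
    }
  }
}
''')

table_name = raw_data["Header"]["table_id"].split(" ")[-1]  # "Amon"
entry = raw_data["variable_entry"]["tas"]
dimensions = entry["dimensions"].split()
print(table_name, dimensions)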
@@ -771,19 +796,19 @@ def read_json(self, json_data): """ self._json_data = json_data - self.axis = self._read_json_variable('axis') - self.value = self._read_json_variable('value') - self.out_name = self._read_json_variable('out_name') - self.var_name = self._read_json_variable('var_name') - self.standard_name = self._read_json_variable('standard_name') - self.long_name = self._read_json_variable('long_name') - self.units = self._read_json_variable('units') - self.stored_direction = self._read_json_variable('stored_direction') - self.valid_min = self._read_json_variable('valid_min') - self.valid_max = self._read_json_variable('valid_max') - self.requested = self._read_json_list_variable('requested') - self.must_have_bounds = self._read_json_variable('must_have_bounds') - self.generic_lev_name = self._read_json_variable('generic_level_name') + self.axis = self._read_json_variable("axis") + self.value = self._read_json_variable("value") + self.out_name = self._read_json_variable("out_name") + self.var_name = self._read_json_variable("var_name") + self.standard_name = self._read_json_variable("standard_name") + self.long_name = self._read_json_variable("long_name") + self.units = self._read_json_variable("units") + self.stored_direction = self._read_json_variable("stored_direction") + self.valid_min = self._read_json_variable("valid_min") + self.valid_max = self._read_json_variable("valid_max") + self.requested = self._read_json_list_variable("requested") + self.must_have_bounds = self._read_json_variable("must_have_bounds") + self.generic_lev_name = self._read_json_variable("generic_level_name") class CMIP5Info(InfoBase): @@ -802,18 +827,19 @@ class CMIP5Info(InfoBase): found in the requested one """ - def __init__(self, - cmor_tables_path, - default=None, - alt_names=None, - strict=True): + def __init__( + self, cmor_tables_path, default=None, alt_names=None, strict=True + ): super().__init__(default, alt_names, strict) cmor_tables_path = self._get_cmor_path(cmor_tables_path) - self._cmor_folder = os.path.join(cmor_tables_path, 'Tables') + self._cmor_folder = os.path.join(cmor_tables_path, "Tables") if not os.path.isdir(self._cmor_folder): - raise OSError(errno.ENOTDIR, "CMOR tables path is not a directory", - self._cmor_folder) + raise OSError( + errno.ENOTDIR, + "CMOR tables path is not a directory", + self._cmor_folder, + ) self.strict = strict self.tables = {} @@ -821,8 +847,8 @@ def __init__(self, self._current_table = None self._last_line_read = None - for table_file in glob.glob(os.path.join(self._cmor_folder, '*')): - if '_grids' in table_file: + for table_file in glob.glob(os.path.join(self._cmor_folder, "*")): + if "_grids" in table_file: continue try: self._load_table(table_file) @@ -840,10 +866,10 @@ def _get_cmor_path(cmor_tables_path): if os.path.isdir(cmor_tables_path): return cmor_tables_path cwd = os.path.dirname(os.path.realpath(__file__)) - cmor_tables_path = os.path.join(cwd, 'tables', cmor_tables_path) + cmor_tables_path = os.path.join(cwd, "tables", cmor_tables_path) return cmor_tables_path - def _load_table(self, table_file, table_name=''): + def _load_table(self, table_file, table_name=""): if table_name and table_name in self.tables: # special case used for updating a table with custom variable file table = self.tables[table_name] @@ -854,28 +880,28 @@ def _load_table(self, table_file, table_name=''): self._read_table_file(table_file, table) def _read_table_file(self, table_file, table=None): - with open(table_file, 'r', encoding='utf-8') as self._current_table: + 
with open(table_file, "r", encoding="utf-8") as self._current_table: self._read_line() while True: key, value = self._last_line_read - if key == 'table_id': + if key == "table_id": table = TableInfo() - table.name = value[len('Table '):] + table.name = value[len("Table ") :] self.tables[table.name] = table - elif key == 'frequency': + elif key == "frequency": table.frequency = value - elif key == 'modeling_realm': + elif key == "modeling_realm": table.realm = value - elif key == 'generic_levels': - for dim in value.split(' '): + elif key == "generic_levels": + for dim in value.split(" "): coord = CoordinateInfo(dim) coord.generic_level = True - coord.axis = 'Z' + coord.axis = "Z" self.coords[dim] = coord - elif key == 'axis_entry': + elif key == "axis_entry": self.coords[value] = self._read_coordinate(value) continue - elif key == 'variable_entry': + elif key == "variable_entry": table[value] = self._read_variable(value, table.frequency) continue if not self._read_line(): @@ -883,44 +909,47 @@ def _read_table_file(self, table_file, table=None): def _read_line(self): line = self._current_table.readline() - if line == '': + if line == "": return False - if line.startswith('!'): + if line.startswith("!"): return self._read_line() - line = line.replace('\n', '') - if '!' in line: - line = line[:line.index('!')] + line = line.replace("\n", "") + if "!" in line: + line = line[: line.index("!")] line = line.strip() if not line: - self._last_line_read = ('', '') + self._last_line_read = ("", "") else: - index = line.index(':') - self._last_line_read = (line[:index].strip(), - line[index + 1:].strip()) + index = line.index(":") + self._last_line_read = ( + line[:index].strip(), + line[index + 1 :].strip(), + ) return True def _read_coordinate(self, value): coord = CoordinateInfo(value) while self._read_line(): key, value = self._last_line_read - if key in ('variable_entry', 'axis_entry'): + if key in ("variable_entry", "axis_entry"): return coord - if key == 'requested': + if key == "requested": coord.requested.extend( - (val for val in value.split(' ') if val)) + (val for val in value.split(" ") if val) + ) continue if hasattr(coord, key): setattr(coord, key, value) return coord def _read_variable(self, short_name, frequency): - var = VariableInfo('CMIP5', short_name) + var = VariableInfo("CMIP5", short_name) var.frequency = frequency while self._read_line(): key, value = self._last_line_read - if key in ('variable_entry', 'axis_entry'): + if key in ("variable_entry", "axis_entry"): break - if key in ('dimensions', 'modeling_realm'): + if key in ("dimensions", "modeling_realm"): setattr(var, key, value.split()) elif hasattr(var, key): setattr(var, key, value) @@ -962,10 +991,10 @@ class CMIP3Info(CMIP5Info): """ def _read_table_file(self, table_file, table=None): - for dim in ('zlevel', ): + for dim in ("zlevel",): coord = CoordinateInfo(dim) coord.generic_level = True - coord.axis = 'Z' + coord.axis = "Z" self.coords[dim] = coord super()._read_table_file(table_file, table) @@ -1001,11 +1030,11 @@ def __init__(self, cmor_tables_path: Optional[str | Path] = None) -> None: self.tables = {} self.var_to_freq: dict[str, dict] = {} table = TableInfo() - table.name = 'custom' + table.name = "custom" self.tables[table.name] = table # First, read default custom tables from repository - self._cmor_folder = self._get_cmor_path('custom') + self._cmor_folder = self._get_cmor_path("custom") self._read_table_dir(self._cmor_folder) # Second, if given, update default tables with user-defined custom @@ -1024,12 
+1053,12 @@ def __init__(self, cmor_tables_path: Optional[str | Path] = None) -> None: def _read_table_dir(self, table_dir: str) -> None: """Read CMOR tables from directory.""" # If present, read coordinates - coordinates_file = os.path.join(table_dir, 'CMOR_coordinates.dat') + coordinates_file = os.path.join(table_dir, "CMOR_coordinates.dat") if os.path.isfile(coordinates_file): self._read_table_file(coordinates_file) # Read other variables - for dat_file in glob.glob(os.path.join(table_dir, '*.dat')): + for dat_file in glob.glob(os.path.join(table_dir, "*.dat")): if dat_file == coordinates_file: continue try: @@ -1044,10 +1073,7 @@ def _read_table_dir(self, table_dir: str) -> None: raise def get_variable( - self, - table: str, - short_name: str, - derived: bool = False + self, table: str, short_name: str, derived: bool = False ) -> VariableInfo | None: """Search and return the variable info. @@ -1069,7 +1095,7 @@ def get_variable( None if not. """ - return self.tables['custom'].get(short_name, None) + return self.tables["custom"].get(short_name, None) def _read_table_file( self, @@ -1077,22 +1103,22 @@ def _read_table_file( _: Optional[TableInfo] = None, ) -> None: """Read a single table file.""" - with open(table_file, 'r', encoding='utf-8') as self._current_table: + with open(table_file, "r", encoding="utf-8") as self._current_table: self._read_line() while True: key, value = self._last_line_read - if key == 'generic_levels': - for dim in value.split(' '): + if key == "generic_levels": + for dim in value.split(" "): coord = CoordinateInfo(dim) coord.generic_level = True - coord.axis = 'Z' + coord.axis = "Z" self.coords[dim] = coord - elif key == 'axis_entry': + elif key == "axis_entry": self.coords[value] = self._read_coordinate(value) continue - elif key == 'variable_entry': - self.tables['custom'][value] = self._read_variable( - value, '' + elif key == "variable_entry": + self.tables["custom"][value] = self._read_variable( + value, "" ) continue if not self._read_line(): diff --git a/esmvalcore/cmor/tables/custom/CMOR_prodlnox.dat b/esmvalcore/cmor/tables/custom/CMOR_prodlnox.dat new file mode 100644 index 0000000000..d8ab973de8 --- /dev/null +++ b/esmvalcore/cmor/tables/custom/CMOR_prodlnox.dat @@ -0,0 +1,20 @@ +SOURCE: CMIP6 +!============ +variable_entry: prodlnox +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: kg s-1 +long_name: Tendency of atmosphere mass content of NOx from lightning +comment: Production NOX (NO+NO2) by lightning globally integrated +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: time +out_name: prodlnox +type: real +!---------------------------------- +! diff --git a/esmvalcore/cmor/tables/custom/CMOR_soz.dat b/esmvalcore/cmor/tables/custom/CMOR_soz.dat new file mode 100644 index 0000000000..725e454e16 --- /dev/null +++ b/esmvalcore/cmor/tables/custom/CMOR_soz.dat @@ -0,0 +1,22 @@ +!============ +variable_entry: soz +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: equivalent_thickness_at_stp_of_atmosphere_ozone_content +units: m +cell_methods: time: mean +cell_measures: area: areacella +long_name: Stratospheric Ozone Column (O3 mole fraction >= 125 ppb) +comment: stratospheric ozone column calculated at 0 degrees C and 1 bar, such that 1m = 1e5 DU. 
Here, the stratosphere is defined as the region where O3 mole fraction >= 125 ppb.
+!----------------------------------
+! Additional variable information:
+!----------------------------------
+dimensions: longitude latitude time
+type: real
+valid_min: 0.0
+valid_max: 5000.0
+!----------------------------------
+!
diff --git a/esmvalcore/cmor/tables/custom/CMOR_tosStderr.dat b/esmvalcore/cmor/tables/custom/CMOR_tosStderr.dat
new file mode 100644
index 0000000000..31756d4cde
--- /dev/null
+++ b/esmvalcore/cmor/tables/custom/CMOR_tosStderr.dat
@@ -0,0 +1,26 @@
+SOURCE: CMIP5
+!============
+variable_entry: tosStderr
+!============
+modeling_realm: ocean
+!----------------------------------
+! Variable attributes:
+!----------------------------------
+standard_name:
+units: K
+cell_methods: time: mean
+cell_measures: area: areacello
+long_name: Sea Surface Temperature Error
+comment:
+!----------------------------------
+! Additional variable information:
+!----------------------------------
+dimensions: longitude latitude time
+out_name: tosStderr
+type: real
+valid_min: 0
+valid_max:
+ok_min_mean_abs: 0
+ok_max_mean_abs:
+!----------------------------------
+!
diff --git a/esmvalcore/cmor/tables/custom/CMOR_toz.dat b/esmvalcore/cmor/tables/custom/CMOR_toz.dat
index b875dcbe57..d2de911497 100644
--- a/esmvalcore/cmor/tables/custom/CMOR_toz.dat
+++ b/esmvalcore/cmor/tables/custom/CMOR_toz.dat
@@ -1,4 +1,4 @@
-SOURCE: CCMI1
+SOURCE: CMIP6
 !============
 variable_entry: toz
 !============
@@ -6,12 +6,12 @@ modeling_realm: atmos
 !----------------------------------
 ! Variable attributes:
 !----------------------------------
-standard_name:
-units: DU
+standard_name: equivalent_thickness_at_stp_of_atmosphere_ozone_content
+units: m
 cell_methods: time: mean
 cell_measures: area: areacella
 long_name: Total Ozone Column
-comment: total ozone column in DU
+comment: Total ozone column calculated at 0 degrees C and 1 bar, such that 1m = 1e5 DU.
 !----------------------------------
 ! Additional variable information:
 !----------------------------------
diff --git a/esmvalcore/cmor/tables/custom/CMOR_tozStderr.dat b/esmvalcore/cmor/tables/custom/CMOR_tozStderr.dat
index 7d8769bc7c..0b80052bee 100644
--- a/esmvalcore/cmor/tables/custom/CMOR_tozStderr.dat
+++ b/esmvalcore/cmor/tables/custom/CMOR_tozStderr.dat
@@ -1,4 +1,4 @@
-SOURCE: CCMI1
+SOURCE: CMIP6
 !============
 variable_entry: tozStderr
 !============
@@ -6,12 +6,12 @@ modeling_realm: atmos
 !----------------------------------
 ! Variable attributes:
 !----------------------------------
-standard_name:
-units: DU
+standard_name: equivalent_thickness_at_stp_of_atmosphere_ozone_content
+units: m
 cell_methods: time: mean
 cell_measures: area: areacella
 long_name: Total Ozone Column Error
-comment: total ozone column in DU
+comment: Total ozone column error calculated at 0 degrees C and 1 bar, such that 1m = 1e5 DU.
 !----------------------------------
 ! Additional variable information:
 !----------------------------------
diff --git a/esmvalcore/cmor/tables/custom/CMOR_troz.dat b/esmvalcore/cmor/tables/custom/CMOR_troz.dat
new file mode 100644
index 0000000000..ea00615131
--- /dev/null
+++ b/esmvalcore/cmor/tables/custom/CMOR_troz.dat
@@ -0,0 +1,22 @@
+!============
+variable_entry: troz
+!============
+modeling_realm: atmos
+!----------------------------------
+! Variable attributes:
+!----------------------------------
+standard_name: equivalent_thickness_at_stp_of_atmosphere_ozone_content
+units: m
+cell_methods: time: mean
+cell_measures: area: areacella
+long_name: Tropospheric Ozone Column (O3 mole fraction < 125 ppb)
+comment: tropospheric ozone column calculated at 0 degrees C and 1 bar, such that 1m = 1e5 DU. Here, the troposphere is defined as the region where O3 mole fraction < 125 ppb.
+!----------------------------------
+! Additional variable information:
+!----------------------------------
+dimensions: longitude latitude time
+type: real
+valid_min: 0.0
+valid_max: 5000.0
+!----------------------------------
+!
diff --git a/esmvalcore/config-developer.yml b/esmvalcore/config-developer.yml
index 9d5f1bc62f..c81324142a 100644
--- a/esmvalcore/config-developer.yml
+++ b/esmvalcore/config-developer.yml
@@ -194,3 +194,14 @@ CESM:
   output_file: '{project}_{dataset}_{case}_{gcomp}_{scomp}_{type}_{mip}_{short_name}'
   cmor_type: 'CMIP6'
   cmor_default_table_prefix: 'CMIP6_'
+
+ACCESS:
+  cmor_strict: false
+  input_dir:
+    default:
+      - '{dataset}/{sub_dataset}/{exp}/{modeling_realm}/netCDF'
+  input_file:
+    default: '{sub_dataset}.{special_attr}-*.nc'
+  output_file: '{project}_{dataset}_{mip}_{exp}_{institute}_{sub_dataset}_{special_attr}_{short_name}'
+  cmor_type: 'CMIP6'
+  cmor_default_table_prefix: 'CMIP6_'
diff --git a/esmvalcore/config/__init__.py b/esmvalcore/config/__init__.py
index 7c2b8e379c..f9a632b75c 100644
--- a/esmvalcore/config/__init__.py
+++ b/esmvalcore/config/__init__.py
@@ -13,7 +13,7 @@
 from ._config_object import CFG, Config, Session
 
 __all__ = (
-    'CFG',
-    'Config',
-    'Session',
+    "CFG",
+    "Config",
+    "Session",
 )
diff --git a/esmvalcore/config/_config.py b/esmvalcore/config/_config.py
index 5dcad80e05..71617c625e 100644
--- a/esmvalcore/config/_config.py
+++ b/esmvalcore/config/_config.py
@@ -1,4 +1,5 @@
 """Functions dealing with config-user.yml / config-developer.yml."""
+
 from __future__ import annotations
 
 import collections.abc
@@ -43,7 +44,7 @@ def _load_extra_facets(project, extra_facets_dir):
         config_file_paths = config_path.glob(f"{project.lower()}-*.yml")
         for config_file_path in sorted(config_file_paths):
             logger.debug("Loading extra facets from %s", config_file_path)
-            with config_file_path.open(encoding='utf-8') as config_file:
+            with config_file_path.open(encoding="utf-8") as config_file:
                 config_piece = yaml.safe_load(config_file)
             if config_piece:
                 _deep_update(config, config_piece)
@@ -53,7 +54,7 @@ def get_extra_facets(dataset, extra_facets_dir):
     """Read configuration files with additional variable information."""
     project_details = _load_extra_facets(
-        dataset.facets['project'],
+        dataset.facets["project"],
         extra_facets_dir,
     )
 
@@ -75,10 +76,11 @@ def pattern_filter(patterns, name):
         return [pat for pat in patterns if fnmatch.fnmatchcase(name, pat)]
 
     extra_facets = {}
-    for dataset_ in pattern_filter(project_details, dataset['dataset']):
-        for mip_ in pattern_filter(project_details[dataset_], dataset['mip']):
-            for var in pattern_filter(project_details[dataset_][mip_],
-                                      dataset['short_name']):
+    for dataset_ in pattern_filter(project_details, dataset["dataset"]):
+        for mip_ in pattern_filter(project_details[dataset_], dataset["mip"]):
+            for var in pattern_filter(
+                project_details[dataset_][mip_], dataset["short_name"]
+            ):
                 facets = project_details[dataset_][mip_][var]
                 extra_facets.update(facets)
 
@@ -87,23 +89,25 @@ def load_config_developer(cfg_file):
     """Read the developer's configuration file."""
-    with open(cfg_file, 'r', encoding='utf-8') as file:
+    with open(cfg_file, "r", encoding="utf-8") as file:
         cfg = yaml.safe_load(file)
 
-    if 'obs4mips' in cfg:
+    if "obs4mips" in cfg:
         logger.warning(
             "Correcting capitalization, project 'obs4mips'"
-            " should be written as 'obs4MIPs' in %s", cfg_file)
-        cfg['obs4MIPs'] = cfg.pop('obs4mips')
+            " should be written as 'obs4MIPs' in %s",
+            cfg_file,
+        )
+        cfg["obs4MIPs"] = cfg.pop("obs4mips")
 
     for project, settings in cfg.items():
-        for site, drs in settings.get('input_dir', {}).items():
+        for site, drs in settings.get("input_dir", {}).items():
             # Since v2.8, 'version' can be used instead of 'latestversion'
             if isinstance(drs, list):
-                drs = [d.replace('{latestversion}', '{version}') for d in drs]
+                drs = [d.replace("{latestversion}", "{version}") for d in drs]
             else:
-                drs = drs.replace('{latestversion}', '{version}')
-            settings['input_dir'][site] = drs
+                drs = drs.replace("{latestversion}", "{version}")
+            settings["input_dir"][site] = drs
         CFG[project] = settings
 
     read_cmor_tables(cfg_file)
@@ -118,8 +122,8 @@ def get_project_config(project):
 
 def get_institutes(variable):
     """Return the institutes given the dataset name in CMIP6."""
-    dataset = variable['dataset']
-    project = variable['project']
+    dataset = variable["dataset"]
+    project = variable["project"]
     try:
         return CMOR_TABLES[project].institutes[dataset]
     except (KeyError, AttributeError):
@@ -128,9 +132,9 @@ def get_activity(variable):
     """Return the activity given the experiment name in CMIP6."""
-    project = variable['project']
+    project = variable["project"]
     try:
-        exp = variable['exp']
+        exp = variable["exp"]
         if isinstance(exp, list):
             return [CMOR_TABLES[project].activities[value][0] for value in exp]
         return CMOR_TABLES[project].activities[exp][0]
@@ -143,8 +147,8 @@ def get_ignored_warnings(project: FacetValue, step: str) -> None | list:
     if project not in CFG:
         return None
     project_cfg = CFG[project]
-    if 'ignore_warnings' not in project_cfg:
+    if "ignore_warnings" not in project_cfg:
         return None
-    if step not in project_cfg['ignore_warnings']:
+    if step not in project_cfg["ignore_warnings"]:
         return None
-    return project_cfg['ignore_warnings'][step]
+    return project_cfg["ignore_warnings"][step]
diff --git a/esmvalcore/config/_config_object.py b/esmvalcore/config/_config_object.py
index 2e53857d70..dc78506215 100644
--- a/esmvalcore/config/_config_object.py
+++ b/esmvalcore/config/_config_object.py
@@ -1,4 +1,5 @@
 """Importable config object."""
+
 from __future__ import annotations
 
 import os
@@ -21,8 +22,10 @@
 )
 from ._validated_config import ValidatedConfig
 
-URL = ('https://docs.esmvaltool.org/projects/'
-       'ESMValCore/en/latest/quickstart/configure.html')
+URL = (
+    "https://docs.esmvaltool.org/projects/"
+    "ESMValCore/en/latest/quickstart/configure.html"
+)
 
 
 class Config(ValidatedConfig):
@@ -32,14 +35,15 @@ class Config(ValidatedConfig):
     :obj:`esmvalcore.config.CFG` instead.
""" - _DEFAULT_USER_CONFIG_DIR = Path.home() / '.esmvaltool' + + _DEFAULT_USER_CONFIG_DIR = Path.home() / ".esmvaltool" _validate = _validators _deprecate = _deprecators _deprecated_defaults = _deprecated_options_defaults _warn_if_missing = ( - ('drs', URL), - ('rootpath', URL), + ("drs", URL), + ("rootpath", URL), ) @classmethod @@ -72,7 +76,7 @@ def _load_user_config( try: mapping = cls._read_config_file(config_user_path) - mapping['config_file'] = config_user_path + mapping["config_file"] = config_user_path except FileNotFoundError: if raise_exception: raise @@ -94,21 +98,21 @@ def _load_default_config(cls): """Load the default configuration.""" new = cls() - package_config_user_path = Path( - esmvalcore.__file__ - ).parent / 'config-user.yml' + package_config_user_path = ( + Path(esmvalcore.__file__).parent / "config-user.yml" + ) mapping = cls._read_config_file(package_config_user_path) # Add defaults that are not available in esmvalcore/config-user.yml - mapping['check_level'] = CheckLevels.DEFAULT - mapping['config_file'] = package_config_user_path - mapping['diagnostics'] = None - mapping['extra_facets_dir'] = tuple() - mapping['max_datasets'] = None - mapping['max_years'] = None - mapping['resume_from'] = [] - mapping['run_diagnostic'] = True - mapping['skip_nonexistent'] = False + mapping["check_level"] = CheckLevels.DEFAULT + mapping["config_file"] = package_config_user_path + mapping["diagnostics"] = None + mapping["extra_facets_dir"] = tuple() + mapping["max_datasets"] = None + mapping["max_years"] = None + mapping["resume_from"] = [] + mapping["run_diagnostic"] = True + mapping["skip_nonexistent"] = False new.update(mapping) @@ -122,14 +126,14 @@ def _read_config_file(config_user_path: Path) -> dict: f"Config file '{config_user_path}' does not exist" ) - with open(config_user_path, 'r', encoding='utf-8') as file: + with open(config_user_path, "r", encoding="utf-8") as file: cfg = yaml.safe_load(file) return cfg @staticmethod def _get_config_user_path( - filename: Optional[os.PathLike | str] = None + filename: Optional[os.PathLike | str] = None, ) -> Path: """Get path to user configuration file. @@ -169,10 +173,10 @@ def _get_config_user_path( # (2) Try to get user configuration file from internal # _ESMVALTOOL_USER_CONFIG_FILE_ environment variable if ( - config_user is None and - '_ESMVALTOOL_USER_CONFIG_FILE_' in os.environ + config_user is None + and "_ESMVALTOOL_USER_CONFIG_FILE_" in os.environ ): - config_user = os.environ['_ESMVALTOOL_USER_CONFIG_FILE_'] + config_user = os.environ["_ESMVALTOOL_USER_CONFIG_FILE_"] # (3) Try to get user configuration file from CLI arguments if config_user is None: @@ -180,7 +184,7 @@ def _get_config_user_path( # (4) Default location if config_user is None: - config_user = Config._DEFAULT_USER_CONFIG_DIR / 'config-user.yml' + config_user = Config._DEFAULT_USER_CONFIG_DIR / "config-user.yml" config_user = Path(config_user).expanduser() @@ -192,8 +196,8 @@ def _get_config_user_path( # If used within the esmvaltool program, make sure that subsequent # calls of this method (also in suprocesses) use the correct user # configuration file - if Path(sys.argv[0]).name == 'esmvaltool': - os.environ['_ESMVALTOOL_USER_CONFIG_FILE_'] = str(config_user) + if Path(sys.argv[0]).name == "esmvaltool": + os.environ["_ESMVALTOOL_USER_CONFIG_FILE_"] = str(config_user) return config_user @@ -212,15 +216,15 @@ def _get_config_path_from_cli() -> None | str: file exists. 
""" - if Path(sys.argv[0]).name != 'esmvaltool': + if Path(sys.argv[0]).name != "esmvaltool": return None for arg in sys.argv: - for opt in ('--config-file', '--config_file'): + for opt in ("--config-file", "--config_file"): if opt in arg: # Parse '--config-file=/file.yml' or # '--config_file=/file.yml' - partition = arg.partition('=') + partition = arg.partition("=") if partition[2]: return partition[2] @@ -243,13 +247,13 @@ def load_from_file( def reload(self): """Reload the config file.""" - if 'config_file' not in self: + if "config_file" not in self: raise ValueError( "Cannot reload configuration, option 'config_file' is " "missing; make sure to only use the `CFG` object from the " "`esmvalcore.config` module" ) - self.load_from_file(self['config_file']) + self.load_from_file(self["config_file"]) def start_session(self, name: str): """Start a new session from this configuration object. @@ -285,21 +289,21 @@ class Session(ValidatedConfig): _deprecate = _deprecators _deprecated_defaults = _deprecated_options_defaults - relative_preproc_dir = Path('preproc') - relative_work_dir = Path('work') - relative_plot_dir = Path('plots') - relative_run_dir = Path('run') - relative_main_log = Path('run', 'main_log.txt') - relative_main_log_debug = Path('run', 'main_log_debug.txt') - relative_cmor_log = Path('run', 'cmor_log.txt') - _relative_fixed_file_dir = Path('preproc', 'fixed_files') + relative_preproc_dir = Path("preproc") + relative_work_dir = Path("work") + relative_plot_dir = Path("plots") + relative_run_dir = Path("run") + relative_main_log = Path("run", "main_log.txt") + relative_main_log_debug = Path("run", "main_log_debug.txt") + relative_cmor_log = Path("run", "cmor_log.txt") + _relative_fixed_file_dir = Path("preproc", "fixed_files") - def __init__(self, config: dict, name: str = 'session'): + def __init__(self, config: dict, name: str = "session"): super().__init__(config) self.session_name: str | None = None self.set_session_name(name) - def set_session_name(self, name: str = 'session'): + def set_session_name(self, name: str = "session"): """Set the name for the session. The `name` is used to name the session directory, e.g. 
@@ -311,7 +315,7 @@ def set_session_name(self, name: str = 'session'): @property def session_dir(self): """Return session directory.""" - return self['output_dir'] / self.session_name + return self["output_dir"] / self.session_name @property def preproc_dir(self): @@ -336,7 +340,7 @@ def run_dir(self): @property def config_dir(self): """Return user config directory.""" - return Path(self['config_file']).parent + return Path(self["config_file"]).parent @property def main_log(self): diff --git a/esmvalcore/config/_config_validators.py b/esmvalcore/config/_config_validators.py index d6489862df..23034ce5c2 100644 --- a/esmvalcore/config/_config_validators.py +++ b/esmvalcore/config/_config_validators.py @@ -1,4 +1,5 @@ """List of config validators.""" + from __future__ import annotations import logging @@ -27,9 +28,9 @@ SEARCH_ESGF_OPTIONS = ( - 'never', # Never search ESGF for files - 'when_missing', # Only search ESGF if no local files are available - 'always', # Always search ESGF for files + "never", # Never search ESGF for files + "when_missing", # Only search ESGF if no local files are available + "always", # Always search ESGF for files ) @@ -50,22 +51,23 @@ def _make_type_validator(cls, *, allow_none=False): def validator(inp): looks_like_none = isinstance(inp, str) and (inp.lower() == "none") - if (allow_none and (inp is None or looks_like_none)): + if allow_none and (inp is None or looks_like_none): return None try: return cls(inp) except ValueError as err: if isinstance(cls, type): raise ValidationError( - f'Could not convert {repr(inp)} to {cls.__name__}' + f"Could not convert {repr(inp)} to {cls.__name__}" ) from err raise validator.__name__ = f"validate_{cls.__name__}" if allow_none: validator.__name__ += "_or_None" - validator.__qualname__ = (validator.__qualname__.rsplit(".", 1)[0] + "." + - validator.__name__) + validator.__qualname__ = ( + validator.__qualname__.rsplit(".", 1)[0] + "." + validator.__name__ + ) return validator @@ -74,54 +76,67 @@ def validator(inp): # the the 'Python Software Foundation License' # (https://www.python.org/psf/license) @lru_cache() -def _listify_validator(scalar_validator, - allow_stringlist=False, - *, - n_items=None, - docstring=None, - return_type=list): +def _listify_validator( + scalar_validator, + allow_stringlist=False, + *, + n_items=None, + docstring=None, + return_type=list, +): """Apply the validator to a list.""" def func(inp): if isinstance(inp, str): try: inp = return_type( - scalar_validator(val.strip()) for val in inp.split(',') - if val.strip()) + scalar_validator(val.strip()) + for val in inp.split(",") + if val.strip() + ) except Exception: if allow_stringlist: # Sometimes, a list of colors might be a single string # of single-letter colornames. So give that a shot. inp = return_type( - scalar_validator(val.strip()) for val in inp - if val.strip()) + scalar_validator(val.strip()) + for val in inp + if val.strip() + ) else: raise # Allow any ordered sequence type -- generators, np.ndarray, pd.Series # -- but not sets, whose iteration order is non-deterministic. - elif isinstance(inp, - Iterable) and not isinstance(inp, (set, frozenset)): + elif isinstance(inp, Iterable) and not isinstance( + inp, (set, frozenset) + ): # The condition on this list comprehension will preserve the # behavior of filtering out any empty strings (behavior was # from the original validate_stringlist()), while allowing # any non-string/text scalar values such as numbers and arrays. 
inp = return_type( - scalar_validator(val) for val in inp - if not isinstance(val, str) or val) + scalar_validator(val) + for val in inp + if not isinstance(val, str) or val + ) else: raise ValidationError( - f"Expected str or other non-set iterable, but got {inp}") + f"Expected str or other non-set iterable, but got {inp}" + ) if n_items is not None and len(inp) != n_items: - raise ValidationError(f"Expected {n_items} values, " - f"but there are {len(inp)} values in {inp}") + raise ValidationError( + f"Expected {n_items} values, " + f"but there are {len(inp)} values in {inp}" + ) return inp try: func.__name__ = "{}list".format(scalar_validator.__name__) except AttributeError: # class instance. func.__name__ = "{}List".format(type(scalar_validator).__name__) - func.__qualname__ = func.__qualname__.rsplit(".", - 1)[0] + "." + func.__name__ + func.__qualname__ = ( + func.__qualname__.rsplit(".", 1)[0] + "." + func.__name__ + ) if docstring is not None: docstring = scalar_validator.__doc__ func.__doc__ = docstring @@ -152,7 +167,7 @@ def validate_path(value, allow_none=False): def validate_positive(value): """Check if number is positive.""" if value is not None and value <= 0: - raise ValidationError(f'Expected a positive number, but got {value}') + raise ValidationError(f"Expected a positive number, but got {value}") return value @@ -169,30 +184,34 @@ def chained(value): validate_string = _make_type_validator(str) validate_string_or_none = _make_type_validator(str, allow_none=True) -validate_stringlist = _listify_validator(validate_string, - docstring='Return a list of strings.') +validate_stringlist = _listify_validator( + validate_string, docstring="Return a list of strings." +) validate_bool_or_none = partial(validate_bool, allow_none=True) validate_int = _make_type_validator(int) validate_int_or_none = _make_type_validator(int, allow_none=True) validate_float = _make_type_validator(float) -validate_floatlist = _listify_validator(validate_float, - docstring='Return a list of floats.') +validate_floatlist = _listify_validator( + validate_float, docstring="Return a list of floats." +) validate_dict = _make_type_validator(dict) validate_path_or_none = _make_type_validator(validate_path, allow_none=True) -validate_pathlist = _listify_validator(validate_path, - docstring='Return a list of paths.') +validate_pathlist = _listify_validator( + validate_path, docstring="Return a list of paths." 
+) -validate_pathtuple = _listify_validator(validate_path, - docstring='Return a tuple of paths.', - return_type=tuple) +validate_pathtuple = _listify_validator( + validate_path, docstring="Return a tuple of paths.", return_type=tuple +) validate_int_positive = _chain_validator(validate_int, validate_positive) -validate_int_positive_or_none = _make_type_validator(validate_int_positive, - allow_none=True) +validate_int_positive_or_none = _make_type_validator( + validate_int_positive, allow_none=True +) def validate_rootpath(value): @@ -200,11 +219,12 @@ def validate_rootpath(value): mapping = validate_dict(value) new_mapping = {} for key, paths in mapping.items(): - if key == 'obs4mips': + if key == "obs4mips": logger.warning( "Correcting capitalization, project 'obs4mips' should be " - "written as 'obs4MIPs' in 'rootpath' in config-user.yml") - key = 'obs4MIPs' + "written as 'obs4MIPs' in 'rootpath' in config-user.yml" + ) + key = "obs4MIPs" if isinstance(paths, Path): paths = str(paths) if isinstance(paths, (str, list)): @@ -224,11 +244,12 @@ def validate_drs(value): mapping = validate_dict(value) new_mapping = {} for key, drs in mapping.items(): - if key == 'obs4mips': + if key == "obs4mips": logger.warning( "Correcting capitalization, project 'obs4mips' should be " - "written as 'obs4MIPs' in 'drs' in config-user.yml") - key = 'obs4MIPs' + "written as 'obs4MIPs' in 'drs' in config-user.yml" + ) + key = "obs4MIPs" new_mapping[key] = validate_string(drs) return new_mapping @@ -237,7 +258,7 @@ def validate_config_developer(value): """Validate and load config developer path.""" path = validate_path_or_none(value) if path is None: - path = importlib_files('esmvalcore') / 'config-developer.yml' + path = importlib_files("esmvalcore") / "config-developer.yml" load_config_developer(path) return path @@ -250,7 +271,8 @@ def validate_check_level(value): value = CheckLevels[value.upper()] except KeyError: raise ValidationError( - f'`{value}` is not a valid strictness level') from None + f"`{value}` is not a valid strictness level" + ) from None else: value = CheckLevels(value) @@ -264,64 +286,62 @@ def validate_search_esgf(value): value = value.lower() if value not in SEARCH_ESGF_OPTIONS: raise ValidationError( - f'`{value}` is not a valid option ESGF search option, possible ' - f'values are {SEARCH_ESGF_OPTIONS}' + f"`{value}` is not a valid ESGF search option, possible " + f"values are {SEARCH_ESGF_OPTIONS}" ) from None return value def validate_diagnostics( - diagnostics: Union[Iterable[str], str, None] + diagnostics: Union[Iterable[str], str, None], ) -> Optional[set[str]]: """Validate diagnostic location.""" if diagnostics is None: return None if isinstance(diagnostics, str): - diagnostics = diagnostics.strip().split(' ') + diagnostics = diagnostics.strip().split(" ") return { - pattern if TASKSEP in pattern else pattern + TASKSEP + '*' + pattern if TASKSEP in pattern else pattern + TASKSEP + "*" for pattern in diagnostics or () } _validators = { # From user config - 'auxiliary_data_dir': validate_path, - 'compress_netcdf': validate_bool, - 'config_developer_file': validate_config_developer, - 'download_dir': validate_path, - 'drs': validate_drs, - 'exit_on_warning': validate_bool, - 'extra_facets_dir': validate_pathtuple, - 'log_level': validate_string, - 'max_parallel_tasks': validate_int_or_none, - 'output_dir': validate_path, - 'output_file_type': validate_string, - 'profile_diagnostic': validate_bool, - 'remove_preproc_dir': validate_bool, - 'rootpath': validate_rootpath, -
'run_diagnostic': validate_bool, - 'save_intermediary_cubes': validate_bool, - 'search_esgf': validate_search_esgf, - + "auxiliary_data_dir": validate_path, + "compress_netcdf": validate_bool, + "config_developer_file": validate_config_developer, + "download_dir": validate_path, + "drs": validate_drs, + "exit_on_warning": validate_bool, + "extra_facets_dir": validate_pathtuple, + "log_level": validate_string, + "max_parallel_tasks": validate_int_or_none, + "output_dir": validate_path, + "output_file_type": validate_string, + "profile_diagnostic": validate_bool, + "remove_preproc_dir": validate_bool, + "rootpath": validate_rootpath, + "run_diagnostic": validate_bool, + "save_intermediary_cubes": validate_bool, + "search_esgf": validate_search_esgf, # From CLI - 'check_level': validate_check_level, - 'diagnostics': validate_diagnostics, - 'max_datasets': validate_int_positive_or_none, - 'max_years': validate_int_positive_or_none, - 'resume_from': validate_pathlist, - 'skip_nonexistent': validate_bool, - + "check_level": validate_check_level, + "diagnostics": validate_diagnostics, + "max_datasets": validate_int_positive_or_none, + "max_years": validate_int_positive_or_none, + "resume_from": validate_pathlist, + "skip_nonexistent": validate_bool, # From recipe - 'write_ncl_interface': validate_bool, - + "write_ncl_interface": validate_bool, # config location - 'config_file': validate_path, + "config_file": validate_path, } # Handle deprecations (using ``ValidatedConfig._deprecate``) + def _handle_deprecation( option: str, deprecated_version: str, diff --git a/esmvalcore/config/_dask.py b/esmvalcore/config/_dask.py index 7030ea816a..effd33058f 100644 --- a/esmvalcore/config/_dask.py +++ b/esmvalcore/config/_dask.py @@ -1,4 +1,5 @@ """Configuration for Dask distributed.""" + import contextlib import importlib import logging @@ -9,7 +10,7 @@ logger = logging.getLogger(__name__) -CONFIG_FILE = Path.home() / '.esmvaltool' / 'dask.yml' +CONFIG_FILE = Path.home() / ".esmvaltool" / "dask.yml" def check_distributed_config(): @@ -23,7 +24,8 @@ def check_distributed_config(): "In that case, you can safely ignore this warning. " "See https://docs.esmvaltool.org/projects/ESMValCore/en/latest/" "quickstart/configure.html#dask-distributed-configuration for " - "more information. ") + "more information. " + ) @contextlib.contextmanager @@ -31,32 +33,34 @@ def get_distributed_client(): """Get a Dask distributed client.""" dask_args = {} if CONFIG_FILE.exists(): - config = yaml.safe_load(CONFIG_FILE.read_text(encoding='utf-8')) + config = yaml.safe_load(CONFIG_FILE.read_text(encoding="utf-8")) if config is not None: dask_args = config - client_args = dask_args.get('client') or {} - cluster_args = dask_args.get('cluster') or {} + client_args = dask_args.get("client") or {} + cluster_args = dask_args.get("cluster") or {} # Start a cluster, if requested - if 'address' in client_args: + if "address" in client_args: # Use an externally managed cluster. cluster = None if cluster_args: logger.warning( "Not using Dask 'cluster' settings from %s because a cluster " - "'address' is already provided in 'client'.", CONFIG_FILE) + "'address' is already provided in 'client'.", + CONFIG_FILE, + ) elif cluster_args: # Start cluster. 
cluster_type = cluster_args.pop( - 'type', - 'distributed.LocalCluster', + "type", + "distributed.LocalCluster", ) - cluster_module_name, cluster_cls_name = cluster_type.rsplit('.', 1) + cluster_module_name, cluster_cls_name = cluster_type.rsplit(".", 1) cluster_module = importlib.import_module(cluster_module_name) cluster_cls = getattr(cluster_module, cluster_cls_name) cluster = cluster_cls(**cluster_args) - client_args['address'] = cluster.scheduler_address + client_args["address"] = cluster.scheduler_address else: # No cluster configured, use Dask basic scheduler, or a LocalCluster # managed through Client. diff --git a/esmvalcore/config/_diagnostics.py b/esmvalcore/config/_diagnostics.py index c8f0869c9e..c527f617f6 100644 --- a/esmvalcore/config/_diagnostics.py +++ b/esmvalcore/config/_diagnostics.py @@ -1,4 +1,5 @@ """Diagnostics and tags management.""" + import logging import os from pathlib import Path @@ -32,22 +33,22 @@ def __repr__(self): @property def recipes(self): """Return the location of the recipes.""" - return self.path / 'recipes' + return self.path / "recipes" @property def references(self): """Return location of the references (bibtex files).""" - return self.path / 'references' + return self.path / "references" @property def tags_config(self): """Return location of the tags config.""" - return self.path / 'config-references.yml' + return self.path / "config-references.yml" @property def scripts(self): """Return location of diagnostic scripts.""" - return self.path / 'diag_scripts' + return self.path / "diag_scripts" def load_tags(self): """Load the tags config into an instance of ``TagsManager``.""" @@ -67,7 +68,7 @@ def find(cls): path = Path.cwd() else: path = Path(esmvaltool.__file__).absolute().parent - logger.debug('Using diagnostics from %s', path) + logger.debug("Using diagnostics from %s", path) return cls(path) @@ -83,7 +84,7 @@ def from_file(cls, filename: str): """Load the reference tags used for provenance recording.""" if os.path.exists(filename): logger.debug("Loading tags from %s", filename) - with open(filename, 'r', encoding='utf-8') as file: + with open(filename, "r", encoding="utf-8") as file: tags = cls(yaml.safe_load(file)) tags.source_file = filename return tags @@ -132,13 +133,14 @@ def get_tag_value(self, section: str, tag: str): Name of the tag """ if section not in self: - postfix = f' in {self.source_file}' if self.source_file else '' + postfix = f" in {self.source_file}" if self.source_file else "" raise ValueError(f"Section '{section}' does not exist{postfix}") if tag not in self[section]: - postfix = f' of {self.source_file}' if self.source_file else '' + postfix = f" of {self.source_file}" if self.source_file else "" raise ValueError( - f"Tag '{tag}' does not exist in section '{section}'{postfix}") + f"Tag '{tag}' does not exist in section '{section}'{postfix}" + ) return self[section][tag] diff --git a/esmvalcore/config/_esgf_pyclient.py b/esmvalcore/config/_esgf_pyclient.py index 1dfcd38ede..9fc352d223 100644 --- a/esmvalcore/config/_esgf_pyclient.py +++ b/esmvalcore/config/_esgf_pyclient.py @@ -1,51 +1,19 @@ """esgf-pyclient configuration. The configuration is read from the file ~/.esmvaltool/esgf-pyclient.yml. 
- -There are four sections in the configuration file: - -logon: contains keyword arguments to :func:`pyesgf.logon.LogonManager.logon` -search_connection: contains keyword arguments to - :class:`pyesgf.search.connection.SearchConnection` """ -import importlib + import logging import os import stat from functools import lru_cache from pathlib import Path -from types import ModuleType -from typing import Optional import yaml -keyring: Optional[ModuleType] = None -try: - keyring = importlib.import_module('keyring') -except ModuleNotFoundError: - pass - logger = logging.getLogger(__name__) -CONFIG_FILE = Path.home() / '.esmvaltool' / 'esgf-pyclient.yml' - - -def get_keyring_credentials(): - """Load credentials from keyring.""" - logon = {} - if keyring is None: - return logon - - for key in ['hostname', 'username', 'password']: - try: - value = keyring.get_password('ESGF', key) - except keyring.errors.NoKeyringError: - # No keyring backend is available - return logon - if value is not None: - logon[key] = value - - return logon +CONFIG_FILE = Path.home() / ".esmvaltool" / "esgf-pyclient.yml" def read_config_file(): @@ -56,20 +24,22 @@ def read_config_file(): if mode & stat.S_IRWXG or mode & stat.S_IRWXO: logger.warning("Correcting unsafe permissions on %s", CONFIG_FILE) os.chmod(CONFIG_FILE, stat.S_IRUSR | stat.S_IWUSR) - with CONFIG_FILE.open(encoding='utf-8') as file: + with CONFIG_FILE.open(encoding="utf-8") as file: cfg = yaml.safe_load(file) else: logger.info( "Using default ESGF configuration, configuration " - "file %s not present.", CONFIG_FILE) + "file %s not present.", + CONFIG_FILE, + ) cfg = {} # For backwards compatibility: prior to v2.6 the configuration file # contained a single URL instead of a list of URLs. - if 'search_connection' in cfg: - if 'url' in cfg['search_connection']: - url = cfg['search_connection'].pop('url') - cfg['search_connection']['urls'] = [url] + if "search_connection" in cfg: + if "url" in cfg["search_connection"]: + url = cfg["search_connection"].pop("url") + cfg["search_connection"]["urls"] = [url] return cfg @@ -77,45 +47,39 @@ def read_config_file(): def load_esgf_pyclient_config(): """Load the esgf-pyclient configuration.""" cfg = { - # Arguments to - # https://esgf-pyclient.readthedocs.io/en/latest/api.html#pyesgf.logon.LogonManager.logon - 'logon': { - 'interactive': False, - 'bootstrap': True, - }, # Arguments to # https://esgf-pyclient.readthedocs.io/en/latest/api.html#pyesgf.search.connection.SearchConnection - 'search_connection': { + "search_connection": { # List of available index nodes: https://esgf.llnl.gov/nodes.html # Be careful about the url, not all search urls have CMIP3 data? 
- 'urls': [ - 'https://esgf.ceda.ac.uk/esg-search', - 'https://esgf-node.llnl.gov/esg-search', - 'https://esgf-data.dkrz.de/esg-search', - 'https://esgf-node.ipsl.upmc.fr/esg-search', - 'https://esg-dn1.nsc.liu.se/esg-search', - 'https://esgf.nci.org.au/esg-search', - 'https://esgf.nccs.nasa.gov/esg-search', - 'https://esgdata.gfdl.noaa.gov/esg-search', + "urls": [ + "https://esgf.ceda.ac.uk/esg-search", + "https://esgf-node.llnl.gov/esg-search", + "https://esgf-data.dkrz.de/esg-search", + "https://esgf-node.ipsl.upmc.fr/esg-search", + "https://esg-dn1.nsc.liu.se/esg-search", + "https://esgf.nci.org.au/esg-search", + "https://esgf.nccs.nasa.gov/esg-search", + "https://esgdata.gfdl.noaa.gov/esg-search", ], - 'distrib': True, - 'timeout': 120, - 'cache': '~/.esmvaltool/cache/pyesgf-search-results', - 'expire_after': 86400, # cache expires after 1 day + "distrib": True, + "timeout": 120, + "cache": "~/.esmvaltool/cache/pyesgf-search-results", + "expire_after": 86400, # cache expires after 1 day }, } - keyring_cfg = get_keyring_credentials() - cfg['logon'].update(keyring_cfg) - file_cfg = read_config_file() - for section in ['logon', 'search_connection']: + for section in ["search_connection"]: cfg[section].update(file_cfg.get(section, {})) - if 'cache' in cfg['search_connection']: - cache_file = Path(os.path.expandvars( - cfg['search_connection']['cache'])).expanduser().absolute() - cfg['search_connection']['cache'] = cache_file + if "cache" in cfg["search_connection"]: + cache_file = ( + Path(os.path.expandvars(cfg["search_connection"]["cache"])) + .expanduser() + .absolute() + ) + cfg["search_connection"]["cache"] = cache_file Path(cache_file).parent.mkdir(parents=True, exist_ok=True) return cfg diff --git a/esmvalcore/config/_logging.py b/esmvalcore/config/_logging.py index c854937413..ca9cc87d3b 100644 --- a/esmvalcore/config/_logging.py +++ b/esmvalcore/config/_logging.py @@ -11,17 +11,17 @@ import yaml -class FilterMultipleNames(): +class FilterMultipleNames: """Only allow/Disallow events from loggers with specific names.""" def __init__( self, names: Iterable[str], - mode: Literal['allow', 'disallow'], + mode: Literal["allow", "disallow"], ) -> None: """Initialize filter.""" self.names = names - if mode == 'allow': + if mode == "allow": self.starts_with_name = True else: self.starts_with_name = False @@ -40,14 +40,14 @@ def _purge_file_handlers(cfg: dict) -> None: This is used to remove file handlers which require an output directory to be set. 
""" - cfg['handlers'] = { + cfg["handlers"] = { name: handler - for name, handler in cfg['handlers'].items() - if 'filename' not in handler + for name, handler in cfg["handlers"].items() + if "filename" not in handler } - prev_root = cfg['root']['handlers'] - cfg['root']['handlers'] = [ - name for name in prev_root if name in cfg['handlers'] + prev_root = cfg["root"]["handlers"] + cfg["root"]["handlers"] = [ + name for name in prev_root if name in cfg["handlers"] ] @@ -58,31 +58,31 @@ def _get_log_files( """Initialize log files for the file handlers.""" log_files = [] - handlers = cfg['handlers'] + handlers = cfg["handlers"] for handler in handlers.values(): - filename = handler.get('filename', None) + filename = handler.get("filename", None) if filename: if output_dir is None: - raise ValueError('`output_dir` must be defined') + raise ValueError("`output_dir` must be defined") if not os.path.isabs(filename): - handler['filename'] = os.path.join(output_dir, filename) + handler["filename"] = os.path.join(output_dir, filename) - log_files.append(handler['filename']) + log_files.append(handler["filename"]) return log_files def _update_stream_level(cfg: dict, level=None): """Update the log level for the stream handlers.""" - handlers = cfg['handlers'] + handlers = cfg["handlers"] for handler in handlers.values(): - if level is not None and 'stream' in handler: - if handler['stream'] in ('ext://sys.stdout', 'ext://sys.stderr'): - handler['level'] = level.upper() + if level is not None and "stream" in handler: + if handler["stream"] in ("ext://sys.stdout", "ext://sys.stderr"): + handler["level"] = level.upper() def configure_logging( @@ -107,11 +107,11 @@ def configure_logging( Filenames that will be logged to. """ if cfg_file is None: - cfg_file = Path(__file__).parent / 'config-logging.yml' + cfg_file = Path(__file__).parent / "config-logging.yml" cfg_file = Path(cfg_file).absolute() - with open(cfg_file, 'r', encoding='utf-8') as file_handler: + with open(cfg_file, "r", encoding="utf-8") as file_handler: cfg = yaml.safe_load(file_handler) if output_dir is None: diff --git a/esmvalcore/config/_validated_config.py b/esmvalcore/config/_validated_config.py index 27048397a7..898abf3bb8 100644 --- a/esmvalcore/config/_validated_config.py +++ b/esmvalcore/config/_validated_config.py @@ -1,4 +1,5 @@ """Config validation objects.""" + from __future__ import annotations import pprint @@ -88,15 +89,17 @@ def __repr__(self): """Return canonical string representation.""" class_name = self.__class__.__name__ indent = len(class_name) + 1 - repr_split = pprint.pformat(self._mapping, indent=1, - width=80 - indent).split('\n') - repr_indented = ('\n' + ' ' * indent).join(repr_split) - return '{}({})'.format(class_name, repr_indented) + repr_split = pprint.pformat( + self._mapping, indent=1, width=80 - indent + ).split("\n") + repr_indented = ("\n" + " " * indent).join(repr_split) + return "{}({})".format(class_name, repr_indented) def __str__(self): """Return string representation.""" - return '\n'.join( - map('{0[0]}: {0[1]}'.format, sorted(self._mapping.items()))) + return "\n".join( + map("{0[0]}: {0[1]}".format, sorted(self._mapping.items())) + ) def __iter__(self): """Yield sorted list of keys.""" @@ -112,11 +115,13 @@ def __delitem__(self, key): def check_missing(self): """Check and warn for missing variables.""" - for (key, more_info) in self._warn_if_missing: + for key, more_info in self._warn_if_missing: if key not in self: - more_info = f' ({more_info})' if more_info else '' - warnings.warn(f'`{key}` 
is not defined{more_info}', - MissingConfigParameter) + more_info = f" ({more_info})" if more_info else "" + warnings.warn( + f"`{key}` is not defined{more_info}", + MissingConfigParameter, + ) def copy(self): """Copy the keys/values of this object to a dict.""" diff --git a/esmvalcore/config/extra_facets/access-mappings.yml b/esmvalcore/config/extra_facets/access-mappings.yml new file mode 100644 index 0000000000..b82899c261 --- /dev/null +++ b/esmvalcore/config/extra_facets/access-mappings.yml @@ -0,0 +1,67 @@ +# Extra facets for native ACCESS model output + +# A complete list of supported keys is given in the documentation (see +# ESMValCore/doc/quickstart/find_data.rst). +--- + +ACCESS-ESM1-5: + + '*': + + tas: + raw_name: fld_s03i236 + modeling_realm: atm + + pr: + raw_name: fld_s05i216 + modeling_realm: atm + + ps: + raw_name: fld_s00i409 + modeling_realm: atm + + clt: + raw_name: fld_s02i204 + modeling_realm: atm + + psl: + raw_name: fld_s16i222 + modeling_realm: atm + + hus: + raw_name: fld_s30i205 + modeling_realm: atm + + zg: + raw_name: fld_s30i207 + modeling_realm: atm + + va: + raw_name: fld_s30i202 + modeling_realm: atm + + ua: + raw_name: fld_s30i201 + modeling_realm: atm + + ta: + raw_name: fld_s30i204 + modeling_realm: atm + + rlus: + raw_name: + - fld_s02i207 + - fld_s02i201 + - fld_s03i332 + - fld_s02i205 + modeling_realm: atm + + rlds: + raw_name: fld_s02i207 + modeling_realm: atm + + rsus: + raw_name: + - fld_s01i235 + - fld_s01i201 + modeling_realm: atm diff --git a/esmvalcore/config/extra_facets/emac-mappings.yml b/esmvalcore/config/extra_facets/emac-mappings.yml index 3e98cd2ba4..4a2379605f 100644 --- a/esmvalcore/config/extra_facets/emac-mappings.yml +++ b/esmvalcore/config/extra_facets/emac-mappings.yml @@ -11,14 +11,15 @@ # latter case, the prioritization is given by the order of the list; if # possible, use the first entry, if this is not present, use the second, etc. # This is particularly useful for variables where regular averages ("*_ave") -# or conditional averages ("*_cav") exist. For 3D variables defined on -# pressure levels, only the pressure levels defined by the CMOR table (e.g., -# for Amon's ta: "tm1_p19_cav" and "tm1_p19_ave") are given. If other -# pressure levels are desired, e.g., "tm1_p39_cav", this has to be explicitly -# specified in the recipe using "raw_name: tm1_p39_cav" or "raw_name: -# [tm1_p19_cav, tm1_p39_cav]". +# or conditional averages ("*_cav") exist. If other variables are desired, +# e.g., "tm1_p39_cav", this has to be explicitly specified in the recipe +# ("raw_name: tm1_p39_cav"). # - Asterisks ("*") in the comments in list below refer to either "cav" or # "ave". "cav" is prioritized. +# - The channels given here are a generalization and may not always be +# applicable. In that case, choose the appropriate channel for the variable +# in the recipe. +# # A complete list of supported keys is given in the documentation (see # ESMValCore/doc/quickstart/find_data.rst).
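Both extra-facets files above are keyed by dataset, mip and short_name, and each key may be a glob pattern such as '*'. A self-contained sketch of the lookup, mirroring pattern_filter() from _config.py earlier in this diff; the inlined mapping is a one-entry excerpt of access-mappings.yml:

import fnmatch

# dataset -> mip -> short_name; any level may be a glob pattern.
project_details = {
    "ACCESS-ESM1-5": {
        "*": {
            "tas": {"raw_name": "fld_s03i236", "modeling_realm": "atm"},
        },
    },
}

def pattern_filter(patterns, name):
    """Return the patterns matching the given name."""
    return [pat for pat in patterns if fnmatch.fnmatchcase(name, pat)]

facets = {"dataset": "ACCESS-ESM1-5", "mip": "Amon", "short_name": "tas"}
extra_facets = {}
for dataset_ in pattern_filter(project_details, facets["dataset"]):
    for mip_ in pattern_filter(project_details[dataset_], facets["mip"]):
        for var in pattern_filter(
            project_details[dataset_][mip_], facets["short_name"]
        ):
            extra_facets.update(project_details[dataset_][mip_][var])

print(extra_facets)  # {'raw_name': 'fld_s03i236', 'modeling_realm': 'atm'}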
@@ -35,71 +36,73 @@ EMAC: # 1D/2D dynamical/meteorological variables '*': awhea: # non-CMOR variable - raw_name: [awhea_cav, awhea_ave] + raw_name: [awhea_cav, awhea_ave, awhea] channel: Omon clivi: - raw_name: [xivi_cav, xivi_ave] + raw_name: [xivi_cav, xivi_ave, xivi] channel: Amon clt: - raw_name: [aclcov_cav, aclcov_ave] + raw_name: [aclcov_cav, aclcov_ave, aclcov] raw_units: '1' channel: Amon clwvi: # derived from xlvi_*, xivi_* channel: Amon co2mass: - raw_name: [MP_CO2_cav, MP_CO2_ave] + raw_name: [MP_CO2_cav, MP_CO2_ave, MP_CO2] channel: tracer_pdef_gp evspsbl: - raw_name: [evap_cav, evap_ave] + raw_name: [evap_cav, evap_ave, evap] channel: Amon hfls: - raw_name: [ahfl_cav, ahfl_ave] + raw_name: [ahfl_cav, ahfl_ave, ahfl] channel: Amon hfns: # ESMValCore-derivation channel: Amon hfss: - raw_name: [ahfs_cav, ahfs_ave] + raw_name: [ahfs_cav, ahfs_ave, ahfs] channel: Amon hurs: - raw_name: [rh_2m_cav, rh_2m_ave] + raw_name: [rh_2m_cav, rh_2m_ave, rh_2m] raw_units: '1' channel: Amon od550aer: - raw_name: [aot_opt_TOT_550_total_cav, aot_opt_TOT_550_total_ave] + raw_name: [aot_opt_TOT_550_total_cav, aot_opt_TOT_550_total_ave, aot_opt_TOT_550_total] raw_units: '1' channel: AERmon pr: # derived from aprl_*, aprc_* channel: Amon prc: - raw_name: [aprc_cav, aprc_ave] + raw_name: [aprc_cav, aprc_ave, aprc] + channel: Amon + prodlnox: # derived from NOxcg_*, NOxic_* channel: Amon prl: # non-CMOR variable - raw_name: [aprl_cav, aprl_ave] + raw_name: [aprl_cav, aprl_ave, aprl] channel: Amon prsn: - raw_name: [aprs_cav, aprs_ave] + raw_name: [aprs_cav, aprs_ave, aprs] channel: Amon prw: - raw_name: [qvi_cav, qvi_ave] + raw_name: [qvi_cav, qvi_ave, qvi] channel: Amon ps: - raw_name: [aps_cav, aps_ave] + raw_name: [aps_cav, aps_ave, aps] channel: Amon psl: - raw_name: [slp_cav, slp_ave] + raw_name: [slp_cav, slp_ave, slp] channel: Amon rlds: # derived from flxtbot_*, tradsu_* channel: Amon rlns: # ESMValCore-derivation channel: Amon rlus: - raw_name: [tradsu_cav, tradsu_ave] + raw_name: [tradsu_cav, tradsu_ave, tradsu] channel: Amon rlut: - raw_name: [flxttop_cav, flxttop_ave] + raw_name: [flxttop_cav, flxttop_ave, flxttop] channel: Amon rlutcs: - raw_name: [flxtftop_cav, flxtftop_ave] + raw_name: [flxtftop_cav, flxtftop_ave, flxtftop] channel: Amon rsds: # derived from flxsbot_*, sradsu_* channel: Amon @@ -110,32 +113,32 @@ EMAC: rsnt: # ESMValCore-derivation channel: Amon rsus: - raw_name: [sradsu_cav, sradsu_ave] + raw_name: [sradsu_cav, sradsu_ave, sradsu] channel: Amon rsut: - raw_name: [srad0u_cav, srad0u_ave] + raw_name: [srad0u_cav, srad0u_ave, srad0u] channel: Amon rsutcs: - raw_name: [flxusftop_cav, flxusftop_ave] + raw_name: [flxusftop_cav, flxusftop_ave, flxusftop] channel: Amon rtmt: # derived from flxttop_*, flxstop_* channel: Amon sfcWind: - raw_name: [wind10_cav, wind10_ave] + raw_name: [wind10_cav, wind10_ave, wind10] channel: Amon siconc: - raw_name: [seaice_cav, seaice_ave] + raw_name: [seaice_cav, seaice_ave, seaice] raw_units: '1' channel: Amon siconca: - raw_name: [seaice_cav, seaice_ave] + raw_name: [seaice_cav, seaice_ave, seaice] raw_units: '1' channel: Amon sithick: - raw_name: [siced_cav, siced_ave] + raw_name: [siced_cav, siced_ave, siced] channel: Amon tas: - raw_name: [temp2_cav, temp2_ave] + raw_name: [temp2_cav, temp2_ave, temp2] channel: Amon tasmax: raw_name: temp2_max @@ -144,10 +147,10 @@ EMAC: raw_name: temp2_min channel: Amon tauu: - raw_name: [ustr_cav, ustr_ave] + raw_name: [ustr_cav, ustr_ave, ustr] channel: Amon tauv: - raw_name: [vstr_cav, vstr_ave] + 
raw_name: [vstr_cav, vstr_ave, vstr] channel: Amon tos: raw_name: tsw @@ -155,210 +158,99 @@ EMAC: toz: channel: column ts: - raw_name: [tsurf_cav, tsurf_ave] + raw_name: [tsurf_cav, tsurf_ave, tsurf] channel: Amon uas: - raw_name: [u10_cav, u10_ave] + raw_name: [u10_cav, u10_ave, u10] channel: Amon vas: - raw_name: [v10_cav, v10_ave] + raw_name: [v10_cav, v10_ave, v10] channel: Amon # Tracers (non-CMOR variables) MP_BC_tot: # derived from MP_BC_ks_*, MP_BC_as_*, MP_BC_cs_*, MP_BC_ki_* channel: tracer_pdef_gp MP_CFCl3: - raw_name: [MP_CFCl3_cav, MP_CFCl3_ave] + raw_name: [MP_CFCl3_cav, MP_CFCl3_ave, MP_CFCl3] channel: tracer_pdef_gp MP_ClOX: - raw_name: [MP_ClOX_cav, MP_ClOX_ave] + raw_name: [MP_ClOX_cav, MP_ClOX_ave, MP_ClOX] channel: tracer_pdef_gp MP_CH4: - raw_name: [MP_CH4_cav, MP_CH4_ave] + raw_name: [MP_CH4_cav, MP_CH4_ave, MP_CH4] channel: tracer_pdef_gp MP_CO: - raw_name: [MP_CO_cav, MP_CO_ave] + raw_name: [MP_CO_cav, MP_CO_ave, MP_CO] channel: tracer_pdef_gp MP_CO2: - raw_name: [MP_CO2_cav, MP_CO2_ave] + raw_name: [MP_CO2_cav, MP_CO2_ave, MP_CO2] channel: tracer_pdef_gp MP_DU_tot: # derived from MP_DU_as_*, MP_DU_cs_*, MP_DU_ai_*, MP_DU_ci_* channel: tracer_pdef_gp MP_N2O: - raw_name: [MP_N2O_cav, MP_N2O_ave] + raw_name: [MP_N2O_cav, MP_N2O_ave, MP_N2O] channel: tracer_pdef_gp MP_NH3: - raw_name: [MP_NH3_cav, MP_NH3_ave] + raw_name: [MP_NH3_cav, MP_NH3_ave, MP_NH3] channel: tracer_pdef_gp MP_NO: - raw_name: [MP_NO_cav, MP_NO_ave] + raw_name: [MP_NO_cav, MP_NO_ave, MP_NO] channel: tracer_pdef_gp MP_NO2: - raw_name: [MP_NO2_cav, MP_NO2_ave] + raw_name: [MP_NO2_cav, MP_NO2_ave, MP_NO2] channel: tracer_pdef_gp MP_NOX: - raw_name: [MP_NOX_cav, MP_NOX_ave] + raw_name: [MP_NOX_cav, MP_NOX_ave, MP_NOX] channel: tracer_pdef_gp MP_O3: - raw_name: [MP_O3_cav, MP_O3_ave] + raw_name: [MP_O3_cav, MP_O3_ave, MP_O3] channel: tracer_pdef_gp MP_OH: - raw_name: [MP_OH_cav, MP_OH_ave] + raw_name: [MP_OH_cav, MP_OH_ave, MP_OH] channel: tracer_pdef_gp MP_S: - raw_name: [MP_S_cav, MP_S_ave] + raw_name: [MP_S_cav, MP_S_ave, MP_S] channel: tracer_pdef_gp MP_SO2: - raw_name: [MP_SO2_cav, MP_SO2_ave] + raw_name: [MP_SO2_cav, MP_SO2_ave, MP_SO2] channel: tracer_pdef_gp MP_SO4mm_tot: # derived from MP_SO4mm_ns_*, MP_SO4mm_ks_*, MP_SO4mm_as_*, MP_SO4mm_cs_* channel: tracer_pdef_gp MP_SS_tot: # derived from MP_SS_ks_*, MP_SS_as_*, MP_SS_cs_* channel: tracer_pdef_gp - # 3D dynamical/meteorological variables - 6hrLev: - ta: - raw_name: [tm1_cav, tm1_ave] - channel: Amon - ua: - raw_name: [um1_cav, um1_ave] - channel: Amon - va: - raw_name: [vm1_cav, vm1_ave] - channel: Amon - AERmon: - ua: - raw_name: [um1_cav, um1_ave] - channel: Amon - va: - raw_name: [vm1_cav, vm1_ave] - channel: Amon - zg: - raw_name: [geopot_cav, geopot_ave] - channel: Amon - Amon: - cl: - raw_name: [aclcac_cav, aclcac_ave] - raw_units: '1' - channel: Amon - cli: - raw_name: [xim1_cav, xim1_ave] - channel: Amon - clw: - raw_name: [xlm1_cav, xlm1_ave] - channel: Amon - hur: # defined on plev19 - raw_name: [rhum_p19_cav, rhum_p19_ave] - channel: Amon - hus: # defined on plev19 - raw_name: [qm1_p19_cav, qm1_p19_ave] - channel: Amon - ta: # defined on plev19 - raw_name: [tm1_p19_cav, tm1_p19_ave] - channel: Amon - ua: # defined on plev19 - raw_name: [um1_p19_cav, um1_p19_ave] - channel: Amon - va: # defined on plev19 - raw_name: [vm1_p19_cav, vm1_p19_ave] - channel: Amon - zg: # defined on plev19 - raw_name: [geopot_p19_cav, geopot_p19_ave] - channel: Amon - CF3hr: - ta: - raw_name: [tm1_cav, tm1_ave] - channel: Amon - CFday: + # 3D 
dynamical/meteorological variables cl: - raw_name: [aclcac_cav, aclcac_ave] + raw_name: [aclcac_cav, aclcac_ave, aclcac] raw_units: '1' channel: Amon cli: - raw_name: [xim1_cav, xim1_ave] + raw_name: [xim1_cav, xim1_ave, xim1] channel: Amon clw: - raw_name: [xlm1_cav, xlm1_ave] + raw_name: [xlm1_cav, xlm1_ave, xlm1] channel: Amon hur: - raw_name: [rhum_cav, rhum_ave] + raw_name: [rhum_cav, rhum_ave, rhum] channel: Amon hus: - raw_name: [qm1_cav, qm1_ave] + raw_name: [qm1_cav, qm1_ave, qm1] channel: Amon - ta: - raw_name: [tm1_cav, tm1_ave] - channel: Amon - ua: - raw_name: [um1_cav, um1_ave] - channel: Amon - va: - raw_name: [vm1_cav, vm1_ave] - channel: Amon - zg: - raw_name: [geopot_cav, geopot_ave] - channel: Amon - CFmon: - hur: - raw_name: [rhum_cav, rhum_ave] - channel: Amon - hus: - raw_name: [qm1_cav, qm1_ave] + o3: + raw_name: [O3_cav, O3_ave, O3] channel: Amon ta: - raw_name: [tm1_cav, tm1_ave] - channel: Amon - day: - hur: # defined on plev8 - raw_name: [rhum_p8_cav, rhum_p8_ave] - channel: Amon - hus: # defined on plev8 - raw_name: [qm1_p8_cav, qm1_p8_ave] - channel: Amon - ua: # defined on plev8 - raw_name: [um1_p8_cav, um1_p8_ave] + raw_name: [tm1_cav, tm1_ave, tm1] channel: Amon - va: # defined on plev8 - raw_name: [vm1_p8_cav, vm1_p8_ave] - channel: Amon - zg: # defined on plev8 - raw_name: [geopot_p8_cav, geopot_p8_ave] - channel: Amon - E1hr: - ua: # defined on plev3 - raw_name: [um1_p3_cav, um1_p3_ave] - channel: Amon - va: # defined on plev3 - raw_name: [vm1_p3_cav, vm1_p3_ave] - channel: Amon - E3hrPt: - hus: - raw_name: [qm1_cav, qm1_ave] - channel: Amon - Eday: - ta: # defined on plev19 - raw_name: [tm1_p19_cav, tm1_p19_ave] - channel: Amon - hus: # defined on plev19 - raw_name: [qm1_p19_cav, qm1_p19_ave] - channel: Amon - ua: # defined on plev19 - raw_name: [um1_p19_cav, um1_p19_ave] - channel: Amon - va: # defined on plev19 - raw_name: [vm1_p19_cav, vm1_p19_ave] - channel: Amon - zg: # defined on plev19 - raw_name: [geopot_p19_cav, geopot_p19_ave] - channel: Amon - Esubhr: - ta: - raw_name: [tm1_cav, tm1_ave] + tro3: + raw_name: [O3_cav, O3_ave, O3] channel: Amon ua: - raw_name: [um1_cav, um1_ave] + raw_name: [um1_cav, um1_ave, um1] channel: Amon va: - raw_name: [vm1_cav, vm1_ave] + raw_name: [vm1_cav, vm1_ave, vm1] + channel: Amon + zg: + raw_name: [geopot_cav, geopot_ave, geopot] channel: Amon diff --git a/esmvalcore/dataset.py b/esmvalcore/dataset.py index d4bd665aa6..c436edfe8b 100644 --- a/esmvalcore/dataset.py +++ b/esmvalcore/dataset.py @@ -1,4 +1,5 @@ """Classes and functions for defining, finding, and loading data.""" + from __future__ import annotations import logging @@ -35,9 +36,9 @@ from esmvalcore.typing import Facets, FacetValue __all__ = [ - 'Dataset', - 'INHERITED_FACETS', - 'datasets_to_recipe', + "Dataset", + "INHERITED_FACETS", + "datasets_to_recipe", ] logger = logging.getLogger(__name__) @@ -45,12 +46,12 @@ File = Union[esgf.ESGFFile, local.LocalFile] INHERITED_FACETS: list[str] = [ - 'dataset', - 'domain', - 'driver', - 'grid', - 'project', - 'timerange', + "dataset", + "domain", + "driver", + "grid", + "project", + "timerange", ] """Inherited facets. 
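The dataset.py hunks that follow reformat the wildcard machinery: _isglob() detects glob patterns in facet values, _ismatch() compares a value against a pattern, and from_files() expands a dataset template against the files that are actually available. A hedged usage sketch; the facet values are illustrative, and actually finding files requires a configured rootpath/drs (or ESGF search enabled):

from esmvalcore.dataset import Dataset

# A dataset template; '*' facets are expanded by from_files() based on
# the facets of the files found locally (or on ESGF).
template = Dataset(
    project="CMIP6",
    mip="Amon",
    short_name="tas",
    exp="historical",
    ensemble="r1i1p1f1",
    dataset="*",  # wildcard: one Dataset is yielded per matching model
    grid="*",
)
for dataset in template.from_files():
    print(dataset.summary(shorten=True))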
@@ -69,14 +70,18 @@ def _augment(base: dict, update: dict): def _isglob(facet_value: FacetValue | None) -> bool: """Check if a facet value is a glob pattern.""" - return (isinstance(facet_value, str) - and bool(re.match(r'.*[\*\?]+.*|.*\[.*\].*', facet_value))) + return isinstance(facet_value, str) and bool( + re.match(r".*[\*\?]+.*|.*\[.*\].*", facet_value) + ) def _ismatch(facet_value: FacetValue, pattern: FacetValue) -> bool: """Check if a facet value matches a glob pattern.""" - return (isinstance(pattern, str) and isinstance(facet_value, str) - and fnmatchcase(facet_value, pattern)) + return ( + isinstance(pattern, str) + and isinstance(facet_value, str) + and fnmatchcase(facet_value, pattern) + ) class Dataset: @@ -98,25 +103,24 @@ class Dataset: """ _SUMMARY_FACETS = ( - 'short_name', - 'mip', - 'project', - 'dataset', - 'rcm_version', - 'driver', - 'domain', - 'activity', - 'exp', - 'ensemble', - 'grid', - 'version', + "short_name", + "mip", + "project", + "dataset", + "rcm_version", + "driver", + "domain", + "activity", + "exp", + "ensemble", + "grid", + "version", ) """Facets used to create a summary of a Dataset instance.""" def __init__(self, **facets: FacetValue): - self.facets: Facets = {} - self.supplementaries: list['Dataset'] = [] + self.supplementaries: list["Dataset"] = [] self._persist: set[str] = set() self._session: Session | None = None @@ -130,7 +134,7 @@ def __init__(self, **facets: FacetValue): def from_recipe( recipe: Path | str | dict, session: Session, - ) -> list['Dataset']: + ) -> list["Dataset"]: """Read datasets from a recipe. Parameters @@ -149,6 +153,7 @@ def from_recipe( A list of datasets. """ from esmvalcore._recipe.to_datasets import datasets_from_recipe + return datasets_from_recipe(recipe, session) def _file_to_dataset( @@ -157,14 +162,16 @@ def _file_to_dataset( ) -> Dataset: """Create a dataset from a file with a `facets` attribute.""" facets = dict(file.facets) - if 'version' not in self.facets: + if "version" not in self.facets: # Remove version facet if no specific version requested - facets.pop('version', None) + facets.pop("version", None) updated_facets = { f: v - for f, v in facets.items() if f in self.facets - and _isglob(self.facets[f]) and _ismatch(v, self.facets[f]) + for f, v in facets.items() + if f in self.facets + and _isglob(self.facets[f]) + and _ismatch(v, self.facets[f]) } dataset = self.copy() dataset.facets.update(updated_facets) @@ -172,7 +179,7 @@ def _file_to_dataset( # If possible, remove unexpanded facets that can be automatically # populated. 
unexpanded = {f for f, v in dataset.facets.items() if _isglob(v)} - required_for_augment = {'project', 'mip', 'short_name', 'dataset'} + required_for_augment = {"project", "mip", "short_name", "dataset"} if unexpanded and not unexpanded & required_for_augment: copy = dataset.copy() copy.supplementaries = [] @@ -192,10 +199,10 @@ def _get_available_datasets(self) -> Iterator[Dataset]: """ dataset_template = self.copy() dataset_template.supplementaries = [] - if _isglob(dataset_template.facets.get('timerange')): + if _isglob(dataset_template.facets.get("timerange")): # Remove wildcard `timerange` facet, because data finding cannot # handle it - dataset_template.facets.pop('timerange') + dataset_template.facets.pop("timerange") seen = set() partially_defined = [] @@ -206,11 +213,15 @@ def _get_available_datasets(self) -> Iterator[Dataset]: # Filter out identical datasets facetset = frozenset( (f, frozenset(v) if isinstance(v, list) else v) - for f, v in dataset.facets.items()) + for f, v in dataset.facets.items() + ) if facetset not in seen: seen.add(facetset) - if any(_isglob(v) for f, v in dataset.facets.items() - if f != 'timerange'): + if any( + _isglob(v) + for f, v in dataset.facets.items() + if f != "timerange" + ): partially_defined.append((dataset, file)) else: dataset._update_timerange() @@ -220,19 +231,24 @@ def _get_available_datasets(self) -> Iterator[Dataset]: # Only yield datasets with globs if there is no better alternative for dataset, file in partially_defined: - msg = (f"{dataset} with unexpanded wildcards, created from file " - f"{file} with facets {file.facets}. Are the missing facets " - "in the path to the file?" if isinstance( - file, local.LocalFile) else "available on ESGF?") + msg = ( + f"{dataset} with unexpanded wildcards, created from file " + f"{file} with facets {file.facets}. Are the missing facets " + "in the path to the file?" + if isinstance(file, local.LocalFile) + else "available on ESGF?" + ) if expanded: logger.info("Ignoring %s", msg) else: logger.debug( "Not updating timerange and supplementaries for %s " - "because it still contains wildcards.", msg) + "because it still contains wildcards.", + msg, + ) yield dataset - def from_files(self) -> Iterator['Dataset']: + def from_files(self) -> Iterator["Dataset"]: """Create datasets based on the available files. 
The facet values for local files are retrieved from the directory tree @@ -266,17 +282,18 @@ def from_files(self) -> Iterator['Dataset']: """ expanded = False if any(_isglob(v) for v in self.facets.values()): - if _isglob(self.facets['mip']): + if _isglob(self.facets["mip"]): available_mips = _get_mips( - self.facets['project'], # type: ignore - self.facets['short_name'], # type: ignore + self.facets["project"], # type: ignore + self.facets["short_name"], # type: ignore ) mips = [ - mip for mip in available_mips - if _ismatch(mip, self.facets['mip']) + mip + for mip in available_mips + if _ismatch(mip, self.facets["mip"]) ] else: - mips = [self.facets['mip']] # type: ignore + mips = [self.facets["mip"]] # type: ignore for mip in mips: dataset_template = self.copy(mip=mip) @@ -316,7 +333,7 @@ def _remove_unexpanded_supplementaries(self) -> None: "For %s: ignoring supplementary variable '%s', " "unable to expand wildcards %s.", self.summary(shorten=True), - supplementary_ds.facets['short_name'], + supplementary_ds.facets["short_name"], ", ".join(f"'{f}'" for f in unexpanded), ) else: @@ -346,8 +363,9 @@ def _remove_duplicate_supplementaries(self) -> None: not_used = [] supplementaries = list(self.supplementaries) self.supplementaries.clear() - for _, duplicates in groupby(supplementaries, - key=lambda ds: ds['short_name']): + for _, duplicates in groupby( + supplementaries, key=lambda ds: ds["short_name"] + ): group = sorted(duplicates, key=self._match, reverse=True) self.supplementaries.append(group[0]) not_used.extend(group[1:]) @@ -357,27 +375,30 @@ def _remove_duplicate_supplementaries(self) -> None: "List of all supplementary datasets found for %s:\n%s", self.summary(shorten=True), "\n".join( - sorted(ds.summary(shorten=True) - for ds in supplementaries)), + sorted(ds.summary(shorten=True) for ds in supplementaries) + ), ) def _fix_fx_exp(self) -> None: for supplementary_ds in self.supplementaries: - exps = supplementary_ds.facets.get('exp') - frequency = supplementary_ds.facets.get('frequency') - if isinstance(exps, list) and len(exps) > 1 and frequency == 'fx': + exps = supplementary_ds.facets.get("exp") + frequency = supplementary_ds.facets.get("frequency") + if isinstance(exps, list) and len(exps) > 1 and frequency == "fx": for exp in exps: dataset = supplementary_ds.copy(exp=exp) if dataset.files: - supplementary_ds.facets['exp'] = exp + supplementary_ds.facets["exp"] = exp logger.info( "Corrected wrong 'exp' from '%s' to '%s' for " - "supplementary variable '%s' of %s", exps, exp, - supplementary_ds.facets['short_name'], - self.summary(shorten=True)) + "supplementary variable '%s' of %s", + exps, + exp, + supplementary_ds.facets["short_name"], + self.summary(shorten=True), + ) break - def copy(self, **facets: FacetValue) -> 'Dataset': + def copy(self, **facets: FacetValue) -> "Dataset": """Create a copy. Parameters @@ -401,10 +422,9 @@ def copy(self, **facets: FacetValue) -> 'Dataset': for supplementary in self.supplementaries: # The short_name and mip of the supplementary variable are probably # different from the main variable, so don't copy those facets. 
-            skip = ('short_name', 'mip')
+            skip = ("short_name", "mip")
             supplementary_facets = {
-                k: v
-                for k, v in facets.items() if k not in skip
+                k: v for k, v in facets.items() if k not in skip
             }
             new_supplementary = supplementary.copy(**supplementary_facets)
             new.supplementaries.append(new_supplementary)
@@ -412,24 +432,25 @@ def copy(self, **facets: FacetValue) -> 'Dataset':

     def __eq__(self, other) -> bool:
         """Compare with another dataset."""
-        return (isinstance(other, self.__class__)
-                and self._session == other._session
-                and self.facets == other.facets
-                and self.supplementaries == other.supplementaries)
+        return (
+            isinstance(other, self.__class__)
+            and self._session == other._session
+            and self.facets == other.facets
+            and self.supplementaries == other.supplementaries
+        )

     def __repr__(self) -> str:
         """Create a string representation."""
         first_keys = (
-            'diagnostic',
-            'variable_group',
-            'dataset',
-            'project',
-            'mip',
-            'short_name',
+            "diagnostic",
+            "variable_group",
+            "dataset",
+            "project",
+            "mip",
+            "short_name",
         )

         def facets2str(facets):
-
             view = {k: facets[k] for k in first_keys if k in facets}
             for key, value in sorted(facets.items()):
                 if key not in first_keys:
@@ -445,7 +466,8 @@ def facets2str(facets):
             txt.append("supplementaries:")
             txt.extend(
                 textwrap.indent(facets2str(a.facets), "  ")
-                for a in self.supplementaries)
+                for a in self.supplementaries
+            )
         if self._session:
             txt.append(f"session: '{self.session.session_name}'")
         return "\n".join(txt)
@@ -462,7 +484,7 @@ def _get_joined_summary_facets(
                 continue
             val = self.facets[key]
             if join_lists and isinstance(val, (tuple, list)):
-                val = '-'.join(str(elem) for elem in val)
+                val = "-".join(str(elem) for elem in val)
             else:
                 val = str(val)
             summary_facets_vals.append(val)
@@ -485,16 +507,23 @@ def summary(self, shorten: bool = False) -> str:
             return repr(self)

         title = self.__class__.__name__
-        txt = f"{title}: " + self._get_joined_summary_facets(', ')
+        txt = f"{title}: " + self._get_joined_summary_facets(", ")

         def supplementary_summary(dataset):
             return ", ".join(
-                str(dataset.facets[k]) for k in self._SUMMARY_FACETS
-                if k in dataset.facets and dataset[k] != self.facets.get(k))
+                str(dataset.facets[k])
+                for k in self._SUMMARY_FACETS
+                if k in dataset.facets and dataset[k] != self.facets.get(k)
+            )

         if self.supplementaries:
-            txt += (", supplementaries: " + "; ".join(
-                supplementary_summary(a) for a in self.supplementaries) + "")
+            txt += (
+                ", supplementaries: "
+                + "; ".join(
+                    supplementary_summary(a) for a in self.supplementaries
+                )
+                + ""
+            )
         return txt

     def __getitem__(self, key):
@@ -531,11 +560,11 @@ def set_version(self) -> None:
         """Set the ``'version'`` facet based on the available data."""
         versions: set[str] = set()
         for file in self.files:
-            if 'version' in file.facets:
-                versions.add(file.facets['version'])  # type: ignore
+            if "version" in file.facets:
+                versions.add(file.facets["version"])  # type: ignore
         version = versions.pop() if len(versions) == 1 else sorted(versions)
         if version:
-            self.set_facet('version', version)
+            self.set_facet("version", version)
         for supplementary_ds in self.supplementaries:
             supplementary_ds.set_version()
@@ -583,19 +612,19 @@ def augment_facets(self) -> None:
             supplementary._augment_facets()

     def _augment_facets(self):
-        extra_facets = get_extra_facets(self, self.session['extra_facets_dir'])
+        extra_facets = get_extra_facets(self, self.session["extra_facets_dir"])
         _augment(self.facets, extra_facets)
-        if 'institute' not in self.facets:
+        if "institute" not in self.facets:
             institute = 
get_institutes(self.facets) if institute: - self.facets['institute'] = institute - if 'activity' not in self.facets: + self.facets["institute"] = institute + if "activity" not in self.facets: activity = get_activity(self.facets) if activity: - self.facets['activity'] = activity + self.facets["activity"] = activity _update_cmor_facets(self.facets) - if self.facets.get('frequency') == 'fx': - self.facets.pop('timerange', None) + if self.facets.get("frequency") == "fx": + self.facets.pop("timerange", None) def find_files(self) -> None: """Find files. @@ -605,7 +634,7 @@ def find_files(self) -> None: """ self.augment_facets() - if _isglob(self.facets.get('timerange')): + if _isglob(self.facets.get("timerange")): self._update_timerange() self._find_files() @@ -619,16 +648,16 @@ def _find_files(self) -> None: ) # If project does not support automatic downloads from ESGF, stop here - if self.facets['project'] not in esgf.facets.FACETS: + if self.facets["project"] not in esgf.facets.FACETS: return # 'never' mode: never download files from ESGF and stop here - if self.session['search_esgf'] == 'never': + if self.session["search_esgf"] == "never": return # 'when_missing' mode: if files are available locally, do not check # ESGF - if self.session['search_esgf'] == 'when_missing': + if self.session["search_esgf"] == "when_missing": try: check.data_availability(self, log=False) except InputFilesNotFound: @@ -648,8 +677,8 @@ def _find_files(self) -> None: # Use ESGF files that are newer than the locally available # files. local_file = local_files[file.name] - if 'version' in local_file.facets: - if file.facets['version'] > local_file.facets['version']: + if "version" in local_file.facets: + if file.facets["version"] > local_file.facets["version"]: idx = self.files.index(local_file) self.files[idx] = file @@ -680,7 +709,7 @@ def load(self) -> Cube: input_files = list(self.files) for supplementary_dataset in self.supplementaries: input_files.extend(supplementary_dataset.files) - esgf.download(input_files, self.session['download_dir']) + esgf.download(input_files, self.session["download_dir"]) cube = self._load() supplementary_cubes = [] @@ -691,10 +720,10 @@ def load(self) -> Cube: output_file = _get_output_file(self.facets, self.session.preproc_dir) cubes = preprocess( [cube], - 'add_supplementary_variables', + "add_supplementary_variables", input_files=input_files, output_file=output_file, - debug=self.session['save_intermediary_cubes'], + debug=self.session["save_intermediary_cubes"], supplementary_cubes=supplementary_cubes, ) @@ -708,64 +737,64 @@ def _load(self) -> Cube: "locally using glob patterns:", "\n".join(str(f) for f in self._file_globs or []), ] - if self.session['search_esgf'] != 'never': - lines.append('or on ESGF.') + if self.session["search_esgf"] != "never": + lines.append("or on ESGF.") msg = "\n".join(lines) raise InputFilesNotFound(msg) output_file = _get_output_file(self.facets, self.session.preproc_dir) fix_dir_prefix = Path( self.session._fixed_file_dir, - self._get_joined_summary_facets('_', join_lists=True) + '_', + self._get_joined_summary_facets("_", join_lists=True) + "_", ) settings: dict[str, dict[str, Any]] = {} - settings['fix_file'] = { - 'output_dir': fix_dir_prefix, - 'add_unique_suffix': True, - 'session': self.session, + settings["fix_file"] = { + "output_dir": fix_dir_prefix, + "add_unique_suffix": True, + "session": self.session, **self.facets, } - settings['load'] = { - 'ignore_warnings': get_ignored_warnings( - self.facets['project'], 'load' + settings["load"] = 
{ + "ignore_warnings": get_ignored_warnings( + self.facets["project"], "load" ), } - settings['fix_metadata'] = { - 'check_level': self.session['check_level'], - 'session': self.session, + settings["fix_metadata"] = { + "check_level": self.session["check_level"], + "session": self.session, **self.facets, } - settings['concatenate'] = { - 'check_level': self.session['check_level'] + settings["concatenate"] = {"check_level": self.session["check_level"]} + settings["cmor_check_metadata"] = { + "check_level": self.session["check_level"], + "cmor_table": self.facets["project"], + "mip": self.facets["mip"], + "frequency": self.facets["frequency"], + "short_name": self.facets["short_name"], } - settings['cmor_check_metadata'] = { - 'check_level': self.session['check_level'], - 'cmor_table': self.facets['project'], - 'mip': self.facets['mip'], - 'frequency': self.facets['frequency'], - 'short_name': self.facets['short_name'], - } - if 'timerange' in self.facets: - settings['clip_timerange'] = { - 'timerange': self.facets['timerange'], + if "timerange" in self.facets: + settings["clip_timerange"] = { + "timerange": self.facets["timerange"], } - settings['fix_data'] = { - 'check_level': self.session['check_level'], - 'session': self.session, + settings["fix_data"] = { + "check_level": self.session["check_level"], + "session": self.session, **self.facets, } - settings['cmor_check_data'] = { - 'check_level': self.session['check_level'], - 'cmor_table': self.facets['project'], - 'mip': self.facets['mip'], - 'frequency': self.facets['frequency'], - 'short_name': self.facets['short_name'], + settings["cmor_check_data"] = { + "check_level": self.session["check_level"], + "cmor_table": self.facets["project"], + "mip": self.facets["mip"], + "frequency": self.facets["frequency"], + "short_name": self.facets["short_name"], } result = [ - file.local_file(self.session['download_dir']) if isinstance( - file, esgf.ESGFFile) else file for file in self.files + file.local_file(self.session["download_dir"]) + if isinstance(file, esgf.ESGFFile) + else file + for file in self.files ] for step, kwargs in settings.items(): result = preprocess( @@ -773,14 +802,14 @@ def _load(self) -> Cube: step, input_files=self.files, output_file=output_file, - debug=self.session['save_intermediary_cubes'], + debug=self.session["save_intermediary_cubes"], **kwargs, ) cube = result[0] return cube - def from_ranges(self) -> list['Dataset']: + def from_ranges(self) -> list["Dataset"]: """Create a list of datasets from short notations. This expands the ``'ensemble'`` and ``'sub_experiment'`` facets in the @@ -796,10 +825,11 @@ def from_ranges(self) -> list['Dataset']: The datasets. """ datasets = [self] - for key in 'ensemble', 'sub_experiment': + for key in "ensemble", "sub_experiment": if key in self.facets: datasets = [ - ds.copy(**{key: value}) for ds in datasets + ds.copy(**{key: value}) + for ds in datasets for value in ds._expand_range(key) ] return datasets @@ -810,12 +840,12 @@ def _expand_range(self, input_tag): Expansion only supports ensembles defined as strings, not lists. 
""" expanded = [] - regex = re.compile(r'\(\d+:\d+\)') + regex = re.compile(r"\(\d+:\d+\)") def expand_range(input_range): match = regex.search(input_range) if match: - start, end = match.group(0)[1:-1].split(':') + start, end = match.group(0)[1:-1].split(":") for i in range(int(start), int(end) + 1): range_ = regex.sub(str(i), input_range, 1) expand_range(range_) @@ -828,7 +858,8 @@ def expand_range(input_range): if regex.search(elem): raise RecipeError( f"In {self}: {input_tag} expansion " - f"cannot be combined with {input_tag} lists") + f"cannot be combined with {input_tag} lists" + ) expanded.append(tag) else: expand_range(tag) @@ -844,19 +875,20 @@ def _update_timerange(self): dataset = self.copy() dataset.supplementaries = [] dataset.augment_facets() - if 'timerange' not in dataset.facets: - self.facets.pop('timerange', None) + if "timerange" not in dataset.facets: + self.facets.pop("timerange", None) return - timerange = self.facets['timerange'] + timerange = self.facets["timerange"] if not isinstance(timerange, str): raise TypeError( - f"timerange should be a string, got '{timerange!r}'") + f"timerange should be a string, got '{timerange!r}'" + ) check.valid_time_selection(timerange) - if '*' in timerange: + if "*" in timerange: dataset = self.copy() - dataset.facets.pop('timerange') + dataset.facets.pop("timerange") dataset.supplementaries = [] check.data_availability(dataset) intervals = [_get_start_end_date(f) for f in dataset.files] @@ -864,16 +896,16 @@ def _update_timerange(self): min_date = min(interval[0] for interval in intervals) max_date = max(interval[1] for interval in intervals) - if timerange == '*': - timerange = f'{min_date}/{max_date}' - if '*' in timerange.split('/')[0]: - timerange = timerange.replace('*', min_date) - if '*' in timerange.split('/')[1]: - timerange = timerange.replace('*', max_date) + if timerange == "*": + timerange = f"{min_date}/{max_date}" + if "*" in timerange.split("/")[0]: + timerange = timerange.replace("*", min_date) + if "*" in timerange.split("/")[1]: + timerange = timerange.replace("*", max_date) # Make sure that years are in format YYYY - start_date, end_date = timerange.split('/') + start_date, end_date = timerange.split("/") timerange = _dates_to_timerange(start_date, end_date) check.valid_time_selection(timerange) - self.set_facet('timerange', timerange) + self.set_facet("timerange", timerange) diff --git a/esmvalcore/esgf/__init__.py b/esmvalcore/esgf/__init__.py index 515c779a38..ca8607f964 100644 --- a/esmvalcore/esgf/__init__.py +++ b/esmvalcore/esgf/__init__.py @@ -1,9 +1,10 @@ """Find files on the ESGF and download them.""" + from ._download import ESGFFile, download from ._search import find_files __all__ = [ - 'ESGFFile', - 'download', - 'find_files', + "ESGFFile", + "download", + "find_files", ] diff --git a/esmvalcore/esgf/_download.py b/esmvalcore/esgf/_download.py index 445ff7a1d8..3d3fcd327c 100644 --- a/esmvalcore/esgf/_download.py +++ b/esmvalcore/esgf/_download.py @@ -1,4 +1,5 @@ """Module for downloading files from ESGF.""" + import concurrent.futures import contextlib import datetime @@ -22,7 +23,6 @@ from esmvalcore.typing import Facets from ..local import LocalFile -from ._logon import get_credentials from .facets import DATASET_MAP, FACETS logger = logging.getLogger(__name__) @@ -30,10 +30,10 @@ TIMEOUT = 5 * 60 """Timeout (in seconds) for downloads.""" -HOSTS_FILE = Path.home() / '.esmvaltool' / 'cache' / 'esgf-hosts.yml' -SIZE = 'size (bytes)' -DURATION = 'duration (s)' -SPEED = 'speed (MB/s)' +HOSTS_FILE = 
Path.home() / ".esmvaltool" / "cache" / "esgf-hosts.yml"
+SIZE = "size (bytes)"
+DURATION = "duration (s)"
+SPEED = "speed (MB/s)"


 class DownloadError(Exception):
@@ -52,9 +52,9 @@ def compute_speed(size, duration):
 def load_speeds():
     """Load average download speeds from HOSTS_FILE."""
     try:
-        content = HOSTS_FILE.read_text(encoding='utf-8')
+        content = HOSTS_FILE.read_text(encoding="utf-8")
     except FileNotFoundError:
-        content = '{}'
+        content = "{}"
     speeds = yaml.safe_load(content)
     return speeds
@@ -71,7 +71,7 @@ def log_speed(url, size, duration):
         SIZE: size,
         DURATION: round(duration),
         SPEED: round(speed, 1),
-        'error': False,
+        "error": False,
     }
     with atomic_write(HOSTS_FILE) as file:
         yaml.safe_dump(speeds, file)
@@ -82,7 +82,7 @@ def log_error(url):
     speeds = load_speeds()
     host = urlparse(url).hostname
     entry = speeds.get(host, {SIZE: 0, DURATION: 0, SPEED: 0})
-    entry['error'] = True
+    entry["error"] = True
     speeds[host] = entry
     with atomic_write(HOSTS_FILE) as file:
         yaml.safe_dump(speeds, file)
@@ -94,7 +94,7 @@ def atomic_write(filename):
     filename.parent.mkdir(parents=True, exist_ok=True)
     with NamedTemporaryFile(prefix=f"{filename}.") as file:
         tmp_file = file.name
-    with open(tmp_file, 'w', encoding='utf-8') as file:
+    with open(tmp_file, "w", encoding="utf-8") as file:
         yield file
     shutil.move(tmp_file, filename)
@@ -135,7 +135,7 @@ def get_preferred_hosts():
         # Ignore errors older than an hour
         errored = []
     else:
-        errored = [h for h in speeds if speeds[h]['error']]
+        errored = [h for h in speeds if speeds[h]["error"]]

     # Move hosts with an error to the end of the list
     for host in errored:
@@ -195,7 +195,7 @@ class ESGFFile:

     def __init__(self, results):
         results = list(results)
-        self.name = str(Path(results[0].filename).with_suffix('.nc'))
+        self.name = str(Path(results[0].filename).with_suffix(".nc"))
         self.size = results[0].size
         self.dataset = self._get_dataset_id(results)
         self.facets = self._get_facets(results)
@@ -211,7 +211,7 @@ def _from_results(cls, results, facets):

         def same_file(result):
             # Remove the hostname from the dataset_id
-            dataset = result.json['dataset_id'].split('|')[0]
+            dataset = result.json["dataset_id"].split("|")[0]
             # Ignore the extension (some files are called .nc_0, .nc_1)
             filename = Path(result.filename).stem
             # Ignore case
@@ -224,14 +224,17 @@ def same_file(result):
             # Filter out files containing the wrong variable, e.g. for
             # cmip5.output1.ICHEC.EC-EARTH.historical
             # .mon.atmos.Amon.r1i1p1.v20121115
-            variable = file.name.split('_')[0]
-            if 'variable' not in facets or facets['variable'] == variable:
+            variable = file.name.split("_")[0]
+            if "variable" not in facets or facets["variable"] == variable:
                 files.append(file)
             else:
                 logger.debug(
-                    "Ignoring file(s) %s containing wrong variable '%s' in"
-                    " found in search for variable '%s'", file.urls, variable,
-                    facets.get('variable', facets.get('variable_id', '?')))
+                    "Ignoring file(s) %s containing wrong variable '%s'"
+                    " found in search for variable '%s'",
+                    file.urls,
+                    variable,
+                    facets.get("variable", facets.get("variable_id", "?")),
+                )

         return files

@@ -243,7 +246,7 @@ def _get_facets(self, results):
         read from the `dataset_id` and filename and used to correct any
         wrong facets values. 
""" - project = results[0].json['project'][0] + project = results[0].json["project"][0] # Read the facets from the metadata facets = { @@ -252,26 +255,32 @@ def _get_facets(self, results): if their_facet in results[0].json } facets = { - facet: - value[0] if isinstance(value, list) and len(value) == 1 else value + facet: value[0] + if isinstance(value, list) and len(value) == 1 + else value for facet, value in facets.items() } - facets['project'] = project - if 'dataset' in facets: + facets["project"] = project + if "dataset" in facets: reverse_dataset_map = { - v: k - for k, v in DATASET_MAP.get(project, {}).items() + v: k for k, v in DATASET_MAP.get(project, {}).items() } - facets['dataset'] = reverse_dataset_map.get( - facets['dataset'], facets['dataset']) + facets["dataset"] = reverse_dataset_map.get( + facets["dataset"], facets["dataset"] + ) # Update the facets with information from the dataset_id and filename more_reliable_facets = self._get_facets_from_dataset_id(results) for facet, value in more_reliable_facets.items(): if facet not in facets or facets[facet] != value: logger.debug( - "Correcting facet '%s' from '%s' to '%s' for %s.%s", facet, - facets.get(facet), value, self.dataset, self.name) + "Correcting facet '%s' from '%s' to '%s' for %s.%s", + facet, + facets.get(facet), + value, + self.dataset, + self.name, + ) facets[facet] = value return facets @@ -295,26 +304,27 @@ def _get_facets_from_dataset_id(results) -> Facets: # %(rcm_version)s.%(time_frequency)s.%(variable)s' # obs4MIPs: '%(project)s.%(institute)s.%(source_id)s.%(realm)s. # %(time_frequency)s' - project = results[0].json['project'][0] + project = results[0].json["project"][0] # Read the keys from `dataset_id_template_` and translate to our keys - template = results[0].json['dataset_id_template_'][0] + template = results[0].json["dataset_id_template_"][0] keys = re.findall(r"%\((.*?)\)s", template) reverse_facet_map = {v: k for k, v in FACETS[project].items()} - reverse_facet_map['realm'] = 'modeling_realm' - reverse_facet_map['mip_era'] = 'project' # CMIP6 oddity - reverse_facet_map['variable_id'] = 'short_name' # CMIP6 oddity - reverse_facet_map['valid_institute'] = 'institute' # CMIP5 oddity + reverse_facet_map["realm"] = "modeling_realm" + reverse_facet_map["mip_era"] = "project" # CMIP6 oddity + reverse_facet_map["variable_id"] = "short_name" # CMIP6 oddity + reverse_facet_map["valid_institute"] = "institute" # CMIP5 oddity keys = [reverse_facet_map.get(k, k) for k in keys] - keys.append('version') - if keys[0] == 'project': + keys.append("version") + if keys[0] == "project": # The project is sometimes hardcoded all lowercase in the template keys = keys[1:] # Read values from dataset_id # Pick the first dataset_id if there are differences in case - dataset_id = sorted(r.json['dataset_id'].split('|')[0] - for r in results)[0] - values = dataset_id.split('.')[1:] + dataset_id = sorted( + r.json["dataset_id"].split("|")[0] for r in results + )[0] + values = dataset_id.split(".")[1:] facets = {} if len(keys) == len(values): for idx, key in enumerate(keys): @@ -322,12 +332,15 @@ def _get_facets_from_dataset_id(results) -> Facets: else: logger.debug( "Wrong dataset_id_template_ %s or facet values containing '.' 
" - "for dataset %s", template, dataset_id) - facets['version'] = dataset_id.split('.')[-1] + "for dataset %s", + template, + dataset_id, + ) + facets["version"] = dataset_id.split(".")[-1] # The dataset_id does not contain the short_name for all projects, # so get it from the filename: - facets['short_name'] = results[0].json['title'].split('_')[0] + facets["short_name"] = results[0].json["title"].split("_")[0] return facets @@ -335,41 +348,43 @@ def _get_facets_from_dataset_id(results) -> Facets: def _get_dataset_id(results): """Simplify dataset_id so it is always composed of the same facets.""" # Pick the first dataset_id if there are differences in case - dataset_id = sorted(r.json['dataset_id'].split('|')[0] - for r in results)[0] + dataset_id = sorted( + r.json["dataset_id"].split("|")[0] for r in results + )[0] - project = results[0].json['project'][0] - if project != 'obs4MIPs': + project = results[0].json["project"][0] + if project != "obs4MIPs": return dataset_id # Simplify the obs4MIPs dataset_id so it contains only facets that are # present for all datasets. - version = dataset_id.rsplit('.', 1)[1] - dataset_key = FACETS[project]['dataset'] + version = dataset_id.rsplit(".", 1)[1] + dataset_key = FACETS[project]["dataset"] dataset_name = results[0].json[dataset_key][0] dataset_name = DATASET_MAP[project].get(dataset_name, dataset_name) return f"{project}.{dataset_name}.{version}" def _get_relative_path(self) -> Path: """Get the subdirectories.""" - if self.facets['project'] == 'obs4MIPs': + if self.facets["project"] == "obs4MIPs": # Avoid errors due to a to a `.` in the dataset name - facets = ['project', 'dataset', 'version'] + facets = ["project", "dataset", "version"] path = Path(*[self.facets[f] for f in facets]) else: - path = Path(*self.dataset.split('.')) + path = Path(*self.dataset.split(".")) return path / self.name def __repr__(self): """Represent the file as a string.""" hosts = [urlparse(u).hostname for u in self.urls] - return (f"ESGFFile:{self._get_relative_path()}" - f" on hosts {hosts}") + return f"ESGFFile:{self._get_relative_path()} on hosts {hosts}" def __eq__(self, other): """Compare `self` to `other`.""" - return (isinstance(other, self.__class__) - and (self.dataset, self.name) == (other.dataset, other.name)) + return isinstance(other, self.__class__) and ( + self.dataset, + self.name, + ) == (other.dataset, other.name) def __lt__(self, other): """Compare `self` to `other`.""" @@ -425,10 +440,13 @@ def download(self, dest_folder): for url in sort_hosts(self.urls): try: self._download(local_file, url) - except (DownloadError, - requests.exceptions.RequestException) as error: - logger.debug("Not able to download %s. Error message: %s", url, - error) + except ( + DownloadError, + requests.exceptions.RequestException, + ) as error: + logger.debug( + "Not able to download %s. 
Error message: %s", url, error + ) errors[url] = error log_error(url) else: @@ -437,7 +455,8 @@ def download(self, dest_folder): if not local_file.exists(): raise DownloadError( f"Failed to download file {local_file}, errors:" - "\n" + "\n".join(f"{url}: {errors[url]}" for url in errors)) + "\n" + "\n".join(f"{url}: {errors[url]}" for url in errors) + ) return local_file @@ -460,10 +479,7 @@ def _download(self, local_file, url): logger.debug("Downloading %s to %s", url, tmp_file) start_time = datetime.datetime.now() - response = requests.get(url, - stream=True, - timeout=TIMEOUT, - cert=get_credentials()) + response = requests.get(url, stream=True, timeout=TIMEOUT) response.raise_for_status() with tmp_file.open("wb") as file: # Specify chunk_size to avoid @@ -479,22 +495,28 @@ def _download(self, local_file, url): if hasher is None: logger.warning( "No checksum available, unable to check data" - " integrity for %s, ", url) + " integrity for %s, ", + url, + ) else: local_checksum = hasher.hexdigest() if local_checksum != checksum: raise DownloadError( f"Wrong {checksum_type} checksum for file {tmp_file}," f" downloaded from {url}: expected {checksum}, but got" - f" {local_checksum}. Try downloading the file again.") + f" {local_checksum}. Try downloading the file again." + ) shutil.move(tmp_file, local_file) log_speed(url, self.size, duration.total_seconds()) - logger.info("Downloaded %s (%s) in %s (%s/s) from %s", local_file, - format_size(self.size), - format_timespan(duration.total_seconds()), - format_size(self.size / duration.total_seconds()), - urlparse(url).hostname) + logger.info( + "Downloaded %s (%s) in %s (%s/s) from %s", + local_file, + format_size(self.size), + format_timespan(duration.total_seconds()), + format_size(self.size / duration.total_seconds()), + urlparse(url).hostname, + ) def get_download_message(files): @@ -503,9 +525,7 @@ def get_download_message(files): lines = [] for file in files: total_size += file.size - lines.append(f"{format_size(file.size)}" - "\t" - f"{file}") + lines.append(f"{format_size(file.size)}\t{file}") lines.insert(0, "Will download the following files:") lines.insert(0, f"Will download {format_size(total_size)}") @@ -531,12 +551,16 @@ def download(files, dest_folder, n_jobs=4): Raised if one or more files failed to download. """ files = [ - file for file in files if isinstance(file, ESGFFile) + file + for file in files + if isinstance(file, ESGFFile) and not file.local_file(dest_folder).exists() ] if not files: - logger.debug("All required data is available locally," - " not downloading anything.") + logger.debug( + "All required data is available locally," + " not downloading anything." 
+ ) return files = sorted(files) @@ -553,8 +577,7 @@ def _download(file: ESGFFile): random.shuffle(files) with concurrent.futures.ThreadPoolExecutor(max_workers=n_jobs) as executor: future_to_file = { - executor.submit(_download, file): file - for file in files + executor.submit(_download, file): file for file in files } for future in concurrent.futures.as_completed(future_to_file): @@ -562,8 +585,9 @@ def _download(file: ESGFFile): try: future.result() except DownloadError as error: - logger.error("Failed to download %s, error message %s", file, - error) + logger.error( + "Failed to download %s, error message %s", file, error + ) errors.append(error) else: total_size += file.size @@ -577,8 +601,9 @@ def _download(file: ESGFFile): ) if errors: - msg = ("Failed to download the following files:\n" + - "\n".join(sorted(str(error) for error in errors))) + msg = "Failed to download the following files:\n" + "\n".join( + sorted(str(error) for error in errors) + ) raise DownloadError(msg) logger.info("Successfully downloaded all requested files.") diff --git a/esmvalcore/esgf/_logon.py b/esmvalcore/esgf/_logon.py deleted file mode 100644 index e9c33251c7..0000000000 --- a/esmvalcore/esgf/_logon.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Functions for logging on to ESGF.""" -import logging -from functools import lru_cache - -import pyesgf.logon -import pyesgf.search - -from ..config._esgf_pyclient import get_esgf_config - -logger = logging.getLogger(__name__) - - -@lru_cache(None) -def get_manager(): - """Return a logon manager.""" - return pyesgf.logon.LogonManager() - - -def logon(): - """Log on to ESGF and return a LogonManager.""" - cfg = get_esgf_config() - manager = get_manager() - - if not manager.is_logged_on(): - keys = ['interactive', 'hostname', 'username', 'password'] - if any(cfg['logon'].get(key) for key in keys): - # only try logging on if it is configured - manager.logon(**cfg['logon']) - if manager.is_logged_on(): - logger.info("Logged on to ESGF") - else: - logger.warning("Failed to log on to ESGF, data " - "availability will be limited.") - - return manager - - -def get_credentials(): - """Return ESGF credentials.""" - manager = logon() - if manager.is_logged_on(): - credentials = manager.esgf_credentials - else: - credentials = None - return credentials diff --git a/esmvalcore/esgf/_search.py b/esmvalcore/esgf/_search.py index 62882a5345..d8c2cd696f 100644 --- a/esmvalcore/esgf/_search.py +++ b/esmvalcore/esgf/_search.py @@ -1,4 +1,5 @@ """Module for finding files on ESGF.""" + import itertools import logging from functools import lru_cache @@ -21,12 +22,12 @@ def get_esgf_facets(variable): """Translate variable to facets for searching on ESGF.""" - project = variable.get('project', '') - facets = {'project': project} + project = variable.get("project", "") + facets = {"project": project} for our_name, esgf_name in FACETS[project].items(): if our_name in variable: values = variable[our_name] - if values == '*': + if values == "*": # Wildcards can be specified on ESGF by omitting the facet continue @@ -36,11 +37,11 @@ def get_esgf_facets(variable): values = [values] for i, value in enumerate(values): - if our_name == 'dataset': + if our_name == "dataset": # Replace dataset name by ESGF name for dataset values[i] = DATASET_MAP[project].get(value, value) - facets[esgf_name] = ','.join(values) + facets[esgf_name] = ",".join(values) return facets @@ -52,17 +53,17 @@ def select_latest_versions(files, versions): def same_file(file): """Return a versionless identifier for a file.""" # 
Dataset without the version number - dataset = file.dataset.rsplit('.', 1)[0] + dataset = file.dataset.rsplit(".", 1)[0] return (dataset, file.name) if isinstance(versions, str): - versions = (versions, ) + versions = (versions,) files = sorted(files, key=same_file) for _, group in itertools.groupby(files, key=same_file): group = sorted(group, reverse=True) if versions: - selection = [f for f in group if f.facets['version'] in versions] + selection = [f for f in group if f.facets["version"] in versions] if not selection: # Skip the file if it is not the requested version(s). continue @@ -70,8 +71,11 @@ def same_file(file): latest_version = group[0] result.append(latest_version) if len(group) > 1: - logger.debug("Only using the latest version %s, not %s", - latest_version, group[1:]) + logger.debug( + "Only using the latest version %s, not %s", + latest_version, + group[1:], + ) return result @@ -129,8 +133,10 @@ def _search_index_nodes(facets): logger.debug("Unable to connect to %s due to %s", url, error) errors.append(error) - raise FileNotFoundError("Failed to search ESGF, unable to connect:\n" + - "\n".join(f"- {e}" for e in errors)) + raise FileNotFoundError( + "Failed to search ESGF, unable to connect:\n" + + "\n".join(f"- {e}" for e in errors) + ) def esgf_search_files(facets): @@ -150,16 +156,17 @@ def esgf_search_files(facets): files = ESGFFile._from_results(results, facets) - msg = 'none' if not files else '\n' + '\n'.join(str(f) for f in files) - logger.debug("Found the following files matching facets %s: %s", facets, - msg) + msg = "none" if not files else "\n" + "\n".join(str(f) for f in files) + logger.debug( + "Found the following files matching facets %s: %s", facets, msg + ) return files def select_by_time(files, timerange): """Select files containing data between a timerange.""" - if '*' in timerange: + if "*" in timerange: # TODO: support * combined with a period return files @@ -327,15 +334,16 @@ def find_files(*, project, short_name, dataset, **facets): if project not in FACETS: raise ValueError( f"Unable to download from ESGF, because project {project} is not" - " on it or is not supported by the esmvalcore.esgf module.") + " on it or is not supported by the esmvalcore.esgf module." + ) # The project is required for the function to work. - facets['project'] = project + facets["project"] = project # The dataset and short_name facet are not strictly required, # but without these it seems likely that the user is requesting # more results than they intended. 
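For context, a typical call to the function reformatted here, `find_files()`, combined with `download()` from the same package. A minimal sketch; the facet values and destination folder are examples, not part of this change:

```python
from esmvalcore.esgf import download, find_files

# project, short_name and dataset are keyword-only arguments.
files = find_files(
    project="CMIP6",
    short_name="tas",
    dataset="CanESM5",
    mip="Amon",
    exp="historical",
    ensemble="r1i1p1f1",
)
download(files, dest_folder="/path/to/download_dir")
```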
- facets['dataset'] = dataset - facets['short_name'] = short_name + facets["dataset"] = dataset + facets["short_name"] = short_name # Convert lists to tuples to allow caching results for facet, value in facets.items(): @@ -355,12 +363,12 @@ def cached_search(**facets): esgf_facets = get_esgf_facets(facets) files = esgf_search_files(esgf_facets) - if 'version' not in facets or facets['version'] != '*': - files = select_latest_versions(files, facets.get('version')) + if "version" not in facets or facets["version"] != "*": + files = select_latest_versions(files, facets.get("version")) _replace_years_with_timerange(facets) - if 'timerange' in facets: - files = select_by_time(files, facets['timerange']) - logger.debug("Selected files:\n%s", '\n'.join(str(f) for f in files)) + if "timerange" in facets: + files = select_by_time(files, facets["timerange"]) + logger.debug("Selected files:\n%s", "\n".join(str(f) for f in files)) return files diff --git a/esmvalcore/esgf/facets.py b/esmvalcore/esgf/facets.py index 3947ba2a6c..b7d1495972 100644 --- a/esmvalcore/esgf/facets.py +++ b/esmvalcore/esgf/facets.py @@ -5,75 +5,75 @@ from ..config._esgf_pyclient import get_esgf_config FACETS = { - 'CMIP3': { - 'dataset': 'model', - 'ensemble': 'ensemble', - 'exp': 'experiment', - 'frequency': 'time_frequency', - 'short_name': 'variable', + "CMIP3": { + "dataset": "model", + "ensemble": "ensemble", + "exp": "experiment", + "frequency": "time_frequency", + "short_name": "variable", }, - 'CMIP5': { - 'dataset': 'model', - 'ensemble': 'ensemble', - 'exp': 'experiment', - 'frequency': 'time_frequency', - 'institute': 'institute', - 'mip': 'cmor_table', - 'product': 'product', - 'short_name': 'variable', + "CMIP5": { + "dataset": "model", + "ensemble": "ensemble", + "exp": "experiment", + "frequency": "time_frequency", + "institute": "institute", + "mip": "cmor_table", + "product": "product", + "short_name": "variable", }, - 'CMIP6': { - 'activity': 'activity_drs', - 'dataset': 'source_id', - 'ensemble': 'member_id', - 'exp': 'experiment_id', - 'institute': 'institution_id', - 'grid': 'grid_label', - 'mip': 'table_id', - 'short_name': 'variable', + "CMIP6": { + "activity": "activity_drs", + "dataset": "source_id", + "ensemble": "member_id", + "exp": "experiment_id", + "institute": "institution_id", + "grid": "grid_label", + "mip": "table_id", + "short_name": "variable", }, - 'CORDEX': { - 'dataset': 'rcm_name', - 'driver': 'driving_model', - 'domain': 'domain', - 'ensemble': 'ensemble', - 'exp': 'experiment', - 'frequency': 'time_frequency', - 'institute': 'institute', - 'product': 'product', - 'short_name': 'variable', + "CORDEX": { + "dataset": "rcm_name", + "driver": "driving_model", + "domain": "domain", + "ensemble": "ensemble", + "exp": "experiment", + "frequency": "time_frequency", + "institute": "institute", + "product": "product", + "short_name": "variable", + }, + "obs4MIPs": { + "dataset": "source_id", + "frequency": "time_frequency", + "institute": "institute", + "short_name": "variable", }, - 'obs4MIPs': { - 'dataset': 'source_id', - 'frequency': 'time_frequency', - 'institute': 'institute', - 'short_name': 'variable', - } } """Mapping between the recipe and ESGF facet names.""" DATASET_MAP = { - 'CMIP3': {}, - 'CMIP5': { - 'ACCESS1-0': 'ACCESS1.0', - 'ACCESS1-3': 'ACCESS1.3', - 'bcc-csm1-1': 'BCC-CSM1.1', - 'bcc-csm1-1-m': 'BCC-CSM1.1(m)', - 'CESM1-BGC': 'CESM1(BGC)', - 'CESM1-CAM5': 'CESM1(CAM5)', - 'CESM1-CAM5-1-FV2': 'CESM1(CAM5.1,FV2)', - 'CESM1-FASTCHEM': 'CESM1(FASTCHEM)', - 'CESM1-WACCM': 
'CESM1(WACCM)', - 'CSIRO-Mk3-6-0': 'CSIRO-Mk3.6.0', - 'fio-esm': 'FIO-ESM', - 'GFDL-CM2p1': 'GFDL-CM2.1', - 'inmcm4': 'INM-CM4', - 'MRI-AGCM3-2H': 'MRI-AGCM3.2H', - 'MRI-AGCM3-2S': 'MRI-AGCM3.2S' + "CMIP3": {}, + "CMIP5": { + "ACCESS1-0": "ACCESS1.0", + "ACCESS1-3": "ACCESS1.3", + "bcc-csm1-1": "BCC-CSM1.1", + "bcc-csm1-1-m": "BCC-CSM1.1(m)", + "CESM1-BGC": "CESM1(BGC)", + "CESM1-CAM5": "CESM1(CAM5)", + "CESM1-CAM5-1-FV2": "CESM1(CAM5.1,FV2)", + "CESM1-FASTCHEM": "CESM1(FASTCHEM)", + "CESM1-WACCM": "CESM1(WACCM)", + "CSIRO-Mk3-6-0": "CSIRO-Mk3.6.0", + "fio-esm": "FIO-ESM", + "GFDL-CM2p1": "GFDL-CM2.1", + "inmcm4": "INM-CM4", + "MRI-AGCM3-2H": "MRI-AGCM3.2H", + "MRI-AGCM3-2S": "MRI-AGCM3.2S", }, - 'CMIP6': {}, - 'CORDEX': {}, - 'obs4MIPs': {}, + "CMIP6": {}, + "CORDEX": {}, + "obs4MIPs": {}, } """Cache for the mapping between recipe/filesystem and ESGF dataset names.""" @@ -90,20 +90,20 @@ def create_dataset_map(): dataset_map = {} indices = { - 'CMIP3': 2, - 'CMIP5': 3, - 'CMIP6': 3, - 'CORDEX': 7, - 'obs4MIPs': 2, + "CMIP3": 2, + "CMIP5": 3, + "CMIP6": 3, + "CORDEX": 7, + "obs4MIPs": 2, } for project in FACETS: dataset_map[project] = {} - dataset_key = FACETS[project]['dataset'] + dataset_key = FACETS[project]["dataset"] ctx = connection.new_context( project=project, facets=[dataset_key], - fields=['id'], + fields=["id"], latest=True, ) available_datasets = sorted(ctx.facet_counts[dataset_key]) @@ -114,22 +114,26 @@ def create_dataset_map(): # Figure out the ESGF name of the requested dataset n_available = len(available_datasets) for i, dataset in enumerate(available_datasets, 1): - print(f"Looking for dataset name of facet name" - f" {dataset} ({i} of {n_available})") + print( + f"Looking for dataset name of facet name" + f" {dataset} ({i} of {n_available})" + ) query = {dataset_key: dataset} dataset_result = next(iter(ctx.search(batch_size=1, **query))) print(f"Dataset id: {dataset_result.dataset_id}") dataset_id = dataset_result.dataset_id if dataset not in dataset_id: idx = indices[project] - dataset_alias = dataset_id.split('.')[idx] - print(f"Found dataset name '{dataset_alias}'" - f" for facet '{dataset}',") + dataset_alias = dataset_id.split(".")[idx] + print( + f"Found dataset name '{dataset_alias}'" + f" for facet '{dataset}'," + ) dataset_map[project][dataset_alias] = dataset return dataset_map -if __name__ == '__main__': +if __name__ == "__main__": # Run this module to create an up to date DATASET_MAP print(create_dataset_map()) diff --git a/esmvalcore/exceptions.py b/esmvalcore/exceptions.py index ef8ebc0136..20c20c2d35 100644 --- a/esmvalcore/exceptions.py +++ b/esmvalcore/exceptions.py @@ -1,4 +1,5 @@ """Exceptions that may be raised by ESMValCore.""" + import sys @@ -18,7 +19,7 @@ def _suppressed_hook(error, message, traceback): """https://stackoverflow.com/a/27674608.""" if issubclass(error, SuppressedError): # Print only the message and hide the traceback - print(f'{error.__name__}: {message}', file=sys.stderr) + print(f"{error.__name__}: {message}", file=sys.stderr) else: # Print full traceback sys.__excepthook__(error, message, traceback) diff --git a/esmvalcore/experimental/__init__.py b/esmvalcore/experimental/__init__.py index aeea2c0f28..7b9b0df65d 100644 --- a/esmvalcore/experimental/__init__.py +++ b/esmvalcore/experimental/__init__.py @@ -9,15 +9,15 @@ from .recipe import Recipe from .utils import RecipeList, get_all_recipes, get_recipe -logging.basicConfig(format='%(message)s', - level=logging.INFO, - stream=sys.stdout) +logging.basicConfig( + 
format="%(message)s", level=logging.INFO, stream=sys.stdout +) __all__ = [ - 'CFG', - 'get_all_recipes', - 'get_recipe', - 'Recipe', - 'RecipeList', - 'warnings', + "CFG", + "get_all_recipes", + "get_recipe", + "Recipe", + "RecipeList", + "warnings", ] diff --git a/esmvalcore/experimental/_logging.py b/esmvalcore/experimental/_logging.py index 206376c9c0..49f9c62153 100644 --- a/esmvalcore/experimental/_logging.py +++ b/esmvalcore/experimental/_logging.py @@ -30,17 +30,18 @@ def log_to_dir(drc: Path): drc.mkdir(parents=True, exist_ok=True) # create file handler which logs even debug messages - debug_log_file = logging.FileHandler(drc / 'main_log_debug.txt') + debug_log_file = logging.FileHandler(drc / "main_log_debug.txt") debug_log_file.setLevel(logging.DEBUG) formatter = logging.Formatter( - '%(asctime)s UTC [%(process)d] %(levelname)-7s' - ' %(name)s:%(lineno)s %(message)s') + "%(asctime)s UTC [%(process)d] %(levelname)-7s" + " %(name)s:%(lineno)s %(message)s" + ) debug_log_file.setFormatter(formatter) # create file handler which logs simple info messages - simple_log_file = logging.FileHandler(drc / 'main_log.txt') + simple_log_file = logging.FileHandler(drc / "main_log.txt") simple_log_file.setLevel(logging.INFO) - formatter = logging.Formatter('%(levelname)-7s [%(process)d] %(message)s') + formatter = logging.Formatter("%(levelname)-7s [%(process)d] %(message)s") simple_log_file.setFormatter(formatter) # add the handlers to root logger diff --git a/esmvalcore/experimental/_warnings.py b/esmvalcore/experimental/_warnings.py index b31eb78c20..ddc474f568 100644 --- a/esmvalcore/experimental/_warnings.py +++ b/esmvalcore/experimental/_warnings.py @@ -5,12 +5,13 @@ def _warning_formatter(message, category, filename, lineno, line=None): """Patch warning formatting to not mention itself.""" - return f'{filename}:{lineno}: {category.__name__}: {message}\n' + return f"{filename}:{lineno}: {category.__name__}: {message}\n" warnings.formatwarning = _warning_formatter warnings.warn( - '\n Thank you for trying out the new ESMValCore API.' - '\n Note that this API is experimental and may be subject to change.' - '\n More info: https://github.com/ESMValGroup/ESMValCore/issues/498', ) + "\n Thank you for trying out the new ESMValCore API." + "\n Note that this API is experimental and may be subject to change." + "\n More info: https://github.com/ESMValGroup/ESMValCore/issues/498", +) diff --git a/esmvalcore/experimental/recipe.py b/esmvalcore/experimental/recipe.py index 18e520324b..f199ef719f 100644 --- a/esmvalcore/experimental/recipe.py +++ b/esmvalcore/experimental/recipe.py @@ -19,7 +19,7 @@ logger = logging.getLogger(__file__) -class Recipe(): +class Recipe: """API wrapper for the esmvalcore Recipe object. This class can be used to inspect and run the recipe. 
@@ -33,7 +33,7 @@ class Recipe(): def __init__(self, path: os.PathLike): self.path = Path(path) if not self.path.exists(): - raise FileNotFoundError(f'Cannot find recipe: `{path}`.') + raise FileNotFoundError(f"Cannot find recipe: `{path}`.") self._engine: Optional[RecipeEngine] = None self._data: Optional[Dict] = None @@ -42,7 +42,7 @@ def __init__(self, path: os.PathLike): def __repr__(self) -> str: """Return canonical string representation.""" - return f'{self.__class__.__name__}({self.name!r})' + return f"{self.__class__.__name__}({self.name!r})" def __str__(self) -> str: """Return string representation.""" @@ -70,7 +70,7 @@ def name(self): def data(self) -> dict: """Return dictionary representation of the recipe.""" if self._data is None: - with open(self.path, 'r', encoding='utf-8') as yaml_file: + with open(self.path, "r", encoding="utf-8") as yaml_file: self._data = yaml.safe_load(yaml_file) return self._data @@ -94,9 +94,9 @@ def _load(self, session: Session) -> RecipeEngine: """ logger.info(pprint.pformat(session)) - return RecipeEngine(raw_recipe=self.data, - session=session, - recipe_file=self.path) + return RecipeEngine( + raw_recipe=self.data, session=session, recipe_file=self.path + ) def run( self, @@ -130,7 +130,7 @@ def run( self.last_session = session if task: - session['diagnostics'] = task + session["diagnostics"] = task with log_to_dir(session.run_dir): _dask.check_distributed_config() @@ -154,10 +154,10 @@ def get_output(self) -> RecipeOutput: grouped by diagnostic task. """ if self._engine is None: - raise AttributeError('Run the recipe first using `.run()`.') + raise AttributeError("Run the recipe first using `.run()`.") output = self._engine.get_output() - task_output = output['task_output'] + task_output = output["task_output"] return RecipeOutput( task_output=task_output, diff --git a/esmvalcore/experimental/recipe_info.py b/esmvalcore/experimental/recipe_info.py index cb0fd32f95..3a1a41b864 100644 --- a/esmvalcore/experimental/recipe_info.py +++ b/esmvalcore/experimental/recipe_info.py @@ -1,4 +1,5 @@ """Handles recipe metadata (under 'documentation' section).""" + import os import textwrap from pathlib import Path @@ -10,7 +11,7 @@ from .templates import get_template -class RecipeInfo(): +class RecipeInfo: """API wrapper for the esmvalcore Recipe object. This class can be used to inspect and run the recipe. 
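Unlike `Recipe`, `RecipeInfo` only reads the recipe's `documentation` section. A minimal sketch (the file name is an example):

```python
from esmvalcore.experimental.recipe_info import RecipeInfo

info = RecipeInfo.from_yaml("recipe_python.yml")
print(info.title)  # taken from the recipe's 'documentation' section
print(info)        # the '## title' / '### Authors' summary built by __str__
```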
@@ -33,35 +34,35 @@ def __init__(self, data, filename: Union[os.PathLike, str]): def __repr__(self) -> str: """Return canonical string representation.""" - return f'{self.__class__.__name__}({self.name!r})' + return f"{self.__class__.__name__}({self.name!r})" def __str__(self) -> str: """Return string representation.""" - bullet = '\n - ' - string = f'## {self.title}' + bullet = "\n - " + string = f"## {self.title}" - string += '\n\n' - string += f'{self.description}' + string += "\n\n" + string += f"{self.description}" - string += '\n\n### Authors' + string += "\n\n### Authors" for author in self.authors: - string += f'{bullet}{author}' + string += f"{bullet}{author}" - string += '\n\n### Maintainers' + string += "\n\n### Maintainers" for maintainer in self.maintainers: - string += f'{bullet}{maintainer}' + string += f"{bullet}{maintainer}" if self.projects: - string += '\n\n### Projects' + string += "\n\n### Projects" for project in self.projects: - string += f'{bullet}{project}' + string += f"{bullet}{project}" if self.references: - string += '\n\n### References' + string += "\n\n### References" for reference in self.references: - string += bullet + reference.render('plaintext') + string += bullet + reference.render("plaintext") - string += '\n' + string += "\n" return string @@ -72,34 +73,34 @@ def _repr_html_(self) -> str: @classmethod def from_yaml(cls, path: str): """Return instance of 'RecipeInfo' from a recipe in yaml format.""" - data = yaml.safe_load(Path(path).read_text(encoding='utf-8')) + data = yaml.safe_load(Path(path).read_text(encoding="utf-8")) return cls(data, filename=path) @property def name(self) -> str: """Name of the recipe.""" - return Path(self.filename).stem.replace('_', ' ').capitalize() + return Path(self.filename).stem.replace("_", " ").capitalize() @property def title(self) -> str: """Title of the recipe.""" if self._title is None: - self._title = self.data['documentation']['title'] + self._title = self.data["documentation"]["title"] return self._title @property def description(self) -> str: """Recipe description.""" if self._description is None: - description = self.data['documentation']['description'] - self._description = '\n'.join(textwrap.wrap(description)) + description = self.data["documentation"]["description"] + self._description = "\n".join(textwrap.wrap(description)) return self._description @property def authors(self) -> tuple: """List of recipe authors.""" if self._authors is None: - tags = self.data['documentation'].get('authors', ()) + tags = self.data["documentation"].get("authors", ()) self._authors = tuple(Contributor.from_tag(tag) for tag in tags) return self._authors @@ -107,16 +108,17 @@ def authors(self) -> tuple: def maintainers(self) -> tuple: """List of recipe maintainers.""" if self._maintainers is None: - tags = self.data['documentation'].get('maintainer', ()) + tags = self.data["documentation"].get("maintainer", ()) self._maintainers = tuple( - Contributor.from_tag(tag) for tag in tags) + Contributor.from_tag(tag) for tag in tags + ) return self._maintainers @property def projects(self) -> tuple: """List of recipe projects.""" if self._projects is None: - tags = self.data['documentation'].get('projects', []) + tags = self.data["documentation"].get("projects", []) self._projects = tuple(Project.from_tag(tag) for tag in tags) return self._projects @@ -124,7 +126,7 @@ def projects(self) -> tuple: def references(self) -> tuple: """List of project references.""" if self._references is None: - tags = 
self.data['documentation'].get('references', []) + tags = self.data["documentation"].get("references", []) self._references = tuple(Reference.from_tag(tag) for tag in tags) return self._references @@ -136,7 +138,7 @@ def render(self, template=None): customize the output. """ if not template: - template = get_template(self.__class__.__name__ + '.j2') + template = get_template(self.__class__.__name__ + ".j2") rendered = template.render(info=self) return rendered diff --git a/esmvalcore/experimental/recipe_metadata.py b/esmvalcore/experimental/recipe_metadata.py index 1801e92ca2..25e47c91ac 100644 --- a/esmvalcore/experimental/recipe_metadata.py +++ b/esmvalcore/experimental/recipe_metadata.py @@ -32,15 +32,17 @@ def __init__(self, name: str, institute: str, orcid: Optional[str] = None): def __repr__(self) -> str: """Return canonical string representation.""" - return (f'{self.__class__.__name__}({self.name!r},' - f' institute={self.institute!r}, orcid={self.orcid!r})') + return ( + f"{self.__class__.__name__}({self.name!r}," + f" institute={self.institute!r}, orcid={self.orcid!r})" + ) def __str__(self) -> str: """Return string representation.""" - string = f'{self.name} ({self.institute}' + string = f"{self.name} ({self.institute}" if self.orcid: - string += f'; {self.orcid}' - string += ')' + string += f"; {self.orcid}" + string += ")" return string def _repr_markdown_(self) -> str: @@ -48,7 +50,7 @@ def _repr_markdown_(self) -> str: return str(self) @classmethod - def from_tag(cls, tag: str) -> 'Contributor': + def from_tag(cls, tag: str) -> "Contributor": """Return an instance of Contributor from a tag (``TAGS``). Parameters @@ -57,11 +59,11 @@ def from_tag(cls, tag: str) -> 'Contributor': The contributor tags are defined in the authors section in ``config-references.yml``. """ - mapping = TAGS.get_tag_value(section='authors', tag=tag) + mapping = TAGS.get_tag_value(section="authors", tag=tag) - name = ' '.join(reversed(mapping['name'].split(', '))) - institute = mapping.get('institute', 'No affiliation') - orcid = mapping['orcid'] + name = " ".join(reversed(mapping["name"].split(", "))) + institute = mapping.get("institute", "No affiliation") + orcid = mapping["orcid"] return cls(name=name, institute=institute, orcid=orcid) @@ -74,9 +76,9 @@ def from_dict(cls, attributes): attributes : dict Dictionary containing name / institute [/ orcid]. """ - name = attributes['name'] - institute = attributes['institute'] - orcid = attributes.get('orcid', None) + name = attributes["name"] + institute = attributes["institute"] + orcid = attributes.get("orcid", None) return cls(name=name, institute=institute, orcid=orcid) @@ -94,15 +96,15 @@ def __init__(self, project: str): def __repr__(self) -> str: """Return canonical string representation.""" - return f'{self.__class__.__name__}({self.project!r})' + return f"{self.__class__.__name__}({self.project!r})" def __str__(self) -> str: """Return string representation.""" - string = f'{self.project}' + string = f"{self.project}" return string @classmethod - def from_tag(cls, tag: str) -> 'Project': + def from_tag(cls, tag: str) -> "Project": """Return an instance of Project from a tag (``TAGS``). Parameters @@ -110,7 +112,7 @@ def from_tag(cls, tag: str) -> 'Project': tag : str The project tags are defined in ``config-references.yml``. 
""" - project = TAGS['projects'][tag] + project = TAGS["projects"][tag] return cls(project=project) @@ -134,15 +136,16 @@ def __init__(self, filename: str): if len(bib_data.entries) > 1: raise NotImplementedError( - f'{self.__class__.__name__} cannot handle bibtex files ' - 'with more than 1 entry.') + f"{self.__class__.__name__} cannot handle bibtex files " + "with more than 1 entry." + ) self._bib_data = bib_data self._key, self._entry = list(bib_data.entries.items())[0] self._filename = filename @classmethod - def from_tag(cls, tag: str) -> 'Reference': + def from_tag(cls, tag: str) -> "Reference": """Return an instance of Reference from a bibtex tag. Parameters @@ -151,22 +154,22 @@ def from_tag(cls, tag: str) -> 'Reference': The bibtex tags resolved as ``esmvaltool/references/{tag}.bibtex`` or the corresponding directory as defined by the diagnostics path. """ - filename = DIAGNOSTICS.references / f'{tag}.bibtex' + filename = DIAGNOSTICS.references / f"{tag}.bibtex" return cls(filename) def __repr__(self) -> str: """Return canonical string representation.""" - return f'{self.__class__.__name__}({self._key!r})' + return f"{self.__class__.__name__}({self._key!r})" def __str__(self) -> str: """Return string representation.""" - return self.render(renderer='plaintext') + return self.render(renderer="plaintext") def _repr_html_(self) -> str: """Represent using markdown renderer in a notebook environment.""" - return self.render(renderer='html') + return self.render(renderer="html") - def render(self, renderer: str = 'html') -> str: + def render(self, renderer: str = "html") -> str: """Render the reference. Parameters @@ -180,16 +183,18 @@ def render(self, renderer: str = 'html') -> str: str Rendered reference """ - style = 'plain' # alpha, plain, unsrt, unsrtalpha - backend = pybtex.plugin.find_plugin('pybtex.backends', renderer)() - formatter = pybtex.plugin.find_plugin('pybtex.style.formatting', - style)() + style = "plain" # alpha, plain, unsrt, unsrtalpha + backend = pybtex.plugin.find_plugin("pybtex.backends", renderer)() + formatter = pybtex.plugin.find_plugin( + "pybtex.style.formatting", style + )() try: formatter = formatter.format_entry(self._key, self._entry) rendered = formatter.text.render(backend) except Exception as err: raise RenderError( - f'Could not render {self._key!r}: {err}') from None + f"Could not render {self._key!r}: {err}" + ) from None return rendered diff --git a/esmvalcore/experimental/recipe_output.py b/esmvalcore/experimental/recipe_output.py index aab7f749ad..f24f319c70 100644 --- a/esmvalcore/experimental/recipe_output.py +++ b/esmvalcore/experimental/recipe_output.py @@ -1,4 +1,5 @@ """API for handing recipe output.""" + import base64 import getpass import logging @@ -31,10 +32,11 @@ class TaskOutput: def __init__(self, name: str, files: dict): self.name = name - self.title = name.replace('_', ' ').replace(TASKSEP, ': ').title() + self.title = name.replace("_", " ").replace(TASKSEP, ": ").title() self.files = tuple( OutputFile.create(filename, attributes) - for filename, attributes in files.items()) + for filename, attributes in files.items() + ) def __str__(self): """Return string representation.""" @@ -42,10 +44,10 @@ def __str__(self): def __repr__(self): """Return canonical string representation.""" - indent = ' ' - string = f'{self.name}:\n' + indent = " " + string = f"{self.name}:\n" for file in self.files: - string += f'{indent}{file}\n' + string += f"{indent}{file}\n" return string def __len__(self): @@ -59,15 +61,15 @@ def __getitem__(self, 
index: int): @property def image_files(self) -> tuple: """Return a tuple of image objects.""" - return tuple(item for item in self.files if item.kind == 'image') + return tuple(item for item in self.files if item.kind == "image") @property def data_files(self) -> tuple: """Return a tuple of data objects.""" - return tuple(item for item in self.files if item.kind == 'data') + return tuple(item for item in self.files if item.kind == "data") @classmethod - def from_task(cls, task) -> 'TaskOutput': + def from_task(cls, task) -> "TaskOutput": """Create an instance of `TaskOutput` from a Task. Where task is an instance of `esmvalcore._task.BaseTask`. @@ -94,15 +96,15 @@ class DiagnosticOutput: def __init__(self, name, task_output, title=None, description=None): self.name = name self.title = title if title else name.title() - self.description = description if description else '' + self.description = description if description else "" self.task_output = task_output def __repr__(self): """Return canonical string representation.""" - indent = ' ' - string = f'{self.name}:\n' + indent = " " + string = f"{self.name}:\n" for task_output in self.task_output: - string += f'{indent}{task_output}\n' + string += f"{indent}{task_output}\n" return string @@ -152,12 +154,12 @@ def __init__(self, task_output: dict, session=None, info=None): # Create diagnostic output filters: dict = {} for name, tasks in diagnostics.items(): - diagnostic_info = info.data['diagnostics'][name] + diagnostic_info = info.data["diagnostics"][name] self.diagnostics[name] = DiagnosticOutput( name=name, task_output=tasks, - title=diagnostic_info.get('title'), - description=diagnostic_info.get('description'), + title=diagnostic_info.get("title"), + description=diagnostic_info.get("description"), ) # Add data to filters @@ -177,7 +179,7 @@ def _add_to_filters(cls, filters, attributes): values = attributes[attr] # `set()` to avoid duplicates attr_list = filters.get(attr, set()) - if (isinstance(values, str) or not isinstance(values, Sequence)): + if isinstance(values, str) or not isinstance(values, Sequence): attr_list.add(values) else: attr_list.update(values) @@ -192,7 +194,7 @@ def _sort_filters(cls, filters): def __repr__(self): """Return canonical string representation.""" - string = '\n'.join(repr(item) for item in self._task_output.values()) + string = "\n".join(repr(item) for item in self._task_output.values()) return string @@ -220,10 +222,10 @@ def from_core_recipe_output(cls, recipe_output: dict): recipe_output : dict Output from `_recipe.Recipe.get_product_output` """ - task_output = recipe_output['task_output'] - recipe_data = recipe_output['recipe_data'] - session = recipe_output['session'] - recipe_filename = recipe_output['recipe_filename'] + task_output = recipe_output["task_output"] + recipe_data = recipe_output["recipe_data"] + session = recipe_output["session"] + recipe_filename = recipe_output["recipe_filename"] info = RecipeInfo(recipe_data, filename=recipe_filename) info.resolve() @@ -232,18 +234,18 @@ def from_core_recipe_output(cls, recipe_output: dict): def _log_ssh_html_info(self): """Log information about accessing index.html on an SSH server.""" - if 'SSH_CONNECTION' not in os.environ: + if "SSH_CONNECTION" not in os.environ: return - server_ip = os.environ['SSH_CONNECTION'].split()[2] - server_ip_env = '${server}' - server = f'{getpass.getuser()}@{server_ip_env}' - port = '31415' - port_env = '${port}' + server_ip = os.environ["SSH_CONNECTION"].split()[2] + server_ip_env = "${server}" + server = 
f"{getpass.getuser()}@{server_ip_env}" + port = "31415" + port_env = "${port}" command = ( - f'server={server_ip} && port={port} && ' - f'ssh -t -L {port_env}:localhost:{port_env} {server} ' - f'{sys.executable} -m http.server {port_env} -d ' - f'{self.session.session_dir}' + f"server={server_ip} && port={port} && " + f"ssh -t -L {port_env}:localhost:{port_env} {server} " + f"{sys.executable} -m http.server {port_env} -d " + f"{self.session.session_dir}" ) logger.info( "It looks like you are connected to a remote machine via SSH. To " @@ -267,12 +269,12 @@ def write_html(self): A html file `index.html` gets written to the session directory. """ - filename = self.session.session_dir / 'index.html' + filename = self.session.session_dir / "index.html" - template = get_template('recipe_output_page.j2') + template = get_template("recipe_output_page.j2") html_dump = self.render(template=template) - with open(filename, 'w', encoding='utf-8') as file: + with open(filename, "w", encoding="utf-8") as file: file.write(html_dump) logger.info("Wrote recipe output to:\nfile://%s", filename) @@ -286,7 +288,7 @@ def render(self, template=None): customize the output. """ if not template: - template = get_template(self.__class__.__name__ + '.j2') + template = get_template(self.__class__.__name__ + ".j2") rendered = template.render( diagnostics=self.diagnostics.values(), session=self.session, @@ -299,14 +301,14 @@ def render(self, template=None): def read_main_log(self) -> str: """Read log file.""" - return self.session.main_log.read_text(encoding='utf-8') + return self.session.main_log.read_text(encoding="utf-8") def read_main_log_debug(self) -> str: """Read debug log file.""" - return self.session.main_log_debug.read_text(encoding='utf-8') + return self.session.main_log_debug.read_text(encoding="utf-8") -class OutputFile(): +class OutputFile: """Base container for recipe output files. 
Use `OutputFile.create(path='', attributes=attributes)` to @@ -334,27 +336,28 @@ def __init__(self, path: str, attributes: Optional[dict] = None): def __repr__(self): """Return canonical string representation.""" - return f'{self.__class__.__name__}({self.path.name!r})' + return f"{self.__class__.__name__}({self.path.name!r})" @property def caption(self) -> str: """Return the caption of the file (fallback to path).""" - return self.attributes.get('caption', str(self.path)) + return self.attributes.get("caption", str(self.path)) @property def authors(self) -> tuple: """List of recipe authors.""" if self._authors is None: - authors = self.attributes['authors'] + authors = self.attributes["authors"] self._authors = tuple( - Contributor.from_dict(author) for author in authors) + Contributor.from_dict(author) for author in authors + ) return self._authors @property def references(self) -> tuple: """List of project references.""" if self._references is None: - tags = self.attributes.get('references', []) + tags = self.attributes.get("references", []) self._references = tuple(Reference.from_tag(tag) for tag in tags) return self._references @@ -379,24 +382,24 @@ def _get_derived_path(self, append: str, suffix: Optional[str] = None): @property def citation_file(self): """Return path of citation file (bibtex format).""" - return self._get_derived_path('_citation', '.bibtex') + return self._get_derived_path("_citation", ".bibtex") @property def data_citation_file(self): """Return path of data citation info (txt format).""" - return self._get_derived_path('_data_citation_info', '.txt') + return self._get_derived_path("_data_citation_info", ".txt") @property def provenance_xml_file(self): """Return path of provenance file (xml format).""" - return self._get_derived_path('_provenance', '.xml') + return self._get_derived_path("_provenance", ".xml") @classmethod def create( cls, path: str, attributes: Optional[dict] = None, - ) -> 'OutputFile': + ) -> "OutputFile": """Construct new instances of OutputFile. Chooses a derived class if suitable. 
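The `create` factory in the next hunk dispatches on the file suffix, so callers never pick a container class by hand. A minimal usage sketch with invented paths and attributes (real attribute dictionaries come from the recipe metadata):

    from esmvalcore.experimental.recipe_output import OutputFile

    # Invented example values; real attributes come from recipe metadata.
    attributes = {"caption": "Example plot"}

    plot = OutputFile.create("diag/plot.png", attributes)  # ImageFile, kind "image"
    data = OutputFile.create("diag/data.nc", attributes)  # DataFile, kind "data"
    other = OutputFile.create("diag/notes.txt", attributes)  # plain OutputFile
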
@@ -404,9 +407,9 @@ def create( item_class: Type[OutputFile] ext = Path(path).suffix - if ext in ('.png', ): + if ext in (".png",): item_class = ImageFile - elif ext in ('.nc', ): + elif ext in (".nc",): item_class = DataFile else: item_class = cls @@ -417,13 +420,13 @@ def create( class ImageFile(OutputFile): """Container for image output.""" - kind = 'image' + kind = "image" def to_base64(self) -> str: """Encode image as base64 to embed in a Jupyter notebook.""" with open(self.path, "rb") as file: encoded = base64.b64encode(file.read()) - return encoded.decode('utf-8') + return encoded.decode("utf-8") def _repr_html_(self): """Render png as html in Jupyter notebook.""" @@ -434,12 +437,13 @@ def _repr_html_(self): class DataFile(OutputFile): """Container for data output.""" - kind = 'data' + kind = "data" def load_xarray(self): """Load data using xarray.""" # local import because `ESMValCore` does not depend on `xarray` import xarray as xr + return xr.load_dataset(self.path) def load_iris(self): diff --git a/esmvalcore/experimental/templates/__init__.py b/esmvalcore/experimental/templates/__init__.py index e0f38b93e0..9df5688eea 100644 --- a/esmvalcore/experimental/templates/__init__.py +++ b/esmvalcore/experimental/templates/__init__.py @@ -1,4 +1,5 @@ """Collection of jinja2 templates to render html output.""" + from pathlib import Path from jinja2 import Environment, FileSystemLoader @@ -9,5 +10,5 @@ get_template = environment.get_template __all__ = [ - 'get_template', + "get_template", ] diff --git a/esmvalcore/experimental/utils.py b/esmvalcore/experimental/utils.py index 3c2b1f003a..8d6d145af0 100644 --- a/esmvalcore/experimental/utils.py +++ b/esmvalcore/experimental/utils.py @@ -12,6 +12,7 @@ class RecipeList(list): """Container for recipes.""" + def find(self, query: Pattern[str]): """Search for recipes matching the search query or pattern. @@ -56,9 +57,9 @@ def get_all_recipes(subdir: Optional[str] = None) -> list: List of available recipes """ if subdir is None: - subdir = '**' + subdir = "**" rootdir = DIAGNOSTICS.recipes - files = rootdir.glob(f'{subdir}/*.yml') + files = rootdir.glob(f"{subdir}/*.yml") return RecipeList(Recipe(file) for file in files) @@ -90,9 +91,9 @@ def get_recipe(name: Union[os.PathLike, str]) -> Recipe: locations = Path(), DIAGNOSTICS.recipes if isinstance(name, str): - filenames = (name, name + '.yml') + filenames = (name, name + ".yml") else: - filenames = (name, ) + filenames = (name,) for location in locations: for filename in filenames: @@ -100,4 +101,4 @@ def get_recipe(name: Union[os.PathLike, str]) -> Recipe: if try_path.exists(): return Recipe(try_path) - raise FileNotFoundError(f'Could not find `{name}` in {locations}.') + raise FileNotFoundError(f"Could not find `{name}` in {locations}.") diff --git a/esmvalcore/iris_helpers.py b/esmvalcore/iris_helpers.py index eb9a96461e..4162233eec 100644 --- a/esmvalcore/iris_helpers.py +++ b/esmvalcore/iris_helpers.py @@ -1,4 +1,5 @@ """Auxiliary functions for :mod:`iris`.""" + from __future__ import annotations from typing import Dict, Iterable, List, Literal, Sequence @@ -111,7 +112,7 @@ def date2num(date, unit, dtype=np.float64): def merge_cube_attributes( cubes: Sequence[Cube], - delimiter: str = ' ', + delimiter: str = " ", ) -> None: """Merge attributes of all given cubes in-place. 
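The hunks that follow spell out the merge strategy: collect every attribute value, keep a value untouched when all cubes agree on it, and otherwise join the sorted unique values with the delimiter. A small sketch of that behaviour with invented cubes (assuming only that `merge_cube_attributes` is imported from `esmvalcore.iris_helpers`):

    import numpy as np
    from iris.cube import Cube

    from esmvalcore.iris_helpers import merge_cube_attributes

    # Invented cubes; only the attributes matter for this example.
    cube1 = Cube(np.zeros(1), attributes={"project": "CMIP6", "source": "model-A"})
    cube2 = Cube(np.zeros(1), attributes={"project": "CMIP6", "source": "model-B"})

    merge_cube_attributes([cube1, cube2])  # modifies both cubes in-place

    print(cube1.attributes["project"])  # CMIP6 (identical values are kept)
    print(cube1.attributes["source"])  # model-A model-B (sorted, joined with " ")
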
@@ -142,7 +143,7 @@ def merge_cube_attributes( # Step 1: collect all attribute values in a list attributes: Dict[str, List[NetCDFAttr]] = {} for cube in cubes: - for (attr, val) in cube.attributes.items(): + for attr, val in cube.attributes.items(): attributes.setdefault(attr, []) attributes[attr].append(val) @@ -160,7 +161,7 @@ def merge_cube_attributes( # Step 3: if values are not equal, first convert them to strings (so that # set() can be used); then extract unique elements from this list, sort it, # and use the delimiter to join all elements to a single string. - for (attr, vals) in attributes.items(): + for attr, vals in attributes.items(): set_of_str = sorted({str(v) for v in vals}) if len(set_of_str) == 1: final_attributes[attr] = vals[0] @@ -175,7 +176,7 @@ def merge_cube_attributes( def _rechunk( array: da.core.Array, complete_dims: list[int], - remaining_dims: int | Literal['auto'], + remaining_dims: int | Literal["auto"], ) -> da.core.Array: """Rechunk a given array so that it is not chunked along given dims.""" new_chunks: list[str | int] = [remaining_dims] * array.ndim @@ -187,7 +188,7 @@ def _rechunk( def _rechunk_dim_metadata( cube: Cube, complete_dims: Iterable[int], - remaining_dims: int | Literal['auto'] = 'auto', + remaining_dims: int | Literal["auto"] = "auto", ) -> None: """Rechunk dimensional metadata of a cube (in-place).""" # Non-dimensional coords that span complete_dims @@ -228,7 +229,7 @@ def _rechunk_dim_metadata( def rechunk_cube( cube: Cube, complete_coords: Iterable[Coord | str], - remaining_dims: int | Literal['auto'] = 'auto', + remaining_dims: int | Literal["auto"] = "auto", ) -> Cube: """Rechunk cube so that it is not chunked along given dimensions. @@ -292,8 +293,8 @@ def has_regular_grid(cube: Cube) -> bool: """ try: - lat = cube.coord('latitude') - lon = cube.coord('longitude') + lat = cube.coord("latitude") + lon = cube.coord("longitude") except CoordinateNotFoundError: return False if lat.ndim != 1 or lon.ndim != 1: @@ -321,8 +322,8 @@ def has_irregular_grid(cube: Cube) -> bool: """ try: - lat = cube.coord('latitude') - lon = cube.coord('longitude') + lat = cube.coord("latitude") + lon = cube.coord("longitude") except CoordinateNotFoundError: return False if lat.ndim == 2 and lon.ndim == 2: @@ -348,8 +349,8 @@ def has_unstructured_grid(cube: Cube) -> bool: """ try: - lat = cube.coord('latitude') - lon = cube.coord('longitude') + lat = cube.coord("latitude") + lon = cube.coord("longitude") except CoordinateNotFoundError: return False if lat.ndim != 1 or lon.ndim != 1: diff --git a/esmvalcore/local.py b/esmvalcore/local.py index 539679f682..61c2782b58 100644 --- a/esmvalcore/local.py +++ b/esmvalcore/local.py @@ -1,4 +1,5 @@ """Find files on the local filesystem.""" + from __future__ import annotations import itertools @@ -37,13 +38,14 @@ def _get_from_pattern(pattern, date_range_pattern, stem, group): if not daterange: # Retry with extended context for CMIP3 context = r"(?:^|[-_.]|$)" - date_range_pattern_with_context = (context + date_range_pattern + - context) + date_range_pattern_with_context = ( + context + date_range_pattern + context + ) daterange = re.search(date_range_pattern_with_context, stem) if daterange: start_point = daterange.group(group) - end_group = '_'.join([group, 'end']) + end_group = "_".join([group, "end"]) end_point = daterange.group(end_group) else: # Check for single dates in the filename @@ -53,8 +55,8 @@ def _get_from_pattern(pattern, date_range_pattern, stem, group): start_point = end_point = dates[0][0] elif len(dates) > 
1: # Check for dates at start or (exclusive or) end of filename - start = re.search(r'^' + pattern, stem) - end = re.search(pattern + r'$', stem) + start = re.search(r"^" + pattern, stem) + end = re.search(pattern + r"$", stem) if start and not end: start_point = end_point = start.group(group) elif end: @@ -64,7 +66,8 @@ def _get_from_pattern(pattern, date_range_pattern, stem, group): def _get_start_end_date( - file: str | Path | LocalFile | ESGFFile) -> tuple[str, str]: + file: str | Path | LocalFile | ESGFFile, +) -> tuple[str, str]: """Get the start and end dates as a string from a file name. Examples of allowed dates: 1980, 198001, 1980-01, 19801231, 1980-12-31, @@ -93,7 +96,7 @@ def _get_start_end_date( ValueError Start or end date cannot be determined. """ - if hasattr(file, 'name'): # Path, LocalFile, ESGFFile + if hasattr(file, "name"): # Path, LocalFile, ESGFFile stem = Path(file.name).stem else: # str stem = Path(file).stem @@ -101,59 +104,71 @@ start_date = end_date = None # Build regex - time_pattern = (r"(?P<hour>[0-2][0-9]" - r"(?P<minute>[0-5][0-9]" - r"(?P<second>[0-5][0-9])?)?Z?)") - date_pattern = (r"(?P<year>[0-9]{4})" - r"(?P<month>-?[01][0-9]" - r"(?P<day>-?[0-3][0-9]" - rf"(T?{time_pattern})?)?)?") - datetime_pattern = (rf"(?P<datetime>{date_pattern})") + time_pattern = ( + r"(?P<hour>[0-2][0-9]" + r"(?P<minute>[0-5][0-9]" + r"(?P<second>[0-5][0-9])?)?Z?)" + ) + date_pattern = ( + r"(?P<year>[0-9]{4})" + r"(?P<month>-?[01][0-9]" + r"(?P<day>-?[0-3][0-9]" + rf"(T?{time_pattern})?)?)?" + ) + datetime_pattern = rf"(?P<datetime>{date_pattern})" end_datetime_pattern = datetime_pattern.replace(">", "_end>") # Dates can either be delimited by '-', '_', or '_cat_' (the latter for # CMIP3) - date_range_pattern = (datetime_pattern + r"[-_](?:cat_)?" + - end_datetime_pattern) + date_range_pattern = ( + datetime_pattern + r"[-_](?:cat_)?" 
+ end_datetime_pattern + ) # Find dates using the regex - start_date, end_date = _get_from_pattern(datetime_pattern, - date_range_pattern, stem, - 'datetime') + start_date, end_date = _get_from_pattern( + datetime_pattern, date_range_pattern, stem, "datetime" + ) # As final resort, try to get the dates from the file contents - if ((start_date is None or end_date is None) - and isinstance(file, (str, Path)) and Path(file).exists()): + if ( + (start_date is None or end_date is None) + and isinstance(file, (str, Path)) + and Path(file).exists() + ): logger.debug("Must load file %s for daterange ", file) cubes = iris.load(file) for cube in cubes: logger.debug(cube) try: - time = cube.coord('time') + time = cube.coord("time") except iris.exceptions.CoordinateNotFoundError: continue start_date = isodate.date_isoformat( - time.cell(0).point, format=isodate.isostrf.DATE_BAS_COMPLETE) + time.cell(0).point, format=isodate.isostrf.DATE_BAS_COMPLETE + ) end_date = isodate.date_isoformat( - time.cell(-1).point, format=isodate.isostrf.DATE_BAS_COMPLETE) + time.cell(-1).point, format=isodate.isostrf.DATE_BAS_COMPLETE + ) break if start_date is None or end_date is None: raise ValueError( f"File {file} datetimes do not match a recognized pattern and " - f"time coordinate can not be read from the file") + f"time coordinate can not be read from the file" + ) # Remove potential '-' characters from datetimes - start_date = start_date.replace('-', '') - end_date = end_date.replace('-', '') + start_date = start_date.replace("-", "") + end_date = end_date.replace("-", "") return start_date, end_date def _get_start_end_year( - file: str | Path | LocalFile | ESGFFile) -> tuple[int, int]: + file: str | Path | LocalFile | ESGFFile, +) -> tuple[int, int]: """Get the start and end year as int from a file name. See :func:`_get_start_end_date`. 
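To see what the datetime patterns in `_get_start_end_date` actually extract, here is a standalone sketch with an invented CMIP-style file stem (the real function additionally retries with extra filename context for CMIP3 data and, as a last resort, reads the time coordinate from the file itself):

    import re

    # The patterns from _get_start_end_date, reproduced for illustration.
    time_pattern = (
        r"(?P<hour>[0-2][0-9]"
        r"(?P<minute>[0-5][0-9]"
        r"(?P<second>[0-5][0-9])?)?Z?)"
    )
    date_pattern = (
        r"(?P<year>[0-9]{4})"
        r"(?P<month>-?[01][0-9]"
        r"(?P<day>-?[0-3][0-9]"
        rf"(T?{time_pattern})?)?)?"
    )
    datetime_pattern = rf"(?P<datetime>{date_pattern})"
    end_datetime_pattern = datetime_pattern.replace(">", "_end>")
    date_range_pattern = (
        datetime_pattern + r"[-_](?:cat_)?" + end_datetime_pattern
    )

    # Invented file stem in the usual CMIP naming style.
    stem = "tas_Amon_EC-Earth3_historical_r1i1p1f1_gr_185001-201412"
    match = re.search(date_range_pattern, stem)
    print(match.group("datetime"), match.group("datetime_end"))  # 185001 201412
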
@@ -187,26 +202,26 @@ def _dates_to_timerange(start_date, end_date): end_date = str(end_date) # Pad years with 0s if not wildcard or relative time range - if start_date != '*' and not start_date.startswith('P'): + if start_date != "*" and not start_date.startswith("P"): start_date = start_date.zfill(4) - if end_date != '*' and not end_date.startswith('P'): + if end_date != "*" and not end_date.startswith("P"): end_date = end_date.zfill(4) - return f'{start_date}/{end_date}' + return f"{start_date}/{end_date}" def _replace_years_with_timerange(variable): """Set `timerange` tag from tags `start_year` and `end_year`.""" - start_year = variable.get('start_year') - end_year = variable.get('end_year') + start_year = variable.get("start_year") + end_year = variable.get("end_year") if start_year and end_year: - variable['timerange'] = _dates_to_timerange(start_year, end_year) + variable["timerange"] = _dates_to_timerange(start_year, end_year) elif start_year: - variable['timerange'] = _dates_to_timerange(start_year, start_year) + variable["timerange"] = _dates_to_timerange(start_year, start_year) elif end_year: - variable['timerange'] = _dates_to_timerange(end_year, end_year) - variable.pop('start_year', None) - variable.pop('end_year', None) + variable["timerange"] = _dates_to_timerange(end_year, end_year) + variable.pop("start_year", None) + variable.pop("end_year", None) def _parse_period(timerange): @@ -219,40 +234,44 @@ def _parse_period(timerange): start_date = None end_date = None time_format = None - datetime_format = (isodate.DATE_BAS_COMPLETE + 'T' + - isodate.TIME_BAS_COMPLETE) - if timerange.split('/')[0].startswith('P'): + datetime_format = ( + isodate.DATE_BAS_COMPLETE + "T" + isodate.TIME_BAS_COMPLETE + ) + if timerange.split("/")[0].startswith("P"): try: - end_date = isodate.parse_datetime(timerange.split('/')[1]) + end_date = isodate.parse_datetime(timerange.split("/")[1]) time_format = datetime_format except isodate.ISO8601Error: - end_date = isodate.parse_date(timerange.split('/')[1]) + end_date = isodate.parse_date(timerange.split("/")[1]) time_format = isodate.DATE_BAS_COMPLETE - delta = isodate.parse_duration(timerange.split('/')[0]) + delta = isodate.parse_duration(timerange.split("/")[0]) start_date = end_date - delta - elif timerange.split('/')[1].startswith('P'): + elif timerange.split("/")[1].startswith("P"): try: - start_date = isodate.parse_datetime(timerange.split('/')[0]) + start_date = isodate.parse_datetime(timerange.split("/")[0]) time_format = datetime_format except isodate.ISO8601Error: - start_date = isodate.parse_date(timerange.split('/')[0]) + start_date = isodate.parse_date(timerange.split("/")[0]) time_format = isodate.DATE_BAS_COMPLETE - delta = isodate.parse_duration(timerange.split('/')[1]) + delta = isodate.parse_duration(timerange.split("/")[1]) end_date = start_date + delta if time_format == datetime_format: start_date = str( - isodate.datetime_isoformat(start_date, format=datetime_format)) + isodate.datetime_isoformat(start_date, format=datetime_format) + ) end_date = str( - isodate.datetime_isoformat(end_date, format=datetime_format)) + isodate.datetime_isoformat(end_date, format=datetime_format) + ) elif time_format == isodate.DATE_BAS_COMPLETE: - start_date = str(isodate.date_isoformat(start_date, - format=time_format)) + start_date = str( + isodate.date_isoformat(start_date, format=time_format) + ) end_date = str(isodate.date_isoformat(end_date, format=time_format)) if start_date is None and end_date is None: - start_date = 
timerange.split('/')[0] - end_date = timerange.split('/')[1] + start_date = timerange.split("/")[0] + end_date = timerange.split("/")[1] return start_date, end_date @@ -271,12 +290,12 @@ def _truncate_dates(date, file_date): zeros (e.g., use ``date='0100'`` and ``file_date='199901'`` for a correct comparison). """ - date = re.sub("[^0-9]", '', date) - file_date = re.sub("[^0-9]", '', file_date) + date = re.sub("[^0-9]", "", date) + file_date = re.sub("[^0-9]", "", file_date) if len(date) < len(file_date): - file_date = file_date[0:len(date)] + file_date = file_date[0 : len(date)] elif len(date) > len(file_date): - date = date[0:len(file_date)] + date = date[0 : len(file_date)] return int(date), int(file_date) @@ -290,7 +309,7 @@ def _select_files(filenames, timerange): Otherwise, the file selection occurs taking into account the time resolution of the file. """ - if '*' in timerange: + if "*" in timerange: # TODO: support * combined with a period return filenames @@ -314,19 +333,22 @@ def _replace_tags( ) -> list[Path]: """Replace tags in the config-developer's file with actual values.""" if isinstance(paths, str): - pathset = set((paths.strip('/'), )) + pathset = set((paths.strip("/"),)) else: - pathset = set(path.strip('/') for path in paths) + pathset = set(path.strip("/") for path in paths) tlist: set[str] = set() for path in pathset: - tlist = tlist.union(re.findall(r'{([^}]*)}', path)) - if 'sub_experiment' in variable: + tlist = tlist.union(re.findall(r"{([^}]*)}", path)) + if "sub_experiment" in variable: new_paths: set[str] = set() for path in pathset: new_paths.update( - (re.sub(r'(\b{ensemble}\b)', r'{sub_experiment}-\1', path), - re.sub(r'({ensemble})', r'{sub_experiment}-\1', path))) - tlist.add('sub_experiment') + ( + re.sub(r"(\b{ensemble}\b)", r"{sub_experiment}-\1", path), + re.sub(r"({ensemble})", r"{sub_experiment}-\1", path), + ) + ) + tlist.add("sub_experiment") pathset = new_paths for tag in tlist: @@ -335,11 +357,13 @@ def _replace_tags( if tag in variable: replacewith = variable[tag] - elif tag == 'version': - replacewith = '*' + elif tag == "version": + replacewith = "*" else: - raise RecipeError(f"Dataset key '{tag}' must be specified for " - f"{variable}, check your recipe entry") + raise RecipeError( + f"Dataset key '{tag}' must be specified for " + f"{variable}, check your recipe entry" + ) pathset = _replace_tag(pathset, original_tag, replacewith) return [Path(p) for p in pathset] @@ -353,17 +377,17 @@ def _replace_tag(paths, tag, replacewith): result.extend(_replace_tag(paths, tag, item)) else: text = _apply_caps(str(replacewith), lower, upper) - result.extend(p.replace('{' + tag + '}', text) for p in paths) + result.extend(p.replace("{" + tag + "}", text) for p in paths) return list(set(result)) def _get_caps_options(tag): lower = False upper = False - if tag.endswith('.lower'): + if tag.endswith(".lower"): lower = True tag = tag[0:-6] - elif tag.endswith('.upper'): + elif tag.endswith(".upper"): upper = True tag = tag[0:-6] return tag, lower, upper @@ -391,8 +415,10 @@ def _select_drs(input_type: str, project: str, structure: str) -> list[str]: return value raise KeyError( - 'drs {} for {} project not specified in config-developer file'.format( - structure, project)) + "drs {} for {} project not specified in config-developer file".format( + structure, project + ) + ) @dataclass(order=True, frozen=True) @@ -407,8 +433,11 @@ def get_glob_patterns(self, **facets) -> list[Path]: """Compose the globs that will be used to look for files.""" dirname_globs = 
_replace_tags(self.dirname_template, facets) filename_globs = _replace_tags(self.filename_template, facets) - return sorted(self.rootpath / d / f for d in dirname_globs - for f in filename_globs) + return sorted( + self.rootpath / d / f + for d in dirname_globs + for f in filename_globs + ) def find_files(self, **facets) -> list[LocalFile]: """Find files.""" @@ -423,8 +452,8 @@ def find_files(self, **facets) -> list[LocalFile]: files.append(file) files.sort() # sorting makes it easier to see what was found - if 'timerange' in facets: - files = _select_files(files, facets['timerange']) + if "timerange" in facets: + files = _select_files(files, facets["timerange"]) return files @@ -433,51 +462,55 @@ def find_files(self, **facets) -> list[LocalFile]: def _get_data_sources(project: str) -> list[DataSource]: """Get a list of data sources.""" - rootpaths = CFG['rootpath'] - for key in (project, 'default'): + rootpaths = CFG["rootpath"] + for key in (project, "default"): if key in rootpaths: paths = rootpaths[key] nonexistent = tuple(p for p in paths if not os.path.exists(p)) if nonexistent and (key, nonexistent) not in _ROOTPATH_WARNED: logger.warning( "'%s' rootpaths '%s' set in config-user.yml do not exist", - key, ', '.join(str(p) for p in nonexistent)) + key, + ", ".join(str(p) for p in nonexistent), + ) _ROOTPATH_WARNED.add((key, nonexistent)) if isinstance(paths, list): - structure = CFG['drs'].get(project, 'default') + structure = CFG["drs"].get(project, "default") paths = {p: structure for p in paths} sources: list[DataSource] = [] for path, structure in paths.items(): - dir_templates = _select_drs('input_dir', project, structure) - file_templates = _select_drs('input_file', project, structure) + dir_templates = _select_drs("input_dir", project, structure) + file_templates = _select_drs("input_file", project, structure) sources.extend( DataSource(path, d, f) - for d in dir_templates for f in file_templates + for d in dir_templates + for f in file_templates ) return sources raise KeyError( f"No '{project}' or 'default' path specified under 'rootpath' in " - "the user configuration.") + "the user configuration." 
+ ) def _get_output_file(variable: dict[str, Any], preproc_dir: Path) -> Path: """Return the full path to the output (preprocessed) file.""" - cfg = get_project_config(variable['project']) + cfg = get_project_config(variable["project"]) # Join different experiment names - if isinstance(variable.get('exp'), (list, tuple)): + if isinstance(variable.get("exp"), (list, tuple)): variable = dict(variable) - variable['exp'] = '-'.join(variable['exp']) - outfile = _replace_tags(cfg['output_file'], variable)[0] - if 'timerange' in variable: - timerange = variable['timerange'].replace('/', '-') - outfile = Path(f'{outfile}_{timerange}') + variable["exp"] = "-".join(variable["exp"]) + outfile = _replace_tags(cfg["output_file"], variable)[0] + if "timerange" in variable: + timerange = variable["timerange"].replace("/", "-") + outfile = Path(f"{outfile}_{timerange}") outfile = Path(f"{outfile}.nc") return Path( preproc_dir, - variable.get('diagnostic', ''), - variable.get('variable_group', ''), + variable.get("diagnostic", ""), + variable.get("variable_group", ""), outfile, ) @@ -485,8 +518,13 @@ def _get_output_file(variable: dict[str, Any], preproc_dir: Path) -> Path: def _get_multiproduct_filename(attributes: dict, preproc_dir: Path) -> Path: """Get ensemble/multi-model filename depending on settings.""" relevant_keys = [ - 'project', 'dataset', 'exp', 'ensemble_statistics', - 'multi_model_statistics', 'mip', 'short_name' + "project", + "dataset", + "exp", + "ensemble_statistics", + "multi_model_statistics", + "mip", + "short_name", ] filename_segments = [] @@ -494,8 +532,8 @@ def _get_multiproduct_filename(attributes: dict, preproc_dir: Path) -> Path: if key in attributes: attribute = attributes[key] if isinstance(attribute, (list, tuple)): - attribute = '-'.join(attribute) - filename_segments.extend(attribute.split('_')) + attribute = "-".join(attribute) + filename_segments.extend(attribute.split("_")) # Remove duplicate segments: filename_segments = list(dict.fromkeys(filename_segments)) @@ -505,9 +543,9 @@ def _get_multiproduct_filename(attributes: dict, preproc_dir: Path) -> Path: outfile = Path( preproc_dir, - attributes['diagnostic'], - attributes['variable_group'], - '_'.join(filename_segments), + attributes["diagnostic"], + attributes["variable_group"], + "_".join(filename_segments), ) return outfile @@ -516,14 +554,13 @@ def _get_multiproduct_filename(attributes: dict, preproc_dir: Path) -> Path: def _path2facets(path: Path, drs: str) -> dict[str, str]: """Extract facets from a path using a DRS like '{facet1}/{facet2}'.""" keys = [] - for key in re.findall(r'{(.*?)}[^-]', f'{drs} '): - key = key.split('.')[0] # Remove trailing .lower and .upper + for key in re.findall(r"{(.*?)}[^-]", f"{drs} "): + key = key.split(".")[0] # Remove trailing .lower and .upper keys.append(key) start, end = -len(keys) - 1, -1 values = path.parts[start:end] facets = { - key: values[idx] - for idx, key in enumerate(keys) if "{" not in key + key: values[idx] for idx, key in enumerate(keys) if "{" not in key } if len(facets) != len(keys): @@ -532,13 +569,14 @@ def _path2facets(path: Path, drs: str) -> dict[str, str]: for idx, key in enumerate(keys): if key not in facets: facet1, facet2 = key.split("}-{") - facets[facet2] = values[idx].replace(f'{facets[facet1]}-', '') + facets[facet2] = values[idx].replace(f"{facets[facet1]}-", "") return facets def _filter_versions_called_latest( - files: list['LocalFile']) -> list['LocalFile']: + files: list["LocalFile"], +) -> list["LocalFile"]: """Filter out versions called 
'latest' if they are duplicates. On compute clusters it is usual to have a symbolic link to the @@ -547,26 +585,30 @@ def _filter_versions_called_latest( """ resolved_valid_versions = { f.resolve(strict=False) - for f in files if f.facets.get('version') != 'latest' + for f in files + if f.facets.get("version") != "latest" } return [ - f for f in files if f.facets.get('version') != 'latest' or f.resolve( - strict=False) not in resolved_valid_versions + f + for f in files + if f.facets.get("version") != "latest" + or f.resolve(strict=False) not in resolved_valid_versions ] -def _select_latest_version(files: list['LocalFile']) -> list['LocalFile']: +def _select_latest_version(files: list["LocalFile"]) -> list["LocalFile"]: """Select only the latest version of files.""" def filename(file): return file.name def version(file): - return file.facets.get('version', '') + return file.facets.get("version", "") result = [] - for _, group in itertools.groupby(sorted(files, key=filename), - key=filename): + for _, group in itertools.groupby( + sorted(files, key=filename), key=filename + ): duplicates = sorted(group, key=version) latest = duplicates[-1] result.append(latest) @@ -636,24 +678,24 @@ def find_files( ------- list[LocalFile] The files that were found. - """ # pylint: disable=line-too-long + """ facets = dict(facets) - if 'original_short_name' in facets: - facets['short_name'] = facets['original_short_name'] + if "original_short_name" in facets: + facets["short_name"] = facets["original_short_name"] files = [] filter_latest = False - data_sources = _get_data_sources(facets['project']) # type: ignore + data_sources = _get_data_sources(facets["project"]) # type: ignore for data_source in data_sources: for file in data_source.find_files(**facets): - if file.facets.get('version') == 'latest': + if file.facets.get("version") == "latest": filter_latest = True files.append(file) if filter_latest: files = _filter_versions_called_latest(files) - if 'version' not in facets: + if "version" not in facets: files = _select_latest_version(files) files.sort() # sorting makes it easier to see what was found @@ -678,7 +720,7 @@ def facets(self) -> Facets: When using :func:`find_files`, facets are read from the directory structure. Facets stored in filenames are not yet supported. 
""" - if not hasattr(self, '_facets'): + if not hasattr(self, "_facets"): self._facets: Facets = {} return self._facets diff --git a/esmvalcore/preprocessor/__init__.py b/esmvalcore/preprocessor/__init__.py index 3800fb1413..3429078a5d 100644 --- a/esmvalcore/preprocessor/__init__.py +++ b/esmvalcore/preprocessor/__init__.py @@ -1,4 +1,5 @@ """Preprocessor module.""" + from __future__ import annotations import copy @@ -94,115 +95,115 @@ __all__ = [ # File reformatting/CMORization - 'fix_file', + "fix_file", # Load cubes from file - 'load', + "load", # Metadata reformatting/CMORization - 'fix_metadata', + "fix_metadata", # Concatenate all cubes in one - 'concatenate', - 'cmor_check_metadata', + "concatenate", + "cmor_check_metadata", # Extract years given by dataset keys (start_year and end_year) - 'clip_timerange', + "clip_timerange", # Data reformatting/CMORization - 'fix_data', - 'cmor_check_data', + "fix_data", + "cmor_check_data", # Attach ancillary variables and cell measures - 'add_supplementary_variables', + "add_supplementary_variables", # Derive variable - 'derive', + "derive", # Time extraction (as defined in the preprocessor section) - 'extract_time', - 'extract_season', - 'extract_month', - 'resample_hours', - 'resample_time', + "extract_time", + "extract_season", + "extract_month", + "resample_hours", + "resample_time", # Level extraction - 'extract_levels', + "extract_levels", # Weighting - 'weighting_landsea_fraction', + "weighting_landsea_fraction", # Mask landsea (fx or Natural Earth) - 'mask_landsea', + "mask_landsea", # Natural Earth only - 'mask_glaciated', + "mask_glaciated", # Mask landseaice, sftgif only - 'mask_landseaice', + "mask_landseaice", # Regridding - 'regrid', + "regrid", # Point interpolation - 'extract_coordinate_points', - 'extract_point', - 'extract_location', + "extract_coordinate_points", + "extract_point", + "extract_location", # Masking missing values - 'mask_multimodel', - 'mask_fillvalues', - 'mask_above_threshold', - 'mask_below_threshold', - 'mask_inside_range', - 'mask_outside_range', + "mask_multimodel", + "mask_fillvalues", + "mask_above_threshold", + "mask_below_threshold", + "mask_inside_range", + "mask_outside_range", # Other - 'clip', - 'rolling_window_statistics', + "clip", + "rolling_window_statistics", # Region selection - 'extract_region', - 'extract_shape', - 'extract_volume', - 'extract_trajectory', - 'extract_transect', - 'detrend', - 'extract_named_regions', - 'axis_statistics', - 'depth_integration', - 'area_statistics', - 'volume_statistics', + "extract_region", + "extract_shape", + "extract_volume", + "extract_trajectory", + "extract_transect", + "detrend", + "extract_named_regions", + "axis_statistics", + "depth_integration", + "area_statistics", + "volume_statistics", # Time operations - 'local_solar_time', - 'amplitude', - 'zonal_statistics', - 'meridional_statistics', - 'accumulate_coordinate', - 'hourly_statistics', - 'daily_statistics', - 'monthly_statistics', - 'seasonal_statistics', - 'annual_statistics', - 'decadal_statistics', - 'climate_statistics', - 'anomalies', - 'regrid_time', - 'timeseries_filter', - 'linear_trend', - 'linear_trend_stderr', + "local_solar_time", + "amplitude", + "zonal_statistics", + "meridional_statistics", + "accumulate_coordinate", + "hourly_statistics", + "daily_statistics", + "monthly_statistics", + "seasonal_statistics", + "annual_statistics", + "decadal_statistics", + "climate_statistics", + "anomalies", + "regrid_time", + "timeseries_filter", + "linear_trend", + "linear_trend_stderr", # 
Convert units - 'convert_units', + "convert_units", # Histograms - 'histogram', + "histogram", # Ensemble statistics - 'ensemble_statistics', + "ensemble_statistics", # Multi model statistics - 'multi_model_statistics', + "multi_model_statistics", # Comparison with reference datasets - 'bias', - 'distance_metric', + "bias", + "distance_metric", # Remove supplementary variables from cube - 'remove_supplementary_variables', + "remove_supplementary_variables", # Save to file - 'save', + "save", ] TIME_PREPROCESSORS = [ - 'clip_timerange', - 'extract_time', - 'extract_season', - 'extract_month', - 'daily_statistics', - 'monthly_statistics', - 'seasonal_statistics', - 'annual_statistics', - 'decadal_statistics', - 'climate_statistics', - 'anomalies', - 'regrid_time', + "clip_timerange", + "extract_time", + "extract_season", + "extract_month", + "daily_statistics", + "monthly_statistics", + "seasonal_statistics", + "annual_statistics", + "decadal_statistics", + "climate_statistics", + "anomalies", + "regrid_time", ] DEFAULT_ORDER = tuple(__all__) @@ -211,18 +212,20 @@ """ # The order of initial and final steps cannot be configured -INITIAL_STEPS = DEFAULT_ORDER[:DEFAULT_ORDER.index( - 'add_supplementary_variables') + 1] -FINAL_STEPS = DEFAULT_ORDER[DEFAULT_ORDER.index( - 'remove_supplementary_variables'):] +INITIAL_STEPS = DEFAULT_ORDER[ + : DEFAULT_ORDER.index("add_supplementary_variables") + 1 +] +FINAL_STEPS = DEFAULT_ORDER[ + DEFAULT_ORDER.index("remove_supplementary_variables") : +] MULTI_MODEL_FUNCTIONS = { - 'bias', - 'distance_metric', - 'ensemble_statistics', - 'multi_model_statistics', - 'mask_multimodel', - 'mask_fillvalues', + "bias", + "distance_metric", + "ensemble_statistics", + "multi_model_statistics", + "mask_multimodel", + "mask_fillvalues", } @@ -250,8 +253,10 @@ def check_preprocessor_settings(settings): # functools.wraps). 
signature = inspect.signature(function) args = [ - n for (n, p) in signature.parameters.items() if - p.kind in ( + n + for (n, p) in signature.parameters.items() + if p.kind + in ( inspect.Parameter.POSITIONAL_ONLY, inspect.Parameter.POSITIONAL_OR_KEYWORD, ) @@ -260,10 +265,12 @@ def check_preprocessor_settings(settings): # Check for invalid arguments (only possible if no *args or **kwargs # allowed) var_kinds = [p.kind for p in signature.parameters.values()] - check_args = not any([ - inspect.Parameter.VAR_POSITIONAL in var_kinds, - inspect.Parameter.VAR_KEYWORD in var_kinds, - ]) + check_args = not any( + [ + inspect.Parameter.VAR_POSITIONAL in var_kinds, + inspect.Parameter.VAR_KEYWORD in var_kinds, + ] + ) if check_args: invalid_args = set(settings[step]) - set(args) if invalid_args: @@ -275,7 +282,8 @@ def check_preprocessor_settings(settings): # Check for missing arguments defaults = [ - p.default for p in signature.parameters.values() + p.default + for p in signature.parameters.values() if p.default is not inspect.Parameter.empty ] end = None if not defaults else -len(defaults) @@ -291,15 +299,19 @@ def check_preprocessor_settings(settings): signature.bind(None, **settings[step]) except TypeError: logger.error( - "Wrong preprocessor function arguments in " - "function '%s'", step) + "Wrong preprocessor function arguments in function '%s'", + step, + ) raise def _check_multi_model_settings(products): """Check that multi dataset settings are identical for all products.""" - multi_model_steps = (step for step in MULTI_MODEL_FUNCTIONS - if any(step in p.settings for p in products)) + multi_model_steps = ( + step + for step in MULTI_MODEL_FUNCTIONS + if any(step in p.settings for p in products) + ) for step in multi_model_steps: reference = None for product in products: @@ -332,16 +344,22 @@ def _get_multi_model_settings(products, step): def _run_preproc_function(function, items, kwargs, input_files=None): """Run preprocessor function.""" kwargs_str = ",\n".join( - [f"{k} = {pformat(v)}" for (k, v) in kwargs.items()]) + [f"{k} = {pformat(v)}" for (k, v) in kwargs.items()] + ) if input_files is None: file_msg = "" else: - file_msg = (f"\nloaded from original input file(s)\n" - f"{pformat(input_files)}") + file_msg = ( + f"\nloaded from original input file(s)\n{pformat(input_files)}" + ) logger.debug( "Running preprocessor function '%s' on the data\n%s%s\nwith function " - "argument(s)\n%s", function.__name__, pformat(items), file_msg, - kwargs_str) + "argument(s)\n%s", + function.__name__, + pformat(items), + file_msg, + kwargs_str, + ) try: return function(items, **kwargs) except Exception: @@ -350,10 +368,12 @@ def _run_preproc_function(function, items, kwargs, input_files=None): n_shown_args = 4 if input_files is not None and len(input_files) > n_shown_args: n_not_shown_files = len(input_files) - n_shown_args - file_msg = (f"\nloaded from original input file(s)\n" - f"{pformat(input_files[:n_shown_args])}\n(and " - f"{n_not_shown_files:d} further file(s) not shown " - f"here; refer to the debug log for a full list)") + file_msg = ( + f"\nloaded from original input file(s)\n" + f"{pformat(input_files[:n_shown_args])}\n(and " + f"{n_not_shown_files:d} further file(s) not shown " + f"here; refer to the debug log for a full list)" + ) # Make sure that the arguments are indexable if isinstance(items, (PreprocessorFile, Cube, str, Path)): @@ -365,23 +385,24 @@ def _run_preproc_function(function, items, kwargs, input_files=None): data_msg = pformat(items) else: n_not_shown_args = len(items) - 
n_shown_args - data_msg = (f"{pformat(items[:n_shown_args])}\n(and " - f"{n_not_shown_args:d} further argument(s) not shown " - f"here; refer to the debug log for a full list)") + data_msg = ( + f"{pformat(items[:n_shown_args])}\n(and " + f"{n_not_shown_args:d} further argument(s) not shown " + f"here; refer to the debug log for a full list)" + ) logger.error( "Failed to run preprocessor function '%s' on the data\n%s%s\nwith " - "function argument(s)\n%s", function.__name__, data_msg, file_msg, - kwargs_str) + "function argument(s)\n%s", + function.__name__, + data_msg, + file_msg, + kwargs_str, + ) raise def preprocess( - items, - step, - input_files=None, - output_file=None, - debug=False, - **settings + items, step, input_files=None, output_file=None, debug=False, **settings ): """Run preprocessor.""" logger.debug("Running preprocessor step %s", step) @@ -393,13 +414,19 @@ def preprocess( item.data = item.core_data().rechunk() result = [] - if itype.endswith('s'): - result.append(_run_preproc_function(function, items, settings, - input_files=input_files)) + if itype.endswith("s"): + result.append( + _run_preproc_function( + function, items, settings, input_files=input_files + ) + ) else: for item in items: - result.append(_run_preproc_function(function, item, settings, - input_files=input_files)) + result.append( + _run_preproc_function( + function, item, settings, input_files=input_files + ) + ) items = [] for item in result: @@ -421,7 +448,7 @@ def get_step_blocks(steps, order): """Group steps into execution blocks.""" blocks = [] prev_step_type = None - for step in order[len(INITIAL_STEPS):-len(FINAL_STEPS)]: + for step in order[len(INITIAL_STEPS) : -len(FINAL_STEPS)]: if step in steps: step_type = step in MULTI_MODEL_FUNCTIONS if step_type is not prev_step_type: @@ -467,11 +494,11 @@ def __init__( if attributes is None: attributes = {} attributes = copy.deepcopy(attributes) - if 'save' not in self.settings: - self.settings['save'] = {} - self.settings['save']['filename'] = filename + if "save" not in self.settings: + self.settings["save"] = {} + self.settings["save"]["filename"] = filename - attributes['filename'] = filename + attributes["filename"] = filename super().__init__( filename=filename, @@ -489,11 +516,14 @@ def apply(self, step: str, debug: bool = False): raise ValueError( f"PreprocessorFile {self} has no settings for step {step}" ) - self.cubes = preprocess(self.cubes, step, - input_files=self._input_files, - output_file=self.filename, - debug=debug, - **self.settings[step]) + self.cubes = preprocess( + self.cubes, + step, + input_files=self._input_files, + output_file=self.filename, + debug=debug, + **self.settings[step], + ) @property def cubes(self): @@ -508,10 +538,12 @@ def cubes(self, value): def save(self): """Save cubes to disk.""" - preprocess(self._cubes, - 'save', - input_files=self._input_files, - **self.settings['save']) + preprocess( + self._cubes, + "save", + input_files=self._input_files, + **self.settings["save"], + ) def close(self): """Close the file.""" @@ -529,20 +561,20 @@ def _update_attributes(self): # Names names = { - 'standard_name': 'standard_name', - 'long_name': 'long_name', - 'var_name': 'short_name', + "standard_name": "standard_name", + "long_name": "long_name", + "var_name": "short_name", } - for (name_in, name_out) in names.items(): + for name_in, name_out in names.items(): cube_val = getattr(ref_cube, name_in) - self.attributes[name_out] = '' if cube_val is None else cube_val + self.attributes[name_out] = "" if cube_val is None else 
cube_val # Units - self.attributes['units'] = str(ref_cube.units) + self.attributes["units"] = str(ref_cube.units) # Frequency - if 'frequency' in ref_cube.attributes: - self.attributes['frequency'] = ref_cube.attributes['frequency'] + if "frequency" in ref_cube.attributes: + self.attributes["frequency"] = ref_cube.attributes["frequency"] @property def is_closed(self): @@ -553,8 +585,7 @@ def _initialize_entity(self): """Initialize the provenance entity representing the file.""" super()._initialize_entity() settings = { - 'preprocessor:' + k: str(v) - for k, v in self.settings.items() + "preprocessor:" + k: str(v) for k, v in self.settings.items() } self.entity.add_attributes(settings) @@ -565,7 +596,7 @@ def group(self, keys: list) -> str: values from .attributes """ if not keys: - return '' + return "" if isinstance(keys, str): keys = [keys] @@ -575,18 +606,21 @@ def group(self, keys: list) -> str: attribute = self.attributes.get(key) if attribute: if isinstance(attribute, (list, tuple)): - attribute = '-'.join(attribute) + attribute = "-".join(attribute) identifier.append(attribute) - return '_'.join(identifier) + return "_".join(identifier) def _apply_multimodel(products, step, debug): """Apply multi model step to products.""" settings, exclude = _get_multi_model_settings(products, step) - logger.debug("Applying %s to\n%s", step, - '\n'.join(str(p) for p in products - exclude)) + logger.debug( + "Applying %s to\n%s", + step, + "\n".join(str(p) for p in products - exclude), + ) result = preprocess(products - exclude, step, **settings) products = set(result) | exclude @@ -606,7 +640,7 @@ class PreprocessingTask(BaseTask): def __init__( self, products: Iterable[PreprocessorFile], - name: str = '', + name: str = "", order: Iterable[str] = DEFAULT_ORDER, debug: bool | None = None, write_ncl_interface: bool = False, @@ -631,7 +665,7 @@ def _initialize_multiproduct_provenance(self, step): for input_product in input_products: step_settings = input_product.settings[step] - output_products = step_settings.get('output_products', {}) + output_products = step_settings.get("output_products", {}) for product in output_products.values(): statistic_products.update(product.values()) @@ -640,12 +674,12 @@ def _initialize_multiproduct_provenance(self, step): def _initialize_multimodel_provenance(self): """Initialize provenance for multi-model statistics.""" - step = 'multi_model_statistics' + step = "multi_model_statistics" self._initialize_multiproduct_provenance(step) def _initialize_ensemble_provenance(self): """Initialize provenance for ensemble statistics.""" - step = 'ensemble_statistics' + step = "ensemble_statistics" self._initialize_multiproduct_provenance(step) def _get_input_products(self, step): @@ -664,8 +698,7 @@ def _run(self, _): self._initialize_product_provenance() steps = { - step - for product in self.products for step in product.settings + step for product in self.products for step in product.settings } blocks = get_step_blocks(steps, self.order) @@ -674,8 +707,9 @@ def _run(self, _): logger.debug("Running block %s", block) if block[0] in MULTI_MODEL_FUNCTIONS: for step in block: - self.products = _apply_multimodel(self.products, step, - self.debug) + self.products = _apply_multimodel( + self.products, step, self.debug + ) else: for product in _sort_products(self.products): logger.debug("Applying single-model steps to %s", product) @@ -692,25 +726,34 @@ def _run(self, _): product.cubes # pylint: disable=pointless-statement product.close() - metadata_files = 
write_metadata(self.products, - self.write_ncl_interface) + metadata_files = write_metadata( + self.products, self.write_ncl_interface + ) return metadata_files def __str__(self): """Get human readable description.""" order = [ - step for step in self.order + step + for step in self.order if any(step in product.settings for product in self.products) ] - products = '\n\n'.join('\n'.join([ - str(p), - 'input files: ' + pformat(p._input_files), - 'settings: ' + pformat(p.settings), - ]) for p in self.products) - txt = "\n".join([ - f"{self.__class__.__name__}: {self.name}", - f"order: {order}", - f"{products}", - self.print_ancestors(), - ]) + products = "\n\n".join( + "\n".join( + [ + str(p), + "input files: " + pformat(p._input_files), + "settings: " + pformat(p.settings), + ] + ) + for p in self.products + ) + txt = "\n".join( + [ + f"{self.__class__.__name__}: {self.name}", + f"order: {order}", + f"{products}", + self.print_ancestors(), + ] + ) return txt diff --git a/esmvalcore/preprocessor/_area.py b/esmvalcore/preprocessor/_area.py index 3fcbe1e54f..a47ca29892 100644 --- a/esmvalcore/preprocessor/_area.py +++ b/esmvalcore/preprocessor/_area.py @@ -3,10 +3,10 @@ Allows for selecting data subsets using certain latitude and longitude bounds; selecting geographical regions; constructing area averages; etc. """ + from __future__ import annotations import logging -import warnings from pathlib import Path from typing import TYPE_CHECKING, Iterable, Literal, Optional @@ -16,17 +16,15 @@ import shapely import shapely.ops from dask import array as da -from iris.coords import AuxCoord, CellMeasure +from iris.coords import AuxCoord from iris.cube import Cube, CubeList -from iris.exceptions import CoordinateMultiDimError, CoordinateNotFoundError +from iris.exceptions import CoordinateNotFoundError -from esmvalcore.iris_helpers import has_regular_grid -from esmvalcore.preprocessor._regrid import broadcast_to_shape from esmvalcore.preprocessor._shared import ( get_iris_aggregator, get_normalized_cube, - guess_bounds, preserve_float_dtype, + try_adding_calculated_cell_area, update_weights_kwargs, ) from esmvalcore.preprocessor._supplementary_vars import ( @@ -41,7 +39,7 @@ logger = logging.getLogger(__name__) -SHAPE_ID_KEYS: tuple[str, ...] = ('name', 'NAME', 'Name', 'id', 'ID') +SHAPE_ID_KEYS: tuple[str, ...] 
= ("name", "NAME", "Name", "id", "ID") def extract_region( @@ -79,11 +77,11 @@ def extract_region( cell_measures = cube.cell_measures() ancil_vars = cube.ancillary_variables() - if abs(start_latitude) > 90.: + if abs(start_latitude) > 90.0: raise ValueError(f"Invalid start_latitude: {start_latitude}") - if abs(end_latitude) > 90.: + if abs(end_latitude) > 90.0: raise ValueError(f"Invalid end_latitude: {end_latitude}") - if cube.coord('latitude').ndim == 1: + if cube.coord("latitude").ndim == 1: # Iris check if any point of the cell is inside the region # To check only the center, ignore_bounds must be set to # True (default) is False @@ -110,10 +108,12 @@ def extract_region( def _extract_region_from_dim_metadata(dim_metadata, dim_metadata_dims): """Extract region from dimensional metadata.""" - idx = tuple(( - slice(None) if d in dim_metadata_dims else 0 - for d in range(cube.ndim) - )) + idx = tuple( + ( + slice(None) if d in dim_metadata_dims else 0 + for d in range(cube.ndim) + ) + ) subcube = cube[idx].copy(dim_metadata.core_data()) for sub_cm in subcube.cell_measures(): subcube.remove_cell_measure(sub_cm) @@ -153,18 +153,19 @@ def _extract_region_from_dim_metadata(dim_metadata, dim_metadata_dims): return region_subset -def _extract_irregular_region(cube, start_longitude, end_longitude, - start_latitude, end_latitude): +def _extract_irregular_region( + cube, start_longitude, end_longitude, start_latitude, end_latitude +): """Extract a region from a cube on an irregular grid.""" # Convert longitudes to valid range - if start_longitude != 360.: - start_longitude %= 360. - if end_longitude != 360.: - end_longitude %= 360. + if start_longitude != 360.0: + start_longitude %= 360.0 + if end_longitude != 360.0: + end_longitude %= 360.0 # Select coordinates inside the region - lats = cube.coord('latitude').points - lons = (cube.coord('longitude').points + 360.) % 360. + lats = cube.coord("latitude").points + lons = (cube.coord("longitude").points + 360.0) % 360.0 if start_longitude <= end_longitude: select_lons = (lons >= start_longitude) & (lons <= end_longitude) else: @@ -196,8 +197,8 @@ def _extract_irregular_region(cube, start_longitude, end_longitude, def zonal_statistics( cube: Cube, operator: str, - normalize: Optional[Literal['subtract', 'divide']] = None, - **operator_kwargs + normalize: Optional[Literal["subtract", "divide"]] = None, + **operator_kwargs, ) -> Cube: """Compute zonal statistics. @@ -231,12 +232,12 @@ def zonal_statistics( Zonal statistics not yet implemented for irregular grids. """ - if cube.coord('longitude').points.ndim >= 2: + if cube.coord("longitude").points.ndim >= 2: raise ValueError( "Zonal statistics on irregular grids not yet implemented" ) (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) - result = cube.collapsed('longitude', agg, **agg_kwargs) + result = cube.collapsed("longitude", agg, **agg_kwargs) if normalize is not None: result = get_normalized_cube(cube, result, normalize) return result @@ -246,7 +247,7 @@ def zonal_statistics( def meridional_statistics( cube: Cube, operator: str, - normalize: Optional[Literal['subtract', 'divide']] = None, + normalize: Optional[Literal["subtract", "divide"]] = None, **operator_kwargs, ) -> Cube: """Compute meridional statistics. @@ -280,132 +281,26 @@ def meridional_statistics( Zonal statistics not yet implemented for irregular grids. 
""" - if cube.coord('latitude').points.ndim >= 2: + if cube.coord("latitude").points.ndim >= 2: raise ValueError( "Meridional statistics on irregular grids not yet implemented" ) (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) - result = cube.collapsed('latitude', agg, **agg_kwargs) + result = cube.collapsed("latitude", agg, **agg_kwargs) if normalize is not None: result = get_normalized_cube(cube, result, normalize) return result -def compute_area_weights(cube): - """Compute area weights.""" - with warnings.catch_warnings(record=True) as caught_warnings: - warnings.filterwarnings( - 'always', - message="Using DEFAULT_SPHERICAL_EARTH_RADIUS.", - category=UserWarning, - module='iris.analysis.cartography', - ) - # TODO: replace the following line with - # weights = iris.analysis.cartography.area_weights( - # cube, compute=not cube.has_lazy_data() - # ) - # once https://github.com/SciTools/iris/pull/5658 is available - weights = _get_area_weights(cube) - - for warning in caught_warnings: - logger.debug( - "%s while computing area weights of the following cube:\n%s", - warning.message, cube) - return weights - - -def _get_area_weights(cube: Cube) -> np.ndarray | da.Array: - """Get area weights. - - For non-lazy data, simply use the according iris function. For lazy data, - calculate area weights for a single lat-lon slice and broadcast it to the - correct shape. - - Note - ---- - This is a temporary workaround to get lazy area weights. Can be removed - once https://github.com/SciTools/iris/pull/5658 is available. - - """ - if not cube.has_lazy_data(): - return iris.analysis.cartography.area_weights(cube) - - lat_lon_dims = sorted( - tuple(set(cube.coord_dims('latitude') + cube.coord_dims('longitude'))) - ) - lat_lon_slice = next(cube.slices(['latitude', 'longitude'], ordered=False)) - weights_2d = iris.analysis.cartography.area_weights(lat_lon_slice) - weights = broadcast_to_shape( - da.array(weights_2d), - cube.shape, - lat_lon_dims, - chunks=cube.lazy_data().chunks, - ) - return weights - - -def _try_adding_calculated_cell_area(cube: Cube) -> None: - """Try to add calculated cell measure 'cell_area' to cube (in-place).""" - if cube.cell_measures('cell_area'): - return - - logger.debug( - "Found no cell measure 'cell_area' in cube %s. 
Check availability of " - "supplementary variables", - cube.summary(shorten=True), - ) - logger.debug("Attempting to calculate grid cell area") - - rotated_pole_grid = all([ - cube.coord('latitude').core_points().ndim == 2, - cube.coord('longitude').core_points().ndim == 2, - cube.coords('grid_latitude'), - cube.coords('grid_longitude'), - ]) - - # For regular grids, calculate grid cell areas with iris function - if has_regular_grid(cube): - cube = guess_bounds(cube, ['latitude', 'longitude']) - logger.debug("Calculating grid cell areas for regular grid") - cell_areas = compute_area_weights(cube) - - # For rotated pole grids, use grid_latitude and grid_longitude to calculate - # grid cell areas - elif rotated_pole_grid: - cube = guess_bounds(cube, ['grid_latitude', 'grid_longitude']) - cube_tmp = cube.copy() - cube_tmp.remove_coord('latitude') - cube_tmp.coord('grid_latitude').rename('latitude') - cube_tmp.remove_coord('longitude') - cube_tmp.coord('grid_longitude').rename('longitude') - logger.debug("Calculating grid cell areas for rotated pole grid") - cell_areas = compute_area_weights(cube_tmp) - - # For all other cases, grid cell areas cannot be calculated - else: - logger.error( - "Supplementary variables are needed to calculate grid cell " - "areas for irregular or unstructured grid of cube %s", - cube.summary(shorten=True), - ) - raise CoordinateMultiDimError(cube.coord('latitude')) - - # Add new cell measure - cell_measure = CellMeasure( - cell_areas, standard_name='cell_area', units='m2', measure='area', - ) - cube.add_cell_measure(cell_measure, np.arange(cube.ndim)) - - @register_supplementaries( - variables=['areacella', 'areacello'], - required='prefer_at_least_one', + variables=["areacella", "areacello"], + required="prefer_at_least_one", ) @preserve_float_dtype def area_statistics( cube: Cube, operator: str, - normalize: Optional[Literal['subtract', 'divide']] = None, + normalize: Optional[Literal["subtract", "divide"]] = None, **operator_kwargs, ) -> Cube: """Apply a statistical operator in the horizontal plane. @@ -449,21 +344,21 @@ def area_statistics( `cell_area` is not available. """ - has_cell_measure = bool(cube.cell_measures('cell_area')) + has_cell_measure = bool(cube.cell_measures("cell_area")) # Get aggregator and correct kwargs (incl. 
weights) (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) agg_kwargs = update_weights_kwargs( - agg, agg_kwargs, 'cell_area', cube, _try_adding_calculated_cell_area + agg, agg_kwargs, "cell_area", cube, try_adding_calculated_cell_area ) - result = cube.collapsed(['latitude', 'longitude'], agg, **agg_kwargs) + result = cube.collapsed(["latitude", "longitude"], agg, **agg_kwargs) if normalize is not None: result = get_normalized_cube(cube, result, normalize) # Make sure input cube has not been modified - if not has_cell_measure and cube.cell_measures('cell_area'): - cube.remove_cell_measure('cell_area') + if not has_cell_measure and cube.cell_measures("cell_area"): + cube.remove_cell_measure("cell_area") return result @@ -500,13 +395,17 @@ def extract_named_regions(cube: Cube, regions: str | Iterable[str]) -> Cube: if not isinstance(regions, (list, tuple, set)): raise TypeError( - 'Regions "{}" is not an acceptable format.'.format(regions)) + 'Regions "{}" is not an acceptable format.'.format(regions) + ) - available_regions = set(cube.coord('region').points) + available_regions = set(cube.coord("region").points) invalid_regions = set(regions) - available_regions if invalid_regions: - raise ValueError('Region(s) "{}" not in cube region(s): {}'.format( - invalid_regions, available_regions)) + raise ValueError( + 'Region(s) "{}" not in cube region(s): {}'.format( + invalid_regions, available_regions + ) + ) constraints = iris.Constraint(region=lambda r: r in regions) cube = cube.extract(constraint=constraints) @@ -522,36 +421,37 @@ def _crop_cube( cmor_coords: bool = True, ) -> Cube: """Crop cubes on a regular grid.""" - lon_coord = cube.coord(axis='X') - lat_coord = cube.coord(axis='Y') + lon_coord = cube.coord(axis="X") + lat_coord = cube.coord(axis="Y") if lon_coord.ndim == 1 and lat_coord.ndim == 1: # add a padding of one cell around the cropped cube lon_bound = lon_coord.core_bounds()[0] lon_step = lon_bound[1] - lon_bound[0] start_longitude -= lon_step if not cmor_coords: - if start_longitude < -180.: - start_longitude = -180. + if start_longitude < -180.0: + start_longitude = -180.0 else: if start_longitude < 0: start_longitude = 0 end_longitude += lon_step if not cmor_coords: - if end_longitude > 180.: - end_longitude = 180. + if end_longitude > 180.0: + end_longitude = 180.0 else: if end_longitude > 360: - end_longitude = 360. + end_longitude = 360.0 lat_bound = lat_coord.core_bounds()[0] lat_step = lat_bound[1] - lat_bound[0] start_latitude -= lat_step if start_latitude < -90: - start_latitude = -90. + start_latitude = -90.0 end_latitude += lat_step - if end_latitude > 90.: - end_latitude = 90. 
- cube = extract_region(cube, start_longitude, end_longitude, - start_latitude, end_latitude) + if end_latitude > 90.0: + end_latitude = 90.0 + cube = extract_region( + cube, start_longitude, end_longitude, start_latitude, end_latitude + ) return cube @@ -563,7 +463,8 @@ def _select_representative_point( """Get mask to select a representative point.""" representative_point = shape.representative_point() points = shapely.geometry.MultiPoint( - np.stack((np.ravel(lon), np.ravel(lat)), axis=1)) + np.stack((np.ravel(lon), np.ravel(lat)), axis=1) + ) nearest_point = shapely.ops.nearest_points(points, representative_point)[0] nearest_lon, nearest_lat = nearest_point.coords[0] mask = (lon == nearest_lon) & (lat == nearest_lat) @@ -577,23 +478,23 @@ def _correct_coords_from_shapefile( pad_hawaii: bool, ) -> tuple[np.ndarray, np.ndarray]: """Get correct lat and lon from shapefile.""" - lon = cube.coord(axis='X').points - lat = cube.coord(axis='Y').points - if cube.coord(axis='X').ndim < 2: + lon = cube.coord(axis="X").points + lat = cube.coord(axis="Y").points + if cube.coord(axis="X").ndim < 2: lon, lat = np.meshgrid(lon, lat, copy=False) if not cmor_coords: # Wrap around longitude coordinate to match data lon = lon.copy() # ValueError: assignment destination is read-only - lon[lon >= 180.] -= 360. + lon[lon >= 180.0] -= 360.0 # the NE mask may not have points at x = -180 and y = +/-90 # so we will fool it and apply the mask at (-179, -89, 89) instead if pad_hawaii: - lon = np.where(lon == -180., lon + 1., lon) + lon = np.where(lon == -180.0, lon + 1.0, lon) if pad_north_pole: - lat_0 = np.where(lat == -90., lat + 1., lat) - lat = np.where(lat_0 == 90., lat_0 - 1., lat_0) + lat_0 = np.where(lat == -90.0, lat + 1.0, lat) + lat = np.where(lat_0 == 90.0, lat_0 - 1.0, lat_0) return lon, lat @@ -610,12 +511,12 @@ def _process_ids(geometries, ids: list | dict | None) -> tuple: ) key = list(ids.keys())[0] for geometry in geometries: - if key not in geometry['properties']: + if key not in geometry["properties"]: raise ValueError( f"Geometry {dict(geometry['properties'])} does not have " f"requested attribute {key}" ) - id_keys: tuple[str, ...] = (key, ) + id_keys: tuple[str, ...] = (key,) ids = ids[key] # Otherwise, use SHAPE_ID_KEYS to get ID @@ -641,10 +542,10 @@ def _get_requested_geometries( # Iterate through all geometries and select matching elements requested_geometries = {} - for (reading_order, geometry) in enumerate(geometries): + for reading_order, geometry in enumerate(geometries): for key in id_keys: - if key in geometry['properties']: - geometry_id = str(geometry['properties'][key]) + if key in geometry["properties"]: + geometry_id = str(geometry["properties"][key]) break # If none of the attributes are available in the geometry, use reading @@ -675,17 +576,18 @@ def _get_masks_from_geometries( geometries: dict[str, dict], lon: np.ndarray, lat: np.ndarray, - method: str = 'contains', + method: str = "contains", decomposed: bool = False, ) -> dict[str, np.ndarray]: """Get cube masks from requested regions.""" - if method not in {'contains', 'representative'}: + if method not in {"contains", "representative"}: raise ValueError( "Invalid value for `method`. 
Choose from 'contains', ", - "'representative'.") + "'representative'.", + ) masks = {} - for (id_, geometry) in geometries.items(): + for id_, geometry in geometries.items(): masks[id_] = _get_single_mask(lon, lat, method, geometry) if not decomposed and len(masks) > 1: @@ -726,10 +628,10 @@ def _get_single_mask( geometry: dict, ) -> np.ndarray: """Get single mask from one region.""" - shape = shapely.geometry.shape(geometry['geometry']) - if method == 'contains': + shape = shapely.geometry.shape(geometry["geometry"]) + if method == "contains": mask = shapely.vectorized.contains(shape, lon, lat) - if method == 'representative' or not mask.any(): + if method == "representative" or not mask.any(): mask = _select_representative_point(shape, lon, lat) return mask @@ -742,7 +644,7 @@ def _merge_masks( merged_mask = np.zeros(shape, dtype=bool) for mask in masks.values(): merged_mask |= mask - return {'0': merged_mask} + return {"0": merged_mask} def fix_coordinate_ordering(cube: Cube) -> Cube: @@ -766,11 +668,11 @@ def fix_coordinate_ordering(cube: Cube) -> Cube: """ try: - time_dim = cube.coord_dims('time') + time_dim = cube.coord_dims("time") except CoordinateNotFoundError: time_dim = () try: - shape_dim = cube.coord_dims('shape_id') + shape_dim = cube.coord_dims("shape_id") except CoordinateNotFoundError: shape_dim = () @@ -801,13 +703,13 @@ def _update_shapefile_path( # Try path relative to auxiliary_data_dir if session is given if session is not None: - shapefile_path = session['auxiliary_data_dir'] / shapefile + shapefile_path = session["auxiliary_data_dir"] / shapefile logger.debug("extract_shape: Looking for shapefile %s", shapefile_path) if shapefile_path.exists(): return shapefile_path # Try path relative to esmvalcore/preprocessor/shapefiles/ - shapefile_path = Path(__file__).parent / 'shapefiles' / shapefile + shapefile_path = Path(__file__).parent / "shapefiles" / shapefile logger.debug("extract_shape: Looking for shapefile %s", shapefile_path) if shapefile_path.exists(): return shapefile_path @@ -816,7 +718,7 @@ def _update_shapefile_path( # esmvalcore/preprocessor/shapefiles/ again # Note: this will find "special" shapefiles like 'ar6' shapefile_path = ( - Path(__file__).parent / 'shapefiles' / f"{shapefile.lower()}.shp" + Path(__file__).parent / "shapefiles" / f"{shapefile.lower()}.shp" ) if shapefile_path.exists(): return shapefile_path @@ -829,7 +731,7 @@ def _update_shapefile_path( def extract_shape( cube: Cube, shapefile: str | Path, - method: str = 'contains', + method: str = "contains", crop: bool = True, decomposed: bool = False, ids: Optional[list | dict] = None, @@ -891,7 +793,6 @@ def extract_shape( """ shapefile = _update_shapefile_path(shapefile) with fiona.open(shapefile) as geometries: - # Get parameters specific to the shapefile (NE used case e.g. # longitudes [-180, 180] or latitude missing or overflowing edges) cmor_coords = True @@ -899,9 +800,9 @@ def extract_shape( pad_hawaii = False if geometries.bounds[0] < 0: cmor_coords = False - if geometries.bounds[1] > -90. and geometries.bounds[1] < -85.: + if geometries.bounds[1] > -90.0 and geometries.bounds[1] < -85.0: pad_north_pole = True - if geometries.bounds[0] > -180. 
and geometries.bounds[0] < 179.: + if geometries.bounds[0] > -180.0 and geometries.bounds[0] < 179.0: pad_hawaii = True requested_geometries = _get_requested_geometries( @@ -942,7 +843,7 @@ def extract_shape( # Remove dummy scalar coordinate if final cube is not decomposed if not decomposed: - result.remove_coord('shape_id') + result.remove_coord("shape_id") return result @@ -954,7 +855,7 @@ def _mask_cube(cube: Cube, masks: dict[str, np.ndarray]) -> Cube: _cube = cube.copy() remove_supplementary_variables(_cube) _cube.add_aux_coord( - AuxCoord(id_, units='no_unit', long_name='shape_id') + AuxCoord(id_, units="no_unit", long_name="shape_id") ) mask = da.broadcast_to(mask, _cube.shape) _cube.data = da.ma.masked_where(~mask, _cube.core_data()) diff --git a/esmvalcore/preprocessor/_compare_with_refs.py b/esmvalcore/preprocessor/_compare_with_refs.py index 1634fc1752..b4cb632dea 100644 --- a/esmvalcore/preprocessor/_compare_with_refs.py +++ b/esmvalcore/preprocessor/_compare_with_refs.py @@ -1,4 +1,5 @@ """Preprocessor functions for comparisons of data with reference datasets.""" + from __future__ import annotations import logging @@ -33,13 +34,13 @@ logger = logging.getLogger(__name__) -BiasType = Literal['absolute', 'relative'] +BiasType = Literal["absolute", "relative"] def bias( products: set[PreprocessorFile] | Iterable[Cube], reference: Optional[Cube] = None, - bias_type: BiasType = 'absolute', + bias_type: BiasType = "absolute", denominator_mask_threshold: float = 1e-3, keep_reference_dataset: bool = False, ) -> set[PreprocessorFile] | CubeList: @@ -114,12 +115,12 @@ def bias( "A list of Cubes is given to this preprocessor; please " "specify a `reference`" ) - (reference, ref_product) = _get_ref(products, 'reference_for_bias') + (reference, ref_product) = _get_ref(products, "reference_for_bias") else: ref_product = None # Mask reference cube appropriately for relative biases - if bias_type == 'relative': + if bias_type == "relative": reference = reference.copy() npx = get_array_module(reference.core_data()) reference.data = npx.ma.masked_inside( @@ -145,7 +146,7 @@ def bias( cube = _calculate_bias(cube, reference, bias_type) # Adapt metadata and provenance information - product.attributes['units'] = str(cube.units) + product.attributes["units"] = str(cube.units) if ref_product is not None: product.wasderivedfrom(ref_product) @@ -188,12 +189,12 @@ def _calculate_bias(cube: Cube, reference: Cube, bias_type: BiasType) -> Cube: """Calculate bias for a single cube relative to a reference cube.""" cube_metadata = cube.metadata - if bias_type == 'absolute': + if bias_type == "absolute": cube = cube - reference new_units = cube.units - elif bias_type == 'relative': + elif bias_type == "relative": cube = (cube - reference) / reference - new_units = '1' + new_units = "1" else: raise ValueError( f"Expected one of ['absolute', 'relative'] for bias_type, got " @@ -207,12 +208,12 @@ def _calculate_bias(cube: Cube, reference: Cube, bias_type: BiasType) -> Cube: MetricType = Literal[ - 'rmse', - 'weighted_rmse', - 'pearsonr', - 'weighted_pearsonr', - 'emd', - 'weighted_emd', + "rmse", + "weighted_rmse", + "pearsonr", + "weighted_pearsonr", + "emd", + "weighted_emd", ] @@ -333,7 +334,7 @@ def distance_metric( ) reference_products = [] for product in products: - if product.attributes.get('reference_for_metric', False): + if product.attributes.get("reference_for_metric", False): reference_products.append(product) if len(reference_products) != 1: raise ValueError( @@ -372,10 +373,10 @@ def distance_metric( 
cube = _calculate_metric(cube, reference, metric, coords, **kwargs) # Adapt metadata and provenance information - product.attributes['standard_name'] = cube.standard_name - product.attributes['long_name'] = cube.long_name - product.attributes['short_name'] = cube.var_name - product.attributes['units'] = str(cube.units) + product.attributes["standard_name"] = cube.standard_name + product.attributes["long_name"] = cube.long_name + product.attributes["short_name"] = cube.var_name + product.attributes["units"] = str(cube.units) if product != reference_product: product.wasderivedfrom(reference_product) @@ -413,14 +414,14 @@ def _calculate_metric( # possible since some operations (e.g., sqrt()) are not available for cubes coords = get_all_coords(cube, coords) metrics_funcs = { - 'rmse': partial(_calculate_rmse, weighted=False, **kwargs), - 'weighted_rmse': partial(_calculate_rmse, weighted=True, **kwargs), - 'pearsonr': partial(_calculate_pearsonr, weighted=False, **kwargs), - 'weighted_pearsonr': partial( + "rmse": partial(_calculate_rmse, weighted=False, **kwargs), + "weighted_rmse": partial(_calculate_rmse, weighted=True, **kwargs), + "pearsonr": partial(_calculate_pearsonr, weighted=False, **kwargs), + "weighted_pearsonr": partial( _calculate_pearsonr, weighted=True, **kwargs ), - 'emd': partial(_calculate_emd, weighted=False, **kwargs), - 'weighted_emd': partial(_calculate_emd, weighted=True, **kwargs), + "emd": partial(_calculate_emd, weighted=False, **kwargs), + "weighted_emd": partial(_calculate_emd, weighted=True, **kwargs), } if metric not in metrics_funcs: raise ValueError( @@ -449,15 +450,15 @@ def _calculate_rmse( # Data axis = get_all_coord_dims(cube, coords) weights = get_weights(cube, coords) if weighted else None - squared_error = (cube.core_data() - reference.core_data())**2 + squared_error = (cube.core_data() - reference.core_data()) ** 2 npx = get_array_module(squared_error) rmse = npx.sqrt(npx.ma.average(squared_error, axis=axis, weights=weights)) # Metadata metadata = CubeMetadata( None, - 'RMSE' if cube.long_name is None else f'RMSE of {cube.long_name}', - 'rmse' if cube.var_name is None else f'rmse_{cube.var_name}', + "RMSE" if cube.long_name is None else f"RMSE of {cube.long_name}", + "rmse" if cube.var_name is None else f"rmse_{cube.var_name}", cube.units, cube.attributes, cube.cell_methods, @@ -477,7 +478,7 @@ def _calculate_pearsonr( """Calculate Pearson correlation coefficient.""" # Here, we want to use common_mask=True in iris.analysis.stats.pearsonr # (iris' default is common_mask=False) - kwargs.setdefault('common_mask', True) + kwargs.setdefault("common_mask", True) # Data weights = get_weights(cube, coords) if weighted else None @@ -489,11 +490,12 @@ def _calculate_pearsonr( metadata = CubeMetadata( None, ( - "Pearson's r" if cube.long_name is None + "Pearson's r" + if cube.long_name is None else f"Pearson's r of {cube.long_name}" ), - 'pearsonr' if cube.var_name is None else f'pearsonr_{cube.var_name}', - '1', + "pearsonr" if cube.var_name is None else f"pearsonr_{cube.var_name}", + "1", cube.attributes, cube.cell_methods, ) @@ -521,7 +523,7 @@ def _calculate_emd( bins=n_bins, bin_range=bin_range, weights=weights, - normalization='sum', + normalization="sum", ) pmf_ref = histogram( reference, @@ -529,7 +531,7 @@ def _calculate_emd( bins=n_bins, bin_range=bin_range, weights=weights, - normalization='sum', + normalization="sum", ) bin_centers = pmf.coord(cube.name()).points @@ -541,7 +543,7 @@ def _calculate_emd( if cube.has_lazy_data() and 
reference.has_lazy_data(): emd = da.apply_gufunc( _get_emd, - '(i),(i),(i)->()', + "(i),(i),(i)->()", pmf.lazy_data(), pmf_ref.lazy_data(), bin_centers, @@ -550,14 +552,14 @@ def _calculate_emd( vectorize=True, ) else: - v_get_emd = np.vectorize(_get_emd, signature='(n),(n),(n)->()') + v_get_emd = np.vectorize(_get_emd, signature="(n),(n),(n)->()") emd = v_get_emd(pmf.data, pmf_ref.data, bin_centers) # Metadata metadata = CubeMetadata( None, - 'EMD' if cube.long_name is None else f'EMD of {cube.long_name}', - 'emd' if cube.var_name is None else f'emd_{cube.var_name}', + "EMD" if cube.long_name is None else f"EMD of {cube.long_name}", + "emd" if cube.var_name is None else f"emd_{cube.var_name}", cube.units, cube.attributes, cube.cell_methods, diff --git a/esmvalcore/preprocessor/_cycles.py b/esmvalcore/preprocessor/_cycles.py index 017d6ed71b..92329a93ff 100644 --- a/esmvalcore/preprocessor/_cycles.py +++ b/esmvalcore/preprocessor/_cycles.py @@ -1,4 +1,5 @@ """Operations related to cycles (annual cycle, diurnal cycle, etc.).""" + import logging import iris @@ -50,17 +51,22 @@ def amplitude(cube, coords): for coord_name in coords: if cube.coords(coord_name): continue - logger.debug("Trying to add coordinate '%s' to cube via iris." - "coord_categorisation", coord_name) - if hasattr(iris.coord_categorisation, f'add_{coord_name}'): - getattr(iris.coord_categorisation, f'add_{coord_name}')(cube, - 'time') + logger.debug( + "Trying to add coordinate '%s' to cube via iris." + "coord_categorisation", + coord_name, + ) + if hasattr(iris.coord_categorisation, f"add_{coord_name}"): + getattr(iris.coord_categorisation, f"add_{coord_name}")( + cube, "time" + ) logger.debug("Added temporal coordinate '%s'", coord_name) else: raise iris.exceptions.CoordinateNotFoundError( f"Coordinate '{coord_name}' is not a coordinate of cube " f"{cube.summary(shorten=True)} and cannot be added via " - f"iris.coord_categorisation") + f"iris.coord_categorisation" + ) # Calculate amplitude max_cube = cube.aggregated_by(coords, iris.analysis.MAX) diff --git a/esmvalcore/preprocessor/_derive/__init__.py b/esmvalcore/preprocessor/_derive/__init__.py index 27bffa38d7..065845ef4d 100644 --- a/esmvalcore/preprocessor/_derive/__init__.py +++ b/esmvalcore/preprocessor/_derive/__init__.py @@ -7,6 +7,8 @@ import iris +from esmvalcore.preprocessor._units import convert_units + logger = logging.getLogger(__name__) @@ -18,14 +20,14 @@ def _get_all_derived_variables(): dict All derived variables with `short_name` (keys) and the associated python classes (values). - """ derivers = {} - for path in Path(__file__).parent.glob('[a-z]*.py'): + for path in Path(__file__).parent.glob("[a-z]*.py"): short_name = path.stem module = importlib.import_module( - f'esmvalcore.preprocessor._derive.{short_name}') - derivers[short_name] = getattr(module, 'DerivedVariable') + f"esmvalcore.preprocessor._derive.{short_name}" + ) + derivers[short_name] = getattr(module, "DerivedVariable") return derivers @@ -50,12 +52,12 @@ def get_required(short_name, project): ------- list List of dictionaries (including at least the key `short_name`). 
- """ if short_name.lower() not in ALL_DERIVED_VARIABLES: raise NotImplementedError( f"Cannot derive variable '{short_name}', no derivation script " - f"available") + f"available" + ) DerivedVariable = ALL_DERIVED_VARIABLES[short_name.lower()] # noqa: N806 variables = deepcopy(DerivedVariable().required(project)) return variables @@ -82,7 +84,6 @@ def derive(cubes, short_name, long_name, units, standard_name=None): ------- iris.cube.Cube The new derived variable. - """ if short_name == cubes[0].var_name: return cubes[0] @@ -94,9 +95,11 @@ def derive(cubes, short_name, long_name, units, standard_name=None): try: cube = DerivedVariable().calculate(cubes) except Exception as exc: - msg = (f"Derivation of variable '{short_name}' failed. If you used " - f"the option '--skip_nonexistent' for running your recipe, " - f"this might be caused by missing input data for derivation") + msg = ( + f"Derivation of variable '{short_name}' failed. If you used " + f"the option '--skip_nonexistent' for running your recipe, " + f"this might be caused by missing input data for derivation" + ) raise ValueError(msg) from exc # Set standard attributes @@ -104,8 +107,8 @@ def derive(cubes, short_name, long_name, units, standard_name=None): cube.standard_name = standard_name if standard_name else None cube.long_name = long_name for temp in cubes: - if 'source_file' in temp.attributes: - cube.attributes['source_file'] = temp.attributes['source_file'] + if "source_file" in temp.attributes: + cube.attributes["source_file"] = temp.attributes["source_file"] # Check/convert units if cube.units is None or cube.units == units: @@ -114,13 +117,19 @@ def derive(cubes, short_name, long_name, units, standard_name=None): logger.warning( "Units of cube after executing derivation script of '%s' are " "'%s', automatically setting them to '%s'. 
This might lead to " - "incorrect data", short_name, cube.units, units) + "incorrect data", + short_name, + cube.units, + units, + ) cube.units = units - elif cube.units.is_convertible(units): - cube.convert_units(units) else: - raise ValueError( - f"Units '{cube.units}' after executing derivation script of " - f"'{short_name}' cannot be converted to target units '{units}'") + try: + convert_units(cube, units) + except ValueError as exc: + raise ValueError( + f"Units '{cube.units}' after executing derivation script of " + f"'{short_name}' cannot be converted to target units '{units}'" + ) from exc return cube diff --git a/esmvalcore/preprocessor/_derive/_baseclass.py b/esmvalcore/preprocessor/_derive/_baseclass.py index ba9325bfdd..2d818f1ca3 100644 --- a/esmvalcore/preprocessor/_derive/_baseclass.py +++ b/esmvalcore/preprocessor/_derive/_baseclass.py @@ -1,4 +1,5 @@ """Contains the base class for derived variables.""" + from abc import abstractmethod diff --git a/esmvalcore/preprocessor/_derive/_shared.py b/esmvalcore/preprocessor/_derive/_shared.py index e6d07011f1..fd42ba9d75 100644 --- a/esmvalcore/preprocessor/_derive/_shared.py +++ b/esmvalcore/preprocessor/_derive/_shared.py @@ -13,18 +13,20 @@ def cloud_area_fraction(cubes, tau_constraint, plev_constraint): """Calculate cloud area fraction for different parameters.""" - clisccp_cube = cubes.extract_cube(NameConstraint(var_name='clisccp')) + clisccp_cube = cubes.extract_cube(NameConstraint(var_name="clisccp")) new_cube = clisccp_cube new_cube = new_cube.extract(tau_constraint & plev_constraint) coord_names = [ - coord.standard_name for coord in new_cube.coords() + coord.standard_name + for coord in new_cube.coords() if len(coord.points) > 1 ] - if 'atmosphere_optical_thickness_due_to_cloud' in coord_names: + if "atmosphere_optical_thickness_due_to_cloud" in coord_names: new_cube = new_cube.collapsed( - 'atmosphere_optical_thickness_due_to_cloud', iris.analysis.SUM) - if 'air_pressure' in coord_names: - new_cube = new_cube.collapsed('air_pressure', iris.analysis.SUM) + "atmosphere_optical_thickness_due_to_cloud", iris.analysis.SUM + ) + if "air_pressure" in coord_names: + new_cube = new_cube.collapsed("air_pressure", iris.analysis.SUM) return new_cube @@ -64,22 +66,25 @@ def column_average(cube, hus_cube, zg_cube, ps_cube): """ # Convert units of data - hus_cube.convert_units('1') - zg_cube.convert_units('m') - ps_cube.convert_units('Pa') + hus_cube.convert_units("1") + zg_cube.convert_units("m") + ps_cube.convert_units("Pa") # Level thickness (note: Buchwitz & Reuter use hPa but we use Pa; in fact, # this does not matter as units cancel out when calculating column-average p_layer_widths = pressure_level_widths(cube, ps_cube, top_limit=0.0) # Latitudes (1-dim array) - lat = cube.coord('latitude').points + lat = cube.coord("latitude").points # Gravitational acceleration g_0 on the geoid approximated by the # international gravity formula depending only on the latitude g_0 = np.array(lat) - g_0 = 9.780327 * (1.0 + 0.0053024 * (np.sin(lat / 180.0 * np.pi))**2 - - 0.0000058 * (np.sin(2.0 * lat / 180.0 * np.pi))**2) + g_0 = 9.780327 * ( + 1.0 + + 0.0053024 * (np.sin(lat / 180.0 * np.pi)) ** 2 + - 0.0000058 * (np.sin(2.0 * lat / 180.0 * np.pi)) ** 2 + ) # Approximation of the gravitational acceleration including the # free air correction @@ -92,17 +97,22 @@ def column_average(cube, hus_cube, zg_cube, ps_cube): # Number of dry air particles (air molecules excluding water vapor) within # each layer mw_air = 28.9644e-3 - n_dry = ((hus_cube * 
-1.0 + 1.0) * constants.value('Avogadro constant') *
-             p_layer_widths.data / (mw_air * g_4d_array))
+    n_dry = (
+        (hus_cube * -1.0 + 1.0)
+        * constants.value("Avogadro constant")
+        * p_layer_widths.data
+        / (mw_air * g_4d_array)
+    )
 
     # Number of gas molecules per layer
     cube.data = cube.core_data() * n_dry.core_data()
 
     # Column-average
-    cube = cube.collapsed('air_pressure', iris.analysis.SUM)
+    cube = cube.collapsed("air_pressure", iris.analysis.SUM)
     cube.data = (
-        cube.core_data() /
-        n_dry.collapsed('air_pressure', iris.analysis.SUM).core_data())
+        cube.core_data()
+        / n_dry.collapsed("air_pressure", iris.analysis.SUM).core_data()
+    )
 
     return cube
 
@@ -130,7 +140,7 @@
     data = _get_pressure_level_widths(pressure_array)
     p_level_widths_cube = cube.copy(data=data)
 
-    p_level_widths_cube.rename('pressure level widths')
+    p_level_widths_cube.rename("pressure level widths")
     p_level_widths_cube.units = ps_cube.units
 
     return p_level_widths_cube
 
@@ -145,7 +155,7 @@
 
     """
     # Create 4D array filled with pressure level values
-    p_levels = cube.coord('air_pressure').points.astype(np.float32)
+    p_levels = cube.coord("air_pressure").points.astype(np.float32)
     p_4d_array = iris.util.broadcast_to_shape(p_levels, cube.shape, [1])
 
     # Create 4d array filled with surface pressure values
@@ -170,15 +180,15 @@
 def _get_pressure_level_widths(array, air_pressure_axis=1):
     """Compute pressure level widths.
 
-    For a 1D array with pressure level columns, return a 1D array with
-    pressure level widths.
+    For an array with pressure level columns, return an array with
+    pressure level widths.
 
     """
     array = np.copy(array)
     if np.any(np.diff(array, axis=air_pressure_axis) > 0.0):
         raise ValueError("Pressure level value increased with height")
 
-    # Calculate centers
+    # Calculate array of centers between two neighboring pressure levels
    indices = [slice(None)] * array.ndim
     array_shifted = np.roll(array, -1, axis=air_pressure_axis)
     index_0 = deepcopy(indices)
@@ -198,9 +208,11 @@
     dim_map = np.arange(array_centers.ndim)
     dim_map = np.delete(dim_map, air_pressure_axis)
     array_centers_surface = iris.util.broadcast_to_shape(
-        array_centers[tuple(index_0)], array_centers.shape, dim_map)
+        array_centers[tuple(index_0)], array_centers.shape, dim_map
+    )
-    array_centers = np.where(np.isnan(array_centers), array_centers_surface,
-                             array_centers)
+    array_centers = np.where(
+        np.isnan(array_centers), array_centers_surface, array_centers
+    )
 
     # Calculate level widths
     p_level_widths = -np.diff(array_centers, axis=air_pressure_axis)
diff --git a/esmvalcore/preprocessor/_derive/alb.py b/esmvalcore/preprocessor/_derive/alb.py
index b0036077ad..b26f2269b6 100644
--- a/esmvalcore/preprocessor/_derive/alb.py
+++ b/esmvalcore/preprocessor/_derive/alb.py
@@ -4,6 +4,7 @@
 - crez_ba
 
 """
+
 from iris import NameConstraint
 
 from ._baseclass import DerivedVariableBase
 
@@ -16,20 +17,16 @@ class DerivedVariable(DerivedVariableBase):
     def required(project):
         """Declare the variables needed for derivation."""
         required = [
-            {
-                'short_name': 'rsdscs'
-            },
-            {
-                'short_name': 'rsuscs'
-            },
+            {"short_name": "rsdscs"},
+            {"short_name": "rsuscs"},
         ]
         return required
 
     @staticmethod
     def calculate(cubes):
         """Compute surface albedo."""
-        rsdscs_cube = cubes.extract_cube(NameConstraint(var_name='rsdscs'))
-        rsuscs_cube = 
cubes.extract_cube(NameConstraint(var_name='rsuscs'))
+        rsdscs_cube = cubes.extract_cube(NameConstraint(var_name="rsdscs"))
+        rsuscs_cube = cubes.extract_cube(NameConstraint(var_name="rsuscs"))
 
         rsnscs_cube = rsuscs_cube / rsdscs_cube
 
diff --git a/esmvalcore/preprocessor/_derive/amoc.py b/esmvalcore/preprocessor/_derive/amoc.py
index 1e6e1261a5..fa029aae60 100644
--- a/esmvalcore/preprocessor/_derive/amoc.py
+++ b/esmvalcore/preprocessor/_derive/amoc.py
@@ -1,4 +1,5 @@
 """Derivation of variable `amoc`."""
+
 import iris
 import numpy as np
 
@@ -12,13 +13,16 @@ class DerivedVariable(DerivedVariableBase):
     def required(project):
         """Declare the variables needed for derivation."""
         if project == "CMIP5":
-            required = [{'short_name': 'msftmyz', 'mip': 'Omon'}]
+            required = [{"short_name": "msftmyz", "mip": "Omon"}]
         elif project == "CMIP6":
-            required = [{'short_name': 'msftmz', 'optional': True},
-                        {'short_name': 'msftyz', 'optional': True}]
+            required = [
+                {"short_name": "msftmz", "optional": True},
+                {"short_name": "msftyz", "optional": True},
+            ]
         else:
-            raise ValueError(f"Project {project} can not be used "
-                             f"for Amoc derivation.")
+            raise ValueError(
+                f"Project {project} cannot be used for Amoc derivation."
+            )
 
         return required
 
@@ -41,29 +45,33 @@ def calculate(cubes):
             # msftmyz and msfmz
             cube = cubes.extract_cube(
                 iris.Constraint(
-                    name='ocean_meridional_overturning_mass_streamfunction'))
+                    name="ocean_meridional_overturning_mass_streamfunction"
+                )
+            )
             meridional = True
-            lats = cube.coord('latitude').points
+            lats = cube.coord("latitude").points
         except iris.exceptions.ConstraintMismatchError:
             # msftyz
             cube = cubes.extract_cube(
-                iris.Constraint(
-                    name='ocean_y_overturning_mass_streamfunction'))
+                iris.Constraint(name="ocean_y_overturning_mass_streamfunction")
+            )
             meridional = False
-            lats = cube.coord('grid_latitude').points
+            lats = cube.coord("grid_latitude").points
 
         cube_orig = cube.copy()
 
         # 1: find the relevant region
-        atl_constraint = iris.Constraint(region='atlantic_arctic_ocean')
+        atl_constraint = iris.Constraint(region="atlantic_arctic_ocean")
         cube = cube.extract(constraint=atl_constraint)
         if cube is None:
-            raise ValueError(f"Amoc calculation: {cube_orig} doesn't contain"
-                             f" atlantic_arctic_ocean.")
+            raise ValueError(
+                f"Amoc calculation: {cube_orig} doesn't contain"
+                f" atlantic_arctic_ocean."
+            )
 
         # 2: Remove the shallowest 500m to avoid wind driven mixed layer.
-        depth_constraint = iris.Constraint(depth=lambda d: d >= 500.)
+        depth_constraint = iris.Constraint(depth=lambda d: d >= 500.0)
         cube = cube.extract(constraint=depth_constraint)
 
         # 3: Find the latitude closest to 26.5N (location of RAPID measurement)
@@ -79,7 +87,7 @@
         # 4: find the maximum in the water column along the time axis.
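# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the patch): steps 1-4 above
# build a RAPID-style AMOC index: restrict the overturning streamfunction to
# the Atlantic basin, drop the wind-mixed top 500 m, select the latitude
# closest to 26.5N (the RAPID array), and take the water-column maximum. The
# final step in plain numpy, with hypothetical data:
import numpy as np

streamfunction = np.random.default_rng(0).random((120, 30))  # (time, depth)
amoc_index = streamfunction.max(axis=1)  # column maximum per time step
assert amoc_index.shape == (120,)
# ---------------------------------------------------------------------------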
cube = cube.collapsed( - ['depth', 'region'], + ["depth", "region"], iris.analysis.MAX, ) diff --git a/esmvalcore/preprocessor/_derive/asr.py b/esmvalcore/preprocessor/_derive/asr.py index 30f65c4ff8..b088bfa9ec 100644 --- a/esmvalcore/preprocessor/_derive/asr.py +++ b/esmvalcore/preprocessor/_derive/asr.py @@ -11,18 +11,20 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'rsdt'}, {'short_name': 'rsut'}] + required = [{"short_name": "rsdt"}, {"short_name": "rsut"}] return required @staticmethod def calculate(cubes): """Compute absorbed shortwave radiation.""" rsdt_cube = cubes.extract_cube( - Constraint(name='toa_incoming_shortwave_flux')) + Constraint(name="toa_incoming_shortwave_flux") + ) rsut_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_shortwave_flux')) + Constraint(name="toa_outgoing_shortwave_flux") + ) asr_cube = rsdt_cube - rsut_cube - asr_cube.attributes['positive'] = 'down' + asr_cube.attributes["positive"] = "down" return asr_cube diff --git a/esmvalcore/preprocessor/_derive/chlora.py b/esmvalcore/preprocessor/_derive/chlora.py index f14930355e..63ebd1ab90 100644 --- a/esmvalcore/preprocessor/_derive/chlora.py +++ b/esmvalcore/preprocessor/_derive/chlora.py @@ -12,12 +12,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'chldiatos' - }, - { - 'short_name': 'chlmiscos' - }, + {"short_name": "chldiatos"}, + {"short_name": "chlmiscos"}, ] return required @@ -25,12 +21,18 @@ def required(project): def calculate(cubes): """Compute surface chlorophyll concentration.""" chldiatos_cube = cubes.extract_cube( - Constraint(name='mass_concentration_of_diatoms_expressed_as' + - '_chlorophyll_in_sea_water')) + Constraint( + name="mass_concentration_of_diatoms_expressed_as" + + "_chlorophyll_in_sea_water" + ) + ) chlmiscos_cube = cubes.extract_cube( - Constraint(name='mass_concentration_of_miscellaneous' + - '_phytoplankton_expressed_as_chlorophyll' + - '_in_sea_water')) + Constraint( + name="mass_concentration_of_miscellaneous" + + "_phytoplankton_expressed_as_chlorophyll" + + "_in_sea_water" + ) + ) chlora_cube = chldiatos_cube + chlmiscos_cube diff --git a/esmvalcore/preprocessor/_derive/clhmtisccp.py b/esmvalcore/preprocessor/_derive/clhmtisccp.py index b20d1ee0ff..5d800d0016 100644 --- a/esmvalcore/preprocessor/_derive/clhmtisccp.py +++ b/esmvalcore/preprocessor/_derive/clhmtisccp.py @@ -12,14 +12,15 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'clisccp'}] + required = [{"short_name": "clisccp"}] return required @staticmethod def calculate(cubes): """Compute ISCCP high level medium-thickness cloud area fraction.""" tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) - plev = Constraint(air_pressure=lambda p: p <= 44000.) 
+ atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.0 + ) + plev = Constraint(air_pressure=lambda p: p <= 44000.0) return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvalcore/preprocessor/_derive/clhtkisccp.py b/esmvalcore/preprocessor/_derive/clhtkisccp.py index 9b9b5106e2..c85d65ec0a 100644 --- a/esmvalcore/preprocessor/_derive/clhtkisccp.py +++ b/esmvalcore/preprocessor/_derive/clhtkisccp.py @@ -12,14 +12,15 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'clisccp'}] + required = [{"short_name": "clisccp"}] return required @staticmethod def calculate(cubes): """Compute ISCCP high level thick cloud area fraction.""" tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) - plev = Constraint(air_pressure=lambda p: p <= 44000.) + atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.0 + ) + plev = Constraint(air_pressure=lambda p: p <= 44000.0) return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvalcore/preprocessor/_derive/cllmtisccp.py b/esmvalcore/preprocessor/_derive/cllmtisccp.py index 28b5104ea8..c48baebf3f 100644 --- a/esmvalcore/preprocessor/_derive/cllmtisccp.py +++ b/esmvalcore/preprocessor/_derive/cllmtisccp.py @@ -12,14 +12,15 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'clisccp'}] + required = [{"short_name": "clisccp"}] return required @staticmethod def calculate(cubes): """Compute ISCCP low level medium-thickness cloud area fraction.""" tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) - plev = Constraint(air_pressure=lambda p: p > 68000.) + atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.0 + ) + plev = Constraint(air_pressure=lambda p: p > 68000.0) return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvalcore/preprocessor/_derive/clltkisccp.py b/esmvalcore/preprocessor/_derive/clltkisccp.py index 3d0ba17363..3ee86c10f6 100644 --- a/esmvalcore/preprocessor/_derive/clltkisccp.py +++ b/esmvalcore/preprocessor/_derive/clltkisccp.py @@ -12,14 +12,15 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'clisccp'}] + required = [{"short_name": "clisccp"}] return required @staticmethod def calculate(cubes): """Compute ISCCP low level thick cloud area fraction.""" tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) - plev = Constraint(air_pressure=lambda p: p > 68000.) 
+ atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.0 + ) + plev = Constraint(air_pressure=lambda p: p > 68000.0) return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvalcore/preprocessor/_derive/clmmtisccp.py b/esmvalcore/preprocessor/_derive/clmmtisccp.py index 53129d065e..b2fa6d5f96 100644 --- a/esmvalcore/preprocessor/_derive/clmmtisccp.py +++ b/esmvalcore/preprocessor/_derive/clmmtisccp.py @@ -12,14 +12,15 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'clisccp'}] + required = [{"short_name": "clisccp"}] return required @staticmethod def calculate(cubes): """Compute ISCCP middle level medium-thickness cloud area fraction.""" tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) - plev = Constraint(air_pressure=lambda p: 44000. < p <= 68000.) + atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.0 + ) + plev = Constraint(air_pressure=lambda p: 44000.0 < p <= 68000.0) return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvalcore/preprocessor/_derive/clmtkisccp.py b/esmvalcore/preprocessor/_derive/clmtkisccp.py index a61314a470..26d07209fe 100644 --- a/esmvalcore/preprocessor/_derive/clmtkisccp.py +++ b/esmvalcore/preprocessor/_derive/clmtkisccp.py @@ -12,14 +12,15 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'clisccp'}] + required = [{"short_name": "clisccp"}] return required @staticmethod def calculate(cubes): """Compute ISCCP middle level thick cloud area fraction.""" tau = Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) - plev = Constraint(air_pressure=lambda p: 44000. < p <= 68000.) 
+ atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.0 + ) + plev = Constraint(air_pressure=lambda p: 44000.0 < p <= 68000.0) return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvalcore/preprocessor/_derive/co2s.py b/esmvalcore/preprocessor/_derive/co2s.py index 052800f83a..b57c7fba31 100644 --- a/esmvalcore/preprocessor/_derive/co2s.py +++ b/esmvalcore/preprocessor/_derive/co2s.py @@ -1,4 +1,5 @@ """Derivation of variable ``co2s``.""" + import dask.array as da import iris import numpy as np @@ -14,7 +15,8 @@ def _get_first_unmasked_data(array, axis): indices_first_positive = da.argmax(numerical_mask, axis=axis) indices = da.meshgrid( *[da.arange(array.shape[i]) for i in range(array.ndim) if i != axis], - indexing='ij') + indexing="ij", + ) indices.insert(axis, indices_first_positive) first_unmasked_data = np.array(array)[tuple(indices)] return first_unmasked_data @@ -39,43 +41,51 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'co2'}, {'short_name': 'ps'}] + required = [{"short_name": "co2"}, {"short_name": "ps"}] return required @staticmethod def calculate(cubes): """Compute mole fraction of CO2 at surface.""" co2_cube = cubes.extract_cube( - iris.Constraint(name='mole_fraction_of_carbon_dioxide_in_air')) + iris.Constraint(name="mole_fraction_of_carbon_dioxide_in_air") + ) ps_cube = cubes.extract_cube( - iris.Constraint(name='surface_air_pressure')) + iris.Constraint(name="surface_air_pressure") + ) # Fill masked data if necessary (interpolation fails with masked data) - (z_axis,) = co2_cube.coord_dims(co2_cube.coord(axis='Z', - dim_coords=True)) + (z_axis,) = co2_cube.coord_dims( + co2_cube.coord(axis="Z", dim_coords=True) + ) mask = da.ma.getmaskarray(co2_cube.core_data()) if mask.any(): first_unmasked_data = _get_first_unmasked_data( - co2_cube.core_data(), axis=z_axis) + co2_cube.core_data(), axis=z_axis + ) dim_map = [dim for dim in range(co2_cube.ndim) if dim != z_axis] first_unmasked_data = iris.util.broadcast_to_shape( - first_unmasked_data, co2_cube.shape, dim_map) - co2_cube.data = da.where(mask, first_unmasked_data, - co2_cube.core_data()) + first_unmasked_data, co2_cube.shape, dim_map + ) + co2_cube.data = da.where( + mask, first_unmasked_data, co2_cube.core_data() + ) # Interpolation (not supported for dask arrays) - air_pressure_coord = co2_cube.coord('air_pressure') + air_pressure_coord = co2_cube.coord("air_pressure") original_levels = iris.util.broadcast_to_shape( - air_pressure_coord.points, co2_cube.shape, - co2_cube.coord_dims(air_pressure_coord)) + air_pressure_coord.points, + co2_cube.shape, + co2_cube.coord_dims(air_pressure_coord), + ) target_levels = np.expand_dims(ps_cube.data, axis=z_axis) co2s_data = stratify.interpolate( target_levels, original_levels, co2_cube.data, axis=z_axis, - interpolation='linear', - extrapolation='linear', + interpolation="linear", + extrapolation="linear", ) co2s_data = np.squeeze(co2s_data, axis=z_axis) @@ -84,13 +94,15 @@ def calculate(cubes): indices[z_axis] = 0 co2s_cube = co2_cube[tuple(indices)] co2s_cube.data = co2s_data - if co2s_cube.coords('air_pressure'): - co2s_cube.remove_coord('air_pressure') - ps_coord = iris.coords.AuxCoord(ps_cube.data, - var_name='plev', - standard_name='air_pressure', - long_name='pressure', - units=ps_cube.units) + if co2s_cube.coords("air_pressure"): + co2s_cube.remove_coord("air_pressure") + ps_coord = iris.coords.AuxCoord( + ps_cube.data, + var_name="plev", + 
standard_name="air_pressure", + long_name="pressure", + units=ps_cube.units, + ) co2s_cube.add_aux_coord(ps_coord, np.arange(co2s_cube.ndim)) - co2s_cube.convert_units('1e-6') + co2s_cube.convert_units("1e-6") return co2s_cube diff --git a/esmvalcore/preprocessor/_derive/ctotal.py b/esmvalcore/preprocessor/_derive/ctotal.py index 18bef27071..8d8d00faef 100644 --- a/esmvalcore/preprocessor/_derive/ctotal.py +++ b/esmvalcore/preprocessor/_derive/ctotal.py @@ -1,7 +1,6 @@ """Derivation of variable `ctotal`.""" import iris - from iris import Constraint from ._baseclass import DerivedVariableBase @@ -14,27 +13,15 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - if project == 'CMIP5': + if project == "CMIP5": required = [ - { - 'short_name': 'cVeg', - 'mip': 'Lmon' - }, - { - 'short_name': 'cSoil', - 'mip': 'Lmon' - }, + {"short_name": "cVeg", "mip": "Lmon"}, + {"short_name": "cSoil", "mip": "Lmon"}, ] - elif project == 'CMIP6': + elif project == "CMIP6": required = [ - { - 'short_name': 'cVeg', - 'mip': 'Lmon' - }, - { - 'short_name': 'cSoil', - 'mip': 'Emon' - }, + {"short_name": "cVeg", "mip": "Lmon"}, + {"short_name": "cSoil", "mip": "Emon"}, ] return required @@ -43,18 +30,23 @@ def calculate(cubes): """Compute total ecosystem carbon storage.""" try: c_soil_cube = cubes.extract_cube( - Constraint(name='soil_carbon_content')) + Constraint(name="soil_carbon_content") + ) except iris.exceptions.ConstraintMismatchError: try: c_soil_cube = cubes.extract_cube( - Constraint(name='soil_mass_content_of_carbon')) + Constraint(name="soil_mass_content_of_carbon") + ) except iris.exceptions.ConstraintMismatchError: - raise ValueError(f"No cube from {cubes} can be loaded with " - f"standard name CMIP5: soil_carbon_content " - f"or CMIP6: soil_mass_content_of_carbon") + raise ValueError( + f"No cube from {cubes} can be loaded with " + f"standard name CMIP5: soil_carbon_content " + f"or CMIP6: soil_mass_content_of_carbon" + ) c_veg_cube = cubes.extract_cube( - Constraint(name='vegetation_carbon_content')) + Constraint(name="vegetation_carbon_content") + ) c_total_cube = c_soil_cube + c_veg_cube c_total_cube.standard_name = None - c_total_cube.long_name = 'Total Carbon Stock' + c_total_cube.long_name = "Total Carbon Stock" return c_total_cube diff --git a/esmvalcore/preprocessor/_derive/et.py b/esmvalcore/preprocessor/_derive/et.py index abdde1c191..ee772eb810 100644 --- a/esmvalcore/preprocessor/_derive/et.py +++ b/esmvalcore/preprocessor/_derive/et.py @@ -6,7 +6,7 @@ from ._baseclass import DerivedVariableBase # Constants -LATENT_HEAT_VAPORIZATION = 2.465E6 +LATENT_HEAT_VAPORIZATION = 2.465e6 class DerivedVariable(DerivedVariableBase): @@ -15,17 +15,18 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'hfls', 'mip': 'Amon'}] + required = [{"short_name": "hfls", "mip": "Amon"}] return required @staticmethod def calculate(cubes): """Compute evapotranspiration.""" hfls_cube = cubes.extract_cube( - Constraint(name='surface_upward_latent_heat_flux')) + Constraint(name="surface_upward_latent_heat_flux") + ) et_cube = hfls_cube * 24.0 * 3600.0 / LATENT_HEAT_VAPORIZATION - et_cube.units = cf_units.Unit('mm day-1') - et_cube.attributes.pop('positive', None) + et_cube.units = cf_units.Unit("mm day-1") + et_cube.attributes.pop("positive", None) return et_cube diff --git a/esmvalcore/preprocessor/_derive/hfns.py 
b/esmvalcore/preprocessor/_derive/hfns.py index 5bbc93cc53..9ed2178b7a 100644 --- a/esmvalcore/preprocessor/_derive/hfns.py +++ b/esmvalcore/preprocessor/_derive/hfns.py @@ -13,10 +13,10 @@ def required(project): """Declare the variables needed for derivation.""" required = [ { - 'short_name': 'hfls', + "short_name": "hfls", }, { - 'short_name': 'hfss', + "short_name": "hfss", }, ] return required @@ -24,8 +24,8 @@ def required(project): @staticmethod def calculate(cubes): """Compute surface net heat flux.""" - hfls_cube = cubes.extract_cube(NameConstraint(var_name='hfls')) - hfss_cube = cubes.extract_cube(NameConstraint(var_name='hfss')) + hfls_cube = cubes.extract_cube(NameConstraint(var_name="hfls")) + hfss_cube = cubes.extract_cube(NameConstraint(var_name="hfss")) hfns_cube = hfls_cube + hfss_cube hfns_cube.units = hfls_cube.units diff --git a/esmvalcore/preprocessor/_derive/lvp.py b/esmvalcore/preprocessor/_derive/lvp.py index 0faf0779c4..450d94ba56 100644 --- a/esmvalcore/preprocessor/_derive/lvp.py +++ b/esmvalcore/preprocessor/_derive/lvp.py @@ -17,24 +17,18 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'hfls' - }, - { - 'short_name': 'pr' - }, - { - 'short_name': 'evspsbl' - }, + {"short_name": "hfls"}, + {"short_name": "pr"}, + {"short_name": "evspsbl"}, ] return required @staticmethod def calculate(cubes): """Compute Latent Heat Release from Precipitation.""" - hfls_cube = cubes.extract_cube(NameConstraint(var_name='hfls')) - pr_cube = cubes.extract_cube(NameConstraint(var_name='pr')) - evspsbl_cube = cubes.extract_cube(NameConstraint(var_name='evspsbl')) + hfls_cube = cubes.extract_cube(NameConstraint(var_name="hfls")) + pr_cube = cubes.extract_cube(NameConstraint(var_name="pr")) + evspsbl_cube = cubes.extract_cube(NameConstraint(var_name="evspsbl")) lvp_cube = hfls_cube * (pr_cube / evspsbl_cube) diff --git a/esmvalcore/preprocessor/_derive/lwcre.py b/esmvalcore/preprocessor/_derive/lwcre.py index 7189560289..7b372c6de6 100644 --- a/esmvalcore/preprocessor/_derive/lwcre.py +++ b/esmvalcore/preprocessor/_derive/lwcre.py @@ -12,12 +12,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rlut' - }, - { - 'short_name': 'rlutcs' - }, + {"short_name": "rlut"}, + {"short_name": "rlutcs"}, ] return required @@ -25,12 +21,14 @@ def required(project): def calculate(cubes): """Compute longwave cloud radiative effect.""" rlut_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_longwave_flux')) + Constraint(name="toa_outgoing_longwave_flux") + ) rlutcs_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_longwave_flux_assuming_clear_sky')) + Constraint(name="toa_outgoing_longwave_flux_assuming_clear_sky") + ) lwcre_cube = rlutcs_cube - rlut_cube lwcre_cube.units = rlut_cube.units - lwcre_cube.attributes['positive'] = 'down' + lwcre_cube.attributes["positive"] = "down" return lwcre_cube diff --git a/esmvalcore/preprocessor/_derive/lwp.py b/esmvalcore/preprocessor/_derive/lwp.py index 067126d5e9..3a5f857a42 100644 --- a/esmvalcore/preprocessor/_derive/lwp.py +++ b/esmvalcore/preprocessor/_derive/lwp.py @@ -16,12 +16,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'clwvi' - }, - { - 'short_name': 'clivi' - }, + {"short_name": "clwvi"}, + {"short_name": "clivi"}, ] 
return required @@ -37,61 +33,64 @@ def calculate(cubes): """ # CMIP5 and CMIP6 names are slightly different, so use # variable name instead to extract cubes - clwvi_cube = cubes.extract_cube(NameConstraint(var_name='clwvi')) - clivi_cube = cubes.extract_cube(NameConstraint(var_name='clivi')) + clwvi_cube = cubes.extract_cube(NameConstraint(var_name="clwvi")) + clivi_cube = cubes.extract_cube(NameConstraint(var_name="clivi")) # CMIP5 and CMIP6 have different global attributes that we use # to determine model name and project name: # - CMIP5: model_id and project_id # - CMIP6: source_id and mip_era - project = clwvi_cube.attributes.get('project_id') + project = clwvi_cube.attributes.get("project_id") if project: - dataset = clwvi_cube.attributes.get('model_id') + dataset = clwvi_cube.attributes.get("model_id") # some CMIP6 models define both, project_id and source_id but # no model_id --> also try source_id to find model name if not dataset: - dataset = clwvi_cube.attributes.get('source_id') + dataset = clwvi_cube.attributes.get("source_id") else: - project = clwvi_cube.attributes.get('mip_era') - dataset = clwvi_cube.attributes.get('source_id') + project = clwvi_cube.attributes.get("mip_era") + dataset = clwvi_cube.attributes.get("source_id") # Should we check that the model_id/project_id are the same on both # cubes? bad_datasets = [ - 'CCSM4', # CMIP5 models - 'CESM1-CAM5-1-FV2', - 'CESM1-CAM5', - 'CMCC-CESM', - 'CMCC-CM', - 'CMCC-CMS', - 'CSIRO-Mk3-6-0', - 'GISS-E2-1-G', - 'GISS-E2-1-H', - 'IPSL-CM5A-MR', - 'IPSL-CM5A-LR', - 'IPSL-CM5B-LR', - 'IPSL-CM5A-MR', - 'MIROC-ESM', - 'MIROC-ESM-CHEM', - 'MIROC-ESM', - 'MPI-ESM-LR', - 'MPI-ESM-MR', - 'MPI-ESM-P', - 'AWI-ESM-1-1-LR', # CMIP6 models - 'CAMS-CSM1-0', - 'FGOALS-f3-L', - 'IPSL-CM6A-LR', - 'MPI-ESM-1-2-HAM', - 'MPI-ESM1-2-HR', - 'MPI-ESM1-2-LR', - 'SAM0-UNICON' + "CCSM4", # CMIP5 models + "CESM1-CAM5-1-FV2", + "CESM1-CAM5", + "CMCC-CESM", + "CMCC-CM", + "CMCC-CMS", + "CSIRO-Mk3-6-0", + "GISS-E2-1-G", + "GISS-E2-1-H", + "IPSL-CM5A-MR", + "IPSL-CM5A-LR", + "IPSL-CM5B-LR", + "IPSL-CM5A-MR", + "MIROC-ESM", + "MIROC-ESM-CHEM", + "MIROC-ESM", + "MPI-ESM-LR", + "MPI-ESM-MR", + "MPI-ESM-P", + "AWI-ESM-1-1-LR", # CMIP6 models + "CAMS-CSM1-0", + "FGOALS-f3-L", + "IPSL-CM6A-LR", + "MPI-ESM-1-2-HAM", + "MPI-ESM1-2-HR", + "MPI-ESM1-2-LR", + "SAM0-UNICON", ] affected_projects = ["CMIP5", "CMIP5_ETHZ", "CMIP6"] - if (project in affected_projects and dataset in bad_datasets): + if project in affected_projects and dataset in bad_datasets: logger.info( "Assuming that variable clwvi from %s dataset %s " - "contains only liquid water", project, dataset) + "contains only liquid water", + project, + dataset, + ) lwp_cube = clwvi_cube else: lwp_cube = clwvi_cube - clivi_cube diff --git a/esmvalcore/preprocessor/_derive/netcre.py b/esmvalcore/preprocessor/_derive/netcre.py index 9c65e0ec7e..28db99db36 100644 --- a/esmvalcore/preprocessor/_derive/netcre.py +++ b/esmvalcore/preprocessor/_derive/netcre.py @@ -12,18 +12,10 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rlut' - }, - { - 'short_name': 'rlutcs' - }, - { - 'short_name': 'rsut' - }, - { - 'short_name': 'rsutcs' - }, + {"short_name": "rlut"}, + {"short_name": "rlutcs"}, + {"short_name": "rsut"}, + {"short_name": "rsutcs"}, ] return required @@ -43,6 +35,6 @@ def calculate(cubes): netcre_cube = lwcre_cube + swcre_cube netcre_cube.units = lwcre_cube.units - netcre_cube.attributes['positive'] = 
'down' + netcre_cube.attributes["positive"] = "down" return netcre_cube diff --git a/esmvalcore/preprocessor/_derive/ohc.py b/esmvalcore/preprocessor/_derive/ohc.py index d8d374b312..05590c9f3b 100644 --- a/esmvalcore/preprocessor/_derive/ohc.py +++ b/esmvalcore/preprocessor/_derive/ohc.py @@ -1,12 +1,12 @@ """Derivation of variable `ohc`.""" -import iris -from iris import Constraint +import iris from cf_units import Unit +from iris import Constraint from ._baseclass import DerivedVariableBase -RHO_CP = iris.coords.AuxCoord(4.09169e+6, units=Unit('kg m-3 J kg-1 K-1')) +RHO_CP = iris.coords.AuxCoord(4.09169e6, units=Unit("kg m-3 J kg-1 K-1")) class DerivedVariable(DerivedVariableBase): @@ -16,23 +16,13 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'thetao' - }, - { - 'short_name': 'volcello', - 'mip': 'fx' - }, + {"short_name": "thetao"}, + {"short_name": "volcello", "mip": "fx"}, ] - if project == 'CMIP6': + if project == "CMIP6": required = [ - { - 'short_name': 'thetao' - }, - { - 'short_name': 'volcello', - 'mip': 'Ofx' - }, + {"short_name": "thetao"}, + {"short_name": "volcello", "mip": "Ofx"}, ] return required @@ -56,27 +46,33 @@ def calculate(cubes): """ # 1. Load the thetao and volcello cubes cube = cubes.extract_cube( - Constraint(cube_func=lambda c: c.var_name == 'thetao')) + Constraint(cube_func=lambda c: c.var_name == "thetao") + ) volume = cubes.extract_cube( - Constraint(cube_func=lambda c: c.var_name == 'volcello')) + Constraint(cube_func=lambda c: c.var_name == "volcello") + ) # 2. multiply with each other and with cprho0 # some juggling with coordinates needed since Iris is very # restrictive in this regard - cube.convert_units('K') + cube.convert_units("K") try: - t_coord_dims = cube.coord_dims('time') + t_coord_dims = cube.coord_dims("time") except iris.exceptions.CoordinateNotFoundError: time_coord_present = False else: time_coord_present = True t_coord_dim = t_coord_dims[0] - dim_coords = [(coord, cube.coord_dims(coord)[0]) - for coord in cube.coords( - contains_dimension=t_coord_dim, dim_coords=True)] + dim_coords = [ + (coord, cube.coord_dims(coord)[0]) + for coord in cube.coords( + contains_dimension=t_coord_dim, dim_coords=True + ) + ] aux_coords = [ (coord, cube.coord_dims(coord)) - for coord in cube.coords(contains_dimension=t_coord_dim, - dim_coords=False) + for coord in cube.coords( + contains_dimension=t_coord_dim, dim_coords=False + ) ] for coord, dims in dim_coords + aux_coords: cube.remove_coord(coord) diff --git a/esmvalcore/preprocessor/_derive/rlns.py b/esmvalcore/preprocessor/_derive/rlns.py index ed41aae039..d2a9ba0276 100644 --- a/esmvalcore/preprocessor/_derive/rlns.py +++ b/esmvalcore/preprocessor/_derive/rlns.py @@ -12,12 +12,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rlds' - }, - { - 'short_name': 'rlus' - }, + {"short_name": "rlds"}, + {"short_name": "rlus"}, ] return required @@ -25,9 +21,11 @@ def required(project): def calculate(cubes): """Compute surface net downward longwave radiation.""" rlds_cube = cubes.extract_cube( - Constraint(name='surface_downwelling_longwave_flux_in_air')) + Constraint(name="surface_downwelling_longwave_flux_in_air") + ) rlus_cube = cubes.extract_cube( - Constraint(name='surface_upwelling_longwave_flux_in_air')) + Constraint(name="surface_upwelling_longwave_flux_in_air") + ) rlns_cube = rlds_cube - 
rlus_cube diff --git a/esmvalcore/preprocessor/_derive/rlnst.py b/esmvalcore/preprocessor/_derive/rlnst.py index 4eecd9b90d..1a79afae7c 100644 --- a/esmvalcore/preprocessor/_derive/rlnst.py +++ b/esmvalcore/preprocessor/_derive/rlnst.py @@ -4,6 +4,7 @@ - weig_ka """ + from iris import Constraint from ._baseclass import DerivedVariableBase @@ -16,15 +17,9 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rlds' - }, - { - 'short_name': 'rlus' - }, - { - 'short_name': 'rlut' - }, + {"short_name": "rlds"}, + {"short_name": "rlus"}, + {"short_name": "rlut"}, ] return required @@ -37,11 +32,14 @@ def calculate(cubes): to surface and outer space. """ rlds_cube = cubes.extract_cube( - Constraint(name='surface_downwelling_longwave_flux_in_air')) + Constraint(name="surface_downwelling_longwave_flux_in_air") + ) rlus_cube = cubes.extract_cube( - Constraint(name='surface_upwelling_longwave_flux_in_air')) + Constraint(name="surface_upwelling_longwave_flux_in_air") + ) rlut_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_longwave_flux')) + Constraint(name="toa_outgoing_longwave_flux") + ) rlnst_cube = rlut_cube + (rlds_cube - rlus_cube) diff --git a/esmvalcore/preprocessor/_derive/rlnstcs.py b/esmvalcore/preprocessor/_derive/rlnstcs.py index 10613216a9..82e78fe368 100644 --- a/esmvalcore/preprocessor/_derive/rlnstcs.py +++ b/esmvalcore/preprocessor/_derive/rlnstcs.py @@ -4,6 +4,7 @@ - weig_ka """ + from iris import Constraint from ._baseclass import DerivedVariableBase @@ -16,15 +17,9 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rldscs' - }, - { - 'short_name': 'rlus' - }, - { - 'short_name': 'rlutcs' - }, + {"short_name": "rldscs"}, + {"short_name": "rlus"}, + {"short_name": "rlutcs"}, ] return required @@ -37,12 +32,17 @@ def calculate(cubes): to surface and outer space assuming clear sky. 
""" rldscs_cube = cubes.extract_cube( - Constraint(name='surface_downwelling_longwave_flux_in_air_' + - 'assuming_clear_sky')) + Constraint( + name="surface_downwelling_longwave_flux_in_air_" + + "assuming_clear_sky" + ) + ) rlus_cube = cubes.extract_cube( - Constraint(name='surface_upwelling_longwave_flux_in_air')) + Constraint(name="surface_upwelling_longwave_flux_in_air") + ) rlutcs_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_longwave_flux_assuming_clear_sky')) + Constraint(name="toa_outgoing_longwave_flux_assuming_clear_sky") + ) rlnstcs_cube = rlutcs_cube + (rldscs_cube - rlus_cube) diff --git a/esmvalcore/preprocessor/_derive/rlntcs.py b/esmvalcore/preprocessor/_derive/rlntcs.py index 49a7f498ab..ce5b22544f 100644 --- a/esmvalcore/preprocessor/_derive/rlntcs.py +++ b/esmvalcore/preprocessor/_derive/rlntcs.py @@ -1,4 +1,5 @@ """Derivation of variable `rlntcs`.""" + from iris import Constraint from ._baseclass import DerivedVariableBase @@ -10,14 +11,15 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'rlutcs'}] + required = [{"short_name": "rlutcs"}] return required @staticmethod def calculate(cubes): """Compute toa net downward longwave radiation assuming clear sky.""" rlutcs_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_longwave_flux_assuming_clear_sky')) + Constraint(name="toa_outgoing_longwave_flux_assuming_clear_sky") + ) rlutcs_cube.data = -rlutcs_cube.core_data() - rlutcs_cube.attributes['positive'] = 'down' + rlutcs_cube.attributes["positive"] = "down" return rlutcs_cube diff --git a/esmvalcore/preprocessor/_derive/rlus.py b/esmvalcore/preprocessor/_derive/rlus.py index 4d536c29ad..9c115e83ed 100644 --- a/esmvalcore/preprocessor/_derive/rlus.py +++ b/esmvalcore/preprocessor/_derive/rlus.py @@ -4,6 +4,7 @@ - lukas_brunner """ + from iris import Constraint from ._baseclass import DerivedVariableBase @@ -16,12 +17,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rlds' - }, - { - 'short_name': 'rlns' - }, + {"short_name": "rlds"}, + {"short_name": "rlns"}, ] return required @@ -29,21 +26,23 @@ def required(project): def calculate(cubes): """Compute upwelling longwave flux from downwelling and net.""" rlds_cube = cubes.extract_cube( - Constraint(name='surface_downwelling_longwave_flux_in_air')) + Constraint(name="surface_downwelling_longwave_flux_in_air") + ) rlns_cube = cubes.extract_cube( - Constraint(name='surface_net_downward_longwave_flux')) + Constraint(name="surface_net_downward_longwave_flux") + ) # fix latitude and longitude var_name - rlns_cube.coord(axis='X').long_name = rlds_cube.coord( - axis='X').long_name - rlns_cube.coord(axis='Y').long_name = rlds_cube.coord( - axis='Y').long_name - rlns_cube.coord(axis='X').var_name = rlds_cube.coord( - axis='X').var_name - rlns_cube.coord(axis='Y').var_name = rlds_cube.coord( - axis='Y').var_name + rlns_cube.coord(axis="X").long_name = rlds_cube.coord( + axis="X" + ).long_name + rlns_cube.coord(axis="Y").long_name = rlds_cube.coord( + axis="Y" + ).long_name + rlns_cube.coord(axis="X").var_name = rlds_cube.coord(axis="X").var_name + rlns_cube.coord(axis="Y").var_name = rlds_cube.coord(axis="Y").var_name rlus_cube = rlds_cube - rlns_cube - rlus_cube.attributes['positive'] = 'up' + rlus_cube.attributes["positive"] = "up" return rlus_cube diff --git 
a/esmvalcore/preprocessor/_derive/rsns.py b/esmvalcore/preprocessor/_derive/rsns.py index 27ba4cb968..6ea6c2e88f 100644 --- a/esmvalcore/preprocessor/_derive/rsns.py +++ b/esmvalcore/preprocessor/_derive/rsns.py @@ -12,12 +12,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rsds' - }, - { - 'short_name': 'rsus' - }, + {"short_name": "rsds"}, + {"short_name": "rsus"}, ] return required @@ -25,9 +21,11 @@ def required(project): def calculate(cubes): """Compute surface net downward shortwave radiation.""" rsds_cube = cubes.extract_cube( - Constraint(name='surface_downwelling_shortwave_flux_in_air')) + Constraint(name="surface_downwelling_shortwave_flux_in_air") + ) rsus_cube = cubes.extract_cube( - Constraint(name='surface_upwelling_shortwave_flux_in_air')) + Constraint(name="surface_upwelling_shortwave_flux_in_air") + ) rsns_cube = rsds_cube - rsus_cube diff --git a/esmvalcore/preprocessor/_derive/rsnst.py b/esmvalcore/preprocessor/_derive/rsnst.py index e2a2a53b21..2912fa0c31 100644 --- a/esmvalcore/preprocessor/_derive/rsnst.py +++ b/esmvalcore/preprocessor/_derive/rsnst.py @@ -4,6 +4,7 @@ - weig_ka """ + from iris import Constraint from ._baseclass import DerivedVariableBase @@ -16,18 +17,10 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rsds' - }, - { - 'short_name': 'rsdt' - }, - { - 'short_name': 'rsus' - }, - { - 'short_name': 'rsut' - }, + {"short_name": "rsds"}, + {"short_name": "rsdt"}, + {"short_name": "rsus"}, + {"short_name": "rsut"}, ] return required @@ -35,13 +28,17 @@ def required(project): def calculate(cubes): """Compute Heating from Shortwave Absorption.""" rsds_cube = cubes.extract_cube( - Constraint(name='surface_downwelling_shortwave_flux_in_air')) + Constraint(name="surface_downwelling_shortwave_flux_in_air") + ) rsdt_cube = cubes.extract_cube( - Constraint(name='toa_incoming_shortwave_flux')) + Constraint(name="toa_incoming_shortwave_flux") + ) rsus_cube = cubes.extract_cube( - Constraint(name='surface_upwelling_shortwave_flux_in_air')) + Constraint(name="surface_upwelling_shortwave_flux_in_air") + ) rsut_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_shortwave_flux')) + Constraint(name="toa_outgoing_shortwave_flux") + ) rsnst_cube = (rsdt_cube - rsut_cube) - (rsds_cube - rsus_cube) diff --git a/esmvalcore/preprocessor/_derive/rsnstcs.py b/esmvalcore/preprocessor/_derive/rsnstcs.py index 976be64003..5a58a860c6 100644 --- a/esmvalcore/preprocessor/_derive/rsnstcs.py +++ b/esmvalcore/preprocessor/_derive/rsnstcs.py @@ -4,6 +4,7 @@ - weig_ka """ + from iris import Constraint from ._baseclass import DerivedVariableBase @@ -16,18 +17,10 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rsdscs' - }, - { - 'short_name': 'rsdt' - }, - { - 'short_name': 'rsuscs' - }, - { - 'short_name': 'rsutcs' - }, + {"short_name": "rsdscs"}, + {"short_name": "rsdt"}, + {"short_name": "rsuscs"}, + {"short_name": "rsutcs"}, ] return required @@ -35,15 +28,23 @@ def required(project): def calculate(cubes): """Compute Heating from Shortwave Absorption assuming clear sky.""" rsdscs_cube = cubes.extract_cube( - Constraint(name='surface_downwelling_shortwave_flux_in_air_' + - 'assuming_clear_sky')) + Constraint( + name="surface_downwelling_shortwave_flux_in_air_" + + 
"assuming_clear_sky" + ) + ) rsdt_cube = cubes.extract_cube( - Constraint(name='toa_incoming_shortwave_flux')) + Constraint(name="toa_incoming_shortwave_flux") + ) rsuscs_cube = cubes.extract_cube( - Constraint(name='surface_upwelling_shortwave_flux_in_air_' + - 'assuming_clear_sky')) + Constraint( + name="surface_upwelling_shortwave_flux_in_air_" + + "assuming_clear_sky" + ) + ) rsutcs_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_shortwave_flux_assuming_clear_sky')) + Constraint(name="toa_outgoing_shortwave_flux_assuming_clear_sky") + ) rsnstcs_cube = (rsdt_cube - rsutcs_cube) - (rsdscs_cube - rsuscs_cube) diff --git a/esmvalcore/preprocessor/_derive/rsnstcsnorm.py b/esmvalcore/preprocessor/_derive/rsnstcsnorm.py index a6445bbb64..3eb30b3115 100644 --- a/esmvalcore/preprocessor/_derive/rsnstcsnorm.py +++ b/esmvalcore/preprocessor/_derive/rsnstcsnorm.py @@ -4,6 +4,7 @@ - weig_ka """ + from cf_units import Unit from iris import Constraint @@ -17,18 +18,10 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rsdscs' - }, - { - 'short_name': 'rsdt' - }, - { - 'short_name': 'rsuscs' - }, - { - 'short_name': 'rsutcs' - }, + {"short_name": "rsdscs"}, + {"short_name": "rsdt"}, + {"short_name": "rsuscs"}, + {"short_name": "rsutcs"}, ] return required @@ -42,19 +35,29 @@ def calculate(cubes): the incoming shortwave flux at the top of the atmosphere. """ rsdscs_cube = cubes.extract_cube( - Constraint(name='surface_downwelling_shortwave_flux_in_air_' + - 'assuming_clear_sky')) + Constraint( + name="surface_downwelling_shortwave_flux_in_air_" + + "assuming_clear_sky" + ) + ) rsdt_cube = cubes.extract_cube( - Constraint(name='toa_incoming_shortwave_flux')) + Constraint(name="toa_incoming_shortwave_flux") + ) rsuscs_cube = cubes.extract_cube( - Constraint(name='surface_upwelling_shortwave_flux_in_air_' + - 'assuming_clear_sky')) + Constraint( + name="surface_upwelling_shortwave_flux_in_air_" + + "assuming_clear_sky" + ) + ) rsutcs_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_shortwave_flux_assuming_clear_sky')) - - rsnstcsnorm_cube = (((rsdt_cube - rsutcs_cube) - - (rsdscs_cube - rsuscs_cube)) / rsdt_cube) * 100.0 - rsnstcsnorm_cube.units = Unit('percent') - rsnstcsnorm_cube.attributes.pop('positive', None) + Constraint(name="toa_outgoing_shortwave_flux_assuming_clear_sky") + ) + + rsnstcsnorm_cube = ( + ((rsdt_cube - rsutcs_cube) - (rsdscs_cube - rsuscs_cube)) + / rsdt_cube + ) * 100.0 + rsnstcsnorm_cube.units = Unit("percent") + rsnstcsnorm_cube.attributes.pop("positive", None) return rsnstcsnorm_cube diff --git a/esmvalcore/preprocessor/_derive/rsnt.py b/esmvalcore/preprocessor/_derive/rsnt.py index dbd04ca2ca..f0c65200d7 100644 --- a/esmvalcore/preprocessor/_derive/rsnt.py +++ b/esmvalcore/preprocessor/_derive/rsnt.py @@ -12,12 +12,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rsdt' - }, - { - 'short_name': 'rsut' - }, + {"short_name": "rsdt"}, + {"short_name": "rsut"}, ] return required @@ -25,12 +21,14 @@ def required(project): def calculate(cubes): """Compute toa net downward shortwave radiation.""" rsdt_cube = cubes.extract_cube( - Constraint(name='toa_incoming_shortwave_flux')) + Constraint(name="toa_incoming_shortwave_flux") + ) rsut_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_shortwave_flux')) + Constraint(name="toa_outgoing_shortwave_flux") 
+ ) rsnt_cube = rsdt_cube - rsut_cube rsnt_cube.units = rsdt_cube.units - rsnt_cube.attributes['positive'] = 'down' + rsnt_cube.attributes["positive"] = "down" return rsnt_cube diff --git a/esmvalcore/preprocessor/_derive/rsntcs.py b/esmvalcore/preprocessor/_derive/rsntcs.py index 9a4586d3fd..51d0dc8e7a 100644 --- a/esmvalcore/preprocessor/_derive/rsntcs.py +++ b/esmvalcore/preprocessor/_derive/rsntcs.py @@ -1,4 +1,5 @@ """Derivation of variable `rsntcs`.""" + from iris import Constraint from ._baseclass import DerivedVariableBase @@ -11,12 +12,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rsdt' - }, - { - 'short_name': 'rsutcs' - }, + {"short_name": "rsdt"}, + {"short_name": "rsutcs"}, ] return required @@ -24,9 +21,11 @@ def required(project): def calculate(cubes): """Compute toa net downward shortwave radiation assuming clear sky.""" rsdt_cube = cubes.extract_cube( - Constraint(name='toa_incoming_shortwave_flux')) + Constraint(name="toa_incoming_shortwave_flux") + ) rsutcs_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_shortwave_flux_assuming_clear_sky')) + Constraint(name="toa_outgoing_shortwave_flux_assuming_clear_sky") + ) rsntcs_cube = rsdt_cube - rsutcs_cube - rsntcs_cube.attributes['positive'] = 'down' + rsntcs_cube.attributes["positive"] = "down" return rsntcs_cube diff --git a/esmvalcore/preprocessor/_derive/rsus.py b/esmvalcore/preprocessor/_derive/rsus.py index 326d063c26..8d7f122f01 100644 --- a/esmvalcore/preprocessor/_derive/rsus.py +++ b/esmvalcore/preprocessor/_derive/rsus.py @@ -4,6 +4,7 @@ - lukas_brunner """ + from iris import Constraint from ._baseclass import DerivedVariableBase @@ -16,12 +17,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rsds' - }, - { - 'short_name': 'rsns' - }, + {"short_name": "rsds"}, + {"short_name": "rsns"}, ] return required @@ -29,21 +26,23 @@ def required(project): def calculate(cubes): """Compute upwelling shortwave flux from downwelling and net.""" rsds_cube = cubes.extract_cube( - Constraint(name='surface_downwelling_shortwave_flux_in_air')) + Constraint(name="surface_downwelling_shortwave_flux_in_air") + ) rsns_cube = cubes.extract_cube( - Constraint(name='surface_net_downward_shortwave_flux')) + Constraint(name="surface_net_downward_shortwave_flux") + ) # fix latitude and longitude var_name - rsns_cube.coord(axis='X').long_name = rsds_cube.coord( - axis='X').long_name - rsns_cube.coord(axis='Y').long_name = rsds_cube.coord( - axis='Y').long_name - rsns_cube.coord(axis='X').var_name = rsds_cube.coord( - axis='X').var_name - rsns_cube.coord(axis='Y').var_name = rsds_cube.coord( - axis='Y').var_name + rsns_cube.coord(axis="X").long_name = rsds_cube.coord( + axis="X" + ).long_name + rsns_cube.coord(axis="Y").long_name = rsds_cube.coord( + axis="Y" + ).long_name + rsns_cube.coord(axis="X").var_name = rsds_cube.coord(axis="X").var_name + rsns_cube.coord(axis="Y").var_name = rsds_cube.coord(axis="Y").var_name rsus_cube = rsds_cube - rsns_cube - rsus_cube.attributes['positive'] = 'up' + rsus_cube.attributes["positive"] = "up" return rsus_cube diff --git a/esmvalcore/preprocessor/_derive/rtnt.py b/esmvalcore/preprocessor/_derive/rtnt.py index 012773ad69..171641f0b1 100644 --- a/esmvalcore/preprocessor/_derive/rtnt.py +++ b/esmvalcore/preprocessor/_derive/rtnt.py @@ -12,15 +12,9 @@ class 
DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rsdt' - }, - { - 'short_name': 'rsut' - }, - { - 'short_name': 'rlut' - }, + {"short_name": "rsdt"}, + {"short_name": "rsut"}, + {"short_name": "rlut"}, ] return required @@ -28,11 +22,14 @@ def required(project): def calculate(cubes): """Compute toa net downward total radiation.""" rsdt_cube = cubes.extract_cube( - Constraint(name='toa_incoming_shortwave_flux')) + Constraint(name="toa_incoming_shortwave_flux") + ) rsut_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_shortwave_flux')) + Constraint(name="toa_outgoing_shortwave_flux") + ) rlut_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_longwave_flux')) + Constraint(name="toa_outgoing_longwave_flux") + ) rtnt_cube = rsdt_cube - rsut_cube - rlut_cube diff --git a/esmvalcore/preprocessor/_derive/sfcwind.py b/esmvalcore/preprocessor/_derive/sfcwind.py index 2af241f517..69abc3a388 100644 --- a/esmvalcore/preprocessor/_derive/sfcwind.py +++ b/esmvalcore/preprocessor/_derive/sfcwind.py @@ -12,12 +12,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'uas' - }, - { - 'short_name': 'vas' - }, + {"short_name": "uas"}, + {"short_name": "vas"}, ] return required @@ -27,9 +23,9 @@ def calculate(cubes): Wind speed derived from eastward and northward components. """ - uas_cube = cubes.extract_cube(NameConstraint(var_name='uas')) - vas_cube = cubes.extract_cube(NameConstraint(var_name='vas')) + uas_cube = cubes.extract_cube(NameConstraint(var_name="uas")) + vas_cube = cubes.extract_cube(NameConstraint(var_name="vas")) - sfcwind_cube = (uas_cube**2 + vas_cube**2)**0.5 + sfcwind_cube = (uas_cube**2 + vas_cube**2) ** 0.5 return sfcwind_cube diff --git a/esmvalcore/preprocessor/_derive/siextent.py b/esmvalcore/preprocessor/_derive/siextent.py index eee25e5a98..b12ffc24a5 100644 --- a/esmvalcore/preprocessor/_derive/siextent.py +++ b/esmvalcore/preprocessor/_derive/siextent.py @@ -1,4 +1,5 @@ """Derivation of variable `siextent`.""" + import logging import dask.array as da @@ -6,6 +7,7 @@ from iris import Constraint from esmvalcore.exceptions import RecipeError + from ._baseclass import DerivedVariableBase logger = logging.getLogger(__name__) @@ -18,14 +20,9 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'sic', - 'optional': 'true' - }, - { - 'short_name': 'siconca', - 'optional': 'true' - }] + {"short_name": "sic", "optional": "true"}, + {"short_name": "siconca", "optional": "true"}, + ] return required @staticmethod @@ -48,18 +45,19 @@ def calculate(cubes): Cube containing sea ice extent. """ try: - sic = cubes.extract_cube(Constraint(name='sic')) + sic = cubes.extract_cube(Constraint(name="sic")) except iris.exceptions.ConstraintMismatchError: try: - sic = cubes.extract_cube(Constraint(name='siconca')) + sic = cubes.extract_cube(Constraint(name="siconca")) except iris.exceptions.ConstraintMismatchError as exc: raise RecipeError( - 'Derivation of siextent failed due to missing variables ' - 'sic and siconca.') from exc + "Derivation of siextent failed due to missing variables " + "sic and siconca."
+ ) from exc ones = da.ones_like(sic) - siextent_data = da.ma.masked_where(sic.lazy_data() < 15., ones) + siextent_data = da.ma.masked_where(sic.lazy_data() < 15.0, ones) siextent = sic.copy(siextent_data) - siextent.units = 'm2' + siextent.units = "m2" return siextent diff --git a/esmvalcore/preprocessor/_derive/sispeed.py b/esmvalcore/preprocessor/_derive/sispeed.py index 168b2e5525..581fc70f26 100644 --- a/esmvalcore/preprocessor/_derive/sispeed.py +++ b/esmvalcore/preprocessor/_derive/sispeed.py @@ -1,10 +1,10 @@ """Derivation of variable `sispeed`.""" import logging + from iris import Constraint from .._regrid import regrid - from ._baseclass import DerivedVariableBase logger = logging.getLogger(__name__) @@ -16,10 +16,10 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - if project == 'CMIP6': - required = [{'short_name': 'siu'}, {'short_name': 'siv'}] + if project == "CMIP6": + required = [{"short_name": "siu"}, {"short_name": "siv"}] else: - required = [{'short_name': 'usi'}, {'short_name': 'vsi'}] + required = [{"short_name": "usi"}, {"short_name": "vsi"}] return required @staticmethod @@ -36,15 +36,15 @@ def calculate(cubes): Cube containing sea ice speed. """ - siu = cubes.extract_cube(Constraint(name='sea_ice_x_velocity')) - siv = cubes.extract_cube(Constraint(name='sea_ice_y_velocity')) + siu = cubes.extract_cube(Constraint(name="sea_ice_x_velocity")) + siv = cubes.extract_cube(Constraint(name="sea_ice_y_velocity")) try: return DerivedVariable._get_speed(siu, siv) except ValueError: - logger.debug('Regridding siv into siu grid to compute sispeed') - siv = regrid(siv, siu, 'linear') + logger.debug("Regridding siv into siu grid to compute sispeed") + siv = regrid(siv, siu, "linear") return DerivedVariable._get_speed(siu, siv) @staticmethod def _get_speed(siu, siv): - return (siu**2 + siv**2)**0.5 + return (siu**2 + siv**2) ** 0.5 diff --git a/esmvalcore/preprocessor/_derive/sithick.py b/esmvalcore/preprocessor/_derive/sithick.py index ad80709d8f..0ef77223aa 100644 --- a/esmvalcore/preprocessor/_derive/sithick.py +++ b/esmvalcore/preprocessor/_derive/sithick.py @@ -11,11 +11,14 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{ - 'short_name': 'sit', - }, { - 'short_name': 'sic', - }] + required = [ + { + "short_name": "sit", + }, + { + "short_name": "sic", + }, + ] return required @staticmethod @@ -36,9 +39,9 @@ def calculate(cubes): Cube containing sea ice thickness. """ - sivol = cubes.extract_cube(Constraint(name='sea_ice_thickness')) - siconc = cubes.extract_cube(Constraint(name='sea_ice_area_fraction')) - siconc.convert_units('1.0') + sivol = cubes.extract_cube(Constraint(name="sea_ice_thickness")) + siconc = cubes.extract_cube(Constraint(name="sea_ice_area_fraction")) + siconc.convert_units("1.0") sithick = sivol / siconc return sithick diff --git a/esmvalcore/preprocessor/_derive/sm.py b/esmvalcore/preprocessor/_derive/sm.py index deb3526dfd..b90ba3df45 100644 --- a/esmvalcore/preprocessor/_derive/sm.py +++ b/esmvalcore/preprocessor/_derive/sm.py @@ -13,7 +13,7 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'mrsos'}] + required = [{"short_name": "mrsos"}] return required @staticmethod @@ -27,12 +27,12 @@ def calculate(cubes): 20 deg C).
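+        For example, a moisture content of 10 kg/m2 in a layer of
+        thickness 0.1 m corresponds to 10 / 0.1 / 998.2 ≈ 0.10 m3/m3.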
""" - mrsos_cube = cubes.extract_cube(NameConstraint(var_name='mrsos')) + mrsos_cube = cubes.extract_cube(NameConstraint(var_name="mrsos")) - depth = mrsos_cube.coord('depth').core_bounds().astype(np.float64) + depth = mrsos_cube.coord("depth").core_bounds().astype(np.float64) layer_thickness = depth[..., 1] - depth[..., 0] sm_cube = mrsos_cube / layer_thickness / 998.2 - sm_cube.units = cf_units.Unit('m3 m^-3') + sm_cube.units = cf_units.Unit("m3 m^-3") return sm_cube diff --git a/esmvalcore/preprocessor/_derive/soz.py b/esmvalcore/preprocessor/_derive/soz.py new file mode 100644 index 0000000000..d65b99fff4 --- /dev/null +++ b/esmvalcore/preprocessor/_derive/soz.py @@ -0,0 +1,109 @@ +"""Derivation of variable ``soz``.""" + +import dask.array as da +import iris + +from ._baseclass import DerivedVariableBase +from .toz import DerivedVariable as Toz +from .toz import add_longitude_coord, interpolate_hybrid_plevs + +# O3 mole fraction threshold (in ppb) that is used for the definition of the +# stratosphere (stratosphere = region where O3 mole fraction is at least as +# high as the threshold value) +STRATOSPHERIC_O3_THRESHOLD = 125.0 + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable ``soz``.""" + + @staticmethod + def required(project): + """Declare the variables needed for derivation.""" + if project == "CMIP6": + required = [{"short_name": "o3"}] + else: + required = [{"short_name": "tro3"}] + return required + + @staticmethod + def calculate(cubes): + """Compute stratospheric column ozone. + + Note + ---- + Here, the stratosphere is defined as the region in which the O3 mole + fraction is at least as high as the given threshold + (``STRATOSPHERIC_O3_THRESHOLD``). + + In the calculation of ``toz``, the surface air pressure (``ps``) is + used to determine the pressure level width of the lowest layer. For + ``soz``, this lowest layer can be ignored since it is not located in + the stratosphere (it will be masked out due to the O3 mole fraction + threshold). Thus, the surface air pressure (``ps``) is not necessary + for the derivation of ``soz`` and is simply replaced with the lowest + pressure level in the data to be able to use the ``toz`` derivation + function. + + The calculation of ``soz`` consists of three steps: + (1) Mask out O3 mole fractions smaller than given threshold. + (2) Cut out the lowest pressure level from the data and use it as + surface air pressure (``toz``). + (3) Use derivation function of ``toz`` to calculate ``soz`` (using the + masked data). + + """ + o3_cube = cubes.extract_cube( + iris.Constraint(name="mole_fraction_of_ozone_in_air") + ) + + # If o3 is given on hybrid pressure levels (e.g., from Table AERmon), + # interpolate it to regular pressure levels + if len(o3_cube.coord_dims("air_pressure")) > 1: + o3_cube = interpolate_hybrid_plevs(o3_cube) + + # To support zonal mean o3 (e.g., from Table AERmonZ), add longitude + # coordinate if necessary + if not o3_cube.coords("longitude"): + o3_cube = add_longitude_coord(o3_cube) + + # (1) Mask O3 mole fraction using the given threshold + o3_cube.convert_units("1e-9") + mask = o3_cube.lazy_data() < STRATOSPHERIC_O3_THRESHOLD + mask |= da.ma.getmaskarray(o3_cube.lazy_data()) + o3_cube.data = da.ma.masked_array(o3_cube.lazy_data(), mask=mask) + + # (2) Add surrogate for the surface air pressure (ps) cube using the + # lowest pressure level available in the data (this is fine since the + # the lowest pressure level is far away from the stratosphere). 
+ + # Get dummy ps cube with correct dimensions + ps_dims = ( + o3_cube.coord_dims("time") + + o3_cube.coord_dims("latitude") + + o3_cube.coord_dims("longitude") + ) + idx_to_extract_ps = [0] * o3_cube.ndim + for ps_dim in ps_dims: + idx_to_extract_ps[ps_dim] = slice(None) + ps_cube = o3_cube[tuple(idx_to_extract_ps)].copy() + + # Set ps data using lowest pressure level available and add correct + # metadata + lowest_plev = o3_cube.coord("air_pressure").points.max() + ps_data = da.broadcast_to(lowest_plev, ps_cube.shape) + ps_cube.data = ps_data + ps_cube.var_name = "ps" + ps_cube.standard_name = "surface_air_pressure" + ps_cube.long_name = "Surface Air Pressure" + ps_cube.units = o3_cube.coord("air_pressure").units + + # Cut lowest pressure level from o3_cube + z_dim = o3_cube.coord_dims("air_pressure")[0] + idx_to_cut_lowest_plev = [slice(None)] * o3_cube.ndim + idx_to_cut_lowest_plev[z_dim] = slice(1, None) + o3_cube = o3_cube[tuple(idx_to_cut_lowest_plev)] + + # (3) Use derivation function of toz to calculate soz using the masked + # o3 cube and the surrogate ps cube + cubes = iris.cube.CubeList([o3_cube, ps_cube]) + return Toz.calculate(cubes) diff --git a/esmvalcore/preprocessor/_derive/swcre.py b/esmvalcore/preprocessor/_derive/swcre.py index c1873f622d..574a826278 100644 --- a/esmvalcore/preprocessor/_derive/swcre.py +++ b/esmvalcore/preprocessor/_derive/swcre.py @@ -12,12 +12,8 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - { - 'short_name': 'rsut' - }, - { - 'short_name': 'rsutcs' - }, + {"short_name": "rsut"}, + {"short_name": "rsutcs"}, ] return required @@ -25,12 +21,14 @@ def required(project): def calculate(cubes): """Compute shortwave cloud radiative effect.""" rsut_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_shortwave_flux')) + Constraint(name="toa_outgoing_shortwave_flux") + ) rsutcs_cube = cubes.extract_cube( - Constraint(name='toa_outgoing_shortwave_flux_assuming_clear_sky')) + Constraint(name="toa_outgoing_shortwave_flux_assuming_clear_sky") + ) swcre_cube = rsutcs_cube - rsut_cube swcre_cube.units = rsut_cube.units - swcre_cube.attributes['positive'] = 'down' + swcre_cube.attributes["positive"] = "down" return swcre_cube diff --git a/esmvalcore/preprocessor/_derive/toz.py b/esmvalcore/preprocessor/_derive/toz.py index 32fd9e8334..e1d01b48e8 100644 --- a/esmvalcore/preprocessor/_derive/toz.py +++ b/esmvalcore/preprocessor/_derive/toz.py @@ -1,34 +1,80 @@ -"""Derivation of variable `toz`.""" +"""Derivation of variable ``toz``.""" + +import warnings import cf_units import iris from scipy import constants +from esmvalcore.cmor.table import CMOR_TABLES + +from .._regrid import extract_levels, regrid from ._baseclass import DerivedVariableBase from ._shared import pressure_level_widths # Constants -AVOGADRO_CONST = constants.value('Avogadro constant') -AVOGADRO_CONST_UNIT = constants.unit('Avogadro constant') -STANDARD_GRAVITY = constants.value('standard acceleration of gravity') -STANDARD_GRAVITY_UNIT = constants.unit('standard acceleration of gravity') +AVOGADRO_CONST = constants.value("Avogadro constant") +AVOGADRO_CONST_UNIT = constants.unit("Avogadro constant") +STANDARD_GRAVITY = constants.value("standard acceleration of gravity") +STANDARD_GRAVITY_UNIT = constants.unit("standard acceleration of gravity") MW_AIR = 29 -MW_AIR_UNIT = cf_units.Unit('g mol^-1') +MW_AIR_UNIT = cf_units.Unit("g mol^-1") MW_O3 = 48 -MW_O3_UNIT = cf_units.Unit('g mol^-1') -DOBSON_UNIT = 
cf_units.Unit('2.69e20 m^-2') +MW_O3_UNIT = cf_units.Unit("g mol^-1") +DOBSON_UNIT = cf_units.Unit("2.69e20 m^-2") + + +def add_longitude_coord(cube, ps_cube=None): + """Add dimensional ``longitude`` coordinate of length 1 to cube.""" + lon_coord = iris.coords.DimCoord( + [180.0], + bounds=[[0.0, 360.0]], + var_name="lon", + standard_name="longitude", + long_name="longitude", + units="degrees_east", + ) + new_dim_coords = [(c, cube.coord_dims(c)) for c in cube.dim_coords] + new_dim_coords.append((lon_coord, cube.ndim)) + new_aux_coords = [(c, cube.coord_dims(c)) for c in cube.aux_coords] + new_cube = iris.cube.Cube( + cube.core_data()[..., None], + dim_coords_and_dims=new_dim_coords, + aux_coords_and_dims=new_aux_coords, + ) + new_cube.metadata = cube.metadata + return new_cube + + +def interpolate_hybrid_plevs(cube): + """Interpolate hybrid pressure levels.""" + # Use CMIP6's plev19 target levels (in Pa) + target_levels = CMOR_TABLES["CMIP6"].coords["plev19"].requested + cube.coord("air_pressure").convert_units("Pa") + cube = extract_levels( + cube, target_levels, "linear", coordinate="air_pressure" + ) + return cube class DerivedVariable(DerivedVariableBase): - """Derivation of variable `toz`.""" + """Derivation of variable ``toz``.""" @staticmethod def required(project): """Declare the variables needed for derivation.""" - if project == 'CMIP6': - required = [{'short_name': 'o3'}, {'short_name': 'ps'}] + # TODO: make get_required in _derive/__init__.py use variables as argument + # and make this dependent on mip + if project == "CMIP6": + required = [ + {"short_name": "o3"}, + {"short_name": "ps", "mip": "Amon"}, + ] else: - required = [{'short_name': 'tro3'}, {'short_name': 'ps'}] + required = [ + {"short_name": "tro3"}, + {"short_name": "ps"}, + ] return required @staticmethod @@ -41,24 +87,57 @@ def calculate(cubes): upper integration bound of 0 Pa is used.
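+        In short, the ozone mass column is
+        toz = sum_k [ x_O3(k) * dp(k) / g * MW_O3 / MW_air ],
+        with x_O3 the O3 mole fraction, dp(k) the pressure level widths,
+        g the standard gravity and MW_* the molar masses; the result is
+        then converted to a length via the Dobson unit.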
""" - tro3_cube = cubes.extract_cube( - iris.Constraint(name='mole_fraction_of_ozone_in_air')) + o3_cube = cubes.extract_cube( + iris.Constraint(name="mole_fraction_of_ozone_in_air") + ) ps_cube = cubes.extract_cube( - iris.Constraint(name='surface_air_pressure')) - - p_layer_widths = pressure_level_widths(tro3_cube, - ps_cube, - top_limit=0.0) - toz_cube = (tro3_cube * p_layer_widths / STANDARD_GRAVITY * MW_O3 / - MW_AIR) - toz_cube = toz_cube.collapsed('air_pressure', iris.analysis.SUM) - toz_cube.units = (tro3_cube.units * p_layer_widths.units / - STANDARD_GRAVITY_UNIT * MW_O3_UNIT / MW_AIR_UNIT) - - # Convert from kg m^-2 to Dobson unit (2.69e20 m^-2 ) + iris.Constraint(name="surface_air_pressure") + ) + + # If o3 is given on hybrid pressure levels (e.g., from Table AERmon), + # interpolate it to regular pressure levels + if len(o3_cube.coord_dims("air_pressure")) > 1: + o3_cube = interpolate_hybrid_plevs(o3_cube) + + # To support zonal mean o3 (e.g., from Table AERmonZ), add longitude + # coordinate and collapsed ps cube if necessary to ensure that they + # have correct shapes + if not o3_cube.coords("longitude"): + o3_cube = add_longitude_coord(o3_cube) + ps_cube = ps_cube.collapsed("longitude", iris.analysis.MEAN) + ps_cube.remove_coord("longitude") + ps_cube = add_longitude_coord(ps_cube) + + # If the horizontal dimensions of ps and o3 differ, regrid ps + # Note: regrid() checks if the regridding is really necessary before + # running the actual interpolation + ps_cube = regrid(ps_cube, o3_cube, "linear") + + # Actual derivation of toz using o3 mole fraction and pressure level + # widths + p_layer_widths = pressure_level_widths(o3_cube, ps_cube, top_limit=0.0) + toz_cube = o3_cube * p_layer_widths / STANDARD_GRAVITY * MW_O3 / MW_AIR + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + category=UserWarning, + message="Collapsing a non-contiguous coordinate", + ) + toz_cube = toz_cube.collapsed("air_pressure", iris.analysis.SUM) + toz_cube.units = ( + o3_cube.units + * p_layer_widths.units + / STANDARD_GRAVITY_UNIT + * MW_O3_UNIT + / MW_AIR_UNIT + ) + + # Convert from kg m^-2 to Dobson units DU (2.69e20 m^-2 ) and from + # DU to m (1 mm = 100 DU) toz_cube = toz_cube / MW_O3 * AVOGADRO_CONST toz_cube.units = toz_cube.units / MW_O3_UNIT * AVOGADRO_CONST_UNIT toz_cube.convert_units(DOBSON_UNIT) - toz_cube.units = 'DU' + toz_cube.data = toz_cube.core_data() * 1e-5 + toz_cube.units = "m" return toz_cube diff --git a/esmvalcore/preprocessor/_derive/troz.py b/esmvalcore/preprocessor/_derive/troz.py new file mode 100644 index 0000000000..55b778b24b --- /dev/null +++ b/esmvalcore/preprocessor/_derive/troz.py @@ -0,0 +1,61 @@ +"""Derivation of variable ``troz``.""" + +import dask.array as da +import iris + +from ._baseclass import DerivedVariableBase +from .soz import STRATOSPHERIC_O3_THRESHOLD +from .toz import DerivedVariable as Toz +from .toz import add_longitude_coord, interpolate_hybrid_plevs + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable ``troz``.""" + + @staticmethod + def required(project): + """Declare the variables needed for derivation.""" + return Toz.required(project) + + @staticmethod + def calculate(cubes): + """Compute tropospheric column ozone. + + Note + ---- + Here, the troposphere is defined as the region in which the O3 mole + fraction is smaller than the given threshold + (``STRATOSPHERIC_O3_THRESHOLD``). 
+ + """ + o3_cube = cubes.extract_cube( + iris.Constraint(name="mole_fraction_of_ozone_in_air") + ) + ps_cube = cubes.extract_cube( + iris.Constraint(name="surface_air_pressure") + ) + + # If o3 is given on hybrid pressure levels (e.g., from Table AERmon), + # interpolate it to regular pressure levels + if len(o3_cube.coord_dims("air_pressure")) > 1: + o3_cube = interpolate_hybrid_plevs(o3_cube) + + # To support zonal mean o3 (e.g., from Table AERmonZ), add longitude + # coordinate and collapsed ps cube if necessary to ensure that they + # have correct shapes + if not o3_cube.coords("longitude"): + o3_cube = add_longitude_coord(o3_cube) + ps_cube = ps_cube.collapsed("longitude", iris.analysis.MEAN) + ps_cube.remove_coord("longitude") + ps_cube = add_longitude_coord(ps_cube) + + # Mask O3 mole fraction using the given threshold + o3_cube.convert_units("1e-9") + mask = o3_cube.lazy_data() >= STRATOSPHERIC_O3_THRESHOLD + mask |= da.ma.getmaskarray(o3_cube.lazy_data()) + o3_cube.data = da.ma.masked_array(o3_cube.lazy_data(), mask=mask) + + # Use derivation function of toz to calculate troz using the masked o3 + # cube and the ps cube + cubes = iris.cube.CubeList([o3_cube, ps_cube]) + return Toz.calculate(cubes) diff --git a/esmvalcore/preprocessor/_derive/uajet.py b/esmvalcore/preprocessor/_derive/uajet.py index 677736e433..cd23cdbbc0 100644 --- a/esmvalcore/preprocessor/_derive/uajet.py +++ b/esmvalcore/preprocessor/_derive/uajet.py @@ -17,23 +17,25 @@ class DerivedVariable(DerivedVariableBase): @staticmethod def required(project): """Declare the variables needed for derivation.""" - required = [{'short_name': 'ua'}] + required = [{"short_name": "ua"}] return required @staticmethod def calculate(cubes): """Compute latitude of maximum meridional wind speed.""" # Load cube, extract correct region and perform zonal mean - ua_cube = cubes.extract_cube(iris.Constraint(name='eastward_wind')) - ua_cube = ua_cube.interpolate([('air_pressure', PLEV)], - scheme=iris.analysis.Linear()) + ua_cube = cubes.extract_cube(iris.Constraint(name="eastward_wind")) + ua_cube = ua_cube.interpolate( + [("air_pressure", PLEV)], scheme=iris.analysis.Linear() + ) ua_cube = ua_cube.extract( - iris.Constraint(latitude=lambda cell: LAT[0] <= cell <= LAT[1])) - ua_cube = ua_cube.collapsed('longitude', iris.analysis.MEAN) + iris.Constraint(latitude=lambda cell: LAT[0] <= cell <= LAT[1]) + ) + ua_cube = ua_cube.collapsed("longitude", iris.analysis.MEAN) # Calculate maximum jet position uajet_vals = [] - for time_slice in ua_cube.slices(['latitude']): + for time_slice in ua_cube.slices(["latitude"]): ua_data = time_slice.data # Get maximum ua and corresponding index @@ -42,20 +44,20 @@ def calculate(cubes): # Perform 2nd degree polynomial fit to get maximum jet position x_vals = ua_data[slc] - y_vals = time_slice.coord('latitude').points[slc] + y_vals = time_slice.coord("latitude").points[slc] polyfit = np.polyfit(x_vals, y_vals, 2) polynom = np.poly1d(polyfit) uajet_vals.append(polynom(np.max(ua_data))) - uajet_cube = iris.cube.Cube(uajet_vals, - units=cf_units.Unit('degrees_north'), - dim_coords_and_dims=[ - (ua_cube.coord('time'), 0) - ], - attributes={ - 'plev': PLEV, - 'lat_range_0': LAT[0], - 'lat_range_1': LAT[1] - }) + uajet_cube = iris.cube.Cube( + uajet_vals, + units=cf_units.Unit("degrees_north"), + dim_coords_and_dims=[(ua_cube.coord("time"), 0)], + attributes={ + "plev": PLEV, + "lat_range_0": LAT[0], + "lat_range_1": LAT[1], + }, + ) return uajet_cube diff --git a/esmvalcore/preprocessor/_derive/xch4.py 
b/esmvalcore/preprocessor/_derive/xch4.py index e26d62b601..80285e00cd 100644 --- a/esmvalcore/preprocessor/_derive/xch4.py +++ b/esmvalcore/preprocessor/_derive/xch4.py @@ -13,10 +13,10 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - {'short_name': 'ch4'}, - {'short_name': 'hus'}, - {'short_name': 'zg'}, - {'short_name': 'ps'}, + {"short_name": "ch4"}, + {"short_name": "hus"}, + {"short_name": "zg"}, + {"short_name": "ps"}, ] return required @@ -24,13 +24,14 @@ def required(project): def calculate(cubes): """Calculate the column-averaged atmospheric CH4 [1e-9].""" ch4_cube = cubes.extract_cube( - Constraint(name='mole_fraction_of_methane_in_air')) - hus_cube = cubes.extract_cube(Constraint(name='specific_humidity')) - zg_cube = cubes.extract_cube(Constraint(name='geopotential_height')) - ps_cube = cubes.extract_cube(Constraint(name='surface_air_pressure')) + Constraint(name="mole_fraction_of_methane_in_air") + ) + hus_cube = cubes.extract_cube(Constraint(name="specific_humidity")) + zg_cube = cubes.extract_cube(Constraint(name="geopotential_height")) + ps_cube = cubes.extract_cube(Constraint(name="surface_air_pressure")) # Column-averaged CH4 xch4_cube = column_average(ch4_cube, hus_cube, zg_cube, ps_cube) - xch4_cube.convert_units('1') + xch4_cube.convert_units("1") return xch4_cube diff --git a/esmvalcore/preprocessor/_derive/xco2.py b/esmvalcore/preprocessor/_derive/xco2.py index d341002c7f..37c7ca3bb6 100644 --- a/esmvalcore/preprocessor/_derive/xco2.py +++ b/esmvalcore/preprocessor/_derive/xco2.py @@ -13,10 +13,10 @@ class DerivedVariable(DerivedVariableBase): def required(project): """Declare the variables needed for derivation.""" required = [ - {'short_name': 'co2'}, - {'short_name': 'hus'}, - {'short_name': 'zg'}, - {'short_name': 'ps'}, + {"short_name": "co2"}, + {"short_name": "hus"}, + {"short_name": "zg"}, + {"short_name": "ps"}, ] return required @@ -24,13 +24,14 @@ def required(project): def calculate(cubes): """Calculate the column-averaged atmospheric CO2 [1e-6].""" co2_cube = cubes.extract_cube( - Constraint(name='mole_fraction_of_carbon_dioxide_in_air')) - hus_cube = cubes.extract_cube(Constraint(name='specific_humidity')) - zg_cube = cubes.extract_cube(Constraint(name='geopotential_height')) - ps_cube = cubes.extract_cube(Constraint(name='surface_air_pressure')) + Constraint(name="mole_fraction_of_carbon_dioxide_in_air") + ) + hus_cube = cubes.extract_cube(Constraint(name="specific_humidity")) + zg_cube = cubes.extract_cube(Constraint(name="geopotential_height")) + ps_cube = cubes.extract_cube(Constraint(name="surface_air_pressure")) # Column-averaged CO2 xco2_cube = column_average(co2_cube, hus_cube, zg_cube, ps_cube) - xco2_cube.convert_units('1') + xco2_cube.convert_units("1") return xco2_cube diff --git a/esmvalcore/preprocessor/_detrend.py b/esmvalcore/preprocessor/_detrend.py index 5bc0ce6fc8..d7854ab162 100644 --- a/esmvalcore/preprocessor/_detrend.py +++ b/esmvalcore/preprocessor/_detrend.py @@ -1,4 +1,5 @@ """Preprocessor functions that remove trends from the data.""" + import logging import dask.array as da @@ -7,7 +8,7 @@ logger = logging.getLogger(__name__) -def detrend(cube, dimension='time', method='linear'): +def detrend(cube, dimension="time", method="linear"): """ Detrend data along a given dimension. 
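+    For example, ``detrend(cube, "time", "linear")`` subtracts a
+    least-squares linear fit along the time dimension and returns the
+    residuals as a new cube.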
@@ -33,6 +34,6 @@ def detrend(cube, dimension='time', method='linear'): axis=axis, arr=cube.lazy_data(), type=method, - shape=(cube.shape[axis],) + shape=(cube.shape[axis],), ) return cube.copy(detrended) diff --git a/esmvalcore/preprocessor/_io.py b/esmvalcore/preprocessor/_io.py index 900d026943..d30255ec13 100644 --- a/esmvalcore/preprocessor/_io.py +++ b/esmvalcore/preprocessor/_io.py @@ -1,4 +1,5 @@ """Functions for loading and saving cubes.""" + from __future__ import annotations import copy @@ -19,20 +20,21 @@ from iris.cube import CubeList from esmvalcore.cmor.check import CheckLevels +from esmvalcore.esgf.facets import FACETS from esmvalcore.iris_helpers import merge_cube_attributes from .._task import write_ncl_settings logger = logging.getLogger(__name__) -GLOBAL_FILL_VALUE = 1e+20 +GLOBAL_FILL_VALUE = 1e20 DATASET_KEYS = { - 'mip', + "mip", } VARIABLE_KEYS = { - 'reference_dataset', - 'alternative_dataset', + "reference_dataset", + "alternative_dataset", } iris.FUTURE.save_split_attrs = True @@ -50,17 +52,18 @@ def _get_attr_from_field_coord(ncfield, coord_name, attr): def _load_callback(raw_cube, field, _): """Use this callback to fix anything Iris tries to break.""" # Remove attributes that cause issues with merging and concatenation - _delete_attributes(raw_cube, - ('creation_date', 'tracking_id', 'history', 'comment')) + _delete_attributes( + raw_cube, ("creation_date", "tracking_id", "history", "comment") + ) for coord in raw_cube.coords(): # Iris chooses to change longitude and latitude units to degrees # regardless of value in file, so reinstating file value - if coord.standard_name in ['longitude', 'latitude']: - units = _get_attr_from_field_coord(field, coord.var_name, 'units') + if coord.standard_name in ["longitude", "latitude"]: + units = _get_attr_from_field_coord(field, coord.var_name, "units") if units is not None: coord.units = units # CMOR sometimes adds a history to the coordinates. 
- _delete_attributes(coord, ('history', )) + _delete_attributes(coord, ("history",)) def _delete_attributes(iris_object, atts): @@ -105,26 +108,32 @@ def load( ignore_warnings = list(ignore_warnings) # Default warnings ignored for every dataset - ignore_warnings.append({ - 'message': "Missing CF-netCDF measure variable .*", - 'category': UserWarning, - 'module': 'iris', - }) - ignore_warnings.append({ - 'message': "Ignoring netCDF variable '.*' invalid units '.*'", - 'category': UserWarning, - 'module': 'iris', - }) # iris < 3.8 - ignore_warnings.append({ - 'message': "Ignoring invalid units .* on netCDF variable .*", - 'category': UserWarning, - 'module': 'iris', - }) # iris >= 3.8 + ignore_warnings.append( + { + "message": "Missing CF-netCDF measure variable .*", + "category": UserWarning, + "module": "iris", + } + ) + ignore_warnings.append( + { + "message": "Ignoring netCDF variable '.*' invalid units '.*'", + "category": UserWarning, + "module": "iris", + } + ) # iris < 3.8 + ignore_warnings.append( + { + "message": "Ignoring invalid units .* on netCDF variable .*", + "category": UserWarning, + "module": "iris", + } + ) # iris >= 3.8 # Filter warnings with catch_warnings(): for warning_kwargs in ignore_warnings: - warning_kwargs.setdefault('action', 'ignore') + warning_kwargs.setdefault("action", "ignore") filterwarnings(**warning_kwargs) # Suppress UDUNITS-2 error messages that cannot be ignored with # warnings.filterwarnings @@ -134,10 +143,10 @@ def load( logger.debug("Done with loading %s", file) if not raw_cubes: - raise ValueError(f'Can not load cubes from {file}') + raise ValueError(f"Can not load cubes from {file}") for cube in raw_cubes: - cube.attributes['source_file'] = str(file) + cube.attributes["source_file"] = str(file) return raw_cubes @@ -145,18 +154,19 @@ def load( def _concatenate_cubes(cubes, check_level): """Concatenate cubes according to the check_level.""" kwargs = { - 'check_aux_coords': True, - 'check_cell_measures': True, - 'check_ancils': True, - 'check_derived_coords': True + "check_aux_coords": True, + "check_cell_measures": True, + "check_ancils": True, + "check_derived_coords": True, } if check_level > CheckLevels.DEFAULT: kwargs = dict.fromkeys(kwargs, False) logger.debug( - 'Concatenation will be performed without checking ' - 'auxiliary coordinates, cell measures, ancillaries ' - 'and derived coordinates present in the cubes.', ) + "Concatenation will be performed without checking " + "auxiliary coordinates, cell measures, ancillaries " + "and derived coordinates present in the cubes.", + ) concatenated = iris.cube.CubeList(cubes).concatenate(**kwargs) @@ -164,7 +174,6 @@ def _concatenate_cubes(cubes, check_level): class _TimesHelper: - def __init__(self, time): self.times = time.core_points() self.units = str(time.units) @@ -224,7 +233,10 @@ def from_cube(cls, cube): # current cube ends after new one, just forget new cube logger.debug( "Discarding %s because the time range " - "is already covered by %s", new_cube.cube, current_cube.cube) + "is already covered by %s", + new_cube.cube, + current_cube.cube, + ) continue if new_cube.start == current_cube.start: # new cube completely covers current one @@ -232,20 +244,27 @@ def from_cube(cls, cube): current_cube = new_cube logger.debug( "Discarding %s because the time range is covered by %s", - current_cube.cube, new_cube.cube) + current_cube.cube, + new_cube.cube, + ) continue # new cube ends after current one, # use all of new cube, and shorten current cube to # eliminate overlap with new cube - 
cut_index = cftime.time2index( - new_cube.start, - _TimesHelper(current_cube.times), - current_cube.times.units.calendar, - select="before", - ) + 1 - logger.debug("Using %s shortened to %s due to overlap", - current_cube.cube, - current_cube.times.cell(cut_index).point) + cut_index = ( + cftime.time2index( + new_cube.start, + _TimesHelper(current_cube.times), + current_cube.times.units.calendar, + select="before", + ) + + 1 + ) + logger.debug( + "Using %s shortened to %s due to overlap", + current_cube.cube, + current_cube.times.cell(cut_index).point, + ) new_cubes.append(current_cube.cube[:cut_index]) current_cube = new_cube @@ -257,20 +276,23 @@ def from_cube(cls, cube): def _fix_calendars(cubes): """Check and homogenise calendars, if possible.""" - calendars = [cube.coord('time').units.calendar for cube in cubes] + calendars = [cube.coord("time").units.calendar for cube in cubes] unique_calendars = np.unique(calendars) calendar_ocurrences = np.array( - [calendars.count(calendar) for calendar in unique_calendars]) + [calendars.count(calendar) for calendar in unique_calendars] + ) calendar_index = int( - np.argwhere(calendar_ocurrences == calendar_ocurrences.max())) + np.argwhere(calendar_ocurrences == calendar_ocurrences.max()) + ) for cube in cubes: - time_coord = cube.coord('time') + time_coord = cube.coord("time") old_calendar = time_coord.units.calendar if old_calendar != unique_calendars[calendar_index]: new_unit = time_coord.units.change_calendar( - unique_calendars[calendar_index]) + unique_calendars[calendar_index] + ) time_coord.units = new_unit @@ -281,14 +303,14 @@ def _get_concatenation_error(cubes): iris.cube.CubeList(cubes).concatenate_cube() except iris.exceptions.ConcatenateError as exc: msg = str(exc) - logger.error('Can not concatenate cubes into a single one: %s', msg) - logger.error('Resulting cubes:') + logger.error("Can not concatenate cubes into a single one: %s", msg) + logger.error("Resulting cubes:") for cube in cubes: logger.error(cube) time = cube.coord("time") - logger.error('From %s to %s', time.cell(0), time.cell(-1)) + logger.error("From %s to %s", time.cell(0), time.cell(-1)) - raise ValueError(f'Can not concatenate cubes: {msg}') + raise ValueError(f"Can not concatenate cubes: {msg}") def _sort_cubes_by_time(cubes): @@ -296,16 +318,49 @@ def _sort_cubes_by_time(cubes): try: cubes = sorted(cubes, key=lambda c: c.coord("time").cell(0).point) except iris.exceptions.CoordinateNotFoundError as exc: - msg = "One or more cubes {} are missing".format(cubes) + \ - " time coordinate: {}".format(str(exc)) + msg = "One or more cubes {} are missing".format( + cubes + ) + " time coordinate: {}".format(str(exc)) raise ValueError(msg) except TypeError as error: - msg = ("Cubes cannot be sorted " - f"due to differing time units: {str(error)}") + msg = ( + "Cubes cannot be sorted " + f"due to differing time units: {str(error)}" + ) raise TypeError(msg) from error return cubes +def _concatenate_cubes_by_experiment( + cubes: list[iris.cube.Cube], +) -> list[iris.cube.Cube]: + """Concatenate cubes by experiment. + + This ensures overlapping (branching) experiments are handled correctly. 
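+    For example, a ``historical`` cube and an overlapping ``ssp585`` cube
+    are first concatenated separately by experiment, so the subsequent
+    time-based concatenation only has to resolve the overlap between the
+    two resulting cubes.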
+ """ + # get the possible facet names in CMIP3, 5, 6 for exp + # currently these are 'experiment', 'experiment_id' + exp_facet_names = { + project["exp"] for project in FACETS.values() if "exp" in project + } + + def get_exp(cube: iris.cube.Cube) -> str: + for key in exp_facet_names: + if key in cube.attributes: + return cube.attributes[key] + return "" + + experiments = {get_exp(cube) for cube in cubes} + if len(experiments) > 1: + # first do experiment-wise concatenation, then time-based + cubes = [ + concatenate([cube for cube in cubes if get_exp(cube) == exp]) + for exp in experiments + ] + + return cubes + + def concatenate(cubes, check_level=CheckLevels.DEFAULT): """Concatenate all cubes after fixing metadata. @@ -331,6 +386,8 @@ def concatenate(cubes, check_level=CheckLevels.DEFAULT): if len(cubes) == 1: return cubes[0] + cubes = _concatenate_cubes_by_experiment(cubes) + merge_cube_attributes(cubes) cubes = _sort_cubes_by_time(cubes) _fix_calendars(cubes) @@ -345,12 +402,9 @@ def concatenate(cubes, check_level=CheckLevels.DEFAULT): return result -def save(cubes, - filename, - optimize_access='', - compress=False, - alias='', - **kwargs): +def save( + cubes, filename, optimize_access="", compress=False, alias="", **kwargs +): """Save iris cubes to file. Parameters @@ -390,59 +444,71 @@ def save(cubes, raise ValueError(f"Cannot save empty cubes '{cubes}'") # Rename some arguments - kwargs['target'] = filename - kwargs['zlib'] = compress + kwargs["target"] = filename + kwargs["zlib"] = compress dirname = os.path.dirname(filename) if not os.path.exists(dirname): os.makedirs(dirname) - if (os.path.exists(filename) - and all(cube.has_lazy_data() for cube in cubes)): + if os.path.exists(filename) and all( + cube.has_lazy_data() for cube in cubes + ): logger.debug( "Not saving cubes %s to %s to avoid data loss. 
" - "The cube is probably unchanged.", cubes, filename) + "The cube is probably unchanged.", + cubes, + filename, + ) return filename for cube in cubes: - logger.debug("Saving cube:\n%s\nwith %s data to %s", cube, - "lazy" if cube.has_lazy_data() else "realized", filename) + logger.debug( + "Saving cube:\n%s\nwith %s data to %s", + cube, + "lazy" if cube.has_lazy_data() else "realized", + filename, + ) if optimize_access: cube = cubes[0] - if optimize_access == 'map': + if optimize_access == "map": dims = set( - cube.coord_dims('latitude') + cube.coord_dims('longitude')) - elif optimize_access == 'timeseries': - dims = set(cube.coord_dims('time')) + cube.coord_dims("latitude") + cube.coord_dims("longitude") + ) + elif optimize_access == "timeseries": + dims = set(cube.coord_dims("time")) else: dims = tuple() - for coord_dims in (cube.coord_dims(dimension) - for dimension in optimize_access.split(' ')): + for coord_dims in ( + cube.coord_dims(dimension) + for dimension in optimize_access.split(" ") + ): dims += coord_dims dims = set(dims) - kwargs['chunksizes'] = tuple( + kwargs["chunksizes"] = tuple( length if index in dims else 1 - for index, length in enumerate(cube.shape)) + for index, length in enumerate(cube.shape) + ) - kwargs['fill_value'] = GLOBAL_FILL_VALUE + kwargs["fill_value"] = GLOBAL_FILL_VALUE if alias: - for cube in cubes: - logger.debug('Changing var_name from %s to %s', cube.var_name, - alias) + logger.debug( + "Changing var_name from %s to %s", cube.var_name, alias + ) cube.var_name = alias # Ignore some warnings when saving with catch_warnings(): filterwarnings( - 'ignore', + "ignore", message=( ".* is being added as CF data variable attribute, but .* " "should only be a CF global attribute" ), category=UserWarning, - module='iris', + module="iris", ) iris.save(cubes, **kwargs) @@ -456,7 +522,7 @@ def _get_debug_filename(filename, step): num = int(sorted(os.listdir(dirname)).pop()[:2]) + 1 else: num = 0 - filename = os.path.join(dirname, '{:02}_{}.nc'.format(num, step)) + filename = os.path.join(dirname, "{:02}_{}.nc".format(num, step)) return filename @@ -465,8 +531,8 @@ def _sort_products(products): return sorted( products, key=lambda p: ( - p.attributes.get('recipe_dataset_index', 1e6), - p.attributes.get('dataset', ''), + p.attributes.get("recipe_dataset_index", 1e6), + p.attributes.get("dataset", ""), ), ) @@ -474,21 +540,22 @@ def _sort_products(products): def write_metadata(products, write_ncl=False): """Write product metadata to file.""" output_files = [] - for output_dir, prods in groupby(products, - lambda p: os.path.dirname(p.filename)): + for output_dir, prods in groupby( + products, lambda p: os.path.dirname(p.filename) + ): sorted_products = _sort_products(prods) metadata = {} for product in sorted_products: - if isinstance(product.attributes.get('exp'), (list, tuple)): + if isinstance(product.attributes.get("exp"), (list, tuple)): product.attributes = dict(product.attributes) - product.attributes['exp'] = '-'.join(product.attributes['exp']) - if 'original_short_name' in product.attributes: - del product.attributes['original_short_name'] + product.attributes["exp"] = "-".join(product.attributes["exp"]) + if "original_short_name" in product.attributes: + del product.attributes["original_short_name"] metadata[product.filename] = product.attributes - output_filename = os.path.join(output_dir, 'metadata.yml') + output_filename = os.path.join(output_dir, "metadata.yml") output_files.append(output_filename) - with open(output_filename, 'w', encoding='utf-8') as 
file: + with open(output_filename, "w", encoding="utf-8") as file: yaml.safe_dump(metadata, file) if write_ncl: output_files.append(_write_ncl_metadata(output_dir, metadata)) @@ -500,28 +567,31 @@ def _write_ncl_metadata(output_dir, metadata): """Write NCL metadata files to output_dir.""" variables = [copy.deepcopy(v) for v in metadata.values()] - info = {'input_file_info': variables} + info = {"input_file_info": variables} # Split input_file_info into dataset and variable properties # dataset keys and keys with non-identical values will be stored # in dataset_info, the rest in variable_info variable_info = {} - info['variable_info'] = [variable_info] - info['dataset_info'] = [] + info["variable_info"] = [variable_info] + info["dataset_info"] = [] for variable in variables: dataset_info = {} - info['dataset_info'].append(dataset_info) + info["dataset_info"].append(dataset_info) for key in variable: - dataset_specific = any(variable[key] != var.get(key, object()) - for var in variables) - if ((dataset_specific or key in DATASET_KEYS) - and key not in VARIABLE_KEYS): + dataset_specific = any( + variable[key] != var.get(key, object()) for var in variables + ) + if ( + dataset_specific or key in DATASET_KEYS + ) and key not in VARIABLE_KEYS: dataset_info[key] = variable[key] else: variable_info[key] = variable[key] - filename = os.path.join(output_dir, - variable_info['short_name'] + '_info.ncl') + filename = os.path.join( + output_dir, variable_info["short_name"] + "_info.ncl" + ) write_ncl_settings(info, filename) return filename diff --git a/esmvalcore/preprocessor/_mapping.py b/esmvalcore/preprocessor/_mapping.py index 28d1fefb83..ccbeed2816 100644 --- a/esmvalcore/preprocessor/_mapping.py +++ b/esmvalcore/preprocessor/_mapping.py @@ -14,8 +14,9 @@ def _is_single_item(testee): Return whether this is a single item, rather than an iterable. We count string types as 'single', also. """ - return (isinstance(testee, str) - or not isinstance(testee, collections.abc.Iterable)) + return isinstance(testee, str) or not isinstance( + testee, collections.abc.Iterable + ) def _as_list_of_coords(cube, names_or_coords): @@ -29,9 +30,11 @@ def _as_list_of_coords(cube, names_or_coords): coords.append(cube.coord(name_or_coord)) else: # Don't know how to handle this type - msg = ("Don't know how to handle coordinate of type %s. " - "Ensure all coordinates are of type str " - "or iris.coords.Coord.") % (type(name_or_coord), ) + msg = ( + "Don't know how to handle coordinate of type %s. " + "Ensure all coordinates are of type str " + "or iris.coords.Coord." + ) % (type(name_or_coord),) raise TypeError(msg) return coords @@ -41,8 +44,10 @@ def ref_to_dims_index_as_coordinate(cube, ref): coord = _as_list_of_coords(cube, ref)[0] dims = cube.coord_dims(coord) if not dims: - msg = ('Requested an iterator over a coordinate ({}) ' - 'which does not describe a dimension.') + msg = ( + "Requested an iterator over a coordinate ({}) " + "which does not describe a dimension." 
+ ) msg = msg.format(coord.name()) raise ValueError(msg) return dims @@ -53,11 +58,14 @@ def ref_to_dims_index_as_index(cube, ref): try: dim = int(ref) except (ValueError, TypeError): - raise ValueError('{} Incompatible type {} for ' - 'slicing'.format(ref, type(ref))) + raise ValueError( + "{} Incompatible type {} for slicing".format(ref, type(ref)) + ) if dim < 0 or dim > cube.ndim: - msg = ('Requested an iterator over a dimension ({}) ' - 'which does not exist.'.format(dim)) + msg = ( + "Requested an iterator over a dimension ({}) " + "which does not exist.".format(dim) + ) raise ValueError(msg) dims = [dim] return dims @@ -161,9 +169,14 @@ def index_iterator(dims_to_slice, shape): for index_tuple in np.ndindex(*dims): src_ind = tuple( slice(None, None) if n in dims_to_slice else i - for n, i in enumerate(index_tuple)) - dst_ind = tuple(i for n, i in enumerate(index_tuple) - if n not in dims_to_slice) + dst_slices + for n, i in enumerate(index_tuple) + ) + dst_ind = ( + tuple( + i for n, i in enumerate(index_tuple) if n not in dims_to_slice + ) + + dst_slices + ) yield src_ind, dst_ind diff --git a/esmvalcore/preprocessor/_mask.py b/esmvalcore/preprocessor/_mask.py index cdedcc6391..1896475704 100644 --- a/esmvalcore/preprocessor/_mask.py +++ b/esmvalcore/preprocessor/_mask.py @@ -4,68 +4,86 @@ masking with ancillary variables, masking with Natural Earth shapefiles (land or ocean), masking on thresholds, missing values masking. """ + from __future__ import annotations import logging import os +from collections.abc import Iterable +from typing import Literal, Optional import cartopy.io.shapereader as shpreader import dask.array as da import iris +import iris.util import numpy as np import shapely.vectorized as shp_vect from iris.analysis import Aggregator +from iris.cube import Cube from iris.util import rolling_window +from esmvalcore.preprocessor._shared import get_array_module + from ._supplementary_vars import register_supplementaries logger = logging.getLogger(__name__) -def _get_fx_mask(fx_data, fx_option, mask_type): +def _get_fx_mask( + fx_data: np.ndarray | da.Array, + fx_option: Literal["land", "sea", "landsea", "ice"], + mask_type: Literal["sftlf", "sftof", "sftgif"], +) -> np.ndarray | da.Array: """Build a percentage-thresholded mask from an fx file.""" - inmask = da.zeros_like(fx_data, bool) - if mask_type == 'sftlf': - if fx_option == 'land': + inmask = np.zeros_like(fx_data, bool) # respects dask through dispatch + if mask_type == "sftlf": + if fx_option == "land": # Mask land out - inmask[fx_data > 50.] = True - elif fx_option == 'sea': + inmask[fx_data > 50.0] = True + elif fx_option == "sea": # Mask sea out - inmask[fx_data <= 50.] = True - elif mask_type == 'sftof': - if fx_option == 'land': + inmask[fx_data <= 50.0] = True + elif mask_type == "sftof": + if fx_option == "land": # Mask land out - inmask[fx_data < 50.] = True - elif fx_option == 'sea': + inmask[fx_data < 50.0] = True + elif fx_option == "sea": # Mask sea out - inmask[fx_data >= 50.] = True - elif mask_type == 'sftgif': - if fx_option == 'ice': + inmask[fx_data >= 50.0] = True + elif mask_type == "sftgif": + if fx_option == "ice": # Mask ice out - inmask[fx_data > 50.] = True - elif fx_option == 'landsea': + inmask[fx_data > 50.0] = True + elif fx_option == "landsea": # Mask landsea out - inmask[fx_data <= 50.] 
= True + inmask[fx_data <= 50.0] = True return inmask -def _apply_fx_mask(fx_mask, var_data): - """Apply the fx data extracted mask on the actual processed data.""" - # Apply mask across - old_mask = da.ma.getmaskarray(var_data) - mask = old_mask | fx_mask - var_data = da.ma.masked_array(var_data, mask=mask) - # maybe fill_value=1e+20 - - return var_data +def _apply_mask( + mask: np.ndarray | da.Array, + array: np.ndarray | da.Array, + dim_map: Optional[Iterable[int]] = None, +) -> np.ndarray | da.Array: + """Apply a (broadcasted) mask on an array.""" + npx = get_array_module(mask, array) + if dim_map is not None: + if isinstance(array, da.Array): + chunks = array.chunks + else: + chunks = None + mask = iris.util.broadcast_to_shape( + mask, array.shape, dim_map, chunks=chunks + ) + return npx.ma.masked_where(mask, array) @register_supplementaries( - variables=['sftlf', 'sftof'], - required='prefer_at_least_one', + variables=["sftlf", "sftof"], + required="prefer_at_least_one", ) -def mask_landsea(cube, mask_out): +def mask_landsea(cube: Cube, mask_out: Literal["land", "sea"]) -> Cube: """Mask out either land mass or sea (oceans, seas and lakes). It uses dedicated ancillary variables (sftlf or sftof) or, @@ -78,16 +96,15 @@ Parameters ---------- - cube: iris.cube.Cube - data cube to be masked. If the cube has an + cube: + Data cube to be masked. If the cube has an :class:`iris.coords.AncillaryVariable` with standard name ``'land_area_fraction'`` or ``'sea_area_fraction'`` that will be used. If both are present, only the 'land_area_fraction' will be used. If the ancillary variable is not available, the mask will be calculated from Natural Earth shapefiles. - - mask_out: str - either "land" to mask out land mass or "sea" to mask out seas. + mask_out: + Either ``'land'`` to mask out land mass or ``'sea'`` to mask out seas. Returns ------- @@ -107,49 +124,54 @@ # ne_10m_land is fast; ne_10m_ocean is very slow shapefiles = { - 'land': os.path.join(cwd, 'ne_masks/ne_10m_land.shp'), - 'sea': os.path.join(cwd, 'ne_masks/ne_50m_ocean.shp') + "land": os.path.join(cwd, "ne_masks/ne_10m_land.shp"), + "sea": os.path.join(cwd, "ne_masks/ne_50m_ocean.shp"), } # preserve importance order: try sftlf first then sftof - fx_cube = None + ancillary_var = None try: - fx_cube = cube.ancillary_variable('land_area_fraction') + ancillary_var = cube.ancillary_variable("land_area_fraction") except iris.exceptions.AncillaryVariableNotFoundError: try: - fx_cube = cube.ancillary_variable('sea_area_fraction') + ancillary_var = cube.ancillary_variable("sea_area_fraction") except iris.exceptions.AncillaryVariableNotFoundError: - logger.debug('Ancillary variables land/sea area fraction not ' - 'found in cube. Check fx_file availability.') - - if fx_cube: - fx_cube_data = da.broadcast_to(fx_cube.core_data(), cube.shape) - landsea_mask = _get_fx_mask(fx_cube_data, mask_out, - fx_cube.var_name) - cube.data = _apply_fx_mask(landsea_mask, cube.core_data()) - logger.debug("Applying land-sea mask: %s", fx_cube.var_name) + logger.debug( + "Ancillary variables land/sea area fraction not found in " + "cube. Check fx_file availability."
+ ) + + if ancillary_var: + landsea_mask = _get_fx_mask( + ancillary_var.core_data(), mask_out, ancillary_var.var_name + ) + cube.data = _apply_mask( + landsea_mask, + cube.core_data(), + cube.ancillary_variable_dims(ancillary_var), + ) + logger.debug("Applying land-sea mask: %s", ancillary_var.var_name) else: - if cube.coord('longitude').points.ndim < 2: - cube = _mask_with_shp(cube, shapefiles[mask_out], [ - 0, - ]) + if cube.coord("longitude").points.ndim < 2: + cube = _mask_with_shp(cube, shapefiles[mask_out], [0]) logger.debug( "Applying land-sea mask from Natural Earth shapefile: \n%s", shapefiles[mask_out], ) else: - msg = ("Use of shapefiles with irregular grids not yet " - "implemented, land-sea mask not applied.") - raise ValueError(msg) + raise ValueError( + "Use of shapefiles with irregular grids not yet implemented, " + "land-sea mask not applied." + ) return cube @register_supplementaries( - variables=['sftgif'], - required='require_at_least_one', + variables=["sftgif"], + required="require_at_least_one", ) -def mask_landseaice(cube, mask_out): +def mask_landseaice(cube: Cube, mask_out: Literal["landsea", "ice"]) -> Cube: """Mask out either landsea (combined) or ice. Function that masks out either landsea (land and seas) or ice (Antarctica, @@ -159,13 +181,13 @@ def mask_landseaice(cube, mask_out): Parameters ---------- - cube: iris.cube.Cube - data cube to be masked. It should have an + cube: + Data cube to be masked. It should have an :class:`iris.coords.AncillaryVariable` with standard name ``'land_ice_area_fraction'``. - mask_out: str - either "landsea" to mask out landsea or "ice" to mask out ice. + Either ``'landsea'`` to mask out land and oceans or ``'ice'`` to mask + out ice. Returns ------- @@ -178,20 +200,26 @@ def mask_landseaice(cube, mask_out): Error raised if landsea-ice mask not found as an ancillary variable. """ # sftgif is the only one so far but users can set others - fx_cube = None + ancillary_var = None try: - fx_cube = cube.ancillary_variable('land_ice_area_fraction') + ancillary_var = cube.ancillary_variable("land_ice_area_fraction") except iris.exceptions.AncillaryVariableNotFoundError: - logger.debug('Ancillary variable land ice area fraction ' - 'not found in cube. Check fx_file availability.') - if fx_cube: - fx_cube_data = da.broadcast_to(fx_cube.core_data(), cube.shape) - landice_mask = _get_fx_mask(fx_cube_data, mask_out, fx_cube.var_name) - cube.data = _apply_fx_mask(landice_mask, cube.core_data()) + logger.debug( + "Ancillary variable land ice area fraction not found in cube. " + "Check fx_file availability." + ) + if ancillary_var: + landseaice_mask = _get_fx_mask( + ancillary_var.core_data(), mask_out, ancillary_var.var_name + ) + cube.data = _apply_mask( + landseaice_mask, + cube.core_data(), + cube.ancillary_variable_dims(ancillary_var), + ) logger.debug("Applying landsea-ice mask: sftgif") else: - msg = "Landsea-ice mask could not be found. Stopping. " - raise ValueError(msg) + raise ValueError("Landsea-ice mask could not be found. 
Stopping.") return cube @@ -225,26 +253,32 @@ def mask_glaciated(cube, mask_out: str = "glaciated"): cwd = os.path.dirname(__file__) # read glaciated shapefile shapefiles = { - 'glaciated': os.path.join(cwd, 'ne_masks/ne_10m_glaciated_areas.shp'), + "glaciated": os.path.join(cwd, "ne_masks/ne_10m_glaciated_areas.shp"), } - if mask_out == 'glaciated': - cube = _mask_with_shp(cube, shapefiles[mask_out], [ - 1859, - 1860, - 1861, - 1857, - 1858, - 1716, - 1587, - 1662, - 1578, - 1606, - ]) + if mask_out == "glaciated": + cube = _mask_with_shp( + cube, + shapefiles[mask_out], + [ + 1859, + 1860, + 1861, + 1857, + 1858, + 1716, + 1587, + 1662, + 1578, + 1606, + ], + ) logger.debug( "Applying glaciated areas mask from Natural Earth" - " shapefile: \n%s", shapefiles[mask_out]) + " shapefile: \n%s", + shapefiles[mask_out], + ) else: - msg = (f"Invalid argument mask_out: {mask_out}") + msg = f"Invalid argument mask_out: {mask_out}" raise ValueError(msg) return cube @@ -284,26 +318,29 @@ def _mask_with_shp(cube, shapefilename, region_indices=None): # Create a set of x,y points from the cube # 1D regular grids - if cube.coord('longitude').points.ndim < 2: - x_p, y_p = da.meshgrid( - cube.coord(axis='X').points, - cube.coord(axis='Y').points) + if cube.coord("longitude").points.ndim < 2: + x_p, y_p = np.meshgrid( + cube.coord(axis="X").points, + cube.coord(axis="Y").points, + ) # 2D irregular grids; spit an error for now else: - msg = ("No fx-files found (sftlf or sftof)!" - "2D grids are suboptimally masked with " - "Natural Earth masks. Exiting.") + msg = ( + "No fx-files found (sftlf or sftof)!" + "2D grids are suboptimally masked with " + "Natural Earth masks. Exiting." + ) raise ValueError(msg) # Wrap around longitude coordinate to match data - x_p_180 = da.where(x_p >= 180., x_p - 360., x_p) + x_p_180 = np.where(x_p >= 180.0, x_p - 360.0, x_p) # the NE mask has no points at x = -180 and y = +/-90 # so we will fool it and apply the mask at (-179, -89, 89) instead - x_p_180 = da.where(x_p_180 == -180., x_p_180 + 1., x_p_180) + x_p_180 = np.where(x_p_180 == -180.0, x_p_180 + 1.0, x_p_180) - y_p_0 = da.where(y_p == -90., y_p + 1., y_p) - y_p_90 = da.where(y_p_0 == 90., y_p_0 - 1., y_p_0) + y_p_0 = np.where(y_p == -90.0, y_p + 1.0, y_p) + y_p_90 = np.where(y_p_0 == 90.0, y_p_0 - 1.0, y_p_0) mask = None for region in regions: @@ -313,13 +350,14 @@ def _mask_with_shp(cube, shapefilename, region_indices=None): else: mask |= shp_vect.contains(region, x_p_180, y_p_90) - mask = da.array(mask) - iris.util.broadcast_to_shape(mask, cube.shape, cube.coord_dims('latitude') - + cube.coord_dims('longitude')) + if cube.has_lazy_data(): + mask = da.array(mask) - old_mask = da.ma.getmaskarray(cube.core_data()) - mask = old_mask | mask - cube.data = da.ma.masked_array(cube.core_data(), mask=mask) + cube.data = _apply_mask( + mask, + cube.core_data(), + cube.coord_dims("latitude") + cube.coord_dims("longitude"), + ) return cube @@ -413,8 +451,9 @@ def mask_above_threshold(cube, threshold): iris.cube.Cube thresholded cube. """ - cube.data = (da.ma.masked_where(cube.core_data() > threshold, - cube.core_data())) + cube.data = da.ma.masked_where( + cube.core_data() > threshold, cube.core_data() + ) return cube @@ -436,8 +475,9 @@ def mask_below_threshold(cube, threshold): iris.cube.Cube thresholded cube. 
""" - cube.data = (da.ma.masked_where(cube.core_data() < threshold, - cube.core_data())) + cube.data = da.ma.masked_where( + cube.core_data() < threshold, cube.core_data() + ) return cube @@ -494,7 +534,8 @@ def _get_shape(cubes): shapes = {cube.shape for cube in cubes} if len(shapes) > 1: raise ValueError( - f"Expected cubes with identical shapes, got shapes {shapes}") + f"Expected cubes with identical shapes, got shapes {shapes}" + ) return list(shapes)[0] @@ -565,7 +606,7 @@ def mask_multimodel(products): cubes = products shape = _get_shape(cubes) return _multimodel_mask_cubes(cubes, shape) - if all(type(p).__name__ == 'PreprocessorFile' for p in products): + if all(type(p).__name__ == "PreprocessorFile" for p in products): # Avoid circular input: https://stackoverflow.com/q/16964467 cubes = iris.cube.CubeList() for product in products: @@ -578,7 +619,8 @@ def mask_multimodel(products): raise TypeError( f"Input type for mask_multimodel not understood. Expected " f"iris.cube.Cube or esmvalcore.preprocessor.PreprocessorFile, " - f"got {product_types}") + f"got {product_types}" + ) def mask_fillvalues( @@ -622,8 +664,9 @@ def mask_fillvalues( NotImplementedError Implementation missing for data with higher dimensionality than 4. """ - array_module = da if any(c.has_lazy_data() for p in products - for c in p.cubes) else np + array_module = ( + da if any(c.has_lazy_data() for p in products for c in p.cubes) else np + ) combined_mask = None for product in products: @@ -688,7 +731,7 @@ def _get_fillvalues_mask( f"Fraction of missing values {threshold_fraction} should be " f"between 0 and 1.0" ) - nr_time_points = len(cube.coord('time').points) + nr_time_points = len(cube.coord("time").points) if time_window > nr_time_points: msg = "Time window (in time units) larger than total time span. Stop." raise ValueError(msg) @@ -699,7 +742,7 @@ def _get_fillvalues_mask( # Make an aggregator spell_count = Aggregator( - 'spell_count', + "spell_count", count_spells, lazy_func=count_spells, units_func=lambda units: 1, @@ -707,7 +750,7 @@ def _get_fillvalues_mask( # Calculate the statistic. counts_windowed_cube = cube.collapsed( - 'time', + "time", spell_count, threshold=min_value, spell_length=time_window, diff --git a/esmvalcore/preprocessor/_multimodel.py b/esmvalcore/preprocessor/_multimodel.py index d1e0d90e74..dcce65ebd3 100644 --- a/esmvalcore/preprocessor/_multimodel.py +++ b/esmvalcore/preprocessor/_multimodel.py @@ -7,6 +7,7 @@ generalized functions that operate on iris cubes. These wrappers support grouped execution by passing a groupby keyword. 
""" + from __future__ import annotations import logging @@ -39,12 +40,12 @@ logger = logging.getLogger(__name__) -CONCAT_DIM = 'multi-model' +CONCAT_DIM = "multi-model" def _get_consistent_time_unit(cubes): """Return cubes' time unit if consistent, standard calendar otherwise.""" - t_units = [cube.coord('time').units for cube in cubes] + t_units = [cube.coord("time").units for cube in cubes] if len(set(t_units)) == 1: return t_units[0] return cf_units.Unit("days since 1850-01-01", calendar="standard") @@ -68,7 +69,7 @@ def _unify_time_coordinates(cubes): for cube in cubes: # Extract date info from cube - coord = cube.coord('time') + coord = cube.coord("time") years = [p.year for p in coord.units.num2date(coord.points)] months = [p.month for p in coord.units.num2date(coord.points)] days = [p.day for p in coord.units.num2date(coord.points)] @@ -93,36 +94,38 @@ def _unify_time_coordinates(cubes): logger.warning( "Multimodel encountered (sub)daily data and inconsistent " "time units or calendars. Attempting to continue, but " - "might produce unexpected results.") + "might produce unexpected results." + ) else: raise ValueError( "Multimodel statistics preprocessor currently does not " - "support sub-daily data.") + "support sub-daily data." + ) # Update the cubes' time coordinate (both point values and the units!) - cube.coord('time').points = date2num(dates, t_unit, coord.dtype) - cube.coord('time').units = t_unit + cube.coord("time").points = date2num(dates, t_unit, coord.dtype) + cube.coord("time").units = t_unit _guess_time_bounds(cube) def _guess_time_bounds(cube): """Guess time bounds if possible.""" - cube.coord('time').bounds = None - if cube.coord('time').shape == (1,): + cube.coord("time").bounds = None + if cube.coord("time").shape == (1,): logger.debug( "Encountered scalar time coordinate in multi_model_statistics: " "cannot determine its bounds" ) else: - cube.coord('time').guess_bounds() + cube.coord("time").guess_bounds() def _time_coords_are_aligned(cubes): """Return `True` if time coords are aligned.""" - first_time_array = cubes[0].coord('time').points + first_time_array = cubes[0].coord("time").points for cube in cubes[1:]: - other_time_array = cube.coord('time').points + other_time_array = cube.coord("time").points if not np.array_equal(first_time_array, other_time_array): return False @@ -135,20 +138,23 @@ def _map_to_new_time(cube, time_points): Missing data inside original bounds is filled with nearest neighbour Missing data outside original bounds is masked. """ - time_coord = cube.coord('time') + time_coord = cube.coord("time") # Try if the required time points can be obtained by slicing the cube. time_slice = np.isin(time_coord.points, time_points) - if np.any(time_slice) and np.array_equal(time_coord.points[time_slice], - time_points): - time_idx, = cube.coord_dims('time') - indices = tuple(time_slice if i == time_idx else slice(None) - for i in range(cube.ndim)) + if np.any(time_slice) and np.array_equal( + time_coord.points[time_slice], time_points + ): + (time_idx,) = cube.coord_dims("time") + indices = tuple( + time_slice if i == time_idx else slice(None) + for i in range(cube.ndim) + ) return cube[indices] time_points = time_coord.units.num2date(time_points) - sample_points = [('time', time_points)] - scheme = iris.analysis.Nearest(extrapolation_mode='mask') + sample_points = [("time", time_points)] + scheme = iris.analysis.Nearest(extrapolation_mode="mask") # Make sure that all integer time coordinates ('year', 'month', # 'day_of_year', etc.) 
are converted to floats, otherwise the @@ -156,8 +162,9 @@ # to integer". In addition, remove their bounds (this would be done by iris # anyway). int_time_coords = [] - for coord in cube.coords(dimensions=cube.coord_dims('time'), - dim_coords=False): + for coord in cube.coords( + dimensions=cube.coord_dims("time"), dim_coords=False + ): if np.issubdtype(coord.points.dtype, np.integer): int_time_coords.append(coord.name()) coord.points = coord.points.astype(float) @@ -168,7 +175,7 @@ new_cube = cube.interpolate(sample_points, scheme) except Exception as excinfo: additional_info = "" - if cube.coords('time', dimensions=()): + if cube.coords("time", dimensions=()): additional_info = ( " Note: this alignment does not work for scalar time " "coordinates. To ignore all scalar coordinates in the input " @@ -182,9 +189,11 @@ # Change the dtype of int_time_coords to their original values for coord_name in int_time_coords: - coord = new_cube.coord(coord_name, - dimensions=new_cube.coord_dims('time'), - dim_coords=False) + coord = new_cube.coord( + coord_name, + dimensions=new_cube.coord_dims("time"), + dim_coords=False, + ) coord.points = coord.points.astype(int) return new_cube @@ -197,15 +206,17 @@ if _time_coords_are_aligned(cubes): return cubes - all_time_arrays = [cube.coord('time').points for cube in cubes] + all_time_arrays = [cube.coord("time").points for cube in cubes] - if span == 'overlap': + if span == "overlap": new_time_points = reduce(np.intersect1d, all_time_arrays) - elif span == 'full': + elif span == "full": new_time_points = reduce(np.union1d, all_time_arrays) else: - raise ValueError(f"Invalid argument for span: {span!r}" - "Must be one of 'overlap', 'full'.") + raise ValueError( + f"Invalid argument for span: {span!r}, " + "must be one of 'overlap', 'full'."
+ ) new_cubes = [_map_to_new_time(cube, new_time_points) for cube in cubes] @@ -229,8 +240,8 @@ def _get_equal_coords_metadata(cubes): for coord in cubes[0].coords(): for other_cube in cubes[1:]: other_cube_has_equal_coord = [ - coord.metadata == other_coord.metadata for other_coord in - other_cube.coords(coord.name()) + coord.metadata == other_coord.metadata + for other_coord in other_cube.coords(coord.name()) ] if not any(other_cube_has_equal_coord): break @@ -261,7 +272,6 @@ def _get_equal_coord_names_metadata(cubes, equal_coords_metadata): # Check if coordinate names and units match across all cubes for other_cube in cubes[1:]: - # Ignore names that do not exist in other cube/are not unique if len(other_cube.coords(coord_name)) != 1: break @@ -276,12 +286,8 @@ def _get_equal_coord_names_metadata(cubes, equal_coords_metadata): std_names = list( {c.coord(coord_name).standard_name for c in cubes} ) - long_names = list( - {c.coord(coord_name).long_name for c in cubes} - ) - var_names = list( - {c.coord(coord_name).var_name for c in cubes} - ) + long_names = list({c.coord(coord_name).long_name for c in cubes}) + var_names = list({c.coord(coord_name).var_name for c in cubes}) equal_names_metadata[coord_name] = dict( standard_name=std_names[0] if len(std_names) == 1 else None, long_name=long_names[0] if len(long_names) == 1 else None, @@ -304,14 +310,12 @@ def _equalise_coordinate_metadata(cubes): # --> keep matching names of these coordinates # Note: ignores duplicate coordinates equal_names_metadata = _get_equal_coord_names_metadata( - cubes, - equal_coords_metadata + cubes, equal_coords_metadata ) # Modify all coordinates of all cubes accordingly for cube in cubes: for coord in cube.coords(): - # Exactly matching coordinates --> do not modify if coord.metadata in equal_coords_metadata: continue @@ -325,9 +329,9 @@ def _equalise_coordinate_metadata(cubes): # Matching names and units --> set common names if coord.name() in equal_names_metadata: equal_names = equal_names_metadata[coord.name()] - coord.standard_name = equal_names['standard_name'] - coord.long_name = equal_names['long_name'] - coord.var_name = equal_names['var_name'] + coord.standard_name = equal_names["standard_name"] + coord.long_name = equal_names["long_name"] + coord.var_name = equal_names["var_name"] continue # Remaining coordinates --> remove long_name @@ -338,7 +342,7 @@ def _equalise_coordinate_metadata(cubes): # in the input cubes. Note: if `ignore_scalar_coords=True` is used for # `multi_model_statistics`, the cubes do not contain scalar coordinates # at this point anymore. - scalar_coords_to_always_remove = ['p0', 'ptop'] + scalar_coords_to_always_remove = ["p0", "ptop"] for scalar_coord in cube.coords(dimensions=()): if scalar_coord.var_name in scalar_coords_to_always_remove: cube.remove_coord(scalar_coord) @@ -363,7 +367,7 @@ def _equalise_var_metadata(cubes): `standard_names`, `long_names`, and `var_names`. 
""" - attrs = ['standard_name', 'long_name', 'var_name'] + attrs = ["standard_name", "long_name", "var_name"] equal_names_metadata = {} # Collect all names from the different cubes, grouped by cube.name() and @@ -424,7 +428,7 @@ def _combine(cubes): except MergeError as exc: # Note: str(exc) starts with "failed to merge into a single cube.\n" # --> remove this here for clear error message - msg = "\n".join(str(exc).split('\n')[1:]) + msg = "\n".join(str(exc).split("\n")[1:]) raise ValueError( f"Multi-model statistics failed to merge input cubes into a " f"single array:\n{cubes}\n{msg}" @@ -491,7 +495,8 @@ def _compute_eager( f"single array. This happened for operator {operator} " f"with computed statistics {result_slices}. " f"This can happen e.g. if the calculation results in inconsistent " - f"dtypes") from excinfo + f"dtypes" + ) from excinfo result_cube.data = np.ma.array(result_cube.data) @@ -504,22 +509,22 @@ def _compute(cubes: list, *, operator: iris.analysis.Aggregator, **kwargs): with warnings.catch_warnings(): warnings.filterwarnings( - 'ignore', + "ignore", message=( "Collapsing a non-contiguous coordinate. " f"Metadata may not be fully descriptive for '{CONCAT_DIM}." ), category=UserWarning, - module='iris', + module="iris", ) warnings.filterwarnings( - 'ignore', + "ignore", message=( f"Cannot check if coordinate is contiguous: Invalid " f"operation for '{CONCAT_DIM}'" ), category=UserWarning, - module='iris', + module="iris", ) # This will always return a masked array result_cube = cube.collapsed(CONCAT_DIM, operator, **kwargs) @@ -540,7 +545,8 @@ def _compute(cubes: list, *, operator: iris.analysis.Aggregator, **kwargs): method=cell_method.method, coords=cell_method.coord_names, intervals=cell_method.intervals, - comments=f'input_cubes: {len(cubes)}') + comments=f"input_cubes: {len(cubes)}", + ) result_cube.add_cell_method(updated_method) return result_cube @@ -581,7 +587,7 @@ def _multicube_statistics( # If all cubes contain a time coordinate, align them. If no cube contains a # time coordinate, do nothing. Else, raise an exception. 
- time_coords = [cube.coords('time') for cube in cubes] + time_coords = [cube.coords("time") for cube in cubes] if all(time_coords): cubes = _align_time_coord(cubes, span=span) elif not any(time_coords): @@ -606,7 +612,7 @@ def _multicube_statistics( def _compute_statistic(cubes, lazy_input, statistic): """Compute a single statistic.""" stat_id = _get_stat_identifier(statistic) - logger.debug('Multicube statistics: computing: %s', stat_id) + logger.debug("Multicube statistics: computing: %s", stat_id) (operator, kwargs) = _get_operator_and_kwargs(statistic) (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) @@ -658,12 +664,12 @@ def _get_operator_and_kwargs(statistic: str | dict) -> tuple[str, dict]: """Get operator and kwargs from a single statistic.""" if isinstance(statistic, dict): statistic = dict(statistic) - if 'operator' not in statistic: + if "operator" not in statistic: raise ValueError( f"`statistic` given as dictionary, but missing required key " f"`operator`, got {statistic}" ) - operator = statistic.pop('operator') + operator = statistic.pop("operator") kwargs = statistic else: operator = statistic @@ -673,8 +679,8 @@ def _get_operator_and_kwargs(statistic: str | dict) -> tuple[str, dict]: def _get_stat_identifier(statistic: str | dict) -> str: (operator, kwargs) = _get_operator_and_kwargs(statistic) - if 'percent' in kwargs: - operator += str(kwargs['percent']) + if "percent" in kwargs: + operator += str(kwargs["percent"]) return operator @@ -799,7 +805,7 @@ def multi_model_statistics( span=span, ignore_scalar_coords=ignore_scalar_coords, ) - if all(type(p).__name__ == 'PreprocessorFile' for p in products): + if all(type(p).__name__ == "PreprocessorFile" for p in products): # Avoid circular input: https://stackoverflow.com/q/16964467 statistics_products = set() for group, input_prods in _group_products(products, by_key=groupby): @@ -829,7 +835,7 @@ def ensemble_statistics( products: set[PreprocessorFile] | Iterable[Cube], statistics: list[str | dict], output_products, - span: str = 'overlap', + span: str = "overlap", ignore_scalar_coords: bool = False, ) -> dict | set: """Compute ensemble statistics. @@ -876,7 +882,7 @@ def ensemble_statistics( :func:`esmvalcore.preprocessor.multi_model_statistics` for the full description of the core statistics function. """ - ensemble_grouping = ('project', 'dataset', 'exp', 'sub_experiment') + ensemble_grouping = ("project", "dataset", "exp", "sub_experiment") return multi_model_statistics( products=products, span=span, diff --git a/esmvalcore/preprocessor/_other.py b/esmvalcore/preprocessor/_other.py index 3d047a4e24..995bfd30d1 100644 --- a/esmvalcore/preprocessor/_other.py +++ b/esmvalcore/preprocessor/_other.py @@ -1,4 +1,5 @@ """Preprocessor functions that do not fit into any of the categories.""" + from __future__ import annotations import logging @@ -46,8 +47,10 @@ def clip(cube, minimum=None, maximum=None): clipped cube. """ if minimum is None and maximum is None: - raise ValueError("Either minimum, maximum or both have to be\ - specified.") + raise ValueError( + "Either minimum, maximum or both have to be\ + specified." 
+ ) elif minimum is not None and maximum is not None: if maximum < minimum: raise ValueError("Maximum should be equal or larger than minimum.") @@ -62,7 +65,7 @@ def histogram( bins: int | Sequence[float] = 10, bin_range: tuple[float, float] | None = None, weights: np.ndarray | da.Array | bool | None = None, - normalization: Literal['sum', 'integral'] | None = None, + normalization: Literal["sum", "integral"] | None = None, ) -> Cube: """Calculate histogram. @@ -141,7 +144,7 @@ def histogram( f"bins cannot be a str (got '{bins}'), must be int or Sequence of " f"int" ) - allowed_norms = (None, 'sum', 'integral') + allowed_norms = (None, "sum", "integral") if normalization is not None and normalization not in allowed_norms: raise ValueError( f"Expected one of {allowed_norms} for normalization, got " @@ -211,7 +214,7 @@ def _get_histogram_weights( cube: Cube, coords: Iterable[Coord] | Iterable[str], weights: np.ndarray | da.Array | bool | None, - normalization: Literal['sum', 'integral'] | None, + normalization: Literal["sum", "integral"] | None, ) -> np.ndarray | da.Array: """Get histogram weights.""" axes = get_all_coord_dims(cube, coords) @@ -244,7 +247,7 @@ def _calculate_histogram_lazy( along_axes: tuple[int, ...], bin_edges: np.ndarray, bin_range: tuple[float, float], - normalization: Literal['sum', 'integral'] | None = None, + normalization: Literal["sum", "integral"] | None = None, ) -> da.Array: """Calculate histogram over data along axes (lazy version). @@ -268,9 +271,9 @@ def _calculate_histogram_lazy( )[0] hist_sum = hist.sum() hist = da.ma.masked_array(hist, mask=da.allclose(hist_sum, 0.0)) - if normalization == 'sum': + if normalization == "sum": hist = hist / hist_sum - elif normalization == 'integral': + elif normalization == "integral": diffs = np.array(np.diff(bin_edges), dtype=data.dtype) hist = hist / hist_sum / diffs hist = da.ma.masked_invalid(hist) @@ -282,7 +285,7 @@ def _calculate_histogram_lazy( # the `axes` argument to da.apply_gufunc are the rightmost dimensions. # Thus, we need to use `along_axes=(ndim-n_axes, ..., ndim-2, ndim-1)` # for _calculate_histogram_eager here. - axes_in_chunk = tuple(range(data.ndim - n_axes, data.ndim)) + axes_in_chunk = tuple(range(data.ndim - n_axes, data.ndim)) # The call signature depends also on the number of axes in `axes`, and # will be (a,b,...)->(nbins) where a,b,... are the data dimensions that @@ -294,7 +297,7 @@ def _calculate_histogram_lazy( data, weights, axes=[along_axes, along_axes, (-1,)], - output_sizes={'nbins': len(bin_edges) - 1}, + output_sizes={"nbins": len(bin_edges) - 1}, along_axes=axes_in_chunk, bin_edges=bin_edges, bin_range=bin_range, @@ -311,7 +314,7 @@ def _calculate_histogram_eager( along_axes: tuple[int, ...], bin_edges: np.ndarray, bin_range: tuple[float, float], - normalization: Literal['sum', 'integral'] | None = None, + normalization: Literal["sum", "integral"] | None = None, ) -> np.ndarray: """Calculate histogram over data along axes (eager version). 
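
# A usage sketch for the `histogram` preprocessor defined above (it assumes
# `cube` is an iris.cube.Cube loaded elsewhere and that the public signature
# also accepts a `coords` argument naming the dimensions to collapse into
# bins, as used by the helpers here):
from esmvalcore.preprocessor import histogram

hist = histogram(
    cube,
    coords=["time"],
    bins=20,
    bin_range=(250.0, 320.0),
    # "sum" makes the bin values add up to 1; "integral" yields a density.
    normalization="sum",
)
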
@@ -340,7 +343,7 @@ def _get_hist_values(arr, wgts): arr, bins=bin_edges, range=bin_range, weights=wgts )[0] - v_histogram = np.vectorize(_get_hist_values, signature='(n),(n)->(m)') + v_histogram = np.vectorize(_get_hist_values, signature="(n),(n)->(m)") hist = v_histogram(reshaped_data, reshaped_weights) # Mask points where all input data were masked (these are the ones where @@ -350,13 +353,13 @@ def _get_hist_values(arr, wgts): hist = np.ma.array(hist, mask=np.broadcast_to(mask, hist.shape)) # Apply normalization - if normalization == 'sum': + if normalization == "sum": hist = hist / np.ma.array(hist_sum, mask=mask) - elif normalization == 'integral': + elif normalization == "integral": hist = ( - hist / - np.ma.array(hist_sum, mask=mask) / - np.ma.array(np.diff(bin_edges), dtype=data.dtype) + hist + / np.ma.array(hist_sum, mask=mask) + / np.ma.array(np.diff(bin_edges), dtype=data.dtype) ) return hist @@ -367,12 +370,12 @@ def _get_histogram_cube( data: np.ndarray | da.Array, coords: Iterable[Coord] | Iterable[str], bin_edges: np.ndarray, - normalization: Literal['sum', 'integral'] | None, + normalization: Literal["sum", "integral"] | None, ): """Get cube with correct metadata for histogram.""" # Calculate bin centers using 2-window running mean and get corresponding # coordinate - bin_centers = np.convolve(bin_edges, np.ones(2), 'valid') / 2.0 + bin_centers = np.convolve(bin_edges, np.ones(2), "valid") / 2.0 bin_coord = DimCoord( bin_centers, bounds=np.stack((bin_edges[:-1], bin_edges[1:]), axis=-1), @@ -389,25 +392,24 @@ def _get_histogram_cube( # Get histogram cube long_name_suffix = ( - '' if cube.long_name is None else f' of {cube.long_name}' - ) - var_name_suffix = '' if cube.var_name is None else f'_{cube.var_name}' - dim_spec = ( - [(d, cube.coord_dims(d)) for d in cube.dim_coords] + - [(bin_coord, cube.ndim)] + "" if cube.long_name is None else f" of {cube.long_name}" ) - if normalization == 'sum': + var_name_suffix = "" if cube.var_name is None else f"_{cube.var_name}" + dim_spec = [(d, cube.coord_dims(d)) for d in cube.dim_coords] + [ + (bin_coord, cube.ndim) + ] + if normalization == "sum": long_name = f"Relative Frequency{long_name_suffix}" var_name = f"relative_frequency{var_name_suffix}" - units = '1' - elif normalization == 'integral': + units = "1" + elif normalization == "integral": long_name = f"Density{long_name_suffix}" var_name = f"density{var_name_suffix}" units = cube.units**-1 else: long_name = f"Frequency{long_name_suffix}" var_name = f"frequency{var_name_suffix}" - units = '1' + units = "1" hist_cube = Cube( data, standard_name=None, @@ -420,8 +422,8 @@ def _get_histogram_cube( aux_coords_and_dims=[(a, cube.coord_dims(a)) for a in cube.aux_coords], aux_factories=cube.aux_factories, ancillary_variables_and_dims=[ - (a, cube.ancillary_variable_dims(a)) for a in - cube.ancillary_variables() + (a, cube.ancillary_variable_dims(a)) + for a in cube.ancillary_variables() ], cell_measures_and_dims=[ (c, cube.cell_measure_dims(c)) for c in cube.cell_measures() diff --git a/esmvalcore/preprocessor/_regrid.py b/esmvalcore/preprocessor/_regrid.py index 0659f89e70..a8558f6ee1 100644 --- a/esmvalcore/preprocessor/_regrid.py +++ b/esmvalcore/preprocessor/_regrid.py @@ -1,4 +1,5 @@ """Horizontal and vertical regridding module.""" + from __future__ import annotations import functools @@ -21,6 +22,7 @@ from geopy.geocoders import Nominatim from iris.analysis import AreaWeighted, Linear, Nearest from iris.cube import Cube +from iris.util import broadcast_to_shape from 
esmvalcore.cmor._fixes.shared import ( add_altitude_from_plev, @@ -30,8 +32,8 @@ from esmvalcore.exceptions import ESMValCoreDeprecationWarning from esmvalcore.iris_helpers import has_irregular_grid, has_unstructured_grid from esmvalcore.preprocessor._shared import ( - broadcast_to_shape, get_array_module, + get_dims_along_axes, preserve_float_dtype, ) from esmvalcore.preprocessor._supplementary_vars import ( @@ -39,10 +41,8 @@ add_cell_measure, ) from esmvalcore.preprocessor.regrid_schemes import ( - ESMPyAreaWeighted, - ESMPyLinear, - ESMPyNearest, GenericFuncScheme, + IrisESMFRegrid, UnstructuredLinear, UnstructuredNearest, ) @@ -54,15 +54,17 @@ # Regular expression to parse a "MxN" cell-specification. _CELL_SPEC = re.compile( - r'''\A + r"""\A \s*(?P<dlon>\d+(\.\d+)?)\s* x \s*(?P<dlat>\d+(\.\d+)?)\s* \Z - ''', re.IGNORECASE | re.VERBOSE) + """, + re.IGNORECASE | re.VERBOSE, +) # Default fill-value. -_MDI = 1e+20 +_MDI = 1e20 # Stock cube - global grid extents (degrees). _LAT_MIN = -90.0 @@ -74,42 +76,50 @@ # Supported point interpolation schemes. POINT_INTERPOLATION_SCHEMES = { - 'linear': Linear(extrapolation_mode='mask'), - 'nearest': Nearest(extrapolation_mode='mask'), + "linear": Linear(extrapolation_mode="mask"), + "nearest": Nearest(extrapolation_mode="mask"), } # Supported horizontal regridding schemes for regular grids (= rectilinear # grids; i.e., grids that can be described with 1D latitude and 1D longitude # coordinates orthogonal to each other) HORIZONTAL_SCHEMES_REGULAR = { - 'area_weighted': AreaWeighted(), - 'linear': Linear(extrapolation_mode='mask'), - 'nearest': Nearest(extrapolation_mode='mask'), + "area_weighted": AreaWeighted(), + "linear": Linear(extrapolation_mode="mask"), + "nearest": Nearest(extrapolation_mode="mask"), } # Supported horizontal regridding schemes for irregular grids (= general # curvilinear grids; i.e., grids that can be described with 2D latitude and 2D # longitude coordinates with common dimensions) HORIZONTAL_SCHEMES_IRREGULAR = { - 'area_weighted': ESMPyAreaWeighted(), - 'linear': ESMPyLinear(), - 'nearest': ESMPyNearest(), + "area_weighted": IrisESMFRegrid(method="conservative"), + "linear": IrisESMFRegrid(method="bilinear"), + "nearest": IrisESMFRegrid(method="nearest"), +} + +# Supported horizontal regridding schemes for meshes +# https://scitools-iris.readthedocs.io/en/stable/further_topics/ugrid/index.html +HORIZONTAL_SCHEMES_MESH = { + "area_weighted": IrisESMFRegrid(method="conservative"), + "linear": IrisESMFRegrid(method="bilinear"), + "nearest": IrisESMFRegrid(method="nearest"), } # Supported horizontal regridding schemes for unstructured grids (i.e., grids, # that can be described with 1D latitude and 1D longitude coordinate with # common dimensions) HORIZONTAL_SCHEMES_UNSTRUCTURED = { - 'linear': UnstructuredLinear(), - 'nearest': UnstructuredNearest(), + "linear": UnstructuredLinear(), + "nearest": UnstructuredNearest(), } # Supported vertical interpolation schemes. VERTICAL_SCHEMES = ( - 'linear', - 'nearest', - 'linear_extrapolate', - 'nearest_extrapolate', + "linear", + "nearest", + "linear_extrapolate", + "nearest_extrapolate", ) @@ -135,21 +145,25 @@ def parse_cell_spec(spec): """ cell_match = _CELL_SPEC.match(spec) if cell_match is None: - emsg = 'Invalid MxN cell specification for grid, got {!r}.' + emsg = "Invalid MxN cell specification for grid, got {!r}."
raise ValueError(emsg.format(spec)) cell_group = cell_match.groupdict() - dlon = float(cell_group['dlon']) - dlat = float(cell_group['dlat']) + dlon = float(cell_group["dlon"]) + dlat = float(cell_group["dlat"]) if (np.trunc(_LON_RANGE / dlon) * dlon) != _LON_RANGE: - emsg = ('Invalid longitude delta in MxN cell specification ' - 'for grid, got {!r}.') + emsg = ( + "Invalid longitude delta in MxN cell specification " + "for grid, got {!r}." + ) raise ValueError(emsg.format(dlon)) if (np.trunc(_LAT_RANGE / dlat) * dlat) != _LAT_RANGE: - emsg = ('Invalid latitude delta in MxN cell specification ' - 'for grid, got {!r}.') + emsg = ( + "Invalid latitude delta in MxN cell specification " + "for grid, got {!r}." + ) raise ValueError(emsg.format(dlat)) return dlon, dlat @@ -172,17 +186,21 @@ def _generate_cube_from_dimcoords(latdata, londata, circular: bool = False): ------- iris.cube.Cube """ - lats = iris.coords.DimCoord(latdata, - standard_name='latitude', - units='degrees_north', - var_name='lat', - circular=circular) - - lons = iris.coords.DimCoord(londata, - standard_name='longitude', - units='degrees_east', - var_name='lon', - circular=circular) + lats = iris.coords.DimCoord( + latdata, + standard_name="latitude", + units="degrees_north", + var_name="lat", + circular=circular, + ) + + lons = iris.coords.DimCoord( + londata, + standard_name="longitude", + units="degrees_east", + var_name="lon", + circular=circular, + ) if not circular: # cannot guess bounds for wrapped coordinates @@ -191,7 +209,7 @@ # Construct the resultant stock cube, with dummy data. shape = (latdata.size, londata.size) - dummy = np.empty(shape, dtype=np.dtype('int8')) + dummy = np.empty(shape, dtype=np.dtype("int8")) coords_spec = [(lats, 0), (lons, 1)] cube = Cube(dummy, dim_coords_and_dims=coords_spec) @@ -231,27 +249,36 @@ # Construct the latitude coordinate, with bounds. if lat_offset: - latdata = np.linspace(_LAT_MIN + mid_dlat, _LAT_MAX - mid_dlat, - int(_LAT_RANGE / dlat)) + latdata = np.linspace( + _LAT_MIN + mid_dlat, _LAT_MAX - mid_dlat, int(_LAT_RANGE / dlat) + ) else: latdata = np.linspace(_LAT_MIN, _LAT_MAX, int(_LAT_RANGE / dlat) + 1) # Construct the longitude coordinate, with bounds. if lon_offset: - londata = np.linspace(_LON_MIN + mid_dlon, _LON_MAX - mid_dlon, - int(_LON_RANGE / dlon)) + londata = np.linspace( + _LON_MIN + mid_dlon, _LON_MAX - mid_dlon, int(_LON_RANGE / dlon) + ) else: - londata = np.linspace(_LON_MIN, _LON_MAX - dlon, - int(_LON_RANGE / dlon)) + londata = np.linspace( + _LON_MIN, _LON_MAX - dlon, int(_LON_RANGE / dlon) + ) cube = _generate_cube_from_dimcoords(latdata=latdata, londata=londata) return cube -def _spec_to_latlonvals(*, start_latitude: float, end_latitude: float, - step_latitude: float, start_longitude: float, - end_longitude: float, step_longitude: float) -> tuple: +def _spec_to_latlonvals( + *, + start_latitude: float, + end_latitude: float, + step_latitude: float, + start_longitude: float, + end_longitude: float, + step_longitude: float, +) -> tuple: """Define lat/lon values from spec. Create a regional grid defined by the target specification.
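
# For illustration, `parse_cell_spec` turns an "MxN" cell specification into
# the longitude/latitude deltas used to build the global stock cube (private
# API, shown here only as a sketch):
from esmvalcore.preprocessor._regrid import parse_cell_spec

dlon, dlat = parse_cell_spec("2.5x2.5")  # -> (2.5, 2.5)
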
@@ -288,17 +315,20 @@ def _spec_to_latlonvals(*, start_latitude: float, end_latitude: float, List of latitudes """ if step_latitude == 0: - raise ValueError('Latitude step cannot be 0, ' - f'got step_latitude={step_latitude}.') + raise ValueError( + f"Latitude step cannot be 0, got step_latitude={step_latitude}." + ) if step_longitude == 0: - raise ValueError('Longitude step cannot be 0, ' - f'got step_longitude={step_longitude}.') + raise ValueError( + "Longitude step cannot be 0, " + f"got step_longitude={step_longitude}." + ) if (start_latitude < _LAT_MIN) or (end_latitude > _LAT_MAX): raise ValueError( - f'Latitude values must lie between {_LAT_MIN}:{_LAT_MAX}, ' - f'got start_latitude={start_latitude}:end_latitude={end_latitude}.' + f"Latitude values must lie between {_LAT_MIN}:{_LAT_MAX}, " + f"got start_latitude={start_latitude}:end_latitude={end_latitude}." ) def get_points(start, stop, step): @@ -323,9 +353,9 @@ def _regional_stock_cube(spec: dict): """ latdata, londata = _spec_to_latlonvals(**spec) - cube = _generate_cube_from_dimcoords(latdata=latdata, - londata=londata, - circular=True) + cube = _generate_cube_from_dimcoords( + latdata=latdata, londata=londata, circular=True + ) def add_bounds_from_step(coord, step): """Calculate bounds from the given step.""" @@ -333,8 +363,8 @@ def add_bounds_from_step(coord, step): points = coord.points coord.bounds = np.vstack((points - bound, points + bound)).T - add_bounds_from_step(cube.coord('latitude'), spec['step_latitude']) - add_bounds_from_step(cube.coord('longitude'), spec['step_longitude']) + add_bounds_from_step(cube.coord("latitude"), spec["step_latitude"]) + add_bounds_from_step(cube.coord("longitude"), spec["step_longitude"]) return cube @@ -382,32 +412,42 @@ def extract_location(cube, location, scheme): If given location cannot be found by the geolocator. """ if location is None: - raise ValueError("Location needs to be specified." - " Examples: 'mount everest', 'romania'," - " 'new york, usa'") + raise ValueError( + "Location needs to be specified." + " Examples: 'mount everest', 'romania'," + " 'new york, usa'" + ) if scheme is None: - raise ValueError("Interpolation scheme needs to be specified." - " Use either 'linear' or 'nearest'.") + raise ValueError( + "Interpolation scheme needs to be specified." + " Use either 'linear' or 'nearest'." + ) try: # Try to use the default SSL context, see # https://github.com/ESMValGroup/ESMValCore/issues/2012 for more # information. ssl_context = ssl.create_default_context() - geolocator = Nominatim(user_agent='esmvalcore', - ssl_context=ssl_context) + geolocator = Nominatim( + user_agent="esmvalcore", ssl_context=ssl_context + ) except ssl.SSLError: logger.warning( "ssl.create_default_context() encountered a problem, not using it." 
) - geolocator = Nominatim(user_agent='esmvalcore') + geolocator = Nominatim(user_agent="esmvalcore") geolocation = geolocator.geocode(location) if geolocation is None: - raise ValueError(f'Requested location {location} can not be found.') - logger.info("Extracting data for %s (%s °N, %s °E)", geolocation, - geolocation.latitude, geolocation.longitude) + raise ValueError(f"Requested location {location} can not be found.") + logger.info( + "Extracting data for %s (%s °N, %s °E)", + geolocation, + geolocation.latitude, + geolocation.longitude, + ) - return extract_point(cube, geolocation.latitude, geolocation.longitude, - scheme) + return extract_point( + cube, geolocation.latitude, geolocation.longitude, scheme + ) def extract_point(cube, latitude, longitude, scheme): @@ -485,7 +525,7 @@ def extract_point(cube, latitude, longitude, scheme): if not scheme: raise ValueError(msg) - point = [('latitude', latitude), ('longitude', longitude)] + point = [("latitude", latitude), ("longitude", longitude)] cube = cube.interpolate(point, scheme=scheme) return cube @@ -493,7 +533,7 @@ def extract_point(cube, latitude, longitude, scheme): def is_dataset(dataset): """Test if something is an `esmvalcore.dataset.Dataset`.""" # Use this function to avoid circular imports - return hasattr(dataset, 'facets') + return hasattr(dataset, "facets") def _get_target_grid_cube( @@ -512,13 +552,14 @@ def _get_target_grid_cube( target_grid_cube = iris.load_cube(target_grid) elif isinstance(target_grid, str): # Generate a target grid from the provided cell-specification - target_grid_cube = _global_stock_cube(target_grid, lat_offset, - lon_offset) + target_grid_cube = _global_stock_cube( + target_grid, lat_offset, lon_offset + ) # Align the target grid coordinate system to the source # coordinate system. 
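
# A usage sketch for `extract_point`, documented above (assumes `cube` is an
# iris.cube.Cube; `scheme` is one of the string keys of
# POINT_INTERPOLATION_SCHEMES):
from esmvalcore.preprocessor import extract_point

site = extract_point(cube, latitude=52.4, longitude=4.9, scheme="nearest")
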
src_cs = cube.coord_system() - xcoord = target_grid_cube.coord(axis='x', dim_coords=True) - ycoord = target_grid_cube.coord(axis='y', dim_coords=True) + xcoord = target_grid_cube.coord(axis="x", dim_coords=True) + ycoord = target_grid_cube.coord(axis="y", dim_coords=True) xcoord.coord_system = src_cs ycoord.coord_system = src_cs elif isinstance(target_grid, dict): @@ -528,50 +569,29 @@ def _get_target_grid_cube( target_grid_cube = target_grid if not isinstance(target_grid_cube, Cube): - raise ValueError(f'Expecting a cube, got {target_grid}.') + raise ValueError(f"Expecting a cube, got {target_grid}.") return target_grid_cube -def _attempt_irregular_regridding(cube: Cube, scheme: str) -> bool: - """Check if irregular regridding with ESMF should be used.""" - if not has_irregular_grid(cube): - return False - if scheme not in HORIZONTAL_SCHEMES_IRREGULAR: - raise ValueError( - f"Regridding scheme '{scheme}' does not support irregular data, " - f"expected one of {list(HORIZONTAL_SCHEMES_IRREGULAR)}") - return True - - -def _attempt_unstructured_regridding(cube: Cube, scheme: str) -> bool: - """Check if unstructured regridding should be used.""" - if not has_unstructured_grid(cube): - return False - if scheme not in HORIZONTAL_SCHEMES_UNSTRUCTURED: - raise ValueError( - f"Regridding scheme '{scheme}' does not support unstructured " - f"data, expected one of {list(HORIZONTAL_SCHEMES_UNSTRUCTURED)}") - return True - - -def _load_scheme(src_cube: Cube, scheme: str | dict): +def _load_scheme(src_cube: Cube, tgt_cube: Cube, scheme: str | dict): """Return scheme that can be used in :meth:`iris.cube.Cube.regrid`.""" loaded_scheme: Any = None # Deprecations - if scheme == 'unstructured_nearest': + if scheme == "unstructured_nearest": msg = ( "The regridding scheme `unstructured_nearest` has been deprecated " "in ESMValCore version 2.11.0 and is scheduled for removal in " "version 2.13.0. Please use the scheme `nearest` instead. This is " "an exact replacement for data on unstructured grids. Since " "version 2.11.0, ESMValCore is able to determine the most " - "suitable regridding scheme based on the input data.") + "suitable regridding scheme based on the input data." + ) warnings.warn(msg, ESMValCoreDeprecationWarning) - scheme = 'nearest' + scheme = "nearest" - if scheme == 'linear_extrapolate': + if scheme == "linear_extrapolate": msg = ( "The regridding scheme `linear_extrapolate` has been deprecated " "in ESMValCore version 2.11.0 and is scheduled for removal in " @@ -579,30 +599,36 @@ def _load_scheme(src_cube: Cube, scheme: str | dict): "iris.analysis:Linear` and `extrapolation_mode: extrapolate` " "instead (see https://docs.esmvaltool.org/projects/ESMValCore/en/" "latest/recipe/preprocessor.html#generic-regridding-schemes)." - "This is an exact replacement.") + "This is an exact replacement." 
+ ) warnings.warn(msg, ESMValCoreDeprecationWarning) - scheme = 'linear' - loaded_scheme = Linear(extrapolation_mode='extrapolate') + scheme = "linear" + loaded_scheme = Linear(extrapolation_mode="extrapolate") logger.debug("Loaded regridding scheme %s", loaded_scheme) return loaded_scheme - # Scheme is a dict -> assume this describes a generic regridding scheme if isinstance(scheme, dict): + # Scheme is a dict -> assume this describes a generic regridding scheme loaded_scheme = _load_generic_scheme(scheme) - - # Scheme is a str -> load appropriate regridding scheme depending on the - # type of input data - elif _attempt_irregular_regridding(src_cube, scheme): - loaded_scheme = HORIZONTAL_SCHEMES_IRREGULAR[scheme] - elif _attempt_unstructured_regridding(src_cube, scheme): - loaded_scheme = HORIZONTAL_SCHEMES_UNSTRUCTURED[scheme] else: - loaded_scheme = HORIZONTAL_SCHEMES_REGULAR.get(scheme) - - if loaded_scheme is None: - raise ValueError( - f"Got invalid regridding scheme string '{scheme}', expected one " - f"of {list(HORIZONTAL_SCHEMES_REGULAR)}") + # Scheme is a str -> load appropriate regridding scheme depending on + # the type of input data + if has_irregular_grid(src_cube) or has_irregular_grid(tgt_cube): + grid_type = "irregular" + elif src_cube.mesh is not None or tgt_cube.mesh is not None: + grid_type = "mesh" + elif has_unstructured_grid(src_cube): + grid_type = "unstructured" + else: + grid_type = "regular" + + schemes = globals()[f"HORIZONTAL_SCHEMES_{grid_type.upper()}"] + if scheme not in schemes: + raise ValueError( + f"Regridding scheme '{scheme}' not available for {grid_type} " + f"data, expected one of: {', '.join(schemes)}" + ) + loaded_scheme = schemes[scheme] logger.debug("Loaded regridding scheme %s", loaded_scheme) @@ -617,7 +643,8 @@ def _load_generic_scheme(scheme: dict): object_ref = scheme.pop("reference") except KeyError as key_err: raise ValueError( - "No reference specified for generic regridding.") from key_err + "No reference specified for generic regridding." + ) from key_err module_name, separator, scheme_name = object_ref.partition(":") try: obj: Any = importlib.import_module(module_name) @@ -625,15 +652,16 @@ def _load_generic_scheme(scheme: dict): raise ValueError( f"Could not import specified generic regridding module " f"'{module_name}'. Please double check spelling and that the " - f"required module is installed.") from import_err + f"required module is installed." 
+ ) from import_err if separator: - for attr in scheme_name.split('.'): + for attr in scheme_name.split("."): obj = getattr(obj, attr) # If `obj` is a function that requires `src_cube` and `grid_cube`, use # GenericFuncScheme scheme_args = inspect.getfullargspec(obj).args - if 'src_cube' in scheme_args and 'grid_cube' in scheme_args: + if "src_cube" in scheme_args and "grid_cube" in scheme_args: loaded_scheme = GenericFuncScheme(obj, **scheme) else: loaded_scheme = obj(**scheme) @@ -671,19 +699,19 @@ def _get_regridder( # _CACHED_REGRIDDERS[shape_key]` below since the hash() of a # coordinate is simply its id() (thus, coordinates loaded from two # different files would never be considered equal) - for (key, regridder) in _CACHED_REGRIDDERS[name_shape_key].items(): + for key, regridder in _CACHED_REGRIDDERS[name_shape_key].items(): if key == coord_key: return regridder # Regridder is not in cached -> return a new one and cache it - loaded_scheme = _load_scheme(src_cube, scheme) + loaded_scheme = _load_scheme(src_cube, tgt_cube, scheme) regridder = loaded_scheme.regridder(src_cube, tgt_cube) _CACHED_REGRIDDERS.setdefault(name_shape_key, {}) _CACHED_REGRIDDERS[name_shape_key][coord_key] = regridder # (2) Weights caching disabled else: - loaded_scheme = _load_scheme(src_cube, scheme) + loaded_scheme = _load_scheme(src_cube, tgt_cube, scheme) regridder = loaded_scheme.regridder(src_cube, tgt_cube) return regridder @@ -691,10 +719,10 @@ def _get_regridder( def _get_coord_key(src_cube: Cube, tgt_cube: Cube) -> tuple: """Get dict key from coordinates.""" - src_lat = src_cube.coord('latitude') - src_lon = src_cube.coord('longitude') - tgt_lat = tgt_cube.coord('latitude') - tgt_lon = tgt_cube.coord('longitude') + src_lat = src_cube.coord("latitude") + src_lon = src_cube.coord("longitude") + tgt_lat = tgt_cube.coord("latitude") + tgt_lon = tgt_cube.coord("longitude") return (src_lat, src_lon, tgt_lat, tgt_lon) @@ -831,11 +859,13 @@ def regrid( # Horizontal grids from source and target (almost) match # -> Return source cube with target coordinates if _horizontal_grid_is_close(cube, target_grid_cube): - for coord in ['latitude', 'longitude']: - cube.coord(coord).points = ( - target_grid_cube.coord(coord).core_points()) - cube.coord(coord).bounds = ( - target_grid_cube.coord(coord).core_bounds()) + for coord in ["latitude", "longitude"]: + cube.coord(coord).points = target_grid_cube.coord( + coord + ).core_points() + cube.coord(coord).bounds = target_grid_cube.coord( + coord + ).core_bounds() return cube # Load scheme and reuse existing regridder if possible @@ -860,36 +890,40 @@ def _cache_clear(): def _rechunk(cube: Cube, target_grid: Cube) -> Cube: """Re-chunk cube with optimal chunk sizes for target grid.""" - if not cube.has_lazy_data() or cube.ndim < 3: - # Only rechunk lazy multidimensional data + if not cube.has_lazy_data(): + # Only rechunk lazy data return cube - lon_coord = target_grid.coord(axis='X') - lat_coord = target_grid.coord(axis='Y') - if lon_coord.ndim != 1 or lat_coord.ndim != 1: - # This function only supports 1D lat/lon coordinates. 
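
# A usage sketch for the public `regrid` entry point shown above (assumes
# `cube` is an iris.cube.Cube; "2x2" is an MxN cell specification and
# "linear" one of the scheme names defined earlier in this module):
from esmvalcore.preprocessor import regrid

regridded = regrid(cube, target_grid="2x2", scheme="linear")
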
- return cube + # Extract grid dimension information from source cube + src_grid_indices = get_dims_along_axes(cube, ["X", "Y"]) + src_grid_shape = tuple(cube.shape[i] for i in src_grid_indices) + src_grid_ndims = len(src_grid_indices) - lon_dim, = target_grid.coord_dims(lon_coord) - lat_dim, = target_grid.coord_dims(lat_coord) - grid_indices = sorted((lon_dim, lat_dim)) - target_grid_shape = tuple(target_grid.shape[i] for i in grid_indices) + # Extract grid dimension information from target cube. + tgt_grid_indices = get_dims_along_axes(target_grid, ["X", "Y"]) + tgt_grid_shape = tuple(target_grid.shape[i] for i in tgt_grid_indices) + tgt_grid_ndims = len(tgt_grid_indices) - if 2 * np.prod(cube.shape[-2:]) > np.prod(target_grid_shape): + if 2 * np.prod(src_grid_shape) > np.prod(tgt_grid_shape): # Only rechunk if target grid is more than a factor of 2 larger, # because rechunking will keep the original chunk in memory. return cube + # Compute a good chunk size for the target array + # This uses the fact that horizontal dimension(s) are the last dimension(s) + # of the input cube and also takes into account that iris regridding needs + # unchunked data along the grid dimensions. data = cube.lazy_data() + tgt_shape = data.shape[:-src_grid_ndims] + tgt_grid_shape + tgt_chunks = data.chunks[:-src_grid_ndims] + tgt_grid_shape - # Compute a good chunk size for the target array - tgt_shape = data.shape[:-2] + target_grid_shape - tgt_chunks = data.chunks[:-2] + target_grid_shape - tgt_data = da.empty(tgt_shape, dtype=data.dtype, chunks=tgt_chunks) - tgt_data = tgt_data.rechunk({i: "auto" for i in range(cube.ndim - 2)}) + tgt_data = da.empty(tgt_shape, chunks=tgt_chunks, dtype=data.dtype) + tgt_data = tgt_data.rechunk( + {i: "auto" for i in range(tgt_data.ndim - tgt_grid_ndims)} + ) # Adjust chunks to source array and rechunk - chunks = tgt_data.chunks[:-2] + data.shape[-2:] + chunks = tgt_data.chunks[:-tgt_grid_ndims] + data.shape[-src_grid_ndims:] cube.data = data.rechunk(chunks) return cube @@ -920,7 +954,7 @@ def _horizontal_grid_is_close(cube1: Cube, cube2: Cube) -> bool: ``True`` if grids are close; ``False`` if not. """ # Go through the 2 expected horizontal coordinates longitude and latitude. - for coord in ['latitude', 'longitude']: + for coord in ["latitude", "longitude"]: coord1 = cube1.coord(coord) coord2 = cube2.coord(coord) @@ -965,12 +999,14 @@ def _create_cube(src_cube, data, src_levels, levels): scalar vertical coordinate will be added. """ # Get the source cube vertical coordinate and associated dimension. - z_coord = src_cube.coord(axis='z', dim_coords=True) - z_dim, = src_cube.coord_dims(z_coord) + z_coord = src_cube.coord(axis="z", dim_coords=True) + (z_dim,) = src_cube.coord_dims(z_coord) if data.shape[z_dim] != levels.size: - emsg = ('Mismatch between data and levels for data dimension {!r}, ' - 'got data shape {!r} with levels shape {!r}.') + emsg = ( + "Mismatch between data and levels for data dimension {!r}, " + "got data shape {!r} with levels shape {!r}." 
+ ) raise ValueError(emsg.format(z_dim, data.shape, levels.shape)) # Construct the resultant cube with the interpolated data @@ -1000,13 +1036,13 @@ def _create_cube(src_cube, data, src_levels, levels): metadata = src_levels.metadata kwargs = { - 'standard_name': metadata.standard_name, - 'long_name': metadata.long_name, - 'var_name': metadata.var_name, - 'units': metadata.units, - 'attributes': metadata.attributes, - 'coord_system': metadata.coord_system, - 'climatological': metadata.climatological, + "standard_name": metadata.standard_name, + "long_name": metadata.long_name, + "var_name": metadata.var_name, + "units": metadata.units, + "attributes": metadata.attributes, + "coord_system": metadata.coord_system, + "climatological": metadata.climatological, } try: @@ -1025,11 +1061,12 @@ def _create_cube(src_cube, data, src_levels, levels): return result -def _vertical_interpolate(cube, src_levels, levels, interpolation, - extrapolation): +def _vertical_interpolate( + cube, src_levels, levels, interpolation, extrapolation +): """Perform vertical interpolation.""" # Determine the source levels and axis for vertical interpolation. - z_axis, = cube.coord_dims(cube.coord(axis='z', dim_coords=True)) + (z_axis,) = cube.coord_dims(cube.coord(axis="z", dim_coords=True)) if cube.has_lazy_data(): # Make source levels lazy if cube has lazy data. @@ -1072,15 +1109,17 @@ def _vertical_interpolate(cube, src_levels, levels, interpolation, def _preserve_fx_vars(cube, result): - vertical_dim = set(cube.coord_dims(cube.coord(axis='z', dim_coords=True))) + vertical_dim = set(cube.coord_dims(cube.coord(axis="z", dim_coords=True))) if cube.cell_measures(): for measure in cube.cell_measures(): measure_dims = set(cube.cell_measure_dims(measure)) if vertical_dim.intersection(measure_dims): logger.warning( - 'Discarding use of z-axis dependent cell measure %s ' - 'in variable %s, as z-axis has been interpolated', - measure.var_name, result.var_name) + "Discarding use of z-axis dependent cell measure %s " + "in variable %s, as z-axis has been interpolated", + measure.var_name, + result.var_name, + ) else: add_cell_measure(result, measure, measure.measure) if cube.ancillary_variables(): @@ -1088,9 +1127,11 @@ def _preserve_fx_vars(cube, result): ancillary_dims = set(cube.ancillary_variable_dims(ancillary_var)) if vertical_dim.intersection(ancillary_dims): logger.warning( - 'Discarding use of z-axis dependent ancillary variable %s ' - 'in variable %s, as z-axis has been interpolated', - ancillary_var.var_name, result.var_name) + "Discarding use of z-axis dependent ancillary variable %s " + "in variable %s, as z-axis has been interpolated", + ancillary_var.var_name, + result.var_name, + ) else: add_ancillary_variable(result, ancillary_var) @@ -1116,18 +1157,19 @@ def parse_vertical_scheme(scheme): if scheme not in VERTICAL_SCHEMES: raise ValueError( f"Unknown vertical interpolation scheme, got '{scheme}', possible " - f"schemes are {VERTICAL_SCHEMES}") + f"schemes are {VERTICAL_SCHEMES}" + ) # This allows us to put level 0. to load the ocean surface. 
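
# For example, `parse_vertical_scheme` splits a scheme name into the
# interpolation scheme and the extrapolation mode (the "*_extrapolate"
# names select nearest-neighbour extrapolation; the other schemes use the
# "nan" mode, so out-of-range levels end up as missing values):
from esmvalcore.preprocessor._regrid import parse_vertical_scheme

scheme, extrap_scheme = parse_vertical_scheme("linear_extrapolate")
# -> ("linear", "nearest")
scheme, extrap_scheme = parse_vertical_scheme("nearest")
# -> ("nearest", "nan")
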
- extrap_scheme = 'nan' + extrap_scheme = "nan" - if scheme == 'linear_extrapolate': - scheme = 'linear' - extrap_scheme = 'nearest' + if scheme == "linear_extrapolate": + scheme = "linear" + extrap_scheme = "nearest" - if scheme == 'nearest_extrapolate': - scheme = 'nearest' - extrap_scheme = 'nearest' + if scheme == "nearest_extrapolate": + scheme = "nearest" + extrap_scheme = "nearest" return scheme, extrap_scheme @@ -1157,7 +1199,7 @@ def _rechunk_aux_factory_dependencies( chunks = tuple(cube_chunks[i] for i in coord_dims) coord.points = coord.lazy_points().rechunk(chunks) if coord.has_bounds(): - coord.bounds = coord.lazy_bounds().rechunk(chunks + (None, )) + coord.bounds = coord.lazy_bounds().rechunk(chunks + (None,)) cube.replace_coord(coord) return cube @@ -1225,7 +1267,7 @@ def extract_levels( # Try to determine the name of the vertical coordinate automatically if coordinate is None: - coordinate = cube.coord(axis='z', dim_coords=True).name() + coordinate = cube.coord(axis="z", dim_coords=True).name() # Add extra coordinates coord_names = [coord.name() for coord in cube.coords()] @@ -1234,31 +1276,33 @@ def extract_levels( else: # Try to calculate air_pressure from altitude coordinate or # vice versa using US standard atmosphere for conversion. - if coordinate == 'air_pressure' and 'altitude' in coord_names: + if coordinate == "air_pressure" and "altitude" in coord_names: # Calculate pressure level coordinate from altitude. - cube = _rechunk_aux_factory_dependencies(cube, 'altitude') + cube = _rechunk_aux_factory_dependencies(cube, "altitude") add_plev_from_altitude(cube) - if coordinate == 'altitude' and 'air_pressure' in coord_names: + if coordinate == "altitude" and "air_pressure" in coord_names: # Calculate altitude coordinate from pressure levels. - cube = _rechunk_aux_factory_dependencies(cube, 'air_pressure') + cube = _rechunk_aux_factory_dependencies(cube, "air_pressure") add_altitude_from_plev(cube) src_levels = cube.coord(coordinate) - if (src_levels.shape == levels.shape and np.allclose( - src_levels.core_points(), - levels, - rtol=rtol, - atol=1e-7 * - np.mean(src_levels.core_points()) if atol is None else atol, - )): + if src_levels.shape == levels.shape and np.allclose( + src_levels.core_points(), + levels, + rtol=rtol, + atol=1e-7 * np.mean(src_levels.core_points()) + if atol is None + else atol, + ): # Only perform vertical extraction/interpolation if the source # and target levels are not "similar" enough. result = cube # Set the levels to the requested values src_levels.points = levels - elif len(src_levels.shape) == 1 and \ - set(levels).issubset(set(src_levels.points)): + elif len(src_levels.shape) == 1 and set(levels).issubset( + set(src_levels.points) + ): # If all target levels exist in the source cube, simply extract them. name = src_levels.name() coord_values = { @@ -1268,7 +1312,7 @@ def extract_levels( result = cube.extract(constraint) # Ensure the constraint did not fail. if not result: - emsg = 'Failed to extract levels {!r} from cube {!r}.' + emsg = "Failed to extract levels {!r} from cube {!r}." raise ValueError(emsg.format(list(levels), name)) else: # As a last resort, perform vertical interpolation. 
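Note on the rechunking hunk near the top of this file's diff: it sizes chunks on a dummy *target* array first and then imposes the resulting leading-dimension chunks on the source array, because iris regridding needs the horizontal grid dimensions in a single chunk. A minimal, self-contained sketch of that heuristic (array and chunk sizes below are illustrative assumptions, not values from this diff):

    import dask.array as da

    # Source data: (time, lat, lon), chunked along time only; the grid
    # dimensions are the last dimensions and must stay unchunked.
    data = da.zeros((1200, 90, 180), chunks=(100, 90, 180))
    tgt_grid_shape = (360, 720)  # regridding to a much finer grid

    # Dummy target array: same leading chunks, unchunked grid dimensions.
    # Then let dask pick "auto" chunks for the leading dimensions so each
    # output chunk stays near the default chunk size.
    tgt_data = da.empty(
        data.shape[:-2] + tgt_grid_shape,
        chunks=data.chunks[:-2] + tgt_grid_shape,
        dtype=data.dtype,
    )
    tgt_data = tgt_data.rechunk(
        {i: "auto" for i in range(tgt_data.ndim - 2)}
    )

    # Transfer the leading-dimension chunks back to the source array, so
    # the regridded result inherits reasonably sized chunks.
    data = data.rechunk(tgt_data.chunks[:-2] + data.shape[-2:])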
@@ -1306,11 +1350,13 @@ def get_cmor_levels(cmor_table, coordinate): """ if cmor_table not in CMOR_TABLES: raise ValueError( - f"Level definition cmor_table '{cmor_table}' not available") + f"Level definition cmor_table '{cmor_table}' not available" + ) if coordinate not in CMOR_TABLES[cmor_table].coords: raise ValueError( - f'Coordinate {coordinate} not available for {cmor_table}') + f"Coordinate {coordinate} not available for {cmor_table}" + ) cmor = CMOR_TABLES[cmor_table].coords[coordinate] @@ -1320,8 +1366,9 @@ def get_cmor_levels(cmor_table, coordinate): return [float(cmor.value)] raise ValueError( - f'Coordinate {coordinate} in {cmor_table} does not have requested ' - f'values') + f"Coordinate {coordinate} in {cmor_table} does not have requested " + f"values" + ) def get_reference_levels(dataset): @@ -1347,9 +1394,9 @@ def get_reference_levels(dataset): dataset.files = [dataset.files[0]] cube = dataset.load() try: - coord = cube.coord(axis='Z') + coord = cube.coord(axis="Z") except iris.exceptions.CoordinateNotFoundError as exc: - raise ValueError(f'z-coord not available in {dataset.files}') from exc + raise ValueError(f"z-coord not available in {dataset.files}") from exc return coord.points.tolist() diff --git a/esmvalcore/preprocessor/_regrid_esmpy.py b/esmvalcore/preprocessor/_regrid_esmpy.py index c7edfa829c..b2cb559406 100755 --- a/esmvalcore/preprocessor/_regrid_esmpy.py +++ b/esmvalcore/preprocessor/_regrid_esmpy.py @@ -9,10 +9,14 @@ import ESMF as esmpy # noqa: N811 except ImportError: raise exc +import warnings + import iris import numpy as np from iris.cube import Cube +from esmvalcore.exceptions import ESMValCoreDeprecationWarning + from ._mapping import get_empty_data, map_slices, ref_to_dims_index ESMF_MANAGER = esmpy.Manager(debug=False) @@ -20,9 +24,9 @@ ESMF_LON, ESMF_LAT = 0, 1 ESMF_REGRID_METHODS = { - 'linear': esmpy.RegridMethod.BILINEAR, - 'area_weighted': esmpy.RegridMethod.CONSERVE, - 'nearest': esmpy.RegridMethod.NEAREST_STOD, + "linear": esmpy.RegridMethod.BILINEAR, + "area_weighted": esmpy.RegridMethod.CONSERVE, + "nearest": esmpy.RegridMethod.NEAREST_STOD, } MASK_REGRIDDING_MASK_VALUE = { @@ -45,6 +49,12 @@ class ESMPyRegridder: Does not support lazy regridding nor weights caching. + .. deprecated:: 2.12.0 + This regridder has been deprecated and is scheduled for removal in + version 2.14.0. Please use + :class:`~esmvalcore.preprocessor.regrid_schemes.IrisESMFRegrid` to + create an :doc:`esmf_regrid:index` regridder instead. + Parameters ---------- src_cube: @@ -63,7 +73,7 @@ def __init__( self, src_cube: Cube, tgt_cube: Cube, - method: str = 'linear', + method: str = "linear", mask_threshold: float = 0.99, ): """Initialize class instance.""" @@ -118,16 +128,25 @@ class _ESMPyScheme: """ - _METHOD = '' + _METHOD = "" def __init__(self, mask_threshold: float = 0.99): """Initialize class instance.""" + msg = ( + "The `esmvalcore.preprocessor.regrid_schemes." + f"{self.__class__.__name__}' regridding scheme has been " + "deprecated in ESMValCore version 2.12.0 and is scheduled for " + "removal in version 2.14.0. Please use " + "`esmvalcore.preprocessor.regrid_schemes.IrisESMFRegrid` " + "instead." 
+ ) + warnings.warn(msg, ESMValCoreDeprecationWarning) self.mask_threshold = mask_threshold def __repr__(self) -> str: """Return string representation of class.""" return ( - f'{self.__class__.__name__}(mask_threshold={self.mask_threshold})' + f"{self.__class__.__name__}(mask_threshold={self.mask_threshold})" ) def regridder(self, src_cube: Cube, tgt_cube: Cube) -> ESMPyRegridder: @@ -161,9 +180,15 @@ class ESMPyAreaWeighted(_ESMPyScheme): Does not support lazy regridding. + .. deprecated:: 2.12.0 + This scheme has been deprecated and is scheduled for removal in version + 2.14.0. Please use + :class:`~esmvalcore.preprocessor.regrid_schemes.IrisESMFRegrid` + instead. + """ - _METHOD = 'area_weighted' + _METHOD = "area_weighted" class ESMPyLinear(_ESMPyScheme): @@ -173,9 +198,15 @@ class ESMPyLinear(_ESMPyScheme): Does not support lazy regridding. + .. deprecated:: 2.12.0 + This scheme has been deprecated and is scheduled for removal in version + 2.14.0. Please use + :class:`~esmvalcore.preprocessor.regrid_schemes.IrisESMFRegrid` + instead. + """ - _METHOD = 'linear' + _METHOD = "linear" class ESMPyNearest(_ESMPyScheme): @@ -185,9 +216,15 @@ class ESMPyNearest(_ESMPyScheme): Does not support lazy regridding. + .. deprecated:: 2.12.0 + This scheme has been deprecated and is scheduled for removal in version + 2.14.0. Please use + :class:`~esmvalcore.preprocessor.regrid_schemes.IrisESMFRegrid` + instead. + """ - _METHOD = 'nearest' + _METHOD = "nearest" def cf_2d_bounds_to_esmpy_corners(bounds, circular): @@ -210,7 +247,7 @@ def coords_iris_to_esmpy(lat, lon, circular): """Build ESMF compatible coordinate information from iris coords.""" dim = lat.ndim if lon.ndim != dim: - msg = 'Different dimensions in latitude({}) and longitude({}) coords.' + msg = "Different dimensions in latitude({}) and longitude({}) coords." raise ValueError(msg.format(lat.ndim, lon.ndim)) if dim == 1: for coord in [lat, lon]: @@ -221,38 +258,45 @@ def coords_iris_to_esmpy(lat, lon, circular): if circular: lon_corners = lon.bounds[:, 0] else: - lon_corners = np.concatenate([lon.bounds[:, 0], - lon.bounds[-1:, 1]]) - esmpy_lat_corners, esmpy_lon_corners = np.meshgrid(lat_corners, - lon_corners) + lon_corners = np.concatenate( + [lon.bounds[:, 0], lon.bounds[-1:, 1]] + ) + esmpy_lat_corners, esmpy_lon_corners = np.meshgrid( + lat_corners, lon_corners + ) elif dim == 2: esmpy_lat, esmpy_lon = lat.points.T.copy(), lon.points.T.copy() esmpy_lat_corners = cf_2d_bounds_to_esmpy_corners(lat.bounds, circular) esmpy_lon_corners = cf_2d_bounds_to_esmpy_corners(lon.bounds, circular) else: raise NotImplementedError( - f'Coord dimension is {dim}. Expected 1 or 2.' + f"Coord dimension is {dim}. Expected 1 or 2." ) return esmpy_lat, esmpy_lon, esmpy_lat_corners, esmpy_lon_corners -def get_grid(esmpy_lat, esmpy_lon, - esmpy_lat_corners, esmpy_lon_corners, circular): +def get_grid( + esmpy_lat, esmpy_lon, esmpy_lat_corners, esmpy_lon_corners, circular +): """Build EMSF grid from given coordinate information.""" if circular: num_peri_dims = 1 else: num_peri_dims = 0 - grid = esmpy.Grid(np.array(esmpy_lat.shape), - num_peri_dims=num_peri_dims, - staggerloc=[esmpy.StaggerLoc.CENTER]) + grid = esmpy.Grid( + np.array(esmpy_lat.shape), + num_peri_dims=num_peri_dims, + staggerloc=[esmpy.StaggerLoc.CENTER], + ) grid.get_coords(ESMF_LON)[...] = esmpy_lon grid.get_coords(ESMF_LAT)[...] 
= esmpy_lat grid.add_coords([esmpy.StaggerLoc.CORNER]) - grid_lon_corners = grid.get_coords(ESMF_LON, - staggerloc=esmpy.StaggerLoc.CORNER) - grid_lat_corners = grid.get_coords(ESMF_LAT, - staggerloc=esmpy.StaggerLoc.CORNER) + grid_lon_corners = grid.get_coords( + ESMF_LON, staggerloc=esmpy.StaggerLoc.CORNER + ) + grid_lat_corners = grid.get_coords( + ESMF_LAT, staggerloc=esmpy.StaggerLoc.CORNER + ) grid_lon_corners[...] = esmpy_lon_corners grid_lat_corners[...] = esmpy_lat_corners grid.add_item(esmpy.GridItem.MASK, esmpy.StaggerLoc.CENTER) @@ -267,28 +311,30 @@ def is_lon_circular(lon): if lon.ndim == 1: seam = lon.bounds[-1, 1] - lon.bounds[0, 0] elif lon.ndim == 2: - seam = (lon.bounds[1:-1, -1, (1, 2)] - - lon.bounds[1:-1, 0, (0, 3)]) + seam = lon.bounds[1:-1, -1, (1, 2)] - lon.bounds[1:-1, 0, (0, 3)] else: - raise NotImplementedError('AuxCoord longitude is higher ' - 'dimensional than 2d. Giving up.') - circular = np.all(abs(seam) % 360. < 1.e-3) + raise NotImplementedError( + "AuxCoord longitude is higher " + "dimensional than 2d. Giving up." + ) + circular = np.all(abs(seam) % 360.0 < 1.0e-3) else: - raise ValueError('longitude is neither DimCoord nor AuxCoord. ' - 'Giving up.') + raise ValueError( + "longitude is neither DimCoord nor AuxCoord. Giving up." + ) return circular def cube_to_empty_field(cube): """Build an empty ESMF field from a cube.""" - lat = cube.coord('latitude') - lon = cube.coord('longitude') + lat = cube.coord("latitude") + lon = cube.coord("longitude") circular = is_lon_circular(lon) esmpy_coords = coords_iris_to_esmpy(lat, lon, circular) grid = get_grid(*esmpy_coords, circular=circular) - field = esmpy.Field(grid, - name=cube.long_name, - staggerloc=esmpy.StaggerLoc.CENTER) + field = esmpy.Field( + grid, name=cube.long_name, staggerloc=esmpy.StaggerLoc.CENTER + ) return field @@ -304,21 +350,24 @@ def get_representant(cube, ref_to_slice): def regrid_mask_2d(src_data, regridding_arguments, mask_threshold): """Regrid the mask from the source field to the destination grid.""" - src_field = regridding_arguments['srcfield'] - dst_field = regridding_arguments['dstfield'] - regrid_method = regridding_arguments['regrid_method'] + src_field = regridding_arguments["srcfield"] + dst_field = regridding_arguments["dstfield"] + regrid_method = regridding_arguments["regrid_method"] original_src_mask = np.ma.getmaskarray(src_data) src_field.data[...] = ~original_src_mask.T - src_mask = src_field.grid.get_item(esmpy.GridItem.MASK, - esmpy.StaggerLoc.CENTER) + src_mask = src_field.grid.get_item( + esmpy.GridItem.MASK, esmpy.StaggerLoc.CENTER + ) src_mask[...] = original_src_mask.T - center_mask = dst_field.grid.get_item(esmpy.GridItem.MASK, - esmpy.StaggerLoc.CENTER) + center_mask = dst_field.grid.get_item( + esmpy.GridItem.MASK, esmpy.StaggerLoc.CENTER + ) center_mask[...] = 0 mask_regridder = esmpy.Regrid( src_mask_values=MASK_REGRIDDING_MASK_VALUE[regrid_method], dst_mask_values=np.array([]), - **regridding_arguments) + **regridding_arguments, + ) regr_field = mask_regridder(src_field, dst_field) dst_mask = regr_field.data[...].T < mask_threshold center_mask[...] 
= dst_mask.T @@ -332,17 +381,20 @@ def build_regridder_2d(src_rep, dst_rep, regrid_method, mask_threshold): dst_field = cube_to_empty_field(dst_rep) src_field = cube_to_empty_field(src_rep) regridding_arguments = { - 'srcfield': src_field, - 'dstfield': dst_field, - 'regrid_method': regrid_method, - 'unmapped_action': esmpy.UnmappedAction.IGNORE, - 'ignore_degenerate': True, + "srcfield": src_field, + "dstfield": dst_field, + "regrid_method": regrid_method, + "unmapped_action": esmpy.UnmappedAction.IGNORE, + "ignore_degenerate": True, } - dst_mask = regrid_mask_2d(src_rep.data, - regridding_arguments, mask_threshold) - field_regridder = esmpy.Regrid(src_mask_values=np.array([1]), - dst_mask_values=np.array([1]), - **regridding_arguments) + dst_mask = regrid_mask_2d( + src_rep.data, regridding_arguments, mask_threshold + ) + field_regridder = esmpy.Regrid( + src_mask_values=np.array([1]), + dst_mask_values=np.array([1]), + **regridding_arguments, + ) def regridder(src): """Regrid 2d for irregular grids.""" @@ -366,8 +418,9 @@ def build_regridder_3d(src_rep, dst_rep, regrid_method, mask_threshold): no_levels = src_rep.shape[0] for level in range(no_levels): esmf_regridders.append( - build_regridder_2d(src_rep[level], dst_rep[level], - regrid_method, mask_threshold) + build_regridder_2d( + src_rep[level], dst_rep[level], regrid_method, mask_threshold + ) ) def regridder(src): @@ -380,25 +433,27 @@ def regridder(src): return regridder -def build_regridder(src_rep, dst_rep, method, mask_threshold=.99): +def build_regridder(src_rep, dst_rep, method, mask_threshold=0.99): """Build regridders from representants.""" regrid_method = ESMF_REGRID_METHODS[method] if src_rep.ndim == 2: - regridder = build_regridder_2d(src_rep, dst_rep, - regrid_method, mask_threshold) + regridder = build_regridder_2d( + src_rep, dst_rep, regrid_method, mask_threshold + ) elif src_rep.ndim == 3: - regridder = build_regridder_3d(src_rep, dst_rep, - regrid_method, mask_threshold) + regridder = build_regridder_3d( + src_rep, dst_rep, regrid_method, mask_threshold + ) return regridder def get_grid_representant(cube, horizontal_only=False): """Extract the spatial grid from a cube.""" - horizontal_slice = ['latitude', 'longitude'] + horizontal_slice = ["latitude", "longitude"] ref_to_slice = horizontal_slice if not horizontal_only: try: - cube_z_coord = cube.coord(axis='Z') + cube_z_coord = cube.coord(axis="Z") n_zdims = len(cube.coord_dims(cube_z_coord)) if n_zdims == 0: # scalar z coordinate, go on with 2d regridding diff --git a/esmvalcore/preprocessor/_regrid_iris_esmf_regrid.py b/esmvalcore/preprocessor/_regrid_iris_esmf_regrid.py new file mode 100644 index 0000000000..42b294f15a --- /dev/null +++ b/esmvalcore/preprocessor/_regrid_iris_esmf_regrid.py @@ -0,0 +1,250 @@ +"""Iris-esmf-regrid based regridding scheme.""" + +from __future__ import annotations + +from collections.abc import Iterable +from typing import Any, Literal + +import dask +import dask.array as da +import iris.cube +import iris.exceptions +import numpy as np +from esmf_regrid.schemes import ( + ESMFAreaWeightedRegridder, + ESMFBilinearRegridder, + ESMFNearestRegridder, +) + +from esmvalcore.preprocessor._shared import ( + get_dims_along_axes, + get_dims_along_coords, +) + +METHODS = { + "conservative": ESMFAreaWeightedRegridder, + "bilinear": ESMFBilinearRegridder, + "nearest": ESMFNearestRegridder, +} + + +class IrisESMFRegrid: + """:doc:`esmf_regrid:index` based regridding scheme. + + Supports lazy regridding. 
+
+    Parameters
+    ----------
+    method:
+        Either "conservative", "bilinear" or "nearest". Corresponds to the
+        :mod:`esmpy` methods
+        :attr:`~esmpy.api.constants.RegridMethod.CONSERVE`,
+        :attr:`~esmpy.api.constants.RegridMethod.BILINEAR` or
+        :attr:`~esmpy.api.constants.RegridMethod.NEAREST_STOD` used to
+        calculate regridding weights.
+    mdtol:
+        Tolerance of missing data. The value returned in each element of
+        the returned array will be masked if the fraction of masked data
+        exceeds ``mdtol``. ``mdtol=0`` means no missing data is tolerated while
+        ``mdtol=1`` will mean the resulting element will be masked if and only
+        if all the contributing elements of data are masked. If no value is
+        given, this will default to 1 for conservative regridding and 0
+        otherwise. Only available for methods 'bilinear' and 'conservative'.
+    use_src_mask:
+        If True, derive a mask from the source cube data,
+        which will tell :mod:`esmpy` which points to ignore. If an array is
+        provided, that will be used.
+        If set to :obj:`None`, it will be set to :obj:`True` for methods
+        ``'bilinear'`` and ``'conservative'`` and to :obj:`False` for method
+        ``'nearest'``. This default may be changed to :obj:`True` for all
+        schemes once `SciTools-incubator/iris-esmf-regrid#368
+        <https://github.com/SciTools-incubator/iris-esmf-regrid/issues/368>`_
+        has been resolved.
+    use_tgt_mask:
+        If True, derive a mask from the target cube data,
+        which will tell :mod:`esmpy` which points to ignore. If an array is
+        provided, that will be used.
+        If set to :obj:`None`, it will be set to :obj:`True` for methods
+        ``'bilinear'`` and ``'conservative'`` and to :obj:`False` for method
+        ``'nearest'``. This default may be changed to :obj:`True` for all
+        schemes once `SciTools-incubator/iris-esmf-regrid#368`_ has been
+        resolved.
+    collapse_src_mask_along:
+        When deriving the mask from the source cube data, collapse the mask
+        along the dimensions identified by these axes or coordinates. Only
+        points that are masked at all time (``'T'``), vertical levels
+        (``'Z'``), or both time and vertical levels (``'TZ'``) will be
+        considered masked. Instead of the axes ``'T'`` and ``'Z'``,
+        coordinate names can also be provided. For any cube dimensions not
+        specified here, the first slice along the coordinate will be used to
+        determine the mask.
+    collapse_tgt_mask_along:
+        When deriving the mask from the target cube data, collapse the mask
+        along the dimensions identified by these axes or coordinates. Only
+        points that are masked at all time (``'T'``), vertical levels
+        (``'Z'``), or both time and vertical levels (``'TZ'``) will be
+        considered masked. Instead of the axes ``'T'`` and ``'Z'``,
+        coordinate names can also be provided. For any cube dimensions not
+        specified here, the first slice along the coordinate will be used to
+        determine the mask.
+    src_resolution:
+        If present, represents the number of latitude slices per source cell
+        given to ESMF for calculation. If resolution is set, the source cube
+        must have strictly increasing bounds (bounds may be transposed
+        plus or minus 360 degrees to make the bounds strictly increasing).
+        Only available for method 'conservative'.
+    tgt_resolution:
+        If present, represents the number of latitude slices per target cell
+        given to ESMF for calculation. If resolution is set, the target cube
+        must have strictly increasing bounds (bounds may be transposed
+        plus or minus 360 degrees to make the bounds strictly increasing).
+        Only available for method 'conservative'.
+    tgt_location:
+        Only used if the target grid is an :class:`iris.mesh.MeshXY`. Describes
+        the location for data on the mesh. Either ``'face'`` or ``'node'`` for
+        bilinear or nearest neighbour regridding, can only be ``'face'`` for
+        first order conservative regridding.
+
+    Attributes
+    ----------
+    kwargs:
+        Keyword arguments that will be provided to the regridder.
+    """
+
+    def __init__(
+        self,
+        method: Literal["bilinear", "conservative", "nearest"],
+        mdtol: float | None = None,
+        use_src_mask: None | bool | np.ndarray = None,
+        use_tgt_mask: None | bool | np.ndarray = None,
+        collapse_src_mask_along: Iterable[str] = ("Z",),
+        collapse_tgt_mask_along: Iterable[str] = ("Z",),
+        src_resolution: int | None = None,
+        tgt_resolution: int | None = None,
+        tgt_location: Literal["face", "node"] | None = None,
+    ) -> None:
+        if method not in METHODS:
+            raise ValueError(
+                "`method` should be one of 'bilinear', 'conservative', or "
+                "'nearest'"
+            )
+
+        if use_src_mask is None:
+            use_src_mask = method != "nearest"
+        if use_tgt_mask is None:
+            use_tgt_mask = method != "nearest"
+
+        self.kwargs: dict[str, Any] = {
+            "method": method,
+            "use_src_mask": use_src_mask,
+            "use_tgt_mask": use_tgt_mask,
+            "collapse_src_mask_along": collapse_src_mask_along,
+            "collapse_tgt_mask_along": collapse_tgt_mask_along,
+            "tgt_location": tgt_location,
+        }
+        if method == "nearest":
+            if mdtol is not None:
+                raise TypeError(
+                    "`mdtol` can only be specified when `method='bilinear'` "
+                    "or `method='conservative'`"
+                )
+        else:
+            self.kwargs["mdtol"] = mdtol
+        if method == "conservative":
+            self.kwargs["src_resolution"] = src_resolution
+            self.kwargs["tgt_resolution"] = tgt_resolution
+        elif src_resolution is not None:
+            raise TypeError(
+                "`src_resolution` can only be specified when "
+                "`method='conservative'`"
+            )
+        elif tgt_resolution is not None:
+            raise TypeError(
+                "`tgt_resolution` can only be specified when "
+                "`method='conservative'`"
+            )
+
+    def __repr__(self) -> str:
+        """Return string representation of class."""
+        kwargs_str = ", ".join(
+            f"{k}={repr(v)}" for k, v in self.kwargs.items()
+        )
+        return f"{self.__class__.__name__}({kwargs_str})"
+
+    @staticmethod
+    def _get_mask(
+        cube: iris.cube.Cube,
+        collapse_mask_along: Iterable[str],
+    ) -> np.ndarray:
+        """Read the mask from the cube data.
+
+        This function assumes that the mask is constant in dimensions
+        that are not horizontal or specified in `collapse_mask_along`.
+        """
+        horizontal_dims = get_dims_along_axes(cube, ["X", "Y"])
+        axes = tuple(
+            elem
+            for elem in collapse_mask_along
+            if isinstance(elem, str) and elem.upper() in ("T", "Z")
+        )
+        other_dims = (
+            get_dims_along_axes(cube, axes)  # type: ignore[arg-type]
+            + get_dims_along_coords(cube, collapse_mask_along)
+        )
+
+        slices = tuple(
+            slice(None) if i in horizontal_dims + other_dims else 0
+            for i in range(cube.ndim)
+        )
+        subcube = cube[slices]
+        subcube_other_dims = (
+            get_dims_along_axes(subcube, axes)  # type: ignore[arg-type]
+            + get_dims_along_coords(subcube, collapse_mask_along)
+        )
+
+        mask = da.ma.getmaskarray(subcube.core_data())
+        return mask.all(axis=subcube_other_dims)
+
+    def regridder(
+        self,
+        src_cube: iris.cube.Cube,
+        tgt_cube: iris.cube.Cube | iris.mesh.MeshXY,
+    ) -> (
+        ESMFAreaWeightedRegridder
+        | ESMFBilinearRegridder
+        | ESMFNearestRegridder
+    ):
+        """Create an :doc:`esmf_regrid:index` based regridding function.
+
+        Parameters
+        ----------
+        src_cube:
+            Cube defining the source grid.
+        tgt_cube:
+            Cube defining the target grid.
+ + Returns + ------- + :obj:`esmf_regrid.schemes.ESMFAreaWeightedRegridder` or + :obj:`esmf_regrid.schemes.ESMFBilinearRegridder` or + :obj:`esmf_regrid.schemes.ESMFNearestRegridder`: + An :doc:`esmf_regrid:index` regridder. + """ + kwargs = self.kwargs.copy() + regridder_cls = METHODS[kwargs.pop("method")] + src_mask = kwargs.pop("use_src_mask") + collapse_mask_along = kwargs.pop("collapse_src_mask_along") + if src_mask is True: + src_mask = self._get_mask(src_cube, collapse_mask_along) + tgt_mask = kwargs.pop("use_tgt_mask") + collapse_mask_along = kwargs.pop("collapse_tgt_mask_along") + if tgt_mask is True: + tgt_mask = self._get_mask(tgt_cube, collapse_mask_along) + src_mask, tgt_mask = dask.compute(src_mask, tgt_mask) + return regridder_cls( + src_cube, + tgt_cube, + use_src_mask=src_mask, + use_tgt_mask=tgt_mask, + **kwargs, + ) diff --git a/esmvalcore/preprocessor/_regrid_unstructured.py b/esmvalcore/preprocessor/_regrid_unstructured.py index 6618c728c0..02e8b62ebc 100644 --- a/esmvalcore/preprocessor/_regrid_unstructured.py +++ b/esmvalcore/preprocessor/_regrid_unstructured.py @@ -1,4 +1,5 @@ """Unstructured grid regridding.""" + from __future__ import annotations import logging @@ -55,7 +56,7 @@ def regridder( # Unstructured nearest-neighbor regridding requires exactly one X and # one Y coordinate (latitude and longitude). Remove any X or Y # dimensional coordinates if necessary. - for axis in ['x', 'y']: + for axis in ["x", "y"]: if src_cube.coords(axis=axis, dim_coords=True): coord = src_cube.coord(axis=axis, dim_coords=True) src_cube.remove_coord(coord) @@ -94,19 +95,19 @@ def __init__(self, src_cube: Cube, tgt_cube: Cube) -> None: f"Target cube {tgt_cube.summary(shorten=True)} does not have " f"regular grid" ) - src_lat = src_cube.coord('latitude').copy() - src_lon = src_cube.coord('longitude').copy() - tgt_lat = tgt_cube.coord('latitude').copy() - tgt_lon = tgt_cube.coord('longitude').copy() + src_lat = src_cube.coord("latitude").copy() + src_lon = src_cube.coord("longitude").copy() + tgt_lat = tgt_cube.coord("latitude").copy() + tgt_lon = tgt_cube.coord("longitude").copy() self.src_coords = [src_lat, src_lon] self.tgt_coords = [tgt_lat, tgt_lon] self.tgt_n_lat = tgt_lat.core_points().size self.tgt_n_lon = tgt_lon.core_points().size # Calculate regridding weights and indices - (self._weights, self._indices, - self._convex_hull_idx) = self._get_weights_and_idx( - src_lat, src_lon, tgt_lat, tgt_lon) + (self._weights, self._indices, self._convex_hull_idx) = ( + self._get_weights_and_idx(src_lat, src_lon, tgt_lat, tgt_lon) + ) def _get_weights_and_idx( self, @@ -134,10 +135,10 @@ def _get_weights_and_idx( src_lon = src_lon.copy() tgt_lat = tgt_lat.copy() tgt_lon = tgt_lon.copy() - src_lat.convert_units('degrees') - src_lon.convert_units('degrees') - tgt_lat.convert_units('degrees') - tgt_lon.convert_units('degrees') + src_lat.convert_units("degrees") + src_lon.convert_units("degrees") + tgt_lat.convert_units("degrees") + tgt_lon.convert_units("degrees") # Bring points into correct format # src_points: (N, 2) where N is the number of source grid points @@ -146,11 +147,11 @@ def _get_weights_and_idx( np.stack((src_lat.core_points(), src_lon.core_points()), axis=-1), np.stack( tuple( - tgt_coord.ravel() for tgt_coord in - np.meshgrid( + tgt_coord.ravel() + for tgt_coord in np.meshgrid( tgt_lat.core_points(), tgt_lon.core_points(), - indexing='ij', + indexing="ij", ) ), axis=-1, @@ -169,7 +170,7 @@ def _get_weights_and_idx( src_points_with_convex_hull = self._add_convex_hull_twice( 
src_points, hull.vertices ) - src_points_with_convex_hull[-2 * n_hull:-n_hull, 1] -= 360 + src_points_with_convex_hull[-2 * n_hull : -n_hull, 1] -= 360 src_points_with_convex_hull[-n_hull:, 1] += 360 # Actual weights calculation @@ -198,7 +199,7 @@ def __call__(self, cube: Cube) -> Cube: f"Cube {cube.summary(shorten=True)} does not have " f"unstructured grid" ) - coords = [cube.coord('latitude'), cube.coord('longitude')] + coords = [cube.coord("latitude"), cube.coord("longitude")] if coords != self.src_coords: raise ValueError( f"The given cube {cube.summary(shorten=True)} is not defined " @@ -211,14 +212,15 @@ def __call__(self, cube: Cube) -> Cube: # (excluding the unstructured grid dimension) plus the (x, y) target # grid dimensions. All dimensions to the right of the unstructured grid # dimension need to be shifted to the right by 1. - udim = cube.coord_dims('latitude')[0] + udim = cube.coord_dims("latitude")[0] dim_coords_and_dims = [ - (c, cube.coord_dims(c)[0]) for c in cube.coords(dim_coords=True) if - udim not in cube.coord_dims(c) + (c, cube.coord_dims(c)[0]) + for c in cube.coords(dim_coords=True) + if udim not in cube.coord_dims(c) ] dim_coords_and_dims = [ - (c, d) if d < udim else (c, d + 1) for (c, d) in - dim_coords_and_dims + (c, d) if d < udim else (c, d + 1) + for (c, d) in dim_coords_and_dims ] dim_coords_and_dims.append((self.tgt_coords[0], udim)) dim_coords_and_dims.append((self.tgt_coords[1], udim + 1)) @@ -227,11 +229,12 @@ def __call__(self, cube: Cube) -> Cube: # grid dimension (also make sure to shift all dimensions to the right # of the unstructured grid to the right by 1) old_aux_coords_and_dims = [ - (c, cube.coord_dims(c)) for c in cube.coords(dim_coords=False) if - udim not in cube.coord_dims(c) + (c, cube.coord_dims(c)) + for c in cube.coords(dim_coords=False) + if udim not in cube.coord_dims(c) ] aux_coords_and_dims = [] - for (aux_coord, dims) in old_aux_coords_and_dims: + for aux_coord, dims in old_aux_coords_and_dims: dims = tuple(d if d < udim else d + 1 for d in dims) aux_coords_and_dims.append((aux_coord, dims)) @@ -248,10 +251,10 @@ def __call__(self, cube: Cube) -> Cube: def _get_regridded_data(self, cube: Cube) -> np.ndarray | da.Array: """Get regridded data.""" - udim = cube.coord_dims('latitude')[0] + udim = cube.coord_dims("latitude")[0] # Cube must not be chunked along latitude and longitude dimension - rechunk_cube(cube, ['latitude', 'longitude']) + rechunk_cube(cube, ["latitude", "longitude"]) # Make sure that masked arrays are filled with nan's so they are # handled properly @@ -273,7 +276,7 @@ def _get_regridded_data(self, cube: Cube) -> np.ndarray | da.Array: def _regrid_eager(self, data: np.ndarray, axis: int) -> np.ndarray: """Eager regridding.""" v_interpolate = np.vectorize( - self._interpolate, signature='(i)->(lat,lon)' + self._interpolate, signature="(i)->(lat,lon)" ) # Make sure that interpolation dimension is rightmost dimension and @@ -294,12 +297,12 @@ def _regrid_lazy( """Lazy regridding.""" regridded_arr = da.apply_gufunc( self._interpolate, - '(i)->(lat,lon)', + "(i)->(lat,lon)", data, axes=[(axis,), (axis, axis + 1)], vectorize=True, output_dtypes=dtype, - output_sizes={'lat': self.tgt_n_lat, 'lon': self.tgt_n_lon}, + output_sizes={"lat": self.tgt_n_lat, "lon": self.tgt_n_lon}, ) return regridded_arr @@ -324,7 +327,7 @@ def _interpolate(self, data: np.ndarray) -> np.ndarray: """ data = self._add_convex_hull_twice(data, self._convex_hull_idx) interp_data = np.einsum( - 'nj,nj->n', np.take(data, self._indices), self._weights 
+ "nj,nj->n", np.take(data, self._indices), self._weights ) interp_data = interp_data.reshape(self.tgt_n_lat, self.tgt_n_lon) return interp_data @@ -354,7 +357,7 @@ def _calculate_weights( indices = np.take(tri.simplices, simplex, axis=0) transform = np.take(tri.transform, simplex, axis=0) delta = tgt_points - transform[:, 2] - bary = np.einsum('njk,nk->nj', transform[:, :2, :], delta) + bary = np.einsum("njk,nk->nj", transform[:, :2, :], delta) weights = np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True))) extra_idx = simplex == -1 weights[extra_idx, :] = np.nan # missing values @@ -378,7 +381,7 @@ class UnstructuredLinear: def __repr__(self) -> str: """Return string representation of class.""" - return 'UnstructuredLinear()' + return "UnstructuredLinear()" def regridder( self, diff --git a/esmvalcore/preprocessor/_shared.py b/esmvalcore/preprocessor/_shared.py index addd3617ac..04490bdda4 100644 --- a/esmvalcore/preprocessor/_shared.py +++ b/esmvalcore/preprocessor/_shared.py @@ -3,6 +3,7 @@ Utility functions that can be used for multiple preprocessor steps """ + from __future__ import annotations import logging @@ -10,7 +11,7 @@ import warnings from collections import defaultdict from collections.abc import Callable, Iterable -from functools import partial, wraps +from functools import wraps from typing import Any, Literal, Optional import dask.array as da @@ -19,6 +20,7 @@ from iris.coords import CellMeasure, Coord, DimCoord from iris.cube import Cube from iris.exceptions import CoordinateMultiDimError, CoordinateNotFoundError +from iris.util import broadcast_to_shape from esmvalcore.exceptions import ESMValCoreDeprecationWarning from esmvalcore.iris_helpers import has_regular_grid @@ -73,7 +75,7 @@ def get_iris_aggregator( aggregator_kwargs = dict(operator_kwargs) # Deprecations - if cap_operator == 'STD': + if cap_operator == "STD": msg = ( f"The operator '{operator}' for computing the standard deviation " f"has been deprecated in ESMValCore version 2.10.0 and is " @@ -81,8 +83,8 @@ def get_iris_aggregator( f"instead. This is an exact replacement." ) warnings.warn(msg, ESMValCoreDeprecationWarning) - operator = 'std_dev' - cap_operator = 'STD_DEV' + operator = "std_dev" + cap_operator = "STD_DEV" elif re.match(r"^(P\d{1,2})(\.\d*)?$", cap_operator): msg = ( f"Specifying percentile operators with the syntax 'pXX.YY' (here: " @@ -93,9 +95,9 @@ def get_iris_aggregator( f"This is an exact replacement." 
) warnings.warn(msg, ESMValCoreDeprecationWarning) - aggregator_kwargs['percent'] = float(operator[1:]) - operator = 'percentile' - cap_operator = 'PERCENTILE' + aggregator_kwargs["percent"] = float(operator[1:]) + operator = "percentile" + cap_operator = "PERCENTILE" # Check if valid aggregator is found if not hasattr(iris.analysis, cap_operator): @@ -103,20 +105,20 @@ def get_iris_aggregator( f"Aggregator '{operator}' not found in iris.analysis module" ) aggregator = getattr(iris.analysis, cap_operator) - if not hasattr(aggregator, 'aggregate'): + if not hasattr(aggregator, "aggregate"): raise ValueError( f"Aggregator {aggregator} found by '{operator}' is not a valid " f"iris.analysis.Aggregator" ) # Use dummy cube to check if aggregator_kwargs are valid - x_coord = DimCoord([1.0], bounds=[0.0, 2.0], var_name='x') + x_coord = DimCoord([1.0], bounds=[0.0, 2.0], var_name="x") cube = Cube([0.0], dim_coords_and_dims=[(x_coord, 0)]) test_kwargs = update_weights_kwargs( aggregator, aggregator_kwargs, np.array([1.0]) ) try: - cube.collapsed('x', aggregator, **test_kwargs) + cube.collapsed("x", aggregator, **test_kwargs) except (ValueError, TypeError) as exc: raise ValueError( f"Invalid kwargs for operator '{operator}': {str(exc)}" @@ -181,19 +183,19 @@ def update_weights_kwargs( """ kwargs = dict(kwargs) - if aggregator_accept_weights(aggregator) and kwargs.get('weights', True): - kwargs['weights'] = weights + if aggregator_accept_weights(aggregator) and kwargs.get("weights", True): + kwargs["weights"] = weights if cube is not None and callback is not None: callback(cube, **callback_kwargs) else: - kwargs.pop('weights', None) + kwargs.pop("weights", None) return kwargs def get_normalized_cube( cube: Cube, statistics_cube: Cube, - normalize: Literal['subtract', 'divide'], + normalize: Literal["subtract", "divide"], ) -> Cube: """Get cube normalized with statistics cube. @@ -219,10 +221,10 @@ def get_normalized_cube( Input cube normalized with statistics cube. """ - if normalize == 'subtract': + if normalize == "subtract": normalized_cube = cube - statistics_cube - elif normalize == 'divide': + elif normalize == "divide": normalized_cube = cube / statistics_cube # Iris sometimes masks zero-divisions, sometimes not @@ -298,6 +300,7 @@ def _groupby(iterable, keyfunc): def _group_products(products, by_key): """Group products by the given list of attributes.""" + def grouper(product): return product.group(by_key) @@ -318,52 +321,6 @@ def get_array_module(*args): return np -def broadcast_to_shape(array, shape, dim_map, chunks=None): - """Copy of `iris.util.broadcast_to_shape` that allows specifying chunks.""" - if isinstance(array, da.Array): - if chunks is not None: - chunks = list(chunks) - for src_idx, tgt_idx in enumerate(dim_map): - # Only use the specified chunks along new dimensions or on - # dimensions that have size 1 in the source array. - if array.shape[src_idx] != 1: - chunks[tgt_idx] = array.chunks[src_idx] - broadcast = partial(da.broadcast_to, shape=shape, chunks=chunks) - else: - broadcast = partial(np.broadcast_to, shape=shape) - - n_orig_dims = len(array.shape) - n_new_dims = len(shape) - n_orig_dims - array = array.reshape(array.shape + (1,) * n_new_dims) - - # Get dims in required order. - array = np.moveaxis(array, range(n_orig_dims), dim_map) - new_array = broadcast(array) - - if np.ma.isMA(array): - # broadcast_to strips masks so we need to handle them explicitly. 
- mask = np.ma.getmask(array) - if mask is np.ma.nomask: - new_mask = np.ma.nomask - else: - new_mask = broadcast(mask) - new_array = np.ma.array(new_array, mask=new_mask) - - elif _is_lazy_masked_data(array): - # broadcast_to strips masks so we need to handle them explicitly. - mask = da.ma.getmaskarray(array) - new_mask = broadcast(mask) - new_array = da.ma.masked_array(new_array, new_mask) - - return new_array - - -def _is_lazy_masked_data(array): - """Similar to `iris._lazy_data.is_lazy_masked_data`.""" - return isinstance(array, da.Array) and isinstance( - da.utils.meta_from_array(array), np.ma.MaskedArray) - - def get_weights( cube: Cube, coords: Iterable[Coord] | Iterable[str], @@ -373,19 +330,19 @@ def get_weights( weights = npx.ones_like(cube.core_data()) # Time weights: lengths of time interval - if 'time' in coords: - weights *= broadcast_to_shape( + if "time" in coords: + weights = weights * broadcast_to_shape( npx.array(get_time_weights(cube)), cube.shape, - cube.coord_dims('time'), + cube.coord_dims("time"), + chunks=cube.lazy_data().chunks if cube.has_lazy_data() else None, ) # Latitude weights: cell areas - if 'latitude' in coords: + if "latitude" in coords: cube = cube.copy() # avoid overwriting input cube - if ( - not cube.cell_measures('cell_area') and - not cube.coords('longitude') + if not cube.cell_measures("cell_area") and not cube.coords( + "longitude" ): raise CoordinateNotFoundError( f"Cube {cube.summary(shorten=True)} needs a `longitude` " @@ -395,10 +352,17 @@ def get_weights( f"variable)" ) try_adding_calculated_cell_area(cube) - weights *= broadcast_to_shape( - cube.cell_measure('cell_area').core_data(), + area_weights = cube.cell_measure("cell_area").core_data() + if cube.has_lazy_data(): + area_weights = da.array(area_weights) + chunks = cube.lazy_data().chunks + else: + chunks = None + weights = weights * broadcast_to_shape( + area_weights, cube.shape, - cube.cell_measure_dims('cell_area'), + cube.cell_measure_dims("cell_area"), + chunks=chunks, ) return weights @@ -420,8 +384,8 @@ def get_time_weights(cube: Cube) -> np.ndarray | da.core.Array: :class:`numpy.ndarray` otherwise. 
""" - time = cube.coord('time') - coord_dims = cube.coord_dims('time') + time = cube.coord("time") + coord_dims = cube.coord_dims("time") # Multidimensional time coordinates are not supported: In this case, # weights cannot be simply calculated as difference between the bounds @@ -445,7 +409,7 @@ def get_time_weights(cube: Cube) -> np.ndarray | da.core.Array: def try_adding_calculated_cell_area(cube: Cube) -> None: """Try to add calculated cell measure 'cell_area' to cube (in-place).""" - if cube.cell_measures('cell_area'): + if cube.cell_measures("cell_area"): return logger.debug( @@ -455,28 +419,30 @@ def try_adding_calculated_cell_area(cube: Cube) -> None: ) logger.debug("Attempting to calculate grid cell area") - rotated_pole_grid = all([ - cube.coord('latitude').core_points().ndim == 2, - cube.coord('longitude').core_points().ndim == 2, - cube.coords('grid_latitude'), - cube.coords('grid_longitude'), - ]) + rotated_pole_grid = all( + [ + cube.coord("latitude").core_points().ndim == 2, + cube.coord("longitude").core_points().ndim == 2, + cube.coords("grid_latitude"), + cube.coords("grid_longitude"), + ] + ) # For regular grids, calculate grid cell areas with iris function if has_regular_grid(cube): - cube = guess_bounds(cube, ['latitude', 'longitude']) + cube = guess_bounds(cube, ["latitude", "longitude"]) logger.debug("Calculating grid cell areas for regular grid") cell_areas = _compute_area_weights(cube) # For rotated pole grids, use grid_latitude and grid_longitude to calculate # grid cell areas elif rotated_pole_grid: - cube = guess_bounds(cube, ['grid_latitude', 'grid_longitude']) + cube = guess_bounds(cube, ["grid_latitude", "grid_longitude"]) cube_tmp = cube.copy() - cube_tmp.remove_coord('latitude') - cube_tmp.coord('grid_latitude').rename('latitude') - cube_tmp.remove_coord('longitude') - cube_tmp.coord('grid_longitude').rename('longitude') + cube_tmp.remove_coord("latitude") + cube_tmp.coord("grid_latitude").rename("latitude") + cube_tmp.remove_coord("longitude") + cube_tmp.coord("grid_longitude").rename("longitude") logger.debug("Calculating grid cell areas for rotated pole grid") cell_areas = _compute_area_weights(cube_tmp) @@ -487,11 +453,14 @@ def try_adding_calculated_cell_area(cube: Cube) -> None: "areas for irregular or unstructured grid of cube %s", cube.summary(shorten=True), ) - raise CoordinateMultiDimError(cube.coord('latitude')) + raise CoordinateMultiDimError(cube.coord("latitude")) # Add new cell measure cell_measure = CellMeasure( - cell_areas, standard_name='cell_area', units='m2', measure='area', + cell_areas, + standard_name="cell_area", + units="m2", + measure="area", ) cube.add_cell_measure(cell_measure, np.arange(cube.ndim)) @@ -500,52 +469,22 @@ def _compute_area_weights(cube): """Compute area weights.""" with warnings.catch_warnings(record=True) as caught_warnings: warnings.filterwarnings( - 'always', + "always", message="Using DEFAULT_SPHERICAL_EARTH_RADIUS.", category=UserWarning, - module='iris.analysis.cartography', + module="iris.analysis.cartography", ) - # TODO: replace the following line with - # weights = iris.analysis.cartography.area_weights( - # cube, compute=not cube.has_lazy_data() - # ) - # once https://github.com/SciTools/iris/pull/5658 is available - weights = _get_area_weights(cube) - + if cube.has_lazy_data(): + kwargs = {"compute": False, "chunks": cube.lazy_data().chunks} + else: + kwargs = {"compute": True} + weights = iris.analysis.cartography.area_weights(cube, **kwargs) for warning in caught_warnings: logger.debug( "%s while 
computing area weights of the following cube:\n%s", - warning.message, cube) - return weights - - -def _get_area_weights(cube: Cube) -> np.ndarray | da.Array: - """Get area weights. - - For non-lazy data, simply use the according iris function. For lazy data, - calculate area weights for a single lat-lon slice and broadcast it to the - correct shape. - - Note - ---- - This is a temporary workaround to get lazy area weights. Can be removed - once https://github.com/SciTools/iris/pull/5658 is available. - - """ - if not cube.has_lazy_data(): - return iris.analysis.cartography.area_weights(cube) - - lat_lon_dims = sorted( - tuple(set(cube.coord_dims('latitude') + cube.coord_dims('longitude'))) - ) - lat_lon_slice = next(cube.slices(['latitude', 'longitude'], ordered=False)) - weights_2d = iris.analysis.cartography.area_weights(lat_lon_slice) - weights = broadcast_to_shape( - da.array(weights_2d), - cube.shape, - lat_lon_dims, - chunks=cube.lazy_data().chunks, - ) + warning.message, + cube, + ) return weights @@ -575,3 +514,33 @@ def get_all_coord_dims( all_coord_dims.extend(cube.coord_dims(coord)) sorted_all_coord_dims = sorted(list(set(all_coord_dims))) return tuple(sorted_all_coord_dims) + + +def _get_dims_along(cube, *args, **kwargs): + """Get a tuple with the cube dimensions matching *args and **kwargs.""" + try: + coord = cube.coord(*args, **kwargs, dim_coords=True) + except iris.exceptions.CoordinateNotFoundError: + try: + coord = cube.coord(*args, **kwargs) + except iris.exceptions.CoordinateNotFoundError: + return tuple() + return cube.coord_dims(coord) + + +def get_dims_along_axes( + cube: iris.cube.Cube, + axes: Iterable[Literal["T", "Z", "Y", "X"]], +) -> tuple[int, ...]: + """Get a tuple with the dimensions along one or more axis.""" + dims = {d for axis in axes for d in _get_dims_along(cube, axis=axis)} + return tuple(sorted(dims)) + + +def get_dims_along_coords( + cube: iris.cube.Cube, + coords: Iterable[str], +) -> tuple[int, ...]: + """Get a tuple with the dimensions along one or more coordinates.""" + dims = {d for coord in coords for d in _get_dims_along(cube, coord)} + return tuple(sorted(dims)) diff --git a/esmvalcore/preprocessor/_supplementary_vars.py b/esmvalcore/preprocessor/_supplementary_vars.py index d5cb0e2d31..4096036674 100644 --- a/esmvalcore/preprocessor/_supplementary_vars.py +++ b/esmvalcore/preprocessor/_supplementary_vars.py @@ -24,12 +24,12 @@ def register_supplementaries(variables, required): preferred that at least one variable is available, but not strictly necessary. """ - valid = ('require_at_least_one', 'prefer_at_least_one') + valid = ("require_at_least_one", "prefer_at_least_one") if required not in valid: raise NotImplementedError(f"`required` should be one of {valid}") supplementaries = { - 'variables': variables, - 'required': required, + "variables": variables, + "required": required, } def wrapper(func): @@ -61,9 +61,11 @@ def add_cell_measure(cube, cell_measure_cube, measure): ValueError If measure name is not 'area' or 'volume'. 
""" - if measure not in ['area', 'volume']: - raise ValueError(f"measure name must be 'area' or 'volume', " - f"got {measure} instead") + if measure not in ["area", "volume"]: + raise ValueError( + f"measure name must be 'area' or 'volume', " + f"got {measure} instead" + ) measure = iris.coords.CellMeasure( cell_measure_cube.core_data(), standard_name=cell_measure_cube.standard_name, @@ -74,8 +76,11 @@ def add_cell_measure(cube, cell_measure_cube, measure): ) start_dim = cube.ndim - len(measure.shape) cube.add_cell_measure(measure, range(start_dim, cube.ndim)) - logger.debug('Added %s as cell measure in cube of %s.', - cell_measure_cube.var_name, cube.var_name) + logger.debug( + "Added %s as cell measure in cube of %s.", + cell_measure_cube.var_name, + cube.var_name, + ) def add_ancillary_variable(cube, ancillary_cube): @@ -98,11 +103,15 @@ def add_ancillary_variable(cube, ancillary_cube): standard_name=ancillary_cube.standard_name, units=ancillary_cube.units, var_name=ancillary_cube.var_name, - attributes=ancillary_cube.attributes) + attributes=ancillary_cube.attributes, + ) start_dim = cube.ndim - len(ancillary_var.shape) cube.add_ancillary_variable(ancillary_var, range(start_dim, cube.ndim)) - logger.debug('Added %s as ancillary variable in cube of %s.', - ancillary_cube.var_name, cube.var_name) + logger.debug( + "Added %s as ancillary variable in cube of %s.", + ancillary_cube.var_name, + cube.var_name, + ) def add_supplementary_variables( @@ -124,9 +133,9 @@ def add_supplementary_variables( Cube with added ancillary variables and/or cell measures. """ measure_names = { - 'areacella': 'area', - 'areacello': 'area', - 'volcello': 'volume' + "areacella": "area", + "areacello": "area", + "volcello": "volume", } for supplementary_cube in supplementary_cubes: if supplementary_cube.var_name in measure_names: diff --git a/esmvalcore/preprocessor/_time.py b/esmvalcore/preprocessor/_time.py index d4dff4c1f9..fd9d4c16ac 100644 --- a/esmvalcore/preprocessor/_time.py +++ b/esmvalcore/preprocessor/_time.py @@ -3,6 +3,7 @@ Allows for selecting data subsets using certain time bounds; constructing seasonal and area averages. """ + from __future__ import annotations import copy @@ -43,19 +44,19 @@ # Ignore warnings about missing bounds where those are not required for _coord in ( - 'clim_season', - 'day_of_year', - 'day_of_month', - 'month_number', - 'season_year', - 'year', + "clim_season", + "day_of_year", + "day_of_month", + "month_number", + "season_year", + "year", ): filterwarnings( - 'ignore', + "ignore", "Collapsing a non-contiguous coordinate. " f"Metadata may not be fully descriptive for '{_coord}'.", category=UserWarning, - module='iris', + module="iris", ) @@ -102,12 +103,12 @@ def extract_time( Time ranges are outside the cube time limits. """ - t_1 = PartialDateTime(year=int(start_year), - month=int(start_month), - day=int(start_day)) - t_2 = PartialDateTime(year=int(end_year), - month=int(end_month), - day=int(end_day)) + t_1 = PartialDateTime( + year=int(start_year), month=int(start_month), day=int(start_day) + ) + t_2 = PartialDateTime( + year=int(end_year), month=int(end_month), day=int(end_day) + ) return _extract_datetime(cube, t_1, t_2) @@ -117,7 +118,7 @@ def _parse_start_date(date): Returns a datetime.datetime object. 
""" - if date.startswith('P'): + if date.startswith("P"): start_date = isodate.parse_duration(date) else: try: @@ -125,7 +126,8 @@ def _parse_start_date(date): except isodate.isoerror.ISO8601Error: start_date = isodate.parse_date(date) start_date = datetime.datetime.combine( - start_date, datetime.time.min) + start_date, datetime.time.min + ) return start_date @@ -134,7 +136,7 @@ def _parse_end_date(date): Returns a datetime.datetime object. """ - if date.startswith('P'): + if date.startswith("P"): end_date = isodate.parse_duration(date) else: if len(date) == 4: @@ -147,8 +149,9 @@ def _parse_end_date(date): end_date = isodate.parse_datetime(date) except isodate.ISO8601Error: end_date = isodate.parse_date(date) - end_date = datetime.datetime.combine(end_date, - datetime.time.min) + end_date = datetime.datetime.combine( + end_date, datetime.time.min + ) end_date += datetime.timedelta(seconds=1) return end_date @@ -162,12 +165,14 @@ def _duration_to_date(duration, reference, sign): def _select_timeslice(cube: Cube, select: np.ndarray) -> Cube | None: """Slice a cube along its time axis.""" if select.any(): - coord = cube.coord('time') + coord = cube.coord("time") time_dims = cube.coord_dims(coord) if time_dims: time_dim = time_dims[0] - slices = tuple(select if i == time_dim else slice(None) - for i in range(cube.ndim)) + slices = tuple( + select if i == time_dim else slice(None) + for i in range(cube.ndim) + ) cube_slice = cube[slices] else: cube_slice = cube @@ -206,9 +211,9 @@ def _extract_datetime( ValueError if time ranges are outside the cube time limits """ - time_coord = cube.coord('time') + time_coord = cube.coord("time") time_units = time_coord.units - if time_units.calendar == '360_day': + if time_units.calendar == "360_day": if isinstance(start_datetime.day, int) and start_datetime.day > 30: start_datetime.day = 30 if isinstance(end_datetime.day, int) and end_datetime.day > 30: @@ -216,7 +221,8 @@ def _extract_datetime( if not cube.coord_dims(time_coord): constraint = iris.Constraint( - time=lambda t: start_datetime <= t.point < end_datetime) + time=lambda t: start_datetime <= t.point < end_datetime + ) cube_slice = cube.extract(constraint) else: # Convert all time points to dates at once, this is much faster @@ -232,11 +238,13 @@ def dt2str(time: PartialDateTime) -> str: if any([time.hour, time.minute, time.second]): txt += f" {time.hour:02d}:{time.minute:02d}:{time.second:02d}" return txt + raise ValueError( f"Time slice {dt2str(start_datetime)} " f"to {dt2str(end_datetime)} is outside " f"cube time bounds {time_coord.cell(0).point} to " - f"{time_coord.cell(-1).point}.") + f"{time_coord.cell(-1).point}." + ) return cube_slice @@ -262,8 +270,8 @@ def clip_timerange(cube: Cube, timerange: str) -> Cube: Time ranges are outside the cube's time limits. """ - start_date = _parse_start_date(timerange.split('/')[0]) - end_date = _parse_end_date(timerange.split('/')[1]) + start_date = _parse_start_date(timerange.split("/")[0]) + end_date = _parse_end_date(timerange.split("/")[1]) if isinstance(start_date, isodate.duration.Duration): start_date = _duration_to_date(start_date, end_date, sign=-1) @@ -322,34 +330,34 @@ def extract_season(cube: Cube, season: str) -> Cube: """ season = season.upper() - allmonths = 'JFMAMJJASOND' * 2 + allmonths = "JFMAMJJASOND" * 2 if season not in allmonths: - raise ValueError(f"Unable to extract Season {season} " - f"combination of months not possible.") + raise ValueError( + f"Unable to extract Season {season} " + f"combination of months not possible." 
+ ) sstart = allmonths.index(season) - res_season = allmonths[sstart + len(season):sstart + 12] + res_season = allmonths[sstart + len(season) : sstart + 12] seasons = [season, res_season] coords_to_remove = [] - if not cube.coords('clim_season'): - iris.coord_categorisation.add_season(cube, - 'time', - name='clim_season', - seasons=seasons) - coords_to_remove.append('clim_season') + if not cube.coords("clim_season"): + iris.coord_categorisation.add_season( + cube, "time", name="clim_season", seasons=seasons + ) + coords_to_remove.append("clim_season") - if not cube.coords('season_year'): - iris.coord_categorisation.add_season_year(cube, - 'time', - name='season_year', - seasons=seasons) - coords_to_remove.append('season_year') + if not cube.coords("season_year"): + iris.coord_categorisation.add_season_year( + cube, "time", name="season_year", seasons=seasons + ) + coords_to_remove.append("season_year") result = cube.extract(iris.Constraint(clim_season=season)) for coord in coords_to_remove: cube.remove_coord(coord) if result is None: - raise ValueError(f'Season {season!r} not present in cube {cube}') + raise ValueError(f"Season {season!r} not present in cube {cube}") return result @@ -375,27 +383,29 @@ def extract_month(cube: Cube, month: int) -> Cube: """ if month not in range(1, 13): - raise ValueError('Please provide a month number between 1 and 12.') - if not cube.coords('month_number'): - iris.coord_categorisation.add_month_number(cube, - 'time', - name='month_number') + raise ValueError("Please provide a month number between 1 and 12.") + if not cube.coords("month_number"): + iris.coord_categorisation.add_month_number( + cube, "time", name="month_number" + ) result = cube.extract(iris.Constraint(month_number=month)) if result is None: - raise ValueError(f'Month {month!r} not present in cube {cube}') + raise ValueError(f"Month {month!r} not present in cube {cube}") return result def _aggregate_time_fx(result_cube, source_cube): - time_dim = set(source_cube.coord_dims(source_cube.coord('time'))) + time_dim = set(source_cube.coord_dims(source_cube.coord("time"))) if source_cube.cell_measures(): for measure in source_cube.cell_measures(): measure_dims = set(source_cube.cell_measure_dims(measure)) if time_dim.intersection(measure_dims): - logger.debug('Averaging time dimension in measure %s.', - measure.var_name) - result_measure = da.mean(measure.core_data(), - axis=tuple(time_dim)) + logger.debug( + "Averaging time dimension in measure %s.", measure.var_name + ) + result_measure = da.mean( + measure.core_data(), axis=tuple(time_dim) + ) measure = measure.copy(result_measure) measure_dims = tuple(measure_dims - time_dim) result_cube.add_cell_measure(measure, measure_dims) @@ -403,24 +413,28 @@ def _aggregate_time_fx(result_cube, source_cube): if source_cube.ancillary_variables(): for ancillary_var in source_cube.ancillary_variables(): ancillary_dims = set( - source_cube.ancillary_variable_dims(ancillary_var)) + source_cube.ancillary_variable_dims(ancillary_var) + ) if time_dim.intersection(ancillary_dims): logger.debug( - 'Averaging time dimension in ancillary variable %s.', - ancillary_var.var_name) - result_ancillary_var = da.mean(ancillary_var.core_data(), - axis=tuple(time_dim)) + "Averaging time dimension in ancillary variable %s.", + ancillary_var.var_name, + ) + result_ancillary_var = da.mean( + ancillary_var.core_data(), axis=tuple(time_dim) + ) ancillary_var = ancillary_var.copy(result_ancillary_var) ancillary_dims = tuple(ancillary_dims - time_dim) - 
result_cube.add_ancillary_variable(ancillary_var, - ancillary_dims) + result_cube.add_ancillary_variable( + ancillary_var, ancillary_dims + ) @preserve_float_dtype def hourly_statistics( cube: Cube, hours: int, - operator: str = 'mean', + operator: str = "mean", **operator_kwargs, ) -> Cube: """Compute hourly statistics. @@ -448,26 +462,27 @@ def hourly_statistics( Hourly statistics cube. """ - if not cube.coords('hour_group'): + if not cube.coords("hour_group"): iris.coord_categorisation.add_categorised_coord( cube, - 'hour_group', - 'time', + "hour_group", + "time", lambda coord, value: coord.units.num2date(value).hour // hours, - units='1') - if not cube.coords('day_of_year'): - iris.coord_categorisation.add_day_of_year(cube, 'time') - if not cube.coords('year'): - iris.coord_categorisation.add_year(cube, 'time') + units="1", + ) + if not cube.coords("day_of_year"): + iris.coord_categorisation.add_day_of_year(cube, "time") + if not cube.coords("year"): + iris.coord_categorisation.add_year(cube, "time") (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) result = cube.aggregated_by( - ['hour_group', 'day_of_year', 'year'], agg, **agg_kwargs + ["hour_group", "day_of_year", "year"], agg, **agg_kwargs ) - result.remove_coord('hour_group') - result.remove_coord('day_of_year') - result.remove_coord('year') + result.remove_coord("hour_group") + result.remove_coord("day_of_year") + result.remove_coord("year") return result @@ -475,7 +490,7 @@ def hourly_statistics( @preserve_float_dtype def daily_statistics( cube: Cube, - operator: str = 'mean', + operator: str = "mean", **operator_kwargs, ) -> Cube: """Compute daily statistics. @@ -500,23 +515,23 @@ def daily_statistics( Daily statistics cube. """ - if not cube.coords('day_of_year'): - iris.coord_categorisation.add_day_of_year(cube, 'time') - if not cube.coords('year'): - iris.coord_categorisation.add_year(cube, 'time') + if not cube.coords("day_of_year"): + iris.coord_categorisation.add_day_of_year(cube, "time") + if not cube.coords("year"): + iris.coord_categorisation.add_year(cube, "time") (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) - result = cube.aggregated_by(['day_of_year', 'year'], agg, **agg_kwargs) + result = cube.aggregated_by(["day_of_year", "year"], agg, **agg_kwargs) - result.remove_coord('day_of_year') - result.remove_coord('year') + result.remove_coord("day_of_year") + result.remove_coord("year") return result @preserve_float_dtype def monthly_statistics( cube: Cube, - operator: str = 'mean', + operator: str = "mean", **operator_kwargs, ) -> Cube: """Compute monthly statistics. @@ -541,13 +556,13 @@ def monthly_statistics( Monthly statistics cube. 
""" - if not cube.coords('month_number'): - iris.coord_categorisation.add_month_number(cube, 'time') - if not cube.coords('year'): - iris.coord_categorisation.add_year(cube, 'time') + if not cube.coords("month_number"): + iris.coord_categorisation.add_month_number(cube, "time") + if not cube.coords("year"): + iris.coord_categorisation.add_year(cube, "time") (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) - result = cube.aggregated_by(['month_number', 'year'], agg, **agg_kwargs) + result = cube.aggregated_by(["month_number", "year"], agg, **agg_kwargs) _aggregate_time_fx(result, cube) return result @@ -555,8 +570,8 @@ def monthly_statistics( @preserve_float_dtype def seasonal_statistics( cube: Cube, - operator: str = 'mean', - seasons: Iterable[str] = ('DJF', 'MAM', 'JJA', 'SON'), + operator: str = "mean", + seasons: Iterable[str] = ("DJF", "MAM", "JJA", "SON"), **operator_kwargs, ) -> Cube: """Compute seasonal statistics. @@ -590,29 +605,29 @@ def seasonal_statistics( if any(len(sea) < 2 for sea in seasons): raise ValueError( - f"Minimum of 2 month is required per Seasons: {seasons}.") + f"Minimum of 2 month is required per Seasons: {seasons}." + ) - if not cube.coords('clim_season'): - iris.coord_categorisation.add_season(cube, - 'time', - name='clim_season', - seasons=seasons) + if not cube.coords("clim_season"): + iris.coord_categorisation.add_season( + cube, "time", name="clim_season", seasons=seasons + ) else: - old_seasons = sorted(set(cube.coord('clim_season').points)) + old_seasons = sorted(set(cube.coord("clim_season").points)) if not all(osea in seasons for osea in old_seasons): raise ValueError( f"Seasons {seasons} do not match prior season extraction " - f"{old_seasons}.") + f"{old_seasons}." + ) - if not cube.coords('season_year'): - iris.coord_categorisation.add_season_year(cube, - 'time', - name='season_year', - seasons=seasons) + if not cube.coords("season_year"): + iris.coord_categorisation.add_season_year( + cube, "time", name="season_year", seasons=seasons + ) (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) result = cube.aggregated_by( - ['clim_season', 'season_year'], agg, **agg_kwargs + ["clim_season", "season_year"], agg, **agg_kwargs ) # CMOR Units are days so we are safe to operate on days @@ -633,10 +648,10 @@ def spans_full_season(cube: Cube) -> list[bool]: Truth statements if time bounds are within (month*29, month*31) """ - time = cube.coord('time') + time = cube.coord("time") num_days = [(tt.bounds[0, 1] - tt.bounds[0, 0]) for tt in time] - seasons = cube.coord('clim_season').points + seasons = cube.coord("clim_season").points tar_days = [(len(sea) * 29, len(sea) * 31) for sea in seasons] return [dt[0] <= dn <= dt[1] for dn, dt in zip(num_days, tar_days)] @@ -650,7 +665,7 @@ def spans_full_season(cube: Cube) -> list[bool]: @preserve_float_dtype def annual_statistics( cube: Cube, - operator: str = 'mean', + operator: str = "mean", **operator_kwargs, ) -> Cube: """Compute annual statistics. 
@@ -682,9 +697,9 @@ def annual_statistics( (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) - if not cube.coords('year'): - iris.coord_categorisation.add_year(cube, 'time') - result = cube.aggregated_by('year', agg, **agg_kwargs) + if not cube.coords("year"): + iris.coord_categorisation.add_year(cube, "time") + result = cube.aggregated_by("year", agg, **agg_kwargs) _aggregate_time_fx(result, cube) return result @@ -692,7 +707,7 @@ def annual_statistics( @preserve_float_dtype def decadal_statistics( cube: Cube, - operator: str = 'mean', + operator: str = "mean", **operator_kwargs, ) -> Cube: """Compute decadal statistics. @@ -724,7 +739,7 @@ def decadal_statistics( (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) - if not cube.coords('decade'): + if not cube.coords("decade"): def get_decade(coord, value): """Categorize time coordinate into decades.""" @@ -732,8 +747,9 @@ def get_decade(coord, value): return date.year - date.year % 10 iris.coord_categorisation.add_categorised_coord( - cube, 'decade', 'time', get_decade) - result = cube.aggregated_by('decade', agg, **agg_kwargs) + cube, "decade", "time", get_decade + ) + result = cube.aggregated_by("decade", agg, **agg_kwargs) _aggregate_time_fx(result, cube) return result @@ -741,9 +757,9 @@ def get_decade(coord, value): @preserve_float_dtype def climate_statistics( cube: Cube, - operator: str = 'mean', - period: str = 'full', - seasons: Iterable[str] = ('DJF', 'MAM', 'JJA', 'SON'), + operator: str = "mean", + period: str = "full", + seasons: Iterable[str] = ("DJF", "MAM", "JJA", "SON"), **operator_kwargs, ) -> Cube: """Compute climate statistics with the specified granularity. @@ -784,42 +800,44 @@ def climate_statistics( period = period.lower() # Use Cube.collapsed when full period is requested - if period in ('full', ): + if period in ("full",): (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) agg_kwargs = update_weights_kwargs( - agg, agg_kwargs, '_time_weights_', cube, _add_time_weights_coord + agg, agg_kwargs, "_time_weights_", cube, _add_time_weights_coord ) with warnings.catch_warnings(): warnings.filterwarnings( - 'ignore', + "ignore", message=( "Cannot check if coordinate is contiguous: Invalid " "operation for '_time_weights_'" ), category=UserWarning, - module='iris', + module="iris", ) - clim_cube = cube.collapsed('time', agg, **agg_kwargs) + clim_cube = cube.collapsed("time", agg, **agg_kwargs) # Make sure input and output cubes do not have auxiliary coordinate - if cube.coords('_time_weights_'): - cube.remove_coord('_time_weights_') - if clim_cube.coords('_time_weights_'): - clim_cube.remove_coord('_time_weights_') + if cube.coords("_time_weights_"): + cube.remove_coord("_time_weights_") + if clim_cube.coords("_time_weights_"): + clim_cube.remove_coord("_time_weights_") # Use Cube.aggregated_by for other periods else: clim_coord = _get_period_coord(cube, period, seasons) (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) clim_cube = cube.aggregated_by(clim_coord, agg, **agg_kwargs) - clim_cube.remove_coord('time') + clim_cube.remove_coord("time") _aggregate_time_fx(clim_cube, cube) if clim_cube.coord(clim_coord.name()).is_monotonic(): - iris.util.promote_aux_coord_to_dim_coord(clim_cube, - clim_coord.name()) + iris.util.promote_aux_coord_to_dim_coord( + clim_cube, clim_coord.name() + ) else: clim_cube = CubeList( - clim_cube.slices_over(clim_coord.name())).merge_cube() + clim_cube.slices_over(clim_coord.name()) + ).merge_cube() 
         cube.remove_coord(clim_coord)
 
     return clim_cube
 
 
@@ -829,10 +847,10 @@ def _add_time_weights_coord(cube):
     """Add time weight coordinate to cube (in-place)."""
     time_weights_coord = AuxCoord(
         get_time_weights(cube),
-        long_name='_time_weights_',
-        units=cube.coord('time').units,
+        long_name="_time_weights_",
+        units=cube.coord("time").units,
     )
-    cube.add_aux_coord(time_weights_coord, cube.coord_dims('time'))
+    cube.add_aux_coord(time_weights_coord, cube.coord_dims("time"))
 
 
 @preserve_float_dtype
@@ -841,7 +859,7 @@ def anomalies(
     period: str,
     reference: Optional[dict] = None,
     standardize: bool = False,
-    seasons: Iterable[str] = ('DJF', 'MAM', 'JJA', 'SON'),
+    seasons: Iterable[str] = ("DJF", "MAM", "JJA", "SON"),
 ) -> Cube:
     """Compute anomalies using a mean with the specified granularity.
@@ -874,39 +892,40 @@ def anomalies(
         reference_cube = cube
     else:
         reference_cube = extract_time(cube, **reference)
-    reference = climate_statistics(reference_cube,
-                                   period=period,
-                                   seasons=seasons)
+    reference = climate_statistics(
+        reference_cube, period=period, seasons=seasons
+    )
-    if period in ['full']:
+    if period in ["full"]:
         metadata = copy.deepcopy(cube.metadata)
         cube = cube - reference
         cube.metadata = metadata
         if standardize:
-            cube_stddev = climate_statistics(cube,
-                                             operator='std_dev',
-                                             period=period,
-                                             seasons=seasons)
+            cube_stddev = climate_statistics(
+                cube, operator="std_dev", period=period, seasons=seasons
+            )
             cube = cube / cube_stddev
-            cube.units = '1'
+            cube.units = "1"
         return cube
 
     cube = _compute_anomalies(cube, reference, period, seasons)
 
     # standardize the results if requested
     if standardize:
-        cube_stddev = climate_statistics(cube,
-                                         operator='std_dev',
-                                         period=period)
-        tdim = cube.coord_dims('time')[0]
+        cube_stddev = climate_statistics(
+            cube, operator="std_dev", period=period
+        )
+        tdim = cube.coord_dims("time")[0]
         reps = cube.shape[tdim] / cube_stddev.shape[tdim]
         if not reps % 1 == 0:
             raise ValueError(
                 "Cannot safely apply preprocessor to this dataset, "
                 "since the full time period of this dataset is not "
-                f"a multiple of the period '{period}'")
+                f"a multiple of the period '{period}'"
+            )
         cube.data = cube.core_data() / da.concatenate(
-            [cube_stddev.core_data() for _ in range(int(reps))], axis=tdim)
-        cube.units = '1'
+            [cube_stddev.core_data() for _ in range(int(reps))], axis=tdim
+        )
+        cube.units = "1"
     return cube
 
 
@@ -922,7 +941,7 @@ def _compute_anomalies(
     for idx, point in enumerate(ref_coord.points):
         indices = np.where(cube_coord.points == point, idx, indices)
     ref_data = reference.core_data()
-    axis, = cube.coord_dims(cube_coord)
+    (axis,) = cube.coord_dims(cube_coord)
     if cube.has_lazy_data() and reference.has_lazy_data():
         # Rechunk reference data because iris.cube.Cube.aggregated_by, used to
         # compute the reference, produces very small chunks.
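# =============================================================================
# [Editor's note] Illustrative sketch, not part of the patch: how the
# `anomalies` function above is typically called. It reuses the hypothetical
# `demo_monthly_cube` helper from the `annual_statistics` sketch; the keys of
# the `reference` dict mirror the `extract_time` arguments.
from esmvalcore.preprocessor import anomalies

# Deviations from the monthly climatology of the first year only:
anom = anomalies(
    demo_monthly_cube(),
    period="month",
    reference={
        "start_year": 2000, "start_month": 1, "start_day": 1,
        "end_year": 2001, "end_month": 1, "end_day": 1,
    },
)

# Standardized anomalies (z-scores) with respect to the full period:
zscores = anomalies(demo_monthly_cube(), period="full", standardize=True)
print(zscores.units)  # 1
# =============================================================================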
@@ -942,24 +961,24 @@ def _compute_anomalies( def _get_period_coord(cube, period, seasons): """Get periods.""" - if period in ['hourly', 'hour', 'hr']: - if not cube.coords('hour'): - iris.coord_categorisation.add_hour(cube, 'time') - return cube.coord('hour') - if period in ['daily', 'day']: - if not cube.coords('day_of_year'): - iris.coord_categorisation.add_day_of_year(cube, 'time') - return cube.coord('day_of_year') - if period in ['monthly', 'month', 'mon']: - if not cube.coords('month_number'): - iris.coord_categorisation.add_month_number(cube, 'time') - return cube.coord('month_number') - if period in ['seasonal', 'season']: - if not cube.coords('season_number'): - iris.coord_categorisation.add_season_number(cube, - 'time', - seasons=seasons) - return cube.coord('season_number') + if period in ["hourly", "hour", "hr"]: + if not cube.coords("hour"): + iris.coord_categorisation.add_hour(cube, "time") + return cube.coord("hour") + if period in ["daily", "day"]: + if not cube.coords("day_of_year"): + iris.coord_categorisation.add_day_of_year(cube, "time") + return cube.coord("day_of_year") + if period in ["monthly", "month", "mon"]: + if not cube.coords("month_number"): + iris.coord_categorisation.add_month_number(cube, "time") + return cube.coord("month_number") + if period in ["seasonal", "season"]: + if not cube.coords("season_number"): + iris.coord_categorisation.add_season_number( + cube, "time", seasons=seasons + ) + return cube.coord("season_number") raise ValueError(f"Period '{period}' not supported") @@ -967,7 +986,7 @@ def regrid_time( cube: Cube, frequency: str, calendar: Optional[str] = None, - units: str = 'days since 1850-01-01 00:00:00', + units: str = "days since 1850-01-01 00:00:00", ) -> Cube: """Align time coordinate for cubes. 
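# =============================================================================
# [Editor's note] Illustrative sketch, not part of the patch: `regrid_time`
# snaps time points onto a fixed grid (for frequency "mon", the 15th of the
# month at 00:00, as shown in `_get_new_dates` below), which makes
# differently encoded datasets comparable. `demo_monthly_cube` is the
# hypothetical helper from the `annual_statistics` sketch.
from esmvalcore.preprocessor import regrid_time

cube = demo_monthly_cube(day=1)  # data stamped on the 1st of each month
aligned = regrid_time(cube, frequency="mon")
print(aligned.coord("time").cell(0).point)  # 2000-01-15 00:00:00
# =============================================================================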
@@ -1040,10 +1059,10 @@ def regrid_time( """ # Do not overwrite input cube cube = cube.copy() - coord = cube.coord('time') + coord = cube.coord("time") # Raise an error if calendar is used for a non-supported frequency - if calendar is not None and ('day' in frequency or 'hr' in frequency): + if calendar is not None and ("day" in frequency or "hr" in frequency): raise NotImplementedError( f"Setting a fixed calendar is not supported for frequency " f"'{frequency}'" @@ -1054,9 +1073,9 @@ def regrid_time( if calendar is not None: new_coord = DimCoord( coord.points, - standard_name='time', - long_name='time', - var_name='time', + standard_name="time", + long_name="time", + var_name="time", units=Unit(units, calendar=calendar), ) else: @@ -1071,24 +1090,24 @@ def regrid_time( # Adapt auxiliary time coordinates if necessary aux_coord_names = [ - 'day_of_month', - 'day_of_year', - 'hour', - 'month', - 'month_fullname', - 'month_number', - 'season', - 'season_number', - 'season_year', - 'weekday', - 'weekday_fullname', - 'weekday_number', - 'year', + "day_of_month", + "day_of_year", + "hour", + "month", + "month_fullname", + "month_number", + "season", + "season_number", + "season_year", + "weekday", + "weekday_fullname", + "weekday_number", + "year", ] for coord_name in aux_coord_names: if cube.coords(coord_name): cube.remove_coord(coord_name) - getattr(iris.coord_categorisation, f'add_{coord_name}')( + getattr(iris.coord_categorisation, f"add_{coord_name}")( cube, new_coord ) @@ -1099,20 +1118,20 @@ def _get_new_dates(frequency: str, coord: Coord) -> list[datetime.datetime]: """Get transformed dates.""" dates = coord.units.num2date(coord.points) - if 'dec' in frequency: + if "dec" in frequency: dates = [datetime.datetime(d.year, 1, 1, 0, 0, 0) for d in dates] - elif 'yr' in frequency: + elif "yr" in frequency: dates = [datetime.datetime(d.year, 7, 1, 0, 0, 0) for d in dates] - elif 'mon' in frequency: + elif "mon" in frequency: dates = [ datetime.datetime(d.year, d.month, 15, 0, 0, 0) for d in dates ] - elif 'day' in frequency: + elif "day" in frequency: dates = [ datetime.datetime(d.year, d.month, d.day, 12, 0, 0) for d in dates ] - elif 'hr' in frequency: - (n_hours_str, _, _) = frequency.partition('hr') + elif "hr" in frequency: + (n_hours_str, _, _) = frequency.partition("hr") if not n_hours_str: n_hours = 1 else: @@ -1126,7 +1145,8 @@ def _get_new_dates(frequency: str, coord: Coord) -> list[datetime.datetime]: dates = [ datetime.datetime( d.year, d.month, d.day, d.hour - d.hour % n_hours, 0, 0 - ) + half_interval + ) + + half_interval for d in dates ] else: @@ -1158,11 +1178,11 @@ def low_pass_weights(window, cutoff): weights = np.zeros([nwts]) half_order = nwts // 2 weights[half_order] = 2 * cutoff - kidx = np.arange(1., half_order) + kidx = np.arange(1.0, half_order) sigma = np.sin(np.pi * kidx / half_order) * half_order / (np.pi * kidx) - firstfactor = np.sin(2. * np.pi * cutoff * kidx) / (np.pi * kidx) - weights[(half_order - 1):0:-1] = firstfactor * sigma - weights[(half_order + 1):-1] = firstfactor * sigma + firstfactor = np.sin(2.0 * np.pi * cutoff * kidx) / (np.pi * kidx) + weights[(half_order - 1) : 0 : -1] = firstfactor * sigma + weights[(half_order + 1) : -1] = firstfactor * sigma return weights[1:-1] @@ -1172,8 +1192,8 @@ def timeseries_filter( cube: Cube, window: int, span: int, - filter_type: str = 'lowpass', - filter_stats: str = 'sum', + filter_type: str = "lowpass", + filter_stats: str = "sum", **operator_kwargs, ) -> Cube: """Apply a timeseries filter. 
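# =============================================================================
# [Editor's note] Illustrative sketch, not part of the patch: applying the
# low-pass filter built from `low_pass_weights` through `timeseries_filter`.
# With monthly data (hypothetical `demo_monthly_cube` helper from the
# `annual_statistics` sketch), a span of 12 damps variability shorter than
# roughly one year.
from esmvalcore.preprocessor import timeseries_filter

cube = demo_monthly_cube(n_months=120)  # ten years of monthly values
smoothed = timeseries_filter(
    cube,
    window=7,
    span=12,
    filter_type="lowpass",
    filter_stats="sum",
)
# The rolling window trims (len(weights) - 1) points off the time axis:
print(cube.shape, smoothed.shape)
# =============================================================================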
@@ -1223,7 +1243,7 @@ def timeseries_filter( """ try: - cube.coord('time') + cube.coord("time") except CoordinateNotFoundError: logger.error("Cube %s does not have time coordinate", cube) raise @@ -1231,22 +1251,23 @@ def timeseries_filter( # Construct weights depending on frequency # TODO implement more filters! supported_filters = [ - 'lowpass', + "lowpass", ] if filter_type in supported_filters: - if filter_type == 'lowpass': + if filter_type == "lowpass": # These weights sum to one and are dimensionless (-> we do NOT need # to consider units for sums) - wgts = low_pass_weights(window, 1. / span) + wgts = low_pass_weights(window, 1.0 / span) else: raise NotImplementedError( f"Filter type {filter_type} not implemented, " - f"please choose one of {', '.join(supported_filters)}") + f"please choose one of {', '.join(supported_filters)}" + ) # Apply filter (agg, agg_kwargs) = get_iris_aggregator(filter_stats, **operator_kwargs) - agg_kwargs['weights'] = wgts - cube = cube.rolling_window('time', agg, len(wgts), **agg_kwargs) + agg_kwargs["weights"] = wgts + cube = cube.rolling_window("time", agg, len(wgts), **agg_kwargs) return cube @@ -1255,7 +1276,7 @@ def resample_hours( cube: Cube, interval: int, offset: int = 0, - interpolate: Optional[Literal['nearest', 'linear']] = None, + interpolate: Optional[Literal["nearest", "linear"]] = None, ) -> Cube: """Convert x-hourly data to y-hourly data. @@ -1300,34 +1321,41 @@ def resample_hours( allowed_intervals = (1, 2, 3, 4, 6, 12) if interval not in allowed_intervals: raise ValueError( - f'The number of hours must be one of {allowed_intervals}') + f"The number of hours must be one of {allowed_intervals}" + ) if offset >= interval: - raise ValueError(f'The offset ({offset}) must be lower than ' - f'the interval ({interval})') - time = cube.coord('time') + raise ValueError( + f"The offset ({offset}) must be lower than " + f"the interval ({interval})" + ) + time = cube.coord("time") cube_period = time.cell(1).point - time.cell(0).point if cube_period.total_seconds() / 3600 > interval: - raise ValueError(f"Data period ({cube_period}) should be lower than " - f"the interval ({interval})") + raise ValueError( + f"Data period ({cube_period}) should be lower than " + f"the interval ({interval})" + ) dates = time.units.num2date(time.points) # Interpolate input time to requested hours if desired if interpolate: - if interpolate == 'nearest': + if interpolate == "nearest": interpolation_scheme = iris.analysis.Nearest() - elif interpolate == 'linear': + elif interpolate == "linear": interpolation_scheme = iris.analysis.Linear() else: raise ValueError( f"Expected `None`, 'nearest' or 'linear' for `interpolate`, " f"got '{interpolate}'" ) - new_dates = sorted([ - cf_datetime(y, m, d, h, calendar=time.units.calendar) - for h in range(0 + offset, 24, interval) - for (y, m, d) in {(d.year, d.month, d.day) for d in dates} - ]) - cube = cube.interpolate([('time', new_dates)], interpolation_scheme) + new_dates = sorted( + [ + cf_datetime(y, m, d, h, calendar=time.units.calendar) + for h in range(0 + offset, 24, interval) + for (y, m, d) in {(d.year, d.month, d.day) for d in dates} + ] + ) + cube = cube.interpolate([("time", new_dates)], interpolation_scheme) else: hours = [ PartialDateTime(hour=h) for h in range(0 + offset, 24, interval) @@ -1389,21 +1417,22 @@ def resample_time( Cube with the new frequency. 
""" - time = cube.coord('time') + time = cube.coord("time") dates = time.units.num2date(time.points) requested = PartialDateTime(month=month, day=day, hour=hour) select = dates == requested cube = _select_timeslice(cube, select) if cube is None: raise ValueError( - f"Time coordinate {dates} does not contain {requested} for {cube}") + f"Time coordinate {dates} does not contain {requested} for {cube}" + ) return cube def _lin_pad(array: np.ndarray, delta: float, pad_with: int) -> np.ndarray: """Linearly pad an array on both sides with constant difference.""" end_values = (array[0] - pad_with * delta, array[-1] + pad_with * delta) - new_array = np.pad(array, pad_with, 'linear_ramp', end_values=end_values) + new_array = np.pad(array, pad_with, "linear_ramp", end_values=end_values) return new_array @@ -1430,7 +1459,7 @@ def _get_lst_offset(lon_coord: Coord) -> np.ndarray: # Make sure that longitude is in degrees and shift it to [-180, 180] first # (do NOT overwrite input coordinate) lon_coord = lon_coord.copy() - lon_coord.convert_units('degrees') + lon_coord.convert_units("degrees") shifted_lon = (lon_coord.points + 180.0) % 360 - 180.0 return 12.0 * (shifted_lon / 180.0) @@ -1446,9 +1475,7 @@ def _get_lsts(time_coord: DimCoord, lon_coord: Coord) -> np.ndarray: """ # Pad time coordinate with 1 time step at both sides for the bins for LSTs # outside of the time coordinate - dtime = np.abs( - time_coord.bounds[0, 1] - time_coord.bounds[0, 0] - ) + dtime = np.abs(time_coord.bounds[0, 1] - time_coord.bounds[0, 0]) new_points = _lin_pad(time_coord.points, dtime, 1) bnds = time_coord.bounds new_bounds = np.stack( @@ -1469,7 +1496,7 @@ def _get_lsts(time_coord: DimCoord, lon_coord: Coord) -> np.ndarray: bins = np.concatenate(([time_coord.bounds[0, 0]], time_coord.bounds[:, 1])) idx = np.digitize(exact_lst_array, bins) - 1 # (lon, time); idx for time idx[idx < 0] = 0 # values outside the time coordinate - idx[idx >= n_time] = - 1 # values outside the time coordinate + idx[idx >= n_time] = -1 # values outside the time coordinate lst_array = time_coord.points[idx] # (lon, time) # Remove time steps again that have been added previously @@ -1495,7 +1522,7 @@ def _get_time_index_and_mask( # Make sure that time coordinate has bounds (these are necessary for the # binning) and uses 'hours' as reference units time_coord.convert_units( - Unit('hours since 1850-01-01', calendar=time_coord.units.calendar) + Unit("hours since 1850-01-01", calendar=time_coord.units.calendar) ) _guess_time_bounds(time_coord) @@ -1505,8 +1532,8 @@ def _get_time_index_and_mask( # We use np.searchsorted to calculate the indices necessary to put the UTC # times into their corresponding (binned) LSTs. These incides are 2D since # they depend on time and longitude. - searchsorted_l = partial(np.searchsorted, side='left') - _get_indices_l = np.vectorize(searchsorted_l, signature='(i),(i)->(i)') + searchsorted_l = partial(np.searchsorted, side="left") + _get_indices_l = np.vectorize(searchsorted_l, signature="(i),(i)->(i)") time_index_l = _get_indices_l(lsts, time_coord.points) # (lon, time) # To calculate the mask, we need to detect which LSTs are outside of the @@ -1517,8 +1544,8 @@ def _get_time_index_and_mask( # 'left'). Indices that are the same in both arrays need to be masked, as # these are the ones outside of the time coordinate. All others will # change. 
- searchsorted_r = partial(np.searchsorted, side='right') - _get_indices_r = np.vectorize(searchsorted_r, signature='(i),(i)->(i)') + searchsorted_r = partial(np.searchsorted, side="right") + _get_indices_r = np.vectorize(searchsorted_r, signature="(i),(i)->(i)") time_index_r = _get_indices_r(lsts, time_coord.points) # (lon, time) mask = time_index_l == time_index_r # (lon, time) @@ -1603,7 +1630,7 @@ def _transform_to_lst_lazy( """ new_data = da.apply_gufunc( _transform_to_lst_eager, - '(t,x),(t,x),(t,x)->(t,x)', + "(t,x),(t,x),(t,x)->(t,x)", data, time_index, mask, @@ -1653,12 +1680,13 @@ def _transform_cube_to_lst(cube: Cube) -> Cube: # dimension); this also creates a new cube so the original input cube is # not overwritten complete_coords = [ - cube.coord('time', dim_coords=True), cube.coord('longitude'), + cube.coord("time", dim_coords=True), + cube.coord("longitude"), ] cube = rechunk_cube(cube, complete_coords) - time_coord = cube.coord('time', dim_coords=True) - lon_coord = cube.coord('longitude') + time_coord = cube.coord("time", dim_coords=True) + lon_coord = cube.coord("longitude") time_dim = cube.coord_dims(time_coord)[0] lon_dim = cube.coord_dims(lon_coord)[0] @@ -1731,22 +1759,22 @@ def _transform_cube_to_lst(cube: Cube) -> Cube: def _check_cube_coords(cube): - if not cube.coords('time', dim_coords=True): + if not cube.coords("time", dim_coords=True): raise CoordinateNotFoundError( f"Input cube {cube.summary(shorten=True)} needs a dimensional " f"coordinate `time`" ) - time_coord = cube.coord('time', dim_coords=True) + time_coord = cube.coord("time", dim_coords=True) # The following works since DimCoords are always 1D and monotonic if time_coord.points[0] > time_coord.points[-1]: raise ValueError("`time` coordinate must be monotonically increasing") - if not cube.coords('longitude'): + if not cube.coords("longitude"): raise CoordinateNotFoundError( f"Input cube {cube.summary(shorten=True)} needs a coordinate " f"`longitude`" ) - lon_ndim = len(cube.coord_dims('longitude')) + lon_ndim = len(cube.coord_dims("longitude")) if lon_ndim != 1: raise CoordinateMultiDimError( f"Input cube {cube.summary(shorten=True)} needs a 1D coordinate " @@ -1821,6 +1849,6 @@ def local_solar_time(cube: Cube) -> Cube: cube = _transform_cube_to_lst(cube) # Adapt metadata of time coordinate - cube.coord('time', dim_coords=True).long_name = 'Local Solar Time' + cube.coord("time", dim_coords=True).long_name = "Local Solar Time" return cube diff --git a/esmvalcore/preprocessor/_trend.py b/esmvalcore/preprocessor/_trend.py index c592f5196e..2e2d778b8a 100644 --- a/esmvalcore/preprocessor/_trend.py +++ b/esmvalcore/preprocessor/_trend.py @@ -1,4 +1,5 @@ """Preprocessor functions calculate trends from data.""" + import logging import dask.array as da @@ -57,8 +58,9 @@ def _get_slope_stderr(y_arr, x_arr): slope = _slope(x_arr, y_arr) intercept = y_mean - slope * x_mean y_estim = slope * x_arr + intercept - slope_stderr = np.sqrt(((y_arr - y_estim)**2).sum() / dof / - ((x_arr - x_mean)**2).sum()) + slope_stderr = np.sqrt( + ((y_arr - y_estim) ** 2).sum() / dof / ((x_arr - x_mean) ** 2).sum() + ) return slope_stderr @@ -67,14 +69,20 @@ def _set_trend_units(cube, coord): coord_units = coord.units if coord_units.is_time_reference(): coord_units = Unit(coord_units.symbol.split()[0]) - invalid_units = any([cube.units is None, cube.units.is_unknown(), - cube.units.is_no_unit(), coord_units.is_no_unit()]) + invalid_units = any( + [ + cube.units is None, + cube.units.is_unknown(), + cube.units.is_no_unit(), + 
coord_units.is_no_unit(), + ] + ) if not invalid_units: cube.units /= coord_units @preserve_float_dtype -def linear_trend(cube, coordinate='time'): +def linear_trend(cube, coordinate="time"): """Calculate linear trend of data along a given coordinate. The linear trend is defined as the slope of an ordinary linear regression. @@ -110,13 +118,14 @@ def call_func(data, axis, x_data): def lazy_func(data, axis, x_data): """Calculate trend lazily.""" trend_arr = da.apply_along_axis( - _get_slope, axis, data, x_data, dtype=data.dtype, shape=()) + _get_slope, axis, data, x_data, dtype=data.dtype, shape=() + ) trend_arr = da.ma.masked_invalid(trend_arr) return trend_arr - aggregator = iris.analysis.Aggregator('trend', call_func, - lazy_func=lazy_func, - x_data=coord.points) + aggregator = iris.analysis.Aggregator( + "trend", call_func, lazy_func=lazy_func, x_data=coord.points + ) cube = cube.collapsed(coord, aggregator) # Adapt units @@ -126,7 +135,7 @@ def lazy_func(data, axis, x_data): @preserve_float_dtype -def linear_trend_stderr(cube, coordinate='time'): +def linear_trend_stderr(cube, coordinate="time"): """Calculate standard error of linear trend along a given coordinate. This gives the standard error (not confidence intervals!) of the trend @@ -158,21 +167,23 @@ def linear_trend_stderr(cube, coordinate='time'): # Construct aggregator and calculate standard error of the trend def call_func(data, axis, x_data): """Calculate trend standard error.""" - trend_std_arr = np.apply_along_axis(_get_slope_stderr, axis, data, - x_data) + trend_std_arr = np.apply_along_axis( + _get_slope_stderr, axis, data, x_data + ) trend_std_arr = np.ma.masked_invalid(trend_std_arr) return trend_std_arr def lazy_func(data, axis, x_data): """Calculate trend standard error lazily.""" trend_std_arr = da.apply_along_axis( - _get_slope_stderr, axis, data, x_data, dtype=data.dtype, shape=()) + _get_slope_stderr, axis, data, x_data, dtype=data.dtype, shape=() + ) trend_std_arr = da.ma.masked_invalid(trend_std_arr) return trend_std_arr - aggregator = iris.analysis.Aggregator('trend_stderr', call_func, - lazy_func=lazy_func, - x_data=coord.points) + aggregator = iris.analysis.Aggregator( + "trend_stderr", call_func, lazy_func=lazy_func, x_data=coord.points + ) cube = cube.collapsed(coord, aggregator) # Adapt units diff --git a/esmvalcore/preprocessor/_units.py b/esmvalcore/preprocessor/_units.py index 8c96f78ae1..23ebe23cc2 100644 --- a/esmvalcore/preprocessor/_units.py +++ b/esmvalcore/preprocessor/_units.py @@ -2,6 +2,7 @@ Allows for unit conversions. 
""" + from __future__ import annotations import logging @@ -21,12 +22,12 @@ # mm s-1 for precipitation SPECIAL_CASES = [ [ - ('precipitation_flux', 'kg m-2 s-1'), - ('lwe_precipitation_rate', 'mm s-1'), + ("precipitation_flux", "kg m-2 s-1"), + ("lwe_precipitation_rate", "mm s-1"), ], [ - ('equivalent_thickness_at_stp_of_atmosphere_ozone_content', 'm'), - ('equivalent_thickness_at_stp_of_atmosphere_ozone_content', '1e5 DU'), + ("equivalent_thickness_at_stp_of_atmosphere_ozone_content", "m"), + ("equivalent_thickness_at_stp_of_atmosphere_ozone_content", "1e5 DU"), ], ] @@ -34,7 +35,7 @@ def _try_special_conversions(cube, units): """Try special conversion.""" for special_case in SPECIAL_CASES: - for (std_name, special_units) in special_case: + for std_name, special_units in special_case: # Special unit conversion only works if all of the following # criteria are met: # - the cube's standard_name is one of the supported @@ -45,9 +46,10 @@ def _try_special_conversions(cube, units): # one of the other standard_names in that special case # Step 1: find suitable source name and units - if (cube.standard_name == std_name and - cube.units.is_convertible(special_units)): - for (target_std_name, target_units) in special_case: + if cube.standard_name == std_name and cube.units.is_convertible( + special_units + ): + for target_std_name, target_units in special_case: if target_units == special_units: continue @@ -125,7 +127,7 @@ def convert_units(cube, units): def accumulate_coordinate( cube: iris.cube.Cube, - coordinate: str | iris.coords.DimCoord | iris.coords.AuxCoord + coordinate: str | iris.coords.DimCoord | iris.coords.AuxCoord, ) -> iris.cube.Cube: """Weight data using the bounds from a given coordinate. @@ -157,12 +159,14 @@ def accumulate_coordinate( coord = cube.coord(coordinate) except iris.exceptions.CoordinateNotFoundError as err: raise ValueError( - "Requested coordinate %s not found in cube %s", - coordinate, cube.summary(shorten=True)) from err + f"Requested coordinate {coordinate} not found in cube " + f"{cube.summary(shorten=True)}", + ) from err if coord.ndim > 1: raise NotImplementedError( - f'Multidimensional coordinate {coord} not supported.') + f"Multidimensional coordinate {coord} not supported." + ) array_module = da if coord.has_lazy_bounds() else np factor = iris.coords.AuxCoord( @@ -172,7 +176,7 @@ def accumulate_coordinate( units=coord.units, ) result = cube * factor - unit = result.units.format().split(' ')[-1] + unit = result.units.format().split(" ")[-1] result.convert_units(unit) result.long_name = f"{cube.long_name} * {factor.long_name}" return result diff --git a/esmvalcore/preprocessor/_volume.py b/esmvalcore/preprocessor/_volume.py index c59fe82936..169dcb3bba 100644 --- a/esmvalcore/preprocessor/_volume.py +++ b/esmvalcore/preprocessor/_volume.py @@ -3,6 +3,7 @@ Allows for selecting data subsets using certain volume bounds; selecting depth or height regions; constructing volumetric averages; """ + from __future__ import annotations import logging @@ -14,9 +15,9 @@ import numpy as np from iris.coords import AuxCoord, CellMeasure from iris.cube import Cube +from iris.util import broadcast_to_shape from ._shared import ( - broadcast_to_shape, get_iris_aggregator, get_normalized_cube, preserve_float_dtype, @@ -32,7 +33,7 @@ def extract_volume( cube: Cube, z_min: float, z_max: float, - interval_bounds: str = 'open', + interval_bounds: str = "open", nearest_value: bool = False, ) -> Cube: """Subset a cube based on a range of values in the z-coordinate. 
@@ -76,7 +77,7 @@ def extract_volume( zmax = float(z_max) zmin = float(z_min) - z_coord = cube.coord(axis='Z') + z_coord = cube.coord(axis="Z") if nearest_value: min_index = np.argmin(np.abs(z_coord.core_points() - zmin)) @@ -84,18 +85,19 @@ def extract_volume( zmin = z_coord.core_points()[min_index] zmax = z_coord.core_points()[max_index] - if interval_bounds == 'open': + if interval_bounds == "open": coord_values = {z_coord: lambda cell: zmin < cell.point < zmax} - elif interval_bounds == 'closed': + elif interval_bounds == "closed": coord_values = {z_coord: lambda cell: zmin <= cell.point <= zmax} - elif interval_bounds == 'left_closed': + elif interval_bounds == "left_closed": coord_values = {z_coord: lambda cell: zmin <= cell.point < zmax} - elif interval_bounds == 'right_closed': + elif interval_bounds == "right_closed": coord_values = {z_coord: lambda cell: zmin < cell.point <= zmax} else: raise ValueError( 'Depth extraction bounds can be set to "open", "closed", ' - f'"left_closed", or "right_closed". Got "{interval_bounds}".') + f'"left_closed", or "right_closed". Got "{interval_bounds}".' + ) z_constraint = iris.Constraint(coord_values=coord_values) @@ -127,7 +129,7 @@ def calculate_volume(cube: Cube) -> da.core.Array: """ # Load depth field and figure out which dim is which - depth = cube.coord(axis='z') + depth = cube.coord(axis="z") z_dim = cube.coord_dims(depth) depth = depth.copy() @@ -142,14 +144,16 @@ def calculate_volume(cube: Cube) -> da.core.Array: raise ValueError( f"Z axis bounds shape found {depth.core_bounds().shape}. " "Bounds should be 2 in the last dimension to compute the " - "thickness.") + "thickness." + ) # Convert units to get the thickness in meters try: - depth.convert_units('m') + depth.convert_units("m") except ValueError as err: raise ValueError( - f'Cannot compute volume using the Z-axis. {err}') from err + f"Cannot compute volume using the Z-axis. 
{err}" + ) from err # Calculate Z-direction thickness thickness = depth.core_bounds()[..., 1] - depth.core_bounds()[..., 0] @@ -157,23 +161,26 @@ def calculate_volume(cube: Cube) -> da.core.Array: thickness = da.array(thickness) # Get or calculate the horizontal areas of the cube - has_cell_measure = bool(cube.cell_measures('cell_area')) + has_cell_measure = bool(cube.cell_measures("cell_area")) try_adding_calculated_cell_area(cube) - area = cube.cell_measure('cell_area').copy() + area = cube.cell_measure("cell_area").copy() area_dim = cube.cell_measure_dims(area) - - # Ensure cell area is in square meters as the units - area.convert_units('m2') + area.convert_units("m2") + area_array = area.core_data() + if cube.has_lazy_data(): + area_array = da.array(area_array) # Make sure input cube has not been modified if not has_cell_measure: - cube.remove_cell_measure('cell_area') + cube.remove_cell_measure("cell_area") chunks = cube.core_data().chunks if cube.has_lazy_data() else None area_arr = broadcast_to_shape( - area.core_data(), cube.shape, area_dim, chunks=chunks) + area_array, cube.shape, area_dim, chunks=chunks + ) thickness_arr = broadcast_to_shape( - thickness, cube.shape, z_dim, chunks=chunks) + thickness, cube.shape, z_dim, chunks=chunks + ) grid_volume = area_arr * thickness_arr return grid_volume @@ -181,7 +188,7 @@ def calculate_volume(cube: Cube) -> da.core.Array: def _try_adding_calculated_ocean_volume(cube: Cube) -> None: """Try to add calculated cell measure 'ocean_volume' to cube (in-place).""" - if cube.cell_measures('ocean_volume'): + if cube.cell_measures("ocean_volume"): return logger.debug( @@ -195,22 +202,22 @@ def _try_adding_calculated_ocean_volume(cube: Cube) -> None: cell_measure = CellMeasure( grid_volume, - standard_name='ocean_volume', - units='m3', - measure='volume', + standard_name="ocean_volume", + units="m3", + measure="volume", ) cube.add_cell_measure(cell_measure, np.arange(cube.ndim)) @register_supplementaries( - variables=['volcello', 'areacello'], - required='prefer_at_least_one', + variables=["volcello", "areacello"], + required="prefer_at_least_one", ) @preserve_float_dtype def volume_statistics( cube: Cube, operator: str, - normalize: Optional[Literal['subtract', 'divide']] = None, + normalize: Optional[Literal["subtract", "divide"]] = None, **operator_kwargs, ) -> Cube: """Apply a statistical operation over a volume. @@ -253,16 +260,16 @@ def volume_statistics( Collapsed cube. """ - has_cell_measure = bool(cube.cell_measures('ocean_volume')) + has_cell_measure = bool(cube.cell_measures("ocean_volume")) # TODO: Test sigma coordinates. # TODO: Add other operations. - if operator != 'mean': + if operator != "mean": raise ValueError(f"Volume operator {operator} not recognised.") # get z, y, x coords - z_axis = cube.coord(axis='Z') - y_axis = cube.coord(axis='Y') - x_axis = cube.coord(axis='X') + z_axis = cube.coord(axis="Z") + y_axis = cube.coord(axis="Y") + x_axis = cube.coord(axis="X") # assert z axis only uses 1 dimension more than x, y axis xy_dims = tuple({*cube.coord_dims(y_axis), *cube.coord_dims(x_axis)}) @@ -272,13 +279,14 @@ def volume_statistics( f"X and Y axis coordinates depend on {xy_dims} dimensions, " f"while X, Y, and Z axis depends on {xyz_dims} dimensions. " "This may indicate Z axis depending on other dimension than " - "space that could provoke invalid aggregation...") + "space that could provoke invalid aggregation..." 
+ ) (agg, agg_kwargs) = get_iris_aggregator(operator, **operator_kwargs) agg_kwargs = update_weights_kwargs( agg, agg_kwargs, - 'ocean_volume', + "ocean_volume", cube, _try_adding_calculated_ocean_volume, ) @@ -288,8 +296,8 @@ def volume_statistics( result = get_normalized_cube(cube, result, normalize) # Make sure input cube has not been modified - if not has_cell_measure and cube.cell_measures('ocean_volume'): - cube.remove_cell_measure('ocean_volume') + if not has_cell_measure and cube.cell_measures("ocean_volume"): + cube.remove_cell_measure("ocean_volume") return result @@ -299,7 +307,7 @@ def axis_statistics( cube: Cube, axis: str, operator: str, - normalize: Optional[Literal['subtract', 'divide']] = None, + normalize: Optional[Literal["subtract", "divide"]] = None, **operator_kwargs, ) -> Cube: """Perform statistics along a given axis. @@ -362,7 +370,7 @@ def axis_statistics( agg_kwargs = update_weights_kwargs( agg, agg_kwargs, - '_axis_statistics_weights_', + "_axis_statistics_weights_", cube, _add_axis_stats_weights_coord, coord=coord, @@ -371,13 +379,13 @@ def axis_statistics( with warnings.catch_warnings(): warnings.filterwarnings( - 'ignore', + "ignore", message=( "Cannot check if coordinate is contiguous: Invalid " "operation for '_axis_statistics_weights_'" ), category=UserWarning, - module='iris', + module="iris", ) result = cube.collapsed(coord, agg, **agg_kwargs) @@ -385,10 +393,10 @@ def axis_statistics( result = get_normalized_cube(cube, result, normalize) # Make sure input and output cubes do not have auxiliary coordinate - if cube.coords('_axis_statistics_weights_'): - cube.remove_coord('_axis_statistics_weights_') - if result.coords('_axis_statistics_weights_'): - result.remove_coord('_axis_statistics_weights_') + if cube.coords("_axis_statistics_weights_"): + cube.remove_coord("_axis_statistics_weights_") + if result.coords("_axis_statistics_weights_"): + result.remove_coord("_axis_statistics_weights_") return result @@ -400,7 +408,7 @@ def _add_axis_stats_weights_coord(cube, coord, coord_dims): weights = weights.compute() weights_coord = AuxCoord( weights, - long_name='_axis_statistics_weights_', + long_name="_axis_statistics_weights_", units=coord.units, ) cube.add_aux_coord(weights_coord, coord_dims) @@ -425,8 +433,8 @@ def depth_integration(cube: Cube) -> Cube: Collapsed cube. """ - result = axis_statistics(cube, axis='z', operator='sum') - result.rename('Depth_integrated_' + str(cube.name())) + result = axis_statistics(cube, axis="z", operator="sum") + result.rename("Depth_integrated_" + str(cube.name())) return result @@ -482,24 +490,28 @@ def extract_transect( # ### coord_dim2 = False second_coord_range: None | list = None - lats = cube.coord('latitude') - lons = cube.coord('longitude') + lats = cube.coord("latitude") + lons = cube.coord("longitude") if lats.ndim == 2: raise ValueError( - 'extract_transect: Not implemented for irregular arrays!' + - '\nTry regridding the data first.') + "extract_transect: Not implemented for irregular arrays!" + + "\nTry regridding the data first." 
+ ) if isinstance(latitude, float) and isinstance(longitude, float): raise ValueError( - "extract_transect: Can't slice along lat and lon at the same time") + "extract_transect: Can't slice along lat and lon at the same time" + ) if isinstance(latitude, list) and isinstance(longitude, list): raise ValueError( - "extract_transect: Can't reduce lat and lon at the same time") + "extract_transect: Can't reduce lat and lon at the same time" + ) - for dim_name, dim_cut, coord in zip(['latitude', 'longitude'], - [latitude, longitude], [lats, lons]): + for dim_name, dim_cut, coord in zip( + ["latitude", "longitude"], [latitude, longitude], [lats, lons] + ): # #### # Look for the first coordinate. if isinstance(dim_cut, float): @@ -512,7 +524,7 @@ def extract_transect( coord_dim2 = cube.coord_dims(dim_name)[0] second_coord_range = [ coord.nearest_neighbour_index(dim_cut[0]), - coord.nearest_neighbour_index(dim_cut[1]) + coord.nearest_neighbour_index(dim_cut[1]), ] # #### # Extracting the line of constant longitude/latitude @@ -520,8 +532,9 @@ def extract_transect( slices[coord_dim] = coord_index if second_coord_range is not None: - slices[coord_dim2] = slice(second_coord_range[0], - second_coord_range[1]) + slices[coord_dim2] = slice( + second_coord_range[0], second_coord_range[1] + ) return cube[tuple(slices)] @@ -574,7 +587,8 @@ def extract_trajectory( if len(latitudes) != len(longitudes): raise ValueError( - 'Longitude & Latitude coordinates have different lengths') + "Longitude & Latitude coordinates have different lengths" + ) if len(latitudes) == len(longitudes) == 2: minlat, maxlat = np.min(latitudes), np.max(latitudes) @@ -583,6 +597,6 @@ def extract_trajectory( longitudes = np.linspace(minlon, maxlon, num=number_points) latitudes = np.linspace(minlat, maxlat, num=number_points) - points = [('latitude', latitudes), ('longitude', longitudes)] + points = [("latitude", latitudes), ("longitude", longitudes)] interpolated_cube = interpolate(cube, points) # Very slow! return interpolated_cube diff --git a/esmvalcore/preprocessor/_weighting.py b/esmvalcore/preprocessor/_weighting.py index 5aa2a70dd5..ab577ad594 100644 --- a/esmvalcore/preprocessor/_weighting.py +++ b/esmvalcore/preprocessor/_weighting.py @@ -15,26 +15,28 @@ def _get_land_fraction(cube): land_fraction = None errors = [] try: - fx_cube = cube.ancillary_variable('land_area_fraction') + fx_cube = cube.ancillary_variable("land_area_fraction") except iris.exceptions.AncillaryVariableNotFoundError: try: - fx_cube = cube.ancillary_variable('sea_area_fraction') + fx_cube = cube.ancillary_variable("sea_area_fraction") except iris.exceptions.AncillaryVariableNotFoundError: - errors.append('Ancillary variables land/sea area fraction not ' - 'found in cube. Check ancillary data availability.') + errors.append( + "Ancillary variables land/sea area fraction not " + "found in cube. Check ancillary data availability." + ) return (land_fraction, errors) - if fx_cube.var_name == 'sftlf': + if fx_cube.var_name == "sftlf": land_fraction = fx_cube.core_data() / 100.0 - if fx_cube.var_name == 'sftof': + if fx_cube.var_name == "sftof": land_fraction = 1.0 - fx_cube.core_data() / 100.0 return (land_fraction, errors) @register_supplementaries( - variables=['sftlf', 'sftof'], - required='require_at_least_one', + variables=["sftlf", "sftof"], + required="require_at_least_one", ) def weighting_landsea_fraction(cube, area_type): """Weight fields using land or sea fraction. 
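# =============================================================================
# [Editor's note] Illustrative sketch, not part of the patch: weighting a
# field by its land fraction with `weighting_landsea_fraction`. The cube and
# the `sftlf` ancillary variable below are assumptions made up for this
# example.
import numpy as np
from iris.coords import AncillaryVariable, DimCoord
from iris.cube import Cube

from esmvalcore.preprocessor import weighting_landsea_fraction

lat = DimCoord([0.0, 10.0, 20.0], standard_name="latitude", units="degrees")
cube = Cube(
    np.ones(3, dtype=np.float32),
    var_name="nbp",
    units="kg m-2 s-1",
    dim_coords_and_dims=[(lat, 0)],
)
sftlf = AncillaryVariable(
    np.array([100.0, 30.0, 0.0]),  # % of each cell covered by land
    standard_name="land_area_fraction",
    var_name="sftlf",
    units="%",
)
cube.add_ancillary_variable(sftlf, 0)

land_weighted = weighting_landsea_fraction(cube, area_type="land")
print(land_weighted.data)  # land fraction scales each value; sea points -> 0
# =============================================================================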
@@ -69,17 +71,19 @@ def weighting_landsea_fraction(cube, area_type): ValueError Land/sea fraction variables ``sftlf`` or ``sftof`` not found. """ - if area_type not in ('land', 'sea'): + if area_type not in ("land", "sea"): raise TypeError( - f"Expected 'land' or 'sea' for area_type, got '{area_type}'") + f"Expected 'land' or 'sea' for area_type, got '{area_type}'" + ) (land_fraction, errors) = _get_land_fraction(cube) if land_fraction is None: raise ValueError( f"Weighting of '{cube.var_name}' with '{area_type}' fraction " - f"failed because of the following errors: {' '.join(errors)}") + f"failed because of the following errors: {' '.join(errors)}" + ) core_data = cube.core_data() - if area_type == 'land': + if area_type == "land": cube.data = core_data * land_fraction - elif area_type == 'sea': + elif area_type == "sea": cube.data = core_data * (1.0 - land_fraction) return cube diff --git a/esmvalcore/preprocessor/regrid_schemes.py b/esmvalcore/preprocessor/regrid_schemes.py index 91af9e3fdf..abe02a2cd1 100644 --- a/esmvalcore/preprocessor/regrid_schemes.py +++ b/esmvalcore/preprocessor/regrid_schemes.py @@ -1,4 +1,5 @@ """Regridding schemes.""" + from __future__ import annotations import logging @@ -12,6 +13,7 @@ ESMPyNearest, ESMPyRegridder, ) +from esmvalcore.preprocessor._regrid_iris_esmf_regrid import IrisESMFRegrid from esmvalcore.preprocessor._regrid_unstructured import ( UnstructuredLinear, UnstructuredLinearRegridder, @@ -20,17 +22,17 @@ logger = logging.getLogger(__name__) - __all__ = [ - 'ESMPyAreaWeighted', - 'ESMPyLinear', - 'ESMPyNearest', - 'ESMPyRegridder', - 'GenericFuncScheme', - 'GenericRegridder', - 'UnstructuredLinear', - 'UnstructuredLinearRegridder', - 'UnstructuredNearest', + "ESMPyAreaWeighted", + "ESMPyLinear", + "ESMPyNearest", + "ESMPyRegridder", + "IrisESMFRegrid", + "GenericFuncScheme", + "GenericRegridder", + "UnstructuredLinear", + "UnstructuredLinearRegridder", + "UnstructuredNearest", ] @@ -51,7 +53,6 @@ class GenericRegridder: Cube, \*\*kwargs) -> Cube. **kwargs: Keyword arguments for the generic regridding function. - """ def __init__( @@ -79,7 +80,6 @@ def __call__(self, cube: Cube) -> Cube: ------- Cube Regridded cube. - """ return self.func(cube, self.tgt_cube, **self.kwargs) @@ -98,7 +98,6 @@ class GenericFuncScheme: Cube, \*\*kwargs) -> Cube. **kwargs: Keyword arguments for the generic regridding function. - """ def __init__(self, func: Callable, **kwargs): @@ -108,8 +107,8 @@ def __init__(self, func: Callable, **kwargs): def __repr__(self) -> str: """Return string representation of class.""" - kwargs = ', '.join(f"{k}={v}" for (k, v) in self.kwargs.items()) - return f'GenericFuncScheme({self.func.__name__}, {kwargs})' + kwargs = ", ".join(f"{k}={v}" for (k, v) in self.kwargs.items()) + return f"GenericFuncScheme({self.func.__name__}, {kwargs})" def regridder(self, src_cube: Cube, tgt_cube: Cube) -> GenericRegridder: """Get regridder. @@ -125,6 +124,5 @@ def regridder(self, src_cube: Cube, tgt_cube: Cube) -> GenericRegridder: ------- GenericRegridder Regridder instance. 
- """ return GenericRegridder(src_cube, tgt_cube, self.func, **self.kwargs) diff --git a/esmvalcore/typing.py b/esmvalcore/typing.py index 410b31f0f8..1c03d07944 100644 --- a/esmvalcore/typing.py +++ b/esmvalcore/typing.py @@ -1,4 +1,5 @@ """Type aliases for providing type hints.""" + from __future__ import annotations from numbers import Number diff --git a/notebooks/composing-recipes.ipynb b/notebooks/composing-recipes.ipynb index bb7259c31b..5156618d88 100644 --- a/notebooks/composing-recipes.ipynb +++ b/notebooks/composing-recipes.ipynb @@ -17,9 +17,10 @@ "metadata": {}, "outputs": [], "source": [ + "import yaml\n", + "\n", "from esmvalcore.config import CFG\n", - "from esmvalcore.dataset import Dataset, datasets_to_recipe\n", - "import yaml" + "from esmvalcore.dataset import Dataset, datasets_to_recipe" ] }, { @@ -38,7 +39,7 @@ "metadata": {}, "outputs": [], "source": [ - "CFG['search_esgf'] = 'always'" + "CFG[\"search_esgf\"] = \"always\"" ] }, { @@ -84,18 +85,18 @@ ], "source": [ "tas = Dataset(\n", - " short_name='tas',\n", - " mip='Amon',\n", - " project='CMIP6',\n", - " dataset='CanESM5-1',\n", - " ensemble='r1i1p1f1',\n", - " exp='historical',\n", - " grid='gn',\n", - " timerange='2000/2002',\n", + " short_name=\"tas\",\n", + " mip=\"Amon\",\n", + " project=\"CMIP6\",\n", + " dataset=\"CanESM5-1\",\n", + " ensemble=\"r1i1p1f1\",\n", + " exp=\"historical\",\n", + " grid=\"gn\",\n", + " timerange=\"2000/2002\",\n", ")\n", - "tas['diagnostic'] = 'diagnostic_name'\n", + "tas[\"diagnostic\"] = \"diagnostic_name\"\n", "\n", - "pr = tas.copy(short_name='pr')\n", + "pr = tas.copy(short_name=\"pr\")\n", "\n", "print(yaml.safe_dump(datasets_to_recipe([tas, pr])))" ] @@ -127,14 +128,14 @@ ], "source": [ "dataset_template = Dataset(\n", - " short_name='tas',\n", - " mip='Amon',\n", - " project='CMIP6',\n", - " exp='historical',\n", - " dataset='*',\n", - " institute='*',\n", - " ensemble='*',\n", - " grid='*',\n", + " short_name=\"tas\",\n", + " mip=\"Amon\",\n", + " project=\"CMIP6\",\n", + " exp=\"historical\",\n", + " dataset=\"*\",\n", + " institute=\"*\",\n", + " ensemble=\"*\",\n", + " grid=\"*\",\n", ")\n", "datasets = list(dataset_template.from_files())\n", "len(datasets)" @@ -584,7 +585,7 @@ ], "source": [ "for dataset in datasets:\n", - " dataset.facets['diagnostic'] = 'diagnostic_name'\n", + " dataset.facets[\"diagnostic\"] = \"diagnostic_name\"\n", "print(yaml.safe_dump(datasets_to_recipe(datasets)))" ] } diff --git a/notebooks/discovering-data.ipynb b/notebooks/discovering-data.ipynb index 923c915ac7..d6c9001ef2 100644 --- a/notebooks/discovering-data.ipynb +++ b/notebooks/discovering-data.ipynb @@ -19,9 +19,8 @@ "outputs": [], "source": [ "from esmvalcore.config import CFG\n", - "from esmvalcore.dataset import Dataset, datasets_to_recipe\n", - "from esmvalcore.esgf import download\n", - "import yaml" + "from esmvalcore.dataset import Dataset\n", + "from esmvalcore.esgf import download" ] }, { @@ -40,7 +39,7 @@ "metadata": {}, "outputs": [], "source": [ - "CFG['search_esgf'] = 'always'" + "CFG[\"search_esgf\"] = \"always\"" ] }, { @@ -60,14 +59,14 @@ "outputs": [], "source": [ "dataset_template = Dataset(\n", - " short_name='tas',\n", - " mip='Amon',\n", - " project='CMIP6',\n", - " exp='historical',\n", - " dataset='*',\n", - " institute='*',\n", - " ensemble='*',\n", - " grid='*',\n", + " short_name=\"tas\",\n", + " mip=\"Amon\",\n", + " project=\"CMIP6\",\n", + " exp=\"historical\",\n", + " dataset=\"*\",\n", + " institute=\"*\",\n", + " ensemble=\"*\",\n", + " 
grid=\"*\",\n", ")" ] }, @@ -292,7 +291,7 @@ } ], "source": [ - "dataset.files[0].download(CFG['download_dir'])" + "dataset.files[0].download(CFG[\"download_dir\"])" ] }, { @@ -311,7 +310,7 @@ "metadata": {}, "outputs": [], "source": [ - "download(dataset.files, CFG['download_dir'])" + "download(dataset.files, CFG[\"download_dir\"])" ] } ], diff --git a/notebooks/loading-and-processing-data.ipynb b/notebooks/loading-and-processing-data.ipynb index 1487f3f10c..bb85566430 100644 --- a/notebooks/loading-and-processing-data.ipynb +++ b/notebooks/loading-and-processing-data.ipynb @@ -19,13 +19,13 @@ "source": [ "%matplotlib inline\n", "\n", - "import matplotlib.pyplot as plt\n", "import iris.quickplot\n", + "import matplotlib.pyplot as plt\n", "\n", "from esmvalcore.config import CFG\n", "from esmvalcore.dataset import Dataset\n", - "from esmvalcore.esgf import download, ESGFFile\n", - "from esmvalcore.preprocessor import area_statistics, annual_statistics" + "from esmvalcore.esgf import ESGFFile, download\n", + "from esmvalcore.preprocessor import annual_statistics, area_statistics" ] }, { @@ -43,7 +43,7 @@ "metadata": {}, "outputs": [], "source": [ - "CFG['search_esgf'] = 'when_missing'" + "CFG[\"search_esgf\"] = \"when_missing\"" ] }, { @@ -80,13 +80,13 @@ ], "source": [ "tas = Dataset(\n", - " short_name='tas',\n", - " mip='Amon',\n", - " project='CMIP5',\n", - " dataset='MPI-ESM-MR',\n", - " ensemble='r1i1p1',\n", - " exp='historical',\n", - " timerange='1850/2000',\n", + " short_name=\"tas\",\n", + " mip=\"Amon\",\n", + " project=\"CMIP5\",\n", + " dataset=\"MPI-ESM-MR\",\n", + " ensemble=\"r1i1p1\",\n", + " exp=\"historical\",\n", + " timerange=\"1850/2000\",\n", ")\n", "tas" ] @@ -124,7 +124,7 @@ } ], "source": [ - "tas.add_supplementary(short_name='areacella', mip='fx', ensemble='r0i0p0')\n", + "tas.add_supplementary(short_name=\"areacella\", mip=\"fx\", ensemble=\"r0i0p0\")\n", "tas.supplementaries" ] }, @@ -247,7 +247,7 @@ "for supplementary_ds in tas.supplementaries:\n", " files.extend(supplementary_ds.files)\n", "files = [file for file in files if isinstance(file, ESGFFile)]\n", - "download(files, CFG['download_dir'])\n", + "download(files, CFG[\"download_dir\"])\n", "tas.find_files()\n", "print(tas.files)\n", "for supplementary_ds in tas.supplementaries:\n", @@ -548,9 +548,9 @@ "metadata": {}, "outputs": [], "source": [ - "cube = area_statistics(cube, operator='mean')\n", - "cube = annual_statistics(cube, operator='mean')\n", - "cube.convert_units('degrees_C')" + "cube = area_statistics(cube, operator=\"mean\")\n", + "cube = annual_statistics(cube, operator=\"mean\")\n", + "cube.convert_units(\"degrees_C\")" ] }, { diff --git a/pyproject.toml b/pyproject.toml index 7a7d8388dc..5a45ca2ab9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,10 @@ build-backend = "setuptools.build_meta" [tool.setuptools_scm] version_scheme = "release-branch-semver" +[tool.codespell] +skip = "*.ipynb,esmvalcore/config/extra_facets/ipslcm-mappings.yml" +ignore-words-list = "vas,hist,oce" + [tool.pylint.main] jobs = 1 # Running more than one job in parallel crashes prospector. ignore-paths = [ @@ -22,5 +26,21 @@ max-line-length = 79 disable = [ "import-error", # Needed because Codacy does not install dependencies "file-ignored", # Disable messages about disabling checks + "line-too-long", # Disable line-too-long as this is taken care of by the formatter. 
"locally-disabled", # Disable messages about disabling checks ] +[tool.ruff] +line-length = 79 +[tool.ruff.lint] +select = [ + "E", # pycodestyle + "F", # pyflakes + "I", # isort + "ISC001", # pycodestyle + "W", # pycodestyle +] +ignore = [ + "E501", # Disable line-too-long as this is taken care of by the formatter. +] +[tool.ruff.lint.isort] +known-first-party = ["esmvalcore"] diff --git a/setup.cfg b/setup.cfg index 3bf09d26f9..15d02392d7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,5 @@ [tool:pytest] addopts = - --mypy --doctest-modules --ignore=esmvalcore/cmor/tables/ --cov=esmvalcore @@ -14,11 +13,6 @@ markers = installation: Test requires installation of dependencies use_sample_data: Run functional tests using real data -[flake8] -exclude = - .eggs/ - doc/conf.py - [coverage:run] parallel = true [coverage:report] @@ -27,23 +21,18 @@ exclude_lines = if __name__ == .__main__.: if TYPE_CHECKING: +[pycodestyle] +# ignore rules that conflict with ruff formatter +# E203: https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#slices +# E501: https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules +# W503: https://pycodestyle.pycqa.org/en/latest/intro.html#error-codes +ignore = E203,E501,W503 + [pydocstyle] convention = numpy -[isort] -multi_line_output = 3 -include_trailing_comma = true - [mypy] # see mypy.readthedocs.io/en/stable/command_line.html -python_version = 3.9 +python_version = 3.12 ignore_missing_imports = True files = esmvalcore, tests - -[yapf] -based_on_style = pep8 -# see https://github.com/google/yapf/issues/744 -blank_line_before_nested_class_or_def = true - -[codespell] -ignore-words-list = vas,hist diff --git a/setup.py b/setup.py index 9438202769..32d2e15e27 100755 --- a/setup.py +++ b/setup.py @@ -16,89 +16,77 @@ from setuptools import Command, setup PACKAGES = [ - 'esmvalcore', + "esmvalcore", ] REQUIREMENTS = { # Installation script (this file) dependencies - 'setup': [ - 'setuptools_scm', + "setup": [ + "setuptools_scm", ], # Installation dependencies # Use with pip install . 
to install from source - 'install': [ - 'cartopy', - 'cf-units', - 'dask[array,distributed]', - 'dask-jobqueue', - 'esgf-pyclient>=0.3.1', - 'esmf-regrid>=0.10.0', # iris-esmf-regrid #342 - 'esmpy!=8.1.0', # not on PyPI - 'filelock', - 'fiona', - 'fire', - 'geopy', - 'humanfriendly', - "importlib_metadata;python_version<'3.10'", - 'isodate', - 'jinja2', - 'nc-time-axis', # needed by iris.plot - 'nested-lookup', - 'netCDF4', - 'numpy!=1.24.3,<2.0.0', # avoid pulling 2.0.0rc1 - 'packaging', - 'pandas!=2.2.0,!=2.2.1,!=2.2.2', # GH #2305 #2349 etc - 'pillow', - 'prov', - 'psutil', - 'py-cordex', - 'pybtex', - 'pyyaml', - 'requests', - 'scipy>=1.6', - 'scitools-iris>=3.9.0', - 'shapely>=2.0.0', - 'stratify>=0.3', - 'yamale', + "install": [ + "cartopy", + "cf-units", + "dask[array,distributed]!=2024.8.0", # ESMValCore/issues/2503 + "dask-jobqueue", + "esgf-pyclient>=0.3.1", + "esmf-regrid>=0.11.0", + "esmpy!=8.1.0", # not on PyPI + "filelock", + "fiona", + "fire", + "geopy", + "humanfriendly", + "iris-grib", + "isodate", + "jinja2", + "nc-time-axis", # needed by iris.plot + "nested-lookup", + "netCDF4", + "numpy!=1.24.3,<2.0.0", # avoid pulling 2.0.0rc1 + "packaging", + "pandas", + "pillow", + "prov", + "psutil", + "py-cordex", + "pybtex", + "pyyaml", + "requests", + "scipy>=1.6", + "scitools-iris>=3.10.0", + "shapely>=2.0.0", + "stratify>=0.3", + "yamale", ], # Test dependencies - 'test': [ - 'flake8>=7.0.0', # not to pick up E231 - 'pytest>=3.9,!=6.0.0rc1,!=6.0.0', - 'pytest-cov>=2.10.1', - 'pytest-env', - 'pytest-html!=2.1.0', - 'pytest-metadata>=1.5.1', - 'pytest-mypy>=0.10.3', # gh issue/2314 - 'pytest-mock', - 'pytest-xdist', - 'ESMValTool_sample_data==0.0.3', - # MyPy library stubs - 'mypy>=0.990', - 'types-requests', - 'types-PyYAML', + "test": [ + "pytest>=3.9,!=6.0.0rc1,!=6.0.0", + "pytest-cov>=2.10.1", + "pytest-env", + "pytest-html!=2.1.0", + "pytest-metadata>=1.5.1", + "pytest-mock", + "pytest-xdist", + "ESMValTool_sample_data==0.0.3", ], # Documentation dependencies - 'doc': [ - 'autodocsumm>=0.2.2', - 'ipython', - 'nbsphinx', - 'sphinx>=6.1.3', - 'pydata_sphinx_theme', + "doc": [ + "autodocsumm>=0.2.2", + "ipython", + "nbsphinx", + "sphinx>=6.1.3", + "pydata_sphinx_theme", ], # Development dependencies # Use pip install -e .[develop] to install in development mode - 'develop': [ - 'codespell', - 'docformatter', - 'isort', - 'flake8>=7', - 'pre-commit', - 'pylint', - 'pydocstyle', - 'vprof', - 'yamllint', - 'yapf', + "develop": [ + "pre-commit", + "pylint", + "pydocstyle", + "vprof", ], } @@ -116,8 +104,7 @@ def _ignore(path): continue for filename in files: filename = os.path.join(root, filename) - if (filename.lower().endswith('.py') - and not _ignore(filename)): + if filename.lower().endswith(".py") and not _ignore(filename): yield filename @@ -128,7 +115,8 @@ def install_deps_temp(self): """Try to temporarily install packages needed to run the command.""" if self.distribution.install_requires: self.distribution.fetch_build_eggs( - self.distribution.install_requires) + self.distribution.install_requires + ) if self.distribution.tests_require: self.distribution.fetch_build_eggs(self.distribution.tests_require) @@ -147,11 +135,11 @@ def finalize_options(self): def run(self): """Run prospector and generate a report.""" check_paths = PACKAGES + [ - 'setup.py', - 'tests', + "setup.py", + "tests", ] ignore = [ - 'doc/', + "doc/", ] # try to install missing dependencies and import prospector @@ -159,7 +147,7 @@ def run(self): from prospector.run import main except ImportError: # try to 
install and then import - self.distribution.fetch_build_eggs(['prospector[with_pyroma]']) + self.distribution.fetch_build_eggs(["prospector[with_pyroma]"]) from prospector.run import main self.install_deps_temp() @@ -172,7 +160,7 @@ def run(self): # write command line files = discover_python_files(check_paths, ignore) - sys.argv = ['prospector'] + sys.argv = ["prospector"] sys.argv.extend(files) # run prospector @@ -183,70 +171,69 @@ def run(self): def read_authors(filename): """Read the list of authors from .zenodo.json file.""" - with Path(filename).open(encoding='utf-8') as file: + with Path(filename).open(encoding="utf-8") as file: info = json.load(file) authors = [] - for author in info['creators']: - name = ' '.join(author['name'].split(',')[::-1]).strip() + for author in info["creators"]: + name = " ".join(author["name"].split(",")[::-1]).strip() authors.append(name) - return ', '.join(authors) + return ", ".join(authors) def read_description(filename): """Read the description from .zenodo.json file.""" - with Path(filename).open(encoding='utf-8') as file: + with Path(filename).open(encoding="utf-8") as file: info = json.load(file) - return info['description'] + return info["description"] setup( - name='ESMValCore', - author=read_authors('.zenodo.json'), - description=read_description('.zenodo.json'), - long_description=Path('README.md').read_text(encoding='utf-8'), - long_description_content_type='text/markdown', - url='https://www.esmvaltool.org', - download_url='https://github.com/ESMValGroup/ESMValCore', - license='Apache License, Version 2.0', + name="ESMValCore", + author=read_authors(".zenodo.json"), + description=read_description(".zenodo.json"), + long_description=Path("README.md").read_text(encoding="utf-8"), + long_description_content_type="text/markdown", + url="https://www.esmvaltool.org", + download_url="https://github.com/ESMValGroup/ESMValCore", + license="Apache License, Version 2.0", classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: Apache Software License', - 'Natural Language :: English', - 'Operating System :: POSIX :: Linux', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Topic :: Scientific/Engineering', - 'Topic :: Scientific/Engineering :: Atmospheric Science', - 'Topic :: Scientific/Engineering :: GIS', - 'Topic :: Scientific/Engineering :: Hydrology', - 'Topic :: Scientific/Engineering :: Physics', + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Natural Language :: English", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Atmospheric Science", + "Topic :: Scientific/Engineering :: GIS", + "Topic :: Scientific/Engineering :: Hydrology", + "Topic :: Scientific/Engineering :: Physics", ], packages=PACKAGES, # Include all version controlled files include_package_data=True, - setup_requires=REQUIREMENTS['setup'], - install_requires=REQUIREMENTS['install'], - 
tests_require=REQUIREMENTS['test'], + setup_requires=REQUIREMENTS["setup"], + install_requires=REQUIREMENTS["install"], + tests_require=REQUIREMENTS["test"], extras_require={ - 'develop': - REQUIREMENTS['develop'] + REQUIREMENTS['test'] + REQUIREMENTS['doc'], - 'test': - REQUIREMENTS['test'], - 'doc': - REQUIREMENTS['doc'], + "develop": REQUIREMENTS["develop"] + + REQUIREMENTS["test"] + + REQUIREMENTS["doc"], + "test": REQUIREMENTS["test"], + "doc": REQUIREMENTS["doc"], }, entry_points={ - 'console_scripts': [ - 'esmvaltool = esmvalcore._main:run', + "console_scripts": [ + "esmvaltool = esmvalcore._main:run", ], }, cmdclass={ - 'lint': RunLinter, + "lint": RunLinter, }, zip_safe=False, ) diff --git a/tests/__init__.py b/tests/__init__.py index a059d6b310..7c55516496 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +1,5 @@ """Provides testing capabilities for :mod:`esmvaltool` package.""" + import unittest from unittest import mock @@ -16,6 +17,7 @@ def assert_array_equal(a, b): class Test(unittest.TestCase): """Provides esmvaltool specific testing functionality.""" + def _remove_testcase_patches(self): """ Helper method to remove per-testcase patches installed by @@ -53,7 +55,7 @@ def patch(self, *args, **kwargs): # Create the per-testcases control variable if it does not exist. # NOTE: this mimics a setUp method, but continues to work when a # subclass defines its own setUp. - if not hasattr(self, 'testcase_patches'): + if not hasattr(self, "testcase_patches"): self.testcase_patches = {} # When installing the first patch, schedule remove-all at cleanup. diff --git a/tests/integration/cmor/_fixes/access/__init__.py b/tests/integration/cmor/_fixes/access/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/cmor/_fixes/access/test_access_esm1_5.py b/tests/integration/cmor/_fixes/access/test_access_esm1_5.py new file mode 100644 index 0000000000..7eac272619 --- /dev/null +++ b/tests/integration/cmor/_fixes/access/test_access_esm1_5.py @@ -0,0 +1,401 @@ +"""Tests for the ACCESS-ESM on-the-fly CMORizer.""" + +import dask.array as da +import iris +import numpy as np +import pytest +from cf_units import Unit +from iris.coords import DimCoord +from iris.cube import Cube, CubeList + +import esmvalcore.cmor._fixes.access.access_esm1_5 +from esmvalcore.cmor._fixes.fix import GenericFix +from esmvalcore.cmor.fix import Fix +from esmvalcore.cmor.table import CoordinateInfo, get_var_info +from esmvalcore.config._config import get_extra_facets +from esmvalcore.dataset import Dataset + +time_coord = DimCoord( + [15, 45], + standard_name="time", + var_name="time", + units=Unit("days since 1851-01-01", calendar="noleap"), + attributes={"test": 1, "time_origin": "will_be_removed"}, +) +lat_coord = DimCoord( + [0, 10], + standard_name="latitude", + var_name="lat", + units="degrees", +) +lon_coord = DimCoord( + [-180, 0], + standard_name="longitude", + var_name="lon", + units="degrees", +) +coord_spec_3d = [ + (time_coord, 0), + (lat_coord, 1), + (lon_coord, 2), +] + + +@pytest.fixture +def cubes_2d(test_data_path): + """2D sample cubes.""" + nc_path = test_data_path / "access_native.nc" + return iris.load(str(nc_path)) + + +def _get_fix(mip, frequency, short_name, fix_name): + """Load a fix from :mod:`esmvalcore.cmor._fixes.access.access_esm1_5`.""" + dataset = Dataset( + project="ACCESS", + dataset="ACCESS-ESM1-5", + mip=mip, + short_name=short_name, + ) + extra_facets = get_extra_facets(dataset, ()) + extra_facets["frequency"] = frequency + 
extra_facets["exp"] = "amip" + vardef = get_var_info(project="ACCESS", mip=mip, short_name=short_name) + cls = getattr(esmvalcore.cmor._fixes.access.access_esm1_5, fix_name) + fix = cls(vardef, extra_facets=extra_facets, session={}, frequency="") + return fix + + +def get_fix(mip, frequency, short_name): + """Load a variable fix from esmvalcore.cmor._fixes.access.access_esm1_5.""" + fix_name = short_name[0].upper() + short_name[1:] + return _get_fix(mip, frequency, short_name, fix_name) + + +def get_fix_allvar(mip, frequency, short_name): + """Load a AllVar fix from esmvalcore.cmor._fixes.access.access_esm1_5.""" + return _get_fix(mip, frequency, short_name, "AllVars") + + +def fix_metadata(cubes, mip, frequency, short_name): + """Fix metadata of cubes.""" + fix = get_fix(mip, frequency, short_name) + cubes = fix.fix_metadata(cubes) + return cubes + + +def check_tas_metadata(cubes): + """Check tas metadata.""" + assert len(cubes) == 1 + cube = cubes[0] + assert cube.var_name == "tas" + assert cube.standard_name == "air_temperature" + assert cube.long_name == "Near-Surface Air Temperature" + assert cube.units == "K" + assert "positive" not in cube.attributes + return cube + + +def check_pr_metadata(cubes): + """Check pr metadata.""" + assert len(cubes) == 1 + cube = cubes[0] + assert cube.var_name == "pr" + assert cube.standard_name == "precipitation_flux" + assert cube.long_name == "Precipitation" + assert cube.units == "kg m-2 s-1" + assert "positive" not in cube.attributes + return cube + + +def check_time(cube): + """Check time coordinate of cube.""" + assert cube.coords("time", dim_coords=True) + time = cube.coord("time", dim_coords=True) + assert time.var_name == "time" + assert time.standard_name == "time" + assert time.bounds.shape == (1, 2) + assert time.attributes == {} + + +def check_lat(cube): + """Check latitude coordinate of cube.""" + assert cube.coords("latitude", dim_coords=True) + lat = cube.coord("latitude", dim_coords=True) + assert lat.var_name == "lat" + assert lat.standard_name == "latitude" + assert lat.units == "degrees_north" + assert lat.attributes == {} + + +def check_lon(cube): + """Check longitude coordinate of cube.""" + assert cube.coords("longitude", dim_coords=True) + lon = cube.coord("longitude", dim_coords=True) + assert lon.var_name == "lon" + assert lon.standard_name == "longitude" + assert lon.units == "degrees_east" + assert lon.attributes == {} + + +def check_heightxm(cube, height_value): + """Check scalar heightxm coordinate of cube.""" + assert cube.coords("height") + height = cube.coord("height") + assert height.var_name == "height" + assert height.standard_name == "height" + assert height.units == "m" + assert height.attributes == {"positive": "up"} + np.testing.assert_allclose(height.points, [height_value]) + assert height.bounds is None + + +def assert_plev_metadata(cube): + """Assert plev metadata is correct.""" + assert cube.coord("air_pressure").standard_name == "air_pressure" + assert cube.coord("air_pressure").var_name == "plev" + assert cube.coord("air_pressure").units == "Pa" + assert cube.coord("air_pressure").attributes == {"positive": "down"} + + +def test_only_time(monkeypatch, cubes_2d): + """Test fix.""" + fix = get_fix_allvar("Amon", "mon", "pr") + + coord_info = CoordinateInfo("time") + coord_info.standard_name = "time" + monkeypatch.setattr(fix.vardef, "coordinates", {"time": coord_info}) + + cubes = cubes_2d + fixed_cubes = fix.fix_metadata(cubes) + + # Check cube metadata + cube = check_pr_metadata(fixed_cubes) + + # Check 
cube data + assert cube.shape == (1, 145, 192) + + # Check time metadata + assert cube.coords("time") + new_time_coord = cube.coord("time", dim_coords=True) + assert new_time_coord.var_name == "time" + assert new_time_coord.standard_name == "time" + + +def test_only_latitude(monkeypatch, cubes_2d): + """Test fix.""" + fix = get_fix_allvar("Amon", "mon", "pr") + + coord_info = CoordinateInfo("latitude") + coord_info.standard_name = "latitude" + monkeypatch.setattr(fix.vardef, "coordinates", {"latitude": coord_info}) + + cubes = cubes_2d + fixed_cubes = fix.fix_metadata(cubes) + + # Check cube metadata + cube = check_pr_metadata(fixed_cubes) + + # Check cube data + assert cube.shape == (1, 145, 192) + + # Check latitude metadata + assert cube.coords("latitude", dim_coords=True) + new_lat_coord = cube.coord("latitude") + assert new_lat_coord.var_name == "lat" + assert new_lat_coord.standard_name == "latitude" + assert new_lat_coord.units == "degrees_north" + + +def test_only_longitude(monkeypatch, cubes_2d): + """Test fix.""" + fix = get_fix_allvar("Amon", "mon", "pr") + + coord_info = CoordinateInfo("longitude") + coord_info.standard_name = "longitude" + monkeypatch.setattr(fix.vardef, "coordinates", {"longitude": coord_info}) + + cubes = cubes_2d + fixed_cubes = fix.fix_metadata(cubes) + + # Check cube metadata + cube = check_pr_metadata(fixed_cubes) + + # Check cube data + assert cube.shape == (1, 145, 192) + + # Check longitude metadata + assert cube.coords("longitude", dim_coords=True) + new_lon_coord = cube.coord("longitude") + assert new_lon_coord.var_name == "lon" + assert new_lon_coord.standard_name == "longitude" + assert new_lon_coord.units == "degrees_east" + + +def test_get_tas_fix(): + """Test getting of fix 'tas'.""" + fix = Fix.get_fixes("ACCESS", "ACCESS_ESM1_5", "Amon", "tas") + assert fix == [ + esmvalcore.cmor._fixes.access.access_esm1_5.Tas( + vardef={}, extra_facets={}, session={}, frequency="" + ), + esmvalcore.cmor._fixes.access.access_esm1_5.AllVars( + vardef={}, extra_facets={}, session={}, frequency="" + ), + GenericFix(None), + ] + + +def test_tas_fix(cubes_2d): + """Test fix 'tas'.""" + fix_tas = get_fix("Amon", "mon", "tas") + fix_allvar = get_fix_allvar("Amon", "mon", "tas") + fixed_cubes = fix_tas.fix_metadata(cubes_2d) + fixed_cubes = fix_allvar.fix_metadata(fixed_cubes) + fixed_cube = check_tas_metadata(fixed_cubes) + + check_time(fixed_cube) + check_lat(fixed_cube) + check_lon(fixed_cube) + check_heightxm(fixed_cube, 2) + + assert fixed_cube.shape == (1, 145, 192) + + +def test_hus_fix(): + """Test fix 'hus'.""" + time_coord = DimCoord( + [15, 45], + standard_name="time", + var_name="time", + units=Unit("days since 1851-01-01", calendar="noleap"), + attributes={"test": 1, "time_origin": "will_be_removed"}, + ) + plev_coord_rev = DimCoord( + [250, 500, 850], + var_name="pressure", + units="Pa", + ) + lat_coord_rev = DimCoord( + [10, -10], + standard_name="latitude", + var_name="lat", + units="degrees", + ) + lon_coord = DimCoord( + [-180, 0], + standard_name="longitude", + var_name="lon", + units="degrees", + ) + coord_spec_4d = [ + (time_coord, 0), + (plev_coord_rev, 1), + (lat_coord_rev, 2), + (lon_coord, 3), + ] + cube_4d = Cube( + da.arange(2 * 3 * 2 * 2, dtype=np.float32).reshape(2, 3, 2, 2), + standard_name="specific_humidity", + long_name="Specific Humidity", + var_name="fld_s30i205", + units="1", + dim_coords_and_dims=coord_spec_4d, + attributes={}, + ) + cubes_4d = CubeList([cube_4d]) + + fix = get_fix_allvar("Amon", "mon", "hus") + fixed_cubes = 
fix.fix_metadata(cubes_4d) + fixed_cube = fixed_cubes[0] + assert_plev_metadata(fixed_cube) + + assert fixed_cube.shape == (2, 3, 2, 2) + + +def test_rsus_fix(): + """Test fix 'rsus'.""" + time_coord = DimCoord( + [15, 45], + standard_name="time", + var_name="time", + units=Unit("days since 1851-01-01", calendar="noleap"), + attributes={"test": 1, "time_origin": "will_be_removed"}, + ) + lat_coord = DimCoord( + [0, 10], + standard_name="latitude", + var_name="lat", + units="degrees", + ) + lon_coord = DimCoord( + [-180, 0], + standard_name="longitude", + var_name="lon", + units="degrees", + ) + coord_spec_3d = [ + (time_coord, 0), + (lat_coord, 1), + (lon_coord, 2), + ] + cube_3d_1 = Cube( + da.arange(2 * 2 * 2, dtype=np.float32).reshape(2, 2, 2), + var_name="fld_s01i235", + units="W m-2", + dim_coords_and_dims=coord_spec_3d, + attributes={}, + ) + cube_3d_2 = Cube( + da.arange(2 * 2 * 2, dtype=np.float32).reshape(2, 2, 2), + var_name="fld_s01i201", + units="W m-2", + dim_coords_and_dims=coord_spec_3d, + attributes={}, + ) + cubes_3d = CubeList([cube_3d_1, cube_3d_2]) + + cube_result = cubes_3d[0] - cubes_3d[1] + + fix = get_fix("Amon", "mon", "rsus") + fixed_cubes = fix.fix_metadata(cubes_3d) + np.testing.assert_allclose(fixed_cubes[0].data, cube_result.data) + + +def test_rlus_fix(): + """Test fix 'rlus'.""" + cube_3d_1 = Cube( + da.arange(2 * 2 * 2, dtype=np.float32).reshape(2, 2, 2), + var_name="fld_s02i207", + units="W m-2", + dim_coords_and_dims=coord_spec_3d, + attributes={}, + ) + cube_3d_2 = Cube( + da.arange(2 * 2 * 2, dtype=np.float32).reshape(2, 2, 2), + var_name="fld_s02i201", + units="W m-2", + dim_coords_and_dims=coord_spec_3d, + attributes={}, + ) + cube_3d_3 = Cube( + da.arange(2 * 2 * 2, dtype=np.float32).reshape(2, 2, 2), + var_name="fld_s03i332", + units="W m-2", + dim_coords_and_dims=coord_spec_3d, + attributes={}, + ) + cube_3d_4 = Cube( + da.arange(2 * 2 * 2, dtype=np.float32).reshape(2, 2, 2), + var_name="fld_s02i205", + units="W m-2", + dim_coords_and_dims=coord_spec_3d, + attributes={}, + ) + + cubes_3d = CubeList([cube_3d_1, cube_3d_2, cube_3d_3, cube_3d_4]) + + cube_result = cubes_3d[0] - cubes_3d[1] + cubes_3d[2] - cubes_3d[3] + + fix = get_fix("Amon", "mon", "rlus") + fixed_cubes = fix.fix_metadata(cubes_3d) + np.testing.assert_allclose(fixed_cubes[0].data, cube_result.data) diff --git a/tests/integration/cmor/_fixes/cesm/test_cesm2.py b/tests/integration/cmor/_fixes/cesm/test_cesm2.py index 661eadcfb7..82a5f687d5 100644 --- a/tests/integration/cmor/_fixes/cesm/test_cesm2.py +++ b/tests/integration/cmor/_fixes/cesm/test_cesm2.py @@ -1,4 +1,5 @@ """Tests for the CESM2 on-the-fly CMORizer.""" + import iris import numpy as np import pytest @@ -19,7 +20,7 @@ @pytest.fixture def cubes_2d(test_data_path): """2D sample cubes.""" - nc_path = test_data_path / 'cesm2_native.nc' + nc_path = test_data_path / "cesm2_native.nc" return iris.load(str(nc_path)) @@ -29,10 +30,10 @@ def cube_1d_time(): time_coord = DimCoord( [2, 4, 6], bounds=[[0, 2], [2, 4], [4, 6]], - standard_name='time', - var_name='time', - long_name='time', - units='days since 1850-01-01', + standard_name="time", + var_name="time", + long_name="time", + units="days since 1850-01-01", ) cube = Cube([0, 0, 0], dim_coords_and_dims=[(time_coord, 0)]) return cube @@ -41,14 +42,14 @@ def cube_1d_time(): def _get_fix(mip, frequency, short_name, fix_name): """Load a fix from :mod:`esmvalcore.cmor._fixes.cesm.cesm2`.""" dataset = Dataset( - project='CESM', - dataset='CESM2', + project="CESM", + dataset="CESM2", 
mip=mip, short_name=short_name, ) extra_facets = get_extra_facets(dataset, ()) - extra_facets['frequency'] = frequency - vardef = get_var_info(project='CESM', mip=mip, short_name=short_name) + extra_facets["frequency"] = frequency + vardef = get_var_info(project="CESM", mip=mip, short_name=short_name) cls = getattr(esmvalcore.cmor._fixes.cesm.cesm2, fix_name) fix = cls(vardef, extra_facets=extra_facets) return fix @@ -62,7 +63,7 @@ def get_fix(mip, frequency, short_name): def get_allvars_fix(mip, frequency, short_name): """Load the AllVars fix from esmvalcore.cmor._fixes.cesm.cesm.""" - return _get_fix(mip, frequency, short_name, 'AllVars') + return _get_fix(mip, frequency, short_name, "AllVars") def fix_metadata(cubes, mip, frequency, short_name): @@ -78,27 +79,40 @@ def check_tas_metadata(cubes): """Check tas metadata.""" assert len(cubes) == 1 cube = cubes[0] - assert cube.var_name == 'tas' - assert cube.standard_name == 'air_temperature' - assert cube.long_name == 'Near-Surface Air Temperature' - assert cube.units == 'K' - assert 'positive' not in cube.attributes + assert cube.var_name == "tas" + assert cube.standard_name == "air_temperature" + assert cube.long_name == "Near-Surface Air Temperature" + assert cube.units == "K" + assert "positive" not in cube.attributes return cube def check_time(cube): """Check time coordinate of cube.""" - assert cube.coords('time', dim_coords=True) - time = cube.coord('time', dim_coords=True) - assert time.var_name == 'time' - assert time.standard_name == 'time' - assert time.long_name == 'time' - assert time.units == Unit('days since 1979-01-01 00:00:00', - calendar='365_day') + assert cube.coords("time", dim_coords=True) + time = cube.coord("time", dim_coords=True) + assert time.var_name == "time" + assert time.standard_name == "time" + assert time.long_name == "time" + assert time.units == Unit( + "days since 1979-01-01 00:00:00", calendar="365_day" + ) np.testing.assert_allclose( time.points, - [7649.5, 7680.5, 7710.0, 7739.5, 7770.0, 7800.5, 7831.0, 7861.5, - 7892.5, 7923.0, 7953.5, 7984.0], + [ + 7649.5, + 7680.5, + 7710.0, + 7739.5, + 7770.0, + 7800.5, + 7831.0, + 7861.5, + 7892.5, + 7923.0, + 7953.5, + 7984.0, + ], ) assert time.bounds.shape == (12, 2) assert time.attributes == {} @@ -106,56 +120,70 @@ def check_time(cube): def check_lat(cube): """Check latitude coordinate of cube.""" - assert cube.coords('latitude', dim_coords=True) - lat = cube.coord('latitude', dim_coords=True) - assert lat.var_name == 'lat' - assert lat.standard_name == 'latitude' - assert lat.long_name == 'latitude' - assert lat.units == 'degrees_north' + assert cube.coords("latitude", dim_coords=True) + lat = cube.coord("latitude", dim_coords=True) + assert lat.var_name == "lat" + assert lat.standard_name == "latitude" + assert lat.long_name == "latitude" + assert lat.units == "degrees_north" np.testing.assert_allclose( lat.points, - [59.4444082891668, 19.8757191474409, -19.8757191474409, - -59.4444082891668], + [ + 59.4444082891668, + 19.8757191474409, + -19.8757191474409, + -59.4444082891668, + ], ) np.testing.assert_allclose( lat.bounds, - [[90.0, 39.384861047478], - [39.384861047478, 0.0], - [0.0, -39.384861047478], - [-39.384861047478, -90.0]], + [ + [90.0, 39.384861047478], + [39.384861047478, 0.0], + [0.0, -39.384861047478], + [-39.384861047478, -90.0], + ], ) assert lat.attributes == {} def check_lon(cube): """Check longitude coordinate of cube.""" - assert cube.coords('longitude', dim_coords=True) - lon = cube.coord('longitude', dim_coords=True) - assert 
lon.var_name == 'lon' - assert lon.standard_name == 'longitude' - assert lon.long_name == 'longitude' - assert lon.units == 'degrees_east' + assert cube.coords("longitude", dim_coords=True) + lon = cube.coord("longitude", dim_coords=True) + assert lon.var_name == "lon" + assert lon.standard_name == "longitude" + assert lon.long_name == "longitude" + assert lon.units == "degrees_east" np.testing.assert_allclose( lon.points, [0.0, 45.0, 90.0, 135.0, 180.0, 225.0, 270.0, 315.0], ) np.testing.assert_allclose( lon.bounds, - [[-22.5, 22.5], [22.5, 67.5], [67.5, 112.5], [112.5, 157.5], - [157.5, 202.5], [202.5, 247.5], [247.5, 292.5], [292.5, 337.5]], + [ + [-22.5, 22.5], + [22.5, 67.5], + [67.5, 112.5], + [112.5, 157.5], + [157.5, 202.5], + [202.5, 247.5], + [247.5, 292.5], + [292.5, 337.5], + ], ) assert lon.attributes == {} def check_heightxm(cube, height_value): """Check scalar heightxm coordinate of cube.""" - assert cube.coords('height') - height = cube.coord('height') - assert height.var_name == 'height' - assert height.standard_name == 'height' - assert height.long_name == 'height' - assert height.units == 'm' - assert height.attributes == {'positive': 'up'} + assert cube.coords("height") + height = cube.coord("height") + assert height.var_name == "height" + assert height.standard_name == "height" + assert height.long_name == "height" + assert height.units == "m" + assert height.attributes == {"positive": "up"} np.testing.assert_allclose(height.points, [height_value]) assert height.bounds is None @@ -165,23 +193,34 @@ def check_heightxm(cube, height_value): def test_only_time(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'mon', 'tas') + fix = get_allvars_fix("Amon", "mon", "tas") # We know that tas has dimensions time, latitude, longitude, but the CESM2 # CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of tas to create # an artificial, but realistic test case. 
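
The time bounds asserted in this test follow iris's midpoint bounds guessing; a minimal sketch (plain iris, not part of this patch) of how points [0.0, 1.0] produce the bounds checked just below:

    # Minimal sketch (not ESMValCore code): iris places bounds halfway
    # between neighbouring points, so points [0.0, 1.0] yield the bounds
    # [[-0.5, 0.5], [0.5, 1.5]] asserted in this test.
    from iris.coords import DimCoord

    time = DimCoord(
        [0.0, 1.0], standard_name="time", units="days since 1850-01-01"
    )
    time.guess_bounds()
    print(time.bounds)  # [[-0.5  0.5]
                        #  [ 0.5  1.5]]
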
- coord_info = CoordinateInfo('time') - coord_info.standard_name = 'time' - monkeypatch.setattr(fix.vardef, 'coordinates', {'time': coord_info}) + coord_info = CoordinateInfo("time") + coord_info.standard_name = "time" + monkeypatch.setattr(fix.vardef, "coordinates", {"time": coord_info}) # Create cube with only a single dimension - time_coord = DimCoord([0.0, 1.0], var_name='time', standard_name='time', - long_name='time', units='days since 1850-01-01') - cubes = CubeList([ - Cube([1, 1], var_name='TREFHT', units='K', - dim_coords_and_dims=[(time_coord, 0)]), - ]) + time_coord = DimCoord( + [0.0, 1.0], + var_name="time", + standard_name="time", + long_name="time", + units="days since 1850-01-01", + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="TREFHT", + units="K", + dim_coords_and_dims=[(time_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -192,38 +231,46 @@ def test_only_time(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check time metadata - assert cube.coords('time') - new_time_coord = cube.coord('time', dim_coords=True) - assert new_time_coord.var_name == 'time' - assert new_time_coord.standard_name == 'time' - assert new_time_coord.long_name == 'time' - assert new_time_coord.units == 'days since 1850-01-01' + assert cube.coords("time") + new_time_coord = cube.coord("time", dim_coords=True) + assert new_time_coord.var_name == "time" + assert new_time_coord.standard_name == "time" + assert new_time_coord.long_name == "time" + assert new_time_coord.units == "days since 1850-01-01" # Check time data np.testing.assert_allclose(new_time_coord.points, [0.0, 1.0]) - np.testing.assert_allclose(new_time_coord.bounds, - [[-0.5, 0.5], [0.5, 1.5]]) + np.testing.assert_allclose( + new_time_coord.bounds, [[-0.5, 0.5], [0.5, 1.5]] + ) def test_only_latitude(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'mon', 'tas') + fix = get_allvars_fix("Amon", "mon", "tas") # We know that tas has dimensions time, latitude, longitude, but the CESM2 # CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of tas to create # an artificial, but realistic test case. 
- coord_info = CoordinateInfo('latitude') - coord_info.standard_name = 'latitude' - monkeypatch.setattr(fix.vardef, 'coordinates', {'latitude': coord_info}) + coord_info = CoordinateInfo("latitude") + coord_info.standard_name = "latitude" + monkeypatch.setattr(fix.vardef, "coordinates", {"latitude": coord_info}) # Create cube with only a single dimension - lat_coord = DimCoord([0.0, 10.0], var_name='lat', standard_name='latitude', - units='degrees') - cubes = CubeList([ - Cube([1, 1], var_name='TREFHT', units='K', - dim_coords_and_dims=[(lat_coord, 0)]), - ]) + lat_coord = DimCoord( + [0.0, 10.0], var_name="lat", standard_name="latitude", units="degrees" + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="TREFHT", + units="K", + dim_coords_and_dims=[(lat_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -234,38 +281,49 @@ def test_only_latitude(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check latitude metadata - assert cube.coords('latitude', dim_coords=True) - new_lat_coord = cube.coord('latitude') - assert new_lat_coord.var_name == 'lat' - assert new_lat_coord.standard_name == 'latitude' - assert new_lat_coord.long_name == 'latitude' - assert new_lat_coord.units == 'degrees_north' + assert cube.coords("latitude", dim_coords=True) + new_lat_coord = cube.coord("latitude") + assert new_lat_coord.var_name == "lat" + assert new_lat_coord.standard_name == "latitude" + assert new_lat_coord.long_name == "latitude" + assert new_lat_coord.units == "degrees_north" # Check latitude data np.testing.assert_allclose(new_lat_coord.points, [0.0, 10.0]) - np.testing.assert_allclose(new_lat_coord.bounds, - [[-5.0, 5.0], [5.0, 15.0]]) + np.testing.assert_allclose( + new_lat_coord.bounds, [[-5.0, 5.0], [5.0, 15.0]] + ) def test_only_longitude(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'mon', 'tas') + fix = get_allvars_fix("Amon", "mon", "tas") # We know that tas has dimensions time, latitude, longitude, but the CESM2 # CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of tas to create # an artificial, but realistic test case. 
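
All three single-dimension tests exercise the same design. Below is a hedged sketch of that per-dimension pattern; fix_declared_coords is a hypothetical helper, not the actual AllVars implementation, and it assumes CoordinateInfo exposes standard_name and out_name as in the CMOR tables:

    # Hypothetical sketch of the per-dimension check described above: only
    # coordinates declared in the variable definition are touched, and each
    # one is fixed only if it is actually present on the cube, so a cube
    # with a single dimension remains a valid input.
    def fix_declared_coords(cube, vardef):
        for coord_info in vardef.coordinates.values():
            if not cube.coords(coord_info.standard_name):
                continue  # dimension absent from this cube; skip it
            coord = cube.coord(coord_info.standard_name)
            coord.var_name = coord_info.out_name
            coord.standard_name = coord_info.standard_name
        return cube
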
- coord_info = CoordinateInfo('longitude') - coord_info.standard_name = 'longitude' - monkeypatch.setattr(fix.vardef, 'coordinates', {'longitude': coord_info}) + coord_info = CoordinateInfo("longitude") + coord_info.standard_name = "longitude" + monkeypatch.setattr(fix.vardef, "coordinates", {"longitude": coord_info}) # Create cube with only a single dimension - lon_coord = DimCoord([0.0, 180.0], var_name='lon', - standard_name='longitude', units='degrees') - cubes = CubeList([ - Cube([1, 1], var_name='TREFHT', units='K', - dim_coords_and_dims=[(lon_coord, 0)]), - ]) + lon_coord = DimCoord( + [0.0, 180.0], + var_name="lon", + standard_name="longitude", + units="degrees", + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="TREFHT", + units="K", + dim_coords_and_dims=[(lon_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -276,17 +334,18 @@ def test_only_longitude(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check longitude metadata - assert cube.coords('longitude', dim_coords=True) - new_lon_coord = cube.coord('longitude') - assert new_lon_coord.var_name == 'lon' - assert new_lon_coord.standard_name == 'longitude' - assert new_lon_coord.long_name == 'longitude' - assert new_lon_coord.units == 'degrees_east' + assert cube.coords("longitude", dim_coords=True) + new_lon_coord = cube.coord("longitude") + assert new_lon_coord.var_name == "lon" + assert new_lon_coord.standard_name == "longitude" + assert new_lon_coord.long_name == "longitude" + assert new_lon_coord.units == "degrees_east" # Check longitude data np.testing.assert_allclose(new_lon_coord.points, [0.0, 180.0]) - np.testing.assert_allclose(new_lon_coord.bounds, - [[-90.0, 90.0], [90.0, 270.0]]) + np.testing.assert_allclose( + new_lon_coord.bounds, [[-90.0, 90.0], [90.0, 270.0]] + ) # Test AllVars._fix_time @@ -294,9 +353,9 @@ def test_only_longitude(monkeypatch): def test_fix_time_mon(cube_1d_time): """Test `_fix_time``.""" - fix = get_allvars_fix('Amon', 'mon', 'tas') + fix = get_allvars_fix("Amon", "mon", "tas") fix._fix_time(cube_1d_time) - time_coord = cube_1d_time.coord('time') + time_coord = cube_1d_time.coord("time") np.testing.assert_array_equal(time_coord.points, [1, 3, 5]) np.testing.assert_array_equal(time_coord.bounds, [[0, 2], [2, 4], [4, 6]]) @@ -304,29 +363,29 @@ def test_fix_time_mon(cube_1d_time): def test_fix_time2_mon(cube_1d_time): """Test `_fix_time``.""" # ch4Clim has dimensions [longitude, latitude, plev19, time2] - fix = get_allvars_fix('Amon', 'mon', 'ch4Clim') + fix = get_allvars_fix("Amon", "mon", "ch4Clim") fix._fix_time(cube_1d_time) - time_coord = cube_1d_time.coord('time') + time_coord = cube_1d_time.coord("time") np.testing.assert_array_equal(time_coord.points, [1, 3, 5]) np.testing.assert_array_equal(time_coord.bounds, [[0, 2], [2, 4], [4, 6]]) def test_fix_time_mon_point(cube_1d_time): """Test `_fix_time``.""" - cube_1d_time.add_cell_method(CellMethod('point', 'time')) - fix = get_allvars_fix('Amon', 'mon', 'tas') + cube_1d_time.add_cell_method(CellMethod("point", "time")) + fix = get_allvars_fix("Amon", "mon", "tas") fix._fix_time(cube_1d_time) - time_coord = cube_1d_time.coord('time') + time_coord = cube_1d_time.coord("time") np.testing.assert_array_equal(time_coord.points, [2, 4, 6]) np.testing.assert_array_equal(time_coord.bounds, [[0, 2], [2, 4], [4, 6]]) def test_fix_time_day(monkeypatch, cube_1d_time): """Test `_fix_time``.""" - fix = get_allvars_fix('Amon', 'mon', 'tas') - monkeypatch.setitem(fix.extra_facets, 'frequency', 'day') + fix 
= get_allvars_fix("Amon", "mon", "tas") + monkeypatch.setitem(fix.extra_facets, "frequency", "day") fix._fix_time(cube_1d_time) - time_coord = cube_1d_time.coord('time') + time_coord = cube_1d_time.coord("time") np.testing.assert_array_equal(time_coord.points, [2, 4, 6]) np.testing.assert_array_equal(time_coord.bounds, [[0, 2], [2, 4], [4, 6]]) @@ -336,7 +395,7 @@ def test_fix_time_day(monkeypatch, cube_1d_time): def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CESM', 'CESM2', 'Amon', 'tas') + fix = Fix.get_fixes("CESM", "CESM2", "Amon", "tas") assert fix == [ esmvalcore.cmor._fixes.cesm.cesm2.AllVars(None), GenericFix(None), @@ -345,7 +404,7 @@ def test_get_tas_fix(): def test_tas_fix(cubes_2d): """Test fix.""" - fix = get_allvars_fix('Amon', 'mon', 'tas') + fix = get_allvars_fix("Amon", "mon", "tas") fixed_cubes = fix.fix_metadata(cubes_2d) fixed_cube = check_tas_metadata(fixed_cubes) @@ -363,19 +422,19 @@ def test_tas_fix(cubes_2d): def test_fix_invalid_units(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'mon', 'tas') + fix = get_allvars_fix("Amon", "mon", "tas") # We know that tas has units 'K', but to check if the invalid units # 'fraction' are correctly handled, we change tas' units to '1'. This is an # artificial, but realistic test case. - monkeypatch.setattr(fix.vardef, 'units', '1') - cube = Cube(1.0, attributes={'invalid_units': 'fraction'}) + monkeypatch.setattr(fix.vardef, "units", "1") + cube = Cube(1.0, attributes={"invalid_units": "fraction"}) fix.fix_var_metadata(cube) - assert cube.var_name == 'tas' - assert cube.standard_name == 'air_temperature' - assert cube.long_name == 'Near-Surface Air Temperature' - assert cube.units == '1' - assert 'positive' not in cube.attributes + assert cube.var_name == "tas" + assert cube.standard_name == "air_temperature" + assert cube.long_name == "Near-Surface Air Temperature" + assert cube.units == "1" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, 1.0) diff --git a/tests/integration/cmor/_fixes/cmip5/test_access1_0.py b/tests/integration/cmor/_fixes/cmip5/test_access1_0.py index 3cba189b7a..126685067b 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_access1_0.py +++ b/tests/integration/cmor/_fixes/cmip5/test_access1_0.py @@ -1,4 +1,5 @@ """Test Access1-0 fixes.""" + import unittest from datetime import datetime @@ -16,18 +17,18 @@ @pytest.fixture def cube(): """Cube for testing.""" - test_cube = Cube([1.0, 2.0], var_name='co2', units='J') + test_cube = Cube([1.0, 2.0], var_name="co2", units="J") reference_dates = [ datetime(300, 1, 16, 12), # e.g. piControl - datetime(1850, 1, 16, 12) # e.g. historical + datetime(1850, 1, 16, 12), # e.g. 
historical ] esgf_time_units = Unit( - 'days since 0001-01-01', - calendar='proleptic_gregorian', + "days since 0001-01-01", + calendar="proleptic_gregorian", ) time_points = date2num(reference_dates, esgf_time_units) test_cube.add_dim_coord( - DimCoord(time_points, 'time', 'time', 'time', esgf_time_units), + DimCoord(time_points, "time", "time", "time", esgf_time_units), data_dim=0, ) return test_cube @@ -39,8 +40,9 @@ class TestAllVars: @staticmethod def test_get(): """Test getting of fix.""" - assert Fix.get_fixes('CMIP5', 'ACCESS1-0', 'Amon', 'tas') == [ - AllVars(None), GenericFix(None) + assert Fix.get_fixes("CMIP5", "ACCESS1-0", "Amon", "tas") == [ + AllVars(None), + GenericFix(None), ] @staticmethod @@ -48,25 +50,25 @@ def test_fix_metadata(cube): """Test fix for bad calendar.""" fix = AllVars(None) cube = fix.fix_metadata([cube])[0] - time = cube.coord('time') + time = cube.coord("time") dates = num2date(time.points, time.units.name, time.units.calendar) - assert time.units.calendar in ('standard', 'gregorian') - u = Unit('days since 300-01-01 12:00:00', calendar='standard') + assert time.units.calendar in ("standard", "gregorian") + u = Unit("days since 300-01-01 12:00:00", calendar="standard") assert dates[0] == u.num2date(15) - u = Unit('days since 1850-01-01 12:00:00', calendar='standard') + u = Unit("days since 1850-01-01 12:00:00", calendar="standard") assert dates[1] == u.num2date(15) @staticmethod def test_fix_metadata_if_not_time(cube): """Test calendar fix do not fail if no time coord present.""" - cube.remove_coord('time') + cube.remove_coord("time") fix = AllVars(None) fix.fix_metadata([cube]) def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'ACCESS1-0', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "ACCESS1-0", "Amon", "cl") assert fix == [Cl(None), AllVars(None), GenericFix(None)] @@ -75,37 +77,40 @@ def cl_cubes(): """Cubes for ``cl.``.""" b_coord = AuxCoord( [1.0], - var_name='b', - long_name='vertical coordinate formula term: b(k)', - attributes={'a': 1, 'b': '2'}, + var_name="b", + long_name="vertical coordinate formula term: b(k)", + attributes={"a": 1, "b": "2"}, ) cl_cube = Cube( [0.0], - var_name='cl', - standard_name='cloud_area_fraction_in_atmosphere_layer', + var_name="cl", + standard_name="cloud_area_fraction_in_atmosphere_layer", aux_coords_and_dims=[(b_coord.copy(), 0)], ) - x_cube = Cube([0.0], - long_name='x', - aux_coords_and_dims=[(b_coord.copy(), 0)]) + x_cube = Cube( + [0.0], long_name="x", aux_coords_and_dims=[(b_coord.copy(), 0)] + ) cubes = CubeList([cl_cube, x_cube]) return cubes @unittest.mock.patch( - 'esmvalcore.cmor._fixes.cmip5.access1_0.ClFixHybridHeightCoord.' - 'fix_metadata', autospec=True) + "esmvalcore.cmor._fixes.cmip5.access1_0.ClFixHybridHeightCoord." 
+ "fix_metadata", + autospec=True, +) def test_cl_fix_metadata(mock_base_fix_metadata, cl_cubes): """Test ``fix_metadata`` for ``cl``.""" mock_base_fix_metadata.return_value = cl_cubes - fix = Cl(get_var_info('CMIP5', 'Amon', 'cl')) + fix = Cl(get_var_info("CMIP5", "Amon", "cl")) fixed_cubes = fix.fix_metadata(cl_cubes) mock_base_fix_metadata.assert_called_once_with(fix, cl_cubes) assert len(fixed_cubes) == 2 cl_cube = fixed_cubes.extract_cube( - 'cloud_area_fraction_in_atmosphere_layer') - b_coord_cl = cl_cube.coord('vertical coordinate formula term: b(k)') + "cloud_area_fraction_in_atmosphere_layer" + ) + b_coord_cl = cl_cube.coord("vertical coordinate formula term: b(k)") assert not b_coord_cl.attributes - x_cube = fixed_cubes.extract_cube('x') - b_coord_x = x_cube.coord('vertical coordinate formula term: b(k)') - assert b_coord_x.attributes == {'a': 1, 'b': '2'} + x_cube = fixed_cubes.extract_cube("x") + b_coord_x = x_cube.coord("vertical coordinate formula term: b(k)") + assert b_coord_x.attributes == {"a": 1, "b": "2"} diff --git a/tests/integration/cmor/_fixes/cmip5/test_access1_3.py b/tests/integration/cmor/_fixes/cmip5/test_access1_3.py index 684e88f65f..10b1b29570 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_access1_3.py +++ b/tests/integration/cmor/_fixes/cmip5/test_access1_3.py @@ -1,4 +1,5 @@ """Test fixes for ACCESS1-3.""" + from datetime import datetime import pytest @@ -15,18 +16,18 @@ @pytest.fixture def cube(): """Cube for testing.""" - test_cube = Cube([1.0, 2.0], var_name='co2', units='J') + test_cube = Cube([1.0, 2.0], var_name="co2", units="J") reference_dates = [ datetime(300, 1, 16, 12), # e.g. piControl - datetime(1850, 1, 16, 12) # e.g. historical + datetime(1850, 1, 16, 12), # e.g. historical ] esgf_time_units = Unit( - 'days since 0001-01-01', - calendar='proleptic_gregorian', + "days since 0001-01-01", + calendar="proleptic_gregorian", ) time_points = date2num(reference_dates, esgf_time_units) test_cube.add_dim_coord( - DimCoord(time_points, 'time', 'time', 'time', esgf_time_units), + DimCoord(time_points, "time", "time", "time", esgf_time_units), data_dim=0, ) return test_cube @@ -38,8 +39,9 @@ class TestAllVars: @staticmethod def test_get(): """Test getting of fix.""" - assert Fix.get_fixes('CMIP5', 'ACCESS1-3', 'Amon', 'tas') == [ - AllVars(None), GenericFix(None) + assert Fix.get_fixes("CMIP5", "ACCESS1-3", "Amon", "tas") == [ + AllVars(None), + GenericFix(None), ] @staticmethod @@ -47,25 +49,25 @@ def test_fix_metadata(cube): """Test fix for bad calendar.""" fix = AllVars(None) cube = fix.fix_metadata([cube])[0] - time = cube.coord('time') + time = cube.coord("time") dates = num2date(time.points, time.units.name, time.units.calendar) - assert time.units.calendar in ('standard', 'gregorian') - u = Unit('days since 300-01-01 12:00:00', calendar='standard') + assert time.units.calendar in ("standard", "gregorian") + u = Unit("days since 300-01-01 12:00:00", calendar="standard") assert dates[0] == u.num2date(15) - u = Unit('days since 1850-01-01 12:00:00', calendar='standard') + u = Unit("days since 1850-01-01 12:00:00", calendar="standard") assert dates[1] == u.num2date(15) @staticmethod def test_fix_metadata_if_not_time(cube): """Test calendar fix do not fail if no time coord present.""" - cube.remove_coord('time') + cube.remove_coord("time") fix = AllVars(None) fix.fix_metadata([cube]) def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'ACCESS1-3', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "ACCESS1-3", "Amon", "cl") 
assert fix == [Cl(None), AllVars(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_bcc_csm1_1.py b/tests/integration/cmor/_fixes/cmip5/test_bcc_csm1_1.py index 65f37766e0..0aff384807 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_bcc_csm1_1.py +++ b/tests/integration/cmor/_fixes/cmip5/test_bcc_csm1_1.py @@ -15,7 +15,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'bcc-csm1-1', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "bcc-csm1-1", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -26,7 +26,7 @@ def test_cl_fix(): def test_get_tos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'bcc-csm1-1', 'Omon', 'tos') + fix = Fix.get_fixes("CMIP5", "bcc-csm1-1", "Omon", "tos") assert fix == [Tos(None), GenericFix(None)] @@ -40,89 +40,106 @@ def test_tos_fix_metadata(): grid_lat = iris.coords.DimCoord( [20.0, 40.0], bounds=[[10.0, 30.0], [30.0, 50.0]], - var_name='rlat', - standard_name='grid_latitude', + var_name="rlat", + standard_name="grid_latitude", ) grid_lon = iris.coords.DimCoord( [10.0, 20.0, 30.0], bounds=[[5.0, 15.0], [15.0, 25.0], [25.0, 35.0]], - var_name='rlon', - standard_name='grid_longitude', + var_name="rlon", + standard_name="grid_longitude", ) latitude = iris.coords.AuxCoord( [[-40.0, -20.0, 0.0], [-20.0, 0.0, 20.0]], - var_name='lat', - standard_name='latitude', - units='degrees_north', + var_name="lat", + standard_name="latitude", + units="degrees_north", ) longitude = iris.coords.AuxCoord( [[100.0, 140.0, 180.0], [80.0, 100.0, 120.0]], - var_name='lon', - standard_name='longitude', - units='degrees_east', + var_name="lon", + standard_name="longitude", + units="degrees_east", ) time_coord = iris.coords.DimCoord( 1.0, bounds=[0.0, 2.0], - var_name='time', - standard_name='time', - long_name='time', - units='days since 1950-01-01', + var_name="time", + standard_name="time", + long_name="time", + units="days since 1950-01-01", ) # Create cube without bounds cube = iris.cube.Cube( np.full((1, 2, 3), 300.0), - var_name='tos', - standard_name='sea_surface_temperature', - units='K', + var_name="tos", + standard_name="sea_surface_temperature", + units="K", dim_coords_and_dims=[(time_coord, 0), (grid_lat, 1), (grid_lon, 2)], aux_coords_and_dims=[(latitude, (1, 2)), (longitude, (1, 2))], ) - assert cube.coord('latitude').bounds is None - assert cube.coord('longitude').bounds is None + assert cube.coord("latitude").bounds is None + assert cube.coord("longitude").bounds is None # Apply fix - vardef = get_var_info('CMIP6', 'Omon', 'tos') + vardef = get_var_info("CMIP6", "Omon", "tos") fix = Tos(vardef) cubes = iris.cube.CubeList([cube]) fixed_cubes = fix.fix_metadata(cubes) assert len(fixed_cubes) == 1 - fixed_cube = fixed_cubes.extract_cube('sea_surface_temperature') + fixed_cube = fixed_cubes.extract_cube("sea_surface_temperature") assert fixed_cube is cube - i_coord = fixed_cube.coord('cell index along first dimension') - j_coord = fixed_cube.coord('cell index along second dimension') - assert i_coord.var_name == 'i' + i_coord = fixed_cube.coord("cell index along first dimension") + j_coord = fixed_cube.coord("cell index along second dimension") + assert i_coord.var_name == "i" assert i_coord.standard_name is None - assert i_coord.long_name == 'cell index along first dimension' - assert i_coord.units == '1' + assert i_coord.long_name == "cell index along first dimension" + assert i_coord.units == "1" assert i_coord.circular is False - assert j_coord.var_name == 'j' + assert 
j_coord.var_name == "j" assert j_coord.standard_name is None - assert j_coord.long_name == 'cell index along second dimension' - assert j_coord.units == '1' + assert j_coord.long_name == "cell index along second dimension" + assert j_coord.units == "1" np.testing.assert_allclose(i_coord.points, [0, 1, 2]) - np.testing.assert_allclose(i_coord.bounds, - [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]]) + np.testing.assert_allclose( + i_coord.bounds, [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]] + ) np.testing.assert_allclose(j_coord.points, [0, 1]) np.testing.assert_allclose(j_coord.bounds, [[-0.5, 0.5], [0.5, 1.5]]) - assert fixed_cube.coord('latitude').bounds is not None - assert fixed_cube.coord('longitude').bounds is not None + assert fixed_cube.coord("latitude").bounds is not None + assert fixed_cube.coord("longitude").bounds is not None latitude_bounds = np.array( - [[[-43.48076211, -34.01923789, -22.00961894, -31.47114317], - [-34.01923789, -10.0, 2.00961894, -22.00961894], - [-10.0, -0.53847577, 11.47114317, 2.00961894]], - [[-31.47114317, -22.00961894, -10.0, -19.46152423], - [-22.00961894, 2.00961894, 14.01923789, -10.0], - [2.00961894, 11.47114317, 23.48076211, 14.01923789]]] + [ + [ + [-43.48076211, -34.01923789, -22.00961894, -31.47114317], + [-34.01923789, -10.0, 2.00961894, -22.00961894], + [-10.0, -0.53847577, 11.47114317, 2.00961894], + ], + [ + [-31.47114317, -22.00961894, -10.0, -19.46152423], + [-22.00961894, 2.00961894, 14.01923789, -10.0], + [2.00961894, 11.47114317, 23.48076211, 14.01923789], + ], + ] + ) + np.testing.assert_allclose( + fixed_cube.coord("latitude").bounds, latitude_bounds + ) + longitude_bounds = np.array( + [ + [ + [140.625, 99.375, 99.375, 140.625], + [99.375, 140.625, 140.625, 99.375], + [140.625, 99.375, 99.375, 140.625], + ], + [ + [140.625, 99.375, 99.375, 140.625], + [99.375, 140.625, 140.625, 99.375], + [140.625, 99.375, 99.375, 140.625], + ], + ] + ) + np.testing.assert_allclose( + fixed_cube.coord("longitude").bounds, longitude_bounds ) - np.testing.assert_allclose(fixed_cube.coord('latitude').bounds, - latitude_bounds) - longitude_bounds = np.array([[[140.625, 99.375, 99.375, 140.625], - [99.375, 140.625, 140.625, 99.375], - [140.625, 99.375, 99.375, 140.625]], - [[140.625, 99.375, 99.375, 140.625], - [99.375, 140.625, 140.625, 99.375], - [140.625, 99.375, 99.375, 140.625]]]) - np.testing.assert_allclose(fixed_cube.coord('longitude').bounds, - longitude_bounds) diff --git a/tests/integration/cmor/_fixes/cmip5/test_bcc_csm1_1_m.py b/tests/integration/cmor/_fixes/cmip5/test_bcc_csm1_1_m.py index 8fe017fe9b..7736db4c59 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_bcc_csm1_1_m.py +++ b/tests/integration/cmor/_fixes/cmip5/test_bcc_csm1_1_m.py @@ -10,7 +10,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'bcc-csm1-1-m', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "bcc-csm1-1-m", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -21,7 +21,7 @@ def test_cl_fix(): def test_get_tos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'bcc-csm1-1-m', 'Omon', 'tos') + fix = Fix.get_fixes("CMIP5", "bcc-csm1-1-m", "Omon", "tos") assert fix == [Tos(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_bnu_esm.py b/tests/integration/cmor/_fixes/cmip5/test_bnu_esm.py index d988e02441..a48dd9860b 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_bnu_esm.py +++ b/tests/integration/cmor/_fixes/cmip5/test_bnu_esm.py @@ -1,4 +1,5 @@ """Test fixes for BNU-ESM.""" + import unittest 
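
The magic factors asserted in the BNU-ESM fixes below encode simple molar-mass conversions; a worked sketch of the co2 case, with approximate constants and a hypothetical helper name:

    # Worked sketch of the co2 factor asserted below: converting a mass
    # mixing ratio to a mole fraction in units of 1e-6 (ppmv) multiplies
    # by the molar-mass ratio of dry air (~29 g/mol) to CO2 (44 g/mol),
    # then by 1e6.
    M_AIR = 29.0  # approximate molar mass of dry air [g/mol]
    M_CO2 = 44.0  # molar mass of CO2 [g/mol]

    def mass_ratio_to_ppmv(mass_ratio):
        """Hypothetical helper mirroring the 29.0 / 44.0 * 1.0e6 factor."""
        return mass_ratio * M_AIR / M_CO2 * 1.0e6

    assert mass_ratio_to_ppmv(1.0) == 29.0 / 44.0 * 1.0e6
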
import numpy.ma as ma @@ -21,14 +22,15 @@ class TestCl(unittest.TestCase): """Test cl fix.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='cl', units='%') + self.cube = Cube([1.0], var_name="cl", units="%") self.fix = Cl(None) def test_get(self): """Test fix get""" - fix = Fix.get_fixes('CMIP5', 'BNU-ESM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "BNU-ESM", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] def test_cl_fix(self): @@ -46,26 +48,28 @@ class TestCo2(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='co2', units='J') - self.vardef = get_var_info('CMIP5', 'Amon', self.cube.var_name) + self.cube = Cube([1.0], var_name="co2", units="J") + self.vardef = get_var_info("CMIP5", "Amon", self.cube.var_name) self.fix = Co2(self.vardef) def test_get(self): """Test fix get.""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'BNU-ESM', 'Amon', 'co2'), - [Co2(self.vardef), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "BNU-ESM", "Amon", "co2"), + [Co2(self.vardef), GenericFix(None)], + ) def test_fix_metadata(self): """Test unit change.""" cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.units, Unit('1e-6')) + self.assertEqual(cube.units, Unit("1e-6")) self.assertEqual(cube.data[0], 1.0) def test_fix_data(self): """Test fix values.""" cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 29.0 / 44.0 * 1.e6) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.data[0], 29.0 / 44.0 * 1.0e6) + self.assertEqual(cube.units, Unit("J")) class Testfgco2(unittest.TestCase): @@ -73,27 +77,28 @@ class Testfgco2(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='fgco2', units='J') - self.vardef = get_var_info('CMIP5', 'Omon', self.cube.var_name) + self.cube = Cube([1.0], var_name="fgco2", units="J") + self.vardef = get_var_info("CMIP5", "Omon", self.cube.var_name) self.fix = FgCo2(self.vardef) def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'BNU-ESM', 'Amon', 'fgco2'), - [FgCo2(self.vardef), GenericFix(None)]) + Fix.get_fixes("CMIP5", "BNU-ESM", "Amon", "fgco2"), + [FgCo2(self.vardef), GenericFix(None)], + ) def test_fix_metadata(self): """Test unit fix.""" cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.units, Unit('kg m-2 s-1')) + self.assertEqual(cube.units, Unit("kg m-2 s-1")) self.assertEqual(cube.data[0], 1) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 12.0 / 44.0) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestCh4(unittest.TestCase): @@ -101,26 +106,28 @@ class TestCh4(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='ch4', units='J') - self.vardef = get_var_info('CMIP5', 'Amon', self.cube.var_name) + self.cube = Cube([1.0], var_name="ch4", units="J") + self.vardef = get_var_info("CMIP5", "Amon", self.cube.var_name) self.fix = Ch4(self.vardef) def test_get(self): """Test fix get.""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'BNU-ESM', 'Amon', 'ch4'), - [Ch4(self.vardef), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "BNU-ESM", "Amon", "ch4"), + [Ch4(self.vardef), GenericFix(None)], + ) def test_fix_metadata(self): """Test unit fix.""" cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.units, Unit('1e-9')) + self.assertEqual(cube.units, 
Unit("1e-9")) self.assertEqual(cube.data[0], 1) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 29.0 / 16.0 * 1.e9) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.data[0], 29.0 / 16.0 * 1.0e9) + self.assertEqual(cube.units, Unit("J")) class Testspco2(unittest.TestCase): @@ -128,26 +135,27 @@ class Testspco2(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='spco2', units='J') + self.cube = Cube([1.0], var_name="spco2", units="J") self.fix = SpCo2(None) def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'BNU-ESM', 'Amon', 'spco2'), - [SpCo2(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "BNU-ESM", "Amon", "spco2"), + [SpCo2(None), GenericFix(None)], + ) def test_fix_metadata(self): """Test fix.""" cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) self.assertEqual(cube.data[0], 1) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1.e6) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.data[0], 1.0e6) + self.assertEqual(cube.units, Unit("J")) class TestOd550Aer(unittest.TestCase): @@ -156,16 +164,17 @@ class TestOd550Aer(unittest.TestCase): def setUp(self): """Prepare tests.""" self.cube = Cube( - ma.MaskedArray([1.e36], mask=(False, )), - var_name='od550aer', + ma.MaskedArray([1.0e36], mask=(False,)), + var_name="od550aer", ) self.fix = Od550Aer(None) def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'BNU-ESM', 'Amon', 'od550aer'), - [Od550Aer(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "BNU-ESM", "Amon", "od550aer"), + [Od550Aer(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" diff --git a/tests/integration/cmor/_fixes/cmip5/test_canesm2.py b/tests/integration/cmor/_fixes/cmip5/test_canesm2.py index 4e8f9c826d..27e239b9dc 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_canesm2.py +++ b/tests/integration/cmor/_fixes/cmip5/test_canesm2.py @@ -1,4 +1,5 @@ """Test CanESM2 fixes.""" + import unittest from cf_units import Unit @@ -12,7 +13,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'CanESM2', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "CanESM2", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -26,17 +27,18 @@ class TestCanESM2Fgco2(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='fgco2', units='J') + self.cube = Cube([1.0], var_name="fgco2", units="J") self.fix = FgCo2(None) def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'CANESM2', 'Amon', 'fgco2'), - [FgCo2(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "CANESM2", "Amon", "fgco2"), + [FgCo2(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 12.0 / 44.0) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) diff --git a/tests/integration/cmor/_fixes/cmip5/test_ccsm4.py b/tests/integration/cmor/_fixes/cmip5/test_ccsm4.py index 05bb6620a0..f800820627 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_ccsm4.py +++ b/tests/integration/cmor/_fixes/cmip5/test_ccsm4.py @@ -1,4 +1,5 @@ """Test fixes for CCSM4.""" + import unittest import numpy as np @@ -14,7 +15,7 @@ def test_get_cl_fix(): 
"""Test getting of fix.""" - fixes = Fix.get_fixes('CMIP5', 'CCSM4', 'Amon', 'cl') + fixes = Fix.get_fixes("CMIP5", "CCSM4", "Amon", "cl") assert Cl(None) in fixes @@ -28,12 +29,12 @@ class TestCsoil(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0, 1.e33], var_name='cSoil', units='kg m-2') + self.cube = Cube([1.0, 1.0e33], var_name="cSoil", units="kg m-2") self.fix = Csoil(None) def test_get(self): """Test fix get.""" - assert Csoil(None) in Fix.get_fixes('CMIP5', 'CCSM4', 'Lmon', 'cSoil') + assert Csoil(None) in Fix.get_fixes("CMIP5", "CCSM4", "Lmon", "cSoil") def test_fix_data(self): """Test data fix.""" @@ -47,30 +48,39 @@ class TestsAllVars(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0, 2.0], var_name='rlut') + self.cube = Cube([1.0, 2.0], var_name="rlut") self.cube.add_dim_coord( - DimCoord([0.50001, 1.499999], - standard_name='latitude', - bounds=[ - [0.00001, 0.999999], - [1.00001, 1.999999], - ]), 0) + DimCoord( + [0.50001, 1.499999], + standard_name="latitude", + bounds=[ + [0.00001, 0.999999], + [1.00001, 1.999999], + ], + ), + 0, + ) self.fix = AllVars(None) def test_get(self): """Test fix get.""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'CCSM4', 'Amon', 'rlut'), - [AllVars(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "CCSM4", "Amon", "rlut"), + [AllVars(None), GenericFix(None)], + ) def test_fix_metadata(self): """Check that latitudes values are rounded.""" cube = self.fix.fix_metadata([self.cube])[0] - latitude = cube.coord('latitude') + latitude = cube.coord("latitude") self.assertTrue(np.all(latitude.points == np.array([0.5000, 1.5000]))) self.assertTrue( - np.all(latitude.bounds == np.array([[0.0000, 1.0000], - [1.0000, 2.0000]]))) + np.all( + latitude.bounds + == np.array([[0.0000, 1.0000], [1.0000, 2.0000]]) + ) + ) class TestSo(unittest.TestCase): @@ -78,15 +88,15 @@ class TestSo(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0, 2.0], var_name='so', units='1.0') - self.vardef = get_var_info('CMIP5', 'Omon', self.cube.var_name) + self.cube = Cube([1.0, 2.0], var_name="so", units="1.0") + self.vardef = get_var_info("CMIP5", "Omon", self.cube.var_name) self.fix = So(self.vardef) def test_get(self): """Test fix get.""" - assert So(self.vardef) in Fix.get_fixes('CMIP5', 'CCSM4', 'Amon', 'so') + assert So(self.vardef) in Fix.get_fixes("CMIP5", "CCSM4", "Amon", "so") def test_fix_metadata(self): """Checks that units are changed to the correct value.""" cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual('1e3', cube.units.origin) + self.assertEqual("1e3", cube.units.origin) diff --git a/tests/integration/cmor/_fixes/cmip5/test_cesm1_bgc.py b/tests/integration/cmor/_fixes/cmip5/test_cesm1_bgc.py index fdb2314d80..4151bf1a92 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_cesm1_bgc.py +++ b/tests/integration/cmor/_fixes/cmip5/test_cesm1_bgc.py @@ -1,4 +1,5 @@ """Tests for CESM1-BGC fixes.""" + import unittest import numpy as np @@ -12,7 +13,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'CESM1-BGC', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "CESM1-BGC", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -32,8 +33,9 @@ def setUp(self): def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'CESM1-BGC', 'Amon', 'gpp'), - [Gpp(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "CESM1-BGC", "Amon", "gpp"), + [Gpp(None), GenericFix(None)], + ) def 
test_fix_data(self): """Test fix to set missing values correctly.""" @@ -56,5 +58,6 @@ def setUp(self): def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'CESM1-BGC', 'Amon', 'nbp'), - [Nbp(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "CESM1-BGC", "Amon", "nbp"), + [Nbp(None), GenericFix(None)], + ) diff --git a/tests/integration/cmor/_fixes/cmip5/test_cesm1_cam5.py b/tests/integration/cmor/_fixes/cmip5/test_cesm1_cam5.py index 07cf8ab1a9..e652254d58 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_cesm1_cam5.py +++ b/tests/integration/cmor/_fixes/cmip5/test_cesm1_cam5.py @@ -1,4 +1,5 @@ """Tests for the fixes of CESM1-CAM5.""" + import iris import pytest @@ -9,7 +10,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'CESM1-CAM5', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "CESM1-CAM5", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -18,9 +19,9 @@ def cl_cube(): """``cl`` cube.""" cube = iris.cube.Cube( [1.0], - var_name='cl', - standard_name='cloud_area_fraction_in_atmosphere_layer', - units='%', + var_name="cl", + standard_name="cloud_area_fraction_in_atmosphere_layer", + units="%", ) return cube diff --git a/tests/integration/cmor/_fixes/cmip5/test_cesm1_fastchem.py b/tests/integration/cmor/_fixes/cmip5/test_cesm1_fastchem.py index 6ae2a44913..5be9059e1b 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_cesm1_fastchem.py +++ b/tests/integration/cmor/_fixes/cmip5/test_cesm1_fastchem.py @@ -1,4 +1,5 @@ """Tests for CESM1-FASTCHEM fixes.""" + from esmvalcore.cmor._fixes.cmip5.cesm1_cam5 import Cl as BaseCl from esmvalcore.cmor._fixes.cmip5.cesm1_fastchem import Cl from esmvalcore.cmor._fixes.fix import GenericFix @@ -7,7 +8,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'CESM1-FASTCHEM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "CESM1-FASTCHEM", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_cesm1_waccm.py b/tests/integration/cmor/_fixes/cmip5/test_cesm1_waccm.py index 146a456dad..702a1fe88e 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_cesm1_waccm.py +++ b/tests/integration/cmor/_fixes/cmip5/test_cesm1_waccm.py @@ -1,4 +1,5 @@ """Tests for CESM1-WACCM fixes.""" + from esmvalcore.cmor._fixes.cmip5.cesm1_cam5 import Cl as BaseCl from esmvalcore.cmor._fixes.cmip5.cesm1_waccm import Cl from esmvalcore.cmor._fixes.fix import GenericFix @@ -7,7 +8,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'CESM1-WACCM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "CESM1-WACCM", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_cnrm_cm5.py b/tests/integration/cmor/_fixes/cmip5/test_cnrm_cm5.py index ce6ebe0121..cc7c9a8059 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_cnrm_cm5.py +++ b/tests/integration/cmor/_fixes/cmip5/test_cnrm_cm5.py @@ -1,4 +1,5 @@ """Test CNRM-CM5 fixes.""" + import unittest from cf_units import Unit @@ -11,39 +12,43 @@ class TestMsftmyz(unittest.TestCase): """Test msftmyz fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='msftmyz', units='J') + self.cube = Cube([1.0], var_name="msftmyz", units="J") self.fix = Msftmyz(None) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'CNRM-CM5', 'Amon', 'msftmyz'), - [Msftmyz(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "CNRM-CM5", "Amon", "msftmyz"), 
+ [Msftmyz(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 1.0e6) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestMsftmyzba(unittest.TestCase): """Test msftmyzba fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='msftmyzba', units='J') + self.cube = Cube([1.0], var_name="msftmyzba", units="J") self.fix = Msftmyzba(None) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'CNRM-CM5', 'Amon', 'msftmyzba'), - [Msftmyzba(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "CNRM-CM5", "Amon", "msftmyzba"), + [Msftmyzba(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 1.0e6) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) diff --git a/tests/integration/cmor/_fixes/cmip5/test_csiro_mk3_6_0.py b/tests/integration/cmor/_fixes/cmip5/test_csiro_mk3_6_0.py index 3fa4318d72..533a310d05 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_csiro_mk3_6_0.py +++ b/tests/integration/cmor/_fixes/cmip5/test_csiro_mk3_6_0.py @@ -1,4 +1,5 @@ """Test fixes for CSIRO-Mk3-6-0.""" + from esmvalcore.cmor._fixes.cmip5.csiro_mk3_6_0 import Cl from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'CSIRO-Mk3-6-0', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "CSIRO-Mk3-6-0", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_ec_earth.py b/tests/integration/cmor/_fixes/cmip5/test_ec_earth.py index 4b480ef81f..9fdd838268 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_ec_earth.py +++ b/tests/integration/cmor/_fixes/cmip5/test_ec_earth.py @@ -1,4 +1,5 @@ """Test EC-EARTH fixes.""" + import unittest import numpy as np @@ -20,79 +21,89 @@ class TestSic(unittest.TestCase): """Test sic fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='sic', units='J') + self.cube = Cube([1.0], var_name="sic", units="J") self.fix = Sic(None) def test_get(self): """Test fix get""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'EC-EARTH', 'Amon', 'sic'), - [Sic(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "EC-EARTH", "Amon", "sic"), + [Sic(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestSftlf(unittest.TestCase): """Test sftlf fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='sftlf', units='J') + self.cube = Cube([1.0], var_name="sftlf", units="J") self.fix = Sftlf(None) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'EC-EARTH', 'Amon', 'sftlf'), - [Sftlf(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "EC-EARTH", "Amon", "sftlf"), + [Sftlf(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestTas(unittest.TestCase): """Test tas fixes.""" + def setUp(self): """Prepare 
tests.""" - height_coord = DimCoord(2., - standard_name='height', - long_name='height', - var_name='height', - units='m', - bounds=None, - attributes={'positive': 'up'}) + height_coord = DimCoord( + 2.0, + standard_name="height", + long_name="height", + var_name="height", + units="m", + bounds=None, + attributes={"positive": "up"}, + ) time_coord = DimCoord( - 1., - standard_name='time', - var_name='time', - units=Unit('days since 2070-01-01 00:00:00', calendar='gregorian'), + 1.0, + standard_name="time", + var_name="time", + units=Unit("days since 2070-01-01 00:00:00", calendar="gregorian"), ) self.height_coord = height_coord - self.cube_without = CubeList([Cube([3.0], var_name='tas')]) + self.cube_without = CubeList([Cube([3.0], var_name="tas")]) self.cube_without[0].add_aux_coord(time_coord, 0) - self.cube_with = CubeList([Cube([3.0], var_name='tas')]) + self.cube_with = CubeList([Cube([3.0], var_name="tas")]) self.cube_with[0].add_aux_coord(height_coord, ()) self.cube_with[0].add_aux_coord(time_coord, 0) - self.cube_with[0].coord('time').long_name = 'time' + self.cube_with[0].coord("time").long_name = "time" self.fix = Tas(None) def test_get(self): """Test fix get""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'EC-EARTH', 'Amon', 'tas'), - [Tas(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "EC-EARTH", "Amon", "tas"), + [Tas(None), GenericFix(None)], + ) def test_tas_fix_metadata(self): """Test metadata fix.""" @@ -102,54 +113,59 @@ def test_tas_fix_metadata(self): # make sure this does not raise an error out_cube_with = self.fix.fix_metadata(self.cube_with) - coord = out_cube_without[0].coord('height') + coord = out_cube_without[0].coord("height") assert coord == self.height_coord - coord = out_cube_without[0].coord('time') + coord = out_cube_without[0].coord("time") assert coord.long_name == "time" - coord = out_cube_with[0].coord('height') + coord = out_cube_with[0].coord("height") assert coord == self.height_coord - coord = out_cube_with[0].coord('time') + coord = out_cube_with[0].coord("time") assert coord.long_name == "time" class TestAreacello(unittest.TestCase): """Test areacello fixes.""" + def setUp(self): """Prepare tests.""" latitude = Cube( np.ones((2, 2)), - standard_name='latitude', - long_name='latitude', - var_name='lat', - units='degrees_north', + standard_name="latitude", + long_name="latitude", + var_name="lat", + units="degrees_north", ) longitude = Cube( np.ones((2, 2)), - standard_name='longitude', - long_name='longitude', - var_name='lon', - units='degrees_north', + standard_name="longitude", + long_name="longitude", + var_name="lon", + units="degrees_north", ) - self.cubes = CubeList([ - Cube( - np.ones((2, 2)), - var_name='areacello', - long_name='Areas of grid cell', - ), latitude, longitude - ]) + self.cubes = CubeList( + [ + Cube( + np.ones((2, 2)), + var_name="areacello", + long_name="Areas of grid cell", + ), + latitude, + longitude, + ] + ) self.fix = Areacello(None) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'EC-EARTH', 'Omon', 'areacello'), + Fix.get_fixes("CMIP5", "EC-EARTH", "Omon", "areacello"), [Areacello(None), GenericFix(None)], ) @@ -159,51 +175,52 @@ def test_areacello_fix_metadata(self): out_cube = self.fix.fix_metadata(self.cubes) assert len(out_cube) == 1 - out_cube[0].coord('latitude') - out_cube[0].coord('longitude') + out_cube[0].coord("latitude") + out_cube[0].coord("longitude") class TestPr(unittest.TestCase): """Test pr fixes.""" + def setUp(self): """Prepare 
tests.""" wrong_time_coord = AuxCoord( points=[1.0, 2.0, 1.0, 2.0, 3.0], - var_name='time', - standard_name='time', - units='days since 1850-01-01', - ) + var_name="time", + standard_name="time", + units="days since 1850-01-01", + ) correct_time_coord = AuxCoord( points=[1.0, 2.0, 3.0], - var_name='time', - standard_name='time', - units='days since 1850-01-01', - ) + var_name="time", + standard_name="time", + units="days since 1850-01-01", + ) lat_coord = DimCoord( [0.0], - standard_name='latitude', - var_name='lat', - ) + standard_name="latitude", + var_name="lat", + ) lon_coord = DimCoord( [0.0], - standard_name='longitude', - var_name='lon', - ) + standard_name="longitude", + var_name="lon", + ) self.time_coord = correct_time_coord - self.wrong_cube = CubeList([Cube(np.ones((5, 1, 1)), - var_name='pr', - units='kg m-2 s-1')]) + self.wrong_cube = CubeList( + [Cube(np.ones((5, 1, 1)), var_name="pr", units="kg m-2 s-1")] + ) self.wrong_cube[0].add_aux_coord(wrong_time_coord, 0) self.wrong_cube[0].add_dim_coord(lat_coord, 1) self.wrong_cube[0].add_dim_coord(lon_coord, 2) - self.correct_cube = CubeList([Cube(np.ones(3), - var_name='pr', - units='kg m-2 s-1')]) + self.correct_cube = CubeList( + [Cube(np.ones(3), var_name="pr", units="kg m-2 s-1")] + ) self.correct_cube[0].add_aux_coord(correct_time_coord, 0) self.fix = Pr(None) @@ -211,7 +228,7 @@ def setUp(self): def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'EC-EARTH', 'Amon', 'pr'), + Fix.get_fixes("CMIP5", "EC-EARTH", "Amon", "pr"), [Pr(None), GenericFix(None)], ) @@ -221,15 +238,15 @@ def test_pr_fix_metadata(self): out_wrong_cube = self.fix.fix_metadata(self.wrong_cube) out_correct_cube = self.fix.fix_metadata(self.correct_cube) - time = out_wrong_cube[0].coord('time') + time = out_wrong_cube[0].coord("time") assert time == self.time_coord - time = out_correct_cube[0].coord('time') + time = out_correct_cube[0].coord("time") assert time == self.time_coord def test_pr_fix_metadata_no_time(self): """Test metadata fix with no time coord.""" - self.correct_cube[0].remove_coord('time') + self.correct_cube[0].remove_coord("time") out_correct_cube = self.fix.fix_metadata(self.correct_cube) with self.assertRaises(CoordinateNotFoundError): - out_correct_cube[0].coord('time') + out_correct_cube[0].coord("time") diff --git a/tests/integration/cmor/_fixes/cmip5/test_fgoals_g2.py b/tests/integration/cmor/_fixes/cmip5/test_fgoals_g2.py index b70b16fa25..612873dc07 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_fgoals_g2.py +++ b/tests/integration/cmor/_fixes/cmip5/test_fgoals_g2.py @@ -1,4 +1,5 @@ """Test FGOALS-g2 fixes.""" + import pytest from cf_units import Unit from iris.coords import DimCoord @@ -12,19 +13,18 @@ @pytest.fixture def cube(): """Cube for testing.""" - test_cube = Cube([[1.0, 2.0]], var_name='co2', units='J') + test_cube = Cube([[1.0, 2.0]], var_name="co2", units="J") test_cube.add_dim_coord( DimCoord( [0.0, 1.0], - standard_name='time', - units=Unit('days since 0001-01', calendar='gregorian')), - 1) + standard_name="time", + units=Unit("days since 0001-01", calendar="gregorian"), + ), + 1, + ) test_cube.add_dim_coord( - DimCoord( - [180], - standard_name='longitude', - units=Unit('degrees')), - 0) + DimCoord([180], standard_name="longitude", units=Unit("degrees")), 0 + ) return test_cube @@ -34,8 +34,9 @@ class TestAll: @staticmethod def test_get(): """Test fix get.""" - assert Fix.get_fixes('CMIP5', 'FGOALS-G2', 'Amon', 'tas') == [ - AllVars(None), GenericFix(None) + assert 
Fix.get_fixes("CMIP5", "FGOALS-G2", "Amon", "tas") == [ + AllVars(None), + GenericFix(None), ] @staticmethod @@ -44,20 +45,20 @@ def test_fix_metadata(cube): fix = AllVars(None) cube = fix.fix_metadata([cube])[0] - time = cube.coord('time') - assert time.units.origin == 'day since 1-01-01 00:00:00.000000' - assert time.units.calendar in ('standard', 'gregorian') + time = cube.coord("time") + assert time.units.origin == "day since 1-01-01 00:00:00.000000" + assert time.units.calendar in ("standard", "gregorian") @staticmethod def test_fix_metadata_dont_fail_if_not_longitude(cube): """Test calendar fix.""" - cube.remove_coord('longitude') + cube.remove_coord("longitude") fix = AllVars(None) fix.fix_metadata([cube]) @staticmethod def test_fix_metadata_dont_fail_if_not_time(cube): """Test calendar fix.""" - cube.remove_coord('time') + cube.remove_coord("time") fix = AllVars(None) fix.fix_metadata([cube]) diff --git a/tests/integration/cmor/_fixes/cmip5/test_fgoals_s2.py b/tests/integration/cmor/_fixes/cmip5/test_fgoals_s2.py index a1c15afebd..09c153fdcb 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_fgoals_s2.py +++ b/tests/integration/cmor/_fixes/cmip5/test_fgoals_s2.py @@ -1,4 +1,5 @@ """Test FGOALS-s2 fixes.""" + import numpy as np from iris.coords import AuxCoord, DimCoord from iris.cube import Cube, CubeList @@ -10,45 +11,51 @@ def test_get_allvars_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'FGOALS-s2', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP5", "FGOALS-s2", "Amon", "tas") assert fix == [AllVars(None), GenericFix(None)] LAT_COORD = DimCoord( [-20.0, 0.0, 10.0], bounds=[[-30.0, -10.0], [-10.0, 5.0], [5.0, 20.0]], - var_name='lat', - standard_name='latitude', + var_name="lat", + standard_name="latitude", ) LAT_COORD_MULT = AuxCoord( [[-20.0], [0.0], [10.0]], bounds=[[[-30.0, -10.0]], [[-10.0, 5.0]], [[5.0, 20.0]]], - var_name='lat', - standard_name='latitude', + var_name="lat", + standard_name="latitude", +) +LAT_COORD_SMALL = DimCoord( + [0.0], bounds=[-45.0, 45.0], var_name="lat", standard_name="latitude" ) -LAT_COORD_SMALL = DimCoord([0.0], - bounds=[-45.0, 45.0], - var_name='lat', - standard_name='latitude') def test_allvars_fix_metadata(): """Test ``fix_metadata`` for all variables.""" - cubes = CubeList([ - Cube([1, 2, 3], dim_coords_and_dims=[(LAT_COORD.copy(), 0)]), - Cube([[1], [2], [3]], - aux_coords_and_dims=[(LAT_COORD_MULT.copy(), (0, 1))]), - Cube([1], dim_coords_and_dims=[(LAT_COORD_SMALL.copy(), 0)]), - Cube(0.0), - ]) + cubes = CubeList( + [ + Cube([1, 2, 3], dim_coords_and_dims=[(LAT_COORD.copy(), 0)]), + Cube( + [[1], [2], [3]], + aux_coords_and_dims=[(LAT_COORD_MULT.copy(), (0, 1))], + ), + Cube([1], dim_coords_and_dims=[(LAT_COORD_SMALL.copy(), 0)]), + Cube(0.0), + ] + ) fix = AllVars(None) fixed_cubes = fix.fix_metadata(cubes) assert len(fixed_cubes) == 4 - assert fixed_cubes[0].coord('latitude') != LAT_COORD - np.testing.assert_allclose(fixed_cubes[0].coord('latitude').points, - [-20.0, 0.0, 10.0]) - np.testing.assert_allclose(fixed_cubes[0].coord('latitude').bounds, - [[-25.0, -10.0], [-10.0, 5.0], [5.0, 20.0]]) - assert fixed_cubes[1].coord('latitude') == LAT_COORD_MULT - assert fixed_cubes[2].coord('latitude') == LAT_COORD_SMALL + assert fixed_cubes[0].coord("latitude") != LAT_COORD + np.testing.assert_allclose( + fixed_cubes[0].coord("latitude").points, [-20.0, 0.0, 10.0] + ) + np.testing.assert_allclose( + fixed_cubes[0].coord("latitude").bounds, + [[-25.0, -10.0], [-10.0, 5.0], [5.0, 20.0]], + ) + assert 
fixed_cubes[1].coord("latitude") == LAT_COORD_MULT + assert fixed_cubes[2].coord("latitude") == LAT_COORD_SMALL assert fixed_cubes[3] == Cube(0.0) diff --git a/tests/integration/cmor/_fixes/cmip5/test_fio_esm.py b/tests/integration/cmor/_fixes/cmip5/test_fio_esm.py index c0705e58fa..2da5fd742f 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_fio_esm.py +++ b/tests/integration/cmor/_fixes/cmip5/test_fio_esm.py @@ -1,4 +1,5 @@ """Test fixes for FIO-ESM.""" + import unittest from cf_units import Unit @@ -12,7 +13,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'FIO-ESM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "FIO-ESM", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -23,37 +24,43 @@ def test_cl_fix(): class TestCh4(unittest.TestCase): """Test ch4 fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='ch4', units='J') + self.cube = Cube([1.0], var_name="ch4", units="J") self.fix = Ch4(None) def test_get(self): """Test fix get""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'FIO-ESM', 'Amon', 'ch4'), - [Ch4(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "FIO-ESM", "Amon", "ch4"), + [Ch4(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 29. / 16. * 1.e9) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.data[0], 29.0 / 16.0 * 1.0e9) + self.assertEqual(cube.units, Unit("J")) class TestCo2(unittest.TestCase): """Test co2 fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='co2', units='J') + self.cube = Cube([1.0], var_name="co2", units="J") self.fix = Co2(None) def test_get(self): """Test fix get""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'FIO-ESM', 'Amon', 'co2'), - [Co2(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "FIO-ESM", "Amon", "co2"), + [Co2(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 29. / 44. 
* 1.e6) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.data[0], 29.0 / 44.0 * 1.0e6) + self.assertEqual(cube.units, Unit("J")) diff --git a/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py b/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py index 8a4fa19fd1..0ddcfdcacf 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py +++ b/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm2p1.py @@ -1,4 +1,5 @@ """Test GFDL-CM2P1 fixes.""" + import unittest from unittest import mock @@ -21,11 +22,13 @@ class TestCl(unittest.TestCase): """Test cl fix.""" + def test_get(self): """Test getting of fix.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-CM2P1', 'Amon', 'cl'), - [Cl(None), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "GFDL-CM2P1", "Amon", "cl"), + [Cl(None), AllVars(None), GenericFix(None)], + ) def test_fix(self): """Test fix for ``cl``.""" @@ -34,22 +37,24 @@ class TestSftof(unittest.TestCase): """Test sftof fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='sftof', units='J') + self.cube = Cube([1.0], var_name="sftof", units="J") self.fix = Sftof(None) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-CM2P1', 'fx', 'sftof'), - [Sftof(None), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "GFDL-CM2P1", "fx", "sftof"), + [Sftof(None), AllVars(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestAreacello(unittest.TestCase): @@ -57,30 +62,33 @@ class TestAreacello(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='areacello', units='m-2') - self.vardef = get_var_info('CMIP5', 'fx', self.cube.var_name) + self.cube = Cube([1.0], var_name="areacello", units="m-2") + self.vardef = get_var_info("CMIP5", "fx", self.cube.var_name) self.fix = Areacello(self.vardef) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-CM2P1', 'Amon', 'areacello'), - [Areacello(self.vardef), - AllVars(self.vardef), - GenericFix(self.vardef)]) + Fix.get_fixes("CMIP5", "GFDL-CM2P1", "Amon", "areacello"), + [ + Areacello(self.vardef), + AllVars(self.vardef), + GenericFix(self.vardef), + ], + ) def test_fix_metadata(self): """Test data fix.""" - cube = self.fix.fix_metadata((self.cube, ))[0] + cube = self.fix.fix_metadata((self.cube,))[0] self.assertEqual(cube.data[0], 1.0) - self.assertEqual(cube.units, Unit('m2')) + self.assertEqual(cube.units, Unit("m2")) def test_fix_data(self): """Test data fix.""" - self.cube.units = 'm2' - cube = self.fix.fix_metadata((self.cube, ))[0] + self.cube.units = "m2" + cube = self.fix.fix_metadata((self.cube,))[0] self.assertEqual(cube.data[0], 1.0) - self.assertEqual(cube.units, Unit('m2')) + self.assertEqual(cube.units, Unit("m2")) class TestSit(unittest.TestCase): @@ -88,35 +96,36 @@ class TestSit(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0, 2.0], var_name='sit', units='m') + self.cube = Cube([1.0, 2.0], var_name="sit", units="m") self.cube.add_dim_coord( iris.coords.DimCoord( points=[45000.5, 45031.5], - var_name='time', - standard_name='time', - long_name='time', - units='days since 1850-01-01', - bounds=[[1e8, 1.1e8], [1.1e8, 1.2e8]] + var_name="time", + standard_name="time", + long_name="time", + units="days 
since 1850-01-01", + bounds=[[1e8, 1.1e8], [1.1e8, 1.2e8]], ), - 0 + 0, ) self.var_info_mock = mock.Mock() - self.var_info_mock.frequency = 'mon' - self.var_info_mock.short_name = 'sit' + self.var_info_mock.frequency = "mon" + self.var_info_mock.short_name = "sit" self.fix = Sit(self.var_info_mock) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-CM2P1', 'OImon', 'sit'), - [Sit(self.var_info_mock), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "GFDL-CM2P1", "OImon", "sit"), + [Sit(self.var_info_mock), AllVars(None), GenericFix(None)], + ) def test_fix_metadata_day_do_nothing(self): """Test data fix.""" - self.var_info_mock.frequency = 'day' + self.var_info_mock.frequency = "day" fix = Sit(self.var_info_mock) cube = fix.fix_metadata((self.cube,))[0] - time = cube.coord('time') + time = cube.coord("time") self.assertEqual(time.bounds[0, 0], 1e8) self.assertEqual(time.bounds[0, 1], 1.1e8) self.assertEqual(time.bounds[1, 0], 1.1e8) @@ -126,7 +135,7 @@ def test_fix_metadata(self): """Test data fix.""" fix = Sit(self.var_info_mock) cube = fix.fix_metadata((self.cube,))[0] - time = cube.coord('time') + time = cube.coord("time") self.assertEqual(time.bounds[0, 0], 44984) self.assertEqual(time.bounds[0, 1], 45015) self.assertEqual(time.bounds[1, 0], 45015) @@ -136,8 +145,8 @@ def test_fix_metadata_not_needed(self): """Test data fix.""" fix = Sit(self.var_info_mock) cube = fix.fix_metadata((self.cube,))[0] - time = cube.coord('time') - new_bounds = [[44985., 45014.], [45016., 45044.]] + time = cube.coord("time") + new_bounds = [[44985.0, 45014.0], [45016.0, 45044.0]] time.bounds = new_bounds self.assertEqual(time.bounds[0, 0], 44985) self.assertEqual(time.bounds[0, 1], 45014) diff --git a/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm3.py b/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm3.py index bcda26c95b..fffe1812ee 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm3.py +++ b/tests/integration/cmor/_fixes/cmip5/test_gfdl_cm3.py @@ -1,4 +1,5 @@ """Test GFDL-CM3 fixes.""" + import unittest from cf_units import Unit @@ -12,49 +13,55 @@ class TestSftof(unittest.TestCase): """Test sftof fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='sftof', units='J') + self.cube = Cube([1.0], var_name="sftof", units="J") self.fix = Sftof(None) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-CM3', 'fx', 'sftof'), - [Sftof(None), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "GFDL-CM3", "fx", "sftof"), + [Sftof(None), AllVars(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestAreacello(unittest.TestCase): """Test sftof fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='areacello', units='m-2') - self.vardef = get_var_info('CMIP5', 'fx', self.cube.var_name) + self.cube = Cube([1.0], var_name="areacello", units="m-2") + self.vardef = get_var_info("CMIP5", "fx", self.cube.var_name) self.fix = Areacello(self.vardef) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-CM3', 'Amon', 'areacello'), - [Areacello(self.vardef), - AllVars(self.vardef), - GenericFix(self.vardef)]) + Fix.get_fixes("CMIP5", "GFDL-CM3", "Amon", "areacello"), + [ + Areacello(self.vardef), + AllVars(self.vardef), + 
GenericFix(self.vardef), + ], + ) def test_fix_metadata(self): """Test data fix.""" - cube = self.fix.fix_metadata((self.cube, ))[0] + cube = self.fix.fix_metadata((self.cube,))[0] self.assertEqual(cube.data[0], 1.0) - self.assertEqual(cube.units, Unit('m2')) + self.assertEqual(cube.units, Unit("m2")) def test_fix_data(self): """Test data fix.""" - self.cube.units = 'm2' - cube = self.fix.fix_metadata((self.cube, ))[0] + self.cube.units = "m2" + cube = self.fix.fix_metadata((self.cube,))[0] self.assertEqual(cube.data[0], 1.0) - self.assertEqual(cube.units, Unit('m2')) + self.assertEqual(cube.units, Unit("m2")) diff --git a/tests/integration/cmor/_fixes/cmip5/test_gfdl_esm2g.py b/tests/integration/cmor/_fixes/cmip5/test_gfdl_esm2g.py index d5189e84c4..30421411e3 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_gfdl_esm2g.py +++ b/tests/integration/cmor/_fixes/cmip5/test_gfdl_esm2g.py @@ -1,4 +1,5 @@ """Tests for fixes of GFDL-ESM2G (CMIP5).""" + import unittest from unittest import mock @@ -20,8 +21,8 @@ from esmvalcore.cmor.fix import Fix from esmvalcore.cmor.table import get_var_info -CUBE_1 = iris.cube.Cube([1.0], long_name='to_be_rm') -CUBE_2 = iris.cube.Cube([1.0], long_name='not_to_be_rm') +CUBE_1 = iris.cube.Cube([1.0], long_name="to_be_rm") +CUBE_2 = iris.cube.Cube([1.0], long_name="not_to_be_rm") CUBES_LISTS = [ (iris.cube.CubeList([CUBE_1]), iris.cube.CubeList([])), (iris.cube.CubeList([CUBE_1, CUBE_2]), iris.cube.CubeList([CUBE_2])), @@ -29,10 +30,10 @@ ] -@pytest.mark.parametrize('cubes_in,cubes_out', CUBES_LISTS) +@pytest.mark.parametrize("cubes_in,cubes_out", CUBES_LISTS) def test_get_and_remove(cubes_in, cubes_out): """Test get and remove helper method.""" - _get_and_remove(cubes_in, 'to_be_rm') + _get_and_remove(cubes_in, "to_be_rm") assert cubes_in is not cubes_out assert cubes_in == cubes_out @@ -40,122 +41,130 @@ def test_get_and_remove(cubes_in, cubes_out): CUBES = iris.cube.CubeList([CUBE_1, CUBE_2]) -@mock.patch('esmvalcore.cmor._fixes.cmip5.gfdl_esm2g._get_and_remove', - autospec=True) +@mock.patch( + "esmvalcore.cmor._fixes.cmip5.gfdl_esm2g._get_and_remove", autospec=True +) def test_allvars(mock_get_and_remove): """Test fixes for all vars.""" fix = AllVars(None) fix.fix_metadata(CUBES) assert mock_get_and_remove.call_count == 3 assert mock_get_and_remove.call_args_list == [ - mock.call(CUBES, 'Start time for average period'), - mock.call(CUBES, 'End time for average period'), - mock.call(CUBES, 'Length of average period'), + mock.call(CUBES, "Start time for average period"), + mock.call(CUBES, "End time for average period"), + mock.call(CUBES, "Length of average period"), ] -@mock.patch('esmvalcore.cmor._fixes.cmip5.gfdl_esm2g._get_and_remove', - autospec=True) +@mock.patch( + "esmvalcore.cmor._fixes.cmip5.gfdl_esm2g._get_and_remove", autospec=True +) def test_fgco2(mock_get_and_remove): """Test fgco2 fixes.""" fix = FgCo2(None) fix.fix_metadata(CUBES) assert mock_get_and_remove.call_count == 2 assert mock_get_and_remove.call_args_list == [ - mock.call(CUBES, 'Latitude of tracer (h) points'), - mock.call(CUBES, 'Longitude of tracer (h) points'), + mock.call(CUBES, "Latitude of tracer (h) points"), + mock.call(CUBES, "Longitude of tracer (h) points"), ] class TestCo2(unittest.TestCase): """Test co2 fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = iris.cube.Cube([1.0], var_name='co2', units='J') + self.cube = iris.cube.Cube([1.0], var_name="co2", units="J") self.fix = Co2(None) def test_get(self): """Test fix get""" self.assertListEqual( - 
Fix.get_fixes('CMIP5', 'GFDL-ESM2G', 'Amon', 'co2'), - [Co2(None), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "GFDL-ESM2G", "Amon", "co2"), + [Co2(None), AllVars(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 1e6) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestUsi(unittest.TestCase): """Test usi fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = iris.cube.Cube([1.0], var_name='usi', units='J') - self.vardef = get_var_info('CMIP5', 'day', self.cube.var_name) + self.cube = iris.cube.Cube([1.0], var_name="usi", units="J") + self.vardef = get_var_info("CMIP5", "day", self.cube.var_name) self.fix = Usi(self.vardef) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-ESM2G', 'day', 'usi'), - [Usi(self.vardef), - AllVars(self.vardef), - GenericFix(self.vardef)]) + Fix.get_fixes("CMIP5", "GFDL-ESM2G", "day", "usi"), + [Usi(self.vardef), AllVars(self.vardef), GenericFix(self.vardef)], + ) def test_fix_data(self): """Test metadata fix.""" cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.standard_name, 'sea_ice_x_velocity') + self.assertEqual(cube.standard_name, "sea_ice_x_velocity") class TestVsi(unittest.TestCase): """Test vsi fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = iris.cube.Cube([1.0], var_name='vsi', units='J') - self.vardef = get_var_info('CMIP5', 'day', self.cube.var_name) + self.cube = iris.cube.Cube([1.0], var_name="vsi", units="J") + self.vardef = get_var_info("CMIP5", "day", self.cube.var_name) self.fix = Vsi(self.vardef) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-ESM2G', 'day', 'vsi'), - [Vsi(self.vardef), - AllVars(self.vardef), - GenericFix(self.vardef)]) + Fix.get_fixes("CMIP5", "GFDL-ESM2G", "day", "vsi"), + [Vsi(self.vardef), AllVars(self.vardef), GenericFix(self.vardef)], + ) def test_fix_data(self): """Test metadata fix.""" cube = self.fix.fix_metadata([self.cube])[0] - self.assertEqual(cube.standard_name, 'sea_ice_y_velocity') + self.assertEqual(cube.standard_name, "sea_ice_y_velocity") class TestAreacello(unittest.TestCase): """Test areacello fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='areacello', units='m-2') - self.vardef = get_var_info('CMIP5', 'fx', self.cube.var_name) + self.cube = Cube([1.0], var_name="areacello", units="m-2") + self.vardef = get_var_info("CMIP5", "fx", self.cube.var_name) self.fix = Areacello(self.vardef) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-ESM2G', 'fx', 'areacello'), - [Areacello(self.vardef), - AllVars(self.vardef), - GenericFix(self.vardef)]) + Fix.get_fixes("CMIP5", "GFDL-ESM2G", "fx", "areacello"), + [ + Areacello(self.vardef), + AllVars(self.vardef), + GenericFix(self.vardef), + ], + ) def test_fix_metadata(self): """Test data fix.""" - cube = self.fix.fix_metadata((self.cube, ))[0] + cube = self.fix.fix_metadata((self.cube,))[0] self.assertEqual(cube.data[0], 1.0) - self.assertEqual(cube.units, Unit('m2')) + self.assertEqual(cube.units, Unit("m2")) def test_fix_data(self): """Test data fix.""" - self.cube.units = 'm2' - cube = self.fix.fix_metadata((self.cube, ))[0] + self.cube.units = "m2" + cube = self.fix.fix_metadata((self.cube,))[0] self.assertEqual(cube.data[0], 1.0) - self.assertEqual(cube.units, Unit('m2')) + self.assertEqual(cube.units, Unit("m2")) 
diff --git a/tests/integration/cmor/_fixes/cmip5/test_gfdl_esm2m.py b/tests/integration/cmor/_fixes/cmip5/test_gfdl_esm2m.py index 1897ba5c85..29f3ba4c7e 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_gfdl_esm2m.py +++ b/tests/integration/cmor/_fixes/cmip5/test_gfdl_esm2m.py @@ -1,4 +1,5 @@ """Test fixes for GFDL-ESM2M.""" + import unittest from cf_units import Unit @@ -17,69 +18,77 @@ class TestSftof(unittest.TestCase): """Test sftof fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='sftof', units='J') + self.cube = Cube([1.0], var_name="sftof", units="J") self.fix = Sftof(None) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-ESM2M', 'fx', 'sftof'), - [Sftof(None), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "GFDL-ESM2M", "fx", "sftof"), + [Sftof(None), AllVars(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestCo2(unittest.TestCase): """Test co2 fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='co2', units='J') + self.cube = Cube([1.0], var_name="co2", units="J") self.fix = Co2(None) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-ESM2M', 'Amon', 'co2'), - [Co2(None), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "GFDL-ESM2M", "Amon", "co2"), + [Co2(None), AllVars(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 1e6) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestAreacello(unittest.TestCase): """Test areacello fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='areacello', units='m-2') - self.vardef = get_var_info('CMIP5', 'fx', self.cube.var_name) + self.cube = Cube([1.0], var_name="areacello", units="m-2") + self.vardef = get_var_info("CMIP5", "fx", self.cube.var_name) + self.fix = Areacello(self.vardef) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'GFDL-ESM2M', 'fx', 'areacello'), - [Areacello(self.vardef), - AllVars(self.vardef), - GenericFix(self.vardef)]) + Fix.get_fixes("CMIP5", "GFDL-ESM2M", "fx", "areacello"), + [ + Areacello(self.vardef), + AllVars(self.vardef), + GenericFix(self.vardef), + ], + ) def test_fix_metadata(self): """Test data fix.""" - cube = self.fix.fix_metadata((self.cube, ))[0] + cube = self.fix.fix_metadata((self.cube,))[0] self.assertEqual(cube.data[0], 1.0) - self.assertEqual(cube.units, Unit('m2')) + self.assertEqual(cube.units, Unit("m2")) def test_fix_data(self): """Test data fix.""" - self.cube.units = 'm2' - cube = self.fix.fix_metadata((self.cube, ))[0] + self.cube.units = "m2" + cube = self.fix.fix_metadata((self.cube,))[0] self.assertEqual(cube.data[0], 1.0) - self.assertEqual(cube.units, Unit('m2')) + self.assertEqual(cube.units, Unit("m2")) diff --git a/tests/integration/cmor/_fixes/cmip5/test_giss_e2_h.py b/tests/integration/cmor/_fixes/cmip5/test_giss_e2_h.py index 9c39dd136f..95acbb9cbf 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_giss_e2_h.py +++ b/tests/integration/cmor/_fixes/cmip5/test_giss_e2_h.py @@ -1,4 +1,5 @@ """Test fixes for GISS-E2-H.""" + from esmvalcore.cmor._fixes.cmip5.giss_e2_h import Cl from esmvalcore.cmor._fixes.common import 
ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'GISS-E2-H', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "GISS-E2-H", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_giss_e2_r.py b/tests/integration/cmor/_fixes/cmip5/test_giss_e2_r.py index d95b517cdd..5b65d3a594 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_giss_e2_r.py +++ b/tests/integration/cmor/_fixes/cmip5/test_giss_e2_r.py @@ -1,4 +1,5 @@ """Test fixes for GISS-E2-R.""" + from esmvalcore.cmor._fixes.cmip5.giss_e2_r import Cl from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'GISS-E2-R', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "GISS-E2-R", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_hadgem2_cc.py b/tests/integration/cmor/_fixes/cmip5/test_hadgem2_cc.py index 0e6a4fc57a..13642a35ad 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_hadgem2_cc.py +++ b/tests/integration/cmor/_fixes/cmip5/test_hadgem2_cc.py @@ -1,4 +1,5 @@ """Test HADGEM2-CC fixes.""" + import unittest from esmvalcore.cmor._fixes.cmip5.hadgem2_cc import O2, AllVars @@ -8,17 +9,21 @@ class TestAllVars(unittest.TestCase): """Test allvars fixes.""" + def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'HADGEM2-CC', 'Amon', 'tas'), - [AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "HADGEM2-CC", "Amon", "tas"), + [AllVars(None), GenericFix(None)], + ) class TestO2(unittest.TestCase): """Test o2 fixes.""" + def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'HADGEM2-CC', 'Amon', 'o2'), - [O2(None), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "HADGEM2-CC", "Amon", "o2"), + [O2(None), AllVars(None), GenericFix(None)], + ) diff --git a/tests/integration/cmor/_fixes/cmip5/test_hadgem2_es.py b/tests/integration/cmor/_fixes/cmip5/test_hadgem2_es.py index 17d928e574..a98bba6b3c 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_hadgem2_es.py +++ b/tests/integration/cmor/_fixes/cmip5/test_hadgem2_es.py @@ -1,4 +1,5 @@ """Test HADGEM2-ES fixes.""" + import unittest import dask.array as da @@ -19,8 +20,9 @@ class TestAllVars(unittest.TestCase): def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'HADGEM2-ES', 'Amon', 'tas'), - [AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "HADGEM2-ES", "Amon", "tas"), + [AllVars(None), GenericFix(None)], + ) @staticmethod def test_clip_latitude(): @@ -29,9 +31,9 @@ def test_clip_latitude(): aux_coords_and_dims=[ ( iris.coords.AuxCoord( - da.asarray([90., 91.]), + da.asarray([90.0, 91.0]), bounds=da.asarray([[89.5, 90.5], [90.5, 91.5]]), - standard_name='latitude', + standard_name="latitude", ), 0, ), @@ -40,11 +42,13 @@ def test_clip_latitude(): fix = AllVars(None) cubes = fix.fix_metadata([cube]) assert len(cubes) == 1 - coord = cubes[0].coord('latitude') + coord = cubes[0].coord("latitude") assert coord.has_lazy_points() assert coord.has_lazy_bounds() - assert_array_equal(coord.points, np.array([90., 90])) - assert_array_equal(coord.bounds, np.array([[89.5, 90.], [90., 90.]])) + assert_array_equal(coord.points, np.array([90.0, 90])) + assert_array_equal( + 
coord.bounds, np.array([[89.5, 90.0], [90.0, 90.0]]) + ) class TestO2(unittest.TestCase): @@ -53,13 +57,14 @@ class TestO2(unittest.TestCase): def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'HADGEM2-ES', 'Amon', 'o2'), - [O2(None), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "HADGEM2-ES", "Amon", "o2"), + [O2(None), AllVars(None), GenericFix(None)], + ) def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'HadGEM2-ES', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "HadGEM2-ES", "Amon", "cl") assert fix == [Cl(None), AllVars(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_inmcm4.py b/tests/integration/cmor/_fixes/cmip5/test_inmcm4.py index dcabbbf313..c4b4ba93e5 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_inmcm4.py +++ b/tests/integration/cmor/_fixes/cmip5/test_inmcm4.py @@ -1,4 +1,5 @@ """Tests for inmcm4 fixes.""" + import unittest from cf_units import Unit @@ -12,7 +13,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'inmcm4', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "inmcm4", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -26,19 +27,21 @@ class TestGpp(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='gpp', units='J') + self.cube = Cube([1.0], var_name="gpp", units="J") self.fix = Gpp(None) def test_get(self): """Test fix get.""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'INMCM4', 'Amon', 'gpp'), - [Gpp(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "INMCM4", "Amon", "gpp"), + [Gpp(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], -1) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestLai(unittest.TestCase): @@ -46,19 +49,21 @@ class TestLai(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='lai', units='J') + self.cube = Cube([1.0], var_name="lai", units="J") self.fix = Lai(None) def test_get(self): """Test fix get.""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'INMCM4', 'Amon', 'lai'), - [Lai(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "INMCM4", "Amon", "lai"), + [Lai(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 1.0 / 100.0) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestNbp(unittest.TestCase): @@ -66,18 +71,21 @@ class TestNbp(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='nbp') + self.cube = Cube([1.0], var_name="nbp") self.fix = Nbp(None) def test_get(self): """Test fix get.""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'INMCM4', 'Amon', 'nbp'), - [Nbp(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "INMCM4", "Amon", "nbp"), + [Nbp(None), GenericFix(None)], + ) def test_fix_metadata(self): """Test fix on nbp files to set standard_name.""" new_cube = self.fix.fix_metadata([self.cube])[0] self.assertEqual( new_cube.standard_name, - 'surface_net_downward_mass_flux_of_carbon_dioxide_' - 'expressed_as_carbon_due_to_all_land_processes') + "surface_net_downward_mass_flux_of_carbon_dioxide_" + "expressed_as_carbon_due_to_all_land_processes", + ) diff --git a/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5a_lr.py 
b/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5a_lr.py index 67f50c1ea0..d8c19c82b9 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5a_lr.py +++ b/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5a_lr.py @@ -1,4 +1,5 @@ """Test fixes for IPSL-CM5A-LR.""" + from esmvalcore.cmor._fixes.cmip5.ipsl_cm5a_lr import Cl from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'IPSL-CM5A-LR', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "IPSL-CM5A-LR", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5a_mr.py b/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5a_mr.py index 787a2d804a..7686409681 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5a_mr.py +++ b/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5a_mr.py @@ -1,4 +1,5 @@ """Test fixes for IPSL-CM5A-MR.""" + from esmvalcore.cmor._fixes.cmip5.ipsl_cm5a_mr import Cl from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'IPSL-CM5A-MR', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "IPSL-CM5A-MR", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5b_lr.py b/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5b_lr.py index 4a7b0a4476..de45b3615c 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5b_lr.py +++ b/tests/integration/cmor/_fixes/cmip5/test_ipsl_cm5b_lr.py @@ -1,4 +1,5 @@ """Test fixes for IPSL-CM5B-LR.""" + from esmvalcore.cmor._fixes.cmip5.ipsl_cm5b_lr import Cl from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'IPSL-CM5B-LR', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "IPSL-CM5B-LR", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_miroc5.py b/tests/integration/cmor/_fixes/cmip5/test_miroc5.py index 63a706eef7..aade2205e5 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_miroc5.py +++ b/tests/integration/cmor/_fixes/cmip5/test_miroc5.py @@ -1,4 +1,5 @@ """Tests for MIROC5.""" + import unittest import iris @@ -13,7 +14,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'MIROC5', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "MIROC5", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -24,34 +25,34 @@ def test_cl_fix(): def test_get_hur_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'MIROC5', 'Amon', 'hur') + fix = Fix.get_fixes("CMIP5", "MIROC5", "Amon", "hur") assert fix == [Hur(None), GenericFix(None)] def test_get_pr_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'MIROC5', 'Amon', 'pr') + fix = Fix.get_fixes("CMIP5", "MIROC5", "Amon", "pr") assert fix == [Pr(None), GenericFix(None)] @unittest.mock.patch( - 'esmvalcore.cmor._fixes.cmip5.miroc5.Tas.fix_metadata', - autospec=True) + "esmvalcore.cmor._fixes.cmip5.miroc5.Tas.fix_metadata", autospec=True +) def test_hur_fix_metadata(mock_base_fix_metadata): """Test ``fix_metadata`` for ``hur``.""" fix = Hur(None) - fix.fix_metadata('cubes') - 
mock_base_fix_metadata.assert_called_once_with(fix, 'cubes') + fix.fix_metadata("cubes") + mock_base_fix_metadata.assert_called_once_with(fix, "cubes") @unittest.mock.patch( - 'esmvalcore.cmor._fixes.cmip5.miroc5.Tas.fix_metadata', - autospec=True) + "esmvalcore.cmor._fixes.cmip5.miroc5.Tas.fix_metadata", autospec=True +) def test_pr_fix_metadata(mock_base_fix_metadata): """Test ``fix_metadata`` for ``pr``.""" fix = Pr(None) - fix.fix_metadata('cubes') - mock_base_fix_metadata.assert_called_once_with(fix, 'cubes') + fix.fix_metadata("cubes") + mock_base_fix_metadata.assert_called_once_with(fix, "cubes") class TestSftof(unittest.TestCase): @@ -59,19 +60,21 @@ class TestSftof(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='sftof', units='J') + self.cube = Cube([1.0], var_name="sftof", units="J") self.fix = Sftof(None) def test_get(self): """Test fix get.""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'MIROC5', 'Amon', 'sftof'), - [Sftof(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "MIROC5", "Amon", "sftof"), + [Sftof(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestTas(unittest.TestCase): @@ -79,17 +82,21 @@ class TestTas(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.coord_name = 'latitude' - self.coord = iris.coords.DimCoord([3.141592], - bounds=[[1.23, 4.5678910]], - standard_name=self.coord_name) + self.coord_name = "latitude" + self.coord = iris.coords.DimCoord( + [3.141592], + bounds=[[1.23, 4.5678910]], + standard_name=self.coord_name, + ) self.cube = Cube([1.0], dim_coords_and_dims=[(self.coord, 0)]) self.fix = Tas(None) def test_get(self): """Test fix get.""" - self.assertListEqual(Fix.get_fixes('CMIP5', 'MIROC5', 'Amon', 'tas'), - [Tas(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("CMIP5", "MIROC5", "Amon", "tas"), + [Tas(None), GenericFix(None)], + ) def test_fix_metadata(self): """Test metadata fix.""" diff --git a/tests/integration/cmor/_fixes/cmip5/test_miroc_esm.py b/tests/integration/cmor/_fixes/cmip5/test_miroc_esm.py index 1010e4e670..e93673d453 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_miroc_esm.py +++ b/tests/integration/cmor/_fixes/cmip5/test_miroc_esm.py @@ -1,4 +1,5 @@ """Test MIROC-ESM fixes.""" + import unittest import numpy as np @@ -16,7 +17,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'MIROC-ESM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "MIROC-ESM", "Amon", "cl") assert fix == [Cl(None), AllVars(None), GenericFix(None)] @@ -30,23 +31,22 @@ class TestCo2(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='co2', units='J') - self.vardef = get_var_info('CMIP5', 'Amon', self.cube.var_name) + self.cube = Cube([1.0], var_name="co2", units="J") + self.vardef = get_var_info("CMIP5", "Amon", self.cube.var_name) self.fix = Co2(self.vardef) def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'MIROC-ESM', 'Amon', 'co2'), - [Co2(self.vardef), - AllVars(self.vardef), - GenericFix(self.vardef)]) + Fix.get_fixes("CMIP5", "MIROC-ESM", "Amon", "co2"), + [Co2(self.vardef), AllVars(self.vardef), GenericFix(self.vardef)], + ) def test_fix_metadata(self): """Test unit fix.""" cube = self.fix.fix_metadata([self.cube])[0] self.assertEqual(cube.data[0], 
1) - self.assertEqual(cube.units, Unit('1e-6')) + self.assertEqual(cube.units, Unit("1e-6")) class TestTro3(unittest.TestCase): @@ -54,20 +54,21 @@ class TestTro3(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='tro3', units='J') + self.cube = Cube([1.0], var_name="tro3", units="J") self.fix = Tro3(None) def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'MIROC-ESM', 'Amon', 'tro3'), - [Tro3(None), AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "MIROC-ESM", "Amon", "tro3"), + [Tro3(None), AllVars(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 1000) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) class TestAll(unittest.TestCase): @@ -75,72 +76,81 @@ class TestAll(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([[1.0, 2.0], [3.0, 4.0]], var_name='co2', units='J') + self.cube = Cube([[1.0, 2.0], [3.0, 4.0]], var_name="co2", units="J") self.cube.add_dim_coord( - DimCoord([0, 1], - standard_name='time', - units=Unit('days since 0000-01-01 00:00:00', - calendar='gregorian')), 0) - self.cube.add_dim_coord(DimCoord([0, 1], long_name='AR5PL35'), 1) + DimCoord( + [0, 1], + standard_name="time", + units=Unit( + "days since 0000-01-01 00:00:00", calendar="gregorian" + ), + ), + 0, + ) + self.cube.add_dim_coord(DimCoord([0, 1], long_name="AR5PL35"), 1) - time_units = Unit('days since 1950-1-1 00:00:00', calendar='gregorian') + time_units = Unit("days since 1950-1-1 00:00:00", calendar="gregorian") # Setup wrong time coordinate that is present in some files # (-711860.5 days from 1950-01-01 is < year 1) time_coord = DimCoord( [-711845.0, -711814.0], bounds=[[-711860.5, -711829.5], [-711829.5, -711800.0]], - var_name='time', - standard_name='time', - long_name='time', + var_name="time", + standard_name="time", + long_name="time", units=time_units, ) - self.cube_with_wrong_time = Cube([0.0, 1.0], var_name='co2', - units='ppm', - dim_coords_and_dims=[(time_coord, 0)]) + self.cube_with_wrong_time = Cube( + [0.0, 1.0], + var_name="co2", + units="ppm", + dim_coords_and_dims=[(time_coord, 0)], + ) self.fix = AllVars(None) def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'MIROC-ESM', 'Amon', 'tos'), - [AllVars(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "MIROC-ESM", "Amon", "tos"), + [AllVars(None), GenericFix(None)], + ) def test_fix_metadata_plev(self): """Test plev fix.""" - time = self.cube.coord('time') + time = self.cube.coord("time") time.units = Unit("days since 1-1-1", time.units.calendar) cube = self.fix.fix_metadata([self.cube])[0] - cube.coord('air_pressure') + cube.coord("air_pressure") def test_fix_metadata_no_plev(self): """Test plev fix work with no plev.""" - self.cube.remove_coord('AR5PL35') + self.cube.remove_coord("AR5PL35") cube = self.fix.fix_metadata([self.cube])[0] with self.assertRaises(CoordinateNotFoundError): - cube.coord('air_pressure') + cube.coord("air_pressure") def test_fix_metadata_correct_time(self): """Test fix for time.""" fixed_cube = self.fix.fix_metadata([self.cube])[0] - time_coord = fixed_cube.coord('time') + time_coord = fixed_cube.coord("time") np.testing.assert_allclose(time_coord.points, [0, 1]) assert time_coord.bounds is None def test_fix_metadata_wrong_time(self): """Test fix for time.""" fixed_cube = self.fix.fix_metadata([self.cube_with_wrong_time])[0] - time_coord = 
fixed_cube.coord('time') + time_coord = fixed_cube.coord("time") np.testing.assert_allclose(time_coord.points, [-711841.5, -711810.5]) np.testing.assert_allclose( - time_coord.bounds, - [[-711857.0, -711826.0], [-711826.0, -711796.5]]) + time_coord.bounds, [[-711857.0, -711826.0], [-711826.0, -711796.5]] + ) def test_fix_metadata_wrong_time_no_bounds(self): """Test fix for time.""" - self.cube_with_wrong_time.coord('time').bounds = None + self.cube_with_wrong_time.coord("time").bounds = None fixed_cube = self.fix.fix_metadata([self.cube_with_wrong_time])[0] - time_coord = fixed_cube.coord('time') + time_coord = fixed_cube.coord("time") np.testing.assert_allclose(time_coord.points, [-711845.0, -711814.0]) assert time_coord.bounds is None diff --git a/tests/integration/cmor/_fixes/cmip5/test_miroc_esm_chem.py b/tests/integration/cmor/_fixes/cmip5/test_miroc_esm_chem.py index 1e14dfd30c..21624bcc44 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_miroc_esm_chem.py +++ b/tests/integration/cmor/_fixes/cmip5/test_miroc_esm_chem.py @@ -12,19 +12,21 @@ class TestTro3(unittest.TestCase): """Test tro3 fixes.""" + def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='tro3', units='J') + self.cube = Cube([1.0], var_name="tro3", units="J") self.fix = Tro3(None) def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'MIROC-ESM-CHEM', 'Amon', 'tro3'), - [Tro3(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "MIROC-ESM-CHEM", "Amon", "tro3"), + [Tro3(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 1000) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) diff --git a/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_lr.py b/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_lr.py index 07aa974d15..e63b6a2cd2 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_lr.py +++ b/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_lr.py @@ -1,4 +1,5 @@ """Test MPI-ESM-LR fixes.""" + import unittest from cf_units import Unit @@ -12,7 +13,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'MPI-ESM-LR', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "MPI-ESM-LR", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -26,17 +27,18 @@ class TestPctisccp2(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='pctisccp', units='J') + self.cube = Cube([1.0], var_name="pctisccp", units="J") self.fix = Pctisccp(None) def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'MPI-ESM-LR', 'Amon', 'pctisccp'), - [Pctisccp(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "MPI-ESM-LR", "Amon", "pctisccp"), + [Pctisccp(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('J')) + self.assertEqual(cube.units, Unit("J")) diff --git a/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_mr.py b/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_mr.py index 96cd83b963..5ecb86f01b 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_mr.py +++ b/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_mr.py @@ -1,4 +1,5 @@ """Test fixes for MPI-ESM-MR.""" + from esmvalcore.cmor._fixes.cmip5.mpi_esm_mr import Cl from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, 
GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'MPI-ESM-MR', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "MPI-ESM-MR", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_p.py b/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_p.py index 2466e5a9f8..8c7aeb09ab 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_p.py +++ b/tests/integration/cmor/_fixes/cmip5/test_mpi_esm_p.py @@ -1,4 +1,5 @@ """Test fixes for MPI-ESM-P.""" + from esmvalcore.cmor._fixes.cmip5.mpi_esm_p import Cl from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'MPI-ESM-P', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "MPI-ESM-P", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_mri_cgcm3.py b/tests/integration/cmor/_fixes/cmip5/test_mri_cgcm3.py index 717c66cee6..ff697d5d11 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_mri_cgcm3.py +++ b/tests/integration/cmor/_fixes/cmip5/test_mri_cgcm3.py @@ -1,4 +1,5 @@ """Test MRI-CGCM3 fixes.""" + import unittest from esmvalcore.cmor._fixes.cmip5.mri_cgcm3 import Cl, Msftmyz, ThetaO @@ -9,7 +10,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP5', 'MRI-CGCM3', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "MRI-CGCM3", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -24,8 +25,9 @@ class TestMsftmyz(unittest.TestCase): def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'MRI-CGCM3', 'Amon', 'msftmyz'), - [Msftmyz(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "MRI-CGCM3", "Amon", "msftmyz"), + [Msftmyz(None), GenericFix(None)], + ) class TestThetao(unittest.TestCase): @@ -34,5 +36,6 @@ class TestThetao(unittest.TestCase): def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'MRI-CGCM3', 'Amon', 'thetao'), - [ThetaO(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "MRI-CGCM3", "Amon", "thetao"), + [ThetaO(None), GenericFix(None)], + ) diff --git a/tests/integration/cmor/_fixes/cmip5/test_mri_esm1.py b/tests/integration/cmor/_fixes/cmip5/test_mri_esm1.py index f4d39eb70b..a847a30f15 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_mri_esm1.py +++ b/tests/integration/cmor/_fixes/cmip5/test_mri_esm1.py @@ -1,4 +1,5 @@ """Test MRI-ESM1 fixes.""" + import unittest from esmvalcore.cmor._fixes.cmip5.mri_esm1 import Msftmyz @@ -8,8 +9,10 @@ class TestMsftmyz(unittest.TestCase): """Test msftmyz fixes.""" + def test_get(self): """Test fix get""" self.assertListEqual( - Fix.get_fixes('CMIP5', 'MRI-ESM1', 'Amon', 'msftmyz'), - [Msftmyz(None), GenericFix(None)]) + Fix.get_fixes("CMIP5", "MRI-ESM1", "Amon", "msftmyz"), + [Msftmyz(None), GenericFix(None)], + ) diff --git a/tests/integration/cmor/_fixes/cmip5/test_noresm1_m.py b/tests/integration/cmor/_fixes/cmip5/test_noresm1_m.py index 1ad165c633..9793b9f7a1 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_noresm1_m.py +++ b/tests/integration/cmor/_fixes/cmip5/test_noresm1_m.py @@ -1,4 +1,5 @@ """Test fixes for NorESM1-M.""" + from esmvalcore.cmor._fixes.cmip5.noresm1_m import Cl from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of 
fix.""" - fix = Fix.get_fixes('CMIP5', 'NorESM1-M', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP5", "NorESM1-M", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip5/test_noresm1_me.py b/tests/integration/cmor/_fixes/cmip5/test_noresm1_me.py index b600311d5a..87d94760a1 100644 --- a/tests/integration/cmor/_fixes/cmip5/test_noresm1_me.py +++ b/tests/integration/cmor/_fixes/cmip5/test_noresm1_me.py @@ -1,4 +1,5 @@ """Tests for fixes of NorESM1-ME (CMIP5).""" + import iris import pytest from iris.cube import CubeList @@ -10,7 +11,7 @@ DIM_COORD_SHORT = iris.coords.DimCoord( [1.0, 2.0, 3.0], bounds=[[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]], - var_name='dim_coord', + var_name="dim_coord", ) DIM_COORD_LONG = iris.coords.DimCoord( [1.1234567891011, 2.1234567891011, 3.1234567891011], @@ -19,7 +20,7 @@ [1.51234567891011, 2.51234567891011], [2.51234567891011, 3.51234567891011], ], - var_name='dim_coord', + var_name="dim_coord", ) DIM_COORD_ROUNDED = iris.coords.DimCoord( [1.123456789101, 2.123456789101, 3.123456789101], @@ -28,11 +29,11 @@ [1.512345678910, 2.512345678910], [2.512345678910, 3.512345678910], ], - var_name='dim_coord', + var_name="dim_coord", ) AUX_COORD = iris.coords.AuxCoord( [1.1284712947128749498712, 2.12421841274128947982, 3.12787129852141124214], - var_name='aux_coord', + var_name="aux_coord", ) CUBE_IN_SHORT = iris.cube.Cube( @@ -54,12 +55,14 @@ CUBES_TO_FIX = [ (CubeList([CUBE_IN_SHORT]), CubeList([CUBE_IN_SHORT])), (CubeList([CUBE_IN_LONG]), CubeList([CUBE_OUT_LONG])), - (CubeList([CUBE_IN_LONG, - CUBE_IN_SHORT]), CubeList([CUBE_OUT_LONG, CUBE_IN_SHORT])), + ( + CubeList([CUBE_IN_LONG, CUBE_IN_SHORT]), + CubeList([CUBE_OUT_LONG, CUBE_IN_SHORT]), + ), ] -@pytest.mark.parametrize('cubes_in,cubes_out', CUBES_TO_FIX) +@pytest.mark.parametrize("cubes_in,cubes_out", CUBES_TO_FIX) def test_tas(cubes_in, cubes_out): """Test tas fixes.""" fix = Tas(None) @@ -70,6 +73,7 @@ def test_tas(cubes_in, cubes_out): def test_get(): """Test fix get""" - assert Fix.get_fixes('CMIP5', 'NORESM1-ME', 'Amon', 'tas') == [ - Tas(None), GenericFix(None) + assert Fix.get_fixes("CMIP5", "NORESM1-ME", "Amon", "tas") == [ + Tas(None), + GenericFix(None), ] diff --git a/tests/integration/cmor/_fixes/cmip6/test_access_cm2.py b/tests/integration/cmor/_fixes/cmip6/test_access_cm2.py index 00977403ea..d6bcb1c7d7 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_access_cm2.py +++ b/tests/integration/cmor/_fixes/cmip6/test_access_cm2.py @@ -1,4 +1,5 @@ """Tests for the fixes of ACCESS-CM2.""" + import unittest.mock import iris @@ -11,25 +12,91 @@ from esmvalcore.cmor.fix import Fix B_POINTS = [ - 0.997741281986237, 0.993982434272766, 0.988731920719147, - 0.982001721858978, 0.973807096481323, 0.964166879653931, - 0.953103065490723, 0.940641283988953, 0.926810503005981, - 0.911642968654633, 0.895174443721771, 0.877444267272949, - 0.858494758605957, 0.838372051715851, 0.81712543964386, - 0.7948077917099, 0.77147513628006, 0.747187197208405, - 0.722006916999817, 0.696000635623932, 0.669238269329071, - 0.641793012619019, 0.613741397857666, 0.585163474082947, - 0.556142747402191, 0.526765942573547, 0.49712336063385, - 0.467308610677719, 0.437418729066849, 0.40755420923233, - 0.377818822860718, 0.348319888114929, 0.319168090820312, - 0.290477395057678, 0.262365132570267, 0.234952658414841, - 0.20836341381073, 0.182725623250008, 0.158169254660606, - 0.134828746318817, 0.112841464579105, 0.0923482477664948, - 0.0734933465719223, 0.0564245767891407, 0.041294027119875, - 
0.028257654979825, 0.0174774676561356, 0.00912047084420919, - 0.00336169824004173, 0.000384818413294852, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0.997741281986237, + 0.993982434272766, + 0.988731920719147, + 0.982001721858978, + 0.973807096481323, + 0.964166879653931, + 0.953103065490723, + 0.940641283988953, + 0.926810503005981, + 0.911642968654633, + 0.895174443721771, + 0.877444267272949, + 0.858494758605957, + 0.838372051715851, + 0.81712543964386, + 0.7948077917099, + 0.77147513628006, + 0.747187197208405, + 0.722006916999817, + 0.696000635623932, + 0.669238269329071, + 0.641793012619019, + 0.613741397857666, + 0.585163474082947, + 0.556142747402191, + 0.526765942573547, + 0.49712336063385, + 0.467308610677719, + 0.437418729066849, + 0.40755420923233, + 0.377818822860718, + 0.348319888114929, + 0.319168090820312, + 0.290477395057678, + 0.262365132570267, + 0.234952658414841, + 0.20836341381073, + 0.182725623250008, + 0.158169254660606, + 0.134828746318817, + 0.112841464579105, + 0.0923482477664948, + 0.0734933465719223, + 0.0564245767891407, + 0.041294027119875, + 0.028257654979825, + 0.0174774676561356, + 0.00912047084420919, + 0.00336169824004173, + 0.000384818413294852, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, ] B_BOUNDS = [ [1, 0.995860934257507], @@ -82,25 +149,55 @@ [0.01296216994524, 0.00588912842795253], [0.00588912842795253, 0.00150532135739923], [0.00150532135739923, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], ] @pytest.fixture def cl_cubes(): """``cl`` cubes.""" - b_coord = iris.coords.AuxCoord(np.zeros_like(B_POINTS), - bounds=np.zeros_like(B_BOUNDS), - var_name='b') + b_coord = iris.coords.AuxCoord( + np.zeros_like(B_POINTS), bounds=np.zeros_like(B_BOUNDS), var_name="b" + ) cube = iris.cube.Cube( np.ones_like(B_POINTS), - var_name='cl', - standard_name='cloud_area_fraction_in_atmosphere_layer', - units='%', + var_name="cl", + standard_name="cloud_area_fraction_in_atmosphere_layer", + units="%", aux_coords_and_dims=[(b_coord, 0)], ) return iris.cube.CubeList([cube]) @@ -108,7 +205,7 @@ def cl_cubes(): def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'ACCESS-CM2', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "ACCESS-CM2", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -118,21 +215,23 @@ def test_cl_fix(): @unittest.mock.patch( - 'esmvalcore.cmor._fixes.cmip6.access_cm2.ClFixHybridHeightCoord.' - 'fix_metadata', autospec=True) + "esmvalcore.cmor._fixes.cmip6.access_cm2.ClFixHybridHeightCoord." 
+ "fix_metadata", + autospec=True, +) def test_cl_fix_metadata(mock_base_fix_metadata, cl_cubes): """Test ``fix_metadata`` for ``cl``.""" mock_base_fix_metadata.side_effect = lambda x, y: y fix = Cl(None) out_cube = fix.fix_metadata(cl_cubes)[0] - b_coord = out_cube.coord(var_name='b') + b_coord = out_cube.coord(var_name="b") np.testing.assert_allclose(b_coord.points, B_POINTS) np.testing.assert_allclose(b_coord.bounds, B_BOUNDS) def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'ACCESS-CM2', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "ACCESS-CM2", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -143,7 +242,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'ACCESS-CM2', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "ACCESS-CM2", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_access_esm1_5.py b/tests/integration/cmor/_fixes/cmip6/test_access_esm1_5.py index 29fb72c870..4216a2057f 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_access_esm1_5.py +++ b/tests/integration/cmor/_fixes/cmip6/test_access_esm1_5.py @@ -1,4 +1,5 @@ """Tests for the fixes of ACCESS-ESM1-5.""" + import unittest.mock import iris @@ -12,17 +13,44 @@ from esmvalcore.cmor.table import get_var_info B_POINTS = [ - 0.99771648645401, 0.990881502628326, 0.979542553424835, - 0.9637770652771, 0.943695485591888, 0.919438362121582, - 0.891178011894226, 0.859118342399597, 0.823493480682373, - 0.784570515155792, 0.742646217346191, 0.698050200939178, - 0.651142716407776, 0.602314412593842, 0.55198872089386, - 0.500619947910309, 0.44869339466095, 0.39672577381134, - 0.34526526927948, 0.294891387224197, 0.24621507525444, - 0.199878215789795, 0.156554222106934, 0.116947874426842, - 0.0817952379584312, 0.0518637150526047, 0.0279368180781603, - 0.0107164792716503, 0.00130179093685001, - 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0.99771648645401, + 0.990881502628326, + 0.979542553424835, + 0.9637770652771, + 0.943695485591888, + 0.919438362121582, + 0.891178011894226, + 0.859118342399597, + 0.823493480682373, + 0.784570515155792, + 0.742646217346191, + 0.698050200939178, + 0.651142716407776, + 0.602314412593842, + 0.55198872089386, + 0.500619947910309, + 0.44869339466095, + 0.39672577381134, + 0.34526526927948, + 0.294891387224197, + 0.24621507525444, + 0.199878215789795, + 0.156554222106934, + 0.116947874426842, + 0.0817952379584312, + 0.0518637150526047, + 0.0279368180781603, + 0.0107164792716503, + 0.00130179093685001, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, ] B_BOUNDS = [ [1, 0.994296252727509], @@ -54,22 +82,29 @@ [0.0389823913574219, 0.0183146875351667], [0.0183146875351667, 0.00487210927531123], [0.00487210927531123, 0], - [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], - [0, 0], [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], ] @pytest.fixture def cl_cubes(): """``cl`` cubes.""" - b_coord = iris.coords.AuxCoord(np.zeros_like(B_POINTS), - bounds=np.zeros_like(B_BOUNDS), - var_name='b') + b_coord = iris.coords.AuxCoord( + np.zeros_like(B_POINTS), bounds=np.zeros_like(B_BOUNDS), var_name="b" + ) cube = iris.cube.Cube( np.ones_like(B_POINTS), - var_name='cl', - standard_name='cloud_area_fraction_in_atmosphere_layer', - units='%', + var_name="cl", + standard_name="cloud_area_fraction_in_atmosphere_layer", + units="%", aux_coords_and_dims=[(b_coord, 0)], ) return iris.cube.CubeList([cube]) @@ -77,19 +112,21 @@ def 
cl_cubes(): def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "ACCESS-ESM1-5", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @unittest.mock.patch( - 'esmvalcore.cmor._fixes.cmip6.access_esm1_5.ClFixHybridHeightCoord.' - 'fix_metadata', autospec=True) + "esmvalcore.cmor._fixes.cmip6.access_esm1_5.ClFixHybridHeightCoord." + "fix_metadata", + autospec=True, +) def test_cl_fix_metadata(mock_base_fix_metadata, cl_cubes): """Test ``fix_metadata`` for ``cl``.""" mock_base_fix_metadata.side_effect = lambda x, y: y fix = Cl(None) out_cube = fix.fix_metadata(cl_cubes)[0] - b_coord = out_cube.coord(var_name='b') + b_coord = out_cube.coord(var_name="b") np.testing.assert_allclose(b_coord.points, B_POINTS) np.testing.assert_allclose(b_coord.bounds, B_BOUNDS) @@ -101,7 +138,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "ACCESS-ESM1-5", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -112,7 +149,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "ACCESS-ESM1-5", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -127,67 +164,79 @@ def cubes_with_wrong_air_pressure(): air_pressure_coord = iris.coords.DimCoord( [1000.09, 600.6, 200.0], bounds=[[1200.00001, 800], [800, 400.8], [400.8, 1.9]], - var_name='plev', - standard_name='air_pressure', - units='pa', + var_name="plev", + standard_name="air_pressure", + units="pa", ) hus_cube = iris.cube.Cube( [0.0, 1.0, 2.0], - var_name='hus', + var_name="hus", dim_coords_and_dims=[(air_pressure_coord, 0)], ) zg_cube = hus_cube.copy() - zg_cube.var_name = 'zg' + zg_cube.var_name = "zg" return iris.cube.CubeList([hus_cube, zg_cube]) def test_get_hus_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'hus') + fix = Fix.get_fixes("CMIP6", "ACCESS-ESM1-5", "Amon", "hus") assert fix == [Hus(None), GenericFix(None)] def test_hus_fix_metadata(cubes_with_wrong_air_pressure): """Test ``fix_metadata`` for ``hus``.""" - vardef = get_var_info('CMIP6', 'Amon', 'hus') + vardef = get_var_info("CMIP6", "Amon", "hus") fix = Hus(vardef) out_cubes = fix.fix_metadata(cubes_with_wrong_air_pressure) assert len(out_cubes) == 2 - hus_cube = out_cubes.extract_cube('hus') - zg_cube = out_cubes.extract_cube('zg') - assert hus_cube.var_name == 'hus' - assert zg_cube.var_name == 'zg' - np.testing.assert_allclose(hus_cube.coord('air_pressure').points, - [1000.0, 601.0, 200.0]) - np.testing.assert_allclose(hus_cube.coord('air_pressure').bounds, - [[1200.0, 800.0], [800.0, 401.0], [401.0, 2.0]]) - np.testing.assert_allclose(zg_cube.coord('air_pressure').points, - [1000.09, 600.6, 200.0]) - np.testing.assert_allclose(zg_cube.coord('air_pressure').bounds, - [[1200.00001, 800], [800, 400.8], [400.8, 1.9]]) + hus_cube = out_cubes.extract_cube("hus") + zg_cube = out_cubes.extract_cube("zg") + assert hus_cube.var_name == "hus" + assert zg_cube.var_name == "zg" + np.testing.assert_allclose( + hus_cube.coord("air_pressure").points, [1000.0, 601.0, 200.0] + ) + np.testing.assert_allclose( + hus_cube.coord("air_pressure").bounds, + [[1200.0, 800.0], [800.0, 401.0], [401.0, 2.0]], + ) + np.testing.assert_allclose( + zg_cube.coord("air_pressure").points, [1000.09, 600.6, 200.0] + ) + 
np.testing.assert_allclose( + zg_cube.coord("air_pressure").bounds, + [[1200.00001, 800], [800, 400.8], [400.8, 1.9]], + ) def test_get_zg_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'ACCESS-ESM1-5', 'Amon', 'zg') + fix = Fix.get_fixes("CMIP6", "ACCESS-ESM1-5", "Amon", "zg") assert fix == [Zg(None), GenericFix(None)] def test_zg_fix_metadata(cubes_with_wrong_air_pressure): """Test ``fix_metadata`` for ``zg``.""" - vardef = get_var_info('CMIP6', 'Amon', 'zg') + vardef = get_var_info("CMIP6", "Amon", "zg") fix = Zg(vardef) out_cubes = fix.fix_metadata(cubes_with_wrong_air_pressure) assert len(out_cubes) == 2 - hus_cube = out_cubes.extract_cube('hus') - zg_cube = out_cubes.extract_cube('zg') - assert hus_cube.var_name == 'hus' - assert zg_cube.var_name == 'zg' - np.testing.assert_allclose(hus_cube.coord('air_pressure').points, - [1000.09, 600.6, 200.0]) - np.testing.assert_allclose(hus_cube.coord('air_pressure').bounds, - [[1200.00001, 800], [800, 400.8], [400.8, 1.9]]) - np.testing.assert_allclose(zg_cube.coord('air_pressure').points, - [1000.0, 601.0, 200.0]) - np.testing.assert_allclose(zg_cube.coord('air_pressure').bounds, - [[1200.0, 800.0], [800.0, 401.0], [401.0, 2.0]]) + hus_cube = out_cubes.extract_cube("hus") + zg_cube = out_cubes.extract_cube("zg") + assert hus_cube.var_name == "hus" + assert zg_cube.var_name == "zg" + np.testing.assert_allclose( + hus_cube.coord("air_pressure").points, [1000.09, 600.6, 200.0] + ) + np.testing.assert_allclose( + hus_cube.coord("air_pressure").bounds, + [[1200.00001, 800], [800, 400.8], [400.8, 1.9]], + ) + np.testing.assert_allclose( + zg_cube.coord("air_pressure").points, [1000.0, 601.0, 200.0] + ) + np.testing.assert_allclose( + zg_cube.coord("air_pressure").bounds, + [[1200.0, 800.0], [800.0, 401.0], [401.0, 2.0]], + ) diff --git a/tests/integration/cmor/_fixes/cmip6/test_awi_cm_1_1_mr.py b/tests/integration/cmor/_fixes/cmip6/test_awi_cm_1_1_mr.py index 780b09bd7f..1aa5466471 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_awi_cm_1_1_mr.py +++ b/tests/integration/cmor/_fixes/cmip6/test_awi_cm_1_1_mr.py @@ -1,4 +1,5 @@ """Tests for the fixes of AWI-CM-1-1-MR.""" + import iris import pytest @@ -9,32 +10,33 @@ @pytest.fixture def cubes(): - correct_lat_coord = iris.coords.DimCoord([0.0], - var_name='lat', - standard_name='latitude', - long_name='latitude') - wrong_lat_coord = iris.coords.DimCoord([0.0], - var_name='latitudeCoord', - standard_name='latitude', - long_name='Latitude') - correct_lon_coord = iris.coords.DimCoord([0.0], - var_name='lon', - standard_name='longitude') + correct_lat_coord = iris.coords.DimCoord( + [0.0], var_name="lat", standard_name="latitude", long_name="latitude" + ) + wrong_lat_coord = iris.coords.DimCoord( + [0.0], + var_name="latitudeCoord", + standard_name="latitude", + long_name="Latitude", + ) + correct_lon_coord = iris.coords.DimCoord( + [0.0], var_name="lon", standard_name="longitude" + ) correct_cube = iris.cube.Cube( [[10.0]], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 1)], ) wrong_cube = iris.cube.Cube( [[10.0]], - var_name='ta', + var_name="ta", dim_coords_and_dims=[(wrong_lat_coord, 0), (correct_lon_coord, 1)], ) return iris.cube.CubeList([correct_cube, wrong_cube]) def test_get_allvars_fix(): - fix = Fix.get_fixes('CMIP6', 'AWI-CM-1-1-MR', 'Amon', 'wrong_lat_lname') + fix = Fix.get_fixes("CMIP6", "AWI-CM-1-1-MR", "Amon", "wrong_lat_lname") assert fix == [AllVars(None), GenericFix(None)] @@ -44,8 +46,8 @@ def 
test_allvars_fix_metadata(cubes): assert cubes is out_cubes for cube in out_cubes: try: - lat_coord = cube.coord('latitude') + lat_coord = cube.coord("latitude") except iris.exceptions.CoordinateNotFoundError: pass else: - assert lat_coord.long_name == 'latitude' + assert lat_coord.long_name == "latitude" diff --git a/tests/integration/cmor/_fixes/cmip6/test_awi_esm_1_1_lr.py b/tests/integration/cmor/_fixes/cmip6/test_awi_esm_1_1_lr.py index 07dd1f41ef..1738ee8c22 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_awi_esm_1_1_lr.py +++ b/tests/integration/cmor/_fixes/cmip6/test_awi_esm_1_1_lr.py @@ -1,4 +1,5 @@ """Tests for the fixes of AWI-ESM-1-1-LR.""" + import iris import pytest @@ -9,31 +10,35 @@ @pytest.fixture def sample_cubes(): - ta_cube = iris.cube.Cube([1.0], var_name='ta') - tas_cube = iris.cube.Cube([3.0], var_name='tas') + ta_cube = iris.cube.Cube([1.0], var_name="ta") + tas_cube = iris.cube.Cube([3.0], var_name="tas") return iris.cube.CubeList([ta_cube, tas_cube]) def test_get_tas_fix(): - fix = Fix.get_fixes('CMIP6', 'AWI-ESM-1-1-LR', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP6", "AWI-ESM-1-1-LR", "Amon", "tas") assert fix == [AllVars(None), GenericFix(None)] def test_allvars_fix_metadata(sample_cubes): for cube in sample_cubes: - cube.attributes['parent_time_units'] = 'days since 0001-01-01 00:00:00' + cube.attributes["parent_time_units"] = "days since 0001-01-01 00:00:00" out_cubes = AllVars(None).fix_metadata(sample_cubes) assert out_cubes is sample_cubes for cube in out_cubes: - assert cube.attributes[ - 'parent_time_units'] == 'days since 0001-01-01 00:00:00' + assert ( + cube.attributes["parent_time_units"] + == "days since 0001-01-01 00:00:00" + ) def test_allvars_no_need_tofix_metadata(sample_cubes): for cube in sample_cubes: - cube.attributes['parent_time_units'] = 'days since 0001-01-01 00:00:00' + cube.attributes["parent_time_units"] = "days since 0001-01-01 00:00:00" out_cubes = AllVars(None).fix_metadata(sample_cubes) assert out_cubes is sample_cubes for cube in out_cubes: - assert cube.attributes[ - 'parent_time_units'] == 'days since 0001-01-01 00:00:00' + assert ( + cube.attributes["parent_time_units"] + == "days since 0001-01-01 00:00:00" + ) diff --git a/tests/integration/cmor/_fixes/cmip6/test_bcc_csm2_mr.py b/tests/integration/cmor/_fixes/cmip6/test_bcc_csm2_mr.py index 5281953fa0..7d00014f59 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_bcc_csm2_mr.py +++ b/tests/integration/cmor/_fixes/cmip6/test_bcc_csm2_mr.py @@ -1,4 +1,5 @@ """Test fixes for BCC-CSM2-MR.""" + from esmvalcore.cmor._fixes.cmip6.bcc_csm2_mr import ( Areacello, Cl, @@ -17,7 +18,7 @@ def test_get_areacello_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-CSM2-MR', 'Amon', 'areacello') + fix = Fix.get_fixes("CMIP6", "BCC-CSM2-MR", "Amon", "areacello") assert fix == [Areacello(None), GenericFix(None)] @@ -28,7 +29,7 @@ def test_areacello_fix(): def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-CSM2-MR', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "BCC-CSM2-MR", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -39,7 +40,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-CSM2-MR', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "BCC-CSM2-MR", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -50,7 +51,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-CSM2-MR', 'Amon', 'clw') + fix = 
Fix.get_fixes("CMIP6", "BCC-CSM2-MR", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -61,7 +62,7 @@ def test_clw_fix(): def test_get_tos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-CSM2-MR', 'Omon', 'tos') + fix = Fix.get_fixes("CMIP6", "BCC-CSM2-MR", "Omon", "tos") assert fix == [Tos(None), GenericFix(None)] @@ -72,7 +73,7 @@ def test_tos_fix(): def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-CSM2-MR', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "BCC-CSM2-MR", "SImon", "siconc") assert fix == [Siconc(None), GenericFix(None)] @@ -83,7 +84,7 @@ def test_siconc_fix(): def test_get_sos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-CSM2-MR', 'Omon', 'sos') + fix = Fix.get_fixes("CMIP6", "BCC-CSM2-MR", "Omon", "sos") assert fix == [Sos(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_bcc_esm1.py b/tests/integration/cmor/_fixes/cmip6/test_bcc_esm1.py index 6840730241..7442d7fae7 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_bcc_esm1.py +++ b/tests/integration/cmor/_fixes/cmip6/test_bcc_esm1.py @@ -1,4 +1,5 @@ """Test fixes for BCC-ESM1.""" + from esmvalcore.cmor._fixes.cmip6.bcc_esm1 import ( Cl, Cli, @@ -17,7 +18,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-ESM1', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "BCC-ESM1", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -28,7 +29,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-ESM1', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "BCC-ESM1", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -39,7 +40,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-ESM1', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "BCC-ESM1", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -50,7 +51,7 @@ def test_clw_fix(): def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-ESM1', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "BCC-ESM1", "SImon", "siconc") assert fix == [Siconc(None), GenericFix(None)] @@ -61,7 +62,7 @@ def test_siconc_fix(): def test_get_so_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-ESM1', 'Omon', 'so') + fix = Fix.get_fixes("CMIP6", "BCC-ESM1", "Omon", "so") assert fix == [So(None), GenericFix(None)] @@ -72,7 +73,7 @@ def test_so_fix(): def test_get_sos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-ESM1', 'Omon', 'sos') + fix = Fix.get_fixes("CMIP6", "BCC-ESM1", "Omon", "sos") assert fix == [Sos(None), GenericFix(None)] @@ -83,7 +84,7 @@ def test_sos_fix(): def test_get_tos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-ESM1', 'Omon', 'tos') + fix = Fix.get_fixes("CMIP6", "BCC-ESM1", "Omon", "tos") assert fix == [Tos(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_cams_csm1_0.py b/tests/integration/cmor/_fixes/cmip6/test_cams_csm1_0.py index eb2367539c..e34692dbd4 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cams_csm1_0.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cams_csm1_0.py @@ -1,4 +1,5 @@ """Test fixes for CAMS-CSM1-0.""" + from esmvalcore.cmor._fixes.cmip6.cams_csm1_0 import Cl, Cli, Clw from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def 
test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CAMS-CSM1-0', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CAMS-CSM1-0", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -17,7 +18,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CAMS-CSM1-0', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "CAMS-CSM1-0", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -28,7 +29,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CAMS-CSM1-0', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "CAMS-CSM1-0", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_canesm5.py b/tests/integration/cmor/_fixes/cmip6/test_canesm5.py index 5f23af82f3..e14702382c 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_canesm5.py +++ b/tests/integration/cmor/_fixes/cmip6/test_canesm5.py @@ -1,4 +1,5 @@ """Tests for the fixes of CanESM5.""" + import iris import numpy as np import pytest @@ -10,7 +11,7 @@ def test_get_co2_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CanESM5', 'Amon', 'co2') + fix = Fix.get_fixes("CMIP6", "CanESM5", "Amon", "co2") assert fix == [Co2(None), GenericFix(None)] @@ -19,9 +20,9 @@ def co2_cube(): """``co2`` cube.""" cube = iris.cube.Cube( [1.0], - var_name='co2', - standard_name='mole_fraction_of_carbon_dioxide_in_air', - units='mol mol-1', + var_name="co2", + standard_name="mole_fraction_of_carbon_dioxide_in_air", + units="mol mol-1", ) return cube @@ -30,7 +31,7 @@ def test_co2_fix_data(co2_cube): """Test ``fix_data`` for ``co2``.""" fix = Co2(None) out_cube = fix.fix_data(co2_cube) - np.testing.assert_allclose(out_cube.data, [1.e-6]) + np.testing.assert_allclose(out_cube.data, [1.0e-6]) @pytest.fixture @@ -38,17 +39,17 @@ def gpp_cube(): """``gpp`` cube.""" cube = iris.cube.Cube( [0, 1], - var_name='gpp', - standard_name='gross_primary_productivity_of_biomass_expressed_as_' - 'carbon', - units='kg m-2 s-1', + var_name="gpp", + standard_name="gross_primary_productivity_of_biomass_expressed_as_" + "carbon", + units="kg m-2 s-1", ) return cube def test_get_gpp_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CanESM5', 'Lmon', 'gpp') + fix = Fix.get_fixes("CMIP6", "CanESM5", "Lmon", "gpp") assert fix == [Gpp(None), GenericFix(None)] @@ -56,6 +57,7 @@ def test_gpp_fix_data(gpp_cube): """Test ``fix_data`` for ``gpp``.""" fix = Gpp(None) out_cube = fix.fix_data(gpp_cube) - np.testing.assert_allclose(out_cube.data, - np.ma.masked_invalid([np.nan, 1])) + np.testing.assert_allclose( + out_cube.data, np.ma.masked_invalid([np.nan, 1]) + ) assert np.array_equal(out_cube.data.mask, [True, False]) diff --git a/tests/integration/cmor/_fixes/cmip6/test_canesm5_canoe.py b/tests/integration/cmor/_fixes/cmip6/test_canesm5_canoe.py index f0f43fd597..53eef288a3 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_canesm5_canoe.py +++ b/tests/integration/cmor/_fixes/cmip6/test_canesm5_canoe.py @@ -1,4 +1,5 @@ """Test fixes for CanESM5-CanOE.""" + from esmvalcore.cmor._fixes.cmip6.canesm5 import Co2 as BaseCo2 from esmvalcore.cmor._fixes.cmip6.canesm5 import Gpp as BaseGpp from esmvalcore.cmor._fixes.cmip6.canesm5_canoe import Co2, Gpp @@ -7,7 +8,7 @@ def test_get_co2_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CanESM5-CanOE', 'Amon', 'co2') + fix = Fix.get_fixes("CMIP6", "CanESM5-CanOE", "Amon", "co2") assert fix == [Co2(None), 
GenericFix(None)] @@ -18,7 +19,7 @@ def test_co2_fix(): def test_get_gpp_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CanESM5-CanOE', 'Lmon', 'gpp') + fix = Fix.get_fixes("CMIP6", "CanESM5-CanOE", "Lmon", "gpp") assert fix == [Gpp(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_cas_esm2_0.py b/tests/integration/cmor/_fixes/cmip6/test_cas_esm2_0.py index 2e22f91c1d..7c568d899b 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cas_esm2_0.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cas_esm2_0.py @@ -1,4 +1,5 @@ """Tests for the fixes of CAS-ESM2-0.""" + from esmvalcore.cmor._fixes.cmip6.cas_esm2_0 import Cl from esmvalcore.cmor._fixes.cmip6.ciesm import Cl as BaseCl from esmvalcore.cmor._fixes.fix import GenericFix @@ -7,7 +8,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CAS-ESM2-0', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CAS-ESM2-0", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_cesm2.py b/tests/integration/cmor/_fixes/cmip6/test_cesm2.py index e60e1727b2..1624a688ea 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cesm2.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cesm2.py @@ -1,4 +1,5 @@ """Tests for the fixes of CESM2.""" + import os import sys import unittest.mock @@ -26,69 +27,69 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CESM2", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] -AIR_PRESSURE_POINTS = np.array([[[[1.0, 1.0, 1.0, 1.0], - [1.0, 1.0, 1.0, 1.0], - [1.0, 1.0, 1.0, 1.0]], - [[2.0, 3.0, 4.0, 5.0], - [6.0, 7.0, 8.0, 9.0], - [10.0, 11.0, 12.0, 13.0]]]]) -AIR_PRESSURE_BOUNDS = np.array([[[[[0.0, 1.5], - [-1.0, 2.0], - [-2.0, 2.5], - [-3.0, 3.0]], - [[-4.0, 3.5], - [-5.0, 4.0], - [-6.0, 4.5], - [-7.0, 5.0]], - [[-8.0, 5.5], - [-9.0, 6.0], - [-10.0, 6.5], - [-11.0, 7.0]]], - [[[1.5, 3.0], - [2.0, 5.0], - [2.5, 7.0], - [3.0, 9.0]], - [[3.5, 11.0], - [4.0, 13.0], - [4.5, 15.0], - [5.0, 17.0]], - [[5.5, 19.0], - [6.0, 21.0], - [6.5, 23.0], - [7.0, 25.0]]]]]) +AIR_PRESSURE_POINTS = np.array( + [ + [ + [[1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0]], + [ + [2.0, 3.0, 4.0, 5.0], + [6.0, 7.0, 8.0, 9.0], + [10.0, 11.0, 12.0, 13.0], + ], + ] + ] +) +AIR_PRESSURE_BOUNDS = np.array( + [ + [ + [ + [[0.0, 1.5], [-1.0, 2.0], [-2.0, 2.5], [-3.0, 3.0]], + [[-4.0, 3.5], [-5.0, 4.0], [-6.0, 4.5], [-7.0, 5.0]], + [[-8.0, 5.5], [-9.0, 6.0], [-10.0, 6.5], [-11.0, 7.0]], + ], + [ + [[1.5, 3.0], [2.0, 5.0], [2.5, 7.0], [3.0, 9.0]], + [[3.5, 11.0], [4.0, 13.0], [4.5, 15.0], [5.0, 17.0]], + [[5.5, 19.0], [6.0, 21.0], [6.5, 23.0], [7.0, 25.0]], + ], + ] + ] +) @pytest.mark.sequential -@pytest.mark.skipif(sys.version_info < (3, 7, 6), - reason="requires python3.7.6 or newer") +@pytest.mark.skipif( + sys.version_info < (3, 7, 6), reason="requires python3.7.6 or newer" +) @unittest.mock.patch( - 'esmvalcore.cmor._fixes.cmip6.cesm2.Fix.get_fixed_filepath', - autospec=True) + "esmvalcore.cmor._fixes.cmip6.cesm2.Fix.get_fixed_filepath", autospec=True +) def test_cl_fix_file(mock_get_filepath, tmp_path, test_data_path): """Test ``fix_file`` for ``cl``.""" - nc_path = test_data_path / 'cesm2_cl.nc' + nc_path = test_data_path / "cesm2_cl.nc" cubes = iris.load(str(nc_path)) # Raw cubes assert len(cubes) == 5 var_names = [cube.var_name for cube in cubes] - assert 'cl' in var_names - assert 'a' in var_names - assert 
'b' in var_names - assert 'p0' in var_names - assert 'ps' in var_names + assert "cl" in var_names + assert "a" in var_names + assert "b" in var_names + assert "p0" in var_names + assert "ps" in var_names # Raw cl cube - raw_cube = cubes.extract_cube('cloud_area_fraction_in_atmosphere_layer') - assert not raw_cube.coords('air_pressure') + raw_cube = cubes.extract_cube("cloud_area_fraction_in_atmosphere_layer") + assert not raw_cube.coords("air_pressure") # Apply fix - mock_get_filepath.return_value = os.path.join(tmp_path, - 'fixed_cesm2_cl.nc') + mock_get_filepath.return_value = os.path.join( + tmp_path, "fixed_cesm2_cl.nc" + ) fix = Cl(None) fixed_file = fix.fix_file(nc_path, tmp_path) mock_get_filepath.assert_called_once_with( @@ -97,39 +98,56 @@ def test_cl_fix_file(mock_get_filepath, tmp_path, test_data_path): fixed_cubes = iris.load(fixed_file) assert len(fixed_cubes) == 2 var_names = [cube.var_name for cube in fixed_cubes] - assert 'cl' in var_names - assert 'ps' in var_names + assert "cl" in var_names + assert "ps" in var_names fixed_cl_cube = fixed_cubes.extract_cube( - 'cloud_area_fraction_in_atmosphere_layer') - fixed_air_pressure_coord = fixed_cl_cube.coord('air_pressure') + "cloud_area_fraction_in_atmosphere_layer" + ) + fixed_air_pressure_coord = fixed_cl_cube.coord("air_pressure") assert fixed_air_pressure_coord.points is not None assert fixed_air_pressure_coord.bounds is not None - np.testing.assert_allclose(fixed_air_pressure_coord.points, - AIR_PRESSURE_POINTS) - np.testing.assert_allclose(fixed_air_pressure_coord.bounds, - AIR_PRESSURE_BOUNDS) + np.testing.assert_allclose( + fixed_air_pressure_coord.points, AIR_PRESSURE_POINTS + ) + np.testing.assert_allclose( + fixed_air_pressure_coord.bounds, AIR_PRESSURE_BOUNDS + ) @pytest.fixture def cl_cubes(): """``cl`` cube.""" time_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='time', standard_name='time', - units='days since 1850-01-01 00:00:00') + [0.0, 1.0], + var_name="time", + standard_name="time", + units="days since 1850-01-01 00:00:00", + ) a_coord = iris.coords.AuxCoord( - [0.1, 0.2, 0.1], bounds=[[0.0, 0.15], [0.15, 0.25], [0.25, 0.0]], - var_name='a', units='1') + [0.1, 0.2, 0.1], + bounds=[[0.0, 0.15], [0.15, 0.25], [0.25, 0.0]], + var_name="a", + units="1", + ) b_coord = iris.coords.AuxCoord( - [0.9, 0.3, 0.1], bounds=[[1.0, 0.8], [0.8, 0.25], [0.25, 0.0]], - var_name='b', units='1') + [0.9, 0.3, 0.1], + bounds=[[1.0, 0.8], [0.8, 0.25], [0.25, 0.0]], + var_name="b", + units="1", + ) lev_coord = iris.coords.DimCoord( - [999.0, 99.0, 9.0], var_name='lev', - standard_name='atmosphere_hybrid_sigma_pressure_coordinate', - units='hPa', attributes={'positive': 'up'}) + [999.0, 99.0, 9.0], + var_name="lev", + standard_name="atmosphere_hybrid_sigma_pressure_coordinate", + units="hPa", + attributes={"positive": "up"}, + ) lat_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lat', standard_name='latitude', units='degrees') + [0.0, 1.0], var_name="lat", standard_name="latitude", units="degrees" + ) lon_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lon', standard_name='longitude', units='degrees') + [0.0, 1.0], var_name="lon", standard_name="longitude", units="degrees" + ) coord_specs = [ (time_coord, 0), (lev_coord, 1), @@ -138,9 +156,9 @@ def cl_cubes(): ] cube = iris.cube.Cube( np.arange(2 * 3 * 2 * 2).reshape(2, 3, 2, 2), - var_name='cl', - standard_name='cloud_area_fraction_in_atmosphere_layer', - units='%', + var_name="cl", + standard_name="cloud_area_fraction_in_atmosphere_layer", + units="%", 
dim_coords_and_dims=coord_specs, aux_coords_and_dims=[(a_coord, 1), (b_coord, 1)], ) @@ -149,21 +167,23 @@ def cl_cubes(): def test_cl_fix_metadata(cl_cubes): """Test ``fix_metadata`` for ``cl``.""" - vardef = get_var_info('CMIP6', 'Amon', 'cl') + vardef = get_var_info("CMIP6", "Amon", "cl") fix = Cl(vardef) out_cubes = fix.fix_metadata(cl_cubes) out_cube = out_cubes.extract_cube( - 'cloud_area_fraction_in_atmosphere_layer') - lev_coord = out_cube.coord(var_name='lev') - assert lev_coord.units == '1' + "cloud_area_fraction_in_atmosphere_layer" + ) + lev_coord = out_cube.coord(var_name="lev") + assert lev_coord.units == "1" np.testing.assert_allclose(lev_coord.points, [1.0, 0.5, 0.2]) - np.testing.assert_allclose(lev_coord.bounds, - [[1.0, 0.95], [0.95, 0.5], [0.5, 0.0]]) + np.testing.assert_allclose( + lev_coord.bounds, [[1.0, 0.95], [0.95, 0.5], [0.5, 0.0]] + ) def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "CESM2", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -174,7 +194,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "CESM2", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -187,12 +207,17 @@ def test_clw_fix(): def tas_cubes(): """Cubes to test fixes for ``tas``.""" time_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='time', standard_name='time', - units='days since 1850-01-01 00:00:00') + [0.0, 1.0], + var_name="time", + standard_name="time", + units="days since 1850-01-01 00:00:00", + ) lat_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lat', standard_name='latitude', units='degrees') + [0.0, 1.0], var_name="lat", standard_name="latitude", units="degrees" + ) lon_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lon', standard_name='longitude', units='degrees') + [0.0, 1.0], var_name="lon", standard_name="longitude", units="degrees" + ) coord_specs = [ (time_coord, 0), (lat_coord, 1), @@ -200,12 +225,12 @@ def tas_cubes(): ] ta_cube = iris.cube.Cube( np.ones((2, 2, 2)), - var_name='ta', + var_name="ta", dim_coords_and_dims=coord_specs, ) tas_cube = iris.cube.Cube( np.ones((2, 2, 2)), - var_name='tas', + var_name="tas", dim_coords_and_dims=coord_specs, ) @@ -216,12 +241,17 @@ def tas_cubes(): def tos_cubes(): """Cubes to test fixes for ``tos``.""" time_coord = iris.coords.DimCoord( - [0.0004, 1.09776], var_name='time', standard_name='time', - units='days since 1850-01-01 00:00:00') + [0.0004, 1.09776], + var_name="time", + standard_name="time", + units="days since 1850-01-01 00:00:00", + ) lat_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lat', standard_name='latitude', units='degrees') + [0.0, 1.0], var_name="lat", standard_name="latitude", units="degrees" + ) lon_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lon', standard_name='longitude', units='degrees') + [0.0, 1.0], var_name="lon", standard_name="longitude", units="degrees" + ) coord_specs = [ (time_coord, 0), (lat_coord, 1), @@ -229,11 +259,11 @@ def tos_cubes(): ] tos_cube = iris.cube.Cube( np.ones((2, 2, 2)), - var_name='tos', + var_name="tos", dim_coords_and_dims=coord_specs, ) tos_cube.attributes = {} - tos_cube.attributes['mipTable'] = 'Omon' + tos_cube.attributes["mipTable"] = "Omon" return iris.cube.CubeList([tos_cube]) @@ -242,16 +272,25 @@ def tos_cubes(): def thetao_cubes(): """Cubes to test fixes for ``thetao``.""" time_coord = iris.coords.DimCoord( - 
[0.0004, 1.09776], var_name='time', standard_name='time', - units='days since 1850-01-01 00:00:00') + [0.0004, 1.09776], + var_name="time", + standard_name="time", + units="days since 1850-01-01 00:00:00", + ) lat_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lat', standard_name='latitude', units='degrees') + [0.0, 1.0], var_name="lat", standard_name="latitude", units="degrees" + ) lon_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lon', standard_name='longitude', units='degrees') + [0.0, 1.0], var_name="lon", standard_name="longitude", units="degrees" + ) lev_coord = iris.coords.DimCoord( - [500.0, 1000.0], bounds=[[2.5, 7.5], [7.5, 12.5]], - var_name='lev', standard_name=None, units='cm', - attributes={'positive': 'up'}) + [500.0, 1000.0], + bounds=[[2.5, 7.5], [7.5, 12.5]], + var_name="lev", + standard_name=None, + units="cm", + attributes={"positive": "up"}, + ) coord_specs = [ (time_coord, 0), (lev_coord, 1), @@ -260,7 +299,7 @@ def thetao_cubes(): ] thetao_cube = iris.cube.Cube( np.ones((2, 2, 2, 2)), - var_name='thetao', + var_name="thetao", dim_coords_and_dims=coord_specs, ) return iris.cube.CubeList([thetao_cube]) @@ -268,31 +307,31 @@ def thetao_cubes(): def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP6", "CESM2", "Amon", "tas") assert fix == [Tas(None), GenericFix(None)] def test_get_tos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2', 'Omon', 'tos') + fix = Fix.get_fixes("CMIP6", "CESM2", "Omon", "tos") assert fix == [Tos(None), Omon(None), GenericFix(None)] def test_get_thetao_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2', 'Omon', 'thetao') + fix = Fix.get_fixes("CMIP6", "CESM2", "Omon", "thetao") assert fix == [Omon(None), GenericFix(None)] def test_get_fgco2_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2', 'Omon', 'fgco2') + fix = Fix.get_fixes("CMIP6", "CESM2", "Omon", "fgco2") assert fix == [Fgco2(None), Omon(None), GenericFix(None)] def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "CESM2", "SImon", "siconc") assert fix == [Siconc(None), GenericFix(None)] @@ -300,41 +339,43 @@ def test_tas_fix_metadata(tas_cubes): """Test ``fix_metadata`` for ``tas``.""" for cube in tas_cubes: with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube.coord('height') - height_coord = iris.coords.AuxCoord(2.0, - var_name='height', - standard_name='height', - long_name='height', - units=Unit('m'), - attributes={'positive': 'up'}) - vardef = get_var_info('CMIP6', 'Amon', 'tas') + cube.coord("height") + height_coord = iris.coords.AuxCoord( + 2.0, + var_name="height", + standard_name="height", + long_name="height", + units=Unit("m"), + attributes={"positive": "up"}, + ) + vardef = get_var_info("CMIP6", "Amon", "tas") fix = Tas(vardef) out_cubes = fix.fix_metadata(tas_cubes) assert out_cubes is tas_cubes for cube in out_cubes: assert cube.coord("longitude").has_bounds() assert cube.coord("latitude").has_bounds() - if cube.var_name == 'tas': - coord = cube.coord('height') + if cube.var_name == "tas": + coord = cube.coord("height") assert coord == height_coord else: with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube.coord('height') + cube.coord("height") def test_tos_fix_metadata(tos_cubes): """Test ``fix_metadata`` for ``tos``.""" - vardef = get_var_info('CMIP6', 'Omon', 'tos') + vardef = 
get_var_info("CMIP6", "Omon", "tos") fix = Tos(vardef) out_cubes = fix.fix_metadata(tos_cubes) assert out_cubes is tos_cubes for cube in out_cubes: - np.testing.assert_equal(cube.coord("time").points, [0., 1.1]) + np.testing.assert_equal(cube.coord("time").points, [0.0, 1.1]) def test_thetao_fix_metadata(thetao_cubes): """Test ``fix_metadata`` for ``thetao``.""" - vardef = get_var_info('CMIP6', 'Omon', 'thetao') + vardef = get_var_info("CMIP6", "Omon", "thetao") fix = Omon(vardef) out_cubes = fix.fix_metadata(thetao_cubes) assert out_cubes is thetao_cubes @@ -342,12 +383,12 @@ def test_thetao_fix_metadata(thetao_cubes): out_cube = out_cubes[0] # Check metadata of depth coordinate - depth_coord = out_cube.coord('depth') - assert depth_coord.standard_name == 'depth' - assert depth_coord.var_name == 'lev' - assert depth_coord.long_name == 'ocean depth coordinate' - assert depth_coord.units == 'm' - assert depth_coord.attributes == {'positive': 'down'} + depth_coord = out_cube.coord("depth") + assert depth_coord.standard_name == "depth" + assert depth_coord.var_name == "lev" + assert depth_coord.long_name == "ocean depth coordinate" + assert depth_coord.units == "m" + assert depth_coord.attributes == {"positive": "down"} # Check values of depth coordinate np.testing.assert_allclose(depth_coord.points, [5.0, 10.0]) @@ -356,10 +397,12 @@ def test_thetao_fix_metadata(thetao_cubes): def test_fgco2_fix_metadata(): """Test ``fix_metadata`` for ``fgco2``.""" - vardef = get_var_info('CMIP6', 'Omon', 'fgco2') - cubes = iris.cube.CubeList([ - iris.cube.Cube(0.0, var_name='fgco2'), - ]) + vardef = get_var_info("CMIP6", "Omon", "fgco2") + cubes = iris.cube.CubeList( + [ + iris.cube.Cube(0.0, var_name="fgco2"), + ] + ) fix = Fgco2(vardef) out_cubes = fix.fix_metadata(cubes) assert out_cubes is cubes @@ -367,12 +410,12 @@ def test_fgco2_fix_metadata(): out_cube = out_cubes[0] # Check depth coordinate - depth_coord = out_cube.coord('depth') - assert depth_coord.standard_name == 'depth' - assert depth_coord.var_name == 'depth' - assert depth_coord.long_name == 'depth' - assert depth_coord.units == 'm' - assert depth_coord.attributes == {'positive': 'down'} + depth_coord = out_cube.coord("depth") + assert depth_coord.standard_name == "depth" + assert depth_coord.var_name == "depth" + assert depth_coord.long_name == "depth" + assert depth_coord.units == "m" + assert depth_coord.attributes == {"positive": "down"} # Check values of depth coordinate np.testing.assert_allclose(depth_coord.points, 0.0) diff --git a/tests/integration/cmor/_fixes/cmip6/test_cesm2_fv2.py b/tests/integration/cmor/_fixes/cmip6/test_cesm2_fv2.py index 16d8a9c0b2..50e67e5d0f 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cesm2_fv2.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cesm2_fv2.py @@ -1,4 +1,5 @@ """Tests for the fixes of CESM2-FV2.""" + from esmvalcore.cmor._fixes.cmip6.cesm2 import Cl as BaseCl from esmvalcore.cmor._fixes.cmip6.cesm2 import Fgco2 as BaseFgco2 from esmvalcore.cmor._fixes.cmip6.cesm2 import Tas as BaseTas @@ -18,7 +19,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-FV2', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CESM2-FV2", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -29,7 +30,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-FV2', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CESM2-FV2", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -40,7 +41,7 @@ def 
test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-FV2', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "CESM2-FV2", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -51,7 +52,7 @@ def test_clw_fix(): def test_get_fgco2_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-FV2', 'Omon', 'fgco2') + fix = Fix.get_fixes("CMIP6", "CESM2-FV2", "Omon", "fgco2") assert fix == [Fgco2(None), Omon(None), GenericFix(None)] @@ -62,7 +63,7 @@ def test_fgco2_fix(): def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-FV2', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "CESM2-FV2", "SImon", "siconc") assert fix == [Siconc(None), GenericFix(None)] @@ -73,7 +74,7 @@ def test_siconc_fix(): def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-FV2', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP6", "CESM2-FV2", "Amon", "tas") assert fix == [Tas(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_cesm2_waccm.py b/tests/integration/cmor/_fixes/cmip6/test_cesm2_waccm.py index b80beb45fc..dd400c8ab1 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cesm2_waccm.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cesm2_waccm.py @@ -1,4 +1,5 @@ """Tests for the fixes of CESM2-WACCM.""" + import os import sys import unittest.mock @@ -26,7 +27,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -35,28 +36,31 @@ def test_cl_fix(): assert issubclass(Cl, BaseCl) -@pytest.mark.skipif(sys.version_info < (3, 7, 6), - reason="requires python3.7.6 or newer") +@pytest.mark.skipif( + sys.version_info < (3, 7, 6), reason="requires python3.7.6 or newer" +) @unittest.mock.patch( - 'esmvalcore.cmor._fixes.cmip6.cesm2.Fix.get_fixed_filepath', - autospec=True) + "esmvalcore.cmor._fixes.cmip6.cesm2.Fix.get_fixed_filepath", autospec=True +) def test_cl_fix_file(mock_get_filepath, tmp_path, test_data_path): """Test ``fix_file`` for ``cl``.""" - nc_path = test_data_path / 'cesm2_waccm_cl.nc' - mock_get_filepath.return_value = os.path.join(tmp_path, - 'fixed_cesm2_waccm_cl.nc') + nc_path = test_data_path / "cesm2_waccm_cl.nc" + mock_get_filepath.return_value = os.path.join( + tmp_path, "fixed_cesm2_waccm_cl.nc" + ) fix = Cl(None) fixed_file = fix.fix_file(nc_path, tmp_path) mock_get_filepath.assert_called_once_with( tmp_path, nc_path, add_unique_suffix=False ) fixed_cube = iris.load_cube(fixed_file) - lev_coord = fixed_cube.coord(var_name='lev') - a_coord = fixed_cube.coord(var_name='a') - b_coord = fixed_cube.coord(var_name='b') + lev_coord = fixed_cube.coord(var_name="lev") + a_coord = fixed_cube.coord(var_name="a") + b_coord = fixed_cube.coord(var_name="b") assert lev_coord.standard_name == ( - 'atmosphere_hybrid_sigma_pressure_coordinate') - assert lev_coord.units == '1' + "atmosphere_hybrid_sigma_pressure_coordinate" + ) + assert lev_coord.units == "1" np.testing.assert_allclose(a_coord.points, [1.0, 2.0]) np.testing.assert_allclose(a_coord.bounds, [[0.0, 1.5], [1.5, 3.0]]) np.testing.assert_allclose(b_coord.points, [0.0, 1.0]) @@ -65,7 +69,7 @@ def test_cl_fix_file(mock_get_filepath, tmp_path, test_data_path): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM", "Amon", "cli") 
assert fix == [Cli(None), GenericFix(None)] @@ -76,7 +80,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -87,7 +91,7 @@ def test_clw_fix(): def test_get_fgco2_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM', 'Omon', 'fgco2') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM", "Omon", "fgco2") assert fix == [Fgco2(None), Omon(None), GenericFix(None)] @@ -98,7 +102,7 @@ def test_fgco2_fix(): def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM", "SImon", "siconc") assert fix == [Siconc(None), GenericFix(None)] @@ -110,14 +114,14 @@ def test_siconc_fix(): @pytest.fixture def tas_cubes(): """Cubes to test fixes for ``tas``.""" - ta_cube = iris.cube.Cube([1.0], var_name='ta') - tas_cube = iris.cube.Cube([3.0], var_name='tas') + ta_cube = iris.cube.Cube([1.0], var_name="ta") + tas_cube = iris.cube.Cube([3.0], var_name="tas") return iris.cube.CubeList([ta_cube, tas_cube]) def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM", "Amon", "tas") assert fix == [Tas(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_cesm2_waccm_fv2.py b/tests/integration/cmor/_fixes/cmip6/test_cesm2_waccm_fv2.py index d4eb9b4b62..e61fec5745 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cesm2_waccm_fv2.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cesm2_waccm_fv2.py @@ -1,4 +1,5 @@ """Tests for the fixes of CESM2-WACCM-FV2.""" + from esmvalcore.cmor._fixes.cmip6.cesm2 import Fgco2 as BaseFgco2 from esmvalcore.cmor._fixes.cmip6.cesm2 import Tas as BaseTas from esmvalcore.cmor._fixes.cmip6.cesm2_waccm import Cl as BaseCl @@ -18,7 +19,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM-FV2', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM-FV2", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -29,7 +30,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM-FV2', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM-FV2", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -40,7 +41,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM-FV2', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM-FV2", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -51,7 +52,7 @@ def test_clw_fix(): def test_get_fgco2_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM-FV2', 'Omon', 'fgco2') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM-FV2", "Omon", "fgco2") assert fix == [Fgco2(None), Omon(None), GenericFix(None)] @@ -62,7 +63,7 @@ def test_fgco2_fix(): def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM-FV2', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM-FV2", "SImon", "siconc") assert fix == [Siconc(None), GenericFix(None)] @@ -73,7 +74,7 @@ def test_siconc_fix(): def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CESM2-WACCM-FV2', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP6", "CESM2-WACCM-FV2", "Amon", "tas") assert fix == 
[Tas(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_ciesm.py b/tests/integration/cmor/_fixes/cmip6/test_ciesm.py index 53fc57723b..cac46ecccb 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_ciesm.py +++ b/tests/integration/cmor/_fixes/cmip6/test_ciesm.py @@ -1,4 +1,5 @@ """Tests for the fixes of CIESM.""" + import iris.cube import numpy as np import pytest @@ -11,7 +12,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CIESM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CIESM", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -20,9 +21,9 @@ def cl_cube(): """``cl`` cube.""" cube = iris.cube.Cube( [1.0], - var_name='cl', - standard_name='cloud_area_fraction_in_atmosphere_layer', - units='%', + var_name="cl", + standard_name="cloud_area_fraction_in_atmosphere_layer", + units="%", ) return cube @@ -42,22 +43,22 @@ def test_cl_fix_data(cl_cube): def test_clt_fix(): """Test `Clt.fix_data`.""" cube = iris.cube.Cube(0.5) - fix = Fix.get_fixes('CMIP6', 'CIESM', 'Amon', 'clt')[0] + fix = Fix.get_fixes("CMIP6", "CIESM", "Amon", "clt")[0] out_cube = fix.fix_data(cube) np.testing.assert_allclose(out_cube.data, 50.0) - assert out_cube.units == '%' + assert out_cube.units == "%" def test_pr_fix(): """Test `Pr.fix_data`.""" cube = iris.cube.Cube( [2.82e-08], - var_name='pr', - units='kg m-2 s-1', + var_name="pr", + units="kg m-2 s-1", ) - fix = Fix.get_fixes('CMIP6', 'CIESM', 'Amon', 'pr')[0] + fix = Fix.get_fixes("CMIP6", "CIESM", "Amon", "pr")[0] out_cube = fix.fix_data(cube) assert out_cube.data == [2.82e-05] - assert out_cube.units == 'kg m-2 s-1' + assert out_cube.units == "kg m-2 s-1" diff --git a/tests/integration/cmor/_fixes/cmip6/test_cmcc_cm2_sr5.py b/tests/integration/cmor/_fixes/cmip6/test_cmcc_cm2_sr5.py index 27aa306289..472db3f812 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cmcc_cm2_sr5.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cmcc_cm2_sr5.py @@ -1,4 +1,5 @@ """Tests for the fixes of CMCC-CM2-SR5.""" + from unittest import mock import iris @@ -13,20 +14,21 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CMCC-CM2-SR5', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CMCC-CM2-SR5", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @pytest.fixture def cl_cubes(): """``cl`` cubes.""" - ps_coord = iris.coords.AuxCoord([0.0], var_name='ps', - standard_name='air_pressure') + ps_coord = iris.coords.AuxCoord( + [0.0], var_name="ps", standard_name="air_pressure" + ) cube = iris.cube.Cube( [1.0], - var_name='cl', - standard_name='cloud_area_fraction_in_atmosphere_layer', - units='%', + var_name="cl", + standard_name="cloud_area_fraction_in_atmosphere_layer", + units="%", aux_coords_and_dims=[(ps_coord, 0)], ) return iris.cube.CubeList([cube]) @@ -38,13 +40,15 @@ def test_cl_fix(): @mock.patch( - 'esmvalcore.cmor._fixes.cmip6.cmcc_cm2_sr5.ClFixHybridPressureCoord.' - 'fix_metadata', autospec=True) + "esmvalcore.cmor._fixes.cmip6.cmcc_cm2_sr5.ClFixHybridPressureCoord." 
+ "fix_metadata", + autospec=True, +) def test_cl_fix_metadata(mock_base_fix_metadata, cl_cubes): """Test ``fix_metadata`` for ``cl``.""" mock_base_fix_metadata.side_effect = lambda x, y: y - vardef = get_var_info('CMIP6', 'Amon', 'cl') + vardef = get_var_info("CMIP6", "Amon", "cl") fix = Cl(vardef) - assert cl_cubes[0].coord(var_name='ps').standard_name == 'air_pressure' + assert cl_cubes[0].coord(var_name="ps").standard_name == "air_pressure" out_cube = fix.fix_metadata(cl_cubes)[0] - assert out_cube.coord(var_name='ps').standard_name is None + assert out_cube.coord(var_name="ps").standard_name is None diff --git a/tests/integration/cmor/_fixes/cmip6/test_cnrm_cm6_1.py b/tests/integration/cmor/_fixes/cmip6/test_cnrm_cm6_1.py index 18ca8a3fa4..ab48a45f7e 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cnrm_cm6_1.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cnrm_cm6_1.py @@ -19,106 +19,113 @@ @pytest.fixture def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-CM6-1', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CNRM-CM6-1", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] -AIR_PRESSURE_POINTS = np.array([[[[1.0, 1.0], - [1.0, 1.0]], - [[2.0, 3.0], - [4.0, 5.0]], - [[5.0, 8.0], - [11.0, 14.0]]]]) -AIR_PRESSURE_BOUNDS = np.array([[[[[0.0, 1.5], - [-1.0, 2.0]], - [[-2.0, 2.5], - [-3.0, 3.0]]], - [[[1.5, 3.0], - [2.0, 5.0]], - [[2.5, 7.0], - [3.0, 9.0]]], - [[[3.0, 6.0], - [5.0, 11.0]], - [[7.0, 16.0], - [9.0, 21.0]]]]]) +AIR_PRESSURE_POINTS = np.array( + [ + [ + [[1.0, 1.0], [1.0, 1.0]], + [[2.0, 3.0], [4.0, 5.0]], + [[5.0, 8.0], [11.0, 14.0]], + ] + ] +) +AIR_PRESSURE_BOUNDS = np.array( + [ + [ + [[[0.0, 1.5], [-1.0, 2.0]], [[-2.0, 2.5], [-3.0, 3.0]]], + [[[1.5, 3.0], [2.0, 5.0]], [[2.5, 7.0], [3.0, 9.0]]], + [[[3.0, 6.0], [5.0, 11.0]], [[7.0, 16.0], [9.0, 21.0]]], + ] + ] +) def test_cl_fix_metadata(test_data_path): """Test ``fix_metadata`` for ``cl``.""" - nc_path = test_data_path / 'cnrm_cm6_1_cl.nc' + nc_path = test_data_path / "cnrm_cm6_1_cl.nc" cubes = iris.load(str(nc_path)) # Raw cubes assert len(cubes) == 6 var_names = [cube.var_name for cube in cubes] - assert 'cl' in var_names - assert 'ap' in var_names - assert 'ap_bnds' in var_names - assert 'b' in var_names - assert 'b_bnds' in var_names - assert 'ps' in var_names + assert "cl" in var_names + assert "ap" in var_names + assert "ap_bnds" in var_names + assert "b" in var_names + assert "b_bnds" in var_names + assert "ps" in var_names # Raw cl cube - cl_cube = cubes.extract_cube('cloud_area_fraction_in_atmosphere_layer') - assert not cl_cube.coords('air_pressure') + cl_cube = cubes.extract_cube("cloud_area_fraction_in_atmosphere_layer") + assert not cl_cube.coords("air_pressure") # Apply fix - vardef = get_var_info('CMIP6', 'Amon', 'cl') + vardef = get_var_info("CMIP6", "Amon", "cl") fix = Cl(vardef) fixed_cubes = fix.fix_metadata(cubes) assert len(fixed_cubes) == 1 fixed_cl_cube = fixed_cubes.extract_cube( - 'cloud_area_fraction_in_atmosphere_layer') - fixed_air_pressure_coord = fixed_cl_cube.coord('air_pressure') + "cloud_area_fraction_in_atmosphere_layer" + ) + fixed_air_pressure_coord = fixed_cl_cube.coord("air_pressure") assert fixed_air_pressure_coord.points is not None assert fixed_air_pressure_coord.bounds is not None assert fixed_air_pressure_coord.points.shape == (1, 3, 2, 2) assert fixed_air_pressure_coord.bounds.shape == (1, 3, 2, 2, 2) - np.testing.assert_allclose(fixed_air_pressure_coord.points, - AIR_PRESSURE_POINTS) - 
np.testing.assert_allclose(fixed_air_pressure_coord.bounds, - AIR_PRESSURE_BOUNDS) - lat_coord = fixed_cl_cube.coord('latitude') - lon_coord = fixed_cl_cube.coord('longitude') + np.testing.assert_allclose( + fixed_air_pressure_coord.points, AIR_PRESSURE_POINTS + ) + np.testing.assert_allclose( + fixed_air_pressure_coord.bounds, AIR_PRESSURE_BOUNDS + ) + lat_coord = fixed_cl_cube.coord("latitude") + lon_coord = fixed_cl_cube.coord("longitude") assert lat_coord.bounds is not None assert lon_coord.bounds is not None - np.testing.assert_allclose(lat_coord.bounds, - [[-45.0, -15.0], [-15.0, 15.0]]) - np.testing.assert_allclose(lon_coord.bounds, - [[15.0, 45.0], [45.0, 75.0]]) + np.testing.assert_allclose( + lat_coord.bounds, [[-45.0, -15.0], [-15.0, 15.0]] + ) + np.testing.assert_allclose(lon_coord.bounds, [[15.0, 45.0], [45.0, 75.0]]) def test_get_clcalipso_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-CM6-1', 'CFmon', 'clcalipso') + fix = Fix.get_fixes("CMIP6", "CNRM-CM6-1", "CFmon", "clcalipso") assert fix == [Clcalipso(None), GenericFix(None)] @pytest.fixture def clcalipso_cubes(): """Cubes to test fix for ``clcalipso``.""" - alt_40_coord = iris.coords.DimCoord([0.0], var_name='alt40') - cube = iris.cube.Cube([0.0], var_name='clcalipso', - dim_coords_and_dims=[(alt_40_coord.copy(), 0)]) - x_cube = iris.cube.Cube([0.0], var_name='x', - dim_coords_and_dims=[(alt_40_coord.copy(), 0)]) + alt_40_coord = iris.coords.DimCoord([0.0], var_name="alt40") + cube = iris.cube.Cube( + [0.0], + var_name="clcalipso", + dim_coords_and_dims=[(alt_40_coord.copy(), 0)], + ) + x_cube = iris.cube.Cube( + [0.0], var_name="x", dim_coords_and_dims=[(alt_40_coord.copy(), 0)] + ) return iris.cube.CubeList([cube, x_cube]) def test_clcalipso_fix_metadata(clcalipso_cubes): """Test ``fix_metadata`` for ``clcalipso``.""" - vardef = get_var_info('CMIP6', 'CFmon', 'clcalipso') + vardef = get_var_info("CMIP6", "CFmon", "clcalipso") fix = Clcalipso(vardef) cubes = fix.fix_metadata(clcalipso_cubes) assert len(cubes) == 1 cube = cubes[0] - coord = cube.coord('altitude') - assert coord.standard_name == 'altitude' + coord = cube.coord("altitude") + assert coord.standard_name == "altitude" def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-CM6-1', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "CNRM-CM6-1", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -129,7 +136,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-CM6-1', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "CNRM-CM6-1", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -140,5 +147,5 @@ def test_clw_fix(): def test_get_thetao_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-CM6-1', 'Omon', 'thetao') + fix = Fix.get_fixes("CMIP6", "CNRM-CM6-1", "Omon", "thetao") assert fix == [Omon(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_cnrm_cm6_1_hr.py b/tests/integration/cmor/_fixes/cmip6/test_cnrm_cm6_1_hr.py index 9ac3e8bad1..0e5311cc0e 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cnrm_cm6_1_hr.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cnrm_cm6_1_hr.py @@ -1,4 +1,5 @@ """Test fixes for CNRM-CM6-1-HR.""" + from esmvalcore.cmor._fixes.cmip6.cnrm_cm6_1 import Cl as BaseCl from esmvalcore.cmor._fixes.cmip6.cnrm_cm6_1 import Cli as BaseCli from esmvalcore.cmor._fixes.cmip6.cnrm_cm6_1 import Clw as BaseClw @@ -8,7 +9,7 @@ def test_get_cl_fix(): """Test getting of 
fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-CM6-1-HR', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CNRM-CM6-1-HR", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -19,7 +20,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-CM6-1-HR', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "CNRM-CM6-1-HR", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -30,7 +31,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-CM6-1-HR', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "CNRM-CM6-1-HR", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_cnrm_esm2_1.py b/tests/integration/cmor/_fixes/cmip6/test_cnrm_esm2_1.py index 3e6d66ebb9..dd7c8d6037 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cnrm_esm2_1.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cnrm_esm2_1.py @@ -1,4 +1,5 @@ """Test fixes for CNRM-ESM2-1.""" + import iris import numpy as np import pytest @@ -20,7 +21,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-ESM2-1', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "CNRM-ESM2-1", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -31,7 +32,7 @@ def test_cl_fix(): def test_get_clcalipso_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-ESM2-1', 'Amon', 'clcalipso') + fix = Fix.get_fixes("CMIP6", "CNRM-ESM2-1", "Amon", "clcalipso") assert fix == [Clcalipso(None), GenericFix(None)] @@ -42,7 +43,7 @@ def test_clcalipso_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-ESM2-1', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "CNRM-ESM2-1", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -53,7 +54,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-ESM2-1', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "CNRM-ESM2-1", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -66,16 +67,25 @@ def test_clw_fix(): def thetao_cubes(): """Cubes to test fixes for ``thetao``.""" time_coord = iris.coords.DimCoord( - [0.0004, 1.09776], var_name='time', standard_name='time', - units='days since 1850-01-01 00:00:00') + [0.0004, 1.09776], + var_name="time", + standard_name="time", + units="days since 1850-01-01 00:00:00", + ) lat_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lat', standard_name='latitude', units='degrees') + [0.0, 1.0], var_name="lat", standard_name="latitude", units="degrees" + ) lon_coord = iris.coords.DimCoord( - [0.0, 1.0], var_name='lon', standard_name='longitude', units='degrees') + [0.0, 1.0], var_name="lon", standard_name="longitude", units="degrees" + ) lev_coord = iris.coords.DimCoord( - [5.0, 10.0], bounds=[[2.5, 7.5], [7.5, 12.5]], - var_name='lev', standard_name=None, units='m', - attributes={'positive': 'up'}) + [5.0, 10.0], + bounds=[[2.5, 7.5], [7.5, 12.5]], + var_name="lev", + standard_name=None, + units="m", + attributes={"positive": "up"}, + ) coord_specs = [ (time_coord, 0), (lev_coord, 1), @@ -84,7 +94,7 @@ def thetao_cubes(): ] thetao_cube = iris.cube.Cube( np.ones((2, 2, 2, 2)), - var_name='thetao', + var_name="thetao", dim_coords_and_dims=coord_specs, ) return iris.cube.CubeList([thetao_cube]) @@ -92,13 +102,13 @@ def thetao_cubes(): def test_get_thetao_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'CNRM-ESM2-1', 'Omon', 'thetao') + fix = 
Fix.get_fixes("CMIP6", "CNRM-ESM2-1", "Omon", "thetao") assert fix == [Omon(None), GenericFix(None)] def test_thetao_fix_metadata(thetao_cubes): """Test ``fix_metadata`` for ``thetao``.""" - vardef = get_var_info('CMIP6', 'Omon', 'thetao') + vardef = get_var_info("CMIP6", "Omon", "thetao") fix = Omon(vardef) out_cubes = fix.fix_metadata(thetao_cubes) assert out_cubes is thetao_cubes @@ -106,9 +116,9 @@ def test_thetao_fix_metadata(thetao_cubes): out_cube = out_cubes[0] # Check metadata of depth coordinate - depth_coord = out_cube.coord('depth') - assert depth_coord.standard_name == 'depth' - assert depth_coord.var_name == 'lev' - assert depth_coord.long_name == 'ocean depth coordinate' - assert depth_coord.units == 'm' - assert depth_coord.attributes == {'positive': 'down'} + depth_coord = out_cube.coord("depth") + assert depth_coord.standard_name == "depth" + assert depth_coord.var_name == "lev" + assert depth_coord.long_name == "ocean depth coordinate" + assert depth_coord.units == "m" + assert depth_coord.attributes == {"positive": "down"} diff --git a/tests/integration/cmor/_fixes/cmip6/test_e3sm_1_0.py b/tests/integration/cmor/_fixes/cmip6/test_e3sm_1_0.py index 890c81fd6b..32b5f0b200 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_e3sm_1_0.py +++ b/tests/integration/cmor/_fixes/cmip6/test_e3sm_1_0.py @@ -1,4 +1,5 @@ """Tests for the fixes of E3SM-1-0.""" + from esmvalcore.cmor._fixes.cmip6.e3sm_1_0 import Cl from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import GenericFix @@ -7,7 +8,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'E3SM-1-0', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "E3SM-1-0", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_ec_earth3.py b/tests/integration/cmor/_fixes/cmip6/test_ec_earth3.py index 4f3d14a243..6eda4f9f5a 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_ec_earth3.py +++ b/tests/integration/cmor/_fixes/cmip6/test_ec_earth3.py @@ -1,4 +1,5 @@ """Tests for EC-Earth3.""" + import unittest import cf_units @@ -18,53 +19,61 @@ class TestSiconca(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='siconca', units='%') + self.cube = Cube([1.0], var_name="siconca", units="%") self.fix = Siconca(None) def test_get(self): """Test fix get.""" - assert Siconca(None) in Fix.get_fixes('CMIP6', 'EC-Earth3', 'SImon', - 'siconca') + assert Siconca(None) in Fix.get_fixes( + "CMIP6", "EC-Earth3", "SImon", "siconca" + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('%')) + self.assertEqual(cube.units, Unit("%")) @pytest.fixture def tas_cubes(): """Cubes to test fixes for ``tas``.""" - time_coord = iris.coords.DimCoord([0.0, 1.0], - var_name='time', - standard_name='time', - units='days since 1850-01-01 00:00:00') - lat_coord = iris.coords.DimCoord([0.0, 1.0], - bounds=[[-0.5, 0.5], [0.5, 1.5]], - var_name='lat', - standard_name='latitude', - units='degrees') + time_coord = iris.coords.DimCoord( + [0.0, 1.0], + var_name="time", + standard_name="time", + units="days since 1850-01-01 00:00:00", + ) + lat_coord = iris.coords.DimCoord( + [0.0, 1.0], + bounds=[[-0.5, 0.5], [0.5, 1.5]], + var_name="lat", + standard_name="latitude", + units="degrees", + ) lat_coord_to_round = iris.coords.DimCoord( [0.0000000001, 0.9999999999], bounds=[[-0.5000000001, 0.5000000001], 
[0.5000000001, 1.5000000001]], - var_name='lat', - standard_name='latitude', - units='degrees') - lon_coord = iris.coords.DimCoord([0.0, 1.0], - var_name='lon', - standard_name='longitude', - units='degrees') + var_name="lat", + standard_name="latitude", + units="degrees", + ) + lon_coord = iris.coords.DimCoord( + [0.0, 1.0], var_name="lon", standard_name="longitude", units="degrees" + ) tas_cube = iris.cube.Cube( np.ones((2, 2, 2)), - var_name='tas', + var_name="tas", dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], ) tas_cube_to_round = iris.cube.Cube( np.ones((2, 2, 2)), - var_name='tas', - dim_coords_and_dims=[(time_coord, 0), (lat_coord_to_round, 1), - (lon_coord, 2)], + var_name="tas", + dim_coords_and_dims=[ + (time_coord, 0), + (lat_coord_to_round, 1), + (lon_coord, 2), + ], ) return iris.cube.CubeList([tas_cube, tas_cube_to_round]) @@ -72,69 +81,70 @@ def tas_cubes(): def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'EC-Earth3', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP6", "EC-Earth3", "Amon", "tas") assert Tas(None) in fix def test_tas_fix_metadata(tas_cubes): """Test ``fix_metadata`` for ``tas``.""" - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = Tas(vardef) fixed_cubes = fix.fix_metadata(tas_cubes) - assert fixed_cubes[0].coord('latitude') == fixed_cubes[1].coord('latitude') + assert fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude") def test_get_allvars_fix(): """Test getting of fix.""" - fixes = Fix.get_fixes('CMIP6', 'EC-Earth3', 'Amon', 'tas') + fixes = Fix.get_fixes("CMIP6", "EC-Earth3", "Amon", "tas") assert AllVars(None) in fixes def test_allvars_r3i1p1f1_fix_calendar(): - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = AllVars(vardef) cube = iris.cube.Cube([1, 2]) - bad_unit = cf_units.Unit('days since 1850-01-01 00:00:00', 'gregorian') + bad_unit = cf_units.Unit("days since 1850-01-01 00:00:00", "gregorian") time_coord = iris.coords.DimCoord( [0.0, 1.0], - var_name='time', - standard_name='time', + var_name="time", + standard_name="time", units=bad_unit, ) cube.add_dim_coord(time_coord, 0) - cube.attributes['experiment_id'] = 'historical' - cube.attributes['variant_label'] = 'r3i1p1f1' + cube.attributes["experiment_id"] = "historical" + cube.attributes["variant_label"] = "r3i1p1f1" fixed_cubes = fix.fix_metadata([cube]) - good_unit = cf_units.Unit('days since 1850-01-01 00:00:00', - 'proleptic_gregorian') - assert fixed_cubes[0].coord('time').units == good_unit + good_unit = cf_units.Unit( + "days since 1850-01-01 00:00:00", "proleptic_gregorian" + ) + assert fixed_cubes[0].coord("time").units == good_unit def test_allvars_r3i1p1f1_fix_latitude(): lat_coord1 = iris.coords.DimCoord( [-71.22775], - var_name='lat', - standard_name='latitude', - units='degrees', + var_name="lat", + standard_name="latitude", + units="degrees", ) lat_coord2 = iris.coords.DimCoord( [-71.22774993], - var_name='lat', - standard_name='latitude', - units='degrees', + var_name="lat", + standard_name="latitude", + units="degrees", ) cube1 = iris.cube.Cube([0]) - cube1.attributes['variant_label'] = 'r3i1p1f1' + cube1.attributes["variant_label"] = "r3i1p1f1" cube1.add_dim_coord(lat_coord1, 0) cube2 = iris.cube.Cube([0]) - cube2.attributes['variant_label'] = 'r3i1p1f1' + cube2.attributes["variant_label"] = "r3i1p1f1" cube2.add_dim_coord(lat_coord2, 0) fix = AllVars(None) fixed_cubes = fix.fix_metadata([cube1, cube2]) - 
assert fixed_cubes[0].coord('latitude').points[0] == -71.228 - assert fixed_cubes[1].coord('latitude').points[0] == -71.228 + assert fixed_cubes[0].coord("latitude").points[0] == -71.228 + assert fixed_cubes[1].coord("latitude").points[0] == -71.228 diff --git a/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_veg.py b/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_veg.py index b9145e6e87..90f4d96ac1 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_veg.py +++ b/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_veg.py @@ -1,4 +1,5 @@ """Tests for EC-Earth3-Veg.""" + import unittest import cf_units @@ -24,85 +25,94 @@ class TestSiconca(unittest.TestCase): def setUp(self): """Prepare tests.""" - self.cube = Cube([1.0], var_name='siconca', units='%') + self.cube = Cube([1.0], var_name="siconca", units="%") self.fix = Siconca(None) def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('CMIP6', 'EC-Earth3-Veg', 'SImon', 'siconca'), - [Siconca(None), GenericFix(None)]) + Fix.get_fixes("CMIP6", "EC-Earth3-Veg", "SImon", "siconca"), + [Siconca(None), GenericFix(None)], + ) def test_fix_data(self): """Test data fix.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 100) - self.assertEqual(cube.units, Unit('%')) + self.assertEqual(cube.units, Unit("%")) def test_get_siconc_fix(): """Test sinconc calendar is fixed.""" - fix = Fix.get_fixes('CMIP6', 'EC-Earth3-Veg', 'SImon', 'siconc')[0] + fix = Fix.get_fixes("CMIP6", "EC-Earth3-Veg", "SImon", "siconc")[0] assert isinstance(fix, CalendarFix) def test_get_tos_fix(): """Test tos calendar is fixed.""" - fix = Fix.get_fixes('CMIP6', 'EC-Earth3-Veg', 'Omon', 'tos')[0] + fix = Fix.get_fixes("CMIP6", "EC-Earth3-Veg", "Omon", "tos")[0] assert isinstance(fix, CalendarFix) def test_siconc_fix_calendar(): - vardef = get_var_info('CMIP6', 'SImon', 'siconc') + vardef = get_var_info("CMIP6", "SImon", "siconc") fix = Siconc(vardef) cube = iris.cube.Cube([1, 2]) - bad_unit = cf_units.Unit('days since 1850-01-01 00:00:00', 'gregorian') + bad_unit = cf_units.Unit("days since 1850-01-01 00:00:00", "gregorian") time_coord = iris.coords.DimCoord( [0.0, 1.0], - var_name='time', - standard_name='time', + var_name="time", + standard_name="time", units=bad_unit, ) cube.add_dim_coord(time_coord, 0) fixed_cubes = fix.fix_metadata([cube]) - good_unit = cf_units.Unit('days since 1850-01-01 00:00:00', - 'proleptic_gregorian') - assert fixed_cubes[0].coord('time').units == good_unit + good_unit = cf_units.Unit( + "days since 1850-01-01 00:00:00", "proleptic_gregorian" + ) + assert fixed_cubes[0].coord("time").units == good_unit @pytest.fixture def tas_cubes(): """Cubes to test fixes for ``tas``.""" - time_coord = iris.coords.DimCoord([0.0, 1.0], - var_name='time', - standard_name='time', - units='days since 1850-01-01 00:00:00') - lat_coord = iris.coords.DimCoord([0.0, 1.0], - bounds=[[-0.5, 0.5], [0.5, 1.5]], - var_name='lat', - standard_name='latitude', - units='degrees') + time_coord = iris.coords.DimCoord( + [0.0, 1.0], + var_name="time", + standard_name="time", + units="days since 1850-01-01 00:00:00", + ) + lat_coord = iris.coords.DimCoord( + [0.0, 1.0], + bounds=[[-0.5, 0.5], [0.5, 1.5]], + var_name="lat", + standard_name="latitude", + units="degrees", + ) lat_coord_to_round = iris.coords.DimCoord( [0.0000000001, 0.9999999999], bounds=[[-0.5000000001, 0.5000000001], [0.5000000001, 1.5000000001]], - var_name='lat', - standard_name='latitude', - units='degrees') - lon_coord = iris.coords.DimCoord([0.0, 1.0], - 
var_name='lon', - standard_name='longitude', - units='degrees') + var_name="lat", + standard_name="latitude", + units="degrees", + ) + lon_coord = iris.coords.DimCoord( + [0.0, 1.0], var_name="lon", standard_name="longitude", units="degrees" + ) tas_cube = iris.cube.Cube( np.ones((2, 2, 2)), - var_name='tas', + var_name="tas", dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], ) tas_cube_to_round = iris.cube.Cube( np.ones((2, 2, 2)), - var_name='tas', - dim_coords_and_dims=[(time_coord, 0), (lat_coord_to_round, 1), - (lon_coord, 2)], + var_name="tas", + dim_coords_and_dims=[ + (time_coord, 0), + (lat_coord_to_round, 1), + (lon_coord, 2), + ], ) return iris.cube.CubeList([tas_cube, tas_cube_to_round]) @@ -110,13 +120,13 @@ def tas_cubes(): def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'EC-Earth3-Veg', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP6", "EC-Earth3-Veg", "Amon", "tas") assert fix == [Tas(None), GenericFix(None)] def test_tas_fix_metadata(tas_cubes): """Test ``fix_metadata`` for ``tas``.""" - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = Tas(vardef) fixed_cubes = fix.fix_metadata(tas_cubes) - assert fixed_cubes[0].coord('latitude') == fixed_cubes[1].coord('latitude') + assert fixed_cubes[0].coord("latitude") == fixed_cubes[1].coord("latitude") diff --git a/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_veg_lr.py b/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_veg_lr.py index b3f7963a7d..d856596d66 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_veg_lr.py +++ b/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_veg_lr.py @@ -1,4 +1,5 @@ """Test fixes for EC-Earth3-Veg-LR.""" + from esmvalcore.cmor._fixes.cmip6.ec_earth3_veg_lr import Siconc from esmvalcore.cmor._fixes.common import OceanFixGrid from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'EC-Earth3-Veg-LR', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "EC-Earth3-Veg-LR", "SImon", "siconc") assert fix == [Siconc(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_fgoals_f3_l.py b/tests/integration/cmor/_fixes/cmip6/test_fgoals_f3_l.py index 7e9aa38d06..2d564430df 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_fgoals_f3_l.py +++ b/tests/integration/cmor/_fixes/cmip6/test_fgoals_f3_l.py @@ -15,67 +15,80 @@ def cubes(): correct_time_coord = iris.coords.DimCoord( [15.5, 45, 74.5], - bounds=[[0., 31.], [31., 59.], [59., 90.]], - var_name='time', - standard_name='time', - units=Unit('days since 0001-01-01 00:00:00', calendar='365_day')) + bounds=[[0.0, 31.0], [31.0, 59.0], [59.0, 90.0]], + var_name="time", + standard_name="time", + units=Unit("days since 0001-01-01 00:00:00", calendar="365_day"), + ) wrong_time_coord = iris.coords.DimCoord( [15.5, 45, 74.5], - bounds=[[5.5, 25.5], [35., 55.], [64.5, 84.5]], - var_name='time', - standard_name='time', - units=Unit('days since 0001-01-01 00:00:00', calendar='365_day')) + bounds=[[5.5, 25.5], [35.0, 55.0], [64.5, 84.5]], + var_name="time", + standard_name="time", + units=Unit("days since 0001-01-01 00:00:00", calendar="365_day"), + ) correct_lat_coord = iris.coords.DimCoord( [0.0, 1.0], bounds=[[-0.5, 0.5], [0.5, 1.5]], - var_name='lat', - standard_name='latitude', - units='degrees') + var_name="lat", + standard_name="latitude", + units="degrees", + ) wrong_lat_coord = iris.coords.DimCoord( [0.0, 1.0], - 
bounds=[[-0.5, 0.5], [1.5, 2.]], - var_name='lat', - standard_name='latitude', - units='degrees') + bounds=[[-0.5, 0.5], [1.5, 2.0]], + var_name="lat", + standard_name="latitude", + units="degrees", + ) correct_lon_coord = iris.coords.DimCoord( [0.0, 1.0], bounds=[[-0.5, 0.5], [0.5, 1.5]], - var_name='lon', - standard_name='longitude', - units='degrees') + var_name="lon", + standard_name="longitude", + units="degrees", + ) wrong_lon_coord = iris.coords.DimCoord( [0.0, 1.0], - bounds=[[-0.5, 0.5], [1.5, 2.]], - var_name='lon', - standard_name='longitude', - units='degrees') - - correct_cube = iris.cube.Cube(10 * np.ones((3, 2, 2)), - var_name='tas', - dim_coords_and_dims=[(correct_time_coord, 0), - (correct_lat_coord, 1), - (correct_lon_coord, 2) - ], - attributes={'table_id': 'Amon'}, - units=Unit('degC')) - - wrong_cube = iris.cube.Cube(10 * np.ones((3, 2, 2)), - var_name='tas', - dim_coords_and_dims=[(wrong_time_coord, 0), - (wrong_lat_coord, 1), - (wrong_lon_coord, 2)], - attributes={'table_id': 'Amon'}, - units=Unit('degC')) + bounds=[[-0.5, 0.5], [1.5, 2.0]], + var_name="lon", + standard_name="longitude", + units="degrees", + ) + + correct_cube = iris.cube.Cube( + 10 * np.ones((3, 2, 2)), + var_name="tas", + dim_coords_and_dims=[ + (correct_time_coord, 0), + (correct_lat_coord, 1), + (correct_lon_coord, 2), + ], + attributes={"table_id": "Amon"}, + units=Unit("degC"), + ) + + wrong_cube = iris.cube.Cube( + 10 * np.ones((3, 2, 2)), + var_name="tas", + dim_coords_and_dims=[ + (wrong_time_coord, 0), + (wrong_lat_coord, 1), + (wrong_lon_coord, 2), + ], + attributes={"table_id": "Amon"}, + units=Unit("degC"), + ) return iris.cube.CubeList([correct_cube, wrong_cube]) def test_get_allvars_fix(): - fix = Fix.get_fixes('CMIP6', 'FGOALS-f3-L', 'Amon', 'wrong_time_bnds') + fix = Fix.get_fixes("CMIP6", "FGOALS-f3-L", "Amon", "wrong_time_bnds") assert fix == [AllVars(None), GenericFix(None)] @@ -84,9 +97,9 @@ def test_allvars_fix_metadata(cubes): out_cubes = fix.fix_metadata(cubes) assert cubes is out_cubes for cube in out_cubes: - time = cube.coord('time') - lat = cube.coord('latitude') - lon = cube.coord('longitude') + time = cube.coord("time") + lat = cube.coord("latitude") + lon = cube.coord("longitude") assert all(time.bounds[1:, 0] == time.bounds[:-1, 1]) assert all(lat.bounds[1:, 0] == lat.bounds[:-1, 1]) assert all(lon.bounds[1:, 0] == lon.bounds[:-1, 1]) @@ -99,7 +112,7 @@ def test_tos_fix(): def test_get_clt_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'FGOALS-f3-l', 'Amon', 'clt') + fix = Fix.get_fixes("CMIP6", "FGOALS-f3-l", "Amon", "clt") assert fix == [Clt(None), AllVars(None), GenericFix(None)] @@ -108,9 +121,9 @@ def clt_cube(): """``clt`` cube.""" cube = iris.cube.Cube( [1.0], - var_name='clt', - standard_name='cloud_area_fraction', - units='%', + var_name="clt", + standard_name="cloud_area_fraction", + units="%", ) return cube @@ -124,7 +137,7 @@ def test_clt_fix_data(clt_cube): def test_get_sftlf_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'FGOALS-f3-l', 'Amon', 'sftlf') + fix = Fix.get_fixes("CMIP6", "FGOALS-f3-l", "Amon", "sftlf") assert fix == [Sftlf(None), AllVars(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_fgoals_g3.py b/tests/integration/cmor/_fixes/cmip6/test_fgoals_g3.py index 8bb9457021..eb16a4d2ba 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_fgoals_g3.py +++ b/tests/integration/cmor/_fixes/cmip6/test_fgoals_g3.py @@ -1,4 +1,5 @@ """Tests for the fixes of FGOALS-g3.""" + from unittest 
import mock import iris @@ -13,7 +14,7 @@ def test_get_tos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-CSM2-MR', 'Omon', 'tos') + fix = Fix.get_fixes("CMIP6", "BCC-CSM2-MR", "Omon", "tos") assert fix == [Tos(None), GenericFix(None)] @@ -22,41 +23,47 @@ def test_tos_fix(): assert issubclass(Tos, OceanFixGrid) -@mock.patch('esmvalcore.cmor._fixes.cmip6.fgoals_g3.OceanFixGrid.fix_metadata', - autospec=True) +@mock.patch( + "esmvalcore.cmor._fixes.cmip6.fgoals_g3.OceanFixGrid.fix_metadata", + autospec=True, +) def test_tos_fix_metadata(mock_base_fix_metadata): """Test ``fix_metadata`` for ``tos``.""" mock_base_fix_metadata.side_effect = lambda x, y: y # Create test cube - lat_coord = iris.coords.AuxCoord([3.14, 1200.0, 6.28], - var_name='lat', - standard_name='latitude') - lon_coord = iris.coords.AuxCoord([1.0, 2.0, 1e30], - var_name='lon', - standard_name='longitude') - cube = iris.cube.Cube([1.0, 2.0, 3.0], - var_name='tos', - standard_name='sea_surface_temperature', - aux_coords_and_dims=[(lat_coord, 0), (lon_coord, 0)]) + lat_coord = iris.coords.AuxCoord( + [3.14, 1200.0, 6.28], var_name="lat", standard_name="latitude" + ) + lon_coord = iris.coords.AuxCoord( + [1.0, 2.0, 1e30], var_name="lon", standard_name="longitude" + ) + cube = iris.cube.Cube( + [1.0, 2.0, 3.0], + var_name="tos", + standard_name="sea_surface_temperature", + aux_coords_and_dims=[(lat_coord, 0), (lon_coord, 0)], + ) cubes = iris.cube.CubeList([cube]) # Apply fix - vardef = get_var_info('CMIP6', 'Omon', 'tos') + vardef = get_var_info("CMIP6", "Omon", "tos") fix = Tos(vardef) fixed_cubes = fix.fix_metadata(cubes) assert len(fixed_cubes) == 1 fixed_cube = fixed_cubes[0] np.testing.assert_allclose( - fixed_cube.coord('latitude').points, [3.14, 0.0, 6.28]) + fixed_cube.coord("latitude").points, [3.14, 0.0, 6.28] + ) np.testing.assert_allclose( - fixed_cube.coord('longitude').points, [1.0, 2.0, 0.0]) + fixed_cube.coord("longitude").points, [1.0, 2.0, 0.0] + ) mock_base_fix_metadata.assert_called_once_with(fix, cubes) def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'BCC-CSM2-MR', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "BCC-CSM2-MR", "SImon", "siconc") assert fix == [Siconc(None), GenericFix(None)] @@ -67,7 +74,7 @@ def test_siconc_fix(): def test_get_mrsos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'FGOALS-g3', 'Lmon', 'mrsos') + fix = Fix.get_fixes("CMIP6", "FGOALS-g3", "Lmon", "mrsos") assert fix == [Mrsos(None), GenericFix(None)] @@ -76,73 +83,83 @@ def test_mrsos_fix(): assert issubclass(Mrsos, Fix) -@mock.patch('esmvalcore.cmor._fixes.cmip6.fgoals_g3.Fix.fix_metadata', - autospec=True) +@mock.patch( + "esmvalcore.cmor._fixes.cmip6.fgoals_g3.Fix.fix_metadata", autospec=True +) def test_mrsos_fix_metadata(mock_base_fix_metadata): """Test ``fix_metadata`` for ``mrsos``.""" mock_base_fix_metadata.side_effect = lambda x, y: y # Create test cube - lat_coord = iris.coords.AuxCoord([1.0, 2.0, 3.0], - var_name='lat', - standard_name='latitude') + lat_coord = iris.coords.AuxCoord( + [1.0, 2.0, 3.0], var_name="lat", standard_name="latitude" + ) lat_coord.bounds = [[0.5, 1.5], [-0.5, 0.5], [2.5, 3.5]] - lon_coord = iris.coords.AuxCoord([1.0, 2.0, 3.0], - var_name='lon', - standard_name='longitude') + lon_coord = iris.coords.AuxCoord( + [1.0, 2.0, 3.0], var_name="lon", standard_name="longitude" + ) lon_coord.bounds = [[0.5, 1.5], [-0.5, 0.5], [2.5, 3.5]] - cube = iris.cube.Cube([1.0, 2.0, 3.0], - var_name='mrsos', - 
standard_name='mass_content_of_water_in_soil_layer', - aux_coords_and_dims=[(lat_coord, 0), (lon_coord, 0)]) + cube = iris.cube.Cube( + [1.0, 2.0, 3.0], + var_name="mrsos", + standard_name="mass_content_of_water_in_soil_layer", + aux_coords_and_dims=[(lat_coord, 0), (lon_coord, 0)], + ) cubes = iris.cube.CubeList([cube]) # Apply fix - vardef = get_var_info('CMIP6', 'Lmon', 'mrsos') + vardef = get_var_info("CMIP6", "Lmon", "mrsos") fix = Mrsos(vardef) fixed_cubes = fix.fix_metadata(cubes) assert len(fixed_cubes) == 1 fixed_cube = fixed_cubes[0] np.testing.assert_allclose( - fixed_cube.coord('latitude').bounds, - [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]]) + fixed_cube.coord("latitude").bounds, + [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]], + ) np.testing.assert_allclose( - fixed_cube.coord('longitude').bounds, - [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]]) + fixed_cube.coord("longitude").bounds, + [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]], + ) mock_base_fix_metadata.assert_called_once_with(fix, cubes) -@mock.patch('esmvalcore.cmor._fixes.cmip6.fgoals_g3.Fix.fix_metadata', - autospec=True) +@mock.patch( + "esmvalcore.cmor._fixes.cmip6.fgoals_g3.Fix.fix_metadata", autospec=True +) def test_mrsos_fix_metadata_2(mock_base_fix_metadata): """Test ``fix_metadata`` for ``mrsos`` if no fix is necessary.""" mock_base_fix_metadata.side_effect = lambda x, y: y # Create test cube - lat_coord = iris.coords.AuxCoord([1.0, 2.0, 3.0], - var_name='lat', - standard_name='latitude') + lat_coord = iris.coords.AuxCoord( + [1.0, 2.0, 3.0], var_name="lat", standard_name="latitude" + ) lat_coord.bounds = [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]] - lon_coord = iris.coords.AuxCoord([1.0, 2.0, 3.0], - var_name='lon', - standard_name='longitude') + lon_coord = iris.coords.AuxCoord( + [1.0, 2.0, 3.0], var_name="lon", standard_name="longitude" + ) lon_coord.bounds = [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]] - cube = iris.cube.Cube([1.0, 2.0, 3.0], - var_name='mrsos', - standard_name='mass_content_of_water_in_soil_layer', - aux_coords_and_dims=[(lat_coord, 0), (lon_coord, 0)]) + cube = iris.cube.Cube( + [1.0, 2.0, 3.0], + var_name="mrsos", + standard_name="mass_content_of_water_in_soil_layer", + aux_coords_and_dims=[(lat_coord, 0), (lon_coord, 0)], + ) cubes = iris.cube.CubeList([cube]) # Apply fix - vardef = get_var_info('CMIP6', 'Lmon', 'mrsos') + vardef = get_var_info("CMIP6", "Lmon", "mrsos") fix = Mrsos(vardef) fixed_cubes = fix.fix_metadata(cubes) assert len(fixed_cubes) == 1 fixed_cube = fixed_cubes[0] np.testing.assert_allclose( - fixed_cube.coord('latitude').bounds, - [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]]) + fixed_cube.coord("latitude").bounds, + [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]], + ) np.testing.assert_allclose( - fixed_cube.coord('longitude').bounds, - [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]]) + fixed_cube.coord("longitude").bounds, + [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]], + ) mock_base_fix_metadata.assert_called_once_with(fix, cubes) diff --git a/tests/integration/cmor/_fixes/cmip6/test_fio_esm_2_0.py b/tests/integration/cmor/_fixes/cmip6/test_fio_esm_2_0.py index 2bf811e455..85e77aed88 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_fio_esm_2_0.py +++ b/tests/integration/cmor/_fixes/cmip6/test_fio_esm_2_0.py @@ -1,4 +1,5 @@ """Tests for the fixes of FIO-ESM-2-0.""" + import iris import numpy as np import pytest @@ -14,21 +15,21 @@ def test_clt_fix(): """Test `Clt.fix_data`.""" cube = iris.cube.Cube(0.5) - fix = Fix.get_fixes('CMIP6', 'FIO-ESM-2-0', 'Amon', 'clt')[0] + fix = Fix.get_fixes("CMIP6", "FIO-ESM-2-0", "Amon", "clt")[0] out_cube = 
fix.fix_data(cube) np.testing.assert_allclose(out_cube.data, 50.0) - assert out_cube.units == '%' + assert out_cube.units == "%" def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'FIO-ESM-2-0', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP6", "FIO-ESM-2-0", "Amon", "tas") assert fix == [Amon(None), GenericFix(None)] def test_get_tos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'FIO-ESM-2-0', 'Omon', 'tos') + fix = Fix.get_fixes("CMIP6", "FIO-ESM-2-0", "Omon", "tos") assert fix == [OceanFixGrid(None), Omon(None), GenericFix(None)] @@ -41,50 +42,67 @@ def test_tos_fix(): def tas_cubes(): correct_time_coord = iris.coords.DimCoord( [15.5, 45, 74.5], - bounds=[[0., 31.], [31., 59.], [59., 90.]], - var_name='time', - standard_name='time', - units=Unit('days since 0001-01-01 00:00:00', calendar='365_day')) - - correct_lat_coord = iris.coords.DimCoord([0.0, 1.0], - bounds=[[-0.5, 0.5], [0.5, 1.5]], - var_name='lat', - standard_name='latitude', - units='degrees') - - wrong_lat_coord = iris.coords.DimCoord([0.0, 1.0], - bounds=[[-0.5, 0.5], [1.5, 2.]], - var_name='lat', - standard_name='latitude', - units='degrees') - - correct_lon_coord = iris.coords.DimCoord([0.0, 1.0], - bounds=[[-0.5, 0.5], [0.5, 1.5]], - var_name='lon', - standard_name='longitude', - units='degrees') - - wrong_lon_coord = iris.coords.DimCoord([0.0, 1.0], - bounds=[[-0.5, 0.5], [1.5, 2.]], - var_name='lon', - standard_name='longitude', - units='degrees') - - correct_cube = iris.cube.Cube(10 * np.ones((3, 2, 2)), - var_name='tas', - dim_coords_and_dims=[(correct_time_coord, 0), - (correct_lat_coord, 1), - (correct_lon_coord, 2)], - attributes={'table_id': 'Amon'}, - units=Unit('degC')) - - wrong_cube = iris.cube.Cube(10 * np.ones((3, 2, 2)), - var_name='tas', - dim_coords_and_dims=[(correct_time_coord, 0), - (wrong_lat_coord, 1), - (wrong_lon_coord, 2)], - attributes={'table_id': 'Amon'}, - units=Unit('degC')) + bounds=[[0.0, 31.0], [31.0, 59.0], [59.0, 90.0]], + var_name="time", + standard_name="time", + units=Unit("days since 0001-01-01 00:00:00", calendar="365_day"), + ) + + correct_lat_coord = iris.coords.DimCoord( + [0.0, 1.0], + bounds=[[-0.5, 0.5], [0.5, 1.5]], + var_name="lat", + standard_name="latitude", + units="degrees", + ) + + wrong_lat_coord = iris.coords.DimCoord( + [0.0, 1.0], + bounds=[[-0.5, 0.5], [1.5, 2.0]], + var_name="lat", + standard_name="latitude", + units="degrees", + ) + + correct_lon_coord = iris.coords.DimCoord( + [0.0, 1.0], + bounds=[[-0.5, 0.5], [0.5, 1.5]], + var_name="lon", + standard_name="longitude", + units="degrees", + ) + + wrong_lon_coord = iris.coords.DimCoord( + [0.0, 1.0], + bounds=[[-0.5, 0.5], [1.5, 2.0]], + var_name="lon", + standard_name="longitude", + units="degrees", + ) + + correct_cube = iris.cube.Cube( + 10 * np.ones((3, 2, 2)), + var_name="tas", + dim_coords_and_dims=[ + (correct_time_coord, 0), + (correct_lat_coord, 1), + (correct_lon_coord, 2), + ], + attributes={"table_id": "Amon"}, + units=Unit("degC"), + ) + + wrong_cube = iris.cube.Cube( + 10 * np.ones((3, 2, 2)), + var_name="tas", + dim_coords_and_dims=[ + (correct_time_coord, 0), + (wrong_lat_coord, 1), + (wrong_lon_coord, 2), + ], + attributes={"table_id": "Amon"}, + units=Unit("degC"), + ) return iris.cube.CubeList([correct_cube, wrong_cube]) @@ -92,53 +110,63 @@ def tas_cubes(): @pytest.fixture def tos_cubes(): """Sample cube.""" - time_coord = iris.coords.DimCoord([0.2], - standard_name='time', - var_name='time', - units='days since 1850-01-01') - lat_coord = 
iris.coords.DimCoord([23.0211555789], - standard_name='latitude', - var_name='lat', - units='degrees_north') - lon_coord = iris.coords.DimCoord([30.0211534556], - standard_name='longitude', - var_name='lon', - units='degrees_east') + time_coord = iris.coords.DimCoord( + [0.2], + standard_name="time", + var_name="time", + units="days since 1850-01-01", + ) + lat_coord = iris.coords.DimCoord( + [23.0211555789], + standard_name="latitude", + var_name="lat", + units="degrees_north", + ) + lon_coord = iris.coords.DimCoord( + [30.0211534556], + standard_name="longitude", + var_name="lon", + units="degrees_east", + ) coords_specs = [(time_coord, 0), (lat_coord, 1), (lon_coord, 2)] - cube = iris.cube.Cube([[[22.0]]], - standard_name='sea_surface_temperature', - var_name='tos', - units='degC', - dim_coords_and_dims=coords_specs) + cube = iris.cube.Cube( + [[[22.0]]], + standard_name="sea_surface_temperature", + var_name="tos", + units="degC", + dim_coords_and_dims=coords_specs, + ) return iris.cube.CubeList([cube]) def test_tos_fix_metadata(tos_cubes, caplog): """Test ``fix_metadata``.""" - vardef = get_var_info('CMIP6', 'Omon', 'tos') - fix = Omon(vardef, extra_facets={'dataset': 'FIO-ESM-2-0'}) + vardef = get_var_info("CMIP6", "Omon", "tos") + fix = Omon(vardef, extra_facets={"dataset": "FIO-ESM-2-0"}) fixed_cubes = fix.fix_metadata(tos_cubes) assert len(fixed_cubes) == 1 - fixed_tos_cube = fixed_cubes.extract_cube('sea_surface_temperature') - fixed_lon = fixed_tos_cube.coord('longitude') - fixed_lat = fixed_tos_cube.coord('latitude') + fixed_tos_cube = fixed_cubes.extract_cube("sea_surface_temperature") + fixed_lon = fixed_tos_cube.coord("longitude") + fixed_lat = fixed_tos_cube.coord("latitude") np.testing.assert_equal(fixed_lon.points, [30.021153]) np.testing.assert_equal(fixed_lat.points, [23.021156]) - msg = ("Using 'area_weighted' regridder scheme in Omon variables " - "for dataset FIO-ESM-2-0 causes discontinuities in the longitude " - "coordinate.") + msg = ( + "Using 'area_weighted' regridder scheme in Omon variables " + "for dataset FIO-ESM-2-0 causes discontinuities in the longitude " + "coordinate." 
+ ) assert msg in caplog.text def test_amon_fix_metadata(tas_cubes): - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = Amon(vardef) out_cubes = fix.fix_metadata(tas_cubes) assert tas_cubes is out_cubes for cube in out_cubes: - time = cube.coord('time') - lat = cube.coord('latitude') - lon = cube.coord('longitude') + time = cube.coord("time") + lat = cube.coord("latitude") + lon = cube.coord("longitude") assert all(time.bounds[1:, 0] == time.bounds[:-1, 1]) assert all(lat.bounds[1:, 0] == lat.bounds[:-1, 1]) assert all(lon.bounds[1:, 0] == lon.bounds[:-1, 1]) diff --git a/tests/integration/cmor/_fixes/cmip6/test_gfdl_cm4.py b/tests/integration/cmor/_fixes/cmip6/test_gfdl_cm4.py index 65ca211e12..7e5fb81110 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_gfdl_cm4.py +++ b/tests/integration/cmor/_fixes/cmip6/test_gfdl_cm4.py @@ -1,7 +1,9 @@ """Tests for the fixes of GFDL-CM4.""" + import iris import numpy as np import pytest +from cf_units import Unit from esmvalcore.cmor._fixes.cmip6.gfdl_cm4 import ( Cl, @@ -15,78 +17,80 @@ Tos, Uas, ) -from esmvalcore.cmor._fixes.common import SiconcFixScalarCoord, OceanFixGrid +from esmvalcore.cmor._fixes.common import OceanFixGrid, SiconcFixScalarCoord from esmvalcore.cmor._fixes.fix import GenericFix from esmvalcore.cmor.fix import Fix from esmvalcore.cmor.table import get_var_info -from cf_units import Unit def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'GFDL-CM4', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "GFDL-CM4", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] -AIR_PRESSURE_POINTS = np.array([[[[1.0, 1.0], - [1.0, 1.0]], - [[2.0, 3.0], - [4.0, 5.0]], - [[5.0, 8.0], - [11.0, 14.0]]]]) -AIR_PRESSURE_BOUNDS = np.array([[[[[0.0, 1.5], - [-1.0, 2.0]], - [[-2.0, 2.5], - [-3.0, 3.0]]], - [[[1.5, 3.0], - [2.0, 5.0]], - [[2.5, 7.0], - [3.0, 9.0]]], - [[[3.0, 6.0], - [5.0, 11.0]], - [[7.0, 16.0], - [9.0, 21.0]]]]]) +AIR_PRESSURE_POINTS = np.array( + [ + [ + [[1.0, 1.0], [1.0, 1.0]], + [[2.0, 3.0], [4.0, 5.0]], + [[5.0, 8.0], [11.0, 14.0]], + ] + ] +) +AIR_PRESSURE_BOUNDS = np.array( + [ + [ + [[[0.0, 1.5], [-1.0, 2.0]], [[-2.0, 2.5], [-3.0, 3.0]]], + [[[1.5, 3.0], [2.0, 5.0]], [[2.5, 7.0], [3.0, 9.0]]], + [[[3.0, 6.0], [5.0, 11.0]], [[7.0, 16.0], [9.0, 21.0]]], + ] + ] +) @pytest.mark.sequential def test_cl_fix_metadata(test_data_path): """Test ``fix_metadata`` for ``cl``.""" - nc_path = test_data_path / 'gfdl_cm4_cl.nc' + nc_path = test_data_path / "gfdl_cm4_cl.nc" cubes = iris.load(str(nc_path)) # Raw cubes assert len(cubes) == 6 var_names = [cube.var_name for cube in cubes] - assert 'cl' in var_names - assert 'ap' in var_names - assert 'ap_bnds' in var_names - assert 'b' in var_names - assert 'b_bnds' in var_names - assert 'ps' in var_names + assert "cl" in var_names + assert "ap" in var_names + assert "ap_bnds" in var_names + assert "b" in var_names + assert "b_bnds" in var_names + assert "ps" in var_names # Raw cl cube - cl_cube = cubes.extract_cube('cloud_area_fraction_in_atmosphere_layer') - assert not cl_cube.coords('air_pressure') + cl_cube = cubes.extract_cube("cloud_area_fraction_in_atmosphere_layer") + assert not cl_cube.coords("air_pressure") # Apply fix - vardef = get_var_info('CMIP6', 'Amon', 'cl') + vardef = get_var_info("CMIP6", "Amon", "cl") fix = Cl(vardef) fixed_cubes = fix.fix_metadata(cubes) assert len(fixed_cubes) == 1 fixed_cl_cube = fixed_cubes.extract_cube( - 'cloud_area_fraction_in_atmosphere_layer') - fixed_air_pressure_coord = 
fixed_cl_cube.coord('air_pressure') + "cloud_area_fraction_in_atmosphere_layer" + ) + fixed_air_pressure_coord = fixed_cl_cube.coord("air_pressure") assert fixed_air_pressure_coord.points is not None assert fixed_air_pressure_coord.bounds is not None - np.testing.assert_allclose(fixed_air_pressure_coord.points, - AIR_PRESSURE_POINTS) - np.testing.assert_allclose(fixed_air_pressure_coord.bounds, - AIR_PRESSURE_BOUNDS) + np.testing.assert_allclose( + fixed_air_pressure_coord.points, AIR_PRESSURE_POINTS + ) + np.testing.assert_allclose( + fixed_air_pressure_coord.bounds, AIR_PRESSURE_BOUNDS + ) def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'GFDL-CM4', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "GFDL-CM4", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -97,7 +101,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'GFDL-CM4', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "GFDL-CM4", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -108,13 +112,13 @@ def test_clw_fix(): def test_get_fgco2_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'GFDL-CM4', 'Omon', 'fgco2') + fix = Fix.get_fixes("CMIP6", "GFDL-CM4", "Omon", "fgco2") assert fix == [Fgco2(None), Omon(None), GenericFix(None)] def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'GFDL-CM4', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "GFDL-CM4", "SImon", "siconc") assert fix == [Siconc(None), GenericFix(None)] @@ -125,7 +129,7 @@ def test_siconc_fix(): def test_get_sos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'GFDL-CM4', 'Omon', 'sos') + fix = Fix.get_fixes("CMIP6", "GFDL-CM4", "Omon", "sos") assert fix == [Sos(None), Omon(None), GenericFix(None)] @@ -136,7 +140,7 @@ def test_sos_fix(): def test_get_tos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'GFDL-CM4', 'Omon', 'tos') + fix = Fix.get_fixes("CMIP6", "GFDL-CM4", "Omon", "tos") assert fix == [Tos(None), Omon(None), GenericFix(None)] @@ -147,99 +151,107 @@ def test_tos_fix(): @pytest.fixture def tas_cubes(): - correct_lat_coord = iris.coords.DimCoord([0.0], - var_name='lat', - standard_name='latitude') - wrong_lat_coord = iris.coords.DimCoord([0.0], - var_name='latitudeCoord', - standard_name='latitude') - correct_lon_coord = iris.coords.DimCoord([0.0], - var_name='lon', - standard_name='longitude') - wrong_lon_coord = iris.coords.DimCoord([0.0], - var_name='longitudeCoord', - standard_name='longitude') - correct_cube = iris.cube.Cube([[2.0]], - var_name='tas', - dim_coords_and_dims=[(correct_lat_coord, 0), - (correct_lon_coord, 1)]) - wrong_cube = iris.cube.Cube([[2.0]], - var_name='ta', - dim_coords_and_dims=[(wrong_lat_coord, 0), - (wrong_lon_coord, 1)]) - scalar_cube = iris.cube.Cube(0.0, var_name='ps') + correct_lat_coord = iris.coords.DimCoord( + [0.0], var_name="lat", standard_name="latitude" + ) + wrong_lat_coord = iris.coords.DimCoord( + [0.0], var_name="latitudeCoord", standard_name="latitude" + ) + correct_lon_coord = iris.coords.DimCoord( + [0.0], var_name="lon", standard_name="longitude" + ) + wrong_lon_coord = iris.coords.DimCoord( + [0.0], var_name="longitudeCoord", standard_name="longitude" + ) + correct_cube = iris.cube.Cube( + [[2.0]], + var_name="tas", + dim_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 1)], + ) + wrong_cube = iris.cube.Cube( + [[2.0]], + var_name="ta", + dim_coords_and_dims=[(wrong_lat_coord, 0), (wrong_lon_coord, 1)], + 
) + scalar_cube = iris.cube.Cube(0.0, var_name="ps") return iris.cube.CubeList([correct_cube, wrong_cube, scalar_cube]) def test_get_tas_fix(): - fixes = Fix.get_fixes('CMIP6', 'GFDL-CM4', 'day', 'tas') + fixes = Fix.get_fixes("CMIP6", "GFDL-CM4", "day", "tas") assert Tas(None) in fixes def test_tas_fix_metadata(tas_cubes): for cube in tas_cubes: with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube.coord('height') - height_coord = iris.coords.AuxCoord(2.0, - var_name='height', - standard_name='height', - long_name='height', - units=Unit('m'), - attributes={'positive': 'up'}) - vardef = get_var_info('CMIP6', 'day', 'tas') + cube.coord("height") + height_coord = iris.coords.AuxCoord( + 2.0, + var_name="height", + standard_name="height", + long_name="height", + units=Unit("m"), + attributes={"positive": "up"}, + ) + vardef = get_var_info("CMIP6", "day", "tas") fix = Tas(vardef) out_cubes = fix.fix_metadata(tas_cubes) - assert out_cubes[0].var_name == 'tas' - coord = out_cubes[0].coord('height') + assert out_cubes[0].var_name == "tas" + coord = out_cubes[0].coord("height") assert coord == height_coord @pytest.fixture def uas_cubes(): - correct_lat_coord = iris.coords.DimCoord([0.0], - var_name='lat', - standard_name='latitude') - wrong_lat_coord = iris.coords.DimCoord([0.0], - var_name='latitudeCoord', - standard_name='latitude') - correct_lon_coord = iris.coords.DimCoord([0.0], - var_name='lon', - standard_name='longitude') - wrong_lon_coord = iris.coords.DimCoord([0.0], - var_name='longitudeCoord', - standard_name='longitude') - correct_cube = iris.cube.Cube([[10.0]], - var_name='uas', - dim_coords_and_dims=[(correct_lat_coord, 0), - (correct_lon_coord, 1)]) - wrong_cube = iris.cube.Cube([[10.0]], - var_name='ua', - dim_coords_and_dims=[(wrong_lat_coord, 0), - (wrong_lon_coord, 1)]) - scalar_cube = iris.cube.Cube(0.0, var_name='ps') + correct_lat_coord = iris.coords.DimCoord( + [0.0], var_name="lat", standard_name="latitude" + ) + wrong_lat_coord = iris.coords.DimCoord( + [0.0], var_name="latitudeCoord", standard_name="latitude" + ) + correct_lon_coord = iris.coords.DimCoord( + [0.0], var_name="lon", standard_name="longitude" + ) + wrong_lon_coord = iris.coords.DimCoord( + [0.0], var_name="longitudeCoord", standard_name="longitude" + ) + correct_cube = iris.cube.Cube( + [[10.0]], + var_name="uas", + dim_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 1)], + ) + wrong_cube = iris.cube.Cube( + [[10.0]], + var_name="ua", + dim_coords_and_dims=[(wrong_lat_coord, 0), (wrong_lon_coord, 1)], + ) + scalar_cube = iris.cube.Cube(0.0, var_name="ps") return iris.cube.CubeList([correct_cube, wrong_cube, scalar_cube]) def test_get_uas_fix(): - fixes = Fix.get_fixes('CMIP6', 'GFDL-CM4', 'day', 'uas') + fixes = Fix.get_fixes("CMIP6", "GFDL-CM4", "day", "uas") assert Uas(None) in fixes def test_uas_fix_metadata(uas_cubes): for cube in uas_cubes: with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube.coord('height') - height_coord = iris.coords.AuxCoord(10.0, - var_name='height', - standard_name='height', - long_name='height', - units=Unit('m'), - attributes={'positive': 'up'}) - vardef = get_var_info('CMIP6', 'day', 'uas') + cube.coord("height") + height_coord = iris.coords.AuxCoord( + 10.0, + var_name="height", + standard_name="height", + long_name="height", + units=Unit("m"), + attributes={"positive": "up"}, + ) + vardef = get_var_info("CMIP6", "day", "uas") fix = Uas(vardef) out_cubes = fix.fix_metadata(uas_cubes) - assert out_cubes[0].var_name == 'uas' - coord = 
out_cubes[0].coord('height')
+    assert out_cubes[0].var_name == "uas"
+    coord = out_cubes[0].coord("height")
     assert coord == height_coord
diff --git a/tests/integration/cmor/_fixes/cmip6/test_gfdl_esm4.py b/tests/integration/cmor/_fixes/cmip6/test_gfdl_esm4.py
index 1db8eca4a0..a9132af9b6 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_gfdl_esm4.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_gfdl_esm4.py
@@ -13,7 +13,7 @@ def test_get_siconc_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'GFDL-ESM4', 'SImon', 'siconc')
+    fix = Fix.get_fixes("CMIP6", "GFDL-ESM4", "SImon", "siconc")
     assert fix == [Siconc(None), GenericFix(None)]
@@ -26,16 +26,25 @@ def test_siconc_fix():
 def thetao_cubes():
     """Cubes to test fixes for ``thetao``."""
     time_coord = iris.coords.DimCoord(
-        [0.0004, 1.09776], var_name='time', standard_name='time',
-        units='days since 1850-01-01 00:00:00')
+        [0.0004, 1.09776],
+        var_name="time",
+        standard_name="time",
+        units="days since 1850-01-01 00:00:00",
+    )
     lat_coord = iris.coords.DimCoord(
-        [0.0, 1.0], var_name='lat', standard_name='latitude', units='degrees')
+        [0.0, 1.0], var_name="lat", standard_name="latitude", units="degrees"
+    )
     lon_coord = iris.coords.DimCoord(
-        [0.0, 1.0], var_name='lon', standard_name='longitude', units='degrees')
+        [0.0, 1.0], var_name="lon", standard_name="longitude", units="degrees"
+    )
     lev_coord = iris.coords.DimCoord(
-        [5.0, 10.0], bounds=[[2.5, 7.5], [7.5, 12.5]],
-        var_name='lev', standard_name=None, units='m',
-        attributes={'positive': 'up'})
+        [5.0, 10.0],
+        bounds=[[2.5, 7.5], [7.5, 12.5]],
+        var_name="lev",
+        standard_name=None,
+        units="m",
+        attributes={"positive": "up"},
+    )
     coord_specs = [
         (time_coord, 0),
         (lev_coord, 1),
@@ -44,7 +53,7 @@ def thetao_cubes():
     ]
     thetao_cube = iris.cube.Cube(
         np.ones((2, 2, 2, 2)),
-        var_name='thetao',
+        var_name="thetao",
         dim_coords_and_dims=coord_specs,
     )
     return iris.cube.CubeList([thetao_cube])
@@ -52,13 +61,13 @@ def thetao_cubes():

 def test_get_thetao_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'GFDL-ESM4', 'Omon', 'thetao')
+    fix = Fix.get_fixes("CMIP6", "GFDL-ESM4", "Omon", "thetao")
     assert fix == [Omon(None), GenericFix(None)]


 def test_thetao_fix_metadata(thetao_cubes):
     """Test ``fix_metadata`` for ``thetao``."""
-    vardef = get_var_info('CMIP6', 'Omon', 'thetao')
+    vardef = get_var_info("CMIP6", "Omon", "thetao")
     fix = Omon(vardef)
     out_cubes = fix.fix_metadata(thetao_cubes)
     assert out_cubes is thetao_cubes
@@ -66,26 +75,28 @@ def test_thetao_fix_metadata(thetao_cubes):
     out_cube = out_cubes[0]

     # Check metadata of depth coordinate
-    depth_coord = out_cube.coord('depth')
-    assert depth_coord.standard_name == 'depth'
-    assert depth_coord.var_name == 'lev'
-    assert depth_coord.long_name == 'ocean depth coordinate'
-    assert depth_coord.units == 'm'
-    assert depth_coord.attributes == {'positive': 'down'}
+    depth_coord = out_cube.coord("depth")
+    assert depth_coord.standard_name == "depth"
+    assert depth_coord.var_name == "lev"
+    assert depth_coord.long_name == "ocean depth coordinate"
+    assert depth_coord.units == "m"
+    assert depth_coord.attributes == {"positive": "down"}


 def test_get_fgco2_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'GFDL-ESM4', 'Omon', 'fgco2')
+    fix = Fix.get_fixes("CMIP6", "GFDL-ESM4", "Omon", "fgco2")
     assert fix == [Fgco2(None), Omon(None), GenericFix(None)]


 def test_fgco2_fix_metadata():
     """Test ``fix_metadata`` for ``fgco2``."""
-    vardef = get_var_info('CMIP6', 'Omon', 'fgco2')
-    cubes = iris.cube.CubeList([
-        iris.cube.Cube(0.0, var_name='fgco2'),
-    ])
+    vardef = get_var_info("CMIP6", "Omon", "fgco2")
+    cubes = iris.cube.CubeList(
+        [
+            iris.cube.Cube(0.0, var_name="fgco2"),
+        ]
+    )
     fix = Fgco2(vardef)
     out_cubes = fix.fix_metadata(cubes)
     assert out_cubes is cubes
@@ -93,12 +104,12 @@ def test_fgco2_fix_metadata():
     out_cube = out_cubes[0]

     # Check depth coordinate
-    depth_coord = out_cube.coord('depth')
-    assert depth_coord.standard_name == 'depth'
-    assert depth_coord.var_name == 'depth'
-    assert depth_coord.long_name == 'depth'
-    assert depth_coord.units == 'm'
-    assert depth_coord.attributes == {'positive': 'down'}
+    depth_coord = out_cube.coord("depth")
+    assert depth_coord.standard_name == "depth"
+    assert depth_coord.var_name == "depth"
+    assert depth_coord.long_name == "depth"
+    assert depth_coord.units == "m"
+    assert depth_coord.attributes == {"positive": "down"}

     # Check values of depth coordinate
     np.testing.assert_allclose(depth_coord.points, 0.0)
diff --git a/tests/integration/cmor/_fixes/cmip6/test_giss_e2_1_g.py b/tests/integration/cmor/_fixes/cmip6/test_giss_e2_1_g.py
index 40e61adb86..976556b2be 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_giss_e2_1_g.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_giss_e2_1_g.py
@@ -1,4 +1,5 @@
 """Test fixes for GISS-E2-1-G."""
+
 import dask.array as da
 import numpy as np
 from iris.cube import Cube
@@ -10,7 +11,7 @@ def test_get_cl_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'GISS-E2-1-G', 'Amon', 'cl')
+    fix = Fix.get_fixes("CMIP6", "GISS-E2-1-G", "Amon", "cl")
     assert fix == [Cl(None), GenericFix(None)]
@@ -21,7 +22,7 @@ def test_cl_fix():

 def test_get_cli_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'GISS-E2-1-G', 'Amon', 'cli')
+    fix = Fix.get_fixes("CMIP6", "GISS-E2-1-G", "Amon", "cli")
     assert fix == [Cli(None), GenericFix(None)]
@@ -32,7 +33,7 @@ def test_cli_fix():

 def test_get_clw_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'GISS-E2-1-G', 'Amon', 'clw')
+    fix = Fix.get_fixes("CMIP6", "GISS-E2-1-G", "Amon", "clw")
     assert fix == [Clw(None), GenericFix(None)]
@@ -42,12 +43,12 @@ def test_clw_fix():

 def test_tos_fix():
-    fix = Fix.get_fixes('CMIP6', 'GISS-E2-1-G', 'Omon', 'tos')[0]
+    fix = Fix.get_fixes("CMIP6", "GISS-E2-1-G", "Omon", "tos")[0]
     cube = Cube(
         da.array([274], dtype=np.float32),
-        var_name='tos',
-        units='degC',
+        var_name="tos",
+        units="degC",
     )
-    result, = fix.fix_metadata([cube])
-    assert 0. < result.data < 1.
-    assert result.units == 'degC'
+    (result,) = fix.fix_metadata([cube])
+    assert 0.0 < result.data < 1.0
+    assert result.units == "degC"
diff --git a/tests/integration/cmor/_fixes/cmip6/test_giss_e2_1_h.py b/tests/integration/cmor/_fixes/cmip6/test_giss_e2_1_h.py
index 482e72cd22..d86d09daf9 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_giss_e2_1_h.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_giss_e2_1_h.py
@@ -1,4 +1,5 @@
 """Test fixes for GISS-E2-1-H."""
+
 from esmvalcore.cmor._fixes.cmip6.giss_e2_1_h import Cl, Cli, Clw
 from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord
 from esmvalcore.cmor._fixes.fix import Fix, GenericFix
@@ -6,7 +7,7 @@ def test_get_cl_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'GISS-E2-1-H', 'Amon', 'cl')
+    fix = Fix.get_fixes("CMIP6", "GISS-E2-1-H", "Amon", "cl")
     assert fix == [Cl(None), GenericFix(None)]
@@ -17,7 +18,7 @@ def test_cl_fix():

 def test_get_cli_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'GISS-E2-1-H', 'Amon', 'cli')
+    fix = Fix.get_fixes("CMIP6", "GISS-E2-1-H", "Amon", "cli")
     assert fix == [Cli(None), GenericFix(None)]
@@ -28,7 +29,7 @@ def test_cli_fix():

 def test_get_clw_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'GISS-E2-1-H', 'Amon', 'clw')
+    fix = Fix.get_fixes("CMIP6", "GISS-E2-1-H", "Amon", "clw")
     assert fix == [Clw(None), GenericFix(None)]
diff --git a/tests/integration/cmor/_fixes/cmip6/test_hadgem3_gc31_ll.py b/tests/integration/cmor/_fixes/cmip6/test_hadgem3_gc31_ll.py
index aa9e7c34cb..0a776c8bf6 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_hadgem3_gc31_ll.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_hadgem3_gc31_ll.py
@@ -1,4 +1,5 @@
 """Tests for the fixes of HadGEM3-GC31-LL."""
+
 import iris
 import pytest
@@ -10,37 +11,37 @@
 @pytest.fixture
 def sample_cubes():
-    ta_cube = iris.cube.Cube([1.0], var_name='ta')
-    tas_cube = iris.cube.Cube([3.0], var_name='tas')
+    ta_cube = iris.cube.Cube([1.0], var_name="ta")
+    tas_cube = iris.cube.Cube([3.0], var_name="tas")
     return iris.cube.CubeList([ta_cube, tas_cube])


 def test_get_tas_fix():
-    fix = Fix.get_fixes('CMIP6', 'HadGEM3-GC31-LL', 'Amon', 'tas')
+    fix = Fix.get_fixes("CMIP6", "HadGEM3-GC31-LL", "Amon", "tas")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_allvars_fix_metadata(sample_cubes):
     for cube in sample_cubes:
-        cube.attributes['parent_time_units'] = 'days since 1850-01-01'
+        cube.attributes["parent_time_units"] = "days since 1850-01-01"
     out_cubes = AllVars(None).fix_metadata(sample_cubes)
     assert out_cubes is sample_cubes
     for cube in out_cubes:
-        assert cube.attributes['parent_time_units'] == 'days since 1850-01-01'
+        assert cube.attributes["parent_time_units"] == "days since 1850-01-01"


 def test_allvars_no_need_tofix_metadata(sample_cubes):
     for cube in sample_cubes:
-        cube.attributes['parent_time_units'] = 'days since 1850-01-01'
+        cube.attributes["parent_time_units"] = "days since 1850-01-01"
     out_cubes = AllVars(None).fix_metadata(sample_cubes)
     assert out_cubes is sample_cubes
     for cube in out_cubes:
-        assert cube.attributes['parent_time_units'] == 'days since 1850-01-01'
+        assert cube.attributes["parent_time_units"] == "days since 1850-01-01"


 def test_get_cl_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'HadGEM3-GC31-LL', 'Amon', 'cl')
+    fix = Fix.get_fixes("CMIP6", "HadGEM3-GC31-LL", "Amon", "cl")
     assert fix == [Cl(None), AllVars(None), GenericFix(None)]
@@ -51,7 +52,7 @@ def test_cl_fix():

 def test_get_cli_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'HadGEM3-GC31-LL', 'Amon', 'cli')
+    fix = Fix.get_fixes("CMIP6", "HadGEM3-GC31-LL", "Amon", "cli")
     assert fix == [Cli(None), AllVars(None), GenericFix(None)]
@@ -62,7 +63,7 @@ def test_cli_fix():

 def test_get_clw_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'HadGEM3-GC31-LL', 'Amon', 'clw')
+    fix = Fix.get_fixes("CMIP6", "HadGEM3-GC31-LL", "Amon", "clw")
     assert fix == [Clw(None), AllVars(None), GenericFix(None)]
diff --git a/tests/integration/cmor/_fixes/cmip6/test_icon_esm_lr.py b/tests/integration/cmor/_fixes/cmip6/test_icon_esm_lr.py
index dfd183788d..a9a78a145a 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_icon_esm_lr.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_icon_esm_lr.py
@@ -1,4 +1,5 @@
 """Tests for the fixes of ICON-ESM-LR."""
+
 import pytest
 from iris.coords import AuxCoord
 from iris.cube import Cube, CubeList
@@ -11,22 +12,26 @@
 @pytest.fixture
 def cubes():
     """Cubes to test fix."""
-    correct_lat_coord = AuxCoord([0.0], var_name='lat',
-                                 standard_name='latitude')
-    wrong_lat_coord = AuxCoord([0.0], var_name='latitude',
-                               standard_name='latitude')
-    correct_lon_coord = AuxCoord([0.0], var_name='lon',
-                                 standard_name='longitude')
-    wrong_lon_coord = AuxCoord([0.0], var_name='longitude',
-                               standard_name='longitude')
+    correct_lat_coord = AuxCoord(
+        [0.0], var_name="lat", standard_name="latitude"
+    )
+    wrong_lat_coord = AuxCoord(
+        [0.0], var_name="latitude", standard_name="latitude"
+    )
+    correct_lon_coord = AuxCoord(
+        [0.0], var_name="lon", standard_name="longitude"
+    )
+    wrong_lon_coord = AuxCoord(
+        [0.0], var_name="longitude", standard_name="longitude"
+    )
     correct_cube = Cube(
         [10.0],
-        var_name='tas',
+        var_name="tas",
         aux_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 0)],
     )
     wrong_cube = Cube(
         [10.0],
-        var_name='pr',
+        var_name="pr",
         aux_coords_and_dims=[(wrong_lat_coord, 0), (wrong_lon_coord, 0)],
     )
     return CubeList([correct_cube, wrong_cube])
@@ -34,7 +39,7 @@ def cubes():

 def test_get_allvars_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'ICON-ESM-LR', 'Amon', 'tas')
+    fix = Fix.get_fixes("CMIP6", "ICON-ESM-LR", "Amon", "tas")
     assert fix == [AllVars(None), GenericFix(None)]
@@ -44,41 +49,41 @@ def test_allvars_fix_metadata_lat_lon(cubes):
     out_cubes = fix.fix_metadata(cubes)
     assert cubes is out_cubes
     for cube in out_cubes:
-        lat_coord = cube.coord('latitude')
-        lon_coord = cube.coord('longitude')
-        assert lat_coord.var_name == 'lat'
-        assert lon_coord.var_name == 'lon'
+        lat_coord = cube.coord("latitude")
+        lon_coord = cube.coord("longitude")
+        assert lat_coord.var_name == "lat"
+        assert lon_coord.var_name == "lon"


 def test_allvars_fix_metadata_lat(cubes):
     """Test ``fix_metadata`` for all variables."""
     for cube in cubes:
-        cube.remove_coord('longitude')
+        cube.remove_coord("longitude")
     fix = AllVars(None)
     out_cubes = fix.fix_metadata(cubes)
     assert cubes is out_cubes
     for cube in out_cubes:
-        lat_coord = cube.coord('latitude')
-        assert lat_coord.var_name == 'lat'
+        lat_coord = cube.coord("latitude")
+        assert lat_coord.var_name == "lat"


 def test_allvars_fix_metadata_lon(cubes):
     """Test ``fix_metadata`` for all variables."""
     for cube in cubes:
-        cube.remove_coord('latitude')
+        cube.remove_coord("latitude")
     fix = AllVars(None)
     out_cubes = fix.fix_metadata(cubes)
     assert cubes is out_cubes
     for cube in out_cubes:
-        lon_coord = cube.coord('longitude')
-        assert lon_coord.var_name == 'lon'
+        lon_coord = cube.coord("longitude")
+        assert lon_coord.var_name == "lon"


 def test_allvars_fix_metadata_no_lat_lon(cubes):
     """Test ``fix_metadata`` for all variables."""
     for cube in cubes:
-        cube.remove_coord('latitude')
-        cube.remove_coord('longitude')
+        cube.remove_coord("latitude")
+        cube.remove_coord("longitude")
     fix = AllVars(None)
     out_cubes = fix.fix_metadata(cubes)
     assert cubes is out_cubes
diff --git a/tests/integration/cmor/_fixes/cmip6/test_iitm_esm.py b/tests/integration/cmor/_fixes/cmip6/test_iitm_esm.py
index 16efd1d75c..5ee4dd2116 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_iitm_esm.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_iitm_esm.py
@@ -1,4 +1,5 @@
 """Tests for the fixes of IITM-ESM."""
+
 import iris
 import numpy as np
 import pytest
@@ -12,11 +13,9 @@ def test_get_tos_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6',
-                        'IITM-ESM',
-                        'Omon',
-                        'tos',
-                        extra_facets={"frequency": "mon"})
+    fix = Fix.get_fixes(
+        "CMIP6", "IITM-ESM", "Omon", "tos", extra_facets={"frequency": "mon"}
+    )
     assert fix == [Tos(None), AllVars(None), GenericFix(None)]
@@ -29,58 +28,74 @@ def test_tos_fix():
 def cubes():
     correct_time_coord = iris.coords.DimCoord(
         [15.5, 45, 74.5],
-        bounds=[[0., 31.], [31., 59.], [59., 90.]],
-        var_name='time',
-        standard_name='time',
-        units=Unit('days since 0001-01-01 00:00:00', calendar='365_day'))
+        bounds=[[0.0, 31.0], [31.0, 59.0], [59.0, 90.0]],
+        var_name="time",
+        standard_name="time",
+        units=Unit("days since 0001-01-01 00:00:00", calendar="365_day"),
+    )
     wrong_time_coord = iris.coords.DimCoord(
         [15.5, 45, 74.5],
-        bounds=[[5.5, 25.5], [35., 55.], [64.5, 84.5]],
-        var_name='time',
-        standard_name='time',
-        units=Unit('days since 0001-01-01 00:00:00', calendar='365_day'))
+        bounds=[[5.5, 25.5], [35.0, 55.0], [64.5, 84.5]],
+        var_name="time",
+        standard_name="time",
+        units=Unit("days since 0001-01-01 00:00:00", calendar="365_day"),
+    )

-    correct_lat_coord = iris.coords.DimCoord([0.0, 1.0],
-                                             bounds=[[-0.5, 0.5], [0.5, 1.5]],
-                                             var_name='lat',
-                                             standard_name='latitude',
-                                             units='degrees')
+    correct_lat_coord = iris.coords.DimCoord(
+        [0.0, 1.0],
+        bounds=[[-0.5, 0.5], [0.5, 1.5]],
+        var_name="lat",
+        standard_name="latitude",
+        units="degrees",
+    )

-    correct_lon_coord = iris.coords.DimCoord([0.0, 1.0],
-                                             bounds=[[-0.5, 0.5], [0.5, 1.5]],
-                                             var_name='lon',
-                                             standard_name='longitude',
-                                             units='degrees')
+    correct_lon_coord = iris.coords.DimCoord(
+        [0.0, 1.0],
+        bounds=[[-0.5, 0.5], [0.5, 1.5]],
+        var_name="lon",
+        standard_name="longitude",
+        units="degrees",
+    )

-    correct_cube = iris.cube.Cube(10 * np.ones((3, 2, 2)),
-                                  var_name='tos',
-                                  dim_coords_and_dims=[(correct_time_coord, 0),
-                                                       (correct_lat_coord, 1),
-                                                       (correct_lon_coord, 2)],
-                                  attributes={'table_id': 'Omon'},
-                                  units=Unit('degC'))
+    correct_cube = iris.cube.Cube(
+        10 * np.ones((3, 2, 2)),
+        var_name="tos",
+        dim_coords_and_dims=[
+            (correct_time_coord, 0),
+            (correct_lat_coord, 1),
+            (correct_lon_coord, 2),
+        ],
+        attributes={"table_id": "Omon"},
+        units=Unit("degC"),
+    )

-    wrong_cube = iris.cube.Cube(10 * np.ones((3, 2, 2)),
-                                var_name='tos',
-                                dim_coords_and_dims=[(wrong_time_coord, 0),
-                                                     (correct_lat_coord, 1),
-                                                     (correct_lon_coord, 2)],
-                                attributes={'table_id': 'Omon'},
-                                units=Unit('degC'))
+    wrong_cube = iris.cube.Cube(
+        10 * np.ones((3, 2, 2)),
+        var_name="tos",
+        dim_coords_and_dims=[
+            (wrong_time_coord, 0),
+            (correct_lat_coord, 1),
+            (correct_lon_coord, 2),
+        ],
+        attributes={"table_id": "Omon"},
+        units=Unit("degC"),
+    )

     return iris.cube.CubeList([correct_cube, wrong_cube])


 def test_allvars_fix_metadata(monkeypatch, cubes, caplog):
     fix = AllVars(None)
-    monkeypatch.setitem(fix.extra_facets, 'frequency', 'mon')
-    monkeypatch.setitem(fix.extra_facets, 'dataset', 'IITM-ESM')
+    monkeypatch.setitem(fix.extra_facets, "frequency", "mon")
+    monkeypatch.setitem(fix.extra_facets, "dataset", "IITM-ESM")
     out_cubes = fix.fix_metadata(cubes)
     assert cubes is out_cubes
     for cube in out_cubes:
-        time = cube.coord('time')
+        time = cube.coord("time")
         assert all(time.bounds[1:, 0] == time.bounds[:-1, 1])
-    msg = ("Using 'area_weighted' regridder scheme in Omon variables "
-           "for dataset IITM-ESM causes discontinuities in the longitude "
-           "coordinate.")
+    msg = (
+        "Using 'area_weighted' regridder scheme in Omon variables "
+        "for dataset IITM-ESM causes discontinuities in the longitude "
+        "coordinate."
+    )
     assert msg in caplog.text
diff --git a/tests/integration/cmor/_fixes/cmip6/test_ipsl_cm6a_lr.py b/tests/integration/cmor/_fixes/cmip6/test_ipsl_cm6a_lr.py
index c32008ef0c..99155f4743 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_ipsl_cm6a_lr.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_ipsl_cm6a_lr.py
@@ -1,4 +1,5 @@
 """Tests for the fixes of IPSL-CM6A-LR."""
+
 import unittest

 import iris
@@ -18,108 +19,129 @@ class TestAllVars(unittest.TestCase):
     def setUp(self):
         """Set up tests."""
-        vardef = get_var_info('CMIP6', 'Omon', 'tos')
+        vardef = get_var_info("CMIP6", "Omon", "tos")
         self.fix = AllVars(vardef)
-        self.cube = Cube(np.random.rand(2, 2, 2), var_name='tos')
+        self.cube = Cube(np.random.rand(2, 2, 2), var_name="tos")
         self.cube.add_aux_coord(
-            AuxCoord(np.random.rand(2, 2),
-                     var_name='nav_lat',
-                     standard_name='latitude'), (1, 2))
+            AuxCoord(
+                np.random.rand(2, 2),
+                var_name="nav_lat",
+                standard_name="latitude",
+            ),
+            (1, 2),
+        )
         self.cube.add_aux_coord(
-            AuxCoord(np.random.rand(2, 2),
-                     var_name='nav_lon',
-                     standard_name='longitude'), (1, 2))
+            AuxCoord(
+                np.random.rand(2, 2),
+                var_name="nav_lon",
+                standard_name="longitude",
+            ),
+            (1, 2),
+        )

     def test_fix_metadata_ocean_var(self):
         """Test ``fix_metadata`` for ocean variables."""
-        cell_area = Cube(np.random.rand(2, 2), standard_name='cell_area')
+        cell_area = Cube(np.random.rand(2, 2), standard_name="cell_area")
         cubes = self.fix.fix_metadata(CubeList([self.cube, cell_area]))
         self.assertEqual(len(cubes), 1)
         cube = cubes[0]
-        self.assertEqual(cube.var_name, 'tos')
-        self.assertEqual(cube.coord('latitude').var_name, 'lat')
-        self.assertEqual(cube.coord('longitude').var_name, 'lon')
+        self.assertEqual(cube.var_name, "tos")
+        self.assertEqual(cube.coord("latitude").var_name, "lat")
+        self.assertEqual(cube.coord("longitude").var_name, "lon")

     def test_fix_data_no_lat(self):
         """Test ``fix_metadata`` when no latitude is present."""
-        self.cube.remove_coord('latitude')
+        self.cube.remove_coord("latitude")
         cubes = self.fix.fix_metadata(CubeList([self.cube]))
         self.assertEqual(len(cubes), 1)
         cube = cubes[0]
-        self.assertEqual(cube.coord('longitude').var_name, 'lon')
+        self.assertEqual(cube.coord("longitude").var_name, "lon")
         with self.assertRaises(CoordinateNotFoundError):
-            self.cube.coord('latitude')
+            self.cube.coord("latitude")

     def test_fix_data_no_lon(self):
         """Test ``fix_metadata`` when no longitude is present."""
-        self.cube.remove_coord('longitude')
+        self.cube.remove_coord("longitude")
         cubes = self.fix.fix_metadata(CubeList([self.cube]))
         self.assertEqual(len(cubes), 1)
         cube = cubes[0]
-        self.assertEqual(cube.coord('latitude').var_name, 'lat')
+        self.assertEqual(cube.coord("latitude").var_name, "lat")
         with self.assertRaises(CoordinateNotFoundError):
-            self.cube.coord('longitude')
+            self.cube.coord("longitude")

     def test_fix_data_no_lat_lon(self):
         """Test ``fix_metadata`` for cubes with no latitude and longitude."""
-        self.cube.remove_coord('latitude')
-        self.cube.remove_coord('longitude')
+        self.cube.remove_coord("latitude")
+        self.cube.remove_coord("longitude")
         cubes = self.fix.fix_metadata(CubeList([self.cube]))
         self.assertEqual(len(cubes), 1)
         with self.assertRaises(CoordinateNotFoundError):
-            self.cube.coord('latitude')
+            self.cube.coord("latitude")
         with self.assertRaises(CoordinateNotFoundError):
-            self.cube.coord('longitude')
+            self.cube.coord("longitude")


 def test_get_clcalipso_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'IPSL-CM6A-LR', 'CFmon', 'clcalipso')
+    fix = Fix.get_fixes("CMIP6", "IPSL-CM6A-LR", "CFmon", "clcalipso")
     assert fix == [Clcalipso(None), AllVars(None), GenericFix(None)]


 @pytest.fixture
 def clcalipso_cubes():
     """Cubes to test fix for ``clcalipso``."""
-    alt_40_coord = iris.coords.DimCoord([0.0], var_name='height')
-    cube = iris.cube.Cube([0.0], var_name='clcalipso',
-                          dim_coords_and_dims=[(alt_40_coord.copy(), 0)])
-    x_cube = iris.cube.Cube([0.0], var_name='x',
-                            dim_coords_and_dims=[(alt_40_coord.copy(), 0)])
+    alt_40_coord = iris.coords.DimCoord([0.0], var_name="height")
+    cube = iris.cube.Cube(
+        [0.0],
+        var_name="clcalipso",
+        dim_coords_and_dims=[(alt_40_coord.copy(), 0)],
+    )
+    x_cube = iris.cube.Cube(
+        [0.0], var_name="x", dim_coords_and_dims=[(alt_40_coord.copy(), 0)]
+    )
     return iris.cube.CubeList([cube, x_cube])


 def test_clcalipso_fix_metadata(clcalipso_cubes):
     """Test ``fix_metadata`` for ``clcalipso``."""
-    vardef = get_var_info('CMIP6', 'CFmon', 'clcalipso')
+    vardef = get_var_info("CMIP6", "CFmon", "clcalipso")
     fix = Clcalipso(vardef)
     cubes = fix.fix_metadata(clcalipso_cubes)
     assert len(cubes) == 1
     cube = cubes[0]
-    coord = cube.coord('altitude')
-    assert coord.long_name == 'altitude'
-    assert coord.standard_name == 'altitude'
-    assert coord.var_name == 'alt40'
+    coord = cube.coord("altitude")
+    assert coord.long_name == "altitude"
+    assert coord.standard_name == "altitude"
+    assert coord.var_name == "alt40"


 @pytest.fixture
 def thetao_cubes():
     """Cubes to test fixes for ``thetao``."""
     time_coord = iris.coords.DimCoord(
-        [0.0004, 1.09776], var_name='time', standard_name='time',
-        units='days since 1850-01-01 00:00:00')
+        [0.0004, 1.09776],
+        var_name="time",
+        standard_name="time",
+        units="days since 1850-01-01 00:00:00",
+    )
     lat_coord = iris.coords.DimCoord(
-        [0.0, 1.0], var_name='lat', standard_name='latitude', units='degrees')
+        [0.0, 1.0], var_name="lat", standard_name="latitude", units="degrees"
+    )
     lon_coord = iris.coords.DimCoord(
-        [0.0, 1.0], var_name='lon', standard_name='longitude', units='degrees')
+        [0.0, 1.0], var_name="lon", standard_name="longitude", units="degrees"
+    )
     lev_coord = iris.coords.DimCoord(
-        [5.0, 10.0], bounds=[[2.5, 7.5], [7.5, 12.5]],
-        var_name='olevel', standard_name=None, units='m',
-        attributes={'positive': 'up'})
+        [5.0, 10.0],
+        bounds=[[2.5, 7.5], [7.5, 12.5]],
+        var_name="olevel",
+        standard_name=None,
+        units="m",
+        attributes={"positive": "up"},
+    )
     coord_specs = [
         (time_coord, 0),
         (lev_coord, 1),
@@ -128,7 +150,7 @@ def thetao_cubes():
     ]
     thetao_cube = iris.cube.Cube(
         np.ones((2, 2, 2, 2)),
-        var_name='thetao',
+        var_name="thetao",
         dim_coords_and_dims=coord_specs,
     )
     return iris.cube.CubeList([thetao_cube])
@@ -136,13 +158,13 @@ def thetao_cubes():

 def test_get_thetao_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'IPSL-CM6A-LR', 'Omon', 'thetao')
+    fix = Fix.get_fixes("CMIP6", "IPSL-CM6A-LR", "Omon", "thetao")
     assert fix == [Omon(None), AllVars(None), GenericFix(None)]


 def test_thetao_fix_metadata(thetao_cubes):
     """Test ``fix_metadata`` for ``thetao``."""
-    vardef = get_var_info('CMIP6', 'Omon', 'thetao')
+    vardef = get_var_info("CMIP6", "Omon", "thetao")
     fix = Omon(vardef)
     out_cubes = fix.fix_metadata(thetao_cubes)
     assert out_cubes is thetao_cubes
@@ -150,9 +172,9 @@ def test_thetao_fix_metadata(thetao_cubes):
     out_cube = out_cubes[0]

     # Check metadata of depth coordinate
-    depth_coord = out_cube.coord('depth')
-    assert depth_coord.standard_name == 'depth'
-    assert depth_coord.var_name == 'lev'
-    assert depth_coord.long_name == 'ocean depth coordinate'
-    assert depth_coord.units == 'm'
-    assert depth_coord.attributes == {'positive': 'down'}
+    depth_coord = out_cube.coord("depth")
+    assert depth_coord.standard_name == "depth"
+    assert depth_coord.var_name == "lev"
+    assert depth_coord.long_name == "ocean depth coordinate"
+    assert depth_coord.units == "m"
+    assert depth_coord.attributes == {"positive": "down"}
diff --git a/tests/integration/cmor/_fixes/cmip6/test_kace_1_0_g.py b/tests/integration/cmor/_fixes/cmip6/test_kace_1_0_g.py
index 5d6b4641d8..71b90c8125 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_kace_1_0_g.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_kace_1_0_g.py
@@ -1,4 +1,5 @@
 """Tests for the fixes of KACE-1-0-G."""
+
 import iris
 import numpy as np
 import pytest
@@ -12,7 +13,7 @@ def test_get_cl_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'KACE-1-0-G', 'Amon', 'cl')
+    fix = Fix.get_fixes("CMIP6", "KACE-1-0-G", "Amon", "cl")
     assert fix == [Cl(None), AllVars(None), GenericFix(None)]
@@ -23,7 +24,7 @@ def test_cl_fix():

 def test_get_cli_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'KACE-1-0-G', 'Amon', 'cli')
+    fix = Fix.get_fixes("CMIP6", "KACE-1-0-G", "Amon", "cli")
     assert fix == [Cli(None), AllVars(None), GenericFix(None)]
@@ -34,7 +35,7 @@ def test_cli_fix():

 def test_get_clw_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'KACE-1-0-G', 'Amon', 'clw')
+    fix = Fix.get_fixes("CMIP6", "KACE-1-0-G", "Amon", "clw")
     assert fix == [Clw(None), AllVars(None), GenericFix(None)]
@@ -45,7 +46,7 @@ def test_clw_fix():

 def test_get_tos_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'KACE-1-0-G', 'Omon', 'tos')
+    fix = Fix.get_fixes("CMIP6", "KACE-1-0-G", "Omon", "tos")
     assert fix == [Tos(None), AllVars(None), GenericFix(None)]
@@ -58,63 +59,79 @@ def test_tos_fix():
 def tos_cubes():
     correct_time_coord = iris.coords.DimCoord(
         [15.5, 45, 74.5],
-        bounds=[[0., 31.], [31., 59.], [59., 90.]],
-        var_name='time',
-        standard_name='time',
-        units=Unit('days since 0001-01-01 00:00:00', calendar='365_day'))
+        bounds=[[0.0, 31.0], [31.0, 59.0], [59.0, 90.0]],
+        var_name="time",
+        standard_name="time",
+        units=Unit("days since 0001-01-01 00:00:00", calendar="365_day"),
+    )
     wrong_time_coord = iris.coords.DimCoord(
         [15.5, 45, 74.5],
-        bounds=[[5.5, 25.5], [35., 55.], [64.5, 84.5]],
-        var_name='time',
-        standard_name='time',
-        units=Unit('days since 0001-01-01 00:00:00', calendar='365_day'))
-
-    correct_lat_coord = iris.coords.DimCoord([0.0, 1.0],
-                                             bounds=[[-0.5, 0.5], [0.5, 1.5]],
-                                             var_name='lat',
-                                             standard_name='latitude',
-                                             units='degrees')
-
-    correct_lon_coord = iris.coords.DimCoord([0.0, 1.0],
-                                             bounds=[[-0.5, 0.5], [0.5, 1.5]],
-                                             var_name='lon',
-                                             standard_name='longitude',
-                                             units='degrees')
-
-    correct_cube = iris.cube.Cube(10 * np.ones((3, 2, 2)),
-                                  var_name='tos',
-                                  dim_coords_and_dims=[(correct_time_coord, 0),
-                                                       (correct_lat_coord, 1),
-                                                       (correct_lon_coord, 2)],
-                                  attributes={'table_id': 'Omon'},
-                                  units=Unit('degC'))
-
-    wrong_cube = iris.cube.Cube(10 * np.ones((3, 2, 2)),
-                                var_name='tos',
-                                dim_coords_and_dims=[(wrong_time_coord, 0),
-                                                     (correct_lat_coord, 1),
-                                                     (correct_lon_coord, 2)],
-                                attributes={'table_id': 'Omon'},
-                                units=Unit('degC'))
+        bounds=[[5.5, 25.5], [35.0, 55.0], [64.5, 84.5]],
+        var_name="time",
+        standard_name="time",
+        units=Unit("days since 0001-01-01 00:00:00", calendar="365_day"),
+    )
+
+    correct_lat_coord = iris.coords.DimCoord(
+        [0.0, 1.0],
+        bounds=[[-0.5, 0.5], [0.5, 1.5]],
+        var_name="lat",
+        standard_name="latitude",
+        units="degrees",
+    )
+
+    correct_lon_coord = iris.coords.DimCoord(
+        [0.0, 1.0],
+        bounds=[[-0.5, 0.5], [0.5, 1.5]],
+        var_name="lon",
+        standard_name="longitude",
+        units="degrees",
+    )
+
+    correct_cube = iris.cube.Cube(
+        10 * np.ones((3, 2, 2)),
+        var_name="tos",
+        dim_coords_and_dims=[
+            (correct_time_coord, 0),
+            (correct_lat_coord, 1),
+            (correct_lon_coord, 2),
+        ],
+        attributes={"table_id": "Omon"},
+        units=Unit("degC"),
+    )
+
+    wrong_cube = iris.cube.Cube(
+        10 * np.ones((3, 2, 2)),
+        var_name="tos",
+        dim_coords_and_dims=[
+            (wrong_time_coord, 0),
+            (correct_lat_coord, 1),
+            (correct_lon_coord, 2),
+        ],
+        attributes={"table_id": "Omon"},
+        units=Unit("degC"),
+    )

     return iris.cube.CubeList([correct_cube, wrong_cube])


 def test_get_allvars_fix():
-    fix = Fix.get_fixes('CMIP6', 'KACE-1-0-G', 'Omon', 'tos')
+    fix = Fix.get_fixes("CMIP6", "KACE-1-0-G", "Omon", "tos")
     assert fix == [OceanFixGrid(None), AllVars(None), GenericFix(None)]


 def test_allvars_fix_metadata(monkeypatch, tos_cubes, caplog):
     fix = AllVars(None)
-    monkeypatch.setitem(fix.extra_facets, 'frequency', 'mon')
-    monkeypatch.setitem(fix.extra_facets, 'dataset', 'KACE-1-0-G')
+    monkeypatch.setitem(fix.extra_facets, "frequency", "mon")
+    monkeypatch.setitem(fix.extra_facets, "dataset", "KACE-1-0-G")
     out_cubes = fix.fix_metadata(tos_cubes)
     assert tos_cubes is out_cubes
     for cube in out_cubes:
-        time = cube.coord('time')
+        time = cube.coord("time")
         assert all(time.bounds[1:, 0] == time.bounds[:-1, 1])
-    msg = ("Using 'area_weighted' regridder scheme in Omon variables "
-           "for dataset KACE-1-0-G causes discontinuities in the longitude "
-           "coordinate.")
+    msg = (
+        "Using 'area_weighted' regridder scheme in Omon variables "
+        "for dataset KACE-1-0-G causes discontinuities in the longitude "
+        "coordinate."
+    )
     assert msg in caplog.text
diff --git a/tests/integration/cmor/_fixes/cmip6/test_kiost_esm.py b/tests/integration/cmor/_fixes/cmip6/test_kiost_esm.py
index 80af6436e1..0ed403145b 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_kiost_esm.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_kiost_esm.py
@@ -1,4 +1,5 @@
 """Test fixes for KIOST-ESM."""
+
 import iris
 import numpy as np
 import pytest
@@ -14,96 +15,98 @@
 @pytest.fixture
 def sfcwind_cubes():
-    correct_lat_coord = DimCoord([0.0],
-                                 var_name='lat',
-                                 standard_name='latitude')
-    wrong_lat_coord = DimCoord([0.0],
-                               var_name='latitudeCoord',
-                               standard_name='latitude')
-    correct_lon_coord = DimCoord([0.0],
-                                 var_name='lon',
-                                 standard_name='longitude')
-    wrong_lon_coord = DimCoord([0.0],
-                               var_name='longitudeCoord',
-                               standard_name='longitude')
+    correct_lat_coord = DimCoord(
+        [0.0], var_name="lat", standard_name="latitude"
+    )
+    wrong_lat_coord = DimCoord(
+        [0.0], var_name="latitudeCoord", standard_name="latitude"
+    )
+    correct_lon_coord = DimCoord(
+        [0.0], var_name="lon", standard_name="longitude"
+    )
+    wrong_lon_coord = DimCoord(
+        [0.0], var_name="longitudeCoord", standard_name="longitude"
+    )
     correct_cube = Cube(
         [[10.0]],
-        var_name='sfcWind',
+        var_name="sfcWind",
         dim_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 1)],
     )
     wrong_cube = Cube(
         [[10.0]],
-        var_name='ta',
+        var_name="ta",
         dim_coords_and_dims=[(wrong_lat_coord, 0), (wrong_lon_coord, 1)],
-        attributes={'parent_time_units': 'days since 0000-00-00 (noleap)'},
+        attributes={"parent_time_units": "days since 0000-00-00 (noleap)"},
     )
-    scalar_cube = Cube(0.0, var_name='ps')
+    scalar_cube = Cube(0.0, var_name="ps")
     return CubeList([correct_cube, wrong_cube, scalar_cube])


 @pytest.fixture
 def tas_cubes():
-    correct_lat_coord = DimCoord([0.0],
-                                 var_name='lat',
-                                 standard_name='latitude')
-    wrong_lat_coord = DimCoord([0.0],
-                               var_name='latitudeCoord',
-                               standard_name='latitude')
-    correct_lon_coord = DimCoord([0.0],
-                                 var_name='lon',
-                                 standard_name='longitude')
-    wrong_lon_coord = DimCoord([0.0],
-                               var_name='longitudeCoord',
-                               standard_name='longitude')
+    correct_lat_coord = DimCoord(
+        [0.0], var_name="lat", standard_name="latitude"
+    )
+    wrong_lat_coord = DimCoord(
+        [0.0], var_name="latitudeCoord", standard_name="latitude"
+    )
+    correct_lon_coord = DimCoord(
+        [0.0], var_name="lon", standard_name="longitude"
+    )
+    wrong_lon_coord = DimCoord(
+        [0.0], var_name="longitudeCoord", standard_name="longitude"
+    )
     correct_cube = Cube(
         [[10.0]],
-        var_name='tas',
+        var_name="tas",
         dim_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 1)],
     )
     wrong_cube = Cube(
         [[10.0]],
-        var_name='ta',
+        var_name="ta",
         dim_coords_and_dims=[(wrong_lat_coord, 0), (wrong_lon_coord, 1)],
-        attributes={'parent_time_units': 'days since 0000-00-00 (noleap)'},
+        attributes={"parent_time_units": "days since 0000-00-00 (noleap)"},
     )
-    scalar_cube = Cube(0.0, var_name='ps')
+    scalar_cube = Cube(0.0, var_name="ps")
     return CubeList([correct_cube, wrong_cube, scalar_cube])


 def test_get_sfcwind_fix():
-    fix = Fix.get_fixes('CMIP6', 'KIOST-ESM', 'Amon', 'sfcWind')
+    fix = Fix.get_fixes("CMIP6", "KIOST-ESM", "Amon", "sfcWind")
     assert fix == [SfcWind(None), GenericFix(None)]


 def test_sfcwind_fix_metadata(sfcwind_cubes):
     for cube in sfcwind_cubes:
         with pytest.raises(iris.exceptions.CoordinateNotFoundError):
-            cube.coord('height')
-    height_coord = iris.coords.AuxCoord(10.0,
-                                        var_name='height',
-                                        standard_name='height',
-                                        long_name='height',
-                                        units=Unit('m'),
-                                        attributes={'positive': 'up'})
-    vardef = get_var_info('CMIP6', 'Amon', 'sfcWind')
+            cube.coord("height")
+    height_coord = iris.coords.AuxCoord(
+        10.0,
+        var_name="height",
+        standard_name="height",
+        long_name="height",
+        units=Unit("m"),
+        attributes={"positive": "up"},
+    )
+    vardef = get_var_info("CMIP6", "Amon", "sfcWind")
     fix = SfcWind(vardef)

     # Check fix
     out_cubes = fix.fix_metadata(sfcwind_cubes)
-    assert out_cubes[0].var_name == 'sfcWind'
-    coord = out_cubes[0].coord('height')
+    assert out_cubes[0].var_name == "sfcWind"
+    coord = out_cubes[0].coord("height")
     assert coord == height_coord

     # Check that height coordinate is not added twice
     out_cubes_2 = fix.fix_metadata(out_cubes)
-    assert out_cubes_2[0].var_name == 'sfcWind'
-    coord = out_cubes_2[0].coord('height')
+    assert out_cubes_2[0].var_name == "sfcWind"
+    coord = out_cubes_2[0].coord("height")
     assert coord == height_coord


 def test_get_siconc_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'KIOST-ESM', 'SImon', 'siconc')
+    fix = Fix.get_fixes("CMIP6", "KIOST-ESM", "SImon", "siconc")
     assert fix == [Siconc(None), GenericFix(None)]
@@ -114,10 +117,10 @@ def test_siconc_fix():

 def test_siconc_fix_data():
     """Test fix for ``siconc``."""
-    vardef = get_var_info('CMIP6', 'SImon', 'siconc')
+    vardef = get_var_info("CMIP6", "SImon", "siconc")
     fix = Siconc(vardef)

-    cube = Cube([0.0, np.nan, 1.0], var_name='siconc')
+    cube = Cube([0.0, np.nan, 1.0], var_name="siconc")
     assert not np.ma.is_masked(cube.data)

     out_cube = fix.fix_data(cube)
@@ -126,31 +129,33 @@ def test_siconc_fix_data():

 def test_get_tas_fix():
-    fix = Fix.get_fixes('CMIP6', 'KIOST-ESM', 'Amon', 'tas')
+    fix = Fix.get_fixes("CMIP6", "KIOST-ESM", "Amon", "tas")
     assert fix == [Tas(None), GenericFix(None)]


 def test_tas_fix_metadata(tas_cubes):
     for cube in tas_cubes:
         with pytest.raises(iris.exceptions.CoordinateNotFoundError):
-            cube.coord('height')
-    height_coord = iris.coords.AuxCoord(2.0,
-                                        var_name='height',
-                                        standard_name='height',
-                                        long_name='height',
-                                        units=Unit('m'),
-                                        attributes={'positive': 'up'})
-    vardef = get_var_info('CMIP6', 'Amon', 'tas')
+            cube.coord("height")
+    height_coord = iris.coords.AuxCoord(
+        2.0,
+        var_name="height",
+        standard_name="height",
+        long_name="height",
+        units=Unit("m"),
+        attributes={"positive": "up"},
+    )
+    vardef = get_var_info("CMIP6", "Amon", "tas")
     fix = Tas(vardef)

     # Check fix
     out_cubes = fix.fix_metadata(tas_cubes)
-    assert out_cubes[0].var_name == 'tas'
-    coord = out_cubes[0].coord('height')
+    assert out_cubes[0].var_name == "tas"
+    coord = out_cubes[0].coord("height")
     assert coord == height_coord

     # Check that height coordinate is not added twice
     out_cubes_2 = fix.fix_metadata(out_cubes)
-    assert out_cubes_2[0].var_name == 'tas'
-    coord = out_cubes_2[0].coord('height')
+    assert out_cubes_2[0].var_name == "tas"
+    coord = out_cubes_2[0].coord("height")
     assert coord == height_coord
diff --git a/tests/integration/cmor/_fixes/cmip6/test_mcm_ua_1_0.py b/tests/integration/cmor/_fixes/cmip6/test_mcm_ua_1_0.py
index 85eed25338..42dc83a214 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_mcm_ua_1_0.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_mcm_ua_1_0.py
@@ -1,4 +1,5 @@
 """Tests for the fixes of MCM-UA-1-0."""
+
 import iris
 import numpy as np
 import pytest
@@ -12,124 +13,149 @@
 @pytest.fixture
 def cubes():
-    correct_lat_coord = iris.coords.DimCoord([0.0],
-                                             var_name='lat',
-                                             standard_name=' latitude ',
-                                             long_name=' latitude')
-    wrong_lat_coord = iris.coords.DimCoord([0.0],
-                                           var_name='latitudeCoord',
-                                           standard_name=' latitude',
-                                           long_name='latitude')
-    correct_lon_coord = iris.coords.DimCoord([0.0],
-                                             var_name='lon',
-                                             standard_name=' longitude ',
-                                             long_name='longitude ')
-    wrong_lon_coord = iris.coords.DimCoord([0.0],
-                                           var_name='longitudeCoord',
-                                           standard_name='longitude',
-                                           long_name=' longitude')
+    correct_lat_coord = iris.coords.DimCoord(
+        [0.0],
+        var_name="lat",
+        standard_name=" latitude ",
+        long_name=" latitude",
+    )
+    wrong_lat_coord = iris.coords.DimCoord(
+        [0.0],
+        var_name="latitudeCoord",
+        standard_name=" latitude",
+        long_name="latitude",
+    )
+    correct_lon_coord = iris.coords.DimCoord(
+        [0.0],
+        var_name="lon",
+        standard_name=" longitude ",
+        long_name="longitude ",
+    )
+    wrong_lon_coord = iris.coords.DimCoord(
+        [0.0],
+        var_name="longitudeCoord",
+        standard_name="longitude",
+        long_name=" longitude",
+    )
     correct_cube = iris.cube.Cube(
         [[10.0]],
-        var_name='tas',
-        standard_name='air_temperature ',
-        long_name=' Air Temperature ',
+        var_name="tas",
+        standard_name="air_temperature ",
+        long_name=" Air Temperature ",
         dim_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 1)],
     )
     wrong_cube = iris.cube.Cube(
         [[10.0]],
-        var_name='ta',
-        standard_name=' air_temperature ',
-        long_name='Air Temperature',
+        var_name="ta",
+        standard_name=" air_temperature ",
+        long_name="Air Temperature",
         dim_coords_and_dims=[(wrong_lat_coord, 0), (wrong_lon_coord, 1)],
-        attributes={'parent_time_units': 'days since 0000-00-00 (noleap)'},
+        attributes={"parent_time_units": "days since 0000-00-00 (noleap)"},
+    )
+    scalar_cube = iris.cube.Cube(
+        0.0,
+        var_name="ps",
+        standard_name="air_pressure ",
+        long_name=" Air pressure ",
     )
-    scalar_cube = iris.cube.Cube(0.0, var_name='ps',
-                                 standard_name='air_pressure ',
-                                 long_name=' Air pressure ')
     return iris.cube.CubeList([correct_cube, wrong_cube, scalar_cube])


 @pytest.fixture
 def uas_cubes():
-    correct_lat_coord = iris.coords.DimCoord([0.0],
-                                             var_name='lat',
-                                             standard_name=' latitude ',
-                                             long_name=' latitude')
-    wrong_lat_coord = iris.coords.DimCoord([0.0],
-                                           var_name='latitudeCoord',
-                                           standard_name=' latitude',
-                                           long_name='latitude')
-    correct_lon_coord = iris.coords.DimCoord([0.0],
-                                             var_name='lon',
-                                             standard_name=' longitude ',
-                                             long_name='longitude ')
-    wrong_lon_coord = iris.coords.DimCoord([0.0],
-                                           var_name='longitudeCoord',
-                                           standard_name='longitude',
-                                           long_name=' longitude')
+    correct_lat_coord = iris.coords.DimCoord(
+        [0.0],
+        var_name="lat",
+        standard_name=" latitude ",
+        long_name=" latitude",
+    )
+    wrong_lat_coord = iris.coords.DimCoord(
+        [0.0],
+        var_name="latitudeCoord",
+        standard_name=" latitude",
+        long_name="latitude",
+    )
+    correct_lon_coord = iris.coords.DimCoord(
+        [0.0],
+        var_name="lon",
+        standard_name=" longitude ",
+        long_name="longitude ",
+    )
+    wrong_lon_coord = iris.coords.DimCoord(
+        [0.0],
+        var_name="longitudeCoord",
+        standard_name="longitude",
+        long_name=" longitude",
+    )
     correct_cube = iris.cube.Cube(
         [[10.0]],
-        var_name='uas',
-        standard_name='eastward_wind ',
-        long_name=' East Near-Surface Wind ',
+        var_name="uas",
+        standard_name="eastward_wind ",
+        long_name=" East Near-Surface Wind ",
         dim_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 1)],
     )
     wrong_cube = iris.cube.Cube(
         [[10.0]],
-        var_name='ta',
-        standard_name=' air_temperature ',
-        long_name='Air Temperature',
+        var_name="ta",
+        standard_name=" air_temperature ",
+        long_name="Air Temperature",
         dim_coords_and_dims=[(wrong_lat_coord, 0), (wrong_lon_coord, 1)],
-        attributes={'parent_time_units': 'days since 0000-00-00 (noleap)'},
+        attributes={"parent_time_units": "days since 0000-00-00 (noleap)"},
+    )
+    scalar_cube = iris.cube.Cube(
+        0.0,
+        var_name="ps",
+        standard_name="air_pressure ",
+        long_name=" Air pressure ",
     )
-    scalar_cube = iris.cube.Cube(0.0, var_name='ps',
-                                 standard_name='air_pressure ',
-                                 long_name=' Air pressure ')
     return iris.cube.CubeList([correct_cube, wrong_cube, scalar_cube])


 @pytest.fixture
 def cubes_bounds():
-    lat_coord = iris.coords.DimCoord([0.0],
-                                     var_name='lat',
-                                     standard_name='latitude')
-    correct_lon_coord = iris.coords.DimCoord([0, 356.25],
-                                             bounds=[[-1.875, 1.875],
-                                                     [354.375, 358.125]],
-                                             var_name='lon',
-                                             standard_name='longitude',
-                                             circular=True)
-    wrong_lon_coord = iris.coords.DimCoord([0, 356.25],
-                                           bounds=[[-1.875, 1.875],
-                                                   [354.375, 360]],
-                                           var_name='lon',
-                                           standard_name='longitude')
+    lat_coord = iris.coords.DimCoord(
+        [0.0], var_name="lat", standard_name="latitude"
+    )
+    correct_lon_coord = iris.coords.DimCoord(
+        [0, 356.25],
+        bounds=[[-1.875, 1.875], [354.375, 358.125]],
+        var_name="lon",
+        standard_name="longitude",
+        circular=True,
+    )
+    wrong_lon_coord = iris.coords.DimCoord(
+        [0, 356.25],
+        bounds=[[-1.875, 1.875], [354.375, 360]],
+        var_name="lon",
+        standard_name="longitude",
+    )
     correct_cube = iris.cube.Cube(
         [[10.0, 10.0]],
-        var_name='tas',
+        var_name="tas",
         dim_coords_and_dims=[(lat_coord, 0), (correct_lon_coord, 1)],
     )
     wrong_cube = iris.cube.Cube(
         [[10.0, 10.0]],
-        var_name='tas',
+        var_name="tas",
         dim_coords_and_dims=[(lat_coord, 0), (wrong_lon_coord, 1)],
     )
     return iris.cube.CubeList([correct_cube, wrong_cube])


 def test_get_allvars_fix():
-    fix = Fix.get_fixes('CMIP6', 'MCM-UA-1-0', 'Amon',
-                        'arbitrary_var_name_and_wrong_lon_bnds')
+    fix = Fix.get_fixes(
+        "CMIP6", "MCM-UA-1-0", "Amon", "arbitrary_var_name_and_wrong_lon_bnds"
+    )
     assert fix == [AllVars(None), GenericFix(None)]


 def test_get_tas_fix():
-    fix = Fix.get_fixes('CMIP6', 'MCM-UA-1-0', 'Amon', 'tas')
+    fix = Fix.get_fixes("CMIP6", "MCM-UA-1-0", "Amon", "tas")
     assert fix == [Tas(None), AllVars(None), GenericFix(None)]


 def test_get_uas_fix():
-    fix = Fix.get_fixes('CMIP6', 'MCM-UA-1-0', 'Amon', 'uas')
+    fix = Fix.get_fixes("CMIP6", "MCM-UA-1-0", "Amon", "uas")
     assert fix == [Uas(None), AllVars(None), GenericFix(None)]
@@ -138,33 +164,34 @@ def test_allvars_fix_metadata(cubes):
     out_cubes = fix.fix_metadata(cubes)
     assert cubes is out_cubes
     for cube in out_cubes:
-        if cube.var_name == 'ps':
-            assert cube.standard_name == 'air_pressure'
-            assert cube.long_name == 'Air pressure'
-        elif cube.var_name == 'tas' or cube.var_name == 'ta':
-            assert cube.standard_name == 'air_temperature'
-            assert cube.long_name == 'Air Temperature'
+        if cube.var_name == "ps":
+            assert cube.standard_name == "air_pressure"
+            assert cube.long_name == "Air pressure"
+        elif cube.var_name == "tas" or cube.var_name == "ta":
+            assert cube.standard_name == "air_temperature"
+            assert cube.long_name == "Air Temperature"
         else:
             assert False, "Invalid var_name"
         try:
-            lat_coord = cube.coord('latitude')
+            lat_coord = cube.coord("latitude")
         except iris.exceptions.CoordinateNotFoundError:
-            assert cube.var_name == 'ps'
+            assert cube.var_name == "ps"
         else:
-            assert lat_coord.var_name == 'lat'
-            assert lat_coord.standard_name == 'latitude'
-            assert lat_coord.long_name == 'latitude'
+            assert lat_coord.var_name == "lat"
+            assert lat_coord.standard_name == "latitude"
+            assert lat_coord.long_name == "latitude"
         try:
-            lon_coord = cube.coord('longitude')
+            lon_coord = cube.coord("longitude")
         except iris.exceptions.CoordinateNotFoundError:
-            assert cube.var_name == 'ps'
+            assert cube.var_name == "ps"
         else:
-            assert lon_coord.var_name == 'lon'
-            assert lon_coord.standard_name == 'longitude'
-            assert lon_coord.long_name == 'longitude'
-        if 'parent_time_units' in cube.attributes:
-            assert cube.attributes['parent_time_units'] == (
-                'days since 0000-00-00')
+            assert lon_coord.var_name == "lon"
+            assert lon_coord.standard_name == "longitude"
+            assert lon_coord.long_name == "longitude"
+        if "parent_time_units" in cube.attributes:
+            assert cube.attributes["parent_time_units"] == (
+                "days since 0000-00-00"
+            )


 def test_allvars_fix_lon_bounds(cubes_bounds):
@@ -173,7 +200,7 @@ def test_allvars_fix_lon_bounds(cubes_bounds):
     assert cubes_bounds is out_cubes
     for cube in out_cubes:
         try:
-            lon_coord = cube.coord('longitude')
+            lon_coord = cube.coord("longitude")
         except iris.exceptions.CoordinateNotFoundError:
             pass
         else:
@@ -184,70 +211,84 @@ def test_tas_fix_metadata(cubes):
     for cube in cubes:
         with pytest.raises(iris.exceptions.CoordinateNotFoundError):
-            cube.coord('height')
-    height_coord = iris.coords.AuxCoord(2.0,
-                                        var_name='height',
-                                        standard_name='height',
-                                        long_name='height',
-                                        units=Unit('m'),
-                                        attributes={'positive': 'up'})
-    vardef = get_var_info('CMIP6', 'Amon', 'tas')
+            cube.coord("height")
+    height_coord = iris.coords.AuxCoord(
+        2.0,
+        var_name="height",
+        standard_name="height",
+        long_name="height",
+        units=Unit("m"),
+        attributes={"positive": "up"},
+    )
+    vardef = get_var_info("CMIP6", "Amon", "tas")
     fix = Tas(vardef)

     # Check fix
     out_cubes = fix.fix_metadata(cubes)
-    assert out_cubes[0].var_name == 'tas'
-    coord = out_cubes[0].coord('height')
+    assert out_cubes[0].var_name == "tas"
+    coord = out_cubes[0].coord("height")
     assert coord == height_coord

     # Check that height coordinate is not added twice
     out_cubes_2 = fix.fix_metadata(out_cubes)
-    assert out_cubes_2[0].var_name == 'tas'
-    coord = out_cubes_2[0].coord('height')
+    assert out_cubes_2[0].var_name == "tas"
+    coord = out_cubes_2[0].coord("height")
     assert coord == height_coord


 def test_uas_fix_metadata(uas_cubes):
     for cube in uas_cubes:
         with pytest.raises(iris.exceptions.CoordinateNotFoundError):
-            cube.coord('height')
-    height_coord = iris.coords.AuxCoord(10.0,
-                                        var_name='height',
-                                        standard_name='height',
-                                        long_name='height',
-                                        units=Unit('m'),
-                                        attributes={'positive': 'up'})
-    vardef = get_var_info('CMIP6', 'Amon', 'uas')
+            cube.coord("height")
+    height_coord = iris.coords.AuxCoord(
+        10.0,
+        var_name="height",
+        standard_name="height",
+        long_name="height",
+        units=Unit("m"),
+        attributes={"positive": "up"},
+    )
+    vardef = get_var_info("CMIP6", "Amon", "uas")
     fix = Uas(vardef)

     # Check fix
     out_cubes = fix.fix_metadata(uas_cubes)
-    assert out_cubes[0].var_name == 'uas'
-    coord = out_cubes[0].coord('height')
+    assert out_cubes[0].var_name == "uas"
+    coord = out_cubes[0].coord("height")
     assert coord == height_coord

     # Check that height coordinate is not added twice
     out_cubes_2 = fix.fix_metadata(out_cubes)
-    assert out_cubes_2[0].var_name == 'uas'
-    coord = out_cubes_2[0].coord('height')
+    assert out_cubes_2[0].var_name == "uas"
+    coord = out_cubes_2[0].coord("height")
     assert coord == height_coord


 @pytest.fixture
 def thetao_cubes():
     time_coord = iris.coords.DimCoord(
-        [0.0004, 1.09776], var_name='time', standard_name='time',
-        units='days since 1850-01-01 00:00:00')
+        [0.0004, 1.09776],
+        var_name="time",
+        standard_name="time",
+        units="days since 1850-01-01 00:00:00",
+    )
     lat_coord = iris.coords.DimCoord(
-        [0.0, 1.0], var_name='lat', standard_name='latitude', units='degrees')
-    lon_coord = iris.coords.DimCoord([-0.9375, 357.1875],
-                                     bounds=[[-1.875, 0.], [356.25, 358.125]],
-                                     var_name='lon',
-                                     standard_name='longitude')
+        [0.0, 1.0], var_name="lat", standard_name="latitude", units="degrees"
+    )
+    lon_coord = iris.coords.DimCoord(
+        [-0.9375, 357.1875],
+        bounds=[[-1.875, 0.0], [356.25, 358.125]],
+        var_name="lon",
+        standard_name="longitude",
+    )
     lev_coord = iris.coords.DimCoord(
-        [5.0, 10.0], bounds=[[2.5, 7.5], [7.5, 12.5]],
-        var_name='lev', standard_name=None, units='m',
-        attributes={'positive': 'up'})
+        [5.0, 10.0],
+        bounds=[[2.5, 7.5], [7.5, 12.5]],
+        var_name="lev",
+        standard_name=None,
+        units="m",
+        attributes={"positive": "up"},
+    )
     coord_specs = [
         (time_coord, 0),
         (lev_coord, 1),
@@ -256,7 +297,7 @@ def thetao_cubes():
     ]
     thetao_cube = iris.cube.Cube(
         np.arange(16).reshape(2, 2, 2, 2),
-        var_name='thetao',
+        var_name="thetao",
         dim_coords_and_dims=coord_specs,
     )
@@ -265,13 +306,13 @@ def thetao_cubes():

 def test_get_thetao_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'MCM-UA-1-0', 'Omon', 'thetao')
+    fix = Fix.get_fixes("CMIP6", "MCM-UA-1-0", "Omon", "thetao")
     assert fix == [Omon(None), AllVars(None), GenericFix(None)]


 def test_thetao_fix_metadata(thetao_cubes):
     """Test ``fix_metadata`` for ``thetao``."""
-    vardef = get_var_info('CMIP6', 'Omon', 'thetao')
+    vardef = get_var_info("CMIP6", "Omon", "thetao")
     fix_omon = Omon(vardef)
     fix_allvars = AllVars(vardef)
     out_cubes = fix_omon.fix_metadata(thetao_cubes)
@@ -281,26 +322,25 @@ def test_thetao_fix_metadata(thetao_cubes):
     out_cube = out_cubes[0]

     # Check data of cube
-    np.testing.assert_allclose(out_cube.data,
-                               [[[[1, 0],
-                                  [3, 2]],
-                                 [[5, 4],
-                                  [7, 6]]],
-                                [[[9, 8],
-                                  [11, 10]],
-                                 [[13, 12],
-                                  [15, 14]]]])
+    np.testing.assert_allclose(
+        out_cube.data,
+        [
+            [[[1, 0], [3, 2]], [[5, 4], [7, 6]]],
+            [[[9, 8], [11, 10]], [[13, 12], [15, 14]]],
+        ],
+    )

     # Check data of longitude
-    lon_coord = out_cube.coord('longitude')
+    lon_coord = out_cube.coord("longitude")
     np.testing.assert_allclose(lon_coord.points, [357.1875, 359.0625])
-    np.testing.assert_allclose(lon_coord.bounds,
-                               [[356.25, 358.125], [358.125, 360.0]])
+    np.testing.assert_allclose(
+        lon_coord.bounds, [[356.25, 358.125], [358.125, 360.0]]
+    )

     # Check metadata of depth coordinate
-    depth_coord = out_cube.coord('depth')
-    assert depth_coord.standard_name == 'depth'
-    assert depth_coord.var_name == 'lev'
-    assert depth_coord.long_name == 'ocean depth coordinate'
-    assert depth_coord.units == 'm'
-    assert depth_coord.attributes == {'positive': 'down'}
+    depth_coord = out_cube.coord("depth")
+    assert depth_coord.standard_name == "depth"
+    assert depth_coord.var_name == "lev"
+    assert depth_coord.long_name == "ocean depth coordinate"
+    assert depth_coord.units == "m"
+    assert depth_coord.attributes == {"positive": "down"}
diff --git a/tests/integration/cmor/_fixes/cmip6/test_miroc6.py b/tests/integration/cmor/_fixes/cmip6/test_miroc6.py
index 26662a0ec3..24b15a07e7 100644
--- a/tests/integration/cmor/_fixes/cmip6/test_miroc6.py
+++ b/tests/integration/cmor/_fixes/cmip6/test_miroc6.py
@@ -1,4 +1,5 @@
 """Test fixes for MIROC6."""
+
 import iris
 import numpy as np
 import pytest
@@ -11,7 +12,7 @@ def test_get_cl_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('CMIP6', 'MIROC6', 'Amon', 'cl')
+    fix = Fix.get_fixes("CMIP6", "MIROC6", "Amon", "cl")
     assert fix == [Cl(None), GenericFix(None)]
@@ -22,7 +23,7 @@ def test_cl_fix():

 def test_get_cli_fix():
     """Test getting of fix."""
getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MIROC6', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "MIROC6", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -33,7 +34,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MIROC6', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "MIROC6", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -45,41 +46,49 @@ def test_clw_fix(): @pytest.fixture def tos_cubes(): """Sample cube.""" - time_coord = iris.coords.DimCoord([0.2], - standard_name='time', - var_name='time', - units='days since 1850-01-01') - lat_coord = iris.coords.DimCoord([23.0211550, 26.3700972], - standard_name='latitude', - var_name='lat', - units='degrees_north') - lon_coord = iris.coords.DimCoord([23.0211550, 26.3700972], - standard_name='longitude', - var_name='lon', - units='degrees_east') + time_coord = iris.coords.DimCoord( + [0.2], + standard_name="time", + var_name="time", + units="days since 1850-01-01", + ) + lat_coord = iris.coords.DimCoord( + [23.0211550, 26.3700972], + standard_name="latitude", + var_name="lat", + units="degrees_north", + ) + lon_coord = iris.coords.DimCoord( + [23.0211550, 26.3700972], + standard_name="longitude", + var_name="lon", + units="degrees_east", + ) coords_specs = [(time_coord, 0), (lat_coord, 1), (lon_coord, 2)] - cube = iris.cube.Cube([[[22.0, 22.0], [22.0, 22.0]]], - standard_name='sea_surface_temperature', - var_name='tos', - units='degC', - dim_coords_and_dims=coords_specs) + cube = iris.cube.Cube( + [[[22.0, 22.0], [22.0, 22.0]]], + standard_name="sea_surface_temperature", + var_name="tos", + units="degC", + dim_coords_and_dims=coords_specs, + ) return iris.cube.CubeList([cube]) def test_get_tos_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MIROC6', 'Omon', 'tos') + fix = Fix.get_fixes("CMIP6", "MIROC6", "Omon", "tos") assert fix == [Tos(None), GenericFix(None)] def test_tos_fix_metadata(tos_cubes): """Test ``fix_metadata``.""" - vardef = get_var_info('CMIP6', 'Omon', 'tos') + vardef = get_var_info("CMIP6", "Omon", "tos") fix = Tos(vardef) fixed_cubes = fix.fix_metadata(tos_cubes) for fixed_tos_cube in fixed_cubes: - fixed_lon = fixed_tos_cube.coord('longitude') - fixed_lat = fixed_tos_cube.coord('latitude') + fixed_lon = fixed_tos_cube.coord("longitude") + fixed_lat = fixed_tos_cube.coord("latitude") assert fixed_lon.bounds is not None assert fixed_lat.bounds is not None assert fixed_lon.bounds.dtype == np.float64 diff --git a/tests/integration/cmor/_fixes/cmip6/test_miroc_es2l.py b/tests/integration/cmor/_fixes/cmip6/test_miroc_es2l.py index 22ae2d1c03..a183e08e07 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_miroc_es2l.py +++ b/tests/integration/cmor/_fixes/cmip6/test_miroc_es2l.py @@ -1,4 +1,5 @@ """Test fixes for MIROC-ES2L.""" + from esmvalcore.cmor._fixes.cmip6.miroc_es2l import Cl, Cli, Clw from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MIROC-ES2L', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "MIROC-ES2L", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -17,7 +18,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MIROC-ES2L', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "MIROC-ES2L", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -28,7 +29,7 @@ def test_cli_fix(): 
def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MIROC-ES2L', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "MIROC-ES2L", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_mpi_esm1_2_hr.py b/tests/integration/cmor/_fixes/cmip6/test_mpi_esm1_2_hr.py index 3a9dac1fb6..5febd99177 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_mpi_esm1_2_hr.py +++ b/tests/integration/cmor/_fixes/cmip6/test_mpi_esm1_2_hr.py @@ -1,4 +1,5 @@ """Test fixes for MPI-ESM1-2-HR.""" + import iris import pytest from cf_units import Unit @@ -18,7 +19,7 @@ def test_get_allvars_fix(): """Test getting of fix.""" - fixes = Fix.get_fixes('CMIP6', 'MPI-ESM1-2-HR', 'Amon', 'tas') + fixes = Fix.get_fixes("CMIP6", "MPI-ESM1-2-HR", "Amon", "tas") assert AllVars(None) in fixes assert len([fix for fix in fixes if isinstance(fix, AllVars)]) == 1 @@ -26,87 +27,91 @@ def test_get_allvars_fix(): def test_allvars_r2i1p1f1(): lat_coord1 = iris.coords.DimCoord( [-86.49036676628116], - var_name='lat', - standard_name='latitude', - units='degrees', + var_name="lat", + standard_name="latitude", + units="degrees", ) lat_coord2 = iris.coords.DimCoord( [-86.49036676628118], - var_name='lat', - standard_name='latitude', - units='degrees', + var_name="lat", + standard_name="latitude", + units="degrees", ) cube1 = iris.cube.Cube([0]) - cube1.attributes['variant_label'] = 'r2i1p1f1' + cube1.attributes["variant_label"] = "r2i1p1f1" cube1.add_dim_coord(lat_coord1, 0) cube2 = iris.cube.Cube([0]) - cube2.attributes['variant_label'] = 'r2i1p1f1' + cube2.attributes["variant_label"] = "r2i1p1f1" cube2.add_dim_coord(lat_coord2, 0) fix = AllVars(None) fixed_cubes = fix.fix_metadata([cube1, cube2]) - assert fixed_cubes[0].coord('latitude').points[0] == -86.49036676628 - assert fixed_cubes[1].coord('latitude').points[0] == -86.49036676628 + assert fixed_cubes[0].coord("latitude").points[0] == -86.49036676628 + assert fixed_cubes[1].coord("latitude").points[0] == -86.49036676628 @pytest.fixture def sfcwind_cubes(): - correct_lat_coord = iris.coords.DimCoord([0.0], - var_name='lat', - standard_name='latitude') - wrong_lat_coord = iris.coords.DimCoord([0.0], - var_name='latitudeCoord', - standard_name='latitude') - correct_lon_coord = iris.coords.DimCoord([0.0], - var_name='lon', - standard_name='longitude') - wrong_lon_coord = iris.coords.DimCoord([0.0], - var_name='longitudeCoord', - standard_name='longitude') - correct_cube = iris.cube.Cube([[10.0]], var_name='sfcWind', - dim_coords_and_dims=[(correct_lat_coord, 0), - (correct_lon_coord, 1)] - ) - wrong_cube = iris.cube.Cube([[10.0]], - var_name='ta', - dim_coords_and_dims=[(wrong_lat_coord, 0), - (wrong_lon_coord, 1)]) - scalar_cube = iris.cube.Cube(0.0, var_name='ps') + correct_lat_coord = iris.coords.DimCoord( + [0.0], var_name="lat", standard_name="latitude" + ) + wrong_lat_coord = iris.coords.DimCoord( + [0.0], var_name="latitudeCoord", standard_name="latitude" + ) + correct_lon_coord = iris.coords.DimCoord( + [0.0], var_name="lon", standard_name="longitude" + ) + wrong_lon_coord = iris.coords.DimCoord( + [0.0], var_name="longitudeCoord", standard_name="longitude" + ) + correct_cube = iris.cube.Cube( + [[10.0]], + var_name="sfcWind", + dim_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 1)], + ) + wrong_cube = iris.cube.Cube( + [[10.0]], + var_name="ta", + dim_coords_and_dims=[(wrong_lat_coord, 0), (wrong_lon_coord, 1)], + ) + scalar_cube = iris.cube.Cube(0.0, var_name="ps") return 
iris.cube.CubeList([correct_cube, wrong_cube, scalar_cube]) @pytest.fixture def tas_cubes(): - correct_lat_coord = iris.coords.DimCoord([0.0], - var_name='lat', - standard_name='latitude') - wrong_lat_coord = iris.coords.DimCoord([0.0], - var_name='latitudeCoord', - standard_name='latitude') - correct_lon_coord = iris.coords.DimCoord([0.0], - var_name='lon', - standard_name='longitude') - wrong_lon_coord = iris.coords.DimCoord([0.0], - var_name='longitudeCoord', - standard_name='longitude') - correct_cube = iris.cube.Cube([[10.0]], - var_name='tas', - dim_coords_and_dims=[(correct_lat_coord, 0), - (correct_lon_coord, 1)]) - wrong_cube = iris.cube.Cube([[10.0]], - var_name='ta', - dim_coords_and_dims=[(wrong_lat_coord, 0), - (wrong_lon_coord, 1)]) - scalar_cube = iris.cube.Cube(0.0, var_name='ps') + correct_lat_coord = iris.coords.DimCoord( + [0.0], var_name="lat", standard_name="latitude" + ) + wrong_lat_coord = iris.coords.DimCoord( + [0.0], var_name="latitudeCoord", standard_name="latitude" + ) + correct_lon_coord = iris.coords.DimCoord( + [0.0], var_name="lon", standard_name="longitude" + ) + wrong_lon_coord = iris.coords.DimCoord( + [0.0], var_name="longitudeCoord", standard_name="longitude" + ) + correct_cube = iris.cube.Cube( + [[10.0]], + var_name="tas", + dim_coords_and_dims=[(correct_lat_coord, 0), (correct_lon_coord, 1)], + ) + wrong_cube = iris.cube.Cube( + [[10.0]], + var_name="ta", + dim_coords_and_dims=[(wrong_lat_coord, 0), (wrong_lon_coord, 1)], + ) + scalar_cube = iris.cube.Cube(0.0, var_name="ps") return iris.cube.CubeList([correct_cube, wrong_cube, scalar_cube]) def test_get_cl_fix(): """Test getting of fix.""" - fixes = Fix.get_fixes('CMIP6', 'MPI-ESM1-2-HR', 'Amon', 'cl') + fixes = Fix.get_fixes("CMIP6", "MPI-ESM1-2-HR", "Amon", "cl") assert Cl(None) in fixes @@ -117,7 +122,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fixes = Fix.get_fixes('CMIP6', 'MPI-ESM1-2-HR', 'Amon', 'cli') + fixes = Fix.get_fixes("CMIP6", "MPI-ESM1-2-HR", "Amon", "cli") assert Cli(None) in fixes @@ -128,7 +133,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fixes = Fix.get_fixes('CMIP6', 'MPI-ESM1-2-HR', 'Amon', 'clw') + fixes = Fix.get_fixes("CMIP6", "MPI-ESM1-2-HR", "Amon", "clw") assert Clw(None) in fixes @@ -138,48 +143,52 @@ def test_clw_fix(): def test_get_sfcwind_fix(): - fixes = Fix.get_fixes('CMIP6', 'MPI_ESM1_2_HR', 'day', 'sfcWind') + fixes = Fix.get_fixes("CMIP6", "MPI_ESM1_2_HR", "day", "sfcWind") assert SfcWind(None) in fixes def test_sfcwind_fix_metadata(sfcwind_cubes): for cube in sfcwind_cubes: with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube.coord('height') - height_coord = iris.coords.AuxCoord(10.0, - var_name='height', - standard_name='height', - long_name='height', - units=Unit('m'), - attributes={'positive': 'up'}) - vardef = get_var_info('CMIP6', 'day', 'sfcWind') + cube.coord("height") + height_coord = iris.coords.AuxCoord( + 10.0, + var_name="height", + standard_name="height", + long_name="height", + units=Unit("m"), + attributes={"positive": "up"}, + ) + vardef = get_var_info("CMIP6", "day", "sfcWind") fix = SfcWind(vardef) out_cubes = fix.fix_metadata(sfcwind_cubes) - assert out_cubes[0].var_name == 'sfcWind' - coord = out_cubes[0].coord('height') + assert out_cubes[0].var_name == "sfcWind" + coord = out_cubes[0].coord("height") assert coord == height_coord def test_get_tas_fix(): - fixes = Fix.get_fixes('CMIP6', 'MPI_ESM1_2_HR', 'day', 'tas') + fixes = Fix.get_fixes("CMIP6", 
"MPI_ESM1_2_HR", "day", "tas") assert Tas(None) in fixes def test_tas_fix_metadata(tas_cubes): for cube in tas_cubes: with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube.coord('height') - height_coord = iris.coords.AuxCoord(2.0, - var_name='height', - standard_name='height', - long_name='height', - units=Unit('m'), - attributes={'positive': 'up'}) - vardef = get_var_info('CMIP6', 'day', 'tas') + cube.coord("height") + height_coord = iris.coords.AuxCoord( + 2.0, + var_name="height", + standard_name="height", + long_name="height", + units=Unit("m"), + attributes={"positive": "up"}, + ) + vardef = get_var_info("CMIP6", "day", "tas") fix = Tas(vardef) out_cubes = fix.fix_metadata(tas_cubes) - assert out_cubes[0].var_name == 'tas' - coord = out_cubes[0].coord('height') + assert out_cubes[0].var_name == "tas" + coord = out_cubes[0].coord("height") assert coord == height_coord diff --git a/tests/integration/cmor/_fixes/cmip6/test_mpi_esm1_2_lr.py b/tests/integration/cmor/_fixes/cmip6/test_mpi_esm1_2_lr.py index 2ae3af9355..29b01cee7b 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_mpi_esm1_2_lr.py +++ b/tests/integration/cmor/_fixes/cmip6/test_mpi_esm1_2_lr.py @@ -1,4 +1,5 @@ """Tests for the fixes of MPI-ESM1-2-LR.""" + from esmvalcore.cmor._fixes.cmip6.mpi_esm1_2_lr import Cl, Cli, Clw from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import GenericFix @@ -7,7 +8,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MPI-ESM1-2-LR', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "MPI-ESM1-2-LR", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -18,7 +19,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MPI-ESM1-2-LR', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "MPI-ESM1-2-LR", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -29,7 +30,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MPI-ESM1-2-LR', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "MPI-ESM1-2-LR", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_mpi_esm_1_2_ham.py b/tests/integration/cmor/_fixes/cmip6/test_mpi_esm_1_2_ham.py index 358cb1f56f..96a69de672 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_mpi_esm_1_2_ham.py +++ b/tests/integration/cmor/_fixes/cmip6/test_mpi_esm_1_2_ham.py @@ -1,4 +1,5 @@ """Tests for the fixes of MPI-ESM-1-2-HAM.""" + from esmvalcore.cmor._fixes.cmip6.mpi_esm_1_2_ham import Cl, Cli, Clw from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import GenericFix @@ -7,7 +8,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MPI-ESM-1-2-HAM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "MPI-ESM-1-2-HAM", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -18,7 +19,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MPI-ESM-1-2-HAM', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "MPI-ESM-1-2-HAM", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -29,7 +30,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MPI-ESM-1-2-HAM', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "MPI-ESM-1-2-HAM", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git 
a/tests/integration/cmor/_fixes/cmip6/test_mri_esm2_0.py b/tests/integration/cmor/_fixes/cmip6/test_mri_esm2_0.py index 6ec68855b7..cfaf223d1b 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_mri_esm2_0.py +++ b/tests/integration/cmor/_fixes/cmip6/test_mri_esm2_0.py @@ -1,4 +1,5 @@ """Test fixes for MRI-ESM2-0.""" + from esmvalcore.cmor._fixes.cmip6.mri_esm2_0 import Cl, Cli, Clw from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MRI-ESM2-0', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "MRI-ESM2-0", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -17,7 +18,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MRI-ESM2-0', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "MRI-ESM2-0", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -28,7 +29,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'MRI-ESM2-0', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "MRI-ESM2-0", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_nesm3.py b/tests/integration/cmor/_fixes/cmip6/test_nesm3.py index bc9265bc5b..891cb9c615 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_nesm3.py +++ b/tests/integration/cmor/_fixes/cmip6/test_nesm3.py @@ -1,4 +1,5 @@ """Test fixes for NESM3.""" + from esmvalcore.cmor._fixes.cmip6.nesm3 import Cl, Cli, Clw from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import Fix, GenericFix @@ -6,7 +7,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NESM3', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "NESM3", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -17,7 +18,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NESM3', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "NESM3", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -28,7 +29,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NESM3', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "NESM3", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_noresm2_lm.py b/tests/integration/cmor/_fixes/cmip6/test_noresm2_lm.py index 78ea66effc..adbb348563 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_noresm2_lm.py +++ b/tests/integration/cmor/_fixes/cmip6/test_noresm2_lm.py @@ -19,7 +19,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NorESM2-LM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "NorESM2-LM", "Amon", "cl") assert fix == [Cl(None), AllVars(None), GenericFix(None)] @@ -30,7 +30,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NorESM2-LM', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "NorESM2-LM", "Amon", "cli") assert fix == [Cli(None), AllVars(None), GenericFix(None)] @@ -41,7 +41,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NorESM2-LM', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "NorESM2-LM", "Amon", "clw") assert fix == [Clw(None), AllVars(None), GenericFix(None)] @@ -53,43 +53,55 @@ def test_clw_fix(): @pytest.fixture 
def siconc_cubes(): """Sample cube.""" - time_coord = iris.coords.DimCoord([0.2], standard_name='time', - var_name='time', - units='days since 1850-01-01') - lat_coord = iris.coords.DimCoord([30.0], standard_name='latitude', - var_name='lat', units='degrees_north') - lon_coord = iris.coords.DimCoord([30.0], standard_name='longitude', - var_name='lon', units='degrees_east') + time_coord = iris.coords.DimCoord( + [0.2], + standard_name="time", + var_name="time", + units="days since 1850-01-01", + ) + lat_coord = iris.coords.DimCoord( + [30.0], standard_name="latitude", var_name="lat", units="degrees_north" + ) + lon_coord = iris.coords.DimCoord( + [30.0], standard_name="longitude", var_name="lon", units="degrees_east" + ) coords_specs = [(time_coord, 0), (lat_coord, 1), (lon_coord, 2)] - cube = iris.cube.Cube([[[22.0]]], standard_name='sea_ice_area_fraction', - var_name='siconc', units='%', - dim_coords_and_dims=coords_specs) + cube = iris.cube.Cube( + [[[22.0]]], + standard_name="sea_ice_area_fraction", + var_name="siconc", + units="%", + dim_coords_and_dims=coords_specs, + ) return iris.cube.CubeList([cube]) @pytest.fixture def cubes_bounds(): """Correct and wrong cubes.""" - lat_coord = iris.coords.DimCoord([0.0], - var_name='lat', - standard_name='latitude') - correct_lon_coord = iris.coords.DimCoord([0, 357.5], - bounds=[[-1.25, 1.25], - [356.25, 358.75]], - var_name='lon', - standard_name='longitude') - wrong_lon_coord = iris.coords.DimCoord([0, 357.5], - bounds=[[0, 1.25], [356.25, 360]], - var_name='lon', - standard_name='longitude') + lat_coord = iris.coords.DimCoord( + [0.0], var_name="lat", standard_name="latitude" + ) + correct_lon_coord = iris.coords.DimCoord( + [0, 357.5], + bounds=[[-1.25, 1.25], [356.25, 358.75]], + var_name="lon", + standard_name="longitude", + ) + wrong_lon_coord = iris.coords.DimCoord( + [0, 357.5], + bounds=[[0, 1.25], [356.25, 360]], + var_name="lon", + standard_name="longitude", + ) correct_cube = iris.cube.Cube( [[10.0, 10.0]], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(lat_coord, 0), (correct_lon_coord, 1)], ) wrong_cube = iris.cube.Cube( [[10.0, 10.0]], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(lat_coord, 0), (wrong_lon_coord, 1)], ) return iris.cube.CubeList([correct_cube, wrong_cube]) @@ -97,7 +109,7 @@ def cubes_bounds(): def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NorESM2-LM', 'SImon', 'siconc') + fix = Fix.get_fixes("CMIP6", "NorESM2-LM", "SImon", "siconc") assert fix == [Siconc(None), AllVars(None), GenericFix(None)] @@ -108,7 +120,7 @@ def test_allvars_fix_lon_bounds(cubes_bounds): assert cubes_bounds is out_cubes for cube in out_cubes: try: - lon_coord = cube.coord('longitude') + lon_coord = cube.coord("longitude") except iris.exceptions.CoordinateNotFoundError: pass else: @@ -128,17 +140,17 @@ def test_siconc_fix_metadata(siconc_cubes): assert siconc_cube.var_name == "siconc" # Extract siconc cube - siconc_cube = siconc_cubes.extract_cube('sea_ice_area_fraction') - assert not siconc_cube.coords('typesi') + siconc_cube = siconc_cubes.extract_cube("sea_ice_area_fraction") + assert not siconc_cube.coords("typesi") # Apply fix - vardef = get_var_info('CMIP6', 'SImon', 'siconc') + vardef = get_var_info("CMIP6", "SImon", "siconc") fix = Siconc(vardef) fixed_cubes = fix.fix_metadata(siconc_cubes) assert len(fixed_cubes) == 1 - fixed_siconc_cube = fixed_cubes.extract_cube('sea_ice_area_fraction') - fixed_lon = fixed_siconc_cube.coord('longitude') - fixed_lat = 
fixed_siconc_cube.coord('latitude') + fixed_siconc_cube = fixed_cubes.extract_cube("sea_ice_area_fraction") + fixed_lon = fixed_siconc_cube.coord("longitude") + fixed_lat = fixed_siconc_cube.coord("latitude") assert fixed_lon.bounds is not None assert fixed_lat.bounds is not None np.testing.assert_equal(fixed_lon.bounds, [[28.9956, 32.3446]]) diff --git a/tests/integration/cmor/_fixes/cmip6/test_noresm2_mm.py b/tests/integration/cmor/_fixes/cmip6/test_noresm2_mm.py index 0868dd910a..11e06375ff 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_noresm2_mm.py +++ b/tests/integration/cmor/_fixes/cmip6/test_noresm2_mm.py @@ -1,4 +1,5 @@ """Tests for the fixes of NorESM2-MM.""" + from esmvalcore.cmor._fixes.cmip6.noresm2_mm import Cl, Cli, Clw from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import GenericFix @@ -7,7 +8,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NorESM2-MM', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "NorESM2-MM", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -18,7 +19,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NorESM2-MM', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "NorESM2-MM", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -29,7 +30,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'NorESM2-MM', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "NorESM2-MM", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_sam0_unicon.py b/tests/integration/cmor/_fixes/cmip6/test_sam0_unicon.py index e9f0a76a50..cc834c97f4 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_sam0_unicon.py +++ b/tests/integration/cmor/_fixes/cmip6/test_sam0_unicon.py @@ -1,4 +1,5 @@ """Test fixes for SAM0-UNICON.""" + import iris import numpy as np import pytest @@ -10,7 +11,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'SAM0-UNICON', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "SAM0-UNICON", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -21,7 +22,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'SAM0-UNICON', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "SAM0-UNICON", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -32,7 +33,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'SAM0-UNICON', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "SAM0-UNICON", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] @@ -43,7 +44,7 @@ def test_clw_fix(): def test_get_nbp_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'SAM0-UNICON', 'Lmon', 'nbp') + fix = Fix.get_fixes("CMIP6", "SAM0-UNICON", "Lmon", "nbp") assert fix == [Nbp(None), GenericFix(None)] @@ -52,10 +53,10 @@ def nbp_cube(): """``nbp`` cube.""" cube = iris.cube.Cube( [1.0], - var_name='nbp', - standard_name='surface_net_downward_mass_flux_of_carbon_dioxide' - '_expressed_as_carbon_due_to_all_land_processes', - units='kg m-2 s-1', + var_name="nbp", + standard_name="surface_net_downward_mass_flux_of_carbon_dioxide" + "_expressed_as_carbon_due_to_all_land_processes", + units="kg m-2 s-1", ) return cube diff --git a/tests/integration/cmor/_fixes/cmip6/test_taiesm1.py b/tests/integration/cmor/_fixes/cmip6/test_taiesm1.py index dd955ef1fb..60bffe95e1 100644 
--- a/tests/integration/cmor/_fixes/cmip6/test_taiesm1.py +++ b/tests/integration/cmor/_fixes/cmip6/test_taiesm1.py @@ -1,4 +1,5 @@ """Tests for the fixes of TaiESM1.""" + from esmvalcore.cmor._fixes.cmip6.taiesm1 import Cl, Cli, Clw from esmvalcore.cmor._fixes.common import ClFixHybridPressureCoord from esmvalcore.cmor._fixes.fix import GenericFix @@ -7,7 +8,7 @@ def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'TaiESM1', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "TaiESM1", "Amon", "cl") assert fix == [Cl(None), GenericFix(None)] @@ -18,7 +19,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'TaiESM1', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "TaiESM1", "Amon", "cli") assert fix == [Cli(None), GenericFix(None)] @@ -29,7 +30,7 @@ def test_cli_fix(): def test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'TaiESM1', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "TaiESM1", "Amon", "clw") assert fix == [Clw(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/cmip6/test_ukesm1_0_ll.py b/tests/integration/cmor/_fixes/cmip6/test_ukesm1_0_ll.py index 423d55e637..7961fdb81e 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_ukesm1_0_ll.py +++ b/tests/integration/cmor/_fixes/cmip6/test_ukesm1_0_ll.py @@ -1,4 +1,5 @@ """Tests for the fixes of UKESM1-0-LL.""" + import iris import pytest @@ -11,40 +12,40 @@ @pytest.fixture def sample_cubes(): """Sample cubes.""" - ta_cube = iris.cube.Cube([1.0], var_name='ta') - tas_cube = iris.cube.Cube([3.0], var_name='tas') + ta_cube = iris.cube.Cube([1.0], var_name="ta") + tas_cube = iris.cube.Cube([3.0], var_name="tas") return iris.cube.CubeList([ta_cube, tas_cube]) def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'UKESM1-0-LL', 'Amon', 'tas') + fix = Fix.get_fixes("CMIP6", "UKESM1-0-LL", "Amon", "tas") assert fix == [AllVars(None), GenericFix(None)] def test_allvars_fix_metadata(sample_cubes): """Test ``fix_metadata`` for all variables.""" for cube in sample_cubes: - cube.attributes['parent_time_units'] = 'days since 1850-01-01' + cube.attributes["parent_time_units"] = "days since 1850-01-01" out_cubes = AllVars(None).fix_metadata(sample_cubes) assert out_cubes is sample_cubes for cube in out_cubes: - assert cube.attributes['parent_time_units'] == 'days since 1850-01-01' + assert cube.attributes["parent_time_units"] == "days since 1850-01-01" def test_allvars_no_need_tofix_metadata(sample_cubes): """Test ``fix_metadata`` for all variables.""" for cube in sample_cubes: - cube.attributes['parent_time_units'] = 'days since 1850-01-01' + cube.attributes["parent_time_units"] = "days since 1850-01-01" out_cubes = AllVars(None).fix_metadata(sample_cubes) assert out_cubes is sample_cubes for cube in out_cubes: - assert cube.attributes['parent_time_units'] == 'days since 1850-01-01' + assert cube.attributes["parent_time_units"] == "days since 1850-01-01" def test_get_cl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'UKESM1-0-LL', 'Amon', 'cl') + fix = Fix.get_fixes("CMIP6", "UKESM1-0-LL", "Amon", "cl") assert fix == [Cl(None), AllVars(None), GenericFix(None)] @@ -55,7 +56,7 @@ def test_cl_fix(): def test_get_cli_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'UKESM1-0-LL', 'Amon', 'cli') + fix = Fix.get_fixes("CMIP6", "UKESM1-0-LL", "Amon", "cli") assert fix == [Cli(None), AllVars(None), GenericFix(None)] @@ -66,7 +67,7 @@ def test_cli_fix(): def 
test_get_clw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('CMIP6', 'UKESM1-0-LL', 'Amon', 'clw') + fix = Fix.get_fixes("CMIP6", "UKESM1-0-LL", "Amon", "clw") assert fix == [Clw(None), AllVars(None), GenericFix(None)] diff --git a/tests/integration/cmor/_fixes/conftest.py b/tests/integration/cmor/_fixes/conftest.py index 18b6873401..0dcc6b4037 100644 --- a/tests/integration/cmor/_fixes/conftest.py +++ b/tests/integration/cmor/_fixes/conftest.py @@ -8,4 +8,4 @@ @pytest.fixture def test_data_path(): """Path to test data for CMOR fixes.""" - return Path(__file__).resolve().parent / 'test_data' + return Path(__file__).resolve().parent / "test_data" diff --git a/tests/integration/cmor/_fixes/cordex/test_cnrm_cerfacs_cnrm_cm5.py b/tests/integration/cmor/_fixes/cordex/test_cnrm_cerfacs_cnrm_cm5.py index 63428e913c..2fb65085c6 100644 --- a/tests/integration/cmor/_fixes/cordex/test_cnrm_cerfacs_cnrm_cm5.py +++ b/tests/integration/cmor/_fixes/cordex/test_cnrm_cerfacs_cnrm_cm5.py @@ -1,58 +1,59 @@ """Tests for the fixes for driver CNRM-CERFACS-CNRM-CM5.""" + import iris import pytest from esmvalcore.cmor._fixes.cordex.cnrm_cerfacs_cnrm_cm5 import ( aladin63, - wrf381p,) + wrf381p, +) from esmvalcore.cmor.fix import Fix from esmvalcore.cmor.table import get_var_info @pytest.fixture def cubes(): - correct_time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='time') - correct_height_coord = iris.coords.AuxCoord([2.0], - var_name='height') - wrong_height_coord = iris.coords.AuxCoord([10.0], - var_name='height') + correct_time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="time" + ) + correct_height_coord = iris.coords.AuxCoord([2.0], var_name="height") + wrong_height_coord = iris.coords.AuxCoord([10.0], var_name="height") correct_cube = iris.cube.Cube( [10.0], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(correct_time_coord, 0)], - aux_coords_and_dims=[(correct_height_coord, ())] + aux_coords_and_dims=[(correct_height_coord, ())], ) wrong_cube = iris.cube.Cube( [10.0], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(correct_time_coord, 0)], - aux_coords_and_dims=[(wrong_height_coord, ())] + aux_coords_and_dims=[(wrong_height_coord, ())], ) return iris.cube.CubeList([correct_cube, wrong_cube]) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_hadrem3ga705_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'HadREM3-GA7-05', - 'Amon', + "CORDEX", + "HadREM3-GA7-05", + "Amon", short_name, - extra_facets={'driver': 'CNRM-CERFACS-CNRM-CM5'}) + extra_facets={"driver": "CNRM-CERFACS-CNRM-CM5"}, + ) assert isinstance(fix[0], Fix) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_aladin63_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'ALADIN63', - 'Amon', + "CORDEX", + "ALADIN63", + "Amon", short_name, - extra_facets={'driver': 'CNRM-CERFACS-CNRM-CM5'}) + extra_facets={"driver": "CNRM-CERFACS-CNRM-CM5"}, + ) assert isinstance(fix[0], Fix) @@ -61,33 +62,33 @@ def test_aladin63_height_fix(cubes): out_cubes = fix.fix_metadata(cubes) assert cubes is out_cubes for cube in out_cubes: - assert cube.coord('height').points == 2.0 + assert cube.coord("height").points == 2.0 @pytest.mark.parametrize( - 'short_name', - ['tasmax', 'tasmin', 'tas', 'hurs', 'huss']) + "short_name", ["tasmax", "tasmin", "tas", "hurs", "huss"] +) def 
test_get_wrf381p_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'WRF381P', - 'Amon', + "CORDEX", + "WRF381P", + "Amon", short_name, - extra_facets={'driver': 'CNRM-CERFACS-CNRM-CM5'}) + extra_facets={"driver": "CNRM-CERFACS-CNRM-CM5"}, + ) assert isinstance(fix[0], Fix) def test_wrf381p_height_fix(): - time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='time') + time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="time" + ) cube = iris.cube.Cube( [10.0], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(time_coord, 0)], ) - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = wrf381p.Tas(vardef) out_cubes = fix.fix_metadata([cube]) - assert out_cubes[0].coord('height').points == 2.0 + assert out_cubes[0].coord("height").points == 2.0 diff --git a/tests/integration/cmor/_fixes/cordex/test_cordex_fixes.py b/tests/integration/cmor/_fixes/cordex/test_cordex_fixes.py index a831d37b32..63cf6da5f3 100644 --- a/tests/integration/cmor/_fixes/cordex/test_cordex_fixes.py +++ b/tests/integration/cmor/_fixes/cordex/test_cordex_fixes.py @@ -1,4 +1,5 @@ """Tests for general CORDEX fixes.""" + import cordex as cx import iris import numpy as np @@ -16,45 +17,50 @@ @pytest.fixture def cubes(): - correct_time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='time') - wrong_time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='wrong') - correct_lat_coord = iris.coords.DimCoord([0.0, 1.0], - var_name='lat', - standard_name='latitude', - long_name='latitude') - wrong_lat_coord = iris.coords.DimCoord([0.0, 1.0], - var_name='latitudeCoord', - standard_name='latitude', - long_name='latitude') - correct_lon_coord = iris.coords.DimCoord([0.0], - var_name='lon', - standard_name='longitude', - long_name='longitude') - wrong_lon_coord = iris.coords.DimCoord([0.0], - var_name='longitudeCoord', - standard_name='longitude', - long_name='longitude') + correct_time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="time" + ) + wrong_time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="wrong" + ) + correct_lat_coord = iris.coords.DimCoord( + [0.0, 1.0], + var_name="lat", + standard_name="latitude", + long_name="latitude", + ) + wrong_lat_coord = iris.coords.DimCoord( + [0.0, 1.0], + var_name="latitudeCoord", + standard_name="latitude", + long_name="latitude", + ) + correct_lon_coord = iris.coords.DimCoord( + [0.0], var_name="lon", standard_name="longitude", long_name="longitude" + ) + wrong_lon_coord = iris.coords.DimCoord( + [0.0], + var_name="longitudeCoord", + standard_name="longitude", + long_name="longitude", + ) correct_cube = iris.cube.Cube( [[[10.0], [10.0]]], - var_name='tas', + var_name="tas", dim_coords_and_dims=[ (correct_time_coord, 0), (correct_lat_coord, 1), - (correct_lon_coord, 2)], + (correct_lon_coord, 2), + ], ) wrong_cube = iris.cube.Cube( [[[10.0], [10.0]]], - var_name='tas', + var_name="tas", dim_coords_and_dims=[ (wrong_time_coord, 0), (wrong_lat_coord, 1), - (wrong_lon_coord, 2)], + (wrong_lon_coord, 2), + ], ) return iris.cube.CubeList([correct_cube, wrong_cube]) @@ -62,53 +68,49 @@ def cubes(): @pytest.fixture def cordex_cubes(): coord_system = iris.coord_systems.RotatedGeogCS( - grid_north_pole_latitude=39.25, - grid_north_pole_longitude=-162, - ) - time = iris.coords.DimCoord(np.arange(0, 3), 
- var_name='time', - standard_name='time') - - rlat = iris.coords.DimCoord(np.arange(0, 412), - var_name='rlat', - standard_name='grid_latitude', - coord_system=coord_system, - ) - rlon = iris.coords.DimCoord(np.arange(0, 424), - var_name='rlon', - standard_name='grid_longitude', - coord_system=coord_system, - ) - lat = iris.coords.AuxCoord(np.ones((412, 424)), - var_name='lat', - standard_name='latitude') - lon = iris.coords.AuxCoord(np.ones((412, 424)), - var_name='lon', - standard_name='longitude') + grid_north_pole_latitude=39.25, + grid_north_pole_longitude=-162, + ) + time = iris.coords.DimCoord( + np.arange(0, 3), var_name="time", standard_name="time" + ) + + rlat = iris.coords.DimCoord( + np.arange(0, 412), + var_name="rlat", + standard_name="grid_latitude", + coord_system=coord_system, + ) + rlon = iris.coords.DimCoord( + np.arange(0, 424), + var_name="rlon", + standard_name="grid_longitude", + coord_system=coord_system, + ) + lat = iris.coords.AuxCoord( + np.ones((412, 424)), var_name="lat", standard_name="latitude" + ) + lon = iris.coords.AuxCoord( + np.ones((412, 424)), var_name="lon", standard_name="longitude" + ) cube = iris.cube.Cube( np.ones((3, 412, 424)), - var_name='tas', - dim_coords_and_dims=[ - (time, 0), - (rlat, 1), - (rlon, 2)], - aux_coords_and_dims=[ - (lat, (1, 2)), - (lon, (1, 2)) - ] - + var_name="tas", + dim_coords_and_dims=[(time, 0), (rlat, 1), (rlon, 2)], + aux_coords_and_dims=[(lat, (1, 2)), (lon, (1, 2))], ) return iris.cube.CubeList([cube]) @pytest.mark.parametrize( - 'coord, var_name, long_name', + "coord, var_name, long_name", [ - ('time', 'time', 'time'), - ('latitude', 'lat', 'latitude'), - ('longitude', 'lon', 'longitude'), - ]) + ("time", "time", "time"), + ("latitude", "lat", "latitude"), + ("longitude", "lon", "longitude"), + ], +) def test_mohchadrem3ga705_fix_metadata(cubes, coord, var_name, long_name): fix = MOHCHadREM3GA705(None) out_cubes = fix.fix_metadata(cubes) @@ -123,31 +125,29 @@ def test_timelongname_fix_metadata(cubes): out_cubes = fix.fix_metadata(cubes) assert cubes is out_cubes for cube in out_cubes: - assert cube.coord('time').long_name == 'time' + assert cube.coord("time").long_name == "time" def test_clmcomcclm4817_fix_metadata(cubes): - cubes[0].coord('time').units = Unit( - 'days since 1850-1-1 00:00:00', - calendar='proleptic_gregorian') - cubes[1].coord('time').units = Unit( - 'days since 1850-1-1 00:00:00', - calendar='standard') + cubes[0].coord("time").units = Unit( + "days since 1850-1-1 00:00:00", calendar="proleptic_gregorian" + ) + cubes[1].coord("time").units = Unit( + "days since 1850-1-1 00:00:00", calendar="standard" + ) for coord in cubes[1].coords(): - coord.points = coord.core_points().astype( - '>f8', casting='same_kind') - lat = cubes[1].coord('latitude') + coord.points = coord.core_points().astype(">f8", casting="same_kind") + lat = cubes[1].coord("latitude") lat.guess_bounds() - lat.bounds = lat.core_bounds().astype( - '>f4', casting='same_kind') + lat.bounds = lat.core_bounds().astype(">f4", casting="same_kind") fix = CLMcomCCLM4817(None) out_cubes = fix.fix_metadata(cubes) assert cubes is out_cubes for cube in out_cubes: - assert cube.coord('time').units == Unit( - 'days since 1850-1-1 00:00:00', - calendar='proleptic_gregorian') + assert cube.coord("time").units == Unit( + "days since 1850-1-1 00:00:00", calendar="proleptic_gregorian" + ) for coord in cube.coords(): assert coord.points.dtype == np.float64 @@ -156,37 +156,38 @@ def test_rotated_grid_fix(cordex_cubes): fix = AllVars( vardef=None, 
extra_facets={ - 'domain': 'EUR-11', - 'dataset': 'DATASET', - 'driver': 'DRIVER' - } - ) - domain = cx.cordex_domain('EUR-11', add_vertices=True) + "domain": "EUR-11", + "dataset": "DATASET", + "driver": "DRIVER", + }, + ) + domain = cx.cordex_domain("EUR-11", add_vertices=True) for cube in cordex_cubes: - for coord in ['rlat', 'rlon', 'lat', 'lon']: + for coord in ["rlat", "rlon", "lat", "lon"]: cube_coord = cube.coord(var_name=coord) cube_coord.points = domain[coord].data + 1e-6 out_cubes = fix.fix_metadata(cordex_cubes) assert cordex_cubes is out_cubes for out_cube in out_cubes: - for coord in ['rlat', 'rlon', 'lat', 'lon']: + for coord in ["rlat", "rlon", "lat", "lon"]: cube_coord = out_cube.coord(var_name=coord) domain_coord = domain[coord].data - np.testing.assert_array_equal( - cube_coord.points, domain_coord) + np.testing.assert_array_equal(cube_coord.points, domain_coord) def test_rotated_grid_fix_error(cordex_cubes): fix = AllVars( vardef=None, extra_facets={ - 'domain': 'EUR-11', - 'dataset': 'DATASET', - 'driver': 'DRIVER' - } - ) - msg = ("Differences between the original grid and the " - "standardised grid are above 10e-4 degrees.") + "domain": "EUR-11", + "dataset": "DATASET", + "driver": "DRIVER", + }, + ) + msg = ( + "Differences between the original grid and the " + "standardised grid are above 10e-4 degrees." + ) with pytest.raises(RecipeError) as exc: fix.fix_metadata(cordex_cubes) assert msg == exc.value.message @@ -196,17 +197,19 @@ def test_lambert_grid_warning(cubes, caplog): fix = AllVars( vardef=None, extra_facets={ - 'domain': 'EUR-11', - 'dataset': 'DATASET', - 'driver': 'DRIVER' - } - ) + "domain": "EUR-11", + "dataset": "DATASET", + "driver": "DRIVER", + }, + ) for cube in cubes: cube.coord_system = iris.coord_systems.LambertConformal fix.fix_metadata(cubes) - msg = ("Support for CORDEX datasets in a Lambert Conformal " - "coordinate system is ongoing. Certain preprocessor " - "functions may fail.") + msg = ( + "Support for CORDEX datasets in a Lambert Conformal " + "coordinate system is ongoing. Certain preprocessor " + "functions may fail." + ) assert msg in caplog.text @@ -214,16 +217,18 @@ def test_wrong_coord_system(cubes): fix = AllVars( vardef=None, extra_facets={ - 'domain': 'EUR-11', - 'dataset': 'DATASET', - 'driver': 'DRIVER' - } - ) + "domain": "EUR-11", + "dataset": "DATASET", + "driver": "DRIVER", + }, + ) for cube in cubes: cube.coord_system = iris.coord_systems.AlbersEqualArea - msg = ("Coordinate system albers_conical_equal_area not supported in " - "CORDEX datasets. Must be rotated_latitude_longitude " - "or lambert_conformal_conic.") + msg = ( + "Coordinate system albers_conical_equal_area not supported in " + "CORDEX datasets. Must be rotated_latitude_longitude " + "or lambert_conformal_conic." 
+ ) with pytest.raises(RecipeError) as exc: fix.fix_metadata(cubes) assert msg == exc.value.message diff --git a/tests/integration/cmor/_fixes/cordex/test_ichec_ec_earth.py b/tests/integration/cmor/_fixes/cordex/test_ichec_ec_earth.py index 1314dc5341..7de67b1388 100644 --- a/tests/integration/cmor/_fixes/cordex/test_ichec_ec_earth.py +++ b/tests/integration/cmor/_fixes/cordex/test_ichec_ec_earth.py @@ -1,4 +1,5 @@ """Tests for the fixes for driver ICHEC-EC-Earth.""" + import iris import pytest @@ -9,70 +10,74 @@ def test_get_remo2015_fix(): fix = Fix.get_fixes( - 'CORDEX', - 'REMO2015', - 'Amon', - 'pr', - extra_facets={'driver': 'ICHEC-EC-Earth'}) + "CORDEX", + "REMO2015", + "Amon", + "pr", + extra_facets={"driver": "ICHEC-EC-Earth"}, + ) assert isinstance(fix[0], Fix) def test_get_racmo22e_fix(): fix = Fix.get_fixes( - 'CORDEX', - 'RACMO22E', - 'Amon', - 'pr', - extra_facets={'driver': 'ICHEC-EC-Earth'}) + "CORDEX", + "RACMO22E", + "Amon", + "pr", + extra_facets={"driver": "ICHEC-EC-Earth"}, + ) assert isinstance(fix[0], Fix) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_hadrem3ga705_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'HadREM3-GA7-05', - 'Amon', + "CORDEX", + "HadREM3-GA7-05", + "Amon", short_name, - extra_facets={'driver': 'ICHEC-EC-Earth'}) + extra_facets={"driver": "ICHEC-EC-Earth"}, + ) assert isinstance(fix[0], Fix) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_rca4_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'RCA4', - 'Amon', + "CORDEX", + "RCA4", + "Amon", short_name, - extra_facets={'driver': 'ICHEC-EC-Earth'}) + extra_facets={"driver": "ICHEC-EC-Earth"}, + ) assert isinstance(fix[0], Fix) @pytest.mark.parametrize( - 'short_name', - ['tasmax', 'tasmin', 'tas', 'hurs', 'huss']) + "short_name", ["tasmax", "tasmin", "tas", "hurs", "huss"] +) def test_get_wrf381p_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'WRF381P', - 'Amon', + "CORDEX", + "WRF381P", + "Amon", short_name, - extra_facets={'driver': 'ICHEC-EC-Earth'}) + extra_facets={"driver": "ICHEC-EC-Earth"}, + ) assert isinstance(fix[0], Fix) def test_wrf381p_height_fix(): - time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='time') + time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="time" + ) cube = iris.cube.Cube( [10.0], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(time_coord, 0)], ) - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = wrf381p.Tas(vardef) out_cubes = fix.fix_metadata([cube]) - assert out_cubes[0].coord('height').points == 2.0 + assert out_cubes[0].coord("height").points == 2.0 diff --git a/tests/integration/cmor/_fixes/cordex/test_ipsl_ipsl_cm5a_mr.py b/tests/integration/cmor/_fixes/cordex/test_ipsl_ipsl_cm5a_mr.py index 5f879dfe9c..33384b6c00 100644 --- a/tests/integration/cmor/_fixes/cordex/test_ipsl_ipsl_cm5a_mr.py +++ b/tests/integration/cmor/_fixes/cordex/test_ipsl_ipsl_cm5a_mr.py @@ -1,4 +1,5 @@ """Tests for the fixes of driver IPSL-CM5A-MR.""" + import iris import pytest @@ -8,29 +9,29 @@ @pytest.mark.parametrize( - 'short_name', - ['tasmax', 'tasmin', 'tas', 'hurs', 'huss']) + "short_name", ["tasmax", "tasmin", "tas", "hurs", "huss"] +) def test_get_wrf381p_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'WRF381P', - 'Amon', + "CORDEX", + "WRF381P", + "Amon", 
short_name, - extra_facets={'driver': 'IPSL-CM5A-MR'}) + extra_facets={"driver": "IPSL-CM5A-MR"}, + ) assert isinstance(fix[0], Fix) def test_wrf381p_height_fix(): - time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='time') + time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="time" + ) cube = iris.cube.Cube( [10.0], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(time_coord, 0)], ) - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = wrf381p.Tas(vardef) out_cubes = fix.fix_metadata([cube]) - assert out_cubes[0].coord('height').points == 2.0 + assert out_cubes[0].coord("height").points == 2.0 diff --git a/tests/integration/cmor/_fixes/cordex/test_miroc_miroc5.py b/tests/integration/cmor/_fixes/cordex/test_miroc_miroc5.py index c54bea04cd..aff8b5fb24 100644 --- a/tests/integration/cmor/_fixes/cordex/test_miroc_miroc5.py +++ b/tests/integration/cmor/_fixes/cordex/test_miroc_miroc5.py @@ -1,6 +1,7 @@ """Tests for the fixes of MIROC-MIROC5.""" -import pytest + import iris +import pytest from esmvalcore.cmor._fixes.cordex.miroc_miroc5 import wrf361h from esmvalcore.cmor.fix import Fix @@ -8,50 +9,49 @@ @pytest.fixture def cubes(): - correct_time_coord = iris.coords.DimCoord([0.0, 1.0], - var_name='time', - standard_name='time', - long_name='time') - wrong_height_coord = iris.coords.DimCoord([2.0], - var_name='height') + correct_time_coord = iris.coords.DimCoord( + [0.0, 1.0], var_name="time", standard_name="time", long_name="time" + ) + wrong_height_coord = iris.coords.DimCoord([2.0], var_name="height") wrong_cube = iris.cube.Cube( [[10.0], [10.0]], - var_name='tas', - dim_coords_and_dims=[ - (correct_time_coord, 0), - (wrong_height_coord, 1)], + var_name="tas", + dim_coords_and_dims=[(correct_time_coord, 0), (wrong_height_coord, 1)], ) return iris.cube.CubeList([wrong_cube]) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_cclm4_8_17fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'CCLM4-8-17', - 'Amon', + "CORDEX", + "CCLM4-8-17", + "Amon", short_name, - extra_facets={'driver': 'MIROC-MIROC5'}) + extra_facets={"driver": "MIROC-MIROC5"}, + ) assert isinstance(fix[0], Fix) def test_get_remo2015_fix(): fix = Fix.get_fixes( - 'CORDEX', - 'REMO2015', - 'Amon', - 'pr', - extra_facets={'driver': 'MIROC-MIROC5'}) + "CORDEX", + "REMO2015", + "Amon", + "pr", + extra_facets={"driver": "MIROC-MIROC5"}, + ) assert isinstance(fix[0], Fix) def test_get_wrf361h_fix(): fix = Fix.get_fixes( - 'CORDEX', - 'WRF361H', - 'Amon', - 'tas', - extra_facets={'driver': 'MIROC-MIROC5'}) + "CORDEX", + "WRF361H", + "Amon", + "tas", + extra_facets={"driver": "MIROC-MIROC5"}, + ) assert isinstance(fix[0], Fix) diff --git a/tests/integration/cmor/_fixes/cordex/test_mohc_hadgem2_es.py b/tests/integration/cmor/_fixes/cordex/test_mohc_hadgem2_es.py index 0584165e9e..9300d87e4d 100644 --- a/tests/integration/cmor/_fixes/cordex/test_mohc_hadgem2_es.py +++ b/tests/integration/cmor/_fixes/cordex/test_mohc_hadgem2_es.py @@ -1,4 +1,5 @@ """Tests for the fixes for driver MOHC-HadGEM2-ES.""" + import iris import pytest @@ -9,91 +10,97 @@ @pytest.fixture def cubes(): - correct_time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='time') - wrong_time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='wrong') - correct_lat_coord = 
iris.coords.DimCoord([0.0], - var_name='lat', - standard_name='latitude', - long_name='latitude') - wrong_lat_coord = iris.coords.DimCoord([0.0], - var_name='latitudeCoord', - standard_name='latitude', - long_name='latitude', - attributes={'wrong': 'attr'}) - correct_lon_coord = iris.coords.DimCoord([0.0], - var_name='lon', - standard_name='longitude', - long_name='longitude') - wrong_lon_coord = iris.coords.DimCoord([0.0], - var_name='longitudeCoord', - standard_name='longitude', - long_name='longitude', - attributes={'wrong': 'attr'}) + correct_time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="time" + ) + wrong_time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="wrong" + ) + correct_lat_coord = iris.coords.DimCoord( + [0.0], var_name="lat", standard_name="latitude", long_name="latitude" + ) + wrong_lat_coord = iris.coords.DimCoord( + [0.0], + var_name="latitudeCoord", + standard_name="latitude", + long_name="latitude", + attributes={"wrong": "attr"}, + ) + correct_lon_coord = iris.coords.DimCoord( + [0.0], var_name="lon", standard_name="longitude", long_name="longitude" + ) + wrong_lon_coord = iris.coords.DimCoord( + [0.0], + var_name="longitudeCoord", + standard_name="longitude", + long_name="longitude", + attributes={"wrong": "attr"}, + ) correct_cube = iris.cube.Cube( [[[10.0]]], - var_name='tas', + var_name="tas", dim_coords_and_dims=[ (correct_time_coord, 0), (correct_lat_coord, 1), - (correct_lon_coord, 2)], + (correct_lon_coord, 2), + ], ) wrong_cube = iris.cube.Cube( [[[10.0]]], - var_name='tas', + var_name="tas", dim_coords_and_dims=[ (wrong_time_coord, 0), (wrong_lat_coord, 1), - (wrong_lon_coord, 2)], + (wrong_lon_coord, 2), + ], ) return iris.cube.CubeList([correct_cube, wrong_cube]) def test_get_hirham5_fix(): fix = Fix.get_fixes( - 'CORDEX', - 'HIRHAM5', - 'Amon', - 'pr', - extra_facets={'driver': 'MOHC-HadGEM2-ES'}) + "CORDEX", + "HIRHAM5", + "Amon", + "pr", + extra_facets={"driver": "MOHC-HadGEM2-ES"}, + ) assert isinstance(fix[0], Fix) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_remo2015_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'REMO2015', - 'Amon', + "CORDEX", + "REMO2015", + "Amon", short_name, - extra_facets={'driver': 'MOHC-HadGEM2-ES'}) + extra_facets={"driver": "MOHC-HadGEM2-ES"}, + ) assert isinstance(fix[0], Fix) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_hadrem3ga705_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'HadREM3-GA7-05', - 'Amon', + "CORDEX", + "HadREM3-GA7-05", + "Amon", short_name, - extra_facets={'driver': 'MOHC-HadGEM2-ES'}) + extra_facets={"driver": "MOHC-HadGEM2-ES"}, + ) assert isinstance(fix[0], Fix) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_rca4_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'RCA4', - 'Amon', + "CORDEX", + "RCA4", + "Amon", short_name, - extra_facets={'driver': 'MOHC-HadGEM2-ES'}) + extra_facets={"driver": "MOHC-HadGEM2-ES"}, + ) assert isinstance(fix[0], Fix) @@ -102,34 +109,34 @@ def test_hirham5_fix(cubes): out_cubes = fix.fix_metadata(cubes) assert cubes is out_cubes for cube in out_cubes: - assert cube.coord('latitude').attributes == {} - assert cube.coord('longitude').attributes == {} + assert cube.coord("latitude").attributes == {} + assert cube.coord("longitude").attributes == 
{} @pytest.mark.parametrize( - 'short_name', - ['tasmax', 'tasmin', 'tas', 'hurs', 'huss']) + "short_name", ["tasmax", "tasmin", "tas", "hurs", "huss"] +) def test_get_wrf381p_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'WRF381P', - 'Amon', + "CORDEX", + "WRF381P", + "Amon", short_name, - extra_facets={'driver': 'MOHC-HadGEM2-ES'}) + extra_facets={"driver": "MOHC-HadGEM2-ES"}, + ) assert isinstance(fix[0], Fix) def test_wrf381p_height_fix(): - time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='time') + time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="time" + ) cube = iris.cube.Cube( [10.0], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(time_coord, 0)], ) - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = wrf381p.Tas(vardef) out_cubes = fix.fix_metadata([cube]) - assert out_cubes[0].coord('height').points == 2.0 + assert out_cubes[0].coord("height").points == 2.0 diff --git a/tests/integration/cmor/_fixes/cordex/test_mpi_m_mpi_esm_lr.py b/tests/integration/cmor/_fixes/cordex/test_mpi_m_mpi_esm_lr.py index 7e42204666..eb7c303333 100644 --- a/tests/integration/cmor/_fixes/cordex/test_mpi_m_mpi_esm_lr.py +++ b/tests/integration/cmor/_fixes/cordex/test_mpi_m_mpi_esm_lr.py @@ -1,36 +1,40 @@ """Tests for the fixes of driver MPI-M-MPI-ESM-LR.""" + import pytest from esmvalcore.cmor.fix import Fix -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_regcm4_6_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'REGCM4-6', - 'Amon', + "CORDEX", + "REGCM4-6", + "Amon", short_name, - extra_facets={'driver': 'MPI-M-MPI-ESM-LR'}) + extra_facets={"driver": "MPI-M-MPI-ESM-LR"}, + ) assert isinstance(fix[0], Fix) def test_get_racmo22e_fix(): fix = Fix.get_fixes( - 'CORDEX', - 'RACMO22E', - 'Amon', - 'pr', - extra_facets={'driver': 'MPI-M-MPI-ESM-LR'}) + "CORDEX", + "RACMO22E", + "Amon", + "pr", + extra_facets={"driver": "MPI-M-MPI-ESM-LR"}, + ) assert isinstance(fix[0], Fix) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_hadrem3ga705_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'HadREM3-GA7-05', - 'Amon', + "CORDEX", + "HadREM3-GA7-05", + "Amon", short_name, - extra_facets={'driver': 'MPI-M-MPI-ESM-LR'}) + extra_facets={"driver": "MPI-M-MPI-ESM-LR"}, + ) assert isinstance(fix[0], Fix) diff --git a/tests/integration/cmor/_fixes/cordex/test_ncc_noresm1_m.py b/tests/integration/cmor/_fixes/cordex/test_ncc_noresm1_m.py index 8b5c4ab084..bd3eedd01f 100644 --- a/tests/integration/cmor/_fixes/cordex/test_ncc_noresm1_m.py +++ b/tests/integration/cmor/_fixes/cordex/test_ncc_noresm1_m.py @@ -1,4 +1,5 @@ """Tests for the fixes of driver NCC-NorESM1-M.""" + import iris import pytest @@ -9,70 +10,74 @@ def test_get_remo2015_fix(): fix = Fix.get_fixes( - 'CORDEX', - 'REMO2015', - 'Amon', - 'pr', - extra_facets={'driver': 'NCC-NorESM1-M'}) + "CORDEX", + "REMO2015", + "Amon", + "pr", + extra_facets={"driver": "NCC-NorESM1-M"}, + ) assert isinstance(fix[0], Fix) def test_get_racmo22e_fix(): fix = Fix.get_fixes( - 'CORDEX', - 'RACMO22E', - 'Amon', - 'pr', - extra_facets={'driver': 'NCC-NorESM1-M'}) + "CORDEX", + "RACMO22E", + "Amon", + "pr", + extra_facets={"driver": "NCC-NorESM1-M"}, + ) assert isinstance(fix[0], Fix) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", 
["pr", "tas"]) def test_get_hadrem3ga705_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'HadREM3-GA7-05', - 'Amon', + "CORDEX", + "HadREM3-GA7-05", + "Amon", short_name, - extra_facets={'driver': 'NCC-NorESM1-M'}) + extra_facets={"driver": "NCC-NorESM1-M"}, + ) assert isinstance(fix[0], Fix) -@pytest.mark.parametrize('short_name', ['pr', 'tas']) +@pytest.mark.parametrize("short_name", ["pr", "tas"]) def test_get_rca4_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'RCA4', - 'Amon', + "CORDEX", + "RCA4", + "Amon", short_name, - extra_facets={'driver': 'NCC-NorESM1-M'}) + extra_facets={"driver": "NCC-NorESM1-M"}, + ) assert isinstance(fix[0], Fix) @pytest.mark.parametrize( - 'short_name', - ['tasmax', 'tasmin', 'tas', 'hurs', 'huss']) + "short_name", ["tasmax", "tasmin", "tas", "hurs", "huss"] +) def test_get_wrf381p_fix(short_name): fix = Fix.get_fixes( - 'CORDEX', - 'WRF381P', - 'Amon', + "CORDEX", + "WRF381P", + "Amon", short_name, - extra_facets={'driver': 'NCC-NorESM1-M'}) + extra_facets={"driver": "NCC-NorESM1-M"}, + ) assert isinstance(fix[0], Fix) def test_wrf381p_height_fix(): - time_coord = iris.coords.DimCoord([0.0], - var_name='time', - standard_name='time', - long_name='time') + time_coord = iris.coords.DimCoord( + [0.0], var_name="time", standard_name="time", long_name="time" + ) cube = iris.cube.Cube( [10.0], - var_name='tas', + var_name="tas", dim_coords_and_dims=[(time_coord, 0)], ) - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = wrf381p.Tas(vardef) out_cubes = fix.fix_metadata([cube]) - assert out_cubes[0].coord('height').points == 2.0 + assert out_cubes[0].coord("height").points == 2.0 diff --git a/tests/integration/cmor/_fixes/emac/test_emac.py b/tests/integration/cmor/_fixes/emac/test_emac.py index 1146502a42..6c738e8ee0 100644 --- a/tests/integration/cmor/_fixes/emac/test_emac.py +++ b/tests/integration/cmor/_fixes/emac/test_emac.py @@ -1,4 +1,5 @@ """Tests for the EMAC on-the-fly CMORizer.""" + from unittest import mock import iris @@ -22,6 +23,7 @@ MP_SS_tot, Od550aer, Pr, + Prodlnox, Rlds, Rlus, Rlut, @@ -48,17 +50,19 @@ def cubes_1d(): """1D cube.""" time_coord = DimCoord( 0.0, - var_name='time', - long_name='time', - units=Unit('day since 1950-01-01 00:00:00', calendar='gregorian'), + var_name="time", + long_name="time", + units=Unit("day since 1950-01-01 00:00:00", calendar="gregorian"), ) cube = Cube([1.0], dim_coords_and_dims=[(time_coord, 0)]) - cubes = CubeList([ - cube.copy(), - cube.copy(), - cube.copy(), - cube.copy(), - ]) + cubes = CubeList( + [ + cube.copy(), + cube.copy(), + cube.copy(), + cube.copy(), + ] + ) return cubes @@ -67,32 +71,34 @@ def cubes_2d(): """2D cube.""" time_coord = DimCoord( 0.0, - var_name='time', - long_name='time', - units=Unit('day since 1950-01-01 00:00:00', calendar='gregorian'), + var_name="time", + long_name="time", + units=Unit("day since 1950-01-01 00:00:00", calendar="gregorian"), ) lat_coord = DimCoord( 0.0, - var_name='lat', - long_name='latitude', - units='degrees_north', + var_name="lat", + long_name="latitude", + units="degrees_north", ) lon_coord = DimCoord( 0.0, - var_name='lon', - long_name='longitude', - units='degrees_east', + var_name="lon", + long_name="longitude", + units="degrees_east", ) cube = Cube( [[[1.0]]], dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], ) - cubes = CubeList([ - cube.copy(), - cube.copy(), - cube.copy(), - cube.copy(), - ]) + cubes = CubeList( + [ + cube.copy(), + cube.copy(), + cube.copy(), + 
cube.copy(), + ] + ) return cubes @@ -101,95 +107,99 @@ def cubes_3d(): """3D cube.""" time_coord = DimCoord( 0.0, - var_name='time', - long_name='time', - units=Unit('day since 1950-01-01 00:00:00', calendar='gregorian'), + var_name="time", + long_name="time", + units=Unit("day since 1950-01-01 00:00:00", calendar="gregorian"), ) plev_coord = DimCoord( [100000.0, 90000.0], - var_name='pax_2', - units='Pa', - attributes={'positive': 'down'}, + var_name="pax_2", + units="Pa", + attributes={"positive": "down"}, ) lev_coord = AuxCoord( [1, 2], - var_name='lev', - long_name='hybrid level at layer midpoints', + var_name="lev", + long_name="hybrid level at layer midpoints", ) lat_coord = DimCoord( 0.0, - var_name='lat', - long_name='latitude', - units='degrees_north', + var_name="lat", + long_name="latitude", + units="degrees_north", ) lon_coord = DimCoord( 0.0, - var_name='lon', - long_name='longitude', - units='degrees_east', + var_name="lon", + long_name="longitude", + units="degrees_east", ) cube = Cube( [[[[1.0]], [[2.0]]]], - dim_coords_and_dims=[(time_coord, 0), - (plev_coord, 1), - (lat_coord, 2), - (lon_coord, 3)], + dim_coords_and_dims=[ + (time_coord, 0), + (plev_coord, 1), + (lat_coord, 2), + (lon_coord, 3), + ], aux_coords_and_dims=[(lev_coord, 1)], ) hyam_cube = Cube( [100000.0, 90000.0], - var_name='hyam', - long_name='hybrid A coefficient at layer midpoints', - units='Pa', + var_name="hyam", + long_name="hybrid A coefficient at layer midpoints", + units="Pa", ) hybm_cube = Cube( [0.8, 0.4], - var_name='hybm', - long_name='hybrid B coefficient at layer midpoints', - units='1', + var_name="hybm", + long_name="hybrid B coefficient at layer midpoints", + units="1", ) hyai_cube = Cube( [110000.0, 95000.0, 80000.0], - var_name='hyai', - long_name='hybrid A coefficient at layer interfaces', - units='Pa', + var_name="hyai", + long_name="hybrid A coefficient at layer interfaces", + units="Pa", ) hybi_cube = Cube( [0.9, 0.5, 0.2], - var_name='hybi', - long_name='hybrid B coefficient at layer interfaces', - units='1', + var_name="hybi", + long_name="hybrid B coefficient at layer interfaces", + units="1", ) aps_ave_cube = Cube( [[[100000.0]]], - var_name='aps_ave', - long_name='surface pressure', - units='Pa', - ) - cubes = CubeList([ - cube.copy(), - cube.copy(), - cube.copy(), - cube.copy(), - hyam_cube, - hybm_cube, - hyai_cube, - hybi_cube, - aps_ave_cube, - ]) + var_name="aps_ave", + long_name="surface pressure", + units="Pa", + ) + cubes = CubeList( + [ + cube.copy(), + cube.copy(), + cube.copy(), + cube.copy(), + hyam_cube, + hybm_cube, + hyai_cube, + hybi_cube, + aps_ave_cube, + ] + ) return cubes def _get_fix(mip, short_name, fix_name): """Load a fix from the esmvalcore.cmor._fixes.emac.emac module.""" dataset = Dataset( - project='EMAC', - dataset='EMAC', + project="EMAC", + dataset="EMAC", mip=mip, short_name=short_name, ) extra_facets = get_extra_facets(dataset, ()) - vardef = get_var_info(project='EMAC', mip=mip, short_name=short_name) + vardef = get_var_info(project="EMAC", mip=mip, short_name=short_name) cls = getattr(esmvalcore.cmor._fixes.emac.emac, fix_name) fix = cls(vardef, extra_facets=extra_facets) return fix @@ -201,7 +211,7 @@ def get_fix(mip, short_name): def get_allvars_fix(mip, short_name): - return _get_fix(mip, short_name, 'AllVars') + return _get_fix(mip, short_name, "AllVars") def fix_metadata(cubes, mip, short_name): @@ -217,11 +227,11 @@ def check_tas_metadata(cubes): """Check tas metadata.""" assert len(cubes) == 1 cube = cubes[0] - assert cube.var_name == 
'tas' - assert cube.standard_name == 'air_temperature' - assert cube.long_name == 'Near-Surface Air Temperature' - assert cube.units == 'K' - assert 'positive' not in cube.attributes + assert cube.var_name == "tas" + assert cube.standard_name == "air_temperature" + assert cube.long_name == "Near-Surface Air Temperature" + assert cube.units == "K" + assert "positive" not in cube.attributes return cube @@ -229,23 +239,24 @@ def check_ta_metadata(cubes): """Check ta metadata.""" assert len(cubes) == 1 cube = cubes[0] - assert cube.var_name == 'ta' - assert cube.standard_name == 'air_temperature' - assert cube.long_name == 'Air Temperature' - assert cube.units == 'K' - assert 'positive' not in cube.attributes + assert cube.var_name == "ta" + assert cube.standard_name == "air_temperature" + assert cube.long_name == "Air Temperature" + assert cube.units == "K" + assert "positive" not in cube.attributes return cube def check_time(cube): """Check time coordinate of cube.""" - assert cube.coords('time', dim_coords=True) - time = cube.coord('time', dim_coords=True) - assert time.var_name == 'time' - assert time.standard_name == 'time' - assert time.long_name == 'time' - assert time.units == Unit('day since 1950-01-01 00:00:00', - calendar='gregorian') + assert cube.coords("time", dim_coords=True) + time = cube.coord("time", dim_coords=True) + assert time.var_name == "time" + assert time.standard_name == "time" + assert time.long_name == "time" + assert time.units == Unit( + "day since 1950-01-01 00:00:00", calendar="gregorian" + ) np.testing.assert_allclose(time.points, [54786.9916666667]) assert time.bounds is None assert time.attributes == {} @@ -253,23 +264,60 @@ def check_time(cube): def check_plev(cube): """Check plev coordinate of cube.""" - assert cube.coords('air_pressure', dim_coords=True) - plev = cube.coord('air_pressure', dim_coords=True) - assert plev.var_name == 'plev' - assert plev.standard_name == 'air_pressure' - assert plev.long_name == 'pressure' - assert plev.units == 'Pa' - assert plev.attributes['positive'] == 'down' + assert cube.coords("air_pressure", dim_coords=True) + plev = cube.coord("air_pressure", dim_coords=True) + assert plev.var_name == "plev" + assert plev.standard_name == "air_pressure" + assert plev.long_name == "pressure" + assert plev.units == "Pa" + assert plev.attributes["positive"] == "down" # Note: plev is reversed (index 0 should be surface, but is TOA at the # moment), but this is fixed in the CMOR checks in a later step # automatically np.testing.assert_allclose( plev.points, - [3, 5, 7, 10, 15, 20, 30, 40, 50, 70, 100, 150, 200, 300, 500, 700, - 1000, 1500, 2000, 3000, 5000, 7000, 8000, 9000, 10000, 11500, 13000, - 15000, 17000, 20000, 25000, 30000, 40000, 50000, 60000, 70000, 85000, - 92500, 100000], + [ + 3, + 5, + 7, + 10, + 15, + 20, + 30, + 40, + 50, + 70, + 100, + 150, + 200, + 300, + 500, + 700, + 1000, + 1500, + 2000, + 3000, + 5000, + 7000, + 8000, + 9000, + 10000, + 11500, + 13000, + 15000, + 17000, + 20000, + 25000, + 30000, + 40000, + 50000, + 60000, + 70000, + 85000, + 92500, + 100000, + ], ) assert plev.bounds is None @@ -277,15 +325,17 @@ def check_plev(cube): def check_alevel(cube): """Check alevel coordinate of cube.""" # atmosphere_hybrid_sigma_pressure_coordinate - assert cube.coords('atmosphere_hybrid_sigma_pressure_coordinate', - dim_coords=True) - lev = cube.coord('atmosphere_hybrid_sigma_pressure_coordinate', - dim_coords=True) - assert lev.var_name == 'lev' - assert lev.standard_name == 
'atmosphere_hybrid_sigma_pressure_coordinate' - assert lev.long_name == 'hybrid sigma pressure coordinate' - assert lev.units == '1' - assert lev.attributes['positive'] == 'down' + assert cube.coords( + "atmosphere_hybrid_sigma_pressure_coordinate", dim_coords=True + ) + lev = cube.coord( + "atmosphere_hybrid_sigma_pressure_coordinate", dim_coords=True + ) + assert lev.var_name == "lev" + assert lev.standard_name == "atmosphere_hybrid_sigma_pressure_coordinate" + assert lev.long_name == "hybrid sigma pressure coordinate" + assert lev.units == "1" + assert lev.attributes["positive"] == "down" np.testing.assert_allclose( lev.points[:4], [0.996141, 0.982633, 0.954782, 0.909258], @@ -293,22 +343,26 @@ def check_alevel(cube): ) np.testing.assert_allclose( lev.bounds[:4], - [[1.0, 0.992281], - [0.992281, 0.972985], - [0.972985, 0.936579], - [0.936579, 0.881937]], + [ + [1.0, 0.992281], + [0.992281, 0.972985], + [0.972985, 0.936579], + [0.936579, 0.881937], + ], rtol=1e-5, ) # Coefficient ap - assert cube.coords('vertical coordinate formula term: ap(k)', - dim_coords=False) - ap_coord = cube.coord('vertical coordinate formula term: ap(k)', - dim_coords=False) - assert ap_coord.var_name == 'ap' + assert cube.coords( + "vertical coordinate formula term: ap(k)", dim_coords=False + ) + ap_coord = cube.coord( + "vertical coordinate formula term: ap(k)", dim_coords=False + ) + assert ap_coord.var_name == "ap" assert ap_coord.standard_name is None - assert ap_coord.long_name == 'vertical coordinate formula term: ap(k)' - assert ap_coord.units == 'Pa' + assert ap_coord.long_name == "vertical coordinate formula term: ap(k)" + assert ap_coord.units == "Pa" assert ap_coord.attributes == {} np.testing.assert_allclose( ap_coord.points[:4], @@ -317,22 +371,21 @@ def check_alevel(cube): ) np.testing.assert_allclose( ap_coord.bounds[:4], - [[0.0, 0.0], - [0.0, 0.0], - [0.0, 783.195007], - [783.195007, 2549.968994]], + [[0.0, 0.0], [0.0, 0.0], [0.0, 783.195007], [783.195007, 2549.968994]], rtol=1e-5, ) # Coefficient b - assert cube.coords('vertical coordinate formula term: b(k)', - dim_coords=False) - b_coord = cube.coord('vertical coordinate formula term: b(k)', - dim_coords=False) - assert b_coord.var_name == 'b' + assert cube.coords( + "vertical coordinate formula term: b(k)", dim_coords=False + ) + b_coord = cube.coord( + "vertical coordinate formula term: b(k)", dim_coords=False + ) + assert b_coord.var_name == "b" assert b_coord.standard_name is None - assert b_coord.long_name == 'vertical coordinate formula term: b(k)' - assert b_coord.units == '1' + assert b_coord.long_name == "vertical coordinate formula term: b(k)" + assert b_coord.units == "1" assert b_coord.attributes == {} np.testing.assert_allclose( b_coord.points[:4], @@ -341,20 +394,22 @@ def check_alevel(cube): ) np.testing.assert_allclose( b_coord.bounds[:4], - [[1.0, 0.992281], - [0.992281, 0.972985], - [0.972985, 0.928747], - [0.928747, 0.856438]], + [ + [1.0, 0.992281], + [0.992281, 0.972985], + [0.972985, 0.928747], + [0.928747, 0.856438], + ], rtol=1e-5, ) # Coefficient ps - assert cube.coords('surface_air_pressure', dim_coords=False) - ps_coord = cube.coord('surface_air_pressure', dim_coords=False) - assert ps_coord.var_name == 'ps' - assert ps_coord.standard_name == 'surface_air_pressure' - assert ps_coord.long_name == 'Surface Air Pressure' - assert ps_coord.units == 'Pa' + assert cube.coords("surface_air_pressure", dim_coords=False) + ps_coord = cube.coord("surface_air_pressure", dim_coords=False) + assert ps_coord.var_name == "ps" + 
assert ps_coord.standard_name == "surface_air_pressure" + assert ps_coord.long_name == "Surface Air Pressure" + assert ps_coord.units == "Pa" assert ps_coord.attributes == {} np.testing.assert_allclose( ps_coord.points[:, :, 0], @@ -364,12 +419,12 @@ def check_alevel(cube): assert ps_coord.bounds is None # air_pressure - assert cube.coords('air_pressure', dim_coords=False) - p_coord = cube.coord('air_pressure', dim_coords=False) + assert cube.coords("air_pressure", dim_coords=False) + p_coord = cube.coord("air_pressure", dim_coords=False) assert p_coord.var_name is None - assert p_coord.standard_name == 'air_pressure' + assert p_coord.standard_name == "air_pressure" assert p_coord.long_name is None - assert p_coord.units == 'Pa' + assert p_coord.units == "Pa" assert p_coord.attributes == {} assert p_coord.points[0, 0, 0, 0] > p_coord.points[0, -1, 0, 0] assert p_coord.bounds[0, 0, 0, 0, 0] > p_coord.bounds[0, -1, 0, 0, 0] @@ -380,7 +435,7 @@ def check_hybrid_z(cube): """Check hybrid Z-coordinates of 3D cubes.""" assert len(cube.aux_factories) == 1 - air_pressure_coord = cube.coord('air_pressure') + air_pressure_coord = cube.coord("air_pressure") np.testing.assert_allclose( air_pressure_coord.points, [[[[130000.0]], [[180000.0]]]], @@ -390,88 +445,102 @@ def check_hybrid_z(cube): [[[[[100000.0, 145000.0]]], [[[145000.0, 200000.0]]]]], ) - lev_coord = cube.coord('atmosphere_hybrid_sigma_pressure_coordinate') + lev_coord = cube.coord("atmosphere_hybrid_sigma_pressure_coordinate") np.testing.assert_allclose(lev_coord.points, [1.3, 1.8]) np.testing.assert_allclose(lev_coord.bounds, [[1.0, 1.45], [1.45, 2.0]]) def check_lat(cube): """Check latitude coordinate of cube.""" - assert cube.coords('latitude', dim_coords=True) - lat = cube.coord('latitude', dim_coords=True) - assert lat.var_name == 'lat' - assert lat.standard_name == 'latitude' - assert lat.long_name == 'latitude' - assert lat.units == 'degrees_north' + assert cube.coords("latitude", dim_coords=True) + lat = cube.coord("latitude", dim_coords=True) + assert lat.var_name == "lat" + assert lat.standard_name == "latitude" + assert lat.long_name == "latitude" + assert lat.units == "degrees_north" np.testing.assert_allclose( lat.points, - [59.4444082891668, 19.8757191474409, -19.8757191474409, - -59.4444082891668], + [ + 59.4444082891668, + 19.8757191474409, + -19.8757191474409, + -59.4444082891668, + ], ) np.testing.assert_allclose( lat.bounds, - [[79.22875286, 39.66006372], - [39.66006372, 0.0], - [0.0, -39.66006372], - [-39.66006372, -79.22875286]], + [ + [79.22875286, 39.66006372], + [39.66006372, 0.0], + [0.0, -39.66006372], + [-39.66006372, -79.22875286], + ], ) assert lat.attributes == {} def check_lon(cube): """Check longitude coordinate of cube.""" - assert cube.coords('longitude', dim_coords=True) - lon = cube.coord('longitude', dim_coords=True) - assert lon.var_name == 'lon' - assert lon.standard_name == 'longitude' - assert lon.long_name == 'longitude' - assert lon.units == 'degrees_east' + assert cube.coords("longitude", dim_coords=True) + lon = cube.coord("longitude", dim_coords=True) + assert lon.var_name == "lon" + assert lon.standard_name == "longitude" + assert lon.long_name == "longitude" + assert lon.units == "degrees_east" np.testing.assert_allclose( lon.points, [0.0, 45.0, 90.0, 135.0, 180.0, 225.0, 270.0, 315.0], ) np.testing.assert_allclose( lon.bounds, - [[-22.5, 22.5], [22.5, 67.5], [67.5, 112.5], [112.5, 157.5], - [157.5, 202.5], [202.5, 247.5], [247.5, 292.5], [292.5, 337.5]], + [ + [-22.5, 22.5], + [22.5, 67.5], 
+ [67.5, 112.5], + [112.5, 157.5], + [157.5, 202.5], + [202.5, 247.5], + [247.5, 292.5], + [292.5, 337.5], + ], ) assert lon.attributes == {} def check_heightxm(cube, height_value): """Check scalar heightxm coordinate of cube.""" - assert cube.coords('height') - height = cube.coord('height') - assert height.var_name == 'height' - assert height.standard_name == 'height' - assert height.long_name == 'height' - assert height.units == 'm' - assert height.attributes == {'positive': 'up'} + assert cube.coords("height") + height = cube.coord("height") + assert height.var_name == "height" + assert height.standard_name == "height" + assert height.long_name == "height" + assert height.units == "m" + assert height.attributes == {"positive": "up"} np.testing.assert_allclose(height.points, [height_value]) assert height.bounds is None def check_lambda550nm(cube): """Check scalar lambda550nm coordinate of cube.""" - assert cube.coords('radiation_wavelength') - typesi = cube.coord('radiation_wavelength') - assert typesi.var_name == 'wavelength' - assert typesi.standard_name == 'radiation_wavelength' - assert typesi.long_name == 'Radiation Wavelength 550 nanometers' - assert typesi.units == 'nm' + assert cube.coords("radiation_wavelength") + typesi = cube.coord("radiation_wavelength") + assert typesi.var_name == "wavelength" + assert typesi.standard_name == "radiation_wavelength" + assert typesi.long_name == "Radiation Wavelength 550 nanometers" + assert typesi.units == "nm" np.testing.assert_array_equal(typesi.points, [550.0]) assert typesi.bounds is None def check_typesi(cube): """Check scalar typesi coordinate of cube.""" - assert cube.coords('area_type') - typesi = cube.coord('area_type') - assert typesi.var_name == 'type' - assert typesi.standard_name == 'area_type' - assert typesi.long_name == 'Sea Ice area type' + assert cube.coords("area_type") + typesi = cube.coord("area_type") + assert typesi.var_name == "type" + assert typesi.standard_name == "area_type" + assert typesi.long_name == "Sea Ice area type" assert typesi.units.is_no_unit() - np.testing.assert_array_equal(typesi.points, ['sea_ice']) + np.testing.assert_array_equal(typesi.points, ["sea_ice"]) assert typesi.bounds is None @@ -480,79 +549,93 @@ def check_typesi(cube): def test_get_cube_cav(): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') - cubes = CubeList([ - Cube(0.0), - Cube(0.0, var_name='temp2_cav'), - ]) + fix = get_allvars_fix("Amon", "tas") + cubes = CubeList( + [ + Cube(0.0), + Cube(0.0, var_name="temp2_cav"), + ] + ) cube = fix.get_cube(cubes) - assert cube.var_name == 'temp2_cav' + assert cube.var_name == "temp2_cav" def test_get_cube_ave(): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') - cubes = CubeList([ - Cube(0.0), - Cube(0.0, var_name='temp2_ave'), - ]) + fix = get_allvars_fix("Amon", "tas") + cubes = CubeList( + [ + Cube(0.0), + Cube(0.0, var_name="temp2_ave"), + ] + ) cube = fix.get_cube(cubes) - assert cube.var_name == 'temp2_ave' + assert cube.var_name == "temp2_ave" def test_get_cube_cav_ave(): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') - cubes = CubeList([ - Cube(0.0, var_name='temp2_ave'), - Cube(0.0, var_name='temp2_cav'), - ]) + fix = get_allvars_fix("Amon", "tas") + cubes = CubeList( + [ + Cube(0.0, var_name="temp2_ave"), + Cube(0.0, var_name="temp2_cav"), + ] + ) cube = fix.get_cube(cubes) - assert cube.var_name == 'temp2_cav' + assert cube.var_name == "temp2_cav" def test_get_cube_str_input(): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') - cubes = CubeList([ - 
Cube(0.0), - Cube(0.0, var_name='x'), - ]) - cube = fix.get_cube(cubes, var_name='x') - assert cube.var_name == 'x' + fix = get_allvars_fix("Amon", "tas") + cubes = CubeList( + [ + Cube(0.0), + Cube(0.0, var_name="x"), + ] + ) + cube = fix.get_cube(cubes, var_name="x") + assert cube.var_name == "x" def test_get_cube_list_input(): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') - cubes = CubeList([ - Cube(0.0), - Cube(0.0, var_name='x'), - Cube(0.0, var_name='y'), - ]) - cube = fix.get_cube(cubes, var_name=['y', 'x']) - assert cube.var_name == 'y' + fix = get_allvars_fix("Amon", "tas") + cubes = CubeList( + [ + Cube(0.0), + Cube(0.0, var_name="x"), + Cube(0.0, var_name="y"), + ] + ) + cube = fix.get_cube(cubes, var_name=["y", "x"]) + assert cube.var_name == "y" def test_var_not_available_fix(): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") cubes = CubeList([Cube(0.0)]) - msg = (r"No variable of \['tm1_p19_cav', 'tm1_p19_ave'\] necessary for " - r"the extraction/derivation the CMOR variable 'ta' is available in " - r"the input file.") + msg = ( + r"No variable of \['tm1_cav', 'tm1_ave', 'tm1'\] necessary for " + r"the extraction/derivation the CMOR variable 'ta' is available in " + r"the input file." + ) with pytest.raises(ValueError, match=msg): fix.fix_metadata(cubes) def test_var_not_available_get_cube(): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") cubes = CubeList([Cube(0.0)]) - msg = (r"No variable of \['x'\] necessary for the extraction/derivation " - r"the CMOR variable 'ta' is available in the input file.") + msg = ( + r"No variable of \['x'\] necessary for the extraction/derivation " + r"the CMOR variable 'ta' is available in the input file." + ) with pytest.raises(ValueError, match=msg): - fix.get_cube(cubes, var_name='x') + fix.get_cube(cubes, var_name="x") # Test with single-dimension cubes @@ -560,27 +643,33 @@ def test_var_not_available_get_cube(): def test_only_time(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") # We know that ta has dimensions time, plev19, latitude, longitude, but the # EMAC CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of ta to create # an artificial, but realistic test case. 
- coord_info = CoordinateInfo('time') - coord_info.standard_name = 'time' - monkeypatch.setattr(fix.vardef, 'coordinates', {'time': coord_info}) + coord_info = CoordinateInfo("time") + coord_info.standard_name = "time" + monkeypatch.setattr(fix.vardef, "coordinates", {"time": coord_info}) # Create cube with only a single dimension - time_coord = DimCoord([0.0, 1.0], - var_name='time', - standard_name='time', - long_name='time', - units='days since 1850-01-01') - cubes = CubeList([ - Cube([1, 1], - var_name='tm1_p19_ave', - units='K', - dim_coords_and_dims=[(time_coord, 0)]), - ]) + time_coord = DimCoord( + [0.0, 1.0], + var_name="time", + standard_name="time", + long_name="time", + units="days since 1850-01-01", + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="tm1_cav", + units="K", + dim_coords_and_dims=[(time_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -591,41 +680,48 @@ def test_only_time(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check time metadata - assert cube.coords('time') - new_time_coord = cube.coord('time', dim_coords=True) - assert new_time_coord.var_name == 'time' - assert new_time_coord.standard_name == 'time' - assert new_time_coord.long_name == 'time' - assert new_time_coord.units == 'days since 1850-01-01' + assert cube.coords("time") + new_time_coord = cube.coord("time", dim_coords=True) + assert new_time_coord.var_name == "time" + assert new_time_coord.standard_name == "time" + assert new_time_coord.long_name == "time" + assert new_time_coord.units == "days since 1850-01-01" # Check time data np.testing.assert_allclose(new_time_coord.points, [0.0, 1.0]) - np.testing.assert_allclose(new_time_coord.bounds, - [[-0.5, 0.5], [0.5, 1.5]]) + np.testing.assert_allclose( + new_time_coord.bounds, [[-0.5, 0.5], [0.5, 1.5]] + ) def test_only_plev(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") # We know that ta has dimensions time, plev19, latitude, longitude, but the # EMAC CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of ta to create # an artificial, but realistic test case. 
- coord_info = CoordinateInfo('plev19') - coord_info.standard_name = 'air_pressure' - monkeypatch.setattr(fix.vardef, 'coordinates', {'plev19': coord_info}) + coord_info = CoordinateInfo("plev19") + coord_info.standard_name = "air_pressure" + monkeypatch.setattr(fix.vardef, "coordinates", {"plev19": coord_info}) # Create cube with only a single dimension - plev_coord = DimCoord([1000.0, 900.0], - var_name='plev', - standard_name='air_pressure', - units='hPa') - cubes = CubeList([ - Cube([1, 1], - var_name='tm1_p19_ave', - units='K', - dim_coords_and_dims=[(plev_coord, 0)]), - ]) + plev_coord = DimCoord( + [1000.0, 900.0], + var_name="plev", + standard_name="air_pressure", + units="hPa", + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="tm1_ave", + units="K", + dim_coords_and_dims=[(plev_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -636,13 +732,13 @@ def test_only_plev(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check plev metadata - assert cube.coords('air_pressure', dim_coords=True) - new_plev_coord = cube.coord('air_pressure') - assert new_plev_coord.var_name == 'plev' - assert new_plev_coord.standard_name == 'air_pressure' - assert new_plev_coord.long_name == 'pressure' - assert new_plev_coord.units == 'Pa' - assert new_plev_coord.attributes == {'positive': 'down'} + assert cube.coords("air_pressure", dim_coords=True) + new_plev_coord = cube.coord("air_pressure") + assert new_plev_coord.var_name == "plev" + assert new_plev_coord.standard_name == "air_pressure" + assert new_plev_coord.long_name == "pressure" + assert new_plev_coord.units == "Pa" + assert new_plev_coord.attributes == {"positive": "down"} # Check plev data np.testing.assert_allclose(new_plev_coord.points, [100000.0, 90000.0]) @@ -651,26 +747,29 @@ def test_only_plev(monkeypatch): def test_only_latitude(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") # We know that ta has dimensions time, plev19, latitude, longitude, but the # EMAC CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of ta to create # an artificial, but realistic test case. 
- coord_info = CoordinateInfo('latitude') - coord_info.standard_name = 'latitude' - monkeypatch.setattr(fix.vardef, 'coordinates', {'latitude': coord_info}) + coord_info = CoordinateInfo("latitude") + coord_info.standard_name = "latitude" + monkeypatch.setattr(fix.vardef, "coordinates", {"latitude": coord_info}) # Create cube with only a single dimension - lat_coord = DimCoord([0.0, 10.0], - var_name='lat', - standard_name='latitude', - units='degrees') - cubes = CubeList([ - Cube([1, 1], - var_name='tm1_p19_ave', - units='K', - dim_coords_and_dims=[(lat_coord, 0)]), - ]) + lat_coord = DimCoord( + [0.0, 10.0], var_name="lat", standard_name="latitude", units="degrees" + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="tm1", + units="K", + dim_coords_and_dims=[(lat_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -681,41 +780,48 @@ def test_only_latitude(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check latitude metadata - assert cube.coords('latitude', dim_coords=True) - new_lat_coord = cube.coord('latitude') - assert new_lat_coord.var_name == 'lat' - assert new_lat_coord.standard_name == 'latitude' - assert new_lat_coord.long_name == 'latitude' - assert new_lat_coord.units == 'degrees_north' + assert cube.coords("latitude", dim_coords=True) + new_lat_coord = cube.coord("latitude") + assert new_lat_coord.var_name == "lat" + assert new_lat_coord.standard_name == "latitude" + assert new_lat_coord.long_name == "latitude" + assert new_lat_coord.units == "degrees_north" # Check latitude data np.testing.assert_allclose(new_lat_coord.points, [0.0, 10.0]) - np.testing.assert_allclose(new_lat_coord.bounds, - [[-5.0, 5.0], [5.0, 15.0]]) + np.testing.assert_allclose( + new_lat_coord.bounds, [[-5.0, 5.0], [5.0, 15.0]] + ) def test_only_longitude(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") # We know that ta has dimensions time, plev19, latitude, longitude, but the # EMAC CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of ta to create # an artificial, but realistic test case. 
- coord_info = CoordinateInfo('longitude') - coord_info.standard_name = 'longitude' - monkeypatch.setattr(fix.vardef, 'coordinates', {'longitude': coord_info}) + coord_info = CoordinateInfo("longitude") + coord_info.standard_name = "longitude" + monkeypatch.setattr(fix.vardef, "coordinates", {"longitude": coord_info}) # Create cube with only a single dimension - lon_coord = DimCoord([0.0, 180.0], - var_name='lon', - standard_name='longitude', - units='degrees') - cubes = CubeList([ - Cube([1, 1], - var_name='tm1_p19_ave', - units='K', - dim_coords_and_dims=[(lon_coord, 0)]), - ]) + lon_coord = DimCoord( + [0.0, 180.0], + var_name="lon", + standard_name="longitude", + units="degrees", + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="tm1", + units="K", + dim_coords_and_dims=[(lon_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -726,17 +832,18 @@ def test_only_longitude(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check longitude metadata - assert cube.coords('longitude', dim_coords=True) - new_lon_coord = cube.coord('longitude') - assert new_lon_coord.var_name == 'lon' - assert new_lon_coord.standard_name == 'longitude' - assert new_lon_coord.long_name == 'longitude' - assert new_lon_coord.units == 'degrees_east' + assert cube.coords("longitude", dim_coords=True) + new_lon_coord = cube.coord("longitude") + assert new_lon_coord.var_name == "lon" + assert new_lon_coord.standard_name == "longitude" + assert new_lon_coord.long_name == "longitude" + assert new_lon_coord.units == "degrees_east" # Check longitude data np.testing.assert_allclose(new_lon_coord.points, [0.0, 180.0]) - np.testing.assert_allclose(new_lon_coord.bounds, - [[-90.0, 90.0], [90.0, 270.0]]) + np.testing.assert_allclose( + new_lon_coord.bounds, [[-90.0, 90.0], [90.0, 270.0]] + ) # Tests with sample data @@ -745,9 +852,9 @@ def test_only_longitude(monkeypatch): def test_sample_data_tas(test_data_path, tmp_path): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") - filepath = test_data_path / 'emac.nc' + filepath = test_data_path / "emac.nc" fixed_path = fix.fix_file(filepath, tmp_path) assert fixed_path == filepath @@ -770,13 +877,14 @@ def test_sample_data_tas(test_data_path, tmp_path): def test_sample_data_ta_plev(test_data_path, tmp_path, monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") # Note: raw_name needs to be modified since the sample file only contains # plev39, while Amon's ta needs plev19 by default - monkeypatch.setitem(fix.extra_facets, 'raw_name', - ['tm1_p39_cav', 'tm1_p39_ave']) + monkeypatch.setitem( + fix.extra_facets, "raw_name", ["tm1_p39_cav", "tm1_p39_ave"] + ) - filepath = test_data_path / 'emac.nc' + filepath = test_data_path / "emac.nc" fixed_path = fix.fix_file(filepath, tmp_path) assert fixed_path == filepath @@ -800,17 +908,17 @@ def test_sample_data_ta_plev(test_data_path, tmp_path, monkeypatch): def test_sample_data_ta_alevel(test_data_path, tmp_path): """Test fix.""" - fix = get_allvars_fix('CFmon', 'ta') + fix = get_allvars_fix("CFmon", "ta") - filepath = test_data_path / 'emac.nc' + filepath = test_data_path / "emac.nc" fixed_path = fix.fix_file(filepath, tmp_path) assert fixed_path != filepath cubes = iris.load(str(fixed_path)) - assert cubes.extract(NameConstraint(var_name='hyam')) - assert cubes.extract(NameConstraint(var_name='hybm')) - assert cubes.extract(NameConstraint(var_name='hyai')) - assert 
cubes.extract(NameConstraint(var_name='hybi')) + assert cubes.extract(NameConstraint(var_name="hyam")) + assert cubes.extract(NameConstraint(var_name="hybm")) + assert cubes.extract(NameConstraint(var_name="hyai")) + assert cubes.extract(NameConstraint(var_name="hybi")) fixed_cubes = fix.fix_metadata(cubes) @@ -834,254 +942,288 @@ def test_sample_data_ta_alevel(test_data_path, tmp_path): def test_get_awhea_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Omon', 'awhea') + fix = Fix.get_fixes("EMAC", "EMAC", "Omon", "awhea") assert fix == [AllVars(None), GenericFix(None)] def test_awhea_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'awhea_cav' - cubes_2d[0].units = 'W m-2' - fix = get_allvars_fix('Omon', 'awhea') + cubes_2d[0].var_name = "awhea_cav" + cubes_2d[0].units = "W m-2" + fix = get_allvars_fix("Omon", "awhea") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'awhea' + assert cube.var_name == "awhea" assert cube.standard_name is None - assert cube.long_name == ('Global Mean Net Surface Heat Flux Over Open ' - 'Water') - assert cube.units == 'W m-2' - assert 'positive' not in cube.attributes + assert cube.long_name == ( + "Global Mean Net Surface Heat Flux Over Open Water" + ) + assert cube.units == "W m-2" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[1.0]]]) def test_get_clivi_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'clivi') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "clivi") assert fix == [AllVars(None), GenericFix(None)] def test_clivi_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'xivi_cav' - cubes_2d[0].units = 'kg m-2' - fix = get_allvars_fix('Amon', 'clivi') + cubes_2d[0].var_name = "xivi_cav" + cubes_2d[0].units = "kg m-2" + fix = get_allvars_fix("Amon", "clivi") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'clivi' - assert cube.standard_name == 'atmosphere_mass_content_of_cloud_ice' - assert cube.long_name == 'Ice Water Path' - assert cube.units == 'kg m-2' - assert 'positive' not in cube.attributes + assert cube.var_name == "clivi" + assert cube.standard_name == "atmosphere_mass_content_of_cloud_ice" + assert cube.long_name == "Ice Water Path" + assert cube.units == "kg m-2" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[1.0]]]) def test_get_clt_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'clt') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "clt") assert fix == [AllVars(None), GenericFix(None)] def test_clt_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'aclcov_cav' - fix = get_allvars_fix('Amon', 'clt') + cubes_2d[0].var_name = "aclcov_cav" + fix = get_allvars_fix("Amon", "clt") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'clt' - assert cube.standard_name == 'cloud_area_fraction' - assert cube.long_name == 'Total Cloud Cover Percentage' - assert cube.units == '%' - assert 'positive' not in cube.attributes + assert cube.var_name == "clt" + assert cube.standard_name == "cloud_area_fraction" + assert cube.long_name == "Total Cloud Cover Percentage" + assert cube.units == "%" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[100.0]]]) def test_get_clwvi_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 
'Amon', 'clwvi') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "clwvi") assert fix == [Clwvi(None), AllVars(None), GenericFix(None)] def test_clwvi_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'xlvi_cav' - cubes_2d[1].var_name = 'xivi_cav' - cubes_2d[0].units = 'kg m-2' - cubes_2d[1].units = 'kg m-2' + cubes_2d[0].var_name = "xlvi_cav" + cubes_2d[1].var_name = "xivi_cav" + cubes_2d[0].units = "kg m-2" + cubes_2d[1].units = "kg m-2" + + fixed_cubes = fix_metadata(cubes_2d, "Amon", "clwvi") + + assert len(fixed_cubes) == 1 + cube = fixed_cubes[0] + assert cube.var_name == "clwvi" + assert cube.standard_name == ( + "atmosphere_mass_content_of_cloud_condensed_water" + ) + assert cube.long_name == "Condensed Water Path" + assert cube.units == "kg m-2" + assert "positive" not in cube.attributes + + np.testing.assert_allclose(cube.data, [[[2.0]]]) + + +def test_get_prodlnox_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "prodlnox") + assert fix == [Prodlnox(None), AllVars(None), GenericFix(None)] + + +def test_prodlnox_fix(cubes_2d): + """Test fix.""" + cubes_2d[0].var_name = "NOxcg_cav" + cubes_2d[1].var_name = "NOxic_cav" + cubes_2d[2].var_name = "dt" + cubes_2d[0].units = "kg" + cubes_2d[1].units = "kg" + cubes_2d[2].units = "s" - fixed_cubes = fix_metadata(cubes_2d, 'Amon', 'clwvi') + fixed_cubes = fix_metadata(cubes_2d, "Amon", "prodlnox") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'clwvi' - assert cube.standard_name == ('atmosphere_mass_content_of_cloud_' - 'condensed_water') - assert cube.long_name == 'Condensed Water Path' - assert cube.units == 'kg m-2' - assert 'positive' not in cube.attributes + assert cube.var_name == "prodlnox" + assert cube.standard_name is None + assert cube.long_name == ( + "Tendency of atmosphere mass content of NOx from lightning" + ) + assert cube.units == "kg s-1" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[2.0]]]) def test_get_co2mass_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'co2mass') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "co2mass") assert fix == [AllVars(None), GenericFix(None)] def test_co2mass_fix(cubes_1d): """Test fix.""" - cubes_1d[0].var_name = 'MP_CO2_cav' - cubes_1d[0].units = 'kg' - fix = get_allvars_fix('Amon', 'co2mass') + cubes_1d[0].var_name = "MP_CO2_cav" + cubes_1d[0].units = "kg" + fix = get_allvars_fix("Amon", "co2mass") fixed_cubes = fix.fix_metadata(cubes_1d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'co2mass' - assert cube.standard_name == 'atmosphere_mass_of_carbon_dioxide' - assert cube.long_name == 'Total Atmospheric Mass of CO2' - assert cube.units == 'kg' - assert 'positive' not in cube.attributes + assert cube.var_name == "co2mass" + assert cube.standard_name == "atmosphere_mass_of_carbon_dioxide" + assert cube.long_name == "Total Atmospheric Mass of CO2" + assert cube.units == "kg" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [1.0]) def test_get_evspsbl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'evspsbl') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "evspsbl") assert fix == [Evspsbl(None), AllVars(None), GenericFix(None)] def test_evspsbl_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'evap_cav' - cubes_2d[0].units = 'kg m-2 s-1' - fix = get_allvars_fix('Amon', 'evspsbl') + cubes_2d[0].var_name = "evap_cav" + cubes_2d[0].units = "kg m-2 s-1" + fix = 
get_allvars_fix("Amon", "evspsbl") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - fix = get_fix('Amon', 'evspsbl') + fix = get_fix("Amon", "evspsbl") cube = fix.fix_data(cube) - assert cube.var_name == 'evspsbl' - assert cube.standard_name == 'water_evapotranspiration_flux' - assert cube.long_name == ('Evaporation Including Sublimation and ' - 'Transpiration') - assert cube.units == 'kg m-2 s-1' - assert 'positive' not in cube.attributes + assert cube.var_name == "evspsbl" + assert cube.standard_name == "water_evapotranspiration_flux" + assert cube.long_name == ( + "Evaporation Including Sublimation and Transpiration" + ) + assert cube.units == "kg m-2 s-1" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[-1.0]]]) def test_get_hfls_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'hfls') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "hfls") assert fix == [Hfls(None), AllVars(None), GenericFix(None)] def test_hfls_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'ahfl_cav' - cubes_2d[0].units = 'W m-2' - fix = get_allvars_fix('Amon', 'hfls') + cubes_2d[0].var_name = "ahfl_cav" + cubes_2d[0].units = "W m-2" + fix = get_allvars_fix("Amon", "hfls") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - fix = get_fix('Amon', 'hfls') + fix = get_fix("Amon", "hfls") cube = fix.fix_data(cube) - assert cube.var_name == 'hfls' - assert cube.standard_name == 'surface_upward_latent_heat_flux' - assert cube.long_name == 'Surface Upward Latent Heat Flux' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'up' + assert cube.var_name == "hfls" + assert cube.standard_name == "surface_upward_latent_heat_flux" + assert cube.long_name == "Surface Upward Latent Heat Flux" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" np.testing.assert_allclose(cube.data, [[[-1.0]]]) def test_get_hfss_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'hfss') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "hfss") assert fix == [Hfss(None), AllVars(None), GenericFix(None)] def test_hfss_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'ahfs_cav' - cubes_2d[0].units = 'W m-2' - fix = get_allvars_fix('Amon', 'hfss') + cubes_2d[0].var_name = "ahfs_cav" + cubes_2d[0].units = "W m-2" + fix = get_allvars_fix("Amon", "hfss") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - fix = get_fix('Amon', 'hfss') + fix = get_fix("Amon", "hfss") cube = fix.fix_data(cube) - assert cube.var_name == 'hfss' - assert cube.standard_name == 'surface_upward_sensible_heat_flux' - assert cube.long_name == 'Surface Upward Sensible Heat Flux' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'up' + assert cube.var_name == "hfss" + assert cube.standard_name == "surface_upward_sensible_heat_flux" + assert cube.long_name == "Surface Upward Sensible Heat Flux" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" np.testing.assert_allclose(cube.data, [[[-1.0]]]) def test_get_hurs_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'hurs') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "hurs") assert fix == [AllVars(None), GenericFix(None)] def test_hurs_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'rh_2m_cav' - fix = get_allvars_fix('Amon', 'hurs') + cubes_2d[0].var_name = "rh_2m_cav" + fix = 
get_allvars_fix("Amon", "hurs") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'hurs' - assert cube.standard_name == 'relative_humidity' - assert cube.long_name == 'Near-Surface Relative Humidity' - assert cube.units == '%' - assert 'positive' not in cube.attributes + assert cube.var_name == "hurs" + assert cube.standard_name == "relative_humidity" + assert cube.long_name == "Near-Surface Relative Humidity" + assert cube.units == "%" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[100.0]]]) def test_get_od550aer_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'od550aer') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "od550aer") assert fix == [Od550aer(None), AllVars(None), GenericFix(None)] def test_od550aer_fix(cubes_3d): """Test fix.""" - cubes_3d[0].var_name = 'aot_opt_TOT_550_total_cav' - fixed_cubes = fix_metadata(cubes_3d, 'Amon', 'od550aer') + cubes_3d[0].var_name = "aot_opt_TOT_550_total_cav" + fixed_cubes = fix_metadata(cubes_3d, "Amon", "od550aer") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'od550aer' - assert cube.standard_name == ('atmosphere_optical_thickness_due_to_' - 'ambient_aerosol_particles') - assert cube.long_name == 'Ambient Aerosol Optical Thickness at 550nm' - assert cube.units == '1' - assert 'positive' not in cube.attributes + assert cube.var_name == "od550aer" + assert cube.standard_name == ( + "atmosphere_optical_thickness_due_to_ambient_aerosol_particles" + ) + assert cube.long_name == "Ambient Aerosol Optical Thickness at 550nm" + assert cube.units == "1" + assert "positive" not in cube.attributes check_lambda550nm(cube) @@ -1090,465 +1232,468 @@ def test_od550aer_fix(cubes_3d): def test_get_pr_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'pr') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "pr") assert fix == [Pr(None), AllVars(None), GenericFix(None)] def test_pr_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'aprl_cav' - cubes_2d[1].var_name = 'aprc_cav' - cubes_2d[0].units = 'kg m-2 s-1' - cubes_2d[1].units = 'kg m-2 s-1' + cubes_2d[0].var_name = "aprl_cav" + cubes_2d[1].var_name = "aprc_cav" + cubes_2d[0].units = "kg m-2 s-1" + cubes_2d[1].units = "kg m-2 s-1" - fixed_cubes = fix_metadata(cubes_2d, 'Amon', 'pr') + fixed_cubes = fix_metadata(cubes_2d, "Amon", "pr") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'pr' - assert cube.standard_name == 'precipitation_flux' - assert cube.long_name == 'Precipitation' - assert cube.units == 'kg m-2 s-1' - assert 'positive' not in cube.attributes + assert cube.var_name == "pr" + assert cube.standard_name == "precipitation_flux" + assert cube.long_name == "Precipitation" + assert cube.units == "kg m-2 s-1" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[2.0]]]) def test_get_prc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'prc') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "prc") assert fix == [AllVars(None), GenericFix(None)] def test_prc_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'aprc_cav' - cubes_2d[0].units = 'kg m-2 s-1' - fix = get_allvars_fix('Amon', 'prc') + cubes_2d[0].var_name = "aprc_cav" + cubes_2d[0].units = "kg m-2 s-1" + fix = get_allvars_fix("Amon", "prc") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 
'prc' - assert cube.standard_name == 'convective_precipitation_flux' - assert cube.long_name == 'Convective Precipitation' - assert cube.units == 'kg m-2 s-1' - assert 'positive' not in cube.attributes + assert cube.var_name == "prc" + assert cube.standard_name == "convective_precipitation_flux" + assert cube.long_name == "Convective Precipitation" + assert cube.units == "kg m-2 s-1" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[1.0]]]) def test_get_prl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'prl') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "prl") assert fix == [AllVars(None), GenericFix(None)] def test_prl_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'aprl_cav' - cubes_2d[0].units = 'kg m-2 s-1' - fix = get_allvars_fix('Amon', 'prl') + cubes_2d[0].var_name = "aprl_cav" + cubes_2d[0].units = "kg m-2 s-1" + fix = get_allvars_fix("Amon", "prl") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'prl' + assert cube.var_name == "prl" assert cube.standard_name is None - assert cube.long_name == 'Large Scale Precipitation' - assert cube.units == 'kg m-2 s-1' - assert 'positive' not in cube.attributes + assert cube.long_name == "Large Scale Precipitation" + assert cube.units == "kg m-2 s-1" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[1.0]]]) def test_get_prsn_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'prsn') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "prsn") assert fix == [AllVars(None), GenericFix(None)] def test_prsn_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'aprs_cav' - cubes_2d[0].units = 'kg m-2 s-1' - fix = get_allvars_fix('Amon', 'prsn') + cubes_2d[0].var_name = "aprs_cav" + cubes_2d[0].units = "kg m-2 s-1" + fix = get_allvars_fix("Amon", "prsn") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'prsn' - assert cube.standard_name == 'snowfall_flux' - assert cube.long_name == 'Snowfall Flux' - assert cube.units == 'kg m-2 s-1' - assert 'positive' not in cube.attributes + assert cube.var_name == "prsn" + assert cube.standard_name == "snowfall_flux" + assert cube.long_name == "Snowfall Flux" + assert cube.units == "kg m-2 s-1" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[1.0]]]) def test_get_prw_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'prw') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "prw") assert fix == [AllVars(None), GenericFix(None)] def test_prw_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'qvi_cav' - cubes_2d[0].units = 'kg m-2' - fix = get_allvars_fix('Amon', 'prw') + cubes_2d[0].var_name = "qvi_cav" + cubes_2d[0].units = "kg m-2" + fix = get_allvars_fix("Amon", "prw") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'prw' - assert cube.standard_name == 'atmosphere_mass_content_of_water_vapor' - assert cube.long_name == 'Water Vapor Path' - assert cube.units == 'kg m-2' - assert 'positive' not in cube.attributes + assert cube.var_name == "prw" + assert cube.standard_name == "atmosphere_mass_content_of_water_vapor" + assert cube.long_name == "Water Vapor Path" + assert cube.units == "kg m-2" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[1.0]]]) def test_get_ps_fix(): 
"""Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'ps') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "ps") assert fix == [AllVars(None), GenericFix(None)] def test_ps_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'aps_cav' - cubes_2d[0].units = 'Pa' - fix = get_allvars_fix('Amon', 'ps') + cubes_2d[0].var_name = "aps_cav" + cubes_2d[0].units = "Pa" + fix = get_allvars_fix("Amon", "ps") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'ps' - assert cube.standard_name == 'surface_air_pressure' - assert cube.long_name == 'Surface Air Pressure' - assert cube.units == 'Pa' - assert 'positive' not in cube.attributes + assert cube.var_name == "ps" + assert cube.standard_name == "surface_air_pressure" + assert cube.long_name == "Surface Air Pressure" + assert cube.units == "Pa" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[1.0]]]) def test_get_psl_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'psl') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "psl") assert fix == [AllVars(None), GenericFix(None)] def test_psl_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'slp_cav' - cubes_2d[0].units = 'Pa' - fix = get_allvars_fix('Amon', 'psl') + cubes_2d[0].var_name = "slp_cav" + cubes_2d[0].units = "Pa" + fix = get_allvars_fix("Amon", "psl") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'psl' - assert cube.standard_name == 'air_pressure_at_mean_sea_level' - assert cube.long_name == 'Sea Level Pressure' - assert cube.units == 'Pa' - assert 'positive' not in cube.attributes + assert cube.var_name == "psl" + assert cube.standard_name == "air_pressure_at_mean_sea_level" + assert cube.long_name == "Sea Level Pressure" + assert cube.units == "Pa" + assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, [[[1.0]]]) def test_get_rlds_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rlds') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rlds") assert fix == [Rlds(None), AllVars(None), GenericFix(None)] def test_rlds_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'flxtbot_cav' - cubes_2d[1].var_name = 'tradsu_cav' - cubes_2d[0].units = 'W m-2' - cubes_2d[1].units = 'W m-2' - fixed_cubes = fix_metadata(cubes_2d, 'Amon', 'rlds') + cubes_2d[0].var_name = "flxtbot_cav" + cubes_2d[1].var_name = "tradsu_cav" + cubes_2d[0].units = "W m-2" + cubes_2d[1].units = "W m-2" + fixed_cubes = fix_metadata(cubes_2d, "Amon", "rlds") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'rlds' - assert cube.standard_name == 'surface_downwelling_longwave_flux_in_air' - assert cube.long_name == 'Surface Downwelling Longwave Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'down' + assert cube.var_name == "rlds" + assert cube.standard_name == "surface_downwelling_longwave_flux_in_air" + assert cube.long_name == "Surface Downwelling Longwave Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "down" np.testing.assert_allclose(cube.data, [[[0.0]]]) def test_get_rlus_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rlus') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rlus") assert fix == [Rlus(None), AllVars(None), GenericFix(None)] def test_rlus_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'tradsu_cav' 
- cubes_2d[0].units = 'W m-2' - fix = get_allvars_fix('Amon', 'rlus') + cubes_2d[0].var_name = "tradsu_cav" + cubes_2d[0].units = "W m-2" + fix = get_allvars_fix("Amon", "rlus") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - fix = get_fix('Amon', 'rlus') + fix = get_fix("Amon", "rlus") cube = fix.fix_data(cube) - assert cube.var_name == 'rlus' - assert cube.standard_name == 'surface_upwelling_longwave_flux_in_air' - assert cube.long_name == 'Surface Upwelling Longwave Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'up' + assert cube.var_name == "rlus" + assert cube.standard_name == "surface_upwelling_longwave_flux_in_air" + assert cube.long_name == "Surface Upwelling Longwave Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" np.testing.assert_allclose(cube.data, [[[-1.0]]]) def test_get_rlut_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rlut') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rlut") assert fix == [Rlut(None), AllVars(None), GenericFix(None)] def test_rlut_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'flxttop_cav' - cubes_2d[0].units = 'W m-2' - fix = get_allvars_fix('Amon', 'rlut') + cubes_2d[0].var_name = "flxttop_cav" + cubes_2d[0].units = "W m-2" + fix = get_allvars_fix("Amon", "rlut") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - fix = get_fix('Amon', 'rlut') + fix = get_fix("Amon", "rlut") cube = fix.fix_data(cube) - assert cube.var_name == 'rlut' - assert cube.standard_name == 'toa_outgoing_longwave_flux' - assert cube.long_name == 'TOA Outgoing Longwave Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'up' + assert cube.var_name == "rlut" + assert cube.standard_name == "toa_outgoing_longwave_flux" + assert cube.long_name == "TOA Outgoing Longwave Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" np.testing.assert_allclose(cube.data, [[[-1.0]]]) def test_get_rlutcs_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rlutcs') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rlutcs") assert fix == [Rlutcs(None), AllVars(None), GenericFix(None)] def test_rlutcs_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'flxtftop_cav' - cubes_2d[0].units = 'W m-2' - fix = get_allvars_fix('Amon', 'rlutcs') + cubes_2d[0].var_name = "flxtftop_cav" + cubes_2d[0].units = "W m-2" + fix = get_allvars_fix("Amon", "rlutcs") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - fix = get_fix('Amon', 'rlutcs') + fix = get_fix("Amon", "rlutcs") cube = fix.fix_data(cube) - assert cube.var_name == 'rlutcs' - assert cube.standard_name == ('toa_outgoing_longwave_flux_assuming_clear_' - 'sky') - assert cube.long_name == 'TOA Outgoing Clear-Sky Longwave Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'up' + assert cube.var_name == "rlutcs" + assert cube.standard_name == ( + "toa_outgoing_longwave_flux_assuming_clear_sky" + ) + assert cube.long_name == "TOA Outgoing Clear-Sky Longwave Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" np.testing.assert_allclose(cube.data, [[[-1.0]]]) def test_get_rsds_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rsds') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rsds") assert fix == [Rsds(None), AllVars(None), 
GenericFix(None)] def test_rsds_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'flxsbot_cav' - cubes_2d[1].var_name = 'sradsu_cav' - cubes_2d[0].units = 'W m-2' - cubes_2d[1].units = 'W m-2' - fixed_cubes = fix_metadata(cubes_2d, 'Amon', 'rsds') + cubes_2d[0].var_name = "flxsbot_cav" + cubes_2d[1].var_name = "sradsu_cav" + cubes_2d[0].units = "W m-2" + cubes_2d[1].units = "W m-2" + fixed_cubes = fix_metadata(cubes_2d, "Amon", "rsds") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'rsds' - assert cube.standard_name == 'surface_downwelling_shortwave_flux_in_air' - assert cube.long_name == 'Surface Downwelling Shortwave Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'down' + assert cube.var_name == "rsds" + assert cube.standard_name == "surface_downwelling_shortwave_flux_in_air" + assert cube.long_name == "Surface Downwelling Shortwave Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "down" np.testing.assert_allclose(cube.data, [[[0.0]]]) def test_get_rsdt_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rsdt') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rsdt") assert fix == [Rsdt(None), AllVars(None), GenericFix(None)] def test_rsdt_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'flxstop_cav' - cubes_2d[1].var_name = 'srad0u_cav' - cubes_2d[0].units = 'W m-2' - cubes_2d[1].units = 'W m-2' - fixed_cubes = fix_metadata(cubes_2d, 'Amon', 'rsdt') + cubes_2d[0].var_name = "flxstop_cav" + cubes_2d[1].var_name = "srad0u_cav" + cubes_2d[0].units = "W m-2" + cubes_2d[1].units = "W m-2" + fixed_cubes = fix_metadata(cubes_2d, "Amon", "rsdt") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'rsdt' - assert cube.standard_name == 'toa_incoming_shortwave_flux' - assert cube.long_name == 'TOA Incident Shortwave Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'down' + assert cube.var_name == "rsdt" + assert cube.standard_name == "toa_incoming_shortwave_flux" + assert cube.long_name == "TOA Incident Shortwave Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "down" np.testing.assert_allclose(cube.data, [[[0.0]]]) def test_get_rsus_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rsus') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rsus") assert fix == [Rsus(None), AllVars(None), GenericFix(None)] def test_rsus_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'sradsu_cav' - cubes_2d[0].units = 'W m-2' - fix = get_allvars_fix('Amon', 'rsus') + cubes_2d[0].var_name = "sradsu_cav" + cubes_2d[0].units = "W m-2" + fix = get_allvars_fix("Amon", "rsus") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - fix = get_fix('Amon', 'rsus') + fix = get_fix("Amon", "rsus") cube = fix.fix_data(cube) - assert cube.var_name == 'rsus' - assert cube.standard_name == 'surface_upwelling_shortwave_flux_in_air' - assert cube.long_name == 'Surface Upwelling Shortwave Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'up' + assert cube.var_name == "rsus" + assert cube.standard_name == "surface_upwelling_shortwave_flux_in_air" + assert cube.long_name == "Surface Upwelling Shortwave Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" np.testing.assert_allclose(cube.data, [[[-1.0]]]) def test_get_rsut_fix(): """Test getting of fix.""" - fix = 
Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rsut') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rsut") assert fix == [Rsut(None), AllVars(None), GenericFix(None)] def test_rsut_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'srad0u_cav' - cubes_2d[0].units = 'W m-2' - fix = get_allvars_fix('Amon', 'rsut') + cubes_2d[0].var_name = "srad0u_cav" + cubes_2d[0].units = "W m-2" + fix = get_allvars_fix("Amon", "rsut") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - fix = get_fix('Amon', 'rsut') + fix = get_fix("Amon", "rsut") cube = fix.fix_data(cube) - assert cube.var_name == 'rsut' - assert cube.standard_name == 'toa_outgoing_shortwave_flux' - assert cube.long_name == 'TOA Outgoing Shortwave Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'up' + assert cube.var_name == "rsut" + assert cube.standard_name == "toa_outgoing_shortwave_flux" + assert cube.long_name == "TOA Outgoing Shortwave Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" np.testing.assert_allclose(cube.data, [[[-1.0]]]) def test_get_rsutcs_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rsutcs') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rsutcs") assert fix == [Rsutcs(None), AllVars(None), GenericFix(None)] def test_rsutcs_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'flxusftop_cav' - cubes_2d[0].units = 'W m-2' - fix = get_allvars_fix('Amon', 'rsutcs') + cubes_2d[0].var_name = "flxusftop_cav" + cubes_2d[0].units = "W m-2" + fix = get_allvars_fix("Amon", "rsutcs") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - fix = get_fix('Amon', 'rsutcs') + fix = get_fix("Amon", "rsutcs") cube = fix.fix_data(cube) - assert cube.var_name == 'rsutcs' - assert cube.standard_name == ('toa_outgoing_shortwave_flux_assuming_clear_' - 'sky') - assert cube.long_name == 'TOA Outgoing Clear-Sky Shortwave Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'up' + assert cube.var_name == "rsutcs" + assert cube.standard_name == ( + "toa_outgoing_shortwave_flux_assuming_clear_sky" + ) + assert cube.long_name == "TOA Outgoing Clear-Sky Shortwave Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" np.testing.assert_allclose(cube.data, [[[-1.0]]]) def test_get_rtmt_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'rtmt') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "rtmt") assert fix == [Rtmt(None), AllVars(None), GenericFix(None)] def test_rtmt_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'flxttop_cav' - cubes_2d[1].var_name = 'flxstop_cav' - cubes_2d[0].units = 'W m-2' - cubes_2d[1].units = 'W m-2' - fixed_cubes = fix_metadata(cubes_2d, 'Amon', 'rtmt') + cubes_2d[0].var_name = "flxttop_cav" + cubes_2d[1].var_name = "flxstop_cav" + cubes_2d[0].units = "W m-2" + cubes_2d[1].units = "W m-2" + fixed_cubes = fix_metadata(cubes_2d, "Amon", "rtmt") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'rtmt' - assert cube.standard_name == ('net_downward_radiative_flux_at_top_of_' - 'atmosphere_model') - assert cube.long_name == 'Net Downward Radiative Flux at Top of Model' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'down' + assert cube.var_name == "rtmt" + assert cube.standard_name == ( + "net_downward_radiative_flux_at_top_of_atmosphere_model" + ) + assert cube.long_name == "Net Downward 
Radiative Flux at Top of Model" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "down" np.testing.assert_allclose(cube.data, [[[2.0]]]) def test_get_sfcWind_fix(): # noqa: N802 """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'sfcWind') + fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "sfcWind") assert fix == [AllVars(None), GenericFix(None)] def test_sfcWind_fix(cubes_2d): # noqa: N802 """Test fix.""" - cubes_2d[0].var_name = 'wind10_cav' - cubes_2d[0].units = 'm s-1' - fix = get_allvars_fix('Amon', 'sfcWind') + cubes_2d[0].var_name = "wind10_cav" + cubes_2d[0].units = "m s-1" + fix = get_allvars_fix("Amon", "sfcWind") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'sfcWind' - assert cube.standard_name == 'wind_speed' - assert cube.long_name == 'Near-Surface Wind Speed' - assert cube.units == 'm s-1' - assert 'positive' not in cube.attributes + assert cube.var_name == "sfcWind" + assert cube.standard_name == "wind_speed" + assert cube.long_name == "Near-Surface Wind Speed" + assert cube.units == "m s-1" + assert "positive" not in cube.attributes check_heightxm(cube, 10.0) @@ -1557,23 +1702,23 @@ def test_sfcWind_fix(cubes_2d): # noqa: N802 def test_get_siconc_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'SImon', 'siconc') + fix = Fix.get_fixes("EMAC", "EMAC", "SImon", "siconc") assert fix == [AllVars(None), GenericFix(None)] def test_siconc_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'seaice_cav' - fix = get_allvars_fix('SImon', 'siconc') + cubes_2d[0].var_name = "seaice_cav" + fix = get_allvars_fix("SImon", "siconc") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'siconc' - assert cube.standard_name == 'sea_ice_area_fraction' - assert cube.long_name == 'Sea-Ice Area Percentage (Ocean Grid)' - assert cube.units == '%' - assert 'positive' not in cube.attributes + assert cube.var_name == "siconc" + assert cube.standard_name == "sea_ice_area_fraction" + assert cube.long_name == "Sea-Ice Area Percentage (Ocean Grid)" + assert cube.units == "%" + assert "positive" not in cube.attributes check_typesi(cube) @@ -1582,23 +1727,23 @@ def test_siconc_fix(cubes_2d): def test_get_siconca_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'SImon', 'siconca') + fix = Fix.get_fixes("EMAC", "EMAC", "SImon", "siconca") assert fix == [AllVars(None), GenericFix(None)] def test_siconca_fix(cubes_2d): """Test fix.""" - cubes_2d[0].var_name = 'seaice_cav' - fix = get_allvars_fix('SImon', 'siconca') + cubes_2d[0].var_name = "seaice_cav" + fix = get_allvars_fix("SImon", "siconca") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'siconca' - assert cube.standard_name == 'sea_ice_area_fraction' - assert cube.long_name == 'Sea-Ice Area Percentage (Atmospheric Grid)' - assert cube.units == '%' - assert 'positive' not in cube.attributes + assert cube.var_name == "siconca" + assert cube.standard_name == "sea_ice_area_fraction" + assert cube.long_name == "Sea-Ice Area Percentage (Atmospheric Grid)" + assert cube.units == "%" + assert "positive" not in cube.attributes check_typesi(cube) @@ -1607,28 +1752,28 @@ def test_siconca_fix(cubes_2d): def test_get_sithick_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('EMAC', 'EMAC', 'SImon', 'sithick') + fix = Fix.get_fixes("EMAC", "EMAC", "SImon", "sithick") 
     assert fix == [Sithick(None), AllVars(None), GenericFix(None)]


 def test_sithick_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'siced_cav'
-    cubes_2d[0].units = 'm'
-    fix = get_allvars_fix('SImon', 'sithick')
+    cubes_2d[0].var_name = "siced_cav"
+    cubes_2d[0].units = "m"
+    fix = get_allvars_fix("SImon", "sithick")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    fix = get_fix('SImon', 'sithick')
+    fix = get_fix("SImon", "sithick")
     cube = fix.fix_data(cube)
-    assert cube.var_name == 'sithick'
-    assert cube.standard_name == 'sea_ice_thickness'
-    assert cube.long_name == 'Sea Ice Thickness'
-    assert cube.units == 'm'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "sithick"
+    assert cube.standard_name == "sea_ice_thickness"
+    assert cube.long_name == "Sea Ice Thickness"
+    assert cube.units == "m"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [[[1.0]]])
     np.testing.assert_equal(np.ma.getmaskarray(cube.data), [[[False]]])

@@ -1642,24 +1787,24 @@ def test_sithick_fix(cubes_2d):

 def test_get_tas_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'tas')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "tas")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_tas_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'temp2_cav'
-    cubes_2d[0].units = 'K'
-    fix = get_allvars_fix('Amon', 'tas')
+    cubes_2d[0].var_name = "temp2_cav"
+    cubes_2d[0].units = "K"
+    fix = get_allvars_fix("Amon", "tas")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'tas'
-    assert cube.standard_name == 'air_temperature'
-    assert cube.long_name == 'Near-Surface Air Temperature'
-    assert cube.units == 'K'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "tas"
+    assert cube.standard_name == "air_temperature"
+    assert cube.long_name == "Near-Surface Air Temperature"
+    assert cube.units == "K"
+    assert "positive" not in cube.attributes
     check_heightxm(cube, 2.0)

@@ -1668,24 +1813,24 @@ def test_tas_fix(cubes_2d):

 def test_get_tasmax_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'tasmax')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "tasmax")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_tasmax_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'temp2_max'
-    cubes_2d[0].units = 'K'
-    fix = get_allvars_fix('Amon', 'tasmax')
+    cubes_2d[0].var_name = "temp2_max"
+    cubes_2d[0].units = "K"
+    fix = get_allvars_fix("Amon", "tasmax")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'tasmax'
-    assert cube.standard_name == 'air_temperature'
-    assert cube.long_name == 'Daily Maximum Near-Surface Air Temperature'
-    assert cube.units == 'K'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "tasmax"
+    assert cube.standard_name == "air_temperature"
+    assert cube.long_name == "Daily Maximum Near-Surface Air Temperature"
+    assert cube.units == "K"
+    assert "positive" not in cube.attributes
     check_heightxm(cube, 2.0)

@@ -1694,24 +1839,24 @@ def test_tasmax_fix(cubes_2d):

 def test_get_tasmin_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'tasmin')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "tasmin")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_tasmin_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'temp2_min'
-    cubes_2d[0].units = 'K'
-    fix = get_allvars_fix('Amon', 'tasmin')
+    cubes_2d[0].var_name = "temp2_min"
+    cubes_2d[0].units = "K"
+    fix = get_allvars_fix("Amon", "tasmin")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'tasmin'
-    assert cube.standard_name == 'air_temperature'
-    assert cube.long_name == 'Daily Minimum Near-Surface Air Temperature'
-    assert cube.units == 'K'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "tasmin"
+    assert cube.standard_name == "air_temperature"
+    assert cube.long_name == "Daily Minimum Near-Surface Air Temperature"
+    assert cube.units == "K"
+    assert "positive" not in cube.attributes
     check_heightxm(cube, 2.0)

@@ -1720,144 +1865,145 @@ def test_tasmin_fix(cubes_2d):

 def test_get_tauu_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'tauu')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "tauu")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_tauu_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'ustr_cav'
-    cubes_2d[0].units = 'Pa'
-    fix = get_allvars_fix('Amon', 'tauu')
+    cubes_2d[0].var_name = "ustr_cav"
+    cubes_2d[0].units = "Pa"
+    fix = get_allvars_fix("Amon", "tauu")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'tauu'
-    assert cube.standard_name == 'surface_downward_eastward_stress'
-    assert cube.long_name == 'Surface Downward Eastward Wind Stress'
-    assert cube.units == 'Pa'
-    assert cube.attributes['positive'] == 'down'
+    assert cube.var_name == "tauu"
+    assert cube.standard_name == "surface_downward_eastward_stress"
+    assert cube.long_name == "Surface Downward Eastward Wind Stress"
+    assert cube.units == "Pa"
+    assert cube.attributes["positive"] == "down"

     np.testing.assert_allclose(cube.data, [[[1.0]]])


 def test_get_tauv_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'tauv')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "tauv")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_tauv_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'vstr_cav'
-    cubes_2d[0].units = 'Pa'
-    fix = get_allvars_fix('Amon', 'tauv')
+    cubes_2d[0].var_name = "vstr_cav"
+    cubes_2d[0].units = "Pa"
+    fix = get_allvars_fix("Amon", "tauv")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'tauv'
-    assert cube.standard_name == 'surface_downward_northward_stress'
-    assert cube.long_name == 'Surface Downward Northward Wind Stress'
-    assert cube.units == 'Pa'
-    assert cube.attributes['positive'] == 'down'
+    assert cube.var_name == "tauv"
+    assert cube.standard_name == "surface_downward_northward_stress"
+    assert cube.long_name == "Surface Downward Northward Wind Stress"
+    assert cube.units == "Pa"
+    assert cube.attributes["positive"] == "down"

     np.testing.assert_allclose(cube.data, [[[1.0]]])


 def test_get_tos_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Omon', 'tos')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Omon", "tos")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_tos_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'tsw'
-    cubes_2d[0].units = 'degC'
-    fix = get_allvars_fix('Omon', 'tos')
+    cubes_2d[0].var_name = "tsw"
+    cubes_2d[0].units = "degC"
+    fix = get_allvars_fix("Omon", "tos")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'tos'
-    assert cube.standard_name == 'sea_surface_temperature'
-    assert cube.long_name == 'Sea Surface Temperature'
-    assert cube.units == 'degC'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "tos"
+    assert cube.standard_name == "sea_surface_temperature"
+    assert cube.long_name == "Sea Surface Temperature"
+    assert cube.units == "degC"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [[[1.0]]])


 def test_get_toz_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'AERmon', 'toz')
+    fix = Fix.get_fixes("EMAC", "EMAC", "AERmon", "toz")
     assert fix == [Toz(None), AllVars(None), GenericFix(None)]


 def test_toz_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'toz'
-    cubes_2d[0].units = 'DU'
-    fixed_cubes = fix_metadata(cubes_2d, 'AERmon', 'toz')
+    cubes_2d[0].var_name = "toz"
+    cubes_2d[0].units = "DU"
+    fixed_cubes = fix_metadata(cubes_2d, "AERmon", "toz")

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'toz'
-    assert cube.standard_name == ('equivalent_thickness_at_stp_of_atmosphere_'
-                                  'ozone_content')
-    assert cube.long_name == 'Total Column Ozone'
-    assert cube.units == 'm'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "toz"
+    assert cube.standard_name == (
+        "equivalent_thickness_at_stp_of_atmosphere_ozone_content"
+    )
+    assert cube.long_name == "Total Column Ozone"
+    assert cube.units == "m"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [[[1e-5]]])


 def test_get_ts_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'ts')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "ts")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_ts_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'tsurf_cav'
-    cubes_2d[0].units = 'K'
-    fix = get_allvars_fix('Amon', 'ts')
+    cubes_2d[0].var_name = "tsurf_cav"
+    cubes_2d[0].units = "K"
+    fix = get_allvars_fix("Amon", "ts")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'ts'
-    assert cube.standard_name == 'surface_temperature'
-    assert cube.long_name == 'Surface Temperature'
-    assert cube.units == 'K'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "ts"
+    assert cube.standard_name == "surface_temperature"
+    assert cube.long_name == "Surface Temperature"
+    assert cube.units == "K"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [[[1.0]]])


 def test_get_uas_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'uas')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "uas")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_uas_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'u10_cav'
-    cubes_2d[0].units = 'm s-1'
-    fix = get_allvars_fix('Amon', 'uas')
+    cubes_2d[0].var_name = "u10_cav"
+    cubes_2d[0].units = "m s-1"
+    fix = get_allvars_fix("Amon", "uas")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'uas'
-    assert cube.standard_name == 'eastward_wind'
-    assert cube.long_name == 'Eastward Near-Surface Wind'
-    assert cube.units == 'm s-1'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "uas"
+    assert cube.standard_name == "eastward_wind"
+    assert cube.long_name == "Eastward Near-Surface Wind"
+    assert cube.units == "m s-1"
+    assert "positive" not in cube.attributes
     check_heightxm(cube, 10.0)

@@ -1866,24 +2012,24 @@ def test_uas_fix(cubes_2d):

 def test_get_vas_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'vas')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "vas")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_vas_fix(cubes_2d):
     """Test fix."""
-    cubes_2d[0].var_name = 'v10_cav'
-    cubes_2d[0].units = 'm s-1'
-    fix = get_allvars_fix('Amon', 'vas')
+    cubes_2d[0].var_name = "v10_cav"
+    cubes_2d[0].units = "m s-1"
+    fix = get_allvars_fix("Amon", "vas")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'vas'
-    assert cube.standard_name == 'northward_wind'
-    assert cube.long_name == 'Northward Near-Surface Wind'
-    assert cube.units == 'm s-1'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "vas"
+    assert cube.standard_name == "northward_wind"
+    assert cube.long_name == "Northward Near-Surface Wind"
+    assert cube.units == "m s-1"
+    assert "positive" not in cube.attributes
     check_heightxm(cube, 10.0)

@@ -1895,457 +2041,461 @@ def test_vas_fix(cubes_2d):

 def test_get_MP_BC_tot_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_BC_tot')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_BC_tot")
     assert fix == [MP_BC_tot(None), AllVars(None), GenericFix(None)]


 def test_MP_BC_tot_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_BC_ki_cav'
-    cubes_1d[1].var_name = 'MP_BC_ks_cav'
-    cubes_1d[2].var_name = 'MP_BC_as_cav'
-    cubes_1d[3].var_name = 'MP_BC_cs_cav'
-    cubes_1d[0].units = 'kg'
-    cubes_1d[1].units = 'kg'
-    cubes_1d[2].units = 'kg'
-    cubes_1d[3].units = 'kg'
-    fixed_cubes = fix_metadata(cubes_1d, 'TRAC10hr', 'MP_BC_tot')
+    cubes_1d[0].var_name = "MP_BC_ki_cav"
+    cubes_1d[1].var_name = "MP_BC_ks_cav"
+    cubes_1d[2].var_name = "MP_BC_as_cav"
+    cubes_1d[3].var_name = "MP_BC_cs_cav"
+    cubes_1d[0].units = "kg"
+    cubes_1d[1].units = "kg"
+    cubes_1d[2].units = "kg"
+    cubes_1d[3].units = "kg"
+    fixed_cubes = fix_metadata(cubes_1d, "TRAC10hr", "MP_BC_tot")

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_BC_tot'
+    assert cube.var_name == "MP_BC_tot"
     assert cube.standard_name is None
-    assert cube.long_name == ('total mass of black carbon (sum of all aerosol '
-                              'modes)')
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == (
+        "total mass of black carbon (sum of all aerosol modes)"
+    )
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [4.0])


 def test_get_MP_CFCl3_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_CFCl3')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_CFCl3")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_CFCl3_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_CFCl3_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_CFCl3')
+    cubes_1d[0].var_name = "MP_CFCl3_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_CFCl3")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_CFCl3'
+    assert cube.var_name == "MP_CFCl3"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of CFCl3 (CFC-11)'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of CFCl3 (CFC-11)"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_ClOX_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_ClOX')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_ClOX")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_ClOX_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_ClOX_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_ClOX')
+    cubes_1d[0].var_name = "MP_ClOX_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_ClOX")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_ClOX'
+    assert cube.var_name == "MP_ClOX"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of ClOX'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of ClOX"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_CH4_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_CH4')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_CH4")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_CH4_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_CH4_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_CH4')
+    cubes_1d[0].var_name = "MP_CH4_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_CH4")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_CH4'
+    assert cube.var_name == "MP_CH4"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of CH4'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of CH4"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_CO_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_CO')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_CO")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_CO_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_CO_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_CO')
+    cubes_1d[0].var_name = "MP_CO_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_CO")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_CO'
+    assert cube.var_name == "MP_CO"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of CO'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of CO"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_CO2_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_CO2')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_CO2")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_CO2_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_CO2_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_CO2')
+    cubes_1d[0].var_name = "MP_CO2_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_CO2")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_CO2'
+    assert cube.var_name == "MP_CO2"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of CO2'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of CO2"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_DU_tot_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_DU_tot')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_DU_tot")
     assert fix == [MP_DU_tot(None), AllVars(None), GenericFix(None)]


 def test_MP_DU_tot_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_DU_ai_cav'
-    cubes_1d[1].var_name = 'MP_DU_as_cav'
-    cubes_1d[2].var_name = 'MP_DU_ci_cav'
-    cubes_1d[3].var_name = 'MP_DU_cs_cav'
-    cubes_1d[0].units = 'kg'
-    cubes_1d[1].units = 'kg'
-    cubes_1d[2].units = 'kg'
-    cubes_1d[3].units = 'kg'
+    cubes_1d[0].var_name = "MP_DU_ai_cav"
+    cubes_1d[1].var_name = "MP_DU_as_cav"
+    cubes_1d[2].var_name = "MP_DU_ci_cav"
+    cubes_1d[3].var_name = "MP_DU_cs_cav"
+    cubes_1d[0].units = "kg"
+    cubes_1d[1].units = "kg"
+    cubes_1d[2].units = "kg"
+    cubes_1d[3].units = "kg"

-    fixed_cubes = fix_metadata(cubes_1d, 'TRAC10hr', 'MP_DU_tot')
+    fixed_cubes = fix_metadata(cubes_1d, "TRAC10hr", "MP_DU_tot")

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_DU_tot'
+    assert cube.var_name == "MP_DU_tot"
     assert cube.standard_name is None
-    assert cube.long_name == ('total mass of mineral dust (sum of all aerosol '
-                              'modes)')
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == (
+        "total mass of mineral dust (sum of all aerosol modes)"
+    )
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [4.0])


 def test_get_MP_N2O_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_N2O')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_N2O")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_N2O_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_N2O_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_N2O')
+    cubes_1d[0].var_name = "MP_N2O_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_N2O")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_N2O'
+    assert cube.var_name == "MP_N2O"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of N2O'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of N2O"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_NH3_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_NH3')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_NH3")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_NH3_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_NH3_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_NH3')
+    cubes_1d[0].var_name = "MP_NH3_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_NH3")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_NH3'
+    assert cube.var_name == "MP_NH3"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of NH3'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of NH3"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_NO_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_NO')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_NO")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_NO_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_NO_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_NO')
+    cubes_1d[0].var_name = "MP_NO_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_NO")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_NO'
+    assert cube.var_name == "MP_NO"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of NO'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of NO"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_NO2_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_NO2')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_NO2")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_NO2_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_NO2_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_NO2')
+    cubes_1d[0].var_name = "MP_NO2_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_NO2")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_NO2'
+    assert cube.var_name == "MP_NO2"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of NO2'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of NO2"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_NOX_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_NOX')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_NOX")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_NOX_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_NOX_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_NOX')
+    cubes_1d[0].var_name = "MP_NOX_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_NOX")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_NOX'
+    assert cube.var_name == "MP_NOX"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of NOX (NO+NO2)'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of NOX (NO+NO2)"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_O3_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_O3')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_O3")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_O3_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_O3_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_O3')
+    cubes_1d[0].var_name = "MP_O3_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_O3")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_O3'
+    assert cube.var_name == "MP_O3"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of O3'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of O3"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_OH_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_OH')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_OH")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_OH_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_OH_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_OH')
+    cubes_1d[0].var_name = "MP_OH_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_OH")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_OH'
+    assert cube.var_name == "MP_OH"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of OH'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of OH"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_S_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_S')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_S")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_S_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_S_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_S')
+    cubes_1d[0].var_name = "MP_S_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_S")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_S'
+    assert cube.var_name == "MP_S"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of S'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of S"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_SO2_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_SO2')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_SO2")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_MP_SO2_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_SO2_cav'
-    cubes_1d[0].units = 'kg'
-    fix = get_allvars_fix('TRAC10hr', 'MP_SO2')
+    cubes_1d[0].var_name = "MP_SO2_cav"
+    cubes_1d[0].units = "kg"
+    fix = get_allvars_fix("TRAC10hr", "MP_SO2")
     fixed_cubes = fix.fix_metadata(cubes_1d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_SO2'
+    assert cube.var_name == "MP_SO2"
     assert cube.standard_name is None
-    assert cube.long_name == 'total mass of SO2'
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == "total mass of SO2"
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [1.0])


 def test_get_MP_SO4mm_tot_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_SO4mm_tot')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_SO4mm_tot")
     assert fix == [MP_SO4mm_tot(None), AllVars(None), GenericFix(None)]


 def test_MP_SO4mm_tot_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_SO4mm_ns_cav'
-    cubes_1d[1].var_name = 'MP_SO4mm_ks_cav'
-    cubes_1d[2].var_name = 'MP_SO4mm_as_cav'
-    cubes_1d[3].var_name = 'MP_SO4mm_cs_cav'
-    cubes_1d[0].units = 'kg'
-    cubes_1d[1].units = 'kg'
-    cubes_1d[2].units = 'kg'
-    cubes_1d[3].units = 'kg'
+    cubes_1d[0].var_name = "MP_SO4mm_ns_cav"
+    cubes_1d[1].var_name = "MP_SO4mm_ks_cav"
+    cubes_1d[2].var_name = "MP_SO4mm_as_cav"
+    cubes_1d[3].var_name = "MP_SO4mm_cs_cav"
+    cubes_1d[0].units = "kg"
+    cubes_1d[1].units = "kg"
+    cubes_1d[2].units = "kg"
+    cubes_1d[3].units = "kg"

-    fixed_cubes = fix_metadata(cubes_1d, 'TRAC10hr', 'MP_SO4mm_tot')
+    fixed_cubes = fix_metadata(cubes_1d, "TRAC10hr", "MP_SO4mm_tot")

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_SO4mm_tot'
+    assert cube.var_name == "MP_SO4mm_tot"
     assert cube.standard_name is None
-    assert cube.long_name == ('total mass of aerosol sulfate (sum of all '
-                              'aerosol modes)')
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == (
+        "total mass of aerosol sulfate (sum of all aerosol modes)"
+    )
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [4.0])


 def test_get_MP_SS_tot_fix():  # noqa: N802
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'TRAC10hr', 'MP_SS_tot')
+    fix = Fix.get_fixes("EMAC", "EMAC", "TRAC10hr", "MP_SS_tot")
     assert fix == [MP_SS_tot(None), AllVars(None), GenericFix(None)]


 def test_MP_SS_tot_fix(cubes_1d):  # noqa: N802
     """Test fix."""
-    cubes_1d[0].var_name = 'MP_SS_ks_cav'
-    cubes_1d[1].var_name = 'MP_SS_as_cav'
-    cubes_1d[2].var_name = 'MP_SS_cs_cav'
-    cubes_1d[0].units = 'kg'
-    cubes_1d[1].units = 'kg'
-    cubes_1d[2].units = 'kg'
+    cubes_1d[0].var_name = "MP_SS_ks_cav"
+    cubes_1d[1].var_name = "MP_SS_as_cav"
+    cubes_1d[2].var_name = "MP_SS_cs_cav"
+    cubes_1d[0].units = "kg"
+    cubes_1d[1].units = "kg"
+    cubes_1d[2].units = "kg"

-    fixed_cubes = fix_metadata(cubes_1d, 'TRAC10hr', 'MP_SS_tot')
+    fixed_cubes = fix_metadata(cubes_1d, "TRAC10hr", "MP_SS_tot")

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'MP_SS_tot'
+    assert cube.var_name == "MP_SS_tot"
     assert cube.standard_name is None
-    assert cube.long_name == ('total mass of sea salt (sum of all aerosol '
-                              'modes)')
-    assert cube.units == 'kg'
-    assert 'positive' not in cube.attributes
+    assert cube.long_name == (
+        "total mass of sea salt (sum of all aerosol modes)"
+    )
+    assert cube.units == "kg"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [3.0])

@@ -2355,23 +2505,23 @@ def test_MP_SS_tot_fix(cubes_1d):  # noqa: N802

 def test_get_cl_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'cl')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "cl")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_cl_fix(cubes_3d):
     """Test fix."""
-    cubes_3d[0].var_name = 'aclcac_cav'
-    fix = get_allvars_fix('Amon', 'cl')
+    cubes_3d[0].var_name = "aclcac_cav"
+    fix = get_allvars_fix("Amon", "cl")
     fixed_cubes = fix.fix_metadata(cubes_3d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'cl'
-    assert cube.standard_name == 'cloud_area_fraction_in_atmosphere_layer'
-    assert cube.long_name == 'Percentage Cloud Cover'
-    assert cube.units == '%'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "cl"
+    assert cube.standard_name == "cloud_area_fraction_in_atmosphere_layer"
+    assert cube.long_name == "Percentage Cloud Cover"
+    assert cube.units == "%"
+    assert "positive" not in cube.attributes
     check_hybrid_z(cube)

@@ -2380,24 +2530,24 @@ def test_cl_fix(cubes_3d):

 def test_get_cli_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'cli')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "cli")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_cli_fix(cubes_3d):
     """Test fix."""
-    cubes_3d[0].var_name = 'xim1_cav'
-    cubes_3d[0].units = 'kg kg-1'
-    fix = get_allvars_fix('Amon', 'cli')
+    cubes_3d[0].var_name = "xim1_cav"
+    cubes_3d[0].units = "kg kg-1"
+    fix = get_allvars_fix("Amon", "cli")
     fixed_cubes = fix.fix_metadata(cubes_3d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'cli'
-    assert cube.standard_name == 'mass_fraction_of_cloud_ice_in_air'
-    assert cube.long_name == 'Mass Fraction of Cloud Ice'
-    assert cube.units == 'kg kg-1'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "cli"
+    assert cube.standard_name == "mass_fraction_of_cloud_ice_in_air"
+    assert cube.long_name == "Mass Fraction of Cloud Ice"
+    assert cube.units == "kg kg-1"
+    assert "positive" not in cube.attributes
     check_hybrid_z(cube)

@@ -2406,24 +2556,24 @@ def test_cli_fix(cubes_3d):

 def test_get_clw_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'clw')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "clw")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_clw_fix(cubes_3d):
     """Test fix."""
-    cubes_3d[0].var_name = 'xlm1_cav'
-    cubes_3d[0].units = 'kg kg-1'
-    fix = get_allvars_fix('Amon', 'clw')
+    cubes_3d[0].var_name = "xlm1_cav"
+    cubes_3d[0].units = "kg kg-1"
+    fix = get_allvars_fix("Amon", "clw")
     fixed_cubes = fix.fix_metadata(cubes_3d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'clw'
-    assert cube.standard_name == 'mass_fraction_of_cloud_liquid_water_in_air'
-    assert cube.long_name == 'Mass Fraction of Cloud Liquid Water'
-    assert cube.units == 'kg kg-1'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "clw"
+    assert cube.standard_name == "mass_fraction_of_cloud_liquid_water_in_air"
+    assert cube.long_name == "Mass Fraction of Cloud Liquid Water"
+    assert cube.units == "kg kg-1"
+    assert "positive" not in cube.attributes
     check_hybrid_z(cube)

@@ -2432,165 +2582,165 @@ def test_clw_fix(cubes_3d):

 def test_get_hur_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'hur')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "hur")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_hur_fix(cubes_3d):
     """Test fix."""
-    cubes_3d[0].var_name = 'rhum_p19_cav'
-    cubes_3d[0].units = '1'
-    fix = get_allvars_fix('Amon', 'hur')
+    cubes_3d[0].var_name = "rhum_cav"
+    cubes_3d[0].units = "1"
+    fix = get_allvars_fix("Amon", "hur")
     fixed_cubes = fix.fix_metadata(cubes_3d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'hur'
-    assert cube.standard_name == 'relative_humidity'
-    assert cube.long_name == 'Relative Humidity'
-    assert cube.units == '%'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "hur"
+    assert cube.standard_name == "relative_humidity"
+    assert cube.long_name == "Relative Humidity"
+    assert cube.units == "%"
+    assert "positive" not in cube.attributes
     assert not cube.aux_factories
-    assert cube.coords('air_pressure')
+    assert cube.coords("air_pressure")

     np.testing.assert_allclose(cube.data, [[[[100.0]], [[200.0]]]])


 def test_get_hus_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'hus')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "hus")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_hus_fix(cubes_3d):
     """Test fix."""
-    cubes_3d[0].var_name = 'qm1_p19_cav'
-    cubes_3d[0].units = '1'
-    fix = get_allvars_fix('Amon', 'hus')
+    cubes_3d[0].var_name = "qm1_cav"
+    cubes_3d[0].units = "1"
+    fix = get_allvars_fix("Amon", "hus")
     fixed_cubes = fix.fix_metadata(cubes_3d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'hus'
-    assert cube.standard_name == 'specific_humidity'
-    assert cube.long_name == 'Specific Humidity'
-    assert cube.units == '1'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "hus"
+    assert cube.standard_name == "specific_humidity"
+    assert cube.long_name == "Specific Humidity"
+    assert cube.units == "1"
+    assert "positive" not in cube.attributes
     assert not cube.aux_factories
-    assert cube.coords('air_pressure')
+    assert cube.coords("air_pressure")

     np.testing.assert_allclose(cube.data, [[[[1.0]], [[2.0]]]])


 def test_get_ta_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'ta')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "ta")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_ta_fix(cubes_3d):
     """Test fix."""
-    cubes_3d[0].var_name = 'tm1_p19_cav'
-    cubes_3d[0].units = 'K'
-    fix = get_allvars_fix('Amon', 'ta')
+    cubes_3d[0].var_name = "tm1_cav"
+    cubes_3d[0].units = "K"
+    fix = get_allvars_fix("Amon", "ta")
     fixed_cubes = fix.fix_metadata(cubes_3d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'ta'
-    assert cube.standard_name == 'air_temperature'
-    assert cube.long_name == 'Air Temperature'
-    assert cube.units == 'K'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "ta"
+    assert cube.standard_name == "air_temperature"
+    assert cube.long_name == "Air Temperature"
+    assert cube.units == "K"
+    assert "positive" not in cube.attributes
     assert not cube.aux_factories
-    assert cube.coords('air_pressure')
+    assert cube.coords("air_pressure")

     np.testing.assert_allclose(cube.data, [[[[1.0]], [[2.0]]]])


 def test_get_ua_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'ua')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "ua")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_ua_fix(cubes_3d):
     """Test fix."""
-    cubes_3d[0].var_name = 'um1_p19_cav'
-    cubes_3d[0].units = 'm s-1'
-    fix = get_allvars_fix('Amon', 'ua')
+    cubes_3d[0].var_name = "um1_cav"
+    cubes_3d[0].units = "m s-1"
+    fix = get_allvars_fix("Amon", "ua")
     fixed_cubes = fix.fix_metadata(cubes_3d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'ua'
-    assert cube.standard_name == 'eastward_wind'
-    assert cube.long_name == 'Eastward Wind'
-    assert cube.units == 'm s-1'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "ua"
+    assert cube.standard_name == "eastward_wind"
+    assert cube.long_name == "Eastward Wind"
+    assert cube.units == "m s-1"
+    assert "positive" not in cube.attributes
     assert not cube.aux_factories
-    assert cube.coords('air_pressure')
+    assert cube.coords("air_pressure")

     np.testing.assert_allclose(cube.data, [[[[1.0]], [[2.0]]]])


 def test_get_va_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'va')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "va")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_va_fix(cubes_3d):
     """Test fix."""
-    cubes_3d[0].var_name = 'vm1_p19_cav'
-    cubes_3d[0].units = 'm s-1'
-    fix = get_allvars_fix('Amon', 'va')
+    cubes_3d[0].var_name = "vm1_cav"
+    cubes_3d[0].units = "m s-1"
+    fix = get_allvars_fix("Amon", "va")
     fixed_cubes = fix.fix_metadata(cubes_3d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'va'
-    assert cube.standard_name == 'northward_wind'
-    assert cube.long_name == 'Northward Wind'
-    assert cube.units == 'm s-1'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "va"
+    assert cube.standard_name == "northward_wind"
+    assert cube.long_name == "Northward Wind"
+    assert cube.units == "m s-1"
+    assert "positive" not in cube.attributes
     assert not cube.aux_factories
-    assert cube.coords('air_pressure')
+    assert cube.coords("air_pressure")

     np.testing.assert_allclose(cube.data, [[[[1.0]], [[2.0]]]])


 def test_get_zg_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('EMAC', 'EMAC', 'Amon', 'zg')
+    fix = Fix.get_fixes("EMAC", "EMAC", "Amon", "zg")
     assert fix == [Zg(None), AllVars(None), GenericFix(None)]


 def test_zg_fix(cubes_3d):
     """Test fix."""
-    cubes_3d[0].var_name = 'geopot_p19_cav'
-    cubes_3d[0].units = 'm2 s-2'
-    fix = get_fix('Amon', 'zg')
+    cubes_3d[0].var_name = "geopot_cav"
+    cubes_3d[0].units = "m2 s-2"
+    fix = get_fix("Amon", "zg")
     fixed_cubes = fix.fix_metadata(cubes_3d)
-    fix = get_allvars_fix('Amon', 'zg')
+    fix = get_allvars_fix("Amon", "zg")
     fixed_cubes = fix.fix_metadata(fixed_cubes)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'zg'
-    assert cube.standard_name == 'geopotential_height'
-    assert cube.long_name == 'Geopotential Height'
-    assert cube.units == 'm'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "zg"
+    assert cube.standard_name == "geopotential_height"
+    assert cube.long_name == "Geopotential Height"
+    assert cube.units == "m"
+    assert "positive" not in cube.attributes
     assert not cube.aux_factories
-    assert cube.coords('air_pressure')
+    assert cube.coords("air_pressure")

     np.testing.assert_allclose(
         cube.data,
@@ -2602,10 +2752,10 @@ def test_zg_fix(cubes_3d):
 # Test ``AllVars.fix_file``


-@mock.patch('esmvalcore.cmor._fixes.emac.emac.copyfile', autospec=True)
+@mock.patch("esmvalcore.cmor._fixes.emac.emac.copyfile", autospec=True)
 def test_fix_file_no_alevel(mock_copyfile):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'ta')
+    fix = get_allvars_fix("Amon", "ta")
     new_path = fix.fix_file(mock.sentinel.filepath, mock.sentinel.output_dir)
     assert new_path == mock.sentinel.filepath
@@ -2619,19 +2769,21 @@ def test_fix_plev_no_plev_coord(cubes_3d):
     """Test fix."""
     # Create cube with Z-coord whose units are not convertible to Pa
     cube = cubes_3d[0]
-    z_coord = cube.coord(axis='Z')
-    z_coord.var_name = 'height'
-    z_coord.standard_name = 'height'
-    z_coord.long_name = 'height'
-    z_coord.units = 'm'
-    z_coord.attributes = {'positive': 'up'}
+    z_coord = cube.coord(axis="Z")
+    z_coord.var_name = "height"
+    z_coord.standard_name = "height"
+    z_coord.long_name = "height"
+    z_coord.units = "m"
+    z_coord.attributes = {"positive": "up"}
     z_coord.points = np.arange(z_coord.shape[0])[::-1]

-    fix = get_allvars_fix('Amon', 'ta')
+    fix = get_allvars_fix("Amon", "ta")

-    msg = ("Cannot find requested pressure level coordinate for variable "
-           "'ta', searched for Z-coordinates with units that are convertible "
-           "to Pa")
+    msg = (
+        "Cannot find requested pressure level coordinate for variable "
+        "'ta', searched for Z-coordinates with units that are convertible "
+        "to Pa"
+    )
     with pytest.raises(ValueError, match=msg):
         fix._fix_plev(cube)
@@ -2641,16 +2793,16 @@ def test_fix_plev_no_plev_coord(cubes_3d):

 def test_fix_invalid_units():
     """Test fix."""
-    cube = Cube(1.0, attributes={'invalid_units': 'kg/m**2s'})
+    cube = Cube(1.0, attributes={"invalid_units": "kg/m**2s"})

-    fix = get_allvars_fix('Amon', 'pr')
+    fix = get_allvars_fix("Amon", "pr")
     fix.fix_var_metadata(cube)

-    assert cube.var_name == 'pr'
-    assert cube.standard_name == 'precipitation_flux'
-    assert cube.long_name == 'Precipitation'
-    assert cube.units == 'kg m-2 s-1'
-    assert cube.units.origin == 'kg m-2 s-1'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "pr"
+    assert cube.standard_name == "precipitation_flux"
+    assert cube.long_name == "Precipitation"
+    assert cube.units == "kg m-2 s-1"
+    assert cube.units.origin == "kg m-2 s-1"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, 1.0)
diff --git a/tests/integration/cmor/_fixes/icon/test_icon.py b/tests/integration/cmor/_fixes/icon/test_icon.py
index b1607d07bf..b87c052008 100644
--- a/tests/integration/cmor/_fixes/icon/test_icon.py
+++ b/tests/integration/cmor/_fixes/icon/test_icon.py
@@ -1,4 +1,5 @@
 """Tests for the ICON on-the-fly CMORizer."""
+
 from copy import deepcopy
 from datetime import datetime
 from pathlib import Path
@@ -30,16 +31,16 @@ from esmvalcore.dataset import Dataset

 TEST_GRID_FILE_URI = (
-    'https://github.com/ESMValGroup/ESMValCore/raw/main/tests/integration/'
-    'cmor/_fixes/test_data/icon_grid.nc'
+    "https://github.com/ESMValGroup/ESMValCore/raw/main/tests/integration/"
+    "cmor/_fixes/test_data/icon_grid.nc"
 )
-TEST_GRID_FILE_NAME = 'icon_grid.nc'
+TEST_GRID_FILE_NAME = "icon_grid.nc"


 @pytest.fixture(autouse=True)
 def tmp_cache_dir(monkeypatch, tmp_path):
     """Use temporary path as cache directory for all tests in this module."""
-    monkeypatch.setattr(IconFix, 'CACHE_DIR', tmp_path)
+    monkeypatch.setattr(IconFix, "CACHE_DIR", tmp_path)


 # Note: test_data_path is defined in tests/integration/cmor/_fixes/conftest.py
@@ -48,89 +49,135 @@ def tmp_cache_dir(monkeypatch, tmp_path):

 @pytest.fixture
 def cubes_2d(test_data_path):
     """2D sample cubes."""
-    nc_path = test_data_path / 'icon_2d.nc'
+    nc_path = test_data_path / "icon_2d.nc"
     return iris.load(str(nc_path))


 @pytest.fixture
 def cubes_3d(test_data_path):
     """3D sample cubes."""
-    nc_path = test_data_path / 'icon_3d.nc'
+    nc_path = test_data_path / "icon_3d.nc"
     return iris.load(str(nc_path))


 @pytest.fixture
 def cubes_grid(test_data_path):
     """Grid description sample cubes."""
-    nc_path = test_data_path / 'icon_grid.nc'
+    nc_path = test_data_path / "icon_grid.nc"
     return iris.load(str(nc_path))


 @pytest.fixture
 def cubes_regular_grid():
     """Cube with regular grid."""
-    time_coord = DimCoord([0], var_name='time', standard_name='time',
-                          units='days since 1850-01-01')
-    lat_coord = DimCoord([0.0, 1.0], var_name='lat', standard_name='latitude',
-                         long_name='latitude', units='degrees_north')
-    lon_coord = DimCoord([-1.0, 1.0], var_name='lon',
-                         standard_name='longitude', long_name='longitude',
-                         units='degrees_east')
-    cube = Cube([[[0.0, 1.0], [2.0, 3.0]]], var_name='tas', units='K',
-                dim_coords_and_dims=[(time_coord, 0),
-                                     (lat_coord, 1),
-                                     (lon_coord, 2)])
+    time_coord = DimCoord(
+        [0],
+        var_name="time",
+        standard_name="time",
+        units="days since 1850-01-01",
+    )
+    lat_coord = DimCoord(
+        [0.0, 1.0],
+        var_name="lat",
+        standard_name="latitude",
+        long_name="latitude",
+        units="degrees_north",
+    )
+    lon_coord = DimCoord(
+        [-1.0, 1.0],
+        var_name="lon",
+        standard_name="longitude",
+        long_name="longitude",
+        units="degrees_east",
+    )
+    cube = Cube(
+        [[[0.0, 1.0], [2.0, 3.0]]],
+        var_name="tas",
+        units="K",
+        dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)],
+    )
     return CubeList([cube])


 @pytest.fixture
 def cubes_2d_lat_lon_grid():
     """Cube with 2D latitude and longitude."""
-    time_coord = DimCoord([0], var_name='time', standard_name='time',
-                          units='days since 1850-01-01')
-    lat_coord = AuxCoord([[0.0, 0.0], [1.0, 1.0]], var_name='lat',
-                         standard_name='latitude', long_name='latitude',
-                         units='degrees_north')
-    lon_coord = AuxCoord([[0.0, 1.0], [0.0, 1.0]], var_name='lon',
-                         standard_name='longitude', long_name='longitude',
-                         units='degrees_east')
-    cube = Cube([[[0.0, 1.0], [2.0, 3.0]]], var_name='tas', units='K',
-                dim_coords_and_dims=[(time_coord, 0)],
-                aux_coords_and_dims=[(lat_coord, (1, 2)),
-                                     (lon_coord, (1, 2))])
+    time_coord = DimCoord(
+        [0],
+        var_name="time",
+        standard_name="time",
+        units="days since 1850-01-01",
+    )
+    lat_coord = AuxCoord(
+        [[0.0, 0.0], [1.0, 1.0]],
+        var_name="lat",
+        standard_name="latitude",
+        long_name="latitude",
+        units="degrees_north",
+    )
+    lon_coord = AuxCoord(
+        [[0.0, 1.0], [0.0, 1.0]],
+        var_name="lon",
+        standard_name="longitude",
+        long_name="longitude",
+        units="degrees_east",
+    )
+    cube = Cube(
+        [[[0.0, 1.0], [2.0, 3.0]]],
+        var_name="tas",
+        units="K",
+        dim_coords_and_dims=[(time_coord, 0)],
+        aux_coords_and_dims=[(lat_coord, (1, 2)), (lon_coord, (1, 2))],
+    )
     return CubeList([cube])


 @pytest.fixture
 def simple_unstructured_cube():
     """Simple cube with unstructured grid."""
-    time_coord = DimCoord([0], var_name='time', standard_name='time',
-                          units='days since 1850-01-01')
-    height_coord = DimCoord([0, 1, 2], var_name='height')
-    lat_coord = AuxCoord([0.0, 1.0], var_name='lat', standard_name='latitude',
-                         long_name='latitude', units='degrees_north')
-    lon_coord = AuxCoord([0.0, 1.0], var_name='lon',
-                         standard_name='longitude', long_name='longitude',
-                         units='degrees_east')
-    cube = Cube([[[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]]], var_name='ta',
-                units='K',
-                dim_coords_and_dims=[(time_coord, 0), (height_coord, 1)],
-                aux_coords_and_dims=[(lat_coord, 2), (lon_coord, 2)])
+    time_coord = DimCoord(
+        [0],
+        var_name="time",
+        standard_name="time",
+        units="days since 1850-01-01",
+    )
+    height_coord = DimCoord([0, 1, 2], var_name="height")
+    lat_coord = AuxCoord(
+        [0.0, 1.0],
+        var_name="lat",
+        standard_name="latitude",
+        long_name="latitude",
+        units="degrees_north",
+    )
+    lon_coord = AuxCoord(
+        [0.0, 1.0],
+        var_name="lon",
+        standard_name="longitude",
+        long_name="longitude",
+        units="degrees_east",
+    )
+    cube = Cube(
+        [[[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]]],
+        var_name="ta",
+        units="K",
+        dim_coords_and_dims=[(time_coord, 0), (height_coord, 1)],
+        aux_coords_and_dims=[(lat_coord, 2), (lon_coord, 2)],
+    )
     return cube


 def _get_fix(mip, short_name, fix_name, session=None):
     """Load a fix from esmvalcore.cmor._fixes.icon.icon."""
     dataset = Dataset(
-        project='ICON',
-        dataset='ICON',
+        project="ICON",
+        dataset="ICON",
         mip=mip,
         short_name=short_name,
     )
     extra_facets = get_extra_facets(dataset, ())
-    extra_facets['frequency'] = 'mon'
-    extra_facets['exp'] = 'amip'
-    vardef = get_var_info(project='ICON', mip=mip, short_name=short_name)
+    extra_facets["frequency"] = "mon"
+    extra_facets["exp"] = "amip"
+    vardef = get_var_info(project="ICON", mip=mip, short_name=short_name)
     cls = getattr(esmvalcore.cmor._fixes.icon.icon, fix_name)
     fix = cls(vardef, extra_facets=extra_facets, session=session)
     return fix
@@ -144,7 +191,7 @@ def get_fix(mip, short_name, session=None):

 def get_allvars_fix(mip, short_name, session=None):
     """Load the AllVars fix from esmvalcore.cmor._fixes.icon.icon."""
-    return _get_fix(mip, short_name, 'AllVars', session=session)
+    return _get_fix(mip, short_name, "AllVars", session=session)


 def fix_metadata(cubes, mip, short_name, session=None):
@@ -169,11 +216,11 @@ def fix_metadata(cubes, mip, short_name, session=None):
     """Check ta metadata."""
     assert len(cubes) == 1
     cube = cubes[0]
-    assert cube.var_name == 'ta'
-    assert cube.standard_name == 'air_temperature'
-    assert cube.long_name == 'Air Temperature'
-    assert cube.units == 'K'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "ta"
+    assert cube.standard_name == "air_temperature"
+    assert cube.long_name == "Air Temperature"
+    assert cube.units == "K"
+    assert "positive" not in cube.attributes
     return cube
@@ -181,11 +228,11 @@ def check_tas_metadata(cubes):
     """Check tas metadata."""
     assert len(cubes) == 1
     cube = cubes[0]
-    assert cube.var_name == 'tas'
-    assert cube.standard_name == 'air_temperature'
-    assert cube.long_name == 'Near-Surface Air Temperature'
-    assert cube.units == 'K'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "tas"
+    assert cube.standard_name == "air_temperature"
+    assert cube.long_name == "Near-Surface Air Temperature"
+    assert cube.units == "K"
+    assert "positive" not in cube.attributes
     return cube
@@ -194,22 +241,23 @@ def check_siconc_metadata(cubes, var_name, long_name):
     assert len(cubes) == 1
     cube = cubes[0]
     assert cube.var_name == var_name
-    assert cube.standard_name == 'sea_ice_area_fraction'
+    assert cube.standard_name == "sea_ice_area_fraction"
     assert cube.long_name == long_name
-    assert cube.units == '%'
-    assert 'positive' not in cube.attributes
+    assert cube.units == "%"
+    assert "positive" not in cube.attributes
     return cube


 def check_time(cube):
     """Check time coordinate of cube."""
-    assert cube.coords('time', dim_coords=True)
-    time = cube.coord('time', dim_coords=True)
-    assert time.var_name == 'time'
-    assert time.standard_name == 'time'
-    assert time.long_name == 'time'
-    assert time.units == Unit('days since 1850-01-01',
-                              calendar='proleptic_gregorian')
+    assert cube.coords("time", dim_coords=True)
+    time = cube.coord("time", dim_coords=True)
+    assert time.var_name == "time"
+    assert time.standard_name == "time"
+    assert time.long_name == "time"
+    assert time.units == Unit(
+        "days since 1850-01-01", calendar="proleptic_gregorian"
+    )
     np.testing.assert_allclose(time.points, [54770.5])
     np.testing.assert_allclose(time.bounds, [[54755.0, 54786.0]])
     assert time.attributes == {}
@@ -217,25 +265,25 @@ def check_time(cube):

 def check_model_level_metadata(cube):
     """Check metadata of model_level coordinate."""
-    assert cube.coords('model level number', dim_coords=True)
-    height = cube.coord('model level number', dim_coords=True)
-    assert height.var_name == 'model_level'
+    assert cube.coords("model level number", dim_coords=True)
+    height = cube.coord("model level number", dim_coords=True)
+    assert height.var_name == "model_level"
     assert height.standard_name is None
-    assert height.long_name == 'model level number'
-    assert height.units == 'no unit'
-    assert height.attributes == {'positive': 'up'}
+    assert height.long_name == "model level number"
+    assert height.units == "no unit"
+    assert height.attributes == {"positive": "up"}
     return height


 def check_air_pressure_metadata(cube):
     """Check metadata of air_pressure coordinate."""
-    assert cube.coords('air_pressure', dim_coords=False)
-    plev = cube.coord('air_pressure', dim_coords=False)
-    assert plev.var_name == 'plev'
-    assert plev.standard_name == 'air_pressure'
-    assert plev.long_name == 'pressure'
-    assert plev.units == 'Pa'
-    assert plev.attributes == {'positive': 'down'}
+    assert cube.coords("air_pressure", dim_coords=False)
+    plev = cube.coord("air_pressure", dim_coords=False)
+    assert plev.var_name == "plev"
+    assert plev.standard_name == "air_pressure"
+    assert plev.long_name == "pressure"
+    assert plev.units == "Pa"
+    assert plev.attributes == {"positive": "down"}
     return plev
@@ -246,7 +294,7 @@ def check_height(cube, plev_has_bounds=True):
     assert height.bounds is None

     plev = check_air_pressure_metadata(cube)
-    assert cube.coord_dims('air_pressure') == (0, 1, 2)
+    assert cube.coord_dims("air_pressure") == (0, 1, 2)

     np.testing.assert_allclose(
         plev.points[0, :4, 0],
@@ -255,10 +303,12 @@
     if plev_has_bounds:
         np.testing.assert_allclose(
             plev.bounds[0, :4, 0],
-            [[100825.04, 100308.09],
-             [100308.09, 99000.336],
-             [99000.336, 97001.42],
-             [97001.42, 94388.59]],
+            [
+                [100825.04, 100308.09],
+                [100308.09, 99000.336],
+                [99000.336, 97001.42],
+                [97001.42, 94388.59],
+            ],
         )
     else:
         assert plev.bounds is None
@@ -266,30 +316,30 @@ def check_height(cube, plev_has_bounds=True):

 def check_heightxm(cube, height_value):
     """Check scalar heightxm coordinate of cube."""
-    assert cube.coords('height')
-    height = cube.coord('height')
-    assert height.var_name == 'height'
-    assert height.standard_name == 'height'
-    assert height.long_name == 'height'
-    assert height.units == 'm'
-    assert height.attributes == {'positive': 'up'}
+    assert cube.coords("height")
+    height = cube.coord("height")
+    assert height.var_name == "height"
+    assert height.standard_name == "height"
+    assert height.long_name == "height"
+    assert height.units == "m"
+    assert height.attributes == {"positive": "up"}
     np.testing.assert_allclose(height.points, [height_value])
     assert height.bounds is None


 def check_lat(cube):
     """Check latitude coordinate of cube."""
-    assert cube.coords('latitude', dim_coords=False)
-    lat = cube.coord('latitude', dim_coords=False)
-    assert lat.var_name == 'lat'
-    assert lat.standard_name == 'latitude'
-    assert lat.long_name == 'latitude'
-    assert lat.units == 'degrees_north'
+    assert cube.coords("latitude", dim_coords=False)
+    lat = cube.coord("latitude", dim_coords=False)
+    assert lat.var_name == "lat"
+    assert lat.standard_name == "latitude"
+    assert lat.long_name == "latitude"
+    assert lat.units == "degrees_north"
     assert lat.attributes == {}
     np.testing.assert_allclose(
         lat.points,
         [-45.0, -45.0, -45.0, -45.0, 45.0, 45.0, 45.0, 45.0],
-        rtol=1e-5
+        rtol=1e-5,
     )
     np.testing.assert_allclose(
         lat.bounds,
@@ -303,24 +353,24 @@ def check_lat(cube):
             [0.0, 0.0, 90.0],
             [0.0, 0.0, 90.0],
         ],
-        rtol=1e-5
+        rtol=1e-5,
     )
     return lat


 def check_lon(cube):
     """Check longitude coordinate of cube."""
-    assert cube.coords('longitude', dim_coords=False)
-    lon = cube.coord('longitude', dim_coords=False)
-    assert lon.var_name == 'lon'
-    assert lon.standard_name == 'longitude'
-    assert lon.long_name == 'longitude'
-    assert lon.units == 'degrees_east'
+    assert cube.coords("longitude", dim_coords=False)
+    lon = cube.coord("longitude", dim_coords=False)
+    assert lon.var_name == "lon"
+    assert lon.standard_name == "longitude"
+    assert lon.long_name == "longitude"
+    assert lon.units == "degrees_east"
     assert lon.attributes == {}
     np.testing.assert_allclose(
         lon.points,
         [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0],
-        rtol=1e-5
+        rtol=1e-5,
     )
     np.testing.assert_allclose(
         lon.bounds,
@@ -334,7 +384,7 @@ def check_lon(cube):
             [0.0, 90.0, 0.0],
             [90.0, 180.0, 0.0],
         ],
-        rtol=1e-5
+        rtol=1e-5,
    )
     return lon
@@ -345,19 +395,24 @@ def check_lat_lon(cube):
     lon = check_lon(cube)

     # Check that latitude and longitude are mesh coordinates
-    assert cube.coords('latitude', mesh_coords=True)
-    assert cube.coords('longitude', mesh_coords=True)
+    assert cube.coords("latitude", mesh_coords=True)
+    assert cube.coords("longitude", mesh_coords=True)

     # Check dimensional coordinate describing the mesh
-    assert cube.coords('first spatial index for variables stored on an '
-                       'unstructured grid', dim_coords=True)
-    i_coord = cube.coord('first spatial index for variables stored on an '
-                         'unstructured grid', dim_coords=True)
-    assert i_coord.var_name == 'i'
+    assert cube.coords(
+        "first spatial index for variables stored on an unstructured grid",
+        dim_coords=True,
+    )
+    i_coord = cube.coord(
+        "first spatial index for variables stored on an unstructured grid",
+        dim_coords=True,
+    )
+    assert i_coord.var_name == "i"
     assert i_coord.standard_name is None
-    assert i_coord.long_name == ('first spatial index for variables stored on '
-                                 'an unstructured grid')
-    assert i_coord.units == '1'
+    assert i_coord.long_name == (
+        "first spatial index for variables stored on an unstructured grid"
+    )
+    assert i_coord.units == "1"
     np.testing.assert_allclose(i_coord.points, [0, 1, 2, 3, 4, 5, 6, 7])
     assert i_coord.bounds is None
@@ -366,7 +421,7 @@ def check_lat_lon(cube):
     assert cube.coord_dims(lat) == cube.coord_dims(i_coord)

     # Check the mesh itself
-    assert cube.location == 'face'
+    assert cube.location == "face"
     mesh = cube.mesh
     check_mesh(mesh)
@@ -377,24 +432,24 @@ def check_mesh(mesh):
     assert mesh.var_name is None
     assert mesh.standard_name is None
     assert mesh.long_name is None
-    assert mesh.units == 'unknown'
+    assert mesh.units == "unknown"
     assert mesh.attributes == {}
-    assert mesh.cf_role == 'mesh_topology'
+    assert mesh.cf_role == "mesh_topology"
     assert mesh.topology_dimension == 2

     # Check face coordinates
-    assert len(mesh.coords(include_faces=True)) == 2
+    assert len(mesh.coords(location="face")) == 2

-    mesh_face_lat = mesh.coord(include_faces=True, axis='y')
-    assert mesh_face_lat.var_name == 'lat'
-    assert mesh_face_lat.standard_name == 'latitude'
-    assert mesh_face_lat.long_name == 'latitude'
-    assert mesh_face_lat.units == 'degrees_north'
+    mesh_face_lat = mesh.coord(location="face", axis="y")
+    assert mesh_face_lat.var_name == "lat"
+    assert mesh_face_lat.standard_name == "latitude"
+    assert mesh_face_lat.long_name == "latitude"
+    assert mesh_face_lat.units == "degrees_north"
     assert mesh_face_lat.attributes == {}
     np.testing.assert_allclose(
         mesh_face_lat.points,
         [-45.0, -45.0, -45.0, -45.0, 45.0, 45.0, 45.0, 45.0],
-        rtol=1e-5
+        rtol=1e-5,
     )
     np.testing.assert_allclose(
         mesh_face_lat.bounds,
@@ -408,19 +463,19 @@ def check_mesh(mesh):
             [0.0, 0.0, 90.0],
             [0.0, 0.0, 90.0],
         ],
-        rtol=1e-5
+        rtol=1e-5,
     )

-    mesh_face_lon = mesh.coord(include_faces=True, axis='x')
-    assert mesh_face_lon.var_name == 'lon'
-    assert mesh_face_lon.standard_name == 'longitude'
-    assert mesh_face_lon.long_name == 'longitude'
-    assert mesh_face_lon.units == 'degrees_east'
+    mesh_face_lon = mesh.coord(location="face", axis="x")
+    assert mesh_face_lon.var_name == "lon"
+    assert mesh_face_lon.standard_name == "longitude"
+    assert mesh_face_lon.long_name == "longitude"
+    assert mesh_face_lon.units == "degrees_east"
     assert mesh_face_lon.attributes == {}
     np.testing.assert_allclose(
         mesh_face_lon.points,
         [225.0, 315.0, 45.0, 135.0, 225.0, 315.0, 45.0, 135.0],
-        rtol=1e-5
+        rtol=1e-5,
     )
     np.testing.assert_allclose(
         mesh_face_lon.bounds,
@@ -434,35 +489,31 @@ def check_mesh(mesh):
             [0.0, 90.0, 0.0],
             [90.0, 180.0, 0.0],
         ],
-        rtol=1e-5
+        rtol=1e-5,
     )

     # Check node coordinates
-    assert len(mesh.coords(include_nodes=True)) == 2
+    assert len(mesh.coords(location="node")) == 2

-    mesh_node_lat = mesh.coord(include_nodes=True, axis='y')
-    assert mesh_node_lat.var_name == 'nlat'
-    assert mesh_node_lat.standard_name == 'latitude'
-    assert mesh_node_lat.long_name == 'node latitude'
-    assert mesh_node_lat.units == 'degrees_north'
+    mesh_node_lat = mesh.coord(location="node", axis="y")
+    assert mesh_node_lat.var_name == "nlat"
+    assert mesh_node_lat.standard_name == "latitude"
+    assert mesh_node_lat.long_name == "node latitude"
+    assert mesh_node_lat.units == "degrees_north"
     assert mesh_node_lat.attributes == {}
     np.testing.assert_allclose(
-        mesh_node_lat.points,
-        [-90.0, 0.0, 0.0, 0.0, 0.0, 90.0],
-        rtol=1e-5
+        mesh_node_lat.points, [-90.0, 0.0, 0.0, 0.0, 0.0, 90.0], rtol=1e-5
     )
     assert mesh_node_lat.bounds is None

-    mesh_node_lon = mesh.coord(include_nodes=True, axis='x')
-    assert mesh_node_lon.var_name == 'nlon'
-    assert mesh_node_lon.standard_name == 'longitude'
-    assert mesh_node_lon.long_name == 'node longitude'
-    assert mesh_node_lon.units == 'degrees_east'
+    mesh_node_lon = mesh.coord(location="node", axis="x")
+    assert mesh_node_lon.var_name == "nlon"
+    assert mesh_node_lon.standard_name == "longitude"
+    assert mesh_node_lon.long_name == "node longitude"
+    assert mesh_node_lon.units == "degrees_east"
     assert mesh_node_lon.attributes == {}
     np.testing.assert_allclose(
-        mesh_node_lon.points,
-        [0.0, 180.0, 270.0, 0.0, 90, 0.0],
-        rtol=1e-5
+        mesh_node_lon.points, [0.0, 180.0, 270.0, 0.0, 90, 0.0], rtol=1e-5
     )
     assert mesh_node_lon.bounds is None
@@ -472,34 +523,36 @@ def check_mesh(mesh):
     assert conn.var_name is None
     assert conn.standard_name is None
     assert conn.long_name is None
-    assert conn.units == 'unknown'
+    assert conn.units == "unknown"
     assert conn.attributes == {}
-    assert conn.cf_role == 'face_node_connectivity'
+    assert conn.cf_role == "face_node_connectivity"
     assert conn.start_index == 1
     assert conn.location_axis == 0
     assert conn.shape == (8, 3)
     np.testing.assert_array_equal(
         conn.indices,
-        [[1, 3, 2],
-         [1, 4, 3],
-         [1, 5, 4],
-         [1, 2, 5],
-         [2, 3, 6],
-         [3, 4, 6],
-         [4, 5, 6],
-         [5, 2, 6]],
+        [
+            [1, 3, 2],
+            [1, 4, 3],
+            [1, 5, 4],
+            [1, 2, 5],
+            [2, 3, 6],
+            [3, 4, 6],
+            [4, 5, 6],
+            [5, 2, 6],
+        ],
     )


 def check_typesi(cube):
     """Check scalar typesi coordinate of cube."""
-    assert cube.coords('area_type')
-    typesi = cube.coord('area_type')
-    assert typesi.var_name == 'type'
-    assert typesi.standard_name == 'area_type'
-    assert typesi.long_name == 'Sea Ice area type'
+    assert cube.coords("area_type")
+    typesi = cube.coord("area_type")
+    assert typesi.var_name == "type"
+    assert typesi.standard_name == "area_type"
+    assert typesi.long_name == "Sea Ice area type"
     assert typesi.units.is_no_unit()
-    np.testing.assert_array_equal(typesi.points, ['sea_ice'])
+    np.testing.assert_array_equal(typesi.points, ["sea_ice"])
     assert typesi.bounds is None
@@ -509,46 +562,46 @@ def test_get_areacella_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'fx', 'areacella')
+    fix = Fix.get_fixes("ICON", "ICON", "fx", "areacella")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_areacella_fix(cubes_grid):
     """Test fix."""
-    fix = get_allvars_fix('fx', 'areacella')
-    fix.extra_facets['var_type'] = 'fx'
+    fix = get_allvars_fix("fx", "areacella")
+    fix.extra_facets["var_type"] = "fx"
     fixed_cubes = fix.fix_metadata(cubes_grid)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'areacella'
-    assert cube.standard_name == 'cell_area'
-    assert cube.long_name == 'Grid-Cell Area for Atmospheric Grid Variables'
-    assert cube.units == 'm2'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "areacella"
+    assert cube.standard_name == "cell_area"
+    assert cube.long_name == "Grid-Cell Area for Atmospheric Grid Variables"
+    assert cube.units == "m2"
+    assert "positive" not in cube.attributes

     check_lat_lon(cube)


 def test_get_areacello_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'Ofx', 'areacello')
+    fix = Fix.get_fixes("ICON", "ICON", "Ofx", "areacello")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_areacello_fix(cubes_grid):
     """Test fix."""
-    fix = get_allvars_fix('Ofx', 'areacello')
-    fix.extra_facets['var_type'] = 'fx'
+    fix = get_allvars_fix("Ofx", "areacello")
+    fix.extra_facets["var_type"] = "fx"
     fixed_cubes = fix.fix_metadata(cubes_grid)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'areacello'
-    assert cube.standard_name == 'cell_area'
-    assert cube.long_name == 'Grid-Cell Area for Ocean Variables'
-    assert cube.units == 'm2'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "areacello"
+    assert cube.standard_name == "cell_area"
+    assert cube.long_name == "Grid-Cell Area for Ocean Variables"
+    assert cube.units == "m2"
+    assert "positive" not in cube.attributes

     check_lat_lon(cube)
@@ -558,31 +611,31 @@ def test_areacello_fix(cubes_grid):

 def test_get_clwvi_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'clwvi')
+    fix = Fix.get_fixes("ICON", "ICON", "Amon", "clwvi")
     assert fix == [Clwvi(None), AllVars(None), GenericFix(None)]


 def test_clwvi_fix(cubes_regular_grid):
     """Test fix."""
-    cubes = CubeList([
-        cubes_regular_grid[0].copy(),
-        cubes_regular_grid[0].copy()
-    ])
-    cubes[0].var_name = 'cllvi'
-    cubes[1].var_name = 'clivi'
-    cubes[0].units = '1e3 kg m-2'
-    cubes[1].units = '1e3 kg m-2'
+    cubes = CubeList(
+        [cubes_regular_grid[0].copy(), cubes_regular_grid[0].copy()]
+    )
+    cubes[0].var_name = "cllvi"
+    cubes[1].var_name = "clivi"
+    cubes[0].units = "1e3 kg m-2"
+    cubes[1].units = "1e3 kg m-2"

-    fixed_cubes = fix_metadata(cubes, 'Amon', 'clwvi')
+    fixed_cubes = fix_metadata(cubes, "Amon", "clwvi")

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'clwvi'
-    assert cube.standard_name == ('atmosphere_mass_content_of_cloud_'
-                                  'condensed_water')
-    assert cube.long_name == 'Condensed Water Path'
-    assert cube.units == 'kg m-2'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "clwvi"
+    assert cube.standard_name == (
+        "atmosphere_mass_content_of_cloud_condensed_water"
+    )
+    assert cube.long_name == "Condensed Water Path"
+    assert cube.units == "kg m-2"
+    assert "positive" not in cube.attributes

     np.testing.assert_allclose(cube.data, [[[0.0, 2000.0], [4000.0, 6000.0]]])
@@ -592,23 +645,24 @@ def test_clwvi_fix(cubes_regular_grid):

 def test_get_lwp_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'AERmon', 'lwp')
+    fix = Fix.get_fixes("ICON", "ICON", "AERmon", "lwp")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_lwp_fix(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('AERmon', 'lwp')
+    fix = get_allvars_fix("AERmon", "lwp")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'lwp'
-    assert cube.standard_name == ('atmosphere_mass_content_of_cloud_liquid_'
-                                  'water')
-    assert cube.long_name == 'Liquid Water Path'
-    assert cube.units == 'kg m-2'
-    assert 'positive' not in cube.attributes
+    assert cube.var_name == "lwp"
+    assert cube.standard_name == (
+        "atmosphere_mass_content_of_cloud_liquid_water"
+    )
+    assert cube.long_name == "Liquid Water Path"
+    assert cube.units == "kg m-2"
+    assert "positive" not in cube.attributes

     check_time(cube)
     check_lat_lon(cube)
@@ -619,22 +673,22 @@ def test_lwp_fix(cubes_2d):

 def test_get_rsdt_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'rsdt')
+    fix = Fix.get_fixes("ICON", "ICON", "Amon", "rsdt")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_rsdt_fix(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'rsdt')
+    fix = get_allvars_fix("Amon", "rsdt")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'rsdt'
-    assert cube.standard_name == 'toa_incoming_shortwave_flux'
-    assert cube.long_name == 'TOA Incident Shortwave Radiation'
-    assert cube.units == 'W m-2'
-    assert cube.attributes['positive'] == 'down'
+    assert cube.var_name == "rsdt"
+    assert cube.standard_name == "toa_incoming_shortwave_flux"
+    assert cube.long_name == "TOA Incident Shortwave Radiation"
+    assert cube.units == "W m-2"
+    assert cube.attributes["positive"] == "down"

     check_time(cube)
     check_lat_lon(cube)
@@ -642,22 +696,22 @@ def test_rsdt_fix(cubes_2d):

 def test_get_rsut_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'rsut')
+    fix = Fix.get_fixes("ICON", "ICON", "Amon", "rsut")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_rsut_fix(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'rsut')
+    fix = get_allvars_fix("Amon", "rsut")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     assert len(fixed_cubes) == 1
     cube = fixed_cubes[0]
-    assert cube.var_name == 'rsut'
-    assert cube.standard_name == 'toa_outgoing_shortwave_flux'
-    assert cube.long_name == 'TOA Outgoing Shortwave Radiation'
-    assert cube.units == 'W m-2'
-    assert cube.attributes['positive'] == 'up'
+    assert cube.var_name == "rsut"
+    assert cube.standard_name == "toa_outgoing_shortwave_flux"
+    assert cube.long_name == "TOA Outgoing Shortwave Radiation"
+    assert cube.units == "W m-2"
+    assert cube.attributes["positive"] == "up"

     check_time(cube)
     check_lat_lon(cube)
@@ -668,17 +722,18 @@ def test_rsut_fix(cubes_2d):

 def test_get_siconc_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'SImon', 'siconc')
+    fix = Fix.get_fixes("ICON", "ICON", "SImon", "siconc")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_siconc_fix(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('SImon', 'siconc')
+    fix = get_allvars_fix("SImon", "siconc")
     fixed_cubes = fix.fix_metadata(cubes_2d)

-    cube = check_siconc_metadata(fixed_cubes, 'siconc',
-                                 'Sea-Ice Area Percentage (Ocean Grid)')
+    cube = check_siconc_metadata(
+        fixed_cubes, "siconc", "Sea-Ice Area Percentage (Ocean Grid)"
+    )
     check_time(cube)
     check_lat_lon(cube)
     check_typesi(cube)
@@ -691,17 +746,18 @@ def test_siconc_fix(cubes_2d):

 def test_get_siconca_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'SImon', 'siconca')
+    fix = Fix.get_fixes("ICON", "ICON", "SImon", "siconca")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_siconca_fix(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('SImon', 'siconca')
+    fix = get_allvars_fix("SImon", "siconca")
     fixed_cubes = fix.fix_metadata(cubes_2d)

-    cube = check_siconc_metadata(fixed_cubes, 'siconca',
-                                 'Sea-Ice Area Percentage (Atmospheric Grid)')
+    cube = check_siconc_metadata(
+        fixed_cubes, "siconca", "Sea-Ice Area Percentage (Atmospheric Grid)"
+    )
     check_time(cube)
     check_lat_lon(cube)
     check_typesi(cube)
@@ -717,13 +773,13 @@ def test_siconca_fix(cubes_2d):

 def test_get_ta_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'ta')
+    fix = Fix.get_fixes("ICON", "ICON", "Amon", "ta")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_ta_fix(cubes_3d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'ta')
+    fix = get_allvars_fix("Amon", "ta")
     fixed_cubes = fix.fix_metadata(cubes_3d)

     cube = check_ta_metadata(fixed_cubes)
@@ -734,11 +790,13 @@ def test_ta_fix(cubes_3d):

 def test_ta_fix_no_plev_bounds(cubes_3d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'ta')
-    cubes = CubeList([
-        cubes_3d.extract_cube(NameConstraint(var_name='ta')),
-        cubes_3d.extract_cube(NameConstraint(var_name='pfull')),
-    ])
+    fix = get_allvars_fix("Amon", "ta")
+    cubes = CubeList(
+        [
+            cubes_3d.extract_cube(NameConstraint(var_name="ta")),
+            cubes_3d.extract_cube(NameConstraint(var_name="pfull")),
+        ]
+    )
     fixed_cubes = fix.fix_metadata(cubes)

     cube = check_ta_metadata(fixed_cubes)
@@ -752,13 +810,13 @@ def test_ta_fix_no_plev_bounds(cubes_3d):

 def test_get_tas_fix():
     """Test getting of fix."""
-    fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'tas')
+    fix = Fix.get_fixes("ICON", "ICON", "Amon", "tas")
     assert fix == [AllVars(None), GenericFix(None)]


 def test_tas_fix(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'tas')
+    fix = get_allvars_fix("Amon", "tas")
     fixed_cubes = fix.fix_metadata(cubes_2d)

     cube = check_tas_metadata(fixed_cubes)
@@ -769,10 +827,10 @@ def test_tas_fix(cubes_2d):

 def test_tas_spatial_index_coord_already_present(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'tas')
+    fix = get_allvars_fix("Amon", "tas")

-    index_coord = DimCoord(np.arange(8), var_name='ncells')
-    cube = cubes_2d.extract_cube(NameConstraint(var_name='tas'))
+    index_coord = DimCoord(np.arange(8), var_name="ncells")
+    cube = cubes_2d.extract_cube(NameConstraint(var_name="tas"))
     cube.add_dim_coord(index_coord, 1)
     fixed_cubes = fix.fix_metadata(cubes_2d)
@@ -783,11 +841,11 @@ def test_tas_spatial_index_coord_already_present(cubes_2d):

 def test_tas_scalar_height2m_already_present(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'tas')
+    fix = get_allvars_fix("Amon", "tas")

     # Scalar height (with wrong metadata) already present
-    height_coord = AuxCoord(2.0, var_name='h', standard_name='height')
-    cube = cubes_2d.extract_cube(NameConstraint(var_name='tas'))
+    height_coord = AuxCoord(2.0, var_name="h", standard_name="height")
height_coord = AuxCoord(2.0, var_name="h", standard_name="height")
+    cube = cubes_2d.extract_cube(NameConstraint(var_name="tas"))
     cube.add_aux_coord(height_coord, ())
 
     fixed_cubes = fix.fix_metadata(cubes_2d)
 
@@ -799,30 +857,35 @@ def test_tas_no_mesh(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'tas')
-    fix.extra_facets['ugrid'] = False
+    fix = get_allvars_fix("Amon", "tas")
+    fix.extra_facets["ugrid"] = False
 
     fixed_cubes = fix.fix_metadata(cubes_2d)
 
     cube = check_tas_metadata(fixed_cubes)
     assert cube.mesh is None
-    assert cube.coords('first spatial index for variables stored on an '
-                       'unstructured grid', dim_coords=True)
-    i_coord = cube.coord('first spatial index for variables stored on an '
-                         'unstructured grid', dim_coords=True)
-    assert i_coord.var_name == 'i'
+    assert cube.coords(
+        "first spatial index for variables stored on an unstructured grid",
+        dim_coords=True,
+    )
+    i_coord = cube.coord(
+        "first spatial index for variables stored on an unstructured grid",
+        dim_coords=True,
+    )
+    assert i_coord.var_name == "i"
     assert i_coord.standard_name is None
-    assert i_coord.long_name == ('first spatial index for variables stored on '
-                                 'an unstructured grid')
-    assert i_coord.units == '1'
+    assert i_coord.long_name == (
+        "first spatial index for variables stored on an unstructured grid"
+    )
+    assert i_coord.units == "1"
     np.testing.assert_allclose(i_coord.points, [0, 1, 2, 3, 4, 5, 6, 7])
     assert i_coord.bounds is None
-    assert cube.coords('latitude', dim_coords=False)
-    assert cube.coords('longitude', dim_coords=False)
-    lat = cube.coord('latitude', dim_coords=False)
-    lon = cube.coord('longitude', dim_coords=False)
+    assert cube.coords("latitude", dim_coords=False)
+    assert cube.coords("longitude", dim_coords=False)
+    lat = cube.coord("latitude", dim_coords=False)
+    lon = cube.coord("longitude", dim_coords=False)
     assert len(cube.coord_dims(lat)) == 1
     assert cube.coord_dims(lat) == cube.coord_dims(lon)
     assert cube.coord_dims(lat) == cube.coord_dims(i_coord)
@@ -830,13 +893,13 @@ def test_tas_dim_height2m_already_present(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'tas')
+    fix = get_allvars_fix("Amon", "tas")
 
     # Dimensional coordinate height (with wrong metadata) already present
-    height_coord = AuxCoord(2.0, var_name='h', standard_name='height')
-    cube = cubes_2d.extract_cube(NameConstraint(var_name='tas'))
+    height_coord = AuxCoord(2.0, var_name="h", standard_name="height")
+    cube = cubes_2d.extract_cube(NameConstraint(var_name="tas"))
     cube.add_aux_coord(height_coord, ())
-    cube = iris.util.new_axis(cube, scalar_coord='height')
+    cube = iris.util.new_axis(cube, scalar_coord="height")
     cube.transpose((1, 0, 2))
     cubes = CubeList([cube])
     fixed_cubes = fix.fix_metadata(cubes)
@@ -849,21 +912,22 @@ def test_tas_no_shift_time(cubes_2d):
     """Test fix."""
-    fix = get_allvars_fix('Amon', 'tas')
-    fix.extra_facets['shift_time'] = False
+    fix = get_allvars_fix("Amon", "tas")
+    fix.extra_facets["shift_time"] = False
 
     fixed_cubes = fix.fix_metadata(cubes_2d)
 
     cube = check_tas_metadata(fixed_cubes)
     check_lat_lon(cube)
     check_heightxm(cube, 2.0)
-    assert cube.coords('time', dim_coords=True)
-    time = cube.coord('time', dim_coords=True)
-    assert time.var_name == 'time'
-    assert time.standard_name == 'time'
-    assert time.long_name == 'time'
-    assert time.units == Unit('days since 1850-01-01',
-                              calendar='proleptic_gregorian')
+    assert
cube.coords("time", dim_coords=True) + time = cube.coord("time", dim_coords=True) + assert time.var_name == "time" + assert time.standard_name == "time" + assert time.long_name == "time" + assert time.units == Unit( + "days since 1850-01-01", calendar="proleptic_gregorian" + ) np.testing.assert_allclose(time.points, [54786.0]) assert time.bounds is None assert time.attributes == {} @@ -874,43 +938,43 @@ def test_tas_no_shift_time(cubes_2d): def test_get_uas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'uas') + fix = Fix.get_fixes("ICON", "ICON", "Amon", "uas") assert fix == [AllVars(None), GenericFix(None)] def test_uas_fix(cubes_2d): """Test fix.""" - fix = get_allvars_fix('Amon', 'uas') + fix = get_allvars_fix("Amon", "uas") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'uas' - assert cube.standard_name == 'eastward_wind' - assert cube.long_name == 'Eastward Near-Surface Wind' - assert cube.units == 'm s-1' - assert 'positive' not in cube.attributes + assert cube.var_name == "uas" + assert cube.standard_name == "eastward_wind" + assert cube.long_name == "Eastward Near-Surface Wind" + assert cube.units == "m s-1" + assert "positive" not in cube.attributes check_time(cube) check_lat_lon(cube) - assert cube.coords('height') - height = cube.coord('height') - assert height.var_name == 'height' - assert height.standard_name == 'height' - assert height.long_name == 'height' - assert height.units == 'm' - assert height.attributes == {'positive': 'up'} + assert cube.coords("height") + height = cube.coord("height") + assert height.var_name == "height" + assert height.standard_name == "height" + assert height.long_name == "height" + assert height.units == "m" + assert height.attributes == {"positive": "up"} np.testing.assert_allclose(height.points, [10.0]) assert height.bounds is None def test_uas_scalar_height10m_already_present(cubes_2d): """Test fix.""" - fix = get_allvars_fix('Amon', 'uas') + fix = get_allvars_fix("Amon", "uas") # Scalar height (with wrong metadata) already present - height_coord = AuxCoord(10.0, var_name='h', standard_name='height') - cube = cubes_2d.extract_cube(NameConstraint(var_name='uas')) + height_coord = AuxCoord(10.0, var_name="h", standard_name="height") + cube = cubes_2d.extract_cube(NameConstraint(var_name="uas")) cube.add_aux_coord(height_coord, ()) fixed_cubes = fix.fix_metadata(cubes_2d) @@ -922,13 +986,13 @@ def test_uas_scalar_height10m_already_present(cubes_2d): def test_uas_dim_height10m_already_present(cubes_2d): """Test fix.""" - fix = get_allvars_fix('Amon', 'uas') + fix = get_allvars_fix("Amon", "uas") # Dimensional coordinate height (with wrong metadata) already present - height_coord = AuxCoord(10.0, var_name='h', standard_name='height') - cube = cubes_2d.extract_cube(NameConstraint(var_name='uas')) + height_coord = AuxCoord(10.0, var_name="h", standard_name="height") + cube = cubes_2d.extract_cube(NameConstraint(var_name="uas")) cube.add_aux_coord(height_coord, ()) - cube = iris.util.new_axis(cube, scalar_coord='height') + cube = iris.util.new_axis(cube, scalar_coord="height") cube.transpose((1, 0, 2)) cubes = CubeList([cube]) fixed_cubes = fix.fix_metadata(cubes) @@ -944,26 +1008,26 @@ def test_uas_dim_height10m_already_present(cubes_2d): def test_regular_grid_fix(cubes_regular_grid): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") fixed_cubes = fix.fix_metadata(cubes_regular_grid) cube = 
check_tas_metadata(fixed_cubes) - assert cube.coords('time', dim_coords=True, dimensions=0) - assert cube.coords('latitude', dim_coords=True, dimensions=1) - assert cube.coords('longitude', dim_coords=True, dimensions=2) - assert cube.coords('height', dim_coords=False, dimensions=()) + assert cube.coords("time", dim_coords=True, dimensions=0) + assert cube.coords("latitude", dim_coords=True, dimensions=1) + assert cube.coords("longitude", dim_coords=True, dimensions=2) + assert cube.coords("height", dim_coords=False, dimensions=()) def test_2d_lat_lon_grid_fix(cubes_2d_lat_lon_grid): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") fixed_cubes = fix.fix_metadata(cubes_2d_lat_lon_grid) cube = check_tas_metadata(fixed_cubes) - assert cube.coords('time', dim_coords=True, dimensions=0) - assert cube.coords('latitude', dim_coords=False, dimensions=(1, 2)) - assert cube.coords('longitude', dim_coords=False, dimensions=(1, 2)) - assert cube.coords('height', dim_coords=False, dimensions=()) + assert cube.coords("time", dim_coords=True, dimensions=0) + assert cube.coords("latitude", dim_coords=False, dimensions=(1, 2)) + assert cube.coords("longitude", dim_coords=False, dimensions=(1, 2)) + assert cube.coords("height", dim_coords=False, dimensions=()) # Test ch4Clim (for time dimension time2) @@ -971,37 +1035,37 @@ def test_2d_lat_lon_grid_fix(cubes_2d_lat_lon_grid): def test_get_ch4clim_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'ch4Clim') + fix = Fix.get_fixes("ICON", "ICON", "Amon", "ch4Clim") assert fix == [AllVars(None), GenericFix(None)] def test_ch4clim_fix(cubes_regular_grid): """Test fix.""" cube = cubes_regular_grid[0] - cube.var_name = 'ch4Clim' - cube.units = 'mol mol-1' - cube.coord('time').units = 'no_unit' - cube.coord('time').attributes['invalid_units'] = 'day as %Y%m%d.%f' - cube.coord('time').points = [18500201.0] - cube.coord('time').long_name = 'wrong_time_name' - - fix = get_allvars_fix('Amon', 'ch4Clim') + cube.var_name = "ch4Clim" + cube.units = "mol mol-1" + cube.coord("time").units = "no_unit" + cube.coord("time").attributes["invalid_units"] = "day as %Y%m%d.%f" + cube.coord("time").points = [18500201.0] + cube.coord("time").long_name = "wrong_time_name" + + fix = get_allvars_fix("Amon", "ch4Clim") fixed_cubes = fix.fix_metadata(cubes_regular_grid) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'ch4Clim' - assert cube.standard_name == 'mole_fraction_of_methane_in_air' - assert cube.long_name == 'Mole Fraction of CH4' - assert cube.units == 'mol mol-1' - assert 'positive' not in cube.attributes - - time_coord = cube.coord('time') - assert time_coord.var_name == 'time' - assert time_coord.standard_name == 'time' - assert time_coord.long_name == 'time' + assert cube.var_name == "ch4Clim" + assert cube.standard_name == "mole_fraction_of_methane_in_air" + assert cube.long_name == "Mole Fraction of CH4" + assert cube.units == "mol mol-1" + assert "positive" not in cube.attributes + + time_coord = cube.coord("time") + assert time_coord.var_name == "time" + assert time_coord.standard_name == "time" + assert time_coord.long_name == "time" assert time_coord.units == Unit( - 'days since 1850-01-01', calendar='proleptic_gregorian' + "days since 1850-01-01", calendar="proleptic_gregorian" ) np.testing.assert_allclose(time_coord.points, [15.5]) np.testing.assert_allclose(time_coord.bounds, [[0.0, 31.0]]) @@ -1012,21 +1076,21 @@ def test_ch4clim_fix(cubes_regular_grid): def 
test_empty_standard_name_fix(cubes_2d, monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") # We know that tas has a standard name, but this being native model output # there may be variables with no standard name. The code is designed to # handle this gracefully and here we test it with an artificial, but # realistic case. - monkeypatch.setattr(fix.vardef, 'standard_name', '') + monkeypatch.setattr(fix.vardef, "standard_name", "") fixed_cubes = fix.fix_metadata(cubes_2d) assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'tas' + assert cube.var_name == "tas" assert cube.standard_name is None - assert cube.long_name == 'Near-Surface Air Temperature' - assert cube.units == 'K' - assert 'positive' not in cube.attributes + assert cube.long_name == "Near-Surface Air Temperature" + assert cube.units == "K" + assert "positive" not in cube.attributes # Test automatic addition of missing coordinates @@ -1035,13 +1099,13 @@ def test_empty_standard_name_fix(cubes_2d, monkeypatch): def test_add_time(cubes_2d): """Test fix.""" # Remove time from tas cube to test automatic addition - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - uas_cube = cubes_2d.extract_cube(NameConstraint(var_name='uas')) + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + uas_cube = cubes_2d.extract_cube(NameConstraint(var_name="uas")) tas_cube = tas_cube[0] - tas_cube.remove_coord('time') + tas_cube.remove_coord("time") cubes = CubeList([tas_cube, uas_cube]) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") fixed_cubes = fix.fix_metadata(cubes) cube = check_tas_metadata(fixed_cubes) @@ -1051,12 +1115,14 @@ def test_add_time(cubes_2d): def test_add_time_fail(): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') - cube = Cube(1, var_name='ta', units='K') - cubes = CubeList([ - cube, - Cube(1, var_name='tas', units='K'), - ]) + fix = get_allvars_fix("Amon", "ta") + cube = Cube(1, var_name="ta", units="K") + cubes = CubeList( + [ + cube, + Cube(1, var_name="tas", units="K"), + ] + ) msg = "Cannot add required coordinate 'time' to variable 'ta'" with pytest.raises(ValueError, match=msg): fix._add_time(cube, cubes) @@ -1065,10 +1131,10 @@ def test_add_time_fail(): def test_add_latitude(cubes_2d): """Test fix.""" # Remove latitude from tas cube to test automatic addition - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - tas_cube.remove_coord('latitude') + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + tas_cube.remove_coord("latitude") cubes = CubeList([tas_cube]) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") assert len(fix._horizontal_grids) == 0 fixed_cubes = fix.fix_metadata(cubes) @@ -1083,10 +1149,10 @@ def test_add_latitude(cubes_2d): def test_add_longitude(cubes_2d): """Test fix.""" # Remove longitude from tas cube to test automatic addition - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - tas_cube.remove_coord('longitude') + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + tas_cube.remove_coord("longitude") cubes = CubeList([tas_cube]) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") assert len(fix._horizontal_grids) == 0 fixed_cubes = fix.fix_metadata(cubes) @@ -1101,11 +1167,11 @@ def test_add_longitude(cubes_2d): def test_add_latitude_longitude(cubes_2d): """Test fix.""" # Remove latitude and longitude from tas cube to test automatic addition - 
tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - tas_cube.remove_coord('latitude') - tas_cube.remove_coord('longitude') + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + tas_cube.remove_coord("latitude") + tas_cube.remove_coord("longitude") cubes = CubeList([tas_cube]) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") assert len(fix._horizontal_grids) == 0 fixed_cubes = fix.fix_metadata(cubes) @@ -1121,11 +1187,11 @@ def test_add_latitude_fail(cubes_2d): """Test fix.""" # Remove latitude and grid file attribute from tas cube to test automatic # addition - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - tas_cube.remove_coord('latitude') + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + tas_cube.remove_coord("latitude") tas_cube.attributes = {} cubes = CubeList([tas_cube]) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") msg = "Failed to add missing latitude coordinate to cube" with pytest.raises(ValueError, match=msg): @@ -1136,11 +1202,11 @@ def test_add_longitude_fail(cubes_2d): """Test fix.""" # Remove longitude and grid file attribute from tas cube to test automatic # addition - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - tas_cube.remove_coord('longitude') + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + tas_cube.remove_coord("longitude") tas_cube.attributes = {} cubes = CubeList([tas_cube]) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") msg = "Failed to add missing longitude coordinate to cube" with pytest.raises(ValueError, match=msg): @@ -1149,243 +1215,253 @@ def test_add_longitude_fail(cubes_2d): def test_add_coord_from_grid_file_fail_invalid_coord(): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") msg = r"coord_name must be one of .* got 'invalid_coord_name'" with pytest.raises(ValueError, match=msg): - fix._add_coord_from_grid_file(mock.sentinel.cube, 'invalid_coord_name') + fix._add_coord_from_grid_file(mock.sentinel.cube, "invalid_coord_name") def test_add_coord_from_grid_file_fail_no_url(): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") - msg = ("Cube does not contain the attribute 'grid_file_uri' necessary to " - "download the ICON horizontal grid file") + msg = ( + "Cube does not contain the attribute 'grid_file_uri' necessary to " + "download the ICON horizontal grid file" + ) with pytest.raises(ValueError, match=msg): - fix._add_coord_from_grid_file(Cube(0), 'latitude') + fix._add_coord_from_grid_file(Cube(0), "latitude") def test_add_coord_from_grid_fail_no_unnamed_dim(cubes_2d): """Test fix.""" # Remove latitude from tas cube to test automatic addition - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - tas_cube.remove_coord('latitude') - index_coord = DimCoord(np.arange(8), var_name='ncells') + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + tas_cube.remove_coord("latitude") + index_coord = DimCoord(np.arange(8), var_name="ncells") tas_cube.add_dim_coord(index_coord, 1) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") - msg = ("Cannot determine coordinate dimension for coordinate 'latitude', " - "cube does not contain a single unnamed dimension") + msg = ( + "Cannot determine coordinate dimension for coordinate 'latitude', " + "cube does not contain a single unnamed dimension" + ) with pytest.raises(ValueError, 
match=msg): - fix._add_coord_from_grid_file(tas_cube, 'latitude') + fix._add_coord_from_grid_file(tas_cube, "latitude") def test_add_coord_from_grid_fail_two_unnamed_dims(cubes_2d): """Test fix.""" # Remove latitude from tas cube to test automatic addition - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - tas_cube.remove_coord('latitude') + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + tas_cube.remove_coord("latitude") tas_cube = iris.util.new_axis(tas_cube) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") - msg = ("Cannot determine coordinate dimension for coordinate 'latitude', " - "cube does not contain a single unnamed dimension") + msg = ( + "Cannot determine coordinate dimension for coordinate 'latitude', " + "cube does not contain a single unnamed dimension" + ) with pytest.raises(ValueError, match=msg): - fix._add_coord_from_grid_file(tas_cube, 'latitude') + fix._add_coord_from_grid_file(tas_cube, "latitude") # Test get_horizontal_grid -@mock.patch.object(IconFix, '_get_grid_from_facet', autospec=True) -@mock.patch('esmvalcore.cmor._fixes.icon._base_fixes.requests', autospec=True) +@mock.patch.object(IconFix, "_get_grid_from_facet", autospec=True) +@mock.patch("esmvalcore.cmor._fixes.icon._base_fixes.requests", autospec=True) def test_get_horizontal_grid_from_attr_cached_in_dict( mock_requests, mock_get_grid_from_facet, ): """Test fix.""" - cube = Cube(0, attributes={'grid_file_uri': 'cached_grid_url.nc'}) + cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"}) grid_cube = Cube(0) - fix = get_allvars_fix('Amon', 'tas') - fix._horizontal_grids['cached_grid_url.nc'] = grid_cube - fix._horizontal_grids['grid_from_facet.nc'] = mock.sentinel.wrong_grid + fix = get_allvars_fix("Amon", "tas") + fix._horizontal_grids["cached_grid_url.nc"] = grid_cube + fix._horizontal_grids["grid_from_facet.nc"] = mock.sentinel.wrong_grid grid = fix.get_horizontal_grid(cube) assert len(fix._horizontal_grids) == 2 - assert 'cached_grid_url.nc' in fix._horizontal_grids - assert 'grid_from_facet.nc' in fix._horizontal_grids # has not been used - assert fix._horizontal_grids['cached_grid_url.nc'] == grid + assert "cached_grid_url.nc" in fix._horizontal_grids + assert "grid_from_facet.nc" in fix._horizontal_grids # has not been used + assert fix._horizontal_grids["cached_grid_url.nc"] == grid assert grid == grid_cube assert grid is not grid_cube assert mock_requests.mock_calls == [] mock_get_grid_from_facet.assert_not_called() -@mock.patch.object(IconFix, '_get_grid_from_facet', autospec=True) +@mock.patch.object(IconFix, "_get_grid_from_facet", autospec=True) def test_get_horizontal_grid_from_attr_rootpath( mock_get_grid_from_facet, monkeypatch, tmp_path ): """Test fix.""" - rootpath = deepcopy(CFG['rootpath']) - rootpath['ICON'] = str(tmp_path) - monkeypatch.setitem(CFG, 'rootpath', rootpath) - cube = Cube(0, attributes={'grid_file_uri': 'grid.nc'}) - grid_cube = Cube(0, var_name='test_grid_cube') - (tmp_path / 'amip').mkdir(parents=True, exist_ok=True) - iris.save(grid_cube, tmp_path / 'amip' / 'grid.nc') + rootpath = deepcopy(CFG["rootpath"]) + rootpath["ICON"] = str(tmp_path) + monkeypatch.setitem(CFG, "rootpath", rootpath) + cube = Cube(0, attributes={"grid_file_uri": "grid.nc"}) + grid_cube = Cube(0, var_name="test_grid_cube") + (tmp_path / "amip").mkdir(parents=True, exist_ok=True) + iris.save(grid_cube, tmp_path / "amip" / "grid.nc") - fix = get_allvars_fix('Amon', 'tas') - fix._horizontal_grids['grid_from_facet.nc'] = 
mock.sentinel.wrong_grid + fix = get_allvars_fix("Amon", "tas") + fix._horizontal_grids["grid_from_facet.nc"] = mock.sentinel.wrong_grid grid = fix.get_horizontal_grid(cube) assert len(fix._horizontal_grids) == 2 - assert 'grid.nc' in fix._horizontal_grids - assert 'grid_from_facet.nc' in fix._horizontal_grids # has not been used - assert fix._horizontal_grids['grid.nc'] == grid + assert "grid.nc" in fix._horizontal_grids + assert "grid_from_facet.nc" in fix._horizontal_grids # has not been used + assert fix._horizontal_grids["grid.nc"] == grid assert len(grid) == 1 - assert grid[0].var_name == 'test_grid_cube' + assert grid[0].var_name == "test_grid_cube" assert grid[0].shape == () mock_get_grid_from_facet.assert_not_called() -@mock.patch.object(IconFix, '_get_grid_from_facet', autospec=True) -@mock.patch('esmvalcore.cmor._fixes.icon._base_fixes.requests', autospec=True) +@mock.patch.object(IconFix, "_get_grid_from_facet", autospec=True) +@mock.patch("esmvalcore.cmor._fixes.icon._base_fixes.requests", autospec=True) def test_get_horizontal_grid_from_attr_cached_in_file( mock_requests, mock_get_grid_from_facet, tmp_path, ): """Test fix.""" - cube = Cube(0, attributes={ - 'grid_file_uri': 'https://temporary.url/this/is/the/grid_file.nc'}) - fix = get_allvars_fix('Amon', 'tas') + cube = Cube( + 0, + attributes={ + "grid_file_uri": "https://temporary.url/this/is/the/grid_file.nc" + }, + ) + fix = get_allvars_fix("Amon", "tas") assert len(fix._horizontal_grids) == 0 # Save temporary grid file - grid_cube = Cube(0, var_name='grid') - iris.save(grid_cube, str(tmp_path / 'grid_file.nc')) + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, str(tmp_path / "grid_file.nc")) grid = fix.get_horizontal_grid(cube) assert isinstance(grid, CubeList) assert len(grid) == 1 - assert grid[0].var_name == 'grid' + assert grid[0].var_name == "grid" assert grid[0].shape == () assert len(fix._horizontal_grids) == 1 - assert 'grid_file.nc' in fix._horizontal_grids - assert fix._horizontal_grids['grid_file.nc'] == grid + assert "grid_file.nc" in fix._horizontal_grids + assert fix._horizontal_grids["grid_file.nc"] == grid assert mock_requests.mock_calls == [] mock_get_grid_from_facet.assert_not_called() -@mock.patch.object(IconFix, '_get_grid_from_facet', autospec=True) +@mock.patch.object(IconFix, "_get_grid_from_facet", autospec=True) def test_get_horizontal_grid_from_attr_cache_file_too_old( mock_get_grid_from_facet, tmp_path, monkeypatch, ): """Test fix.""" - cube = Cube(0, attributes={'grid_file_uri': TEST_GRID_FILE_URI}) - fix = get_allvars_fix('Amon', 'tas') + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Amon", "tas") assert len(fix._horizontal_grids) == 0 # Save temporary grid file - grid_cube = Cube(0, var_name='grid') - iris.save(grid_cube, str(tmp_path / 'icon_grid.nc')) + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, str(tmp_path / "icon_grid.nc")) # Temporary overwrite default cache location for downloads and cache # validity duration - monkeypatch.setattr(fix, 'CACHE_VALIDITY', -1) + monkeypatch.setattr(fix, "CACHE_VALIDITY", -1) grid = fix.get_horizontal_grid(cube) assert isinstance(grid, CubeList) assert len(grid) == 4 var_names = [cube.var_name for cube in grid] - assert 'cell_area' in var_names - assert 'dual_area' in var_names - assert 'vertex_index' in var_names - assert 'vertex_of_cell' in var_names + assert "cell_area" in var_names + assert "dual_area" in var_names + assert "vertex_index" in var_names + assert "vertex_of_cell" 
in var_names assert len(fix._horizontal_grids) == 1 assert TEST_GRID_FILE_NAME in fix._horizontal_grids assert fix._horizontal_grids[TEST_GRID_FILE_NAME] == grid mock_get_grid_from_facet.assert_not_called() -@mock.patch.object(IconFix, '_get_grid_from_cube_attr', autospec=True) +@mock.patch.object(IconFix, "_get_grid_from_cube_attr", autospec=True) def test_get_horizontal_grid_from_facet_cached_in_dict( mock_get_grid_from_cube_attr, tmp_path, ): """Test fix.""" - session = CFG.start_session('my session') - session['auxiliary_data_dir'] = tmp_path + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path # Save temporary grid file (this will not be used; however, it is necessary # to not raise a FileNotFoundError) - grid_path = 'grid.nc' - wrong_grid_cube = Cube(0, var_name='wrong_grid') - iris.save(wrong_grid_cube, tmp_path / 'grid.nc') + grid_path = "grid.nc" + wrong_grid_cube = Cube(0, var_name="wrong_grid") + iris.save(wrong_grid_cube, tmp_path / "grid.nc") # Make sure that grid specified by cube attribute is NOT used - cube = Cube(0, attributes={'grid_file_uri': 'cached_grid_url.nc'}) - grid_cube = Cube(0, var_name='grid') - fix = get_allvars_fix('Amon', 'tas', session=session) - fix.extra_facets['horizontal_grid'] = grid_path - fix._horizontal_grids['cached_grid_url.nc'] = mock.sentinel.wrong_grid - fix._horizontal_grids['grid.nc'] = grid_cube + cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"}) + grid_cube = Cube(0, var_name="grid") + fix = get_allvars_fix("Amon", "tas", session=session) + fix.extra_facets["horizontal_grid"] = grid_path + fix._horizontal_grids["cached_grid_url.nc"] = mock.sentinel.wrong_grid + fix._horizontal_grids["grid.nc"] = grid_cube grid = fix.get_horizontal_grid(cube) assert len(fix._horizontal_grids) == 2 - assert 'cached_grid_url.nc' in fix._horizontal_grids # has not been used - assert 'grid.nc' in fix._horizontal_grids - assert fix._horizontal_grids['grid.nc'] == grid + assert "cached_grid_url.nc" in fix._horizontal_grids # has not been used + assert "grid.nc" in fix._horizontal_grids + assert fix._horizontal_grids["grid.nc"] == grid assert grid == grid_cube assert grid is not grid_cube mock_get_grid_from_cube_attr.assert_not_called() -@pytest.mark.parametrize('grid_path', ['{tmp_path}/grid.nc', 'grid.nc']) -@mock.patch.object(IconFix, '_get_grid_from_cube_attr', autospec=True) +@pytest.mark.parametrize("grid_path", ["{tmp_path}/grid.nc", "grid.nc"]) +@mock.patch.object(IconFix, "_get_grid_from_cube_attr", autospec=True) def test_get_horizontal_grid_from_facet( mock_get_grid_from_cube_attr, grid_path, tmp_path, ): """Test fix.""" - session = CFG.start_session('my session') - session['auxiliary_data_dir'] = tmp_path + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path # Make sure that grid specified by cube attribute is NOT used - cube = Cube(0, attributes={'grid_file_uri': 'cached_grid_url.nc'}) + cube = Cube(0, attributes={"grid_file_uri": "cached_grid_url.nc"}) # Save temporary grid file grid_path = grid_path.format(tmp_path=tmp_path) - grid_cube = Cube(0, var_name='grid') - iris.save(grid_cube, tmp_path / 'grid.nc') + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, tmp_path / "grid.nc") - fix = get_allvars_fix('Amon', 'tas', session=session) - fix.extra_facets['horizontal_grid'] = grid_path - fix._horizontal_grids['cached_grid_url.nc'] = mock.sentinel.wrong_grid + fix = get_allvars_fix("Amon", "tas", session=session) + fix.extra_facets["horizontal_grid"] = 
grid_path + fix._horizontal_grids["cached_grid_url.nc"] = mock.sentinel.wrong_grid grid = fix.get_horizontal_grid(cube) assert isinstance(grid, CubeList) assert len(grid) == 1 - assert grid[0].var_name == 'grid' + assert grid[0].var_name == "grid" assert len(fix._horizontal_grids) == 2 - assert 'cached_grid_url.nc' in fix._horizontal_grids # has not been used - assert 'grid.nc' in fix._horizontal_grids - assert fix._horizontal_grids['grid.nc'] == grid + assert "cached_grid_url.nc" in fix._horizontal_grids # has not been used + assert "grid.nc" in fix._horizontal_grids + assert fix._horizontal_grids["grid.nc"] == grid mock_get_grid_from_cube_attr.assert_not_called() def test_get_horizontal_grid_from_facet_fail(tmp_path): """Test fix.""" - session = CFG.start_session('my session') - session['auxiliary_data_dir'] = tmp_path + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path cube = Cube(0) - fix = get_allvars_fix('Amon', 'tas', session=session) - fix.extra_facets['horizontal_grid'] = '/this/does/not/exist.nc' + fix = get_allvars_fix("Amon", "tas", session=session) + fix.extra_facets["horizontal_grid"] = "/this/does/not/exist.nc" with pytest.raises(FileNotFoundError): fix.get_horizontal_grid(cube) @@ -1396,25 +1472,33 @@ def test_get_horizontal_grid_from_facet_fail(tmp_path): def test_only_time(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") # We know that ta has dimensions time, plev19, latitude, longitude, but the # ICON CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of ta to create # an artificial, but realistic test case. - coord_info = CoordinateInfo('time') - coord_info.standard_name = 'time' - monkeypatch.setattr(fix.vardef, 'coordinates', {'time': coord_info}) + coord_info = CoordinateInfo("time") + coord_info.standard_name = "time" + monkeypatch.setattr(fix.vardef, "coordinates", {"time": coord_info}) # Create cube with only a single dimension - time_coord = DimCoord([0.0, 31.0], - var_name='time', - standard_name='time', - long_name='time', - units='days since 1850-01-01') - cubes = CubeList([ - Cube([1, 1], var_name='ta', units='K', - dim_coords_and_dims=[(time_coord, 0)]), - ]) + time_coord = DimCoord( + [0.0, 31.0], + var_name="time", + standard_name="time", + long_name="time", + units="days since 1850-01-01", + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="ta", + units="K", + dim_coords_and_dims=[(time_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -1425,17 +1509,18 @@ def test_only_time(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check time metadata - assert cube.coords('time') - new_time_coord = cube.coord('time', dim_coords=True) - assert new_time_coord.var_name == 'time' - assert new_time_coord.standard_name == 'time' - assert new_time_coord.long_name == 'time' - assert new_time_coord.units == 'days since 1850-01-01' + assert cube.coords("time") + new_time_coord = cube.coord("time", dim_coords=True) + assert new_time_coord.var_name == "time" + assert new_time_coord.standard_name == "time" + assert new_time_coord.long_name == "time" + assert new_time_coord.units == "days since 1850-01-01" # Check time data np.testing.assert_allclose(new_time_coord.points, [-15.5, 15.5]) - np.testing.assert_allclose(new_time_coord.bounds, - [[-31.0, 0.0], [0.0, 31.0]]) + np.testing.assert_allclose( + new_time_coord.bounds, [[-31.0, 0.0], [0.0, 31.0]] + ) # Check 
that no mesh has been created assert cube.mesh is None @@ -1443,24 +1528,29 @@ def test_only_time(monkeypatch): def test_only_height(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") # We know that ta has dimensions time, plev19, latitude, longitude, but the # ICON CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of ta to create # an artificial, but realistic test case. - coord_info = CoordinateInfo('plev19') - coord_info.standard_name = 'air_pressure' - monkeypatch.setattr(fix.vardef, 'coordinates', {'plev19': coord_info}) + coord_info = CoordinateInfo("plev19") + coord_info.standard_name = "air_pressure" + monkeypatch.setattr(fix.vardef, "coordinates", {"plev19": coord_info}) # Create cube with only a single dimension - height_coord = DimCoord([1000.0, 100.0], - var_name='height', - standard_name='height', - units='cm') - cubes = CubeList([ - Cube([1, 1], var_name='ta', units='K', - dim_coords_and_dims=[(height_coord, 0)]), - ]) + height_coord = DimCoord( + [1000.0, 100.0], var_name="height", standard_name="height", units="cm" + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="ta", + units="K", + dim_coords_and_dims=[(height_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -1471,20 +1561,20 @@ def test_only_height(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check height metadata - assert cube.coords('height', dim_coords=True) - new_height_coord = cube.coord('height') - assert new_height_coord.var_name == 'height' - assert new_height_coord.standard_name == 'height' - assert new_height_coord.long_name == 'height' - assert new_height_coord.units == 'm' - assert new_height_coord.attributes == {'positive': 'up'} + assert cube.coords("height", dim_coords=True) + new_height_coord = cube.coord("height") + assert new_height_coord.var_name == "height" + assert new_height_coord.standard_name == "height" + assert new_height_coord.long_name == "height" + assert new_height_coord.units == "m" + assert new_height_coord.attributes == {"positive": "up"} # Check height data np.testing.assert_allclose(new_height_coord.points, [1.0, 10.0]) assert new_height_coord.bounds is None # Check that no air_pressure coordinate has been created - assert not cube.coords('air_pressure') + assert not cube.coords("air_pressure") # Check that no mesh has been created assert cube.mesh is None @@ -1492,24 +1582,29 @@ def test_only_height(monkeypatch): def test_only_latitude(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") # We know that ta has dimensions time, plev19, latitude, longitude, but the # ICON CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of ta to create # an artificial, but realistic test case. 
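     # Note: after the monkeypatch below, fix.vardef.coordinates is just
     # {"latitude": coord_info}, so the fix only looks for a latitude
     # coordinate in this test.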
- coord_info = CoordinateInfo('latitude') - coord_info.standard_name = 'latitude' - monkeypatch.setattr(fix.vardef, 'coordinates', {'latitude': coord_info}) + coord_info = CoordinateInfo("latitude") + coord_info.standard_name = "latitude" + monkeypatch.setattr(fix.vardef, "coordinates", {"latitude": coord_info}) # Create cube with only a single dimension - lat_coord = DimCoord([0.0, 10.0], - var_name='lat', - standard_name='latitude', - units='degrees') - cubes = CubeList([ - Cube([1, 1], var_name='ta', units='K', - dim_coords_and_dims=[(lat_coord, 0)]), - ]) + lat_coord = DimCoord( + [0.0, 10.0], var_name="lat", standard_name="latitude", units="degrees" + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="ta", + units="K", + dim_coords_and_dims=[(lat_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -1520,12 +1615,12 @@ def test_only_latitude(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check latitude metadata - assert cube.coords('latitude', dim_coords=True) - new_lat_coord = cube.coord('latitude') - assert new_lat_coord.var_name == 'lat' - assert new_lat_coord.standard_name == 'latitude' - assert new_lat_coord.long_name == 'latitude' - assert new_lat_coord.units == 'degrees_north' + assert cube.coords("latitude", dim_coords=True) + new_lat_coord = cube.coord("latitude") + assert new_lat_coord.var_name == "lat" + assert new_lat_coord.standard_name == "latitude" + assert new_lat_coord.long_name == "latitude" + assert new_lat_coord.units == "degrees_north" # Check latitude data np.testing.assert_allclose(new_lat_coord.points, [0.0, 10.0]) @@ -1537,24 +1632,32 @@ def test_only_latitude(monkeypatch): def test_only_longitude(monkeypatch): """Test fix.""" - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") # We know that ta has dimensions time, plev19, latitude, longitude, but the # ICON CMORizer is designed to check for the presence of each dimension # individually. To test this, remove all but one dimension of ta to create # an artificial, but realistic test case. 
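     # Note: the longitude coordinate created below deliberately uses the
     # generic units "degrees"; the assertions further down check that the
     # fix converts them to "degrees_east".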
- coord_info = CoordinateInfo('longitude') - coord_info.standard_name = 'longitude' - monkeypatch.setattr(fix.vardef, 'coordinates', {'longitude': coord_info}) + coord_info = CoordinateInfo("longitude") + coord_info.standard_name = "longitude" + monkeypatch.setattr(fix.vardef, "coordinates", {"longitude": coord_info}) # Create cube with only a single dimension - lon_coord = DimCoord([0.0, 180.0], - var_name='lon', - standard_name='longitude', - units='degrees') - cubes = CubeList([ - Cube([1, 1], var_name='ta', units='K', - dim_coords_and_dims=[(lon_coord, 0)]), - ]) + lon_coord = DimCoord( + [0.0, 180.0], + var_name="lon", + standard_name="longitude", + units="degrees", + ) + cubes = CubeList( + [ + Cube( + [1, 1], + var_name="ta", + units="K", + dim_coords_and_dims=[(lon_coord, 0)], + ), + ] + ) fixed_cubes = fix.fix_metadata(cubes) # Check cube metadata @@ -1565,12 +1668,12 @@ def test_only_longitude(monkeypatch): np.testing.assert_equal(cube.data, [1, 1]) # Check longitude metadata - assert cube.coords('longitude', dim_coords=True) - new_lon_coord = cube.coord('longitude') - assert new_lon_coord.var_name == 'lon' - assert new_lon_coord.standard_name == 'longitude' - assert new_lon_coord.long_name == 'longitude' - assert new_lon_coord.units == 'degrees_east' + assert cube.coords("longitude", dim_coords=True) + new_lon_coord = cube.coord("longitude") + assert new_lon_coord.var_name == "lon" + assert new_lon_coord.standard_name == "longitude" + assert new_lon_coord.long_name == "longitude" + assert new_lon_coord.units == "degrees_east" # Check longitude data np.testing.assert_allclose(new_lon_coord.points, [0.0, 180.0]) @@ -1585,7 +1688,7 @@ def test_only_longitude(monkeypatch): def test_var_not_available_pr(cubes_2d): """Test fix.""" - fix = get_allvars_fix('Amon', 'pr') + fix = get_allvars_fix("Amon", "pr") msg = "Variable 'pr' used to extract 'pr' is not available in input file" with pytest.raises(ValueError, match=msg): fix.fix_metadata(cubes_2d) @@ -1596,9 +1699,9 @@ def test_var_not_available_pr(cubes_2d): def test_invalid_time_units(cubes_2d): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") for cube in cubes_2d: - cube.coord('time').attributes['invalid_units'] = 'month as %Y%m%d.%f' + cube.coord("time").attributes["invalid_units"] = "month as %Y%m%d.%f" msg = "Expected time units" with pytest.raises(ValueError, match=msg): fix.fix_metadata(cubes_2d) @@ -1609,16 +1712,16 @@ def test_invalid_time_units(cubes_2d): def test_hourly_data(cubes_2d): """Test fix.""" - fix = get_allvars_fix('Amon', 'tas') - fix.extra_facets['frequency'] = '1hr' + fix = get_allvars_fix("Amon", "tas") + fix.extra_facets["frequency"] = "1hr" for cube in cubes_2d: - cube.coord('time').points = [20041104.5833333] + cube.coord("time").points = [20041104.5833333] fixed_cubes = fix.fix_metadata(cubes_2d) cube = check_tas_metadata(fixed_cubes) - date = cube.coord('time').units.num2date(cube.coord('time').points) - date_bnds = cube.coord('time').units.num2date(cube.coord('time').bounds) + date = cube.coord("time").units.num2date(cube.coord("time").points) + date_bnds = cube.coord("time").units.num2date(cube.coord("time").bounds) np.testing.assert_array_equal(date, [datetime(2004, 11, 4, 13, 30)]) np.testing.assert_array_equal( date_bnds, [[datetime(2004, 11, 4, 13), datetime(2004, 11, 4, 14)]] @@ -1626,7 +1729,7 @@ def test_hourly_data(cubes_2d): @pytest.mark.parametrize( - 'bounds', + "bounds", [ None, [ @@ -1640,23 +1743,23 @@ def 
test_6hourly_data_multiple_points(bounds): time_coord = DimCoord( [20220101, 20220101.25], bounds=bounds, - standard_name='time', - attributes={'invalid_units': 'day as %Y%m%d.%f'}, + standard_name="time", + attributes={"invalid_units": "day as %Y%m%d.%f"}, ) cube = Cube( [1, 2], - var_name='tas', - units='K', + var_name="tas", + units="K", dim_coords_and_dims=[(time_coord, 0)], ) cubes = CubeList([cube]) - fix = get_allvars_fix('Amon', 'tas') - fix.extra_facets['frequency'] = '6hr' + fix = get_allvars_fix("Amon", "tas") + fix.extra_facets["frequency"] = "6hr" fixed_cube = fix._fix_time(cube, cubes) - points = fixed_cube.coord('time').units.num2date(cube.coord('time').points) - bounds = fixed_cube.coord('time').units.num2date(cube.coord('time').bounds) + points = fixed_cube.coord("time").units.num2date(cube.coord("time").points) + bounds = fixed_cube.coord("time").units.num2date(cube.coord("time").bounds) np.testing.assert_array_equal( points, [datetime(2021, 12, 31, 21), datetime(2022, 1, 1, 3)], @@ -1674,24 +1777,24 @@ def test_subhourly_data_no_shift(): """Test fix.""" time_coord = DimCoord( [0.5, 1.0], - standard_name='time', - units=Unit('hours since 2022-01-01', calendar='proleptic_gregorian'), + standard_name="time", + units=Unit("hours since 2022-01-01", calendar="proleptic_gregorian"), ) cube = Cube( [1, 2], - var_name='tas', - units='K', + var_name="tas", + units="K", dim_coords_and_dims=[(time_coord, 0)], ) cubes = CubeList([cube]) - fix = get_allvars_fix('Amon', 'tas') - fix.extra_facets['frequency'] = 'subhr' - fix.extra_facets['shift_time'] = False + fix = get_allvars_fix("Amon", "tas") + fix.extra_facets["frequency"] = "subhr" + fix.extra_facets["shift_time"] = False fixed_cube = fix._fix_time(cube, cubes) - points = fixed_cube.coord('time').units.num2date(cube.coord('time').points) - bounds = fixed_cube.coord('time').units.num2date(cube.coord('time').bounds) + points = fixed_cube.coord("time").units.num2date(cube.coord("time").points) + bounds = fixed_cube.coord("time").units.num2date(cube.coord("time").bounds) np.testing.assert_array_equal( points, [datetime(2022, 1, 1, 0, 30), datetime(2022, 1, 1, 1)], @@ -1709,40 +1812,40 @@ def test_subhourly_data_no_shift(): @pytest.mark.parametrize( - 'frequency,dt_in,dt_out,bounds', + "frequency,dt_in,dt_out,bounds", [ ( - 'dec', + "dec", [(2000, 1, 1)], [(1995, 1, 1)], [[(1990, 1, 1), (2000, 1, 1)]], ), ( - 'yr', + "yr", [(2000, 1, 1), (2001, 1, 1)], [(1999, 7, 2, 12), (2000, 7, 2)], [[(1999, 1, 1), (2000, 1, 1)], [(2000, 1, 1), (2001, 1, 1)]], ), ( - 'mon', + "mon", [(2000, 1, 1)], [(1999, 12, 16, 12)], [[(1999, 12, 1), (2000, 1, 1)]], ), ( - 'mon', + "mon", [(2000, 11, 30, 23, 45), (2000, 12, 31, 23)], [(2000, 11, 16), (2000, 12, 16, 12)], [[(2000, 11, 1), (2000, 12, 1)], [(2000, 12, 1), (2001, 1, 1)]], ), ( - 'day', + "day", [(2000, 1, 1, 12)], [(2000, 1, 1)], [[(1999, 12, 31, 12), (2000, 1, 1, 12)]], ), ( - '6hr', + "6hr", [(2000, 1, 5, 14), (2000, 1, 5, 20)], [(2000, 1, 5, 11), (2000, 1, 5, 17)], [ @@ -1751,13 +1854,13 @@ def test_subhourly_data_no_shift(): ], ), ( - '3hr', + "3hr", [(2000, 1, 1)], [(1999, 12, 31, 22, 30)], [[(1999, 12, 31, 21), (2000, 1, 1)]], ), ( - '1hr', + "1hr", [(2000, 1, 5, 14), (2000, 1, 5, 15)], [(2000, 1, 5, 13, 30), (2000, 1, 5, 14, 30)], [ @@ -1769,19 +1872,19 @@ def test_subhourly_data_no_shift(): ) def test_shift_time_coord(frequency, dt_in, dt_out, bounds): """Test ``_shift_time_coord``.""" - cube = Cube(0, cell_methods=[CellMethod('mean', 'time')]) + cube = Cube(0, 
cell_methods=[CellMethod("mean", "time")]) datetimes = [datetime(*dt) for dt in dt_in] - time_units = Unit('days since 1950-01-01', calendar='proleptic_gregorian') + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") time_coord = DimCoord( time_units.date2num(datetimes), - standard_name='time', - var_name='time', - long_name='time', + standard_name="time", + var_name="time", + long_name="time", units=time_units, ) - fix = get_allvars_fix('Amon', 'tas') - fix.extra_facets['frequency'] = frequency + fix = get_allvars_fix("Amon", "tas") + fix.extra_facets["frequency"] = frequency fix._shift_time_coord(cube, time_coord) @@ -1796,32 +1899,32 @@ def test_shift_time_coord(frequency, dt_in, dt_out, bounds): @pytest.mark.parametrize( - 'frequency,dt_in', + "frequency,dt_in", [ - ('dec', [(2000, 1, 15)]), - ('yr', [(2000, 1, 1), (2001, 1, 1)]), - ('mon', [(2000, 6, 15)]), - ('day', [(2000, 1, 1), (2001, 1, 2)]), - ('6hr', [(2000, 6, 15, 12)]), - ('3hr', [(2000, 1, 1, 4), (2000, 1, 1, 7)]), - ('1hr', [(2000, 1, 1, 4), (2000, 1, 1, 5)]), + ("dec", [(2000, 1, 15)]), + ("yr", [(2000, 1, 1), (2001, 1, 1)]), + ("mon", [(2000, 6, 15)]), + ("day", [(2000, 1, 1), (2001, 1, 2)]), + ("6hr", [(2000, 6, 15, 12)]), + ("3hr", [(2000, 1, 1, 4), (2000, 1, 1, 7)]), + ("1hr", [(2000, 1, 1, 4), (2000, 1, 1, 5)]), ], ) def test_shift_time_point_measurement(frequency, dt_in): """Test ``_shift_time_coord``.""" - cube = Cube(0, cell_methods=[CellMethod('point', 'time')]) + cube = Cube(0, cell_methods=[CellMethod("point", "time")]) datetimes = [datetime(*dt) for dt in dt_in] - time_units = Unit('days since 1950-01-01', calendar='proleptic_gregorian') + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") time_coord = DimCoord( time_units.date2num(datetimes), - standard_name='time', - var_name='time', - long_name='time', + standard_name="time", + var_name="time", + long_name="time", units=time_units, ) - fix = get_allvars_fix('Amon', 'tas') - fix.extra_facets['frequency'] = frequency + fix = get_allvars_fix("Amon", "tas") + fix.extra_facets["frequency"] = frequency fix._shift_time_coord(cube, time_coord) @@ -1832,46 +1935,44 @@ def test_shift_time_point_measurement(frequency, dt_in): @pytest.mark.parametrize( - 'frequency', ['dec', 'yr', 'yrPt', 'mon', 'monC', 'monPt'] + "frequency", ["dec", "yr", "yrPt", "mon", "monC", "monPt"] ) def test_shift_time_coord_hourly_data_low_freq_fail(frequency): """Test ``_shift_time_coord``.""" - cube = Cube(0, cell_methods=[CellMethod('mean', 'time')]) - time_units = Unit('hours since 1950-01-01', calendar='proleptic_gregorian') + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + time_units = Unit("hours since 1950-01-01", calendar="proleptic_gregorian") time_coord = DimCoord( [1, 2, 3], - standard_name='time', - var_name='time', - long_name='time', + standard_name="time", + var_name="time", + long_name="time", units=time_units, ) - fix = get_allvars_fix('Amon', 'tas') - fix.extra_facets['frequency'] = frequency + fix = get_allvars_fix("Amon", "tas") + fix.extra_facets["frequency"] = frequency - msg = ( - "Cannot shift time coordinate: Rounding to closest day failed." - ) + msg = "Cannot shift time coordinate: Rounding to closest day failed." 
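     # The three points lie at 01:00, 02:00 and 03:00 on 1950-01-01, so they
     # cannot be rounded exactly to a day boundary, which is what the error
     # message below refers to.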
with pytest.raises(ValueError, match=msg): fix._shift_time_coord(cube, time_coord) @pytest.mark.parametrize( - 'frequency', ['dec', 'yr', 'yrPt', 'mon', 'monC', 'monPt'] + "frequency", ["dec", "yr", "yrPt", "mon", "monC", "monPt"] ) def test_shift_time_coord_not_first_of_month(frequency): """Test ``_get_previous_timestep``.""" - cube = Cube(0, cell_methods=[CellMethod('mean', 'time')]) - time_units = Unit('days since 1950-01-01', calendar='proleptic_gregorian') + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") time_coord = DimCoord( [1.5], - standard_name='time', - var_name='time', - long_name='time', + standard_name="time", + var_name="time", + long_name="time", units=time_units, ) - fix = get_allvars_fix('Amon', 'tas') - fix.extra_facets['frequency'] = frequency + fix = get_allvars_fix("Amon", "tas") + fix.extra_facets["frequency"] = frequency msg = ( "Cannot shift time coordinate: expected first of the month at 00:00:00" @@ -1880,20 +1981,20 @@ def test_shift_time_coord_not_first_of_month(frequency): fix._shift_time_coord(cube, time_coord) -@pytest.mark.parametrize('frequency', ['fx', 'subhrPt', 'invalid_freq']) +@pytest.mark.parametrize("frequency", ["fx", "subhrPt", "invalid_freq"]) def test_shift_time_coord_invalid_freq(frequency): """Test ``_get_previous_timestep``.""" - cube = Cube(0, cell_methods=[CellMethod('mean', 'time')]) - time_units = Unit('days since 1950-01-01', calendar='proleptic_gregorian') + cube = Cube(0, cell_methods=[CellMethod("mean", "time")]) + time_units = Unit("days since 1950-01-01", calendar="proleptic_gregorian") time_coord = DimCoord( [1.5, 2.5], - standard_name='time', - var_name='time', - long_name='time', + standard_name="time", + var_name="time", + long_name="time", units=time_units, ) - fix = get_allvars_fix('Amon', 'tas') - fix.extra_facets['frequency'] = frequency + fix = get_allvars_fix("Amon", "tas") + fix.extra_facets["frequency"] = frequency msg = ( "Cannot shift time coordinate: failed to determine previous time step" @@ -1906,36 +2007,36 @@ def test_shift_time_coord_invalid_freq(frequency): @pytest.mark.parametrize( - 'frequency,datetime_in,datetime_out', + "frequency,datetime_in,datetime_out", [ - ('dec', (2000, 1, 1), (1990, 1, 1)), - ('yr', (2000, 1, 1), (1999, 1, 1)), - ('yrPt', (2001, 6, 1), (2000, 6, 1)), - ('mon', (2001, 1, 1), (2000, 12, 1)), - ('mon', (2001, 2, 1), (2001, 1, 1)), - ('mon', (2001, 3, 1), (2001, 2, 1)), - ('mon', (2001, 4, 1), (2001, 3, 1)), - ('monC', (2000, 5, 1), (2000, 4, 1)), - ('monC', (2000, 6, 1), (2000, 5, 1)), - ('monC', (2000, 7, 1), (2000, 6, 1)), - ('monC', (2000, 8, 1), (2000, 7, 1)), - ('monPt', (2002, 9, 1), (2002, 8, 1)), - ('monPt', (2002, 10, 1), (2002, 9, 1)), - ('monPt', (2002, 11, 1), (2002, 10, 1)), - ('monPt', (2002, 12, 1), (2002, 11, 1)), - ('day', (2000, 1, 1), (1999, 12, 31)), - ('day', (2000, 3, 1), (2000, 2, 29)), - ('day', (2187, 3, 14), (2187, 3, 13)), - ('6hr', (2000, 3, 14, 15), (2000, 3, 14, 9)), - ('6hrPt', (2000, 1, 1), (1999, 12, 31, 18)), - ('6hrCM', (2000, 1, 1, 1), (1999, 12, 31, 19)), - ('3hr', (2000, 3, 14, 15), (2000, 3, 14, 12)), - ('3hrPt', (2000, 1, 1), (1999, 12, 31, 21)), - ('3hrCM', (2000, 1, 1, 1), (1999, 12, 31, 22)), - ('1hr', (2000, 3, 14, 15), (2000, 3, 14, 14)), - ('1hrPt', (2000, 1, 1), (1999, 12, 31, 23)), - ('1hrCM', (2000, 1, 1, 1), (2000, 1, 1)), - ('hr', (2000, 3, 14), (2000, 3, 13, 23)), + ("dec", (2000, 1, 1), (1990, 1, 1)), + ("yr", (2000, 1, 1), (1999, 1, 1)), + ("yrPt", 
(2001, 6, 1), (2000, 6, 1)), + ("mon", (2001, 1, 1), (2000, 12, 1)), + ("mon", (2001, 2, 1), (2001, 1, 1)), + ("mon", (2001, 3, 1), (2001, 2, 1)), + ("mon", (2001, 4, 1), (2001, 3, 1)), + ("monC", (2000, 5, 1), (2000, 4, 1)), + ("monC", (2000, 6, 1), (2000, 5, 1)), + ("monC", (2000, 7, 1), (2000, 6, 1)), + ("monC", (2000, 8, 1), (2000, 7, 1)), + ("monPt", (2002, 9, 1), (2002, 8, 1)), + ("monPt", (2002, 10, 1), (2002, 9, 1)), + ("monPt", (2002, 11, 1), (2002, 10, 1)), + ("monPt", (2002, 12, 1), (2002, 11, 1)), + ("day", (2000, 1, 1), (1999, 12, 31)), + ("day", (2000, 3, 1), (2000, 2, 29)), + ("day", (2187, 3, 14), (2187, 3, 13)), + ("6hr", (2000, 3, 14, 15), (2000, 3, 14, 9)), + ("6hrPt", (2000, 1, 1), (1999, 12, 31, 18)), + ("6hrCM", (2000, 1, 1, 1), (1999, 12, 31, 19)), + ("3hr", (2000, 3, 14, 15), (2000, 3, 14, 12)), + ("3hrPt", (2000, 1, 1), (1999, 12, 31, 21)), + ("3hrCM", (2000, 1, 1, 1), (1999, 12, 31, 22)), + ("1hr", (2000, 3, 14, 15), (2000, 3, 14, 14)), + ("1hrPt", (2000, 1, 1), (1999, 12, 31, 23)), + ("1hrCM", (2000, 1, 1, 1), (2000, 1, 1)), + ("hr", (2000, 3, 14), (2000, 3, 13, 23)), ], ) def test_get_previous_timestep(frequency, datetime_in, datetime_out): @@ -1943,8 +2044,8 @@ def test_get_previous_timestep(frequency, datetime_in, datetime_out): datetime_in = datetime(*datetime_in) datetime_out = datetime(*datetime_out) - fix = get_allvars_fix('Amon', 'tas') - fix.extra_facets['frequency'] = frequency + fix = get_allvars_fix("Amon", "tas") + fix.extra_facets["frequency"] = frequency new_datetime = fix._get_previous_timestep(datetime_in) @@ -1954,21 +2055,21 @@ def test_get_previous_timestep(frequency, datetime_in, datetime_out): # Test mesh creation raises warning because bounds do not match vertices -@mock.patch('esmvalcore.cmor._fixes.icon._base_fixes.logger', autospec=True) +@mock.patch("esmvalcore.cmor._fixes.icon._base_fixes.logger", autospec=True) def test_get_mesh_fail_invalid_clat_bounds(mock_logger, cubes_2d): """Test fix.""" # Slightly modify latitude bounds from tas cube to make mesh creation fail - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - lat_bnds = tas_cube.coord('latitude').bounds.copy() + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + lat_bnds = tas_cube.coord("latitude").bounds.copy() lat_bnds[0, 0] = 40.0 - tas_cube.coord('latitude').bounds = lat_bnds + tas_cube.coord("latitude").bounds = lat_bnds cubes = CubeList([tas_cube]) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") fixed_cubes = fix.fix_metadata(cubes) cube = check_tas_metadata(fixed_cubes) - assert cube.coord('latitude').bounds[0, 0] != 40.0 + assert cube.coord("latitude").bounds[0, 0] != 40.0 mock_logger.warning.assert_called_once_with( "Latitude bounds of the face coordinate ('clat_vertices' in " "the grid file) differ from the corresponding values " @@ -1978,21 +2079,21 @@ def test_get_mesh_fail_invalid_clat_bounds(mock_logger, cubes_2d): ) -@mock.patch('esmvalcore.cmor._fixes.icon._base_fixes.logger', autospec=True) +@mock.patch("esmvalcore.cmor._fixes.icon._base_fixes.logger", autospec=True) def test_get_mesh_fail_invalid_clon_bounds(mock_logger, cubes_2d): """Test fix.""" # Slightly modify longitude bounds from tas cube to make mesh creation fail - tas_cube = cubes_2d.extract_cube(NameConstraint(var_name='tas')) - lon_bnds = tas_cube.coord('longitude').bounds.copy() + tas_cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + lon_bnds = tas_cube.coord("longitude").bounds.copy() lon_bnds[0, 1] = 40.0 - 
tas_cube.coord('longitude').bounds = lon_bnds + tas_cube.coord("longitude").bounds = lon_bnds cubes = CubeList([tas_cube]) - fix = get_allvars_fix('Amon', 'tas') + fix = get_allvars_fix("Amon", "tas") fixed_cubes = fix.fix_metadata(cubes) cube = check_tas_metadata(fixed_cubes) - assert cube.coord('longitude').bounds[0, 1] != 40.0 + assert cube.coord("longitude").bounds[0, 1] != 40.0 mock_logger.warning.assert_called_once_with( "Longitude bounds of the face coordinate ('clon_vertices' in " "the grid file) differ from the corresponding values " @@ -2008,8 +2109,8 @@ def test_get_mesh_fail_invalid_clon_bounds(mock_logger, cubes_2d): def test_get_grid_url(): """Test fix.""" - cube = Cube(0, attributes={'grid_file_uri': TEST_GRID_FILE_URI}) - fix = get_allvars_fix('Amon', 'tas') + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Amon", "tas") (grid_url, grid_name) = fix._get_grid_url(cube) assert grid_url == TEST_GRID_FILE_URI assert grid_name == TEST_GRID_FILE_NAME @@ -2018,9 +2119,11 @@ def test_get_grid_url(): def test_get_grid_url_fail(): """Test fix.""" cube = Cube(0) - fix = get_allvars_fix('Amon', 'tas') - msg = ("Cube does not contain the attribute 'grid_file_uri' necessary to " - "download the ICON horizontal grid file") + fix = get_allvars_fix("Amon", "tas") + msg = ( + "Cube does not contain the attribute 'grid_file_uri' necessary to " + "download the ICON horizontal grid file" + ) with pytest.raises(ValueError, match=msg): fix._get_grid_url(cube) @@ -2030,9 +2133,9 @@ def test_get_grid_url_fail(): def test_get_mesh_cached_from_attr(monkeypatch): """Test fix.""" - cube = Cube(0, attributes={'grid_file_uri': TEST_GRID_FILE_URI}) - fix = get_allvars_fix('Amon', 'tas') - monkeypatch.setattr(fix, '_create_mesh', mock.Mock()) + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Amon", "tas") + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.mesh mesh = fix.get_mesh(cube) assert mesh == mock.sentinel.mesh @@ -2041,30 +2144,30 @@ def test_get_mesh_cached_from_attr(monkeypatch): def test_get_mesh_not_cached_from_attr(monkeypatch): """Test fix.""" - cube = Cube(0, attributes={'grid_file_uri': TEST_GRID_FILE_URI}) - fix = get_allvars_fix('Amon', 'tas') - monkeypatch.setattr(fix, '_create_mesh', mock.Mock()) + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Amon", "tas") + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) fix.get_mesh(cube) fix._create_mesh.assert_called_once_with(cube) def test_get_mesh_cached_from_facet(monkeypatch, tmp_path): """Test fix.""" - session = CFG.start_session('my session') - session['auxiliary_data_dir'] = tmp_path + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path # Save temporary grid file (this will not be used; however, it is necessary # to not raise a FileNotFoundError) - grid_path = 'grid.nc' - grid_cube = Cube(0, var_name='grid') - iris.save(grid_cube, tmp_path / 'grid.nc') - - cube = Cube(0, attributes={'grid_file_uri': TEST_GRID_FILE_URI}) - fix = get_allvars_fix('Amon', 'tas', session=session) - fix.extra_facets['horizontal_grid'] = grid_path - monkeypatch.setattr(fix, '_create_mesh', mock.Mock()) + grid_path = "grid.nc" + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, tmp_path / "grid.nc") + + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Amon", "tas", session=session) + 
fix.extra_facets["horizontal_grid"] = grid_path + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.wrong_mesh - fix._meshes['grid.nc'] = mock.sentinel.mesh + fix._meshes["grid.nc"] = mock.sentinel.mesh mesh = fix.get_mesh(cube) @@ -2074,19 +2177,19 @@ def test_get_mesh_cached_from_facet(monkeypatch, tmp_path): def test_get_mesh_not_cached_from_facet(monkeypatch, tmp_path): """Test fix.""" - session = CFG.start_session('my session') - session['auxiliary_data_dir'] = tmp_path + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path # Save temporary grid file (this will not be used; however, it is necessary # to not raise a FileNotFoundError) - grid_path = 'grid.nc' - grid_cube = Cube(0, var_name='grid') - iris.save(grid_cube, tmp_path / 'grid.nc') - - cube = Cube(0, attributes={'grid_file_uri': TEST_GRID_FILE_URI}) - fix = get_allvars_fix('Amon', 'tas', session=session) - fix.extra_facets['horizontal_grid'] = grid_path - monkeypatch.setattr(fix, '_create_mesh', mock.Mock()) + grid_path = "grid.nc" + grid_cube = Cube(0, var_name="grid") + iris.save(grid_cube, tmp_path / "grid.nc") + + cube = Cube(0, attributes={"grid_file_uri": TEST_GRID_FILE_URI}) + fix = get_allvars_fix("Amon", "tas", session=session) + fix.extra_facets["horizontal_grid"] = grid_path + monkeypatch.setattr(fix, "_create_mesh", mock.Mock()) fix._meshes[TEST_GRID_FILE_NAME] = mock.sentinel.wrong_mesh fix.get_mesh(cube) @@ -2098,66 +2201,66 @@ def test_get_mesh_not_cached_from_facet(monkeypatch, tmp_path): @pytest.mark.parametrize( - 'path,description,output', + "path,description,output", [ - ('{tmp_path}/a.nc', None, '{tmp_path}/a.nc'), - ('b.nc', 'Grid file', '{tmp_path}/b.nc'), + ("{tmp_path}/a.nc", None, "{tmp_path}/a.nc"), + ("b.nc", "Grid file", "{tmp_path}/b.nc"), ], ) def test_get_path_from_facet(path, description, output, tmp_path): """Test fix.""" - session = CFG.start_session('my session') - session['auxiliary_data_dir'] = tmp_path + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path path = path.format(tmp_path=tmp_path) - fix = get_allvars_fix('Amon', 'tas', session=session) - fix.extra_facets['test_path'] = path + fix = get_allvars_fix("Amon", "tas", session=session) + fix.extra_facets["test_path"] = path # Create empty dummy file output = output.format(tmp_path=tmp_path) - with open(output, 'w', encoding='utf-8'): + with open(output, "w", encoding="utf-8"): pass - out_path = fix._get_path_from_facet('test_path', description=description) + out_path = fix._get_path_from_facet("test_path", description=description) assert isinstance(out_path, Path) assert out_path == Path(output.format(tmp_path=tmp_path)) @pytest.mark.parametrize( - 'path,description', + "path,description", [ - ('{tmp_path}/a.nc', None), - ('b.nc', 'Grid file'), + ("{tmp_path}/a.nc", None), + ("b.nc", "Grid file"), ], ) def test_get_path_from_facet_fail(path, description, tmp_path): """Test fix.""" - session = CFG.start_session('my session') - session['auxiliary_data_dir'] = tmp_path + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path path = path.format(tmp_path=tmp_path) - fix = get_allvars_fix('Amon', 'tas', session=session) - fix.extra_facets['test_path'] = path + fix = get_allvars_fix("Amon", "tas", session=session) + fix.extra_facets["test_path"] = path with pytest.raises(FileNotFoundError, match=description): - fix._get_path_from_facet('test_path', description=description) + 
fix._get_path_from_facet("test_path", description=description) # Test add_additional_cubes -@pytest.mark.parametrize('facet', ['zg_file', 'zghalf_file']) -@pytest.mark.parametrize('path', ['{tmp_path}/a.nc', 'a.nc']) +@pytest.mark.parametrize("facet", ["zg_file", "zghalf_file"]) +@pytest.mark.parametrize("path", ["{tmp_path}/a.nc", "a.nc"]) def test_add_additional_cubes(path, facet, tmp_path): """Test fix.""" - session = CFG.start_session('my session') - session['auxiliary_data_dir'] = tmp_path + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path path = path.format(tmp_path=tmp_path) - fix = get_allvars_fix('Amon', 'tas', session=session) + fix = get_allvars_fix("Amon", "tas", session=session) fix.extra_facets[facet] = path # Save temporary cube cube = Cube(0, var_name=facet) - iris.save(cube, tmp_path / 'a.nc') + iris.save(cube, tmp_path / "a.nc") cubes = CubeList([]) new_cubes = fix.add_additional_cubes(cubes) @@ -2167,38 +2270,38 @@ def test_add_additional_cubes(path, facet, tmp_path): assert cubes[0].var_name == facet -@pytest.mark.parametrize('facet', ['zg_file', 'zghalf_file']) -@pytest.mark.parametrize('path', ['{tmp_path}/a.nc', 'a.nc']) +@pytest.mark.parametrize("facet", ["zg_file", "zghalf_file"]) +@pytest.mark.parametrize("path", ["{tmp_path}/a.nc", "a.nc"]) def test_add_additional_cubes_fail(path, facet, tmp_path): """Test fix.""" - session = CFG.start_session('my session') - session['auxiliary_data_dir'] = tmp_path + session = CFG.start_session("my session") + session["auxiliary_data_dir"] = tmp_path path = path.format(tmp_path=tmp_path) - fix = get_allvars_fix('Amon', 'tas', session=session) + fix = get_allvars_fix("Amon", "tas", session=session) fix.extra_facets[facet] = path cubes = CubeList([]) - with pytest.raises(FileNotFoundError, match='File'): + with pytest.raises(FileNotFoundError, match="File"): fix.add_additional_cubes(cubes) # Test _fix_height -@pytest.mark.parametrize('bounds', [True, False]) +@pytest.mark.parametrize("bounds", [True, False]) def test_fix_height_plev(bounds, simple_unstructured_cube): """Test fix.""" cube = simple_unstructured_cube[:, 1:, :] pfull_cube = simple_unstructured_cube[:, 1:, :] - pfull_cube.var_name = 'pfull' - pfull_cube.units = 'Pa' + pfull_cube.var_name = "pfull" + pfull_cube.units = "Pa" cubes = CubeList([cube, pfull_cube]) if bounds: phalf_cube = simple_unstructured_cube.copy() - phalf_cube.var_name = 'phalf' - phalf_cube.units = 'Pa' + phalf_cube.var_name = "phalf" + phalf_cube.units = "Pa" cubes.append(phalf_cube) - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") fixed_cube = fix._fix_height(cube, cubes) @@ -2210,7 +2313,7 @@ def test_fix_height_plev(bounds, simple_unstructured_cube): assert height.bounds is None plev = check_air_pressure_metadata(fixed_cube) - assert fixed_cube.coord_dims('air_pressure') == (0, 1, 2) + assert fixed_cube.coord_dims("air_pressure") == (0, 1, 2) np.testing.assert_allclose(plev.points, expected_data) if bounds: expected_bnds = [[[[4.0, 2.0], [5.0, 3.0]], [[2.0, 0.0], [3.0, 1.0]]]] @@ -2219,20 +2322,20 @@ def test_fix_height_plev(bounds, simple_unstructured_cube): assert plev.bounds is None -@pytest.mark.parametrize('bounds', [True, False]) +@pytest.mark.parametrize("bounds", [True, False]) def test_fix_height_alt16(bounds, simple_unstructured_cube): """Test fix.""" cube = simple_unstructured_cube[:, 1:, :] zg_cube = simple_unstructured_cube[0, 1:, :] - zg_cube.var_name = 'zg' - zg_cube.units = 'm' + zg_cube.var_name = "zg" + zg_cube.units 
= "m" cubes = CubeList([cube, zg_cube]) if bounds: zghalf_cube = simple_unstructured_cube[0, :, :] - zghalf_cube.var_name = 'zghalf' - zghalf_cube.units = 'm' + zghalf_cube.var_name = "zghalf" + zghalf_cube.units = "m" cubes.append(zghalf_cube) - fix = get_allvars_fix('Amon', 'ta') + fix = get_allvars_fix("Amon", "ta") fixed_cube = fix._fix_height(cube, cubes) @@ -2243,14 +2346,14 @@ def test_fix_height_alt16(bounds, simple_unstructured_cube): np.testing.assert_array_equal(height.points, [0, 1]) assert height.bounds is None - assert fixed_cube.coords('altitude', dim_coords=False) - alt16 = fixed_cube.coord('altitude', dim_coords=False) - assert alt16.var_name == 'alt16' - assert alt16.standard_name == 'altitude' - assert alt16.long_name == 'altitude' - assert alt16.units == 'm' - assert alt16.attributes == {'positive': 'up'} - assert fixed_cube.coord_dims('altitude') == (1, 2) + assert fixed_cube.coords("altitude", dim_coords=False) + alt16 = fixed_cube.coord("altitude", dim_coords=False) + assert alt16.var_name == "alt16" + assert alt16.standard_name == "altitude" + assert alt16.long_name == "altitude" + assert alt16.units == "m" + assert alt16.attributes == {"positive": "up"} + assert fixed_cube.coord_dims("altitude") == (1, 2) np.testing.assert_allclose(alt16.points, expected_data[0]) if bounds: expected_bnds = [[[4.0, 2.0], [5.0, 3.0]], [[2.0, 0.0], [3.0, 1.0]]] @@ -2264,27 +2367,27 @@ def test_fix_height_alt16(bounds, simple_unstructured_cube): def test_get_hfls_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'hfls') + fix = Fix.get_fixes("ICON", "ICON", "Amon", "hfls") assert fix == [Hfls(None), AllVars(None), GenericFix(None)] def test_hfls_fix(cubes_regular_grid): """Test fix.""" cubes = CubeList([cubes_regular_grid[0].copy()]) - cubes[0].var_name = 'hfls' - cubes[0].units = 'W m-2' + cubes[0].var_name = "hfls" + cubes[0].units = "W m-2" - fixed_cubes = fix_metadata(cubes, 'Amon', 'hfls') + fixed_cubes = fix_metadata(cubes, "Amon", "hfls") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'hfls' - assert cube.standard_name == 'surface_upward_latent_heat_flux' - assert cube.long_name == 'Surface Upward Latent Heat Flux' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'up' + assert cube.var_name == "hfls" + assert cube.standard_name == "surface_upward_latent_heat_flux" + assert cube.long_name == "Surface Upward Latent Heat Flux" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" - fixed_cube = fix_data(cube, 'Amon', 'hfls') + fixed_cube = fix_data(cube, "Amon", "hfls") np.testing.assert_allclose(fixed_cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) @@ -2294,27 +2397,27 @@ def test_hfls_fix(cubes_regular_grid): def test_get_hfss_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'hfss') + fix = Fix.get_fixes("ICON", "ICON", "Amon", "hfss") assert fix == [Hfss(None), AllVars(None), GenericFix(None)] def test_hfss_fix(cubes_regular_grid): """Test fix.""" cubes = CubeList([cubes_regular_grid[0].copy()]) - cubes[0].var_name = 'hfss' - cubes[0].units = 'W m-2' + cubes[0].var_name = "hfss" + cubes[0].units = "W m-2" - fixed_cubes = fix_metadata(cubes, 'Amon', 'hfss') + fixed_cubes = fix_metadata(cubes, "Amon", "hfss") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'hfss' - assert cube.standard_name == 'surface_upward_sensible_heat_flux' - assert cube.long_name == 'Surface Upward Sensible Heat Flux' - assert cube.units == 'W m-2' - 
assert cube.attributes['positive'] == 'up' + assert cube.var_name == "hfss" + assert cube.standard_name == "surface_upward_sensible_heat_flux" + assert cube.long_name == "Surface Upward Sensible Heat Flux" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "up" - fixed_cube = fix_data(cube, 'Amon', 'hfss') + fixed_cube = fix_data(cube, "Amon", "hfss") np.testing.assert_allclose(fixed_cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) @@ -2324,33 +2427,35 @@ def test_hfss_fix(cubes_regular_grid): def test_get_rtnt_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'rtnt') + fix = Fix.get_fixes("ICON", "ICON", "Amon", "rtnt") assert fix == [Rtnt(None), AllVars(None), GenericFix(None)] def test_rtnt_fix(cubes_regular_grid): """Test fix.""" - cubes = CubeList([ - cubes_regular_grid[0].copy(), - cubes_regular_grid[0].copy(), - cubes_regular_grid[0].copy() - ]) - cubes[0].var_name = 'rsdt' - cubes[1].var_name = 'rsut' - cubes[2].var_name = 'rlut' - cubes[0].units = 'W m-2' - cubes[1].units = 'W m-2' - cubes[2].units = 'W m-2' + cubes = CubeList( + [ + cubes_regular_grid[0].copy(), + cubes_regular_grid[0].copy(), + cubes_regular_grid[0].copy(), + ] + ) + cubes[0].var_name = "rsdt" + cubes[1].var_name = "rsut" + cubes[2].var_name = "rlut" + cubes[0].units = "W m-2" + cubes[1].units = "W m-2" + cubes[2].units = "W m-2" - fixed_cubes = fix_metadata(cubes, 'Amon', 'rtnt') + fixed_cubes = fix_metadata(cubes, "Amon", "rtnt") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'rtnt' + assert cube.var_name == "rtnt" assert cube.standard_name is None - assert cube.long_name == 'TOA Net downward Total Radiation' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'down' + assert cube.long_name == "TOA Net downward Total Radiation" + assert cube.units == "W m-2" + assert cube.attributes["positive"] == "down" np.testing.assert_allclose(cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) @@ -2360,33 +2465,36 @@ def test_rtnt_fix(cubes_regular_grid): def test_get_rtmt_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('ICON', 'ICON', 'Amon', 'rtmt') + fix = Fix.get_fixes("ICON", "ICON", "Amon", "rtmt") assert fix == [Rtmt(None), AllVars(None), GenericFix(None)] def test_rtmt_fix(cubes_regular_grid): """Test fix.""" - cubes = CubeList([ - cubes_regular_grid[0].copy(), - cubes_regular_grid[0].copy(), - cubes_regular_grid[0].copy() - ]) - cubes[0].var_name = 'rsdt' - cubes[1].var_name = 'rsut' - cubes[2].var_name = 'rlut' - cubes[0].units = 'W m-2' - cubes[1].units = 'W m-2' - cubes[2].units = 'W m-2' + cubes = CubeList( + [ + cubes_regular_grid[0].copy(), + cubes_regular_grid[0].copy(), + cubes_regular_grid[0].copy(), + ] + ) + cubes[0].var_name = "rsdt" + cubes[1].var_name = "rsut" + cubes[2].var_name = "rlut" + cubes[0].units = "W m-2" + cubes[1].units = "W m-2" + cubes[2].units = "W m-2" - fixed_cubes = fix_metadata(cubes, 'Amon', 'rtmt') + fixed_cubes = fix_metadata(cubes, "Amon", "rtmt") assert len(fixed_cubes) == 1 cube = fixed_cubes[0] - assert cube.var_name == 'rtmt' - assert cube.standard_name == ('net_downward_radiative_flux_at_top_of' - '_atmosphere_model') - assert cube.long_name == 'Net Downward Radiative Flux at Top of Model' - assert cube.units == 'W m-2' - assert cube.attributes['positive'] == 'down' + assert cube.var_name == "rtmt" + assert cube.standard_name == ( + "net_downward_radiative_flux_at_top_of_atmosphere_model" + ) + assert cube.long_name == "Net Downward Radiative Flux at Top of Model" + assert cube.units 
== "W m-2" + assert cube.attributes["positive"] == "down" np.testing.assert_allclose(cube.data, [[[0.0, -1.0], [-2.0, -3.0]]]) diff --git a/tests/integration/cmor/_fixes/ipslcm/test_ipsl_cm6.py b/tests/integration/cmor/_fixes/ipslcm/test_ipsl_cm6.py index dd1d19480e..f13c329d49 100644 --- a/tests/integration/cmor/_fixes/ipslcm/test_ipsl_cm6.py +++ b/tests/integration/cmor/_fixes/ipslcm/test_ipsl_cm6.py @@ -1,4 +1,5 @@ """Tests for the fixes of IPSL-CM6.""" + import iris import pytest @@ -11,7 +12,7 @@ @pytest.fixture def test_get_tas_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('IPSLCM', 'IPSL-CM6', 'Amon', 'tas') + fix = Fix.get_fixes("IPSLCM", "IPSL-CM6", "Amon", "tas") assert fix == [Tas(None), GenericFix(None)] @@ -21,18 +22,19 @@ def cubes(): cube = iris.cube.Cube( [200.0], # chilly, isn't it ? - var_name='tas', - standard_name='air_temperature', - units='K', + var_name="tas", + standard_name="air_temperature", + units="K", ) return iris.cube.CubeList([cube]) def test_tas_fix_metadata(cubes): """Test ``fix_metadata`` for ``tas``.""" - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = Tas(vardef) out_cubes = fix.fix_metadata(cubes) - out_cube = fix.get_cube_from_list(out_cubes, 'tas') - assert any([coord.standard_name == 'height' - for coord in out_cube.aux_coords]) + out_cube = fix.get_cube_from_list(out_cubes, "tas") + assert any( + [coord.standard_name == "height" for coord in out_cube.aux_coords] + ) diff --git a/tests/integration/cmor/_fixes/native6/test_era5.py b/tests/integration/cmor/_fixes/native6/test_era5.py index 70b432541d..60460138a9 100644 --- a/tests/integration/cmor/_fixes/native6/test_era5.py +++ b/tests/integration/cmor/_fixes/native6/test_era5.py @@ -1,4 +1,5 @@ """Tests for the fixes of ERA5.""" + import datetime import iris @@ -17,21 +18,23 @@ from esmvalcore.cmor.table import CMOR_TABLES, get_var_info from esmvalcore.preprocessor import cmor_check_metadata -COMMENT = ('Contains modified Copernicus Climate Change Service Information ' - f'{datetime.datetime.now().year}') +COMMENT = ( + "Contains modified Copernicus Climate Change Service Information " + f"{datetime.datetime.now().year}" +) def test_get_evspsbl_fix(): """Test whether the right fixes are gathered for a single variable.""" - fix = Fix.get_fixes('native6', 'ERA5', 'E1hr', 'evspsbl') - vardef = get_var_info('native6', 'E1hr', 'evspsbl') + fix = Fix.get_fixes("native6", "ERA5", "E1hr", "evspsbl") + vardef = get_var_info("native6", "E1hr", "evspsbl") assert fix == [Evspsbl(vardef), AllVars(vardef), GenericFix(vardef)] def test_get_zg_fix(): """Test whether the right fix gets found again, for zg as well.""" - fix = Fix.get_fixes('native6', 'ERA5', 'Amon', 'zg') - vardef = get_var_info('native6', 'E1hr', 'evspsbl') + fix = Fix.get_fixes("native6", "ERA5", "Amon", "zg") + vardef = get_var_info("native6", "E1hr", "evspsbl") assert fix == [Zg(vardef), AllVars(vardef), GenericFix(vardef)] @@ -39,198 +42,207 @@ def test_get_frequency_hourly(): """Test cubes with hourly frequency.""" time = iris.coords.DimCoord( [0, 1, 2], - standard_name='time', - units=Unit('hours since 1900-01-01'), + standard_name="time", + units=Unit("hours since 1900-01-01"), ) cube = iris.cube.Cube( [1, 6, 3], - var_name='random_var', + var_name="random_var", dim_coords_and_dims=[(time, 0)], ) - assert get_frequency(cube) == 'hourly' - cube.coord('time').convert_units('days since 1850-1-1 00:00:00.0') - assert get_frequency(cube) == 'hourly' + assert get_frequency(cube) == 
"hourly" + cube.coord("time").convert_units("days since 1850-1-1 00:00:00.0") + assert get_frequency(cube) == "hourly" def test_get_frequency_monthly(): """Test cubes with monthly frequency.""" time = iris.coords.DimCoord( [0, 31, 59], - standard_name='time', - units=Unit('hours since 1900-01-01'), + standard_name="time", + units=Unit("hours since 1900-01-01"), ) cube = iris.cube.Cube( [1, 6, 3], - var_name='random_var', + var_name="random_var", dim_coords_and_dims=[(time, 0)], ) - assert get_frequency(cube) == 'monthly' - cube.coord('time').convert_units('days since 1850-1-1 00:00:00.0') - assert get_frequency(cube) == 'monthly' + assert get_frequency(cube) == "monthly" + cube.coord("time").convert_units("days since 1850-1-1 00:00:00.0") + assert get_frequency(cube) == "monthly" def test_get_frequency_fx(): """Test cubes with time invariant frequency.""" - cube = iris.cube.Cube(1., long_name='Cube without time coordinate') - assert get_frequency(cube) == 'fx' + cube = iris.cube.Cube(1.0, long_name="Cube without time coordinate") + assert get_frequency(cube) == "fx" time = iris.coords.DimCoord( 0, - standard_name='time', - units=Unit('hours since 1900-01-01'), + standard_name="time", + units=Unit("hours since 1900-01-01"), ) cube = iris.cube.Cube( [1], - var_name='cube_with_length_1_time_coord', - long_name='Geopotential', + var_name="cube_with_length_1_time_coord", + long_name="Geopotential", dim_coords_and_dims=[(time, 0)], ) - assert get_frequency(cube) == 'fx' - cube.long_name = 'Not geopotential' + assert get_frequency(cube) == "fx" + cube.long_name = "Not geopotential" with pytest.raises(ValueError): get_frequency(cube) def _era5_latitude(): return iris.coords.DimCoord( - np.array([90., 0., -90.]), - standard_name='latitude', - long_name='latitude', - var_name='latitude', - units=Unit('degrees'), + np.array([90.0, 0.0, -90.0]), + standard_name="latitude", + long_name="latitude", + var_name="latitude", + units=Unit("degrees"), ) def _era5_longitude(): return iris.coords.DimCoord( np.array([0, 180, 359.75]), - standard_name='longitude', - long_name='longitude', - var_name='longitude', - units=Unit('degrees'), + standard_name="longitude", + long_name="longitude", + var_name="longitude", + units=Unit("degrees"), circular=True, ) def _era5_time(frequency): - if frequency == 'invariant': + if frequency == "invariant": timestamps = [788928] # hours since 1900 at 1 january 1990 - elif frequency == 'hourly': + elif frequency == "hourly": timestamps = [788928, 788929, 788930] - elif frequency == 'monthly': + elif frequency == "monthly": timestamps = [788928, 789672, 790344] return iris.coords.DimCoord( - np.array(timestamps, dtype='int32'), - standard_name='time', - long_name='time', - var_name='time', - units=Unit('hours since 1900-01-01' - '00:00:00.0', calendar='gregorian'), + np.array(timestamps, dtype="int32"), + standard_name="time", + long_name="time", + var_name="time", + units=Unit("hours since 1900-01-0100:00:00.0", calendar="gregorian"), ) def _era5_plev(): - values = np.array([ - 1, - 1000, - ]) + values = np.array( + [ + 1, + 1000, + ] + ) return iris.coords.DimCoord( values, long_name="pressure", units=Unit("millibars"), var_name="level", - attributes={'positive': 'down'}, + attributes={"positive": "down"}, ) def _era5_data(frequency): - if frequency == 'invariant': + if frequency == "invariant": return np.arange(9).reshape(1, 3, 3) return np.arange(27).reshape(3, 3, 3) def _cmor_latitude(): return iris.coords.DimCoord( - np.array([-90., 0., 90.]), - standard_name='latitude', - 
long_name='Latitude', - var_name='lat', - units=Unit('degrees_north'), - bounds=np.array([[-90., -45.], [-45., 45.], [45., 90.]]), + np.array([-90.0, 0.0, 90.0]), + standard_name="latitude", + long_name="Latitude", + var_name="lat", + units=Unit("degrees_north"), + bounds=np.array([[-90.0, -45.0], [-45.0, 45.0], [45.0, 90.0]]), ) def _cmor_longitude(): return iris.coords.DimCoord( np.array([0, 180, 359.75]), - standard_name='longitude', - long_name='Longitude', - var_name='lon', - units=Unit('degrees_east'), - bounds=np.array([[-0.125, 90.], [90., 269.875], [269.875, 359.875]]), + standard_name="longitude", + long_name="Longitude", + var_name="lon", + units=Unit("degrees_east"), + bounds=np.array([[-0.125, 90.0], [90.0, 269.875], [269.875, 359.875]]), circular=True, ) def _cmor_time(mip, bounds=None, shifted=False): """Provide expected time coordinate after fixes.""" - if mip == 'E1hr': + if mip == "E1hr": offset = 51134 # days since 1850 at 1 january 1990 timestamps = offset + np.arange(3) / 24 if shifted: timestamps -= 1 / 48 if bounds is not None: bounds = [[t - 1 / 48, t + 1 / 48] for t in timestamps] - elif mip == 'Amon': - timestamps = np.array([51149.5, 51179., 51208.5]) + elif mip == "Amon": + timestamps = np.array([51149.5, 51179.0, 51208.5]) if bounds is not None: - bounds = np.array([[51134., 51165.], [51165., 51193.], - [51193., 51224.]]) + bounds = np.array( + [[51134.0, 51165.0], [51165.0, 51193.0], [51193.0, 51224.0]] + ) - return iris.coords.DimCoord(np.array(timestamps, dtype=float), - standard_name='time', - long_name='time', - var_name='time', - units=Unit('days since 1850-1-1 00:00:00', - calendar='gregorian'), - bounds=bounds) + return iris.coords.DimCoord( + np.array(timestamps, dtype=float), + standard_name="time", + long_name="time", + var_name="time", + units=Unit("days since 1850-1-1 00:00:00", calendar="gregorian"), + bounds=bounds, + ) def _cmor_aux_height(value): - return iris.coords.AuxCoord(value, - long_name="height", - standard_name="height", - units=Unit('m'), - var_name="height", - attributes={'positive': 'up'}) + return iris.coords.AuxCoord( + value, + long_name="height", + standard_name="height", + units=Unit("m"), + var_name="height", + attributes={"positive": "up"}, + ) def _cmor_plev(): - values = np.array([ - 100000.0, - 100.0, - ]) - return iris.coords.DimCoord(values, - long_name="pressure", - standard_name="air_pressure", - units=Unit("Pa"), - var_name="plev", - attributes={'positive': 'down'}) + values = np.array( + [ + 100000.0, + 100.0, + ] + ) + return iris.coords.DimCoord( + values, + long_name="pressure", + standard_name="air_pressure", + units=Unit("Pa"), + var_name="plev", + attributes={"positive": "down"}, + ) def _cmor_data(mip): - if mip == 'fx': + if mip == "fx": return np.arange(9).reshape(3, 3)[::-1, :] return np.arange(27).reshape(3, 3, 3)[:, ::-1, :] def cl_era5_monthly(): - time = _era5_time('monthly') + time = _era5_time("monthly") data = np.ones((3, 2, 3, 3)) cube = iris.cube.Cube( data, - long_name='Percentage Cloud Cover', - var_name='cl', - units='%', + long_name="Percentage Cloud Cover", + var_name="cl", + units="%", dim_coords_and_dims=[ (time, 0), (_era5_plev(), 1), @@ -242,13 +254,13 @@ def cl_era5_monthly(): def cl_cmor_amon(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('Amon', 'cl') - time = _cmor_time('Amon', bounds=True) + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("Amon", "cl") + time = _cmor_time("Amon", bounds=True) data = np.ones((3, 2, 3, 3)) data = data 
* 100.0 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -259,18 +271,18 @@ def cl_cmor_amon(): (_cmor_latitude(), 2), (_cmor_longitude(), 3), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def clt_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='cloud cover fraction', - var_name='cloud_cover', - units='unknown', + _era5_data("hourly"), + long_name="cloud cover fraction", + var_name="cloud_cover", + units="unknown", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -281,12 +293,12 @@ def clt_era5_hourly(): def clt_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'clt') - time = _cmor_time('E1hr', bounds=True) - data = _cmor_data('E1hr') * 100 + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "clt") + time = _cmor_time("E1hr", bounds=True) + data = _cmor_data("E1hr") * 100 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -296,18 +308,18 @@ def clt_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def evspsbl_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly') * -1., - long_name='total evapotranspiration', - var_name='e', - units='unknown', + _era5_data("hourly") * -1.0, + long_name="total evapotranspiration", + var_name="e", + units="unknown", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -318,12 +330,12 @@ def evspsbl_era5_hourly(): def evspsbl_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'evspsbl') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') * 1000 / 3600. + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "evspsbl") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") * 1000 / 3600.0 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -333,18 +345,18 @@ def evspsbl_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def evspsblpot_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly') * -1., - long_name='potential evapotranspiration', - var_name='epot', - units='unknown', + _era5_data("hourly") * -1.0, + long_name="potential evapotranspiration", + var_name="epot", + units="unknown", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -355,12 +367,12 @@ def evspsblpot_era5_hourly(): def evspsblpot_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'evspsblpot') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') * 1000 / 3600. 
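# Illustration, not part of the fixtures above: the factor
# "* 1000 / 3600." converts ERA5 hourly accumulations from metres of
# water per hour to the CMOR unit kg m-2 s-1, assuming a water density
# of 1000 kg m-3. A minimal, self-contained sketch of that arithmetic:
import math

WATER_DENSITY = 1000.0  # kg m-3: turns a depth in m into a mass in kg m-2
SECONDS_PER_HOUR = 3600.0

def m_per_hour_to_kg_m2_s(depth_m):
    """Convert an hourly accumulated water depth (m) to a flux (kg m-2 s-1)."""
    return depth_m * WATER_DENSITY / SECONDS_PER_HOUR

# 3.6 mm accumulated over one hour corresponds to 0.001 kg m-2 s-1.
assert math.isclose(m_per_hour_to_kg_m2_s(0.0036), 1e-3)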
+ cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "evspsblpot") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") * 1000 / 3600.0 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -370,18 +382,18 @@ def evspsblpot_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def mrro_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='runoff', - var_name='runoff', - units='m', + _era5_data("hourly"), + long_name="runoff", + var_name="runoff", + units="m", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -392,12 +404,12 @@ def mrro_era5_hourly(): def mrro_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'mrro') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') * 1000 / 3600. + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "mrro") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") * 1000 / 3600.0 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -407,18 +419,18 @@ def mrro_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def orog_era5_hourly(): - time = _era5_time('invariant') + time = _era5_time("invariant") cube = iris.cube.Cube( - _era5_data('invariant'), - long_name='geopotential height', - var_name='zg', - units='m**2 s**-2', + _era5_data("invariant"), + long_name="geopotential height", + var_name="zg", + units="m**2 s**-2", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -429,28 +441,28 @@ def orog_era5_hourly(): def orog_cmor_fx(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('fx', 'orog') - data = _cmor_data('fx') / 9.80665 + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("fx", "orog") + data = _cmor_data("fx") / 9.80665 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, units=Unit(vardef.units), dim_coords_and_dims=[(_cmor_latitude(), 0), (_cmor_longitude(), 1)], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def pr_era5_monthly(): - time = _era5_time('monthly') + time = _era5_time("monthly") cube = iris.cube.Cube( - _era5_data('monthly'), - long_name='total_precipitation', - var_name='tp', - units='m', + _era5_data("monthly"), + long_name="total_precipitation", + var_name="tp", + units="m", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -461,12 +473,12 @@ def pr_era5_monthly(): def pr_cmor_amon(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('Amon', 'pr') - time = _cmor_time('Amon', bounds=True) - data = _cmor_data('Amon') * 1000. / 3600. / 24. 
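# Illustration: for monthly data the ERA5 "tp" field holds a mean daily
# accumulation in metres, so the conversion divides by a full day
# (3600 s/h * 24 h) rather than a single hour. A sketch using
# illustrative names (these are not helpers from the fixes module):
import math

SECONDS_PER_DAY = 3600.0 * 24.0

def m_per_day_to_kg_m2_s(depth_m):
    """Convert a daily accumulated water depth (m) to a flux (kg m-2 s-1)."""
    return depth_m * 1000.0 / SECONDS_PER_DAY

# 86.4 mm per day is the same flux as 0.001 kg m-2 s-1.
assert math.isclose(m_per_day_to_kg_m2_s(0.0864), 1e-3)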
+ cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("Amon", "pr") + time = _cmor_time("Amon", bounds=True) + data = _cmor_data("Amon") * 1000.0 / 3600.0 / 24.0 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -476,18 +488,18 @@ def pr_cmor_amon(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def pr_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='total_precipitation', - var_name='tp', - units='m', + _era5_data("hourly"), + long_name="total_precipitation", + var_name="tp", + units="m", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -498,12 +510,12 @@ def pr_era5_hourly(): def pr_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'pr') - time = _cmor_time('E1hr', bounds=True, shifted=True) - data = _cmor_data('E1hr') * 1000. / 3600. + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "pr") + time = _cmor_time("E1hr", bounds=True, shifted=True) + data = _cmor_data("E1hr") * 1000.0 / 3600.0 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -513,18 +525,18 @@ def pr_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def prsn_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='snow', - var_name='snow', - units='unknown', + _era5_data("hourly"), + long_name="snow", + var_name="snow", + units="unknown", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -535,12 +547,12 @@ def prsn_era5_hourly(): def prsn_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'prsn') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') * 1000 / 3600. 
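# Illustration: ERA5 stamps hourly accumulations at the end of the
# accumulation interval, which is why the expected coordinates for these
# fixtures are built with ``shifted=True``: each point moves back half an
# hour (1/48 day) to the interval centre, and the bounds span the full
# hour. A standalone sketch of that shift (plain numpy, no fixture
# helpers; ``offset`` is the value used in ``_cmor_time``):
import numpy as np

offset = 51134  # days since 1850-01-01 at 1990-01-01, as in _cmor_time()
points = offset + np.arange(3) / 24  # hourly points at the end of each hour
points = points - 1 / 48  # shift back half an hour to the interval centre
bounds = np.stack([points - 1 / 48, points + 1 / 48], axis=-1)

np.testing.assert_allclose(bounds[0], [offset - 1 / 24, offset])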
+ cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "prsn") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") * 1000 / 3600.0 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -550,18 +562,18 @@ def prsn_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def ptype_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='snow', - var_name='snow', - units='unknown', + _era5_data("hourly"), + long_name="snow", + var_name="snow", + units="unknown", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -572,12 +584,12 @@ def ptype_era5_hourly(): def ptype_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'ptype') - time = _cmor_time('E1hr', shifted=False, bounds=True) - data = _cmor_data('E1hr') + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "ptype") + time = _cmor_time("E1hr", shifted=False, bounds=True) + data = _cmor_data("E1hr") cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, units=1, @@ -586,20 +598,20 @@ def ptype_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) - cube.coord('latitude').long_name = 'latitude' - cube.coord('longitude').long_name = 'longitude' + cube.coord("latitude").long_name = "latitude" + cube.coord("longitude").long_name = "longitude" return iris.cube.CubeList([cube]) def rlds_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='surface thermal radiation downwards', - var_name='ssrd', - units='J m**-2', + _era5_data("hourly"), + long_name="surface thermal radiation downwards", + var_name="ssrd", + units="J m**-2", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -610,32 +622,36 @@ def rlds_era5_hourly(): def rlds_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'rlds') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') / 3600 - cube = iris.cube.Cube(data.astype('float32'), - long_name=vardef.long_name, - var_name=vardef.short_name, - standard_name=vardef.standard_name, - units=Unit(vardef.units), - dim_coords_and_dims=[(time, 0), - (_cmor_latitude(), 1), - (_cmor_longitude(), 2)], - attributes={ - 'comment': COMMENT, - 'positive': 'down', - }) + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "rlds") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") / 3600 + cube = iris.cube.Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={ + "comment": COMMENT, + "positive": "down", + }, + ) return iris.cube.CubeList([cube]) def rls_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='runoff', - var_name='runoff', - units='W m-2', + 
_era5_data("hourly"), + long_name="runoff", + var_name="runoff", + units="W m-2", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -646,34 +662,36 @@ def rls_era5_hourly(): def rls_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'rls') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') - cube = iris.cube.Cube(data.astype('float32'), - long_name=vardef.long_name, - var_name=vardef.short_name, - standard_name=vardef.standard_name, - units=Unit(vardef.units), - dim_coords_and_dims=[ - (time, 0), - (_cmor_latitude(), 1), - (_cmor_longitude(), 2), - ], - attributes={ - 'comment': COMMENT, - 'positive': 'down', - }) + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "rls") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") + cube = iris.cube.Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={ + "comment": COMMENT, + "positive": "down", + }, + ) return iris.cube.CubeList([cube]) def rsds_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='solar_radiation_downwards', - var_name='rlwd', - units='J m**-2', + _era5_data("hourly"), + long_name="solar_radiation_downwards", + var_name="rlwd", + units="J m**-2", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -684,65 +702,76 @@ def rsds_era5_hourly(): def rsds_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'rsds') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') / 3600 - cube = iris.cube.Cube(data.astype('float32'), - long_name=vardef.long_name, - var_name=vardef.short_name, - standard_name=vardef.standard_name, - units=Unit(vardef.units), - dim_coords_and_dims=[(time, 0), - (_cmor_latitude(), 1), - (_cmor_longitude(), 2)], - attributes={ - 'comment': COMMENT, - 'positive': 'down', - }) + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "rsds") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") / 3600 + cube = iris.cube.Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={ + "comment": COMMENT, + "positive": "down", + }, + ) return iris.cube.CubeList([cube]) def rsdt_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='thermal_radiation_downwards', - var_name='strd', - units='J m**-2', - dim_coords_and_dims=[(time, 0), (_era5_latitude(), 1), - (_era5_longitude(), 2)], + _era5_data("hourly"), + long_name="thermal_radiation_downwards", + var_name="strd", + units="J m**-2", + dim_coords_and_dims=[ + (time, 0), + (_era5_latitude(), 1), + (_era5_longitude(), 2), + ], ) return iris.cube.CubeList([cube]) def rsdt_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'rsdt') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') / 3600 - cube = iris.cube.Cube(data.astype('float32'), - long_name=vardef.long_name, - 
var_name=vardef.short_name, - standard_name=vardef.standard_name, - units=Unit(vardef.units), - dim_coords_and_dims=[(time, 0), - (_cmor_latitude(), 1), - (_cmor_longitude(), 2)], - attributes={ - 'comment': COMMENT, - 'positive': 'down', - }) + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "rsdt") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") / 3600 + cube = iris.cube.Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={ + "comment": COMMENT, + "positive": "down", + }, + ) return iris.cube.CubeList([cube]) def rss_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='net_solar_radiation', - var_name='ssr', - units='J m**-2', + _era5_data("hourly"), + long_name="net_solar_radiation", + var_name="ssr", + units="J m**-2", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -753,32 +782,36 @@ def rss_era5_hourly(): def rss_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'rss') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') / 3600 - cube = iris.cube.Cube(data.astype('float32'), - long_name=vardef.long_name, - var_name=vardef.short_name, - standard_name=vardef.standard_name, - units=Unit(vardef.units), - dim_coords_and_dims=[(time, 0), - (_cmor_latitude(), 1), - (_cmor_longitude(), 2)], - attributes={ - 'comment': COMMENT, - 'positive': 'down', - }) + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "rss") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") / 3600 + cube = iris.cube.Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={ + "comment": COMMENT, + "positive": "down", + }, + ) return iris.cube.CubeList([cube]) def tas_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='2m_temperature', - var_name='t2m', - units='K', + _era5_data("hourly"), + long_name="2m_temperature", + var_name="t2m", + units="K", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -789,30 +822,34 @@ def tas_era5_hourly(): def tas_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'tas') - time = _cmor_time('E1hr') - data = _cmor_data('E1hr') - cube = iris.cube.Cube(data.astype('float32'), - long_name=vardef.long_name, - var_name=vardef.short_name, - standard_name=vardef.standard_name, - units=Unit(vardef.units), - dim_coords_and_dims=[(time, 0), - (_cmor_latitude(), 1), - (_cmor_longitude(), 2)], - attributes={'comment': COMMENT}) - cube.add_aux_coord(_cmor_aux_height(2.)) + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "tas") + time = _cmor_time("E1hr") + data = _cmor_data("E1hr") + cube = iris.cube.Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 
2), + ], + attributes={"comment": COMMENT}, + ) + cube.add_aux_coord(_cmor_aux_height(2.0)) return iris.cube.CubeList([cube]) def tas_era5_monthly(): - time = _era5_time('monthly') + time = _era5_time("monthly") cube = iris.cube.Cube( - _era5_data('monthly'), - long_name='2m_temperature', - var_name='t2m', - units='K', + _era5_data("monthly"), + long_name="2m_temperature", + var_name="t2m", + units="K", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -823,12 +860,12 @@ def tas_era5_monthly(): def tas_cmor_amon(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('Amon', 'tas') - time = _cmor_time('Amon', bounds=True) - data = _cmor_data('Amon') + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("Amon", "tas") + time = _cmor_time("Amon", bounds=True) + data = _cmor_data("Amon") cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -838,20 +875,20 @@ def tas_cmor_amon(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) - cube.add_aux_coord(_cmor_aux_height(2.)) + cube.add_aux_coord(_cmor_aux_height(2.0)) return iris.cube.CubeList([cube]) def zg_era5_monthly(): - time = _era5_time('monthly') + time = _era5_time("monthly") data = np.ones((3, 2, 3, 3)) cube = iris.cube.Cube( data, - long_name='geopotential height', - var_name='zg', - units='m**2 s**-2', + long_name="geopotential height", + var_name="zg", + units="m**2 s**-2", dim_coords_and_dims=[ (time, 0), (_era5_plev(), 1), @@ -863,13 +900,13 @@ def zg_era5_monthly(): def zg_cmor_amon(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('Amon', 'zg') - time = _cmor_time('Amon', bounds=True) + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("Amon", "zg") + time = _cmor_time("Amon", bounds=True) data = np.ones((3, 2, 3, 3)) data = data / 9.80665 cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -880,18 +917,18 @@ def zg_cmor_amon(): (_cmor_latitude(), 2), (_cmor_longitude(), 3), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) return iris.cube.CubeList([cube]) def tasmax_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='maximum 2m temperature', - var_name='mx2t', - units='K', + _era5_data("hourly"), + long_name="maximum 2m temperature", + var_name="mx2t", + units="K", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -902,12 +939,12 @@ def tasmax_era5_hourly(): def tasmax_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'tasmax') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "tasmax") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -917,19 +954,19 @@ def tasmax_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) - cube.add_aux_coord(_cmor_aux_height(2.)) + 
cube.add_aux_coord(_cmor_aux_height(2.0)) return iris.cube.CubeList([cube]) def tasmin_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='minimum 2m temperature', - var_name='mn2t', - units='K', + _era5_data("hourly"), + long_name="minimum 2m temperature", + var_name="mn2t", + units="K", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -940,12 +977,12 @@ def tasmin_era5_hourly(): def tasmin_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'tasmin') - time = _cmor_time('E1hr', shifted=True, bounds=True) - data = _cmor_data('E1hr') + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "tasmin") + time = _cmor_time("E1hr", shifted=True, bounds=True) + data = _cmor_data("E1hr") cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -955,19 +992,19 @@ def tasmin_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) - cube.add_aux_coord(_cmor_aux_height(2.)) + cube.add_aux_coord(_cmor_aux_height(2.0)) return iris.cube.CubeList([cube]) def uas_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='10m_u_component_of_wind', - var_name='u10', - units='m s-1', + _era5_data("hourly"), + long_name="10m_u_component_of_wind", + var_name="u10", + units="m s-1", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -978,12 +1015,12 @@ def uas_era5_hourly(): def uas_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'uas') - time = _cmor_time('E1hr') - data = _cmor_data('E1hr') + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "uas") + time = _cmor_time("E1hr") + data = _cmor_data("E1hr") cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -993,19 +1030,19 @@ def uas_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) - cube.add_aux_coord(_cmor_aux_height(10.)) + cube.add_aux_coord(_cmor_aux_height(10.0)) return iris.cube.CubeList([cube]) def vas_era5_hourly(): - time = _era5_time('hourly') + time = _era5_time("hourly") cube = iris.cube.Cube( - _era5_data('hourly'), - long_name='10m_v_component_of_wind', - var_name='v10', - units='m s-1', + _era5_data("hourly"), + long_name="10m_v_component_of_wind", + var_name="v10", + units="m s-1", dim_coords_and_dims=[ (time, 0), (_era5_latitude(), 1), @@ -1016,12 +1053,12 @@ def vas_era5_hourly(): def vas_cmor_e1hr(): - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('E1hr', 'vas') - time = _cmor_time('E1hr') - data = _cmor_data('E1hr') + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("E1hr", "vas") + time = _cmor_time("E1hr") + data = _cmor_data("E1hr") cube = iris.cube.Cube( - data.astype('float32'), + data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, standard_name=vardef.standard_name, @@ -1031,64 +1068,69 @@ def vas_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={'comment': COMMENT}, + attributes={"comment": COMMENT}, ) - 
cube.add_aux_coord(_cmor_aux_height(10.)) + cube.add_aux_coord(_cmor_aux_height(10.0)) return iris.cube.CubeList([cube]) VARIABLES = [ - pytest.param(a, b, c, d, id=c + '_' + d) for (a, b, c, d) in [ - (cl_era5_monthly(), cl_cmor_amon(), 'cl', 'Amon'), - (clt_era5_hourly(), clt_cmor_e1hr(), 'clt', 'E1hr'), - (evspsbl_era5_hourly(), evspsbl_cmor_e1hr(), 'evspsbl', 'E1hr'), - (evspsblpot_era5_hourly(), evspsblpot_cmor_e1hr(), 'evspsblpot', - 'E1hr'), - (mrro_era5_hourly(), mrro_cmor_e1hr(), 'mrro', 'E1hr'), - (orog_era5_hourly(), orog_cmor_fx(), 'orog', 'fx'), - (pr_era5_monthly(), pr_cmor_amon(), 'pr', 'Amon'), - (pr_era5_hourly(), pr_cmor_e1hr(), 'pr', 'E1hr'), - (prsn_era5_hourly(), prsn_cmor_e1hr(), 'prsn', 'E1hr'), - (ptype_era5_hourly(), ptype_cmor_e1hr(), 'ptype', 'E1hr'), - (rlds_era5_hourly(), rlds_cmor_e1hr(), 'rlds', 'E1hr'), - (rls_era5_hourly(), rls_cmor_e1hr(), 'rls', 'E1hr'), - (rsds_era5_hourly(), rsds_cmor_e1hr(), 'rsds', 'E1hr'), - (rsdt_era5_hourly(), rsdt_cmor_e1hr(), 'rsdt', 'E1hr'), - (rss_era5_hourly(), rss_cmor_e1hr(), 'rss', 'E1hr'), - (tas_era5_hourly(), tas_cmor_e1hr(), 'tas', 'E1hr'), - (tas_era5_monthly(), tas_cmor_amon(), 'tas', 'Amon'), - (tasmax_era5_hourly(), tasmax_cmor_e1hr(), 'tasmax', 'E1hr'), - (tasmin_era5_hourly(), tasmin_cmor_e1hr(), 'tasmin', 'E1hr'), - (uas_era5_hourly(), uas_cmor_e1hr(), 'uas', 'E1hr'), - (vas_era5_hourly(), vas_cmor_e1hr(), 'vas', 'E1hr'), - (zg_era5_monthly(), zg_cmor_amon(), 'zg', 'Amon'), + pytest.param(a, b, c, d, id=c + "_" + d) + for (a, b, c, d) in [ + (cl_era5_monthly(), cl_cmor_amon(), "cl", "Amon"), + (clt_era5_hourly(), clt_cmor_e1hr(), "clt", "E1hr"), + (evspsbl_era5_hourly(), evspsbl_cmor_e1hr(), "evspsbl", "E1hr"), + ( + evspsblpot_era5_hourly(), + evspsblpot_cmor_e1hr(), + "evspsblpot", + "E1hr", + ), + (mrro_era5_hourly(), mrro_cmor_e1hr(), "mrro", "E1hr"), + (orog_era5_hourly(), orog_cmor_fx(), "orog", "fx"), + (pr_era5_monthly(), pr_cmor_amon(), "pr", "Amon"), + (pr_era5_hourly(), pr_cmor_e1hr(), "pr", "E1hr"), + (prsn_era5_hourly(), prsn_cmor_e1hr(), "prsn", "E1hr"), + (ptype_era5_hourly(), ptype_cmor_e1hr(), "ptype", "E1hr"), + (rlds_era5_hourly(), rlds_cmor_e1hr(), "rlds", "E1hr"), + (rls_era5_hourly(), rls_cmor_e1hr(), "rls", "E1hr"), + (rsds_era5_hourly(), rsds_cmor_e1hr(), "rsds", "E1hr"), + (rsdt_era5_hourly(), rsdt_cmor_e1hr(), "rsdt", "E1hr"), + (rss_era5_hourly(), rss_cmor_e1hr(), "rss", "E1hr"), + (tas_era5_hourly(), tas_cmor_e1hr(), "tas", "E1hr"), + (tas_era5_monthly(), tas_cmor_amon(), "tas", "Amon"), + (tasmax_era5_hourly(), tasmax_cmor_e1hr(), "tasmax", "E1hr"), + (tasmin_era5_hourly(), tasmin_cmor_e1hr(), "tasmin", "E1hr"), + (uas_era5_hourly(), uas_cmor_e1hr(), "uas", "E1hr"), + (vas_era5_hourly(), vas_cmor_e1hr(), "vas", "E1hr"), + (zg_era5_monthly(), zg_cmor_amon(), "zg", "Amon"), ] ] -@pytest.mark.parametrize('era5_cubes, cmor_cubes, var, mip', VARIABLES) +@pytest.mark.parametrize("era5_cubes, cmor_cubes, var, mip", VARIABLES) def test_cmorization(era5_cubes, cmor_cubes, var, mip): """Verify that cmorization results in the expected target cube.""" - fixed_cubes = fix_metadata(era5_cubes, var, 'native6', 'era5', mip) + fixed_cubes = fix_metadata(era5_cubes, var, "native6", "era5", mip) assert len(fixed_cubes) == 1 fixed_cube = fixed_cubes[0] cmor_cube = cmor_cubes[0] # Test that CMOR checks are passing - fixed_cubes = cmor_check_metadata(fixed_cube, 'native6', mip, var) + fixed_cubes = cmor_check_metadata(fixed_cube, "native6", mip, var) - if fixed_cube.coords('time'): + if 
fixed_cube.coords("time"): for cube in [fixed_cube, cmor_cube]: - coord = cube.coord('time') + coord = cube.coord("time") coord.points = np.round(coord.points, decimals=7) if coord.bounds is not None: coord.bounds = np.round(coord.bounds, decimals=7) print("Test results for variable/MIP: ", var, mip) - print('cmor_cube:', cmor_cube) - print('fixed_cube:', fixed_cube) - print('cmor_cube data:', cmor_cube.data) - print('fixed_cube data:', fixed_cube.data) + print("cmor_cube:", cmor_cube) + print("fixed_cube:", fixed_cube) + print("cmor_cube data:", cmor_cube.data) + print("fixed_cube data:", fixed_cube.data) print("cmor_cube coords:") for coord in cmor_cube.coords(): print(coord) diff --git a/tests/integration/cmor/_fixes/native6/test_mswep.py b/tests/integration/cmor/_fixes/native6/test_mswep.py index 70418c748c..9169b7631b 100644 --- a/tests/integration/cmor/_fixes/native6/test_mswep.py +++ b/tests/integration/cmor/_fixes/native6/test_mswep.py @@ -1,4 +1,5 @@ """Tests for the fixes of MSWEP.""" + from pathlib import Path import iris @@ -15,10 +16,10 @@ from esmvalcore.cmor.table import CMOR_TABLES -@pytest.mark.parametrize('mip_table', ('Amon', 'day')) +@pytest.mark.parametrize("mip_table", ("Amon", "day")) def test_get_pr_fix(mip_table): """Test whether the right fix gets found.""" - fix = Fix.get_fixes('native6', 'MSWEP', mip_table, 'pr') + fix = Fix.get_fixes("native6", "MSWEP", mip_table, "pr") assert isinstance(fix[0], Pr) @@ -27,7 +28,7 @@ def cube_month(): """Return extract from mswep monthly data (shape 3x5x5).""" # out = cube[0:3, 0:360:72, 0:720:144] # iris.save(out, 'mswep_month.nc') - path = Path(__file__).with_name('mswep_month.nc') + path = Path(__file__).with_name("mswep_month.nc") return iris.load_cube(str(path)) @@ -36,23 +37,23 @@ def cube_day(): """Return extract from mswep daily data (shape 3x5x5).""" # out = cube[0:3, 0:360:72, 0:720:144] # iris.save(out, 'mswep_day.nc') - path = Path(__file__).with_name('mswep_day.nc') + path = Path(__file__).with_name("mswep_day.nc") return iris.load_cube(str(path)) @pytest.fixture def fix_month(): """Return fix for monthly pr data.""" - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('Amon', 'pr') + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("Amon", "pr") return Pr(vardef) @pytest.fixture def fix_day(): """Return fix for daily pr data.""" - cmor_table = CMOR_TABLES['native6'] - vardef = cmor_table.get_variable('day', 'pr') + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("day", "pr") return Pr(vardef) @@ -89,31 +90,31 @@ def test_fix_time_month(cube_month): """Test `fix_time_month`.""" fix_time_month(cube_month) - time = cube_month.coord('time') - assert time.units == 'days since 1850-01-01' + time = cube_month.coord("time") + assert time.units == "days since 1850-01-01" def test_fix_time_day(cube_day): """Test `fix_time_day`.""" fix_time_day(cube_day) - time = cube_day.coord('time') - assert time.units == 'days since 1850-01-01' + time = cube_day.coord("time") + assert time.units == "days since 1850-01-01" def test_fix_longitude(fix_month, cube_month): """Test `Pr._fix_longitude`.""" unfixed_data = cube_month.data.copy() - unfixed_lon = cube_month.coord(axis='X') + unfixed_lon = cube_month.coord(axis="X") shift = (unfixed_lon.points < 0).sum() fix_longitude(cube_month) - lon = cube_month.coord(axis='X') + lon = cube_month.coord(axis="X") assert lon.is_monotonic - coord_def = fix_month.vardef.coordinates['longitude'] + coord_def = 
fix_month.vardef.coordinates["longitude"] valid_min = float(coord_def.valid_min) valid_max = float(coord_def.valid_max) @@ -128,6 +129,6 @@ def test_fix_bounds(fix_month, cube_month): """Test `Pr._fix_bounds`.""" fix_month._fix_bounds(cube_month) - for axis in 'XYT': + for axis in "XYT": coord = cube_month.coord(axis=axis) assert coord.has_bounds() diff --git a/tests/integration/cmor/_fixes/obs4mips/test_airs_2_0.py b/tests/integration/cmor/_fixes/obs4mips/test_airs_2_0.py new file mode 100644 index 0000000000..d82aba1640 --- /dev/null +++ b/tests/integration/cmor/_fixes/obs4mips/test_airs_2_0.py @@ -0,0 +1,39 @@ +"""Test AIRS-2-0 fixes.""" + +import dask.array as da +import numpy as np +from iris.cube import Cube, CubeList + +from esmvalcore.cmor.fix import fix_metadata + + +def test_fix_metadata_hur(): + """Test ``fix_metadata`` for hur.""" + cubes = CubeList( + [ + Cube( + da.from_array([-0.1, 0.2, 1.2, 1.7]), + var_name="hur", + units="1", + attributes={"valid_range": [0.0, 1.5]}, + ), + ] + ) + + fixed_cubes = fix_metadata( + cubes, + "hur", + "obs4MIPs", + "AIRS-2-0", + "Amon", + check_level=5, + ) + + assert len(fixed_cubes) == 1 + fixed_cube = fixed_cubes[0] + assert fixed_cube.units == "%" + assert fixed_cube.attributes == {} + assert fixed_cube.has_lazy_data() + expected_data = np.ma.masked_invalid([np.nan, 20.0, 120.0, np.nan]) + np.testing.assert_allclose(fixed_cube.data.mask, expected_data.mask) + np.testing.assert_allclose(fixed_cube.data, expected_data) diff --git a/tests/integration/cmor/_fixes/obs4mips/test_airs_2_1.py b/tests/integration/cmor/_fixes/obs4mips/test_airs_2_1.py index 80ce0d00c6..db22045d1a 100644 --- a/tests/integration/cmor/_fixes/obs4mips/test_airs_2_1.py +++ b/tests/integration/cmor/_fixes/obs4mips/test_airs_2_1.py @@ -1,4 +1,5 @@ """Test AIRS-2-1 fixes.""" + import numpy as np from iris.coords import DimCoord from iris.cube import Cube, CubeList @@ -10,19 +11,24 @@ def get_air_pressure_coord(points, units): """Get ``air_pressure`` coordinate.""" - return DimCoord(points, var_name='plev', standard_name='air_pressure', - long_name='pressure', units=units) + return DimCoord( + points, + var_name="plev", + standard_name="air_pressure", + long_name="pressure", + units=units, + ) def test_get_allvars_fix(): """Test getting of fix.""" - fix = Fix.get_fixes('obs4MIPs', 'AIRS-2-1', 'Amon', 'cl') + fix = Fix.get_fixes("obs4MIPs", "AIRS-2-1", "Amon", "cl") assert fix == [AllVars(None), GenericFix(None)] def test_allvars_fix_no_air_pressure(): """Test fix for all variables.""" - cubes = CubeList([Cube(0.0, var_name='cl')]) + cubes = CubeList([Cube(0.0, var_name="cl")]) fix = AllVars(None) out_cubes = fix.fix_metadata(cubes.copy()) @@ -32,47 +38,59 @@ def test_allvars_fix_no_air_pressure(): def test_allvars_fix_correct_air_pressure_pa(): """Test fix for all variables.""" - air_pressure_coord = get_air_pressure_coord([100000.0, 80000.0], 'Pa') - cube = Cube([0.0, 1.0], var_name='cl', - dim_coords_and_dims=[(air_pressure_coord, 0)]) + air_pressure_coord = get_air_pressure_coord([100000.0, 80000.0], "Pa") + cube = Cube( + [0.0, 1.0], + var_name="cl", + dim_coords_and_dims=[(air_pressure_coord, 0)], + ) cubes = CubeList([cube]) fix = AllVars(None) out_cubes = fix.fix_metadata(cubes.copy()) assert len(out_cubes) == 1 assert out_cubes[0] == cubes[0] - assert out_cubes[0].coord('air_pressure').units == 'Pa' - np.testing.assert_allclose(out_cubes[0].coord('air_pressure').points, - [100000.0, 80000.0]) + assert out_cubes[0].coord("air_pressure").units == "Pa" + 
np.testing.assert_allclose( + out_cubes[0].coord("air_pressure").points, [100000.0, 80000.0] + ) def test_allvars_fix_correct_air_pressure_hpa(): """Test fix for all variables.""" - air_pressure_coord = get_air_pressure_coord([1000.0, 800.0], 'hPa') - cube = Cube([0.0, 1.0], var_name='cl', - dim_coords_and_dims=[(air_pressure_coord, 0)]) + air_pressure_coord = get_air_pressure_coord([1000.0, 800.0], "hPa") + cube = Cube( + [0.0, 1.0], + var_name="cl", + dim_coords_and_dims=[(air_pressure_coord, 0)], + ) cubes = CubeList([cube]) fix = AllVars(None) out_cubes = fix.fix_metadata(cubes.copy()) assert len(out_cubes) == 1 assert out_cubes[0] == cubes[0] - assert out_cubes[0].coord('air_pressure').units == 'hPa' - np.testing.assert_allclose(out_cubes[0].coord('air_pressure').points, - [1000.0, 800.0]) + assert out_cubes[0].coord("air_pressure").units == "hPa" + np.testing.assert_allclose( + out_cubes[0].coord("air_pressure").points, [1000.0, 800.0] + ) def test_allvars_fix_incorrect_air_pressure(): """Test fix for all variables.""" - air_pressure_coord = get_air_pressure_coord([100000.0, 80000.0], 'hPa') - cube = Cube([0.0, 1.0], var_name='cl', - dim_coords_and_dims=[(air_pressure_coord, 0)]) + air_pressure_coord = get_air_pressure_coord([100000.0, 80000.0], "hPa") + cube = Cube( + [0.0, 1.0], + var_name="cl", + dim_coords_and_dims=[(air_pressure_coord, 0)], + ) cubes = CubeList([cube]) fix = AllVars(None) out_cubes = fix.fix_metadata(cubes.copy()) assert len(out_cubes) == 1 assert out_cubes[0] != cubes[0] - assert out_cubes[0].coord('air_pressure').units == 'Pa' - np.testing.assert_allclose(out_cubes[0].coord('air_pressure').points, - [100000.0, 80000.0]) + assert out_cubes[0].coord("air_pressure").units == "Pa" + np.testing.assert_allclose( + out_cubes[0].coord("air_pressure").points, [100000.0, 80000.0] + ) diff --git a/tests/integration/cmor/_fixes/obs4mips/test_ssmi.py b/tests/integration/cmor/_fixes/obs4mips/test_ssmi.py index 6a66486778..e4d8e3d22b 100644 --- a/tests/integration/cmor/_fixes/obs4mips/test_ssmi.py +++ b/tests/integration/cmor/_fixes/obs4mips/test_ssmi.py @@ -1,4 +1,5 @@ """Test SSMI fixes.""" + import unittest from esmvalcore.cmor._fixes.fix import GenericFix @@ -8,7 +9,10 @@ class TestPrw(unittest.TestCase): """Test prw fixes.""" + def test_get(self): """Test fix get.""" - self.assertListEqual(Fix.get_fixes('obs4MIPs', 'SSMI', 'Amon', 'prw'), - [Prw(None), GenericFix(None)]) + self.assertListEqual( + Fix.get_fixes("obs4MIPs", "SSMI", "Amon", "prw"), + [Prw(None), GenericFix(None)], + ) diff --git a/tests/integration/cmor/_fixes/obs4mips/test_ssmi_meris.py b/tests/integration/cmor/_fixes/obs4mips/test_ssmi_meris.py index 024aa4e705..5d000bf1ef 100644 --- a/tests/integration/cmor/_fixes/obs4mips/test_ssmi_meris.py +++ b/tests/integration/cmor/_fixes/obs4mips/test_ssmi_meris.py @@ -1,4 +1,5 @@ """Test SSMI fixes.""" + import unittest from esmvalcore.cmor._fixes.fix import GenericFix @@ -8,8 +9,10 @@ class TestPrw(unittest.TestCase): """Test prw fixes.""" + def test_get(self): """Test fix get.""" self.assertListEqual( - Fix.get_fixes('obs4MIPs', 'SSMI-MERIS', 'Amon', 'prw'), - [Prw(None), GenericFix(None)]) + Fix.get_fixes("obs4MIPs", "SSMI-MERIS", "Amon", "prw"), + [Prw(None), GenericFix(None)], + ) diff --git a/tests/integration/cmor/_fixes/test_common.py b/tests/integration/cmor/_fixes/test_common.py index 809c1fbfd2..075335d7d8 100644 --- a/tests/integration/cmor/_fixes/test_common.py +++ b/tests/integration/cmor/_fixes/test_common.py @@ -1,4 +1,5 @@ """Test for common 
fixes used for multiple datasets.""" + import iris import numpy as np import pytest @@ -13,22 +14,34 @@ ) from esmvalcore.cmor.table import get_var_info -AIR_PRESSURE_POINTS = np.array([[[[1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], - [1.0, 1.0, 1.0, 1.0]], - [[2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0], - [10.0, 11.0, 12.0, 13.0]]]]) -AIR_PRESSURE_BOUNDS = np.array([[[[[0.0, 1.5], [-1.0, 2.0], [-2.0, 2.5], - [-3.0, 3.0]], - [[-4.0, 3.5], [-5.0, 4.0], [-6.0, 4.5], - [-7.0, 5.0]], - [[-8.0, 5.5], [-9.0, 6.0], [-10.0, 6.5], - [-11.0, 7.0]]], - [[[1.5, 3.0], [2.0, 5.0], [2.5, 7.0], - [3.0, 9.0]], - [[3.5, 11.0], [4.0, 13.0], [4.5, 15.0], - [5.0, 17.0]], - [[5.5, 19.0], [6.0, 21.0], [6.5, 23.0], - [7.0, 25.0]]]]]) +AIR_PRESSURE_POINTS = np.array( + [ + [ + [[1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0]], + [ + [2.0, 3.0, 4.0, 5.0], + [6.0, 7.0, 8.0, 9.0], + [10.0, 11.0, 12.0, 13.0], + ], + ] + ] +) +AIR_PRESSURE_BOUNDS = np.array( + [ + [ + [ + [[0.0, 1.5], [-1.0, 2.0], [-2.0, 2.5], [-3.0, 3.0]], + [[-4.0, 3.5], [-5.0, 4.0], [-6.0, 4.5], [-7.0, 5.0]], + [[-8.0, 5.5], [-9.0, 6.0], [-10.0, 6.5], [-11.0, 7.0]], + ], + [ + [[1.5, 3.0], [2.0, 5.0], [2.5, 7.0], [3.0, 9.0]], + [[3.5, 11.0], [4.0, 13.0], [4.5, 15.0], [5.0, 17.0]], + [[5.5, 19.0], [6.0, 21.0], [6.5, 23.0], [7.0, 25.0]], + ], + ] + ] +) def hybrid_pressure_coord_fix_metadata(nc_path, short_name, fix): @@ -39,32 +52,34 @@ def hybrid_pressure_coord_fix_metadata(nc_path, short_name, fix): assert len(cubes) == 4 var_names = [cube.var_name for cube in cubes] assert short_name in var_names - assert 'ps' in var_names - assert 'b_bnds' in var_names + assert "ps" in var_names + assert "b_bnds" in var_names # Raw cube cube = cubes.extract_cube(NameConstraint(var_name=short_name)) - air_pressure_coord = cube.coord('air_pressure') + air_pressure_coord = cube.coord("air_pressure") assert air_pressure_coord.points is not None assert air_pressure_coord.bounds is None np.testing.assert_allclose(air_pressure_coord.points, AIR_PRESSURE_POINTS) # Raw ps cube - ps_cube = cubes.extract_cube('surface_air_pressure') - assert ps_cube.attributes == {'additional_attribute': 'xyz'} + ps_cube = cubes.extract_cube("surface_air_pressure") + assert ps_cube.attributes == {"additional_attribute": "xyz"} # Apply fix fixed_cubes = fix.fix_metadata(cubes) assert len(fixed_cubes) == 1 fixed_cube = fixed_cubes.extract_cube(NameConstraint(var_name=short_name)) - fixed_air_pressure_coord = fixed_cube.coord('air_pressure') + fixed_air_pressure_coord = fixed_cube.coord("air_pressure") assert fixed_air_pressure_coord.points is not None assert fixed_air_pressure_coord.bounds is not None - np.testing.assert_allclose(fixed_air_pressure_coord.points, - AIR_PRESSURE_POINTS) - np.testing.assert_allclose(fixed_air_pressure_coord.bounds, - AIR_PRESSURE_BOUNDS) - surface_pressure_coord = fixed_cube.coord(var_name='ps') + np.testing.assert_allclose( + fixed_air_pressure_coord.points, AIR_PRESSURE_POINTS + ) + np.testing.assert_allclose( + fixed_air_pressure_coord.bounds, AIR_PRESSURE_BOUNDS + ) + surface_pressure_coord = fixed_cube.coord(var_name="ps") assert surface_pressure_coord.attributes == {} return var_names @@ -72,33 +87,53 @@ def hybrid_pressure_coord_fix_metadata(nc_path, short_name, fix): def test_cl_hybrid_pressure_coord_fix_metadata_with_a(test_data_path): """Test ``fix_metadata`` for ``cl``.""" - vardef = get_var_info('CMIP6', 'Amon', 'cl') - nc_path = test_data_path / 'common_cl_a.nc' + vardef = get_var_info("CMIP6", "Amon", "cl") + nc_path = test_data_path 
/ "common_cl_a.nc" var_names = hybrid_pressure_coord_fix_metadata( - nc_path, 'cl', ClFixHybridPressureCoord(vardef)) - assert 'a_bnds' in var_names + nc_path, "cl", ClFixHybridPressureCoord(vardef) + ) + assert "a_bnds" in var_names def test_cl_hybrid_pressure_coord_fix_metadata_with_ap(test_data_path): """Test ``fix_metadata`` for ``cl``.""" - vardef = get_var_info('CMIP6', 'Amon', 'cl') - nc_path = test_data_path / 'common_cl_ap.nc' + vardef = get_var_info("CMIP6", "Amon", "cl") + nc_path = test_data_path / "common_cl_ap.nc" var_names = hybrid_pressure_coord_fix_metadata( - nc_path, 'cl', ClFixHybridPressureCoord(vardef)) - assert 'ap_bnds' in var_names + nc_path, "cl", ClFixHybridPressureCoord(vardef) + ) + assert "ap_bnds" in var_names HEIGHT_POINTS = np.array([[[1.0, 1.0]], [[2.0, 3.0]]]) -HEIGHT_BOUNDS_WRONG = np.array([[[[0.5, 1.5], [0.5, 1.5]]], - [[[1.5, 3.0], [2.5, 4.0]]]]) -HEIGHT_BOUNDS_RIGHT = np.array([[[[0.5, 1.5], [-0.5, 2.0]]], - [[[1.5, 3.0], [2.0, 5.0]]]]) -PRESSURE_POINTS = np.array([[[101312.98512207, 101312.98512207]], - [[101300.97123885, 101288.95835383]]]) -PRESSURE_BOUNDS = np.array([[[[101318.99243691, 101306.9780559], - [101331.00781103, 101300.97123885]]], - [[[101306.9780559, 101288.95835383], - [101300.97123885, 101264.93559234]]]]) +HEIGHT_BOUNDS_WRONG = np.array( + [[[[0.5, 1.5], [0.5, 1.5]]], [[[1.5, 3.0], [2.5, 4.0]]]] +) +HEIGHT_BOUNDS_RIGHT = np.array( + [[[[0.5, 1.5], [-0.5, 2.0]]], [[[1.5, 3.0], [2.0, 5.0]]]] +) +PRESSURE_POINTS = np.array( + [ + [[101312.98512207, 101312.98512207]], + [[101300.97123885, 101288.95835383]], + ] +) +PRESSURE_BOUNDS = np.array( + [ + [ + [ + [101318.99243691, 101306.9780559], + [101331.00781103, 101300.97123885], + ] + ], + [ + [ + [101306.9780559, 101288.95835383], + [101300.97123885, 101264.93559234], + ] + ], + ] +) def hybrid_height_coord_fix_metadata(nc_path, short_name, fix): @@ -109,67 +144,73 @@ def hybrid_height_coord_fix_metadata(nc_path, short_name, fix): assert len(cubes) == 3 var_names = [cube.var_name for cube in cubes] assert short_name in var_names - assert 'orog' in var_names - assert 'b_bnds' in var_names + assert "orog" in var_names + assert "b_bnds" in var_names # Raw cube cube = cubes.extract_cube(NameConstraint(var_name=short_name)) - height_coord = cube.coord('altitude') + height_coord = cube.coord("altitude") assert height_coord.points is not None assert height_coord.bounds is not None np.testing.assert_allclose(height_coord.points, HEIGHT_POINTS) np.testing.assert_allclose(height_coord.bounds, HEIGHT_BOUNDS_WRONG) assert not np.allclose(height_coord.bounds, HEIGHT_BOUNDS_RIGHT) - assert not cube.coords('air_pressure') + assert not cube.coords("air_pressure") # Apply fix fixed_cubes = fix.fix_metadata(cubes) assert len(fixed_cubes) == 1 fixed_cube = fixed_cubes.extract_cube(NameConstraint(var_name=short_name)) - fixed_height_coord = fixed_cube.coord('altitude') + fixed_height_coord = fixed_cube.coord("altitude") assert fixed_height_coord.points is not None assert fixed_height_coord.bounds is not None np.testing.assert_allclose(fixed_height_coord.points, HEIGHT_POINTS) np.testing.assert_allclose(fixed_height_coord.bounds, HEIGHT_BOUNDS_RIGHT) assert not np.allclose(fixed_height_coord.bounds, HEIGHT_BOUNDS_WRONG) - air_pressure_coord = cube.coord('air_pressure') + air_pressure_coord = cube.coord("air_pressure") np.testing.assert_allclose(air_pressure_coord.points, PRESSURE_POINTS) np.testing.assert_allclose(air_pressure_coord.bounds, PRESSURE_BOUNDS) - assert air_pressure_coord.var_name == 'plev' - 
assert air_pressure_coord.standard_name == 'air_pressure' - assert air_pressure_coord.long_name == 'pressure' - assert air_pressure_coord.units == 'Pa' + assert air_pressure_coord.var_name == "plev" + assert air_pressure_coord.standard_name == "air_pressure" + assert air_pressure_coord.long_name == "pressure" + assert air_pressure_coord.units == "Pa" def test_cl_hybrid_height_coord_fix_metadata(test_data_path): """Test ``fix_metadata`` for ``cl``.""" - vardef = get_var_info('CMIP6', 'Amon', 'cl') - nc_path = test_data_path / 'common_cl_hybrid_height.nc' - hybrid_height_coord_fix_metadata(nc_path, 'cl', - ClFixHybridHeightCoord(vardef)) + vardef = get_var_info("CMIP6", "Amon", "cl") + nc_path = test_data_path / "common_cl_hybrid_height.nc" + hybrid_height_coord_fix_metadata( + nc_path, "cl", ClFixHybridHeightCoord(vardef) + ) @pytest.fixture def siconc_cubes(): """Sample cube.""" - time_coord = iris.coords.DimCoord([0.0], - standard_name='time', - var_name='time', - units='days since 6543-2-1') - lat_coord = iris.coords.DimCoord([-30.0], - standard_name='latitude', - var_name='lat', - units='degrees_north') - lon_coord = iris.coords.DimCoord([30.0], - standard_name='longitude', - var_name='lon', - units='degrees_east') + time_coord = iris.coords.DimCoord( + [0.0], + standard_name="time", + var_name="time", + units="days since 6543-2-1", + ) + lat_coord = iris.coords.DimCoord( + [-30.0], + standard_name="latitude", + var_name="lat", + units="degrees_north", + ) + lon_coord = iris.coords.DimCoord( + [30.0], standard_name="longitude", var_name="lon", units="degrees_east" + ) coords_specs = [(time_coord, 0), (lat_coord, 1), (lon_coord, 2)] - cube = iris.cube.Cube([[[22.0]]], - standard_name='sea_ice_area_fraction', - var_name='siconc', - units='%', - dim_coords_and_dims=coords_specs) + cube = iris.cube.Cube( + [[[22.0]]], + standard_name="sea_ice_area_fraction", + var_name="siconc", + units="%", + dim_coords_and_dims=coords_specs, + ) return iris.cube.CubeList([cube]) @@ -180,34 +221,34 @@ def test_siconc_fix_metadata(siconc_cubes): assert siconc_cube.var_name == "siconc" # Extract siconc cube - siconc_cube = siconc_cubes.extract_cube('sea_ice_area_fraction') - assert not siconc_cube.coords('typesi') + siconc_cube = siconc_cubes.extract_cube("sea_ice_area_fraction") + assert not siconc_cube.coords("typesi") # Apply fix - vardef = get_var_info('CMIP6', 'SImon', 'siconc') + vardef = get_var_info("CMIP6", "SImon", "siconc") fix = SiconcFixScalarCoord(vardef) fixed_cubes = fix.fix_metadata(siconc_cubes) assert len(fixed_cubes) == 1 - fixed_siconc_cube = fixed_cubes.extract_cube('sea_ice_area_fraction') - fixed_typesi_coord = fixed_siconc_cube.coord('area_type') + fixed_siconc_cube = fixed_cubes.extract_cube("sea_ice_area_fraction") + fixed_typesi_coord = fixed_siconc_cube.coord("area_type") assert fixed_typesi_coord.points is not None assert fixed_typesi_coord.bounds is None - np.testing.assert_equal(fixed_typesi_coord.points, ['sea_ice']) - np.testing.assert_equal(fixed_typesi_coord.units, Unit('No unit')) + np.testing.assert_equal(fixed_typesi_coord.points, ["sea_ice"]) + np.testing.assert_equal(fixed_typesi_coord.units, Unit("No unit")) def get_tos_cubes(wrong_ij_names=False, ij_bounds=False): """Cubes containing tos variable.""" if wrong_ij_names: - j_var_name = 'lat' - j_long_name = 'latitude' - i_var_name = 'lon' - i_long_name = 'longitude' + j_var_name = "lat" + j_long_name = "latitude" + i_var_name = "lon" + i_long_name = "longitude" else: - j_var_name = 'j' - j_long_name = 'cell index 
along second dimension' - i_var_name = 'i' - i_long_name = 'cell index along first dimension' + j_var_name = "j" + j_long_name = "cell index along second dimension" + i_var_name = "i" + i_long_name = "cell index along first dimension" if ij_bounds: j_bounds = [[10.0, 30.0], [30.0, 50.0]] i_bounds = [[5.0, 15.0], [15.0, 25.0], [25.0, 35.0]] @@ -228,31 +269,31 @@ def get_tos_cubes(wrong_ij_names=False, ij_bounds=False): ) lat_coord = iris.coords.AuxCoord( [[-40.0, -20.0, 0.0], [-20.0, 0.0, 20.0]], - var_name='lat', - standard_name='latitude', - units='degrees_north', + var_name="lat", + standard_name="latitude", + units="degrees_north", ) lon_coord = iris.coords.AuxCoord( [[100.0, 140.0, 180.0], [80.0, 100.0, 120.0]], - var_name='lon', - standard_name='longitude', - units='degrees_east', + var_name="lon", + standard_name="longitude", + units="degrees_east", ) time_coord = iris.coords.DimCoord( 1.0, bounds=[0.0, 2.0], - var_name='time', - standard_name='time', - long_name='time', - units='days since 1950-01-01', + var_name="time", + standard_name="time", + long_name="time", + units="days since 1950-01-01", ) # Create tos variable cube cube = iris.cube.Cube( np.full((1, 2, 3), 300.0), - var_name='tos', - long_name='sea_surface_temperature', - units='K', + var_name="tos", + long_name="sea_surface_temperature", + units="K", dim_coords_and_dims=[(time_coord, 0), (j_coord, 1), (i_coord, 2)], aux_coords_and_dims=[(lat_coord, (1, 2)), (lon_coord, (1, 2))], ) @@ -268,28 +309,28 @@ def get_tos_regular_grid_cubes(): time_coord = iris.coords.DimCoord( 1.0, bounds=[0.0, 2.0], - var_name='time', - standard_name='time', - long_name='time', - units='days since 1950-01-01', + var_name="time", + standard_name="time", + long_name="time", + units="days since 1950-01-01", ) lat_coord = iris.coords.DimCoord( [-40.0, -20.0, 0.0], - var_name='lat', - standard_name='latitude', - units='degrees_north', + var_name="lat", + standard_name="latitude", + units="degrees_north", ) lon_coord = iris.coords.DimCoord( [100.0, 140.0, 180.0], - var_name='lon', - standard_name='longitude', - units='degrees_east', + var_name="lon", + standard_name="longitude", + units="degrees_east", ) regular_grid_cube = iris.cube.Cube( np.full((1, 3, 3), 300.0), - var_name='tos', - long_name='sea_surface_temperature', - units='K', + var_name="tos", + long_name="sea_surface_temperature", + units="K", dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], ) # Create empty (dummy) cube @@ -305,64 +346,82 @@ def tos_cubes_wrong_ij_names(): def test_ocean_fix_grid_wrong_ij_names(tos_cubes_wrong_ij_names): """Test ``fix_metadata`` with cubes with wrong ij names.""" - cube_in = tos_cubes_wrong_ij_names.extract_cube('sea_surface_temperature') - assert len(cube_in.coords('latitude')) == 2 - assert len(cube_in.coords('longitude')) == 2 - assert cube_in.coord('latitude', dimensions=1).bounds is not None - assert cube_in.coord('longitude', dimensions=2).bounds is not None - assert cube_in.coord('latitude', dimensions=(1, 2)).bounds is None - assert cube_in.coord('longitude', dimensions=(1, 2)).bounds is None + cube_in = tos_cubes_wrong_ij_names.extract_cube("sea_surface_temperature") + assert len(cube_in.coords("latitude")) == 2 + assert len(cube_in.coords("longitude")) == 2 + assert cube_in.coord("latitude", dimensions=1).bounds is not None + assert cube_in.coord("longitude", dimensions=2).bounds is not None + assert cube_in.coord("latitude", dimensions=(1, 2)).bounds is None + assert cube_in.coord("longitude", dimensions=(1, 2)).bounds is 
None # Apply fix - vardef = get_var_info('CMIP6', 'Omon', 'tos') + vardef = get_var_info("CMIP6", "Omon", "tos") fix = OceanFixGrid(vardef) fixed_cubes = fix.fix_metadata(tos_cubes_wrong_ij_names) assert len(fixed_cubes) == 1 - fixed_cube = fixed_cubes.extract_cube('sea_surface_temperature') + fixed_cube = fixed_cubes.extract_cube("sea_surface_temperature") assert fixed_cube is cube_in # Check ij names - i_coord = fixed_cube.coord('cell index along first dimension') - j_coord = fixed_cube.coord('cell index along second dimension') - assert i_coord.var_name == 'i' + i_coord = fixed_cube.coord("cell index along first dimension") + j_coord = fixed_cube.coord("cell index along second dimension") + assert i_coord.var_name == "i" assert i_coord.standard_name is None - assert i_coord.long_name == 'cell index along first dimension' - assert i_coord.units == '1' + assert i_coord.long_name == "cell index along first dimension" + assert i_coord.units == "1" assert i_coord.circular is False - assert j_coord.var_name == 'j' + assert j_coord.var_name == "j" assert j_coord.standard_name is None - assert j_coord.long_name == 'cell index along second dimension' - assert j_coord.units == '1' + assert j_coord.long_name == "cell index along second dimension" + assert j_coord.units == "1" # Check ij points and bounds np.testing.assert_allclose(i_coord.points, [0, 1, 2]) - np.testing.assert_allclose(i_coord.bounds, - [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]]) + np.testing.assert_allclose( + i_coord.bounds, [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]] + ) np.testing.assert_allclose(j_coord.points, [0, 1]) np.testing.assert_allclose(j_coord.bounds, [[-0.5, 0.5], [0.5, 1.5]]) # Check bounds of latitude and longitude - assert len(fixed_cube.coords('latitude')) == 1 - assert len(fixed_cube.coords('longitude')) == 1 - assert fixed_cube.coord('latitude').bounds is not None - assert fixed_cube.coord('longitude').bounds is not None + assert len(fixed_cube.coords("latitude")) == 1 + assert len(fixed_cube.coords("longitude")) == 1 + assert fixed_cube.coord("latitude").bounds is not None + assert fixed_cube.coord("longitude").bounds is not None latitude_bounds = np.array( - [[[-43.48076211, -34.01923789, -22.00961894, -31.47114317], - [-34.01923789, -10.0, 2.00961894, -22.00961894], - [-10.0, -0.53847577, 11.47114317, 2.00961894]], - [[-31.47114317, -22.00961894, -10.0, -19.46152423], - [-22.00961894, 2.00961894, 14.01923789, -10.0], - [2.00961894, 11.47114317, 23.48076211, 14.01923789]]]) + [ + [ + [-43.48076211, -34.01923789, -22.00961894, -31.47114317], + [-34.01923789, -10.0, 2.00961894, -22.00961894], + [-10.0, -0.53847577, 11.47114317, 2.00961894], + ], + [ + [-31.47114317, -22.00961894, -10.0, -19.46152423], + [-22.00961894, 2.00961894, 14.01923789, -10.0], + [2.00961894, 11.47114317, 23.48076211, 14.01923789], + ], + ] + ) np.testing.assert_allclose( - fixed_cube.coord('latitude').bounds, latitude_bounds) - longitude_bounds = np.array([[[140.625, 99.375, 99.375, 140.625], - [99.375, 140.625, 140.625, 99.375], - [140.625, 99.375, 99.375, 140.625]], - [[140.625, 99.375, 99.375, 140.625], - [99.375, 140.625, 140.625, 99.375], - [140.625, 99.375, 99.375, 140.625]]]) + fixed_cube.coord("latitude").bounds, latitude_bounds + ) + longitude_bounds = np.array( + [ + [ + [140.625, 99.375, 99.375, 140.625], + [99.375, 140.625, 140.625, 99.375], + [140.625, 99.375, 99.375, 140.625], + ], + [ + [140.625, 99.375, 99.375, 140.625], + [99.375, 140.625, 140.625, 99.375], + [140.625, 99.375, 99.375, 140.625], + ], + ] + ) 
np.testing.assert_allclose( - fixed_cube.coord('longitude').bounds, longitude_bounds) + fixed_cube.coord("longitude").bounds, longitude_bounds + ) @pytest.fixture @@ -373,128 +432,164 @@ def tos_cubes_no_ij_bounds(): def test_ocean_fix_grid_no_ij_bounds(tos_cubes_no_ij_bounds): """Test ``fix_metadata`` with cubes with no ij bounds.""" - cube_in = tos_cubes_no_ij_bounds.extract_cube('sea_surface_temperature') - assert len(cube_in.coords('latitude')) == 1 - assert len(cube_in.coords('longitude')) == 1 - assert cube_in.coord('latitude').bounds is None - assert cube_in.coord('longitude').bounds is None - assert cube_in.coord('cell index along first dimension').var_name == 'i' - assert cube_in.coord('cell index along second dimension').var_name == 'j' - assert cube_in.coord('cell index along first dimension').bounds is None - assert cube_in.coord('cell index along second dimension').bounds is None + cube_in = tos_cubes_no_ij_bounds.extract_cube("sea_surface_temperature") + assert len(cube_in.coords("latitude")) == 1 + assert len(cube_in.coords("longitude")) == 1 + assert cube_in.coord("latitude").bounds is None + assert cube_in.coord("longitude").bounds is None + assert cube_in.coord("cell index along first dimension").var_name == "i" + assert cube_in.coord("cell index along second dimension").var_name == "j" + assert cube_in.coord("cell index along first dimension").bounds is None + assert cube_in.coord("cell index along second dimension").bounds is None # Apply fix - vardef = get_var_info('CMIP6', 'Omon', 'tos') + vardef = get_var_info("CMIP6", "Omon", "tos") fix = OceanFixGrid(vardef) fixed_cubes = fix.fix_metadata(tos_cubes_no_ij_bounds) assert len(fixed_cubes) == 1 - fixed_cube = fixed_cubes.extract_cube('sea_surface_temperature') + fixed_cube = fixed_cubes.extract_cube("sea_surface_temperature") assert fixed_cube is cube_in # Check ij names - i_coord = fixed_cube.coord('cell index along first dimension') - j_coord = fixed_cube.coord('cell index along second dimension') - assert i_coord.var_name == 'i' + i_coord = fixed_cube.coord("cell index along first dimension") + j_coord = fixed_cube.coord("cell index along second dimension") + assert i_coord.var_name == "i" assert i_coord.standard_name is None - assert i_coord.long_name == 'cell index along first dimension' - assert i_coord.units == '1' + assert i_coord.long_name == "cell index along first dimension" + assert i_coord.units == "1" assert i_coord.circular is False - assert j_coord.var_name == 'j' + assert j_coord.var_name == "j" assert j_coord.standard_name is None - assert j_coord.long_name == 'cell index along second dimension' - assert j_coord.units == '1' + assert j_coord.long_name == "cell index along second dimension" + assert j_coord.units == "1" # Check ij points and bounds np.testing.assert_allclose(i_coord.points, [0, 1, 2]) - np.testing.assert_allclose(i_coord.bounds, - [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]]) + np.testing.assert_allclose( + i_coord.bounds, [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]] + ) np.testing.assert_allclose(j_coord.points, [0, 1]) np.testing.assert_allclose(j_coord.bounds, [[-0.5, 0.5], [0.5, 1.5]]) # Check bounds of latitude and longitude - assert len(fixed_cube.coords('latitude')) == 1 - assert len(fixed_cube.coords('longitude')) == 1 - assert fixed_cube.coord('latitude').bounds is not None - assert fixed_cube.coord('longitude').bounds is not None + assert len(fixed_cube.coords("latitude")) == 1 + assert len(fixed_cube.coords("longitude")) == 1 + assert fixed_cube.coord("latitude").bounds is not None + 
assert fixed_cube.coord("longitude").bounds is not None latitude_bounds = np.array( - [[[-43.48076211, -34.01923789, -22.00961894, -31.47114317], - [-34.01923789, -10.0, 2.00961894, -22.00961894], - [-10.0, -0.53847577, 11.47114317, 2.00961894]], - [[-31.47114317, -22.00961894, -10.0, -19.46152423], - [-22.00961894, 2.00961894, 14.01923789, -10.0], - [2.00961894, 11.47114317, 23.48076211, 14.01923789]]]) + [ + [ + [-43.48076211, -34.01923789, -22.00961894, -31.47114317], + [-34.01923789, -10.0, 2.00961894, -22.00961894], + [-10.0, -0.53847577, 11.47114317, 2.00961894], + ], + [ + [-31.47114317, -22.00961894, -10.0, -19.46152423], + [-22.00961894, 2.00961894, 14.01923789, -10.0], + [2.00961894, 11.47114317, 23.48076211, 14.01923789], + ], + ] + ) np.testing.assert_allclose( - fixed_cube.coord('latitude').bounds, latitude_bounds) - longitude_bounds = np.array([[[140.625, 99.375, 99.375, 140.625], - [99.375, 140.625, 140.625, 99.375], - [140.625, 99.375, 99.375, 140.625]], - [[140.625, 99.375, 99.375, 140.625], - [99.375, 140.625, 140.625, 99.375], - [140.625, 99.375, 99.375, 140.625]]]) + fixed_cube.coord("latitude").bounds, latitude_bounds + ) + longitude_bounds = np.array( + [ + [ + [140.625, 99.375, 99.375, 140.625], + [99.375, 140.625, 140.625, 99.375], + [140.625, 99.375, 99.375, 140.625], + ], + [ + [140.625, 99.375, 99.375, 140.625], + [99.375, 140.625, 140.625, 99.375], + [140.625, 99.375, 99.375, 140.625], + ], + ] + ) np.testing.assert_allclose( - fixed_cube.coord('longitude').bounds, longitude_bounds) + fixed_cube.coord("longitude").bounds, longitude_bounds + ) def test_ocean_fix_only_aux_coords(tos_cubes_no_ij_bounds): """Test ``fix_metadata`` with cubes with wrong ij names.""" - cube_in = tos_cubes_no_ij_bounds.extract_cube('sea_surface_temperature') - cube_in.remove_coord(cube_in.coord(var_name='i')) - cube_in.remove_coord(cube_in.coord(var_name='j')) - assert len(cube_in.coords('latitude')) == 1 - assert len(cube_in.coords('longitude')) == 1 - assert cube_in.coord('latitude', dimensions=(1, 2)).bounds is None - assert cube_in.coord('longitude', dimensions=(1, 2)).bounds is None + cube_in = tos_cubes_no_ij_bounds.extract_cube("sea_surface_temperature") + cube_in.remove_coord(cube_in.coord(var_name="i")) + cube_in.remove_coord(cube_in.coord(var_name="j")) + assert len(cube_in.coords("latitude")) == 1 + assert len(cube_in.coords("longitude")) == 1 + assert cube_in.coord("latitude", dimensions=(1, 2)).bounds is None + assert cube_in.coord("longitude", dimensions=(1, 2)).bounds is None # Apply fix - vardef = get_var_info('CMIP6', 'Omon', 'tos') + vardef = get_var_info("CMIP6", "Omon", "tos") fix = OceanFixGrid(vardef) fixed_cubes = fix.fix_metadata(tos_cubes_no_ij_bounds) assert len(fixed_cubes) == 1 - fixed_cube = fixed_cubes.extract_cube('sea_surface_temperature') + fixed_cube = fixed_cubes.extract_cube("sea_surface_temperature") assert fixed_cube is cube_in # Check ij names - i_coord = fixed_cube.coord('cell index along first dimension') - j_coord = fixed_cube.coord('cell index along second dimension') - assert i_coord.var_name == 'i' + i_coord = fixed_cube.coord("cell index along first dimension") + j_coord = fixed_cube.coord("cell index along second dimension") + assert i_coord.var_name == "i" assert i_coord.standard_name is None - assert i_coord.long_name == 'cell index along first dimension' - assert i_coord.units == '1' + assert i_coord.long_name == "cell index along first dimension" + assert i_coord.units == "1" assert i_coord.circular is False - assert j_coord.var_name == 
'j' + assert j_coord.var_name == "j" assert j_coord.standard_name is None - assert j_coord.long_name == 'cell index along second dimension' - assert j_coord.units == '1' + assert j_coord.long_name == "cell index along second dimension" + assert j_coord.units == "1" # Check ij points and bounds np.testing.assert_allclose(i_coord.points, [0, 1, 2]) - np.testing.assert_allclose(i_coord.bounds, - [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]]) + np.testing.assert_allclose( + i_coord.bounds, [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]] + ) np.testing.assert_allclose(j_coord.points, [0, 1]) np.testing.assert_allclose(j_coord.bounds, [[-0.5, 0.5], [0.5, 1.5]]) # Check bounds of latitude and longitude - assert len(fixed_cube.coords('latitude')) == 1 - assert len(fixed_cube.coords('longitude')) == 1 - assert fixed_cube.coord('latitude').bounds is not None - assert fixed_cube.coord('longitude').bounds is not None + assert len(fixed_cube.coords("latitude")) == 1 + assert len(fixed_cube.coords("longitude")) == 1 + assert fixed_cube.coord("latitude").bounds is not None + assert fixed_cube.coord("longitude").bounds is not None latitude_bounds = np.array( - [[[-43.48076211, -34.01923789, -22.00961894, -31.47114317], - [-34.01923789, -10.0, 2.00961894, -22.00961894], - [-10.0, -0.53847577, 11.47114317, 2.00961894]], - [[-31.47114317, -22.00961894, -10.0, -19.46152423], - [-22.00961894, 2.00961894, 14.01923789, -10.0], - [2.00961894, 11.47114317, 23.48076211, 14.01923789]]]) + [ + [ + [-43.48076211, -34.01923789, -22.00961894, -31.47114317], + [-34.01923789, -10.0, 2.00961894, -22.00961894], + [-10.0, -0.53847577, 11.47114317, 2.00961894], + ], + [ + [-31.47114317, -22.00961894, -10.0, -19.46152423], + [-22.00961894, 2.00961894, 14.01923789, -10.0], + [2.00961894, 11.47114317, 23.48076211, 14.01923789], + ], + ] + ) np.testing.assert_allclose( - fixed_cube.coord('latitude').bounds, latitude_bounds) - longitude_bounds = np.array([[[140.625, 99.375, 99.375, 140.625], - [99.375, 140.625, 140.625, 99.375], - [140.625, 99.375, 99.375, 140.625]], - [[140.625, 99.375, 99.375, 140.625], - [99.375, 140.625, 140.625, 99.375], - [140.625, 99.375, 99.375, 140.625]]]) + fixed_cube.coord("latitude").bounds, latitude_bounds + ) + longitude_bounds = np.array( + [ + [ + [140.625, 99.375, 99.375, 140.625], + [99.375, 140.625, 140.625, 99.375], + [140.625, 99.375, 99.375, 140.625], + ], + [ + [140.625, 99.375, 99.375, 140.625], + [99.375, 140.625, 140.625, 99.375], + [140.625, 99.375, 99.375, 140.625], + ], + ] + ) np.testing.assert_allclose( - fixed_cube.coord('longitude').bounds, longitude_bounds) + fixed_cube.coord("longitude").bounds, longitude_bounds + ) @pytest.fixture @@ -506,17 +601,21 @@ def tos_cubes_regular_grid_cubes(): def test_ocean_fix_grid_regular(tos_cubes_regular_grid_cubes): """Test ``fix_metadata`` with cubes with regular coords.""" cube_in = tos_cubes_regular_grid_cubes.extract_cube( - 'sea_surface_temperature') - assert len(cube_in.coords('latitude')) == 1 - assert len(cube_in.coords('longitude')) == 1 + "sea_surface_temperature" + ) + assert len(cube_in.coords("latitude")) == 1 + assert len(cube_in.coords("longitude")) == 1 # Apply fix - vardef = get_var_info('CMIP6', 'Omon', 'tos') + vardef = get_var_info("CMIP6", "Omon", "tos") fix = OceanFixGrid(vardef) fixed_cubes = fix.fix_metadata(tos_cubes_regular_grid_cubes) - fixed_cube = fixed_cubes.extract_cube('sea_surface_temperature') + fixed_cube = fixed_cubes.extract_cube("sea_surface_temperature") assert fixed_cube == cube_in - assert 
(fixed_cube.coord("latitude").bounds == cube_in.coord( - "latitude").bounds) - assert (fixed_cube.coord("longitude").bounds == cube_in.coord( - "longitude").bounds) + assert ( + fixed_cube.coord("latitude").bounds == cube_in.coord("latitude").bounds + ) + assert ( + fixed_cube.coord("longitude").bounds + == cube_in.coord("longitude").bounds + ) diff --git a/tests/integration/cmor/_fixes/test_data/access_native.nc b/tests/integration/cmor/_fixes/test_data/access_native.nc new file mode 100644 index 0000000000..7c0849db63 Binary files /dev/null and b/tests/integration/cmor/_fixes/test_data/access_native.nc differ diff --git a/tests/integration/cmor/_fixes/test_data/create_test_data.py b/tests/integration/cmor/_fixes/test_data/create_test_data.py index 20af90076d..35dc40351b 100644 --- a/tests/integration/cmor/_fixes/test_data/create_test_data.py +++ b/tests/integration/cmor/_fixes/test_data/create_test_data.py @@ -1,4 +1,5 @@ """Create test data for tests of CMOR fixes.""" + import os import numpy as np @@ -7,232 +8,249 @@ def create_hyb_pres_file_without_ap(dataset, short_name): """Create dataset without vertical auxiliary coordinate ``ap``.""" - dataset.createDimension('time', size=1) - dataset.createDimension('lev', size=2) - dataset.createDimension('lat', size=3) - dataset.createDimension('lon', size=4) - dataset.createDimension('bnds', size=2) + dataset.createDimension("time", size=1) + dataset.createDimension("lev", size=2) + dataset.createDimension("lat", size=3) + dataset.createDimension("lon", size=4) + dataset.createDimension("bnds", size=2) # Dimensional variables - dataset.createVariable('time', np.float64, dimensions=('time',)) - dataset.createVariable('lev', np.float64, dimensions=('lev',)) - dataset.createVariable('lev_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('lat', np.float64, dimensions=('lat',)) - dataset.createVariable('lon', np.float64, dimensions=('lon',)) - dataset.variables['time'][:] = [0.0] - dataset.variables['time'].standard_name = 'time' - dataset.variables['time'].units = 'days since 6543-2-1' - dataset.variables['lev'][:] = [1.0, 2.0] - dataset.variables['lev'].bounds = 'lev_bnds' - dataset.variables['lev'].standard_name = ( - 'atmosphere_hybrid_sigma_pressure_coordinate') - dataset.variables['lev'].units = '1' - dataset.variables['lev_bnds'][:] = [[0.5, 1.5], [1.5, 3.0]] - dataset.variables['lev_bnds'].standard_name = ( - 'atmosphere_hybrid_sigma_pressure_coordinate') - dataset.variables['lev_bnds'].units = '1' - dataset.variables['lat'][:] = [-30.0, 0.0, 30.0] - dataset.variables['lat'].standard_name = 'latitude' - dataset.variables['lat'].units = 'degrees_north' - dataset.variables['lon'][:] = [30.0, 60.0, 90.0, 120.0] - dataset.variables['lon'].standard_name = 'longitude' - dataset.variables['lon'].units = 'degrees_east' + dataset.createVariable("time", np.float64, dimensions=("time",)) + dataset.createVariable("lev", np.float64, dimensions=("lev",)) + dataset.createVariable("lev_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("lat", np.float64, dimensions=("lat",)) + dataset.createVariable("lon", np.float64, dimensions=("lon",)) + dataset.variables["time"][:] = [0.0] + dataset.variables["time"].standard_name = "time" + dataset.variables["time"].units = "days since 6543-2-1" + dataset.variables["lev"][:] = [1.0, 2.0] + dataset.variables["lev"].bounds = "lev_bnds" + dataset.variables[ + "lev" + ].standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" + dataset.variables["lev"].units = "1" + 
dataset.variables["lev_bnds"][:] = [[0.5, 1.5], [1.5, 3.0]] + dataset.variables[ + "lev_bnds" + ].standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" + dataset.variables["lev_bnds"].units = "1" + dataset.variables["lat"][:] = [-30.0, 0.0, 30.0] + dataset.variables["lat"].standard_name = "latitude" + dataset.variables["lat"].units = "degrees_north" + dataset.variables["lon"][:] = [30.0, 60.0, 90.0, 120.0] + dataset.variables["lon"].standard_name = "longitude" + dataset.variables["lon"].units = "degrees_east" # Coordinates for derivation of pressure coordinate - dataset.createVariable('b', np.float64, dimensions=('lev',)) - dataset.createVariable('b_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('ps', np.float64, - dimensions=('time', 'lat', 'lon')) - dataset.variables['b'][:] = [0.0, 1.0] - dataset.variables['b_bnds'][:] = [[-1.0, 0.5], [0.5, 2.0]] - dataset.variables['ps'][:] = np.arange(1 * 3 * 4).reshape(1, 3, 4) - dataset.variables['ps'].standard_name = 'surface_air_pressure' - dataset.variables['ps'].units = 'Pa' - dataset.variables['ps'].additional_attribute = 'xyz' + dataset.createVariable("b", np.float64, dimensions=("lev",)) + dataset.createVariable("b_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("ps", np.float64, dimensions=("time", "lat", "lon")) + dataset.variables["b"][:] = [0.0, 1.0] + dataset.variables["b_bnds"][:] = [[-1.0, 0.5], [0.5, 2.0]] + dataset.variables["ps"][:] = np.arange(1 * 3 * 4).reshape(1, 3, 4) + dataset.variables["ps"].standard_name = "surface_air_pressure" + dataset.variables["ps"].units = "Pa" + dataset.variables["ps"].additional_attribute = "xyz" # Variable - dataset.createVariable(short_name, np.float32, - dimensions=('time', 'lev', 'lat', 'lon')) - dataset.variables[short_name][:] = np.full((1, 2, 3, 4), 0.0, - dtype=np.float32) - dataset.variables[short_name].standard_name = ( - 'cloud_area_fraction_in_atmosphere_layer') - dataset.variables[short_name].units = '%' + dataset.createVariable( + short_name, np.float32, dimensions=("time", "lev", "lat", "lon") + ) + dataset.variables[short_name][:] = np.full( + (1, 2, 3, 4), 0.0, dtype=np.float32 + ) + dataset.variables[ + short_name + ].standard_name = "cloud_area_fraction_in_atmosphere_layer" + dataset.variables[short_name].units = "%" def create_hyb_pres_file_with_a(dataset, short_name): """Create netcdf file with issues in hybrid pressure coordinate.""" create_hyb_pres_file_without_ap(dataset, short_name) - dataset.createVariable('a', np.float64, dimensions=('lev',)) - dataset.createVariable('a_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('p0', np.float64, dimensions=()) - dataset.variables['a'][:] = [1.0, 2.0] - dataset.variables['a_bnds'][:] = [[0.0, 1.5], [1.5, 3.0]] - dataset.variables['p0'][:] = 1.0 - dataset.variables['p0'].units = 'Pa' - dataset.variables['lev'].formula_terms = 'p0: p0 a: a b: b ps: ps' - dataset.variables['lev_bnds'].formula_terms = ( - 'p0: p0 a: a_bnds b: b_bnds ps: ps') + dataset.createVariable("a", np.float64, dimensions=("lev",)) + dataset.createVariable("a_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("p0", np.float64, dimensions=()) + dataset.variables["a"][:] = [1.0, 2.0] + dataset.variables["a_bnds"][:] = [[0.0, 1.5], [1.5, 3.0]] + dataset.variables["p0"][:] = 1.0 + dataset.variables["p0"].units = "Pa" + dataset.variables["lev"].formula_terms = "p0: p0 a: a b: b ps: ps" + dataset.variables[ + "lev_bnds" + ].formula_terms = "p0: p0 a: a_bnds b: b_bnds ps: ps" 
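For reference: the formula_terms attributes written above follow the CF convention for atmosphere_hybrid_sigma_pressure_coordinate, under which the pressure at level k is p(t, k, y, x) = a(k) * p0 + b(k) * ps(t, y, x). A minimal NumPy sketch using the values these fixture functions write (an illustration of what the test data encodes, not part of the patch):

import numpy as np

# Illustrative sketch only; values match create_hyb_pres_file_without_ap /
# create_hyb_pres_file_with_a above.
a = np.array([1.0, 2.0])                    # hybrid "a" coefficient, one value per level
b = np.array([0.0, 1.0])                    # hybrid "b" coefficient, one value per level
p0 = 1.0                                    # reference pressure [Pa]
ps = np.arange(1 * 3 * 4).reshape(1, 3, 4)  # surface air pressure (time, lat, lon) [Pa]

# p(t, k, y, x) = a(k) * p0 + b(k) * ps(t, y, x)
pressure = a[:, None, None] * p0 + b[:, None, None] * ps[:, None]
assert pressure.shape == (1, 2, 3, 4)       # (time, lev, lat, lon)
# Level 0 is the constant a * p0 = 1.0; level 1 is 2.0 + ps = 2.0 ... 13.0,
# i.e. exactly the AIR_PRESSURE_POINTS array asserted in test_common.py above.

The ap variant created by the next function folds a(k) * p0 into a single ap(k) term, so the formula reduces to p = ap(k) + b(k) * ps.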
def create_hyb_pres_file_with_ap(dataset, short_name): """Create netcdf file with issues in hybrid pressure coordinate.""" create_hyb_pres_file_without_ap(dataset, short_name) - dataset.createVariable('ap', np.float64, dimensions=('lev',)) - dataset.createVariable('ap_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.variables['ap'][:] = [1.0, 2.0] - dataset.variables['ap_bnds'][:] = [[0.0, 1.5], [1.5, 3.0]] - dataset.variables['ap'].units = 'Pa' - dataset.variables['lev'].formula_terms = 'ap: ap b: b ps: ps' - dataset.variables['lev_bnds'].formula_terms = ( - 'ap: ap_bnds b: b_bnds ps: ps') + dataset.createVariable("ap", np.float64, dimensions=("lev",)) + dataset.createVariable("ap_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.variables["ap"][:] = [1.0, 2.0] + dataset.variables["ap_bnds"][:] = [[0.0, 1.5], [1.5, 3.0]] + dataset.variables["ap"].units = "Pa" + dataset.variables["lev"].formula_terms = "ap: ap b: b ps: ps" + dataset.variables[ + "lev_bnds" + ].formula_terms = "ap: ap_bnds b: b_bnds ps: ps" def save_cl_file_with_a(save_path): """Create netcdf file for ``cl`` with ``a`` coordinate.""" - nc_path = os.path.join(save_path, 'common_cl_a.nc') - dataset = Dataset(nc_path, mode='w') - create_hyb_pres_file_with_a(dataset, 'cl') + nc_path = os.path.join(save_path, "common_cl_a.nc") + dataset = Dataset(nc_path, mode="w") + create_hyb_pres_file_with_a(dataset, "cl") dataset.close() print(f"Saved {nc_path}") def save_cl_file_with_ap(save_path): """Create netcdf file for ``cl`` with ``ap`` coordinate.""" - nc_path = os.path.join(save_path, 'common_cl_ap.nc') - dataset = Dataset(nc_path, mode='w') - create_hyb_pres_file_with_ap(dataset, 'cl') + nc_path = os.path.join(save_path, "common_cl_ap.nc") + dataset = Dataset(nc_path, mode="w") + create_hyb_pres_file_with_ap(dataset, "cl") dataset.close() print(f"Saved {nc_path}") def create_hybrid_height_file(dataset, short_name): """Create dataset with hybrid height coordinate.""" - dataset.createDimension('time', size=1) - dataset.createDimension('lev', size=2) - dataset.createDimension('lat', size=1) - dataset.createDimension('lon', size=2) - dataset.createDimension('bnds', size=2) + dataset.createDimension("time", size=1) + dataset.createDimension("lev", size=2) + dataset.createDimension("lat", size=1) + dataset.createDimension("lon", size=2) + dataset.createDimension("bnds", size=2) # Dimensional variables - dataset.createVariable('time', np.float64, dimensions=('time',)) - dataset.createVariable('lev', np.float64, dimensions=('lev',)) - dataset.createVariable('lev_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('lat', np.float64, dimensions=('lat',)) - dataset.createVariable('lon', np.float64, dimensions=('lon',)) - dataset.variables['time'][:] = [0.0] - dataset.variables['time'].standard_name = 'time' - dataset.variables['time'].units = 'days since 6543-2-1' - dataset.variables['lev'][:] = [1.0, 2.0] - dataset.variables['lev'].bounds = 'lev_bnds' - dataset.variables['lev'].standard_name = ( - 'atmosphere_hybrid_height_coordinate') - dataset.variables['lev'].units = 'm' - dataset.variables['lev'].formula_terms = 'a: lev b: b orog: orog' - dataset.variables['lev_bnds'][:] = [[0.5, 1.5], [1.5, 3.0]] - dataset.variables['lev_bnds'].standard_name = ( - 'atmosphere_hybrid_height_coordinate') - dataset.variables['lev_bnds'].units = '1' - dataset.variables['lev_bnds'].formula_terms = ( - 'a: lev_bnds b: b_bnds orog: orog') - dataset.variables['lat'][:] = [0.0] - dataset.variables['lat'].standard_name = 
'latitude' - dataset.variables['lat'].units = 'degrees_north' - dataset.variables['lon'][:] = [30.0, 60.0] - dataset.variables['lon'].standard_name = 'longitude' - dataset.variables['lon'].units = 'degrees_east' + dataset.createVariable("time", np.float64, dimensions=("time",)) + dataset.createVariable("lev", np.float64, dimensions=("lev",)) + dataset.createVariable("lev_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("lat", np.float64, dimensions=("lat",)) + dataset.createVariable("lon", np.float64, dimensions=("lon",)) + dataset.variables["time"][:] = [0.0] + dataset.variables["time"].standard_name = "time" + dataset.variables["time"].units = "days since 6543-2-1" + dataset.variables["lev"][:] = [1.0, 2.0] + dataset.variables["lev"].bounds = "lev_bnds" + dataset.variables[ + "lev" + ].standard_name = "atmosphere_hybrid_height_coordinate" + dataset.variables["lev"].units = "m" + dataset.variables["lev"].formula_terms = "a: lev b: b orog: orog" + dataset.variables["lev_bnds"][:] = [[0.5, 1.5], [1.5, 3.0]] + dataset.variables[ + "lev_bnds" + ].standard_name = "atmosphere_hybrid_height_coordinate" + dataset.variables["lev_bnds"].units = "1" + dataset.variables[ + "lev_bnds" + ].formula_terms = "a: lev_bnds b: b_bnds orog: orog" + dataset.variables["lat"][:] = [0.0] + dataset.variables["lat"].standard_name = "latitude" + dataset.variables["lat"].units = "degrees_north" + dataset.variables["lon"][:] = [30.0, 60.0] + dataset.variables["lon"].standard_name = "longitude" + dataset.variables["lon"].units = "degrees_east" # Coordinates for derivation of height coordinate - dataset.createVariable('b', np.float64, dimensions=('lev',)) - dataset.createVariable('b_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('orog', np.float64, dimensions=('lat', 'lon')) - dataset.variables['b'][:] = [0.0, 1.0] - dataset.variables['b_bnds'][:] = [[-1.0, 0.5], [0.5, 2.0]] - dataset.variables['orog'][:] = [[0.0, 1.0]] - dataset.variables['orog'].standard_name = 'surface_altitude' - dataset.variables['orog'].units = 'm' + dataset.createVariable("b", np.float64, dimensions=("lev",)) + dataset.createVariable("b_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("orog", np.float64, dimensions=("lat", "lon")) + dataset.variables["b"][:] = [0.0, 1.0] + dataset.variables["b_bnds"][:] = [[-1.0, 0.5], [0.5, 2.0]] + dataset.variables["orog"][:] = [[0.0, 1.0]] + dataset.variables["orog"].standard_name = "surface_altitude" + dataset.variables["orog"].units = "m" # Variable - dataset.createVariable(short_name, np.float32, - dimensions=('time', 'lev', 'lat', 'lon')) - dataset.variables[short_name][:] = np.full((1, 2, 1, 2), 0.0, - dtype=np.float32) - dataset.variables[short_name].standard_name = ( - 'cloud_area_fraction_in_atmosphere_layer') - dataset.variables[short_name].units = '%' + dataset.createVariable( + short_name, np.float32, dimensions=("time", "lev", "lat", "lon") + ) + dataset.variables[short_name][:] = np.full( + (1, 2, 1, 2), 0.0, dtype=np.float32 + ) + dataset.variables[ + short_name + ].standard_name = "cloud_area_fraction_in_atmosphere_layer" + dataset.variables[short_name].units = "%" def save_cl_file_with_height(save_path): """Create netcdf file for ``cl`` with hybrid height coordinate.""" - nc_path = os.path.join(save_path, 'common_cl_hybrid_height.nc') - dataset = Dataset(nc_path, mode='w') - create_hybrid_height_file(dataset, 'cl') + nc_path = os.path.join(save_path, "common_cl_hybrid_height.nc") + dataset = Dataset(nc_path, mode="w") + 
create_hybrid_height_file(dataset, "cl") dataset.close() print(f"Saved {nc_path}") def save_cnrm_cm6_1_cl_file(save_path): """Create netcdf file with similar issues as ``cl``.""" - nc_path = os.path.join(save_path, 'cnrm_cm6_1_cl.nc') - dataset = Dataset(nc_path, mode='w') - dataset.createDimension('time', size=1) - dataset.createDimension('lev', size=3) - dataset.createDimension('lat', size=2) - dataset.createDimension('lon', size=2) - dataset.createDimension('bnds', size=2) + nc_path = os.path.join(save_path, "cnrm_cm6_1_cl.nc") + dataset = Dataset(nc_path, mode="w") + dataset.createDimension("time", size=1) + dataset.createDimension("lev", size=3) + dataset.createDimension("lat", size=2) + dataset.createDimension("lon", size=2) + dataset.createDimension("bnds", size=2) # Dimensional variables - dataset.createVariable('time', np.float64, dimensions=('time',)) - dataset.createVariable('lev', np.float64, dimensions=('lev',)) - dataset.createVariable('lev_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('lat', np.float64, dimensions=('lat',)) - dataset.createVariable('lon', np.float64, dimensions=('lon',)) - dataset.variables['time'][:] = [0.0] - dataset.variables['time'].standard_name = 'time' - dataset.variables['time'].units = 'days since 6543-2-1' - dataset.variables['lev'][:] = [1.0, 2.0, 4.0] - dataset.variables['lev'].standard_name = ( - 'atmosphere_hybrid_sigma_pressure_coordinate') - dataset.variables['lev'].bounds = 'lev_bnds' - dataset.variables['lev'].units = '1' - dataset.variables['lev'].formula_term = ( - 'ap: ap b: b ps: ps') # Error in attribute intended - dataset.variables['lev_bnds'][:] = [[0.5, 1.5], [1.5, 3.0], [3.0, 5.0]] - dataset.variables['lev_bnds'].standard_name = ( - 'atmosphere_hybrid_sigma_pressure_coordinate') - dataset.variables['lev_bnds'].units = '1' - dataset.variables['lev_bnds'].formula_term = ( - 'ap: ap b: b ps: ps') # Error in attribute intended - dataset.variables['lat'][:] = [-30.0, 0.0] - dataset.variables['lat'].standard_name = 'latitude' - dataset.variables['lat'].units = 'degrees_north' - dataset.variables['lon'][:] = [30.0, 60.0] - dataset.variables['lon'].standard_name = 'longitude' - dataset.variables['lon'].units = 'degrees_east' + dataset.createVariable("time", np.float64, dimensions=("time",)) + dataset.createVariable("lev", np.float64, dimensions=("lev",)) + dataset.createVariable("lev_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("lat", np.float64, dimensions=("lat",)) + dataset.createVariable("lon", np.float64, dimensions=("lon",)) + dataset.variables["time"][:] = [0.0] + dataset.variables["time"].standard_name = "time" + dataset.variables["time"].units = "days since 6543-2-1" + dataset.variables["lev"][:] = [1.0, 2.0, 4.0] + dataset.variables[ + "lev" + ].standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" + dataset.variables["lev"].bounds = "lev_bnds" + dataset.variables["lev"].units = "1" + dataset.variables[ + "lev" + ].formula_term = "ap: ap b: b ps: ps" # Error in attribute intended + dataset.variables["lev_bnds"][:] = [[0.5, 1.5], [1.5, 3.0], [3.0, 5.0]] + dataset.variables[ + "lev_bnds" + ].standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" + dataset.variables["lev_bnds"].units = "1" + dataset.variables[ + "lev_bnds" + ].formula_term = "ap: ap b: b ps: ps" # Error in attribute intended + dataset.variables["lat"][:] = [-30.0, 0.0] + dataset.variables["lat"].standard_name = "latitude" + dataset.variables["lat"].units = "degrees_north" + 
dataset.variables["lon"][:] = [30.0, 60.0] + dataset.variables["lon"].standard_name = "longitude" + dataset.variables["lon"].units = "degrees_east" # Coordinates for derivation of pressure coordinate # Wrong shape of bounds is intended - dataset.createVariable('ap', np.float64, dimensions=('lev',)) - dataset.createVariable('ap_bnds', np.float64, dimensions=('bnds', 'lev')) - dataset.createVariable('b', np.float64, dimensions=('lev',)) - dataset.createVariable('b_bnds', np.float64, dimensions=('bnds', 'lev')) - dataset.createVariable('ps', np.float64, - dimensions=('time', 'lat', 'lon')) - dataset.variables['ap'][:] = [1.0, 2.0, 5.0] - dataset.variables['ap_bnds'][:] = [[0.0, 1.5, 1.5], [3.0, 3.0, 6.0]] - dataset.variables['b'][:] = [0.0, 1.0, 3.0] - dataset.variables['b_bnds'][:] = [[-1.0, 0.5, 0.5], [2.0, 2.0, 5.0]] - dataset.variables['ps'][:] = np.arange(1 * 2 * 2).reshape(1, 2, 2) - dataset.variables['ps'].standard_name = 'surface_air_pressure' - dataset.variables['ps'].units = 'Pa' + dataset.createVariable("ap", np.float64, dimensions=("lev",)) + dataset.createVariable("ap_bnds", np.float64, dimensions=("bnds", "lev")) + dataset.createVariable("b", np.float64, dimensions=("lev",)) + dataset.createVariable("b_bnds", np.float64, dimensions=("bnds", "lev")) + dataset.createVariable("ps", np.float64, dimensions=("time", "lat", "lon")) + dataset.variables["ap"][:] = [1.0, 2.0, 5.0] + dataset.variables["ap_bnds"][:] = [[0.0, 1.5, 1.5], [3.0, 3.0, 6.0]] + dataset.variables["b"][:] = [0.0, 1.0, 3.0] + dataset.variables["b_bnds"][:] = [[-1.0, 0.5, 0.5], [2.0, 2.0, 5.0]] + dataset.variables["ps"][:] = np.arange(1 * 2 * 2).reshape(1, 2, 2) + dataset.variables["ps"].standard_name = "surface_air_pressure" + dataset.variables["ps"].units = "Pa" # Cl variable - dataset.createVariable('cl', np.float32, - dimensions=('time', 'lev', 'lat', 'lon')) - dataset.variables['cl'][:] = np.full((1, 3, 2, 2), 0.0, dtype=np.float32) - dataset.variables['cl'].standard_name = ( - 'cloud_area_fraction_in_atmosphere_layer') - dataset.variables['cl'].units = '%' + dataset.createVariable( + "cl", np.float32, dimensions=("time", "lev", "lat", "lon") + ) + dataset.variables["cl"][:] = np.full((1, 3, 2, 2), 0.0, dtype=np.float32) + dataset.variables[ + "cl" + ].standard_name = "cloud_area_fraction_in_atmosphere_layer" + dataset.variables["cl"].units = "%" dataset.close() print(f"Saved {nc_path}") @@ -240,112 +258,124 @@ def save_cnrm_cm6_1_cl_file(save_path): def save_cesm2_cl_file(save_path): """Create netcdf file with similar issues as ``cl``.""" - nc_path = os.path.join(save_path, 'cesm2_cl.nc') - with Dataset(nc_path, mode='w') as dataset: - dataset.createDimension('time', size=1) - dataset.createDimension('lev', size=2) - dataset.createDimension('lat', size=3) - dataset.createDimension('lon', size=4) - dataset.createDimension('bnds', size=2) + nc_path = os.path.join(save_path, "cesm2_cl.nc") + with Dataset(nc_path, mode="w") as dataset: + dataset.createDimension("time", size=1) + dataset.createDimension("lev", size=2) + dataset.createDimension("lat", size=3) + dataset.createDimension("lon", size=4) + dataset.createDimension("bnds", size=2) # Dimensional variables - dataset.createVariable('time', np.float64, dimensions=('time',)) - dataset.createVariable('lev', np.float64, dimensions=('lev',)) - dataset.createVariable('lev_bnds', np.float64, dimensions=('lev', - 'bnds')) - dataset.createVariable('lat', np.float64, dimensions=('lat',)) - dataset.createVariable('lon', np.float64, dimensions=('lon',)) - 
dataset.variables['time'][:] = [0.0] - dataset.variables['time'].standard_name = 'time' - dataset.variables['time'].units = 'days since 6543-2-1' - dataset.variables['lev'][:] = [1.0, 2.0] - dataset.variables['lev'].bounds = 'lev_bnds' - dataset.variables['lev'].units = 'hPa' - dataset.variables['lev_bnds'][:] = [[0.5, 1.5], [1.5, 3.0]] - dataset.variables['lev_bnds'].standard_name = ( - 'atmosphere_hybrid_sigma_pressure_coordinate') - dataset.variables['lev_bnds'].units = '1' - dataset.variables['lev_bnds'].formula_terms = ( - 'p0: p0 a: a_bnds b: b_bnds ps: ps') - dataset.variables['lat'][:] = [-30.0, 0.0, 30.0] - dataset.variables['lat'].standard_name = 'latitude' - dataset.variables['lat'].units = 'degrees_north' - dataset.variables['lon'][:] = [30.0, 60.0, 90.0, 120.0] - dataset.variables['lon'].standard_name = 'longitude' - dataset.variables['lon'].units = 'degrees_east' + dataset.createVariable("time", np.float64, dimensions=("time",)) + dataset.createVariable("lev", np.float64, dimensions=("lev",)) + dataset.createVariable( + "lev_bnds", np.float64, dimensions=("lev", "bnds") + ) + dataset.createVariable("lat", np.float64, dimensions=("lat",)) + dataset.createVariable("lon", np.float64, dimensions=("lon",)) + dataset.variables["time"][:] = [0.0] + dataset.variables["time"].standard_name = "time" + dataset.variables["time"].units = "days since 6543-2-1" + dataset.variables["lev"][:] = [1.0, 2.0] + dataset.variables["lev"].bounds = "lev_bnds" + dataset.variables["lev"].units = "hPa" + dataset.variables["lev_bnds"][:] = [[0.5, 1.5], [1.5, 3.0]] + dataset.variables[ + "lev_bnds" + ].standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" + dataset.variables["lev_bnds"].units = "1" + dataset.variables[ + "lev_bnds" + ].formula_terms = "p0: p0 a: a_bnds b: b_bnds ps: ps" + dataset.variables["lat"][:] = [-30.0, 0.0, 30.0] + dataset.variables["lat"].standard_name = "latitude" + dataset.variables["lat"].units = "degrees_north" + dataset.variables["lon"][:] = [30.0, 60.0, 90.0, 120.0] + dataset.variables["lon"].standard_name = "longitude" + dataset.variables["lon"].units = "degrees_east" # Coordinates for derivation of pressure coordinate - dataset.createVariable('a', np.float64, dimensions=('lev',)) - dataset.createVariable('a_bnds', np.float64, dimensions=('lev', - 'bnds')) - dataset.createVariable('b', np.float64, dimensions=('lev',)) - dataset.createVariable('b_bnds', np.float64, dimensions=('lev', - 'bnds')) - dataset.createVariable('p0', np.float64, dimensions=()) - dataset.createVariable('ps', np.float64, - dimensions=('time', 'lat', 'lon')) - dataset.variables['a'][:] = [1.0, 2.0] - dataset.variables['a'].bounds = 'a_bnds' - dataset.variables['a_bnds'][:] = [[1.5, 3.0], [0.0, 1.5]] # intended - dataset.variables['b'][:] = [0.0, 1.0] - dataset.variables['b'].bounds = 'b_bnds' - dataset.variables['b_bnds'][:] = [[0.5, 2.0], [-1.0, 0.5]] # intended - dataset.variables['p0'][:] = 1.0 - dataset.variables['p0'].units = 'Pa' - dataset.variables['ps'][:] = np.arange(1 * 3 * 4).reshape(1, 3, 4) - dataset.variables['ps'].standard_name = 'surface_air_pressure' - dataset.variables['ps'].units = 'Pa' + dataset.createVariable("a", np.float64, dimensions=("lev",)) + dataset.createVariable( + "a_bnds", np.float64, dimensions=("lev", "bnds") + ) + dataset.createVariable("b", np.float64, dimensions=("lev",)) + dataset.createVariable( + "b_bnds", np.float64, dimensions=("lev", "bnds") + ) + dataset.createVariable("p0", np.float64, dimensions=()) + dataset.createVariable( + "ps", np.float64, 
dimensions=("time", "lat", "lon") + ) + dataset.variables["a"][:] = [1.0, 2.0] + dataset.variables["a"].bounds = "a_bnds" + dataset.variables["a_bnds"][:] = [[1.5, 3.0], [0.0, 1.5]] # intended + dataset.variables["b"][:] = [0.0, 1.0] + dataset.variables["b"].bounds = "b_bnds" + dataset.variables["b_bnds"][:] = [[0.5, 2.0], [-1.0, 0.5]] # intended + dataset.variables["p0"][:] = 1.0 + dataset.variables["p0"].units = "Pa" + dataset.variables["ps"][:] = np.arange(1 * 3 * 4).reshape(1, 3, 4) + dataset.variables["ps"].standard_name = "surface_air_pressure" + dataset.variables["ps"].units = "Pa" # Cl variable - dataset.createVariable('cl', np.float32, - dimensions=('time', 'lev', 'lat', 'lon')) - dataset.variables['cl'][:] = np.full((1, 2, 3, 4), - 0.0, dtype=np.float32) - dataset.variables['cl'].standard_name = ( - 'cloud_area_fraction_in_atmosphere_layer') - dataset.variables['cl'].units = '%' + dataset.createVariable( + "cl", np.float32, dimensions=("time", "lev", "lat", "lon") + ) + dataset.variables["cl"][:] = np.full( + (1, 2, 3, 4), 0.0, dtype=np.float32 + ) + dataset.variables[ + "cl" + ].standard_name = "cloud_area_fraction_in_atmosphere_layer" + dataset.variables["cl"].units = "%" print(f"Saved {nc_path}") def save_cesm2_waccm_cl_file(save_path): """Create netcdf file with similar issues as ``cl``.""" - nc_path = os.path.join(save_path, 'cesm2_waccm_cl.nc') - dataset = Dataset(nc_path, mode='w') - dataset.createDimension('lev', size=2) - dataset.createDimension('bnds', size=2) + nc_path = os.path.join(save_path, "cesm2_waccm_cl.nc") + dataset = Dataset(nc_path, mode="w") + dataset.createDimension("lev", size=2) + dataset.createDimension("bnds", size=2) # Dimensional variables - dataset.createVariable('lev', np.float64, dimensions=('lev',)) - dataset.createVariable('lev_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.variables['lev'][:] = [1.0, 2.0] - dataset.variables['lev'].bounds = 'lev_bnds' - dataset.variables['lev'].units = '1' - dataset.variables['lev_bnds'][:] = [[0.5, 1.5], [1.5, 3.0]] - dataset.variables['lev_bnds'].standard_name = ( - 'atmosphere_hybrid_sigma_pressure_coordinate') - dataset.variables['lev_bnds'].units = '1' - dataset.variables['lev_bnds'].formula_terms = ( - 'p0: p0 a: a_bnds b: b_bnds ps: ps') + dataset.createVariable("lev", np.float64, dimensions=("lev",)) + dataset.createVariable("lev_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.variables["lev"][:] = [1.0, 2.0] + dataset.variables["lev"].bounds = "lev_bnds" + dataset.variables["lev"].units = "1" + dataset.variables["lev_bnds"][:] = [[0.5, 1.5], [1.5, 3.0]] + dataset.variables[ + "lev_bnds" + ].standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" + dataset.variables["lev_bnds"].units = "1" + dataset.variables[ + "lev_bnds" + ].formula_terms = "p0: p0 a: a_bnds b: b_bnds ps: ps" # Coordinates for derivation of pressure coordinate - dataset.createVariable('a', np.float64, dimensions=('lev',)) - dataset.createVariable('a_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('b', np.float64, dimensions=('lev',)) - dataset.createVariable('b_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.variables['a'][:] = [1.0, 2.0] - dataset.variables['a'].bounds = 'a_bnds' - dataset.variables['a_bnds'][:] = [[1.5, 0.0], [3.0, 1.5]] - dataset.variables['b'][:] = [0.0, 1.0] - dataset.variables['b'].bounds = 'b_bnds' - dataset.variables['b_bnds'][:] = [[0.5, -1.0], [2.0, 0.5]] + dataset.createVariable("a", np.float64, dimensions=("lev",)) + dataset.createVariable("a_bnds", 
np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("b", np.float64, dimensions=("lev",)) + dataset.createVariable("b_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.variables["a"][:] = [1.0, 2.0] + dataset.variables["a"].bounds = "a_bnds" + dataset.variables["a_bnds"][:] = [[1.5, 0.0], [3.0, 1.5]] + dataset.variables["b"][:] = [0.0, 1.0] + dataset.variables["b"].bounds = "b_bnds" + dataset.variables["b_bnds"][:] = [[0.5, -1.0], [2.0, 0.5]] # Cl variable - dataset.createVariable('cl', np.float32, dimensions=('lev',)) - dataset.variables['cl'][:] = np.full((2,), [0.0, 1.0], dtype=np.float32) - dataset.variables['cl'].standard_name = ( - 'cloud_area_fraction_in_atmosphere_layer') - dataset.variables['cl'].units = '%' + dataset.createVariable("cl", np.float32, dimensions=("lev",)) + dataset.variables["cl"][:] = np.full((2,), [0.0, 1.0], dtype=np.float32) + dataset.variables[ + "cl" + ].standard_name = "cloud_area_fraction_in_atmosphere_layer" + dataset.variables["cl"].units = "%" dataset.close() print(f"Saved {nc_path}") @@ -353,66 +383,73 @@ def save_cesm2_waccm_cl_file(save_path): def save_gfdl_cm4_cl_file(save_path): """Create netcdf file with similar issues as ``cl``.""" - nc_path = os.path.join(save_path, 'gfdl_cm4_cl.nc') - dataset = Dataset(nc_path, mode='w') - dataset.createDimension('time', size=1) - dataset.createDimension('lev', size=3) - dataset.createDimension('lat', size=2) - dataset.createDimension('lon', size=2) - dataset.createDimension('bnds', size=2) + nc_path = os.path.join(save_path, "gfdl_cm4_cl.nc") + dataset = Dataset(nc_path, mode="w") + dataset.createDimension("time", size=1) + dataset.createDimension("lev", size=3) + dataset.createDimension("lat", size=2) + dataset.createDimension("lon", size=2) + dataset.createDimension("bnds", size=2) # Dimensional variables - dataset.createVariable('time', np.float64, dimensions=('time',)) - dataset.createVariable('lev', np.float64, dimensions=('lev',)) - dataset.createVariable('lev_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('lat', np.float64, dimensions=('lat',)) - dataset.createVariable('lon', np.float64, dimensions=('lon',)) - dataset.variables['time'][:] = [0.0] - dataset.variables['time'].standard_name = 'time' - dataset.variables['time'].units = 'days since 6543-2-1' - dataset.variables['lev'][:] = [1.0, 2.0, 4.0] - dataset.variables['lev'].standard_name = ( - 'atmosphere_hybrid_sigma_pressure_coordinate') - dataset.variables['lev'].bounds = 'lev_bnds' - dataset.variables['lev'].units = '1' - dataset.variables['lev'].formula_term = ( - 'ap: ap b: b ps: ps') # Error in attribute intended - dataset.variables['lev_bnds'][:] = [[0.5, 1.5], [1.5, 3.0], [3.0, 5.0]] - dataset.variables['lev_bnds'].standard_name = ( - 'atmosphere_hybrid_sigma_pressure_coordinate') - dataset.variables['lev_bnds'].units = '1' - dataset.variables['lev_bnds'].formula_term = ( - 'ap: ap_bnds b: b_bnds ps: ps') # Error in attribute intended - dataset.variables['lat'][:] = [-30.0, 0.0] - dataset.variables['lat'].standard_name = 'latitude' - dataset.variables['lat'].units = 'degrees_north' - dataset.variables['lon'][:] = [30.0, 60.0] - dataset.variables['lon'].standard_name = 'longitude' - dataset.variables['lon'].units = 'degrees_east' + dataset.createVariable("time", np.float64, dimensions=("time",)) + dataset.createVariable("lev", np.float64, dimensions=("lev",)) + dataset.createVariable("lev_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("lat", np.float64, dimensions=("lat",)) 
+ dataset.createVariable("lon", np.float64, dimensions=("lon",)) + dataset.variables["time"][:] = [0.0] + dataset.variables["time"].standard_name = "time" + dataset.variables["time"].units = "days since 6543-2-1" + dataset.variables["lev"][:] = [1.0, 2.0, 4.0] + dataset.variables[ + "lev" + ].standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" + dataset.variables["lev"].bounds = "lev_bnds" + dataset.variables["lev"].units = "1" + dataset.variables[ + "lev" + ].formula_term = "ap: ap b: b ps: ps" # Error in attribute intended + dataset.variables["lev_bnds"][:] = [[0.5, 1.5], [1.5, 3.0], [3.0, 5.0]] + dataset.variables[ + "lev_bnds" + ].standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" + dataset.variables["lev_bnds"].units = "1" + dataset.variables[ + "lev_bnds" + ].formula_term = ( + "ap: ap_bnds b: b_bnds ps: ps" # Error in attribute intended + ) + dataset.variables["lat"][:] = [-30.0, 0.0] + dataset.variables["lat"].standard_name = "latitude" + dataset.variables["lat"].units = "degrees_north" + dataset.variables["lon"][:] = [30.0, 60.0] + dataset.variables["lon"].standard_name = "longitude" + dataset.variables["lon"].units = "degrees_east" # Coordinates for derivation of pressure coordinate - dataset.createVariable('ap', np.float64, dimensions=('lev',)) - dataset.createVariable('ap_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('b', np.float64, dimensions=('lev',)) - dataset.createVariable('b_bnds', np.float64, dimensions=('lev', 'bnds')) - dataset.createVariable('ps', np.float64, - dimensions=('time', 'lat', 'lon')) - dataset.variables['ap'][:] = [1.0, 2.0, 5.0] - dataset.variables['ap'].units = 'Pa' - dataset.variables['ap_bnds'][:] = [[0.0, 1.5], [1.5, 3.0], [3.0, 6.0]] - dataset.variables['b'][:] = [0.0, 1.0, 3.0] - dataset.variables['b_bnds'][:] = [[-1.0, 0.5], [0.5, 2.0], [2.0, 5.0]] - dataset.variables['ps'][:] = np.arange(1 * 2 * 2).reshape(1, 2, 2) - dataset.variables['ps'].standard_name = 'surface_air_pressure' - dataset.variables['ps'].units = 'Pa' + dataset.createVariable("ap", np.float64, dimensions=("lev",)) + dataset.createVariable("ap_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("b", np.float64, dimensions=("lev",)) + dataset.createVariable("b_bnds", np.float64, dimensions=("lev", "bnds")) + dataset.createVariable("ps", np.float64, dimensions=("time", "lat", "lon")) + dataset.variables["ap"][:] = [1.0, 2.0, 5.0] + dataset.variables["ap"].units = "Pa" + dataset.variables["ap_bnds"][:] = [[0.0, 1.5], [1.5, 3.0], [3.0, 6.0]] + dataset.variables["b"][:] = [0.0, 1.0, 3.0] + dataset.variables["b_bnds"][:] = [[-1.0, 0.5], [0.5, 2.0], [2.0, 5.0]] + dataset.variables["ps"][:] = np.arange(1 * 2 * 2).reshape(1, 2, 2) + dataset.variables["ps"].standard_name = "surface_air_pressure" + dataset.variables["ps"].units = "Pa" # Cl variable - dataset.createVariable('cl', np.float32, - dimensions=('time', 'lev', 'lat', 'lon')) - dataset.variables['cl'][:] = np.full((1, 3, 2, 2), 0.0, dtype=np.float32) - dataset.variables['cl'].standard_name = ( - 'cloud_area_fraction_in_atmosphere_layer') - dataset.variables['cl'].units = '%' + dataset.createVariable( + "cl", np.float32, dimensions=("time", "lev", "lat", "lon") + ) + dataset.variables["cl"][:] = np.full((1, 3, 2, 2), 0.0, dtype=np.float32) + dataset.variables[ + "cl" + ].standard_name = "cloud_area_fraction_in_atmosphere_layer" + dataset.variables["cl"].units = "%" dataset.close() print(f"Saved {nc_path}") @@ -430,5 +467,5 @@ def main(): save_gfdl_cm4_cl_file(save_path) 
-if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tests/integration/cmor/_fixes/test_fix.py b/tests/integration/cmor/_fixes/test_fix.py index 1e97c62fd4..13ce5dc03c 100644 --- a/tests/integration/cmor/_fixes/test_fix.py +++ b/tests/integration/cmor/_fixes/test_fix.py @@ -21,83 +21,91 @@ def test_get_fix(): - assert Fix.get_fixes('CMIP5', 'CanESM2', 'Amon', 'fgco2') == [ - FgCo2(None), GenericFix(None) + assert Fix.get_fixes("CMIP5", "CanESM2", "Amon", "fgco2") == [ + FgCo2(None), + GenericFix(None), ] def test_get_fix_case_insensitive(): - assert Fix.get_fixes('CMIP5', 'CanESM2', 'Amon', 'fgCo2') == [ - FgCo2(None), GenericFix(None) + assert Fix.get_fixes("CMIP5", "CanESM2", "Amon", "fgCo2") == [ + FgCo2(None), + GenericFix(None), ] def test_get_fix_cordex(): fix = Fix.get_fixes( - 'CORDEX', - 'ALADIN63', - 'Amon', - 'tas', - extra_facets={'driver': 'CNRM-CERFACS-CNRM-CM5'}, + "CORDEX", + "ALADIN63", + "Amon", + "tas", + extra_facets={"driver": "CNRM-CERFACS-CNRM-CM5"}, ) assert fix == [Tas(None), AllVars(None), GenericFix(None)] def test_get_grid_fix_cordex(): fix = Fix.get_fixes( - 'CORDEX', - 'ALADIN53', - 'Amon', - 'tas', - extra_facets={'driver': 'CNRM-CERFACS-CNRM-CM5'}, + "CORDEX", + "ALADIN53", + "Amon", + "tas", + extra_facets={"driver": "CNRM-CERFACS-CNRM-CM5"}, ) assert fix == [AllVars(None), GenericFix(None)] def test_get_fixes_with_replace(): - assert Fix.get_fixes('CMIP5', 'BNU-ESM', 'Amon', 'ch4') == [ - Ch4(None), GenericFix(None) + assert Fix.get_fixes("CMIP5", "BNU-ESM", "Amon", "ch4") == [ + Ch4(None), + GenericFix(None), ] def test_get_fixes_with_generic(): - assert Fix.get_fixes('CMIP5', 'CESM1-BGC', 'Amon', 'gpp') == [ - Gpp(None), GenericFix(None) + assert Fix.get_fixes("CMIP5", "CESM1-BGC", "Amon", "gpp") == [ + Gpp(None), + GenericFix(None), ] def test_get_fix_no_project(): with pytest.raises(KeyError): - Fix.get_fixes('BAD_PROJECT', 'BNU-ESM', 'Amon', 'ch4') + Fix.get_fixes("BAD_PROJECT", "BNU-ESM", "Amon", "ch4") def test_get_fix_no_model(): - assert Fix.get_fixes('CMIP5', 'BAD_MODEL', 'Amon', 'ch4') == [ + assert Fix.get_fixes("CMIP5", "BAD_MODEL", "Amon", "ch4") == [ GenericFix(None) ] def test_get_fix_no_var(): - assert Fix.get_fixes('CMIP5', 'BNU-ESM', 'Amon', 'BAD_VAR') == [ + assert Fix.get_fixes("CMIP5", "BNU-ESM", "Amon", "BAD_VAR") == [ GenericFix(None) ] def test_get_fix_only_mip(): - assert Fix.get_fixes('CMIP6', 'CESM2', 'Omon', 'thetao') == [ - Omon(None), GenericFix(None) + assert Fix.get_fixes("CMIP6", "CESM2", "Omon", "thetao") == [ + Omon(None), + GenericFix(None), ] def test_get_fix_only_mip_case_insensitive(): - assert Fix.get_fixes('CMIP6', 'CESM2', 'omOn', 'thetao') == [ - Omon(None), GenericFix(None) + assert Fix.get_fixes("CMIP6", "CESM2", "omOn", "thetao") == [ + Omon(None), + GenericFix(None), ] def test_get_fix_mip_and_var(): - assert Fix.get_fixes('CMIP6', 'CESM2', 'Omon', 'tos') == [ - Tos(None), Omon(None), GenericFix(None) + assert Fix.get_fixes("CMIP6", "CESM2", "Omon", "tos") == [ + Tos(None), + Omon(None), + GenericFix(None), ] @@ -114,56 +122,56 @@ def test_fix_data(): def test_fix_file(): - filepath = 'sample_filepath' - assert Fix(None).fix_file(filepath, 'preproc') == filepath + filepath = "sample_filepath" + assert Fix(None).fix_file(filepath, "preproc") == filepath def test_get_fixed_filepath_paths(tmp_path): - output_dir = tmp_path / 'fixed' - filepath = Path('this', 'is', 'a', 'file.nc') + output_dir = tmp_path / "fixed" + filepath = Path("this", "is", "a", "file.nc") assert not 
output_dir.is_dir() fixed_path = Fix(None).get_fixed_filepath(output_dir, filepath) assert output_dir.is_dir() assert isinstance(fixed_path, Path) - assert fixed_path == tmp_path / 'fixed' / 'file.nc' + assert fixed_path == tmp_path / "fixed" / "file.nc" def test_get_fixed_filepath_unique_suffix_paths(tmp_path): - output_dir = tmp_path / 'fixed' / 'prefix_1_' - filepath = Path('this', 'is', 'a', 'file.nc') + output_dir = tmp_path / "fixed" / "prefix_1_" + filepath = Path("this", "is", "a", "file.nc") assert not output_dir.parent.is_dir() fixed_path = Fix(None).get_fixed_filepath( output_dir, filepath, add_unique_suffix=True ) assert fixed_path.parent.is_dir() assert isinstance(fixed_path, Path) - assert fixed_path != tmp_path / 'fixed' / 'prefix_1_' / 'file.nc' - assert fixed_path.parent.name.startswith('prefix_1_') - assert fixed_path.name == 'file.nc' + assert fixed_path != tmp_path / "fixed" / "prefix_1_" / "file.nc" + assert fixed_path.parent.name.startswith("prefix_1_") + assert fixed_path.name == "file.nc" def test_get_fixed_filepath_strs(tmp_path): - output_dir = os.path.join(str(tmp_path), 'fixed') - filepath = os.path.join('this', 'is', 'a', 'file.nc') + output_dir = os.path.join(str(tmp_path), "fixed") + filepath = os.path.join("this", "is", "a", "file.nc") assert not Path(output_dir).is_dir() fixed_path = Fix(None).get_fixed_filepath(output_dir, filepath) assert Path(output_dir).is_dir() assert isinstance(fixed_path, Path) - assert fixed_path == tmp_path / 'fixed' / 'file.nc' + assert fixed_path == tmp_path / "fixed" / "file.nc" def test_get_fixed_filepath_unique_suffix_strs(tmp_path): - output_dir = os.path.join(str(tmp_path), 'fixed', 'prefix_1_') - filepath = os.path.join('this', 'is', 'a', 'file.nc') + output_dir = os.path.join(str(tmp_path), "fixed", "prefix_1_") + filepath = os.path.join("this", "is", "a", "file.nc") assert not Path(output_dir).parent.is_dir() fixed_path = Fix(None).get_fixed_filepath( output_dir, filepath, add_unique_suffix=True ) assert fixed_path.parent.is_dir() assert isinstance(fixed_path, Path) - assert fixed_path != tmp_path / 'fixed' / 'prefix_1_' / 'file.nc' - assert fixed_path.parent.name.startswith('prefix_1_') - assert fixed_path.name == 'file.nc' + assert fixed_path != tmp_path / "fixed" / "prefix_1_" / "file.nc" + assert fixed_path.parent.name.startswith("prefix_1_") + assert fixed_path.name == "file.nc" def test_session_empty(): @@ -172,7 +180,7 @@ def test_session_empty(): def test_session(): - session = CFG.start_session('my session') + session = CFG.start_session("my session") fix = Fix(None, session=session) assert fix.session == session @@ -183,17 +191,17 @@ def test_frequency_empty(): def test_frequency_from_vardef(): - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") fix = Fix(vardef) - assert fix.frequency == 'mon' + assert fix.frequency == "mon" def test_frequency_given(): - fix = Fix(None, frequency='1hr') - assert fix.frequency == '1hr' + fix = Fix(None, frequency="1hr") + assert fix.frequency == "1hr" def test_frequency_not_from_vardef(): - vardef = get_var_info('CMIP6', 'Amon', 'tas') - fix = Fix(vardef, frequency='3hr') - assert fix.frequency == '3hr' + vardef = get_var_info("CMIP6", "Amon", "tas") + fix = Fix(vardef, frequency="3hr") + assert fix.frequency == "3hr" diff --git a/tests/integration/cmor/_fixes/test_native_datasets.py b/tests/integration/cmor/_fixes/test_native_datasets.py index 5399bf887e..ebc9e89105 100644 --- a/tests/integration/cmor/_fixes/test_native_datasets.py 
+++ b/tests/integration/cmor/_fixes/test_native_datasets.py @@ -1,4 +1,5 @@ """Tests for base class of native dataset fixes.""" + from unittest import mock import numpy as np @@ -14,10 +15,12 @@ @pytest.fixture def cubes(): """List of cubes with different `var_names`.""" - cubes = CubeList([ - Cube(0.0, var_name='pr'), - Cube(0.0, var_name='tas'), - ]) + cubes = CubeList( + [ + Cube(0.0, var_name="pr"), + Cube(0.0, var_name="tas"), + ] + ) return cubes @@ -33,49 +36,53 @@ def sample_cube(): """4D sample cube with many coordinates.""" time_coord = DimCoord( 0.0, - long_name='time', - units=Unit('day since 1950-01-01 00:00:00', calendar='gregorian'), + long_name="time", + units=Unit("day since 1950-01-01 00:00:00", calendar="gregorian"), ) plev_coord = DimCoord( [1000.0, 900.0], - long_name='air_pressure', - units='hPa', + long_name="air_pressure", + units="hPa", ) alt16_coord = AuxCoord( [2.0, 4.0], - long_name='altitude', - units='km', + long_name="altitude", + units="km", ) height_coord = AuxCoord( [2.0, 4.0], - long_name='height', - units='km', + long_name="height", + units="km", ) coord_with_bounds = AuxCoord( [2.0, 4.0], bounds=[[0.0, 2.5], [2.5, 10.0]], - long_name='coord with bounds', - units='km', + long_name="coord with bounds", + units="km", ) lat_coord = DimCoord( 3.141592653, - long_name='latitude', - units='rad', + long_name="latitude", + units="rad", ) lon_coord = DimCoord( 3.141592653, - long_name='longitude', - units='rad', + long_name="longitude", + units="rad", ) cube = Cube( [[[[1.0]], [[2.0]]]], - dim_coords_and_dims=[(time_coord, 0), - (plev_coord, 1), - (lat_coord, 2), - (lon_coord, 3)], - aux_coords_and_dims=[(alt16_coord, 1), - (height_coord, 1), - (coord_with_bounds, 1)], + dim_coords_and_dims=[ + (time_coord, 0), + (plev_coord, 1), + (lat_coord, 2), + (lon_coord, 3), + ], + aux_coords_and_dims=[ + (alt16_coord, 1), + (height_coord, 1), + (coord_with_bounds, 1), + ], ) return cube @@ -91,25 +98,26 @@ def fix(): allow us to test all common cases. 
""" - vardef = get_var_info('CMIP6', 'Amon', 'tas') + vardef = get_var_info("CMIP6", "Amon", "tas") extra_facets = {} fix = NativeDatasetFix(vardef, extra_facets=extra_facets) return fix @pytest.mark.parametrize( - 'scalar_coord,coord_name,val', + "scalar_coord,coord_name,val", [ - ('height2m', 'height', 2.0), - ('height10m', 'height', 10.0), - ('lambda550nm', 'radiation_wavelength', 550.0), - ('typesi', 'area_type', 'sea_ice'), + ("height2m", "height", 2.0), + ("height10m", "height", 10.0), + ("lambda550nm", "radiation_wavelength", 550.0), + ("typesi", "area_type", "sea_ice"), ], ) -def test_fix_scalar_coords(monkeypatch, empty_cube, fix, scalar_coord, - coord_name, val): +def test_fix_scalar_coords( + monkeypatch, empty_cube, fix, scalar_coord, coord_name, val +): """Test ``fix_scalar_coords``.""" - monkeypatch.setattr(fix.vardef, 'dimensions', [scalar_coord]) + monkeypatch.setattr(fix.vardef, "dimensions", [scalar_coord]) fix.fix_scalar_coords(empty_cube) @@ -124,43 +132,43 @@ def test_fix_scalar_coords(monkeypatch, empty_cube, fix, scalar_coord, def test_fix_var_metadata_tas(empty_cube, fix): """Test ``fix_var_metadata`` using `tas`.""" - empty_cube.units = 'K' + empty_cube.units = "K" fix.fix_var_metadata(empty_cube) - assert empty_cube.var_name == 'tas' - assert empty_cube.standard_name == 'air_temperature' - assert empty_cube.long_name == 'Near-Surface Air Temperature' - assert empty_cube.units == 'K' - assert 'positive' not in empty_cube.attributes + assert empty_cube.var_name == "tas" + assert empty_cube.standard_name == "air_temperature" + assert empty_cube.long_name == "Near-Surface Air Temperature" + assert empty_cube.units == "K" + assert "positive" not in empty_cube.attributes def test_fix_var_metadata_custom_var(monkeypatch, empty_cube, fix): """Test ``fix_var_metadata`` using custom variable.""" - monkeypatch.setattr(fix, 'INVALID_UNITS', {'invalid_units': 'kg'}) - monkeypatch.setattr(fix.vardef, 'positive', mock.sentinel.positive) - monkeypatch.setattr(fix.vardef, 'standard_name', '') - monkeypatch.setattr(fix.vardef, 'units', 'g') - empty_cube.attributes['invalid_units'] = 'invalid_units' + monkeypatch.setattr(fix, "INVALID_UNITS", {"invalid_units": "kg"}) + monkeypatch.setattr(fix.vardef, "positive", mock.sentinel.positive) + monkeypatch.setattr(fix.vardef, "standard_name", "") + monkeypatch.setattr(fix.vardef, "units", "g") + empty_cube.attributes["invalid_units"] = "invalid_units" fix.fix_var_metadata(empty_cube) - assert empty_cube.var_name == 'tas' + assert empty_cube.var_name == "tas" assert empty_cube.standard_name is None - assert empty_cube.long_name == 'Near-Surface Air Temperature' - assert empty_cube.units == 'g' - assert empty_cube.attributes['positive'] == mock.sentinel.positive + assert empty_cube.long_name == "Near-Surface Air Temperature" + assert empty_cube.units == "g" + assert empty_cube.attributes["positive"] == mock.sentinel.positive np.testing.assert_allclose(empty_cube.data, 1000.0) def test_fix_var_metadata_raw_units(monkeypatch, empty_cube, fix): """Test ``fix_var_metadata`` with ``raw_units``.""" empty_cube.units = None - monkeypatch.setitem(fix.extra_facets, 'raw_units', 'K') + monkeypatch.setitem(fix.extra_facets, "raw_units", "K") fix.fix_var_metadata(empty_cube) - assert empty_cube.units == 'K' + assert empty_cube.units == "K" np.testing.assert_allclose(empty_cube.data, 1.0) @@ -168,32 +176,32 @@ def test_fix_var_metadata_raw_units_ignore_invalid_units( monkeypatch, empty_cube, fix ): """Test ``fix_var_metadata`` with raw_units and invalid 
units.""" - monkeypatch.setitem(fix.extra_facets, 'raw_units', 'km') - monkeypatch.setattr(fix, 'INVALID_UNITS', {'invalid_units': 'kg'}) - monkeypatch.setattr(fix.vardef, 'units', 'm') - empty_cube.attributes['invalid_units'] = 'invalid_units' + monkeypatch.setitem(fix.extra_facets, "raw_units", "km") + monkeypatch.setattr(fix, "INVALID_UNITS", {"invalid_units": "kg"}) + monkeypatch.setattr(fix.vardef, "units", "m") + empty_cube.attributes["invalid_units"] = "invalid_units" fix.fix_var_metadata(empty_cube) - assert empty_cube.units == 'm' # invalid units have been ignored - assert 'invalid_units' not in empty_cube.attributes + assert empty_cube.units == "m" # invalid units have been ignored + assert "invalid_units" not in empty_cube.attributes np.testing.assert_allclose(empty_cube.data, 1000.0) def test_fix_var_metadata_units_exponent(monkeypatch, empty_cube, fix): """Test ``fix_var_metadata`` with invalid units.""" - monkeypatch.setattr(fix.vardef, 'units', 'm s-2') - empty_cube.attributes['invalid_units'] = 'km/s**2' + monkeypatch.setattr(fix.vardef, "units", "m s-2") + empty_cube.attributes["invalid_units"] = "km/s**2" fix.fix_var_metadata(empty_cube) - assert empty_cube.units == 'm s-2' + assert empty_cube.units == "m s-2" np.testing.assert_allclose(empty_cube.data, 1000.0) def test_fix_var_metadata_units_fail(empty_cube, fix): """Test ``fix_var_metadata`` with invalid units.""" - empty_cube.attributes['invalid_units'] = 'invalid_units' + empty_cube.attributes["invalid_units"] = "invalid_units" msg = "Failed to fix invalid units 'invalid_units' for variable 'tas'" with pytest.raises(ValueError, match=msg): @@ -203,46 +211,47 @@ def test_fix_var_metadata_units_fail(empty_cube, fix): def test_get_cube(cubes, fix): """Test ``get_cube``.""" cube = fix.get_cube(cubes) - assert cube.var_name == 'tas' + assert cube.var_name == "tas" def test_get_cube_custom_var_name(cubes, fix): """Test ``get_cube`` with custom `var_name`.""" - cube = fix.get_cube(cubes, var_name='pr') - assert cube.var_name == 'pr' + cube = fix.get_cube(cubes, var_name="pr") + assert cube.var_name == "pr" def test_get_cube_extra_facets(cubes, fix): """Test ``get_cube`` with `raw_name` in extra facets.""" - fix.extra_facets['raw_name'] = 'pr' + fix.extra_facets["raw_name"] = "pr" cube = fix.get_cube(cubes) - assert cube.var_name == 'pr' + assert cube.var_name == "pr" def test_get_cube_fail(cubes, fix): """Test ``get_cube`` with invalid `var_name`.""" msg = "Variable 'x' used to extract 'tas' is not available in input file" with pytest.raises(ValueError, match=msg): - fix.get_cube(cubes, var_name='x') + fix.get_cube(cubes, var_name="x") @pytest.mark.parametrize( - 'coord,coord_name,func_name', + "coord,coord_name,func_name", [ - ('time', 'time', 'fix_regular_time'), - ('time1', 'time', 'fix_regular_time'), - ('time2', 'time', 'fix_regular_time'), - ('time3', 'time', 'fix_regular_time'), - ('latitude', 'latitude', 'fix_regular_lat'), - ('longitude', 'longitude', 'fix_regular_lon'), - ] + ("time", "time", "fix_regular_time"), + ("time1", "time", "fix_regular_time"), + ("time2", "time", "fix_regular_time"), + ("time3", "time", "fix_regular_time"), + ("latitude", "latitude", "fix_regular_lat"), + ("longitude", "longitude", "fix_regular_lon"), + ], ) -def test_fix_regular_coords_from_cube(monkeypatch, sample_cube, fix, coord, - coord_name, func_name): +def test_fix_regular_coords_from_cube( + monkeypatch, sample_cube, fix, coord, coord_name, func_name +): """Test fixing of regular coords from cube.""" coord_info = 
CoordinateInfo(coord) coord_info.standard_name = coord_name - monkeypatch.setattr(fix.vardef, 'coordinates', {coord: coord_info}) + monkeypatch.setattr(fix.vardef, "coordinates", {coord: coord_info}) func = getattr(fix, func_name) func(sample_cube) @@ -255,22 +264,23 @@ def test_fix_regular_coords_from_cube(monkeypatch, sample_cube, fix, coord, @pytest.mark.parametrize( - 'coord,coord_name,func_name', + "coord,coord_name,func_name", [ - ('time', 'time', 'fix_regular_time'), - ('time1', 'time', 'fix_regular_time'), - ('time2', 'time', 'fix_regular_time'), - ('time3', 'time', 'fix_regular_time'), - ('latitude', 'latitude', 'fix_regular_lat'), - ('longitude', 'longitude', 'fix_regular_lon'), - ] + ("time", "time", "fix_regular_time"), + ("time1", "time", "fix_regular_time"), + ("time2", "time", "fix_regular_time"), + ("time3", "time", "fix_regular_time"), + ("latitude", "latitude", "fix_regular_lat"), + ("longitude", "longitude", "fix_regular_lon"), + ], ) -def test_fix_regular_coords_from_str(monkeypatch, sample_cube, fix, coord, - coord_name, func_name): +def test_fix_regular_coords_from_str( + monkeypatch, sample_cube, fix, coord, coord_name, func_name +): """Test fixing of regular coords from string.""" coord_info = CoordinateInfo(coord) coord_info.standard_name = coord_name - monkeypatch.setattr(fix.vardef, 'coordinates', {coord: coord_info}) + monkeypatch.setattr(fix.vardef, "coordinates", {coord: coord_info}) func = getattr(fix, func_name) func(sample_cube, coord=coord_name) @@ -283,15 +293,16 @@ def test_fix_regular_coords_from_str(monkeypatch, sample_cube, fix, coord, @pytest.mark.parametrize( - 'func_name,coord_name,units', + "func_name,coord_name,units", [ - ('fix_regular_time', 'time', 'days since 01-01-1990'), - ('fix_regular_lat', 'latitude', 'rad'), - ('fix_regular_lon', 'longitude', 'rad'), - ] + ("fix_regular_time", "time", "days since 01-01-1990"), + ("fix_regular_lat", "latitude", "rad"), + ("fix_regular_lon", "longitude", "rad"), + ], ) -def test_fix_regular_coords_from_coords(empty_cube, fix, func_name, - coord_name, units): +def test_fix_regular_coords_from_coords( + empty_cube, fix, func_name, coord_name, units +): """Test fixing of regular coords from coords.""" coord = AuxCoord([1.570796, 3.141592], units=units) @@ -305,15 +316,16 @@ def test_fix_regular_coords_from_coords(empty_cube, fix, func_name, @pytest.mark.parametrize( - 'func_name,coord_name,units', + "func_name,coord_name,units", [ - ('fix_regular_time', 'time', 'days since 01-01-1990'), - ('fix_regular_lat', 'latitude', 'rad'), - ('fix_regular_lon', 'longitude', 'rad'), - ] + ("fix_regular_time", "time", "days since 01-01-1990"), + ("fix_regular_lat", "latitude", "rad"), + ("fix_regular_lon", "longitude", "rad"), + ], ) -def test_fix_regular_coords_from_coords_no_bounds(empty_cube, fix, func_name, - coord_name, units): +def test_fix_regular_coords_from_coords_no_bounds( + empty_cube, fix, func_name, coord_name, units +): """Test fixing of regular coords from coords.""" coord = AuxCoord([1.570796, 3.141592], units=units) @@ -328,22 +340,22 @@ def test_fix_regular_coords_from_coords_no_bounds(empty_cube, fix, func_name, def test_guess_coord_bounds_from_str(sample_cube, fix): """Test ``guess_coord_bounds`` from string.""" - out_coord = fix.guess_coord_bounds(sample_cube, 'height') - assert out_coord is sample_cube.coord('height') + out_coord = fix.guess_coord_bounds(sample_cube, "height") + assert out_coord is sample_cube.coord("height") np.testing.assert_allclose(out_coord.bounds, [[1.0, 3.0], [3.0, 5.0]]) def 
test_guess_coord_bounds_from_str_len_1(sample_cube, fix): """Test ``guess_coord_bounds`` from string.""" - out_coord = fix.guess_coord_bounds(sample_cube, 'time') - assert out_coord is sample_cube.coord('time') + out_coord = fix.guess_coord_bounds(sample_cube, "time") + assert out_coord is sample_cube.coord("time") assert out_coord.bounds is None def test_guess_coord_bounds_from_str_already_present(sample_cube, fix): """Test ``guess_coord_bounds`` if bounds are already present.""" - out_coord = fix.guess_coord_bounds(sample_cube, 'coord with bounds') - assert out_coord is sample_cube.coord('coord with bounds') + out_coord = fix.guess_coord_bounds(sample_cube, "coord with bounds") + assert out_coord is sample_cube.coord("coord with bounds") np.testing.assert_allclose(out_coord.bounds, [[0.0, 2.5], [2.5, 10.0]]) @@ -374,36 +386,36 @@ def test_guess_coord_bounds_from_coord_already_present(empty_cube, fix): def test_fix_time_metadata(sample_cube, fix): """Test ``fix_time_metadata``.""" out_coord = fix.fix_time_metadata(sample_cube) - assert out_coord is sample_cube.coord('time') - assert out_coord.standard_name == 'time' - assert out_coord.var_name == 'time' - assert out_coord.long_name == 'time' - assert out_coord.units == 'day since 1950-01-01 00:00:00' + assert out_coord is sample_cube.coord("time") + assert out_coord.standard_name == "time" + assert out_coord.var_name == "time" + assert out_coord.long_name == "time" + assert out_coord.units == "day since 1950-01-01 00:00:00" np.testing.assert_allclose(out_coord.points, [0.0]) assert out_coord.bounds is None def test_fix_time_metadata_from_str(sample_cube, fix): """Test ``fix_time_metadata`` from string.""" - out_coord = fix.fix_time_metadata(sample_cube, coord='time') - assert out_coord is sample_cube.coord('time') - assert out_coord.standard_name == 'time' - assert out_coord.var_name == 'time' - assert out_coord.long_name == 'time' - assert out_coord.units == 'day since 1950-01-01 00:00:00' + out_coord = fix.fix_time_metadata(sample_cube, coord="time") + assert out_coord is sample_cube.coord("time") + assert out_coord.standard_name == "time" + assert out_coord.var_name == "time" + assert out_coord.long_name == "time" + assert out_coord.units == "day since 1950-01-01 00:00:00" np.testing.assert_allclose(out_coord.points, [0.0]) assert out_coord.bounds is None def test_fix_time_metadata_from_coord(sample_cube, fix): """Test ``fix_time_metadata`` from coordinate.""" - coord = AuxCoord([2.0], units='day since 1950-01-01 00:00:00') + coord = AuxCoord([2.0], units="day since 1950-01-01 00:00:00") out_coord = fix.fix_time_metadata(sample_cube, coord=coord) assert out_coord is coord - assert out_coord.standard_name == 'time' - assert out_coord.var_name == 'time' - assert out_coord.long_name == 'time' - assert out_coord.units == 'day since 1950-01-01 00:00:00' + assert out_coord.standard_name == "time" + assert out_coord.var_name == "time" + assert out_coord.long_name == "time" + assert out_coord.units == "day since 1950-01-01 00:00:00" np.testing.assert_allclose(out_coord.points, [2.0]) assert out_coord.bounds is None @@ -411,39 +423,39 @@ def test_fix_time_metadata_from_coord(sample_cube, fix): def test_fix_alt16_metadata(sample_cube, fix): """Test ``fix_alt16_metadata``.""" out_coord = fix.fix_alt16_metadata(sample_cube) - assert out_coord is sample_cube.coord('altitude') - assert out_coord.standard_name == 'altitude' - assert out_coord.var_name == 'alt16' - assert out_coord.long_name == 'altitude' - assert out_coord.units == 'm' - assert
out_coord.attributes['positive'] == 'up' + assert out_coord is sample_cube.coord("altitude") + assert out_coord.standard_name == "altitude" + assert out_coord.var_name == "alt16" + assert out_coord.long_name == "altitude" + assert out_coord.units == "m" + assert out_coord.attributes["positive"] == "up" np.testing.assert_allclose(out_coord.points, [2000.0, 4000.0]) assert out_coord.bounds is None def test_fix_alt16_metadata_from_str(sample_cube, fix): """Test ``fix_alt16_metadata`` from string.""" - out_coord = fix.fix_alt16_metadata(sample_cube, coord='altitude') - assert out_coord is sample_cube.coord('altitude') - assert out_coord.standard_name == 'altitude' - assert out_coord.var_name == 'alt16' - assert out_coord.long_name == 'altitude' - assert out_coord.units == 'm' - assert out_coord.attributes['positive'] == 'up' + out_coord = fix.fix_alt16_metadata(sample_cube, coord="altitude") + assert out_coord is sample_cube.coord("altitude") + assert out_coord.standard_name == "altitude" + assert out_coord.var_name == "alt16" + assert out_coord.long_name == "altitude" + assert out_coord.units == "m" + assert out_coord.attributes["positive"] == "up" np.testing.assert_allclose(out_coord.points, [2000.0, 4000.0]) assert out_coord.bounds is None def test_fix_alt16_metadata_from_coord(sample_cube, fix): """Test ``fix_alt16_metadata`` from coordinate.""" - coord = AuxCoord([2.0], units='m') + coord = AuxCoord([2.0], units="m") out_coord = fix.fix_alt16_metadata(sample_cube, coord=coord) assert out_coord is coord - assert out_coord.standard_name == 'altitude' - assert out_coord.var_name == 'alt16' - assert out_coord.long_name == 'altitude' - assert out_coord.units == 'm' - assert out_coord.attributes['positive'] == 'up' + assert out_coord.standard_name == "altitude" + assert out_coord.var_name == "alt16" + assert out_coord.long_name == "altitude" + assert out_coord.units == "m" + assert out_coord.attributes["positive"] == "up" np.testing.assert_allclose(out_coord.points, [2.0]) assert out_coord.bounds is None @@ -451,39 +463,39 @@ def test_fix_alt16_metadata_from_coord(sample_cube, fix): def test_fix_height_metadata(sample_cube, fix): """Test ``fix_height_metadata``.""" out_coord = fix.fix_height_metadata(sample_cube) - assert out_coord is sample_cube.coord('height') - assert out_coord.standard_name == 'height' - assert out_coord.var_name == 'height' - assert out_coord.long_name == 'height' - assert out_coord.units == 'm' - assert out_coord.attributes['positive'] == 'up' + assert out_coord is sample_cube.coord("height") + assert out_coord.standard_name == "height" + assert out_coord.var_name == "height" + assert out_coord.long_name == "height" + assert out_coord.units == "m" + assert out_coord.attributes["positive"] == "up" np.testing.assert_allclose(out_coord.points, [2000.0, 4000.0]) assert out_coord.bounds is None def test_fix_height_metadata_from_str(sample_cube, fix): """Test ``fix_height_metadata`` from string.""" - out_coord = fix.fix_height_metadata(sample_cube, coord='height') - assert out_coord is sample_cube.coord('height') - assert out_coord.standard_name == 'height' - assert out_coord.var_name == 'height' - assert out_coord.long_name == 'height' - assert out_coord.units == 'm' - assert out_coord.attributes['positive'] == 'up' + out_coord = fix.fix_height_metadata(sample_cube, coord="height") + assert out_coord is sample_cube.coord("height") + assert out_coord.standard_name == "height" + assert out_coord.var_name == "height" + assert out_coord.long_name == "height" + assert out_coord.units == "m" + assert out_coord.attributes["positive"] == "up" np.testing.assert_allclose(out_coord.points, [2000.0, 4000.0]) assert out_coord.bounds is None def test_fix_height_metadata_from_coord(sample_cube, fix): """Test ``fix_height_metadata`` from string.""" - coord = AuxCoord([2.0], units='m') + coord = AuxCoord([2.0], units="m") out_coord = fix.fix_height_metadata(sample_cube, coord=coord) assert out_coord is coord - assert out_coord.standard_name == 'height' - assert out_coord.var_name == 'height' - assert out_coord.long_name == 'height' - assert out_coord.units == 'm' - assert out_coord.attributes['positive'] == 'up' + assert out_coord.standard_name == "height" + assert out_coord.var_name == "height" + assert out_coord.long_name == "height" + assert out_coord.units
== "m" + assert out_coord.attributes["positive"] == "up" np.testing.assert_allclose(out_coord.points, [2000.0, 4000.0]) assert out_coord.bounds is None def test_fix_height_metadata_from_coord(sample_cube, fix): """Test ``fix_height_metadata`` from string.""" - coord = AuxCoord([2.0], units='m') + coord = AuxCoord([2.0], units="m") out_coord = fix.fix_height_metadata(sample_cube, coord=coord) assert out_coord is coord - assert out_coord.standard_name == 'height' - assert out_coord.var_name == 'height' - assert out_coord.long_name == 'height' - assert out_coord.units == 'm' - assert out_coord.attributes['positive'] == 'up' + assert out_coord.standard_name == "height" + assert out_coord.var_name == "height" + assert out_coord.long_name == "height" + assert out_coord.units == "m" + assert out_coord.attributes["positive"] == "up" np.testing.assert_allclose(out_coord.points, [2.0]) assert out_coord.bounds is None @@ -491,39 +503,39 @@ def test_fix_height_metadata_from_coord(sample_cube, fix): def test_fix_plev_metadata(sample_cube, fix): """Test ``fix_plev_metadata``.""" out_coord = fix.fix_plev_metadata(sample_cube) - assert out_coord is sample_cube.coord('air_pressure') - assert out_coord.standard_name == 'air_pressure' - assert out_coord.var_name == 'plev' - assert out_coord.long_name == 'pressure' - assert out_coord.units == 'Pa' - assert out_coord.attributes['positive'] == 'down' + assert out_coord is sample_cube.coord("air_pressure") + assert out_coord.standard_name == "air_pressure" + assert out_coord.var_name == "plev" + assert out_coord.long_name == "pressure" + assert out_coord.units == "Pa" + assert out_coord.attributes["positive"] == "down" np.testing.assert_allclose(out_coord.points, [100000.0, 90000.0]) assert out_coord.bounds is None def test_fix_plev_metadata_from_str(sample_cube, fix): """Test ``fix_plev_metadata`` from string.""" - out_coord = fix.fix_plev_metadata(sample_cube, coord='air_pressure') - assert out_coord is sample_cube.coord('air_pressure') - assert out_coord.standard_name == 'air_pressure' - assert out_coord.var_name == 'plev' - assert out_coord.long_name == 'pressure' - assert out_coord.units == 'Pa' - assert out_coord.attributes['positive'] == 'down' + out_coord = fix.fix_plev_metadata(sample_cube, coord="air_pressure") + assert out_coord is sample_cube.coord("air_pressure") + assert out_coord.standard_name == "air_pressure" + assert out_coord.var_name == "plev" + assert out_coord.long_name == "pressure" + assert out_coord.units == "Pa" + assert out_coord.attributes["positive"] == "down" np.testing.assert_allclose(out_coord.points, [100000.0, 90000.0]) assert out_coord.bounds is None def test_fix_plev_metadata_from_coord(sample_cube, fix): """Test ``fix_plev_metadata`` from string.""" - coord = AuxCoord([1.0], units='Pa') + coord = AuxCoord([1.0], units="Pa") out_coord = fix.fix_plev_metadata(sample_cube, coord=coord) assert out_coord is coord - assert out_coord.standard_name == 'air_pressure' - assert out_coord.var_name == 'plev' - assert out_coord.long_name == 'pressure' - assert out_coord.units == 'Pa' - assert out_coord.attributes['positive'] == 'down' + assert out_coord.standard_name == "air_pressure" + assert out_coord.var_name == "plev" + assert out_coord.long_name == "pressure" + assert out_coord.units == "Pa" + assert out_coord.attributes["positive"] == "down" np.testing.assert_allclose(out_coord.points, [1.0]) assert out_coord.bounds is None @@ -531,36 +543,36 @@ def test_fix_plev_metadata_from_coord(sample_cube, fix): def 
test_fix_lat_metadata(sample_cube, fix): """Test ``fix_lat_metadata``.""" out_coord = fix.fix_lat_metadata(sample_cube) - assert out_coord is sample_cube.coord('latitude') - assert out_coord.standard_name == 'latitude' - assert out_coord.var_name == 'lat' - assert out_coord.long_name == 'latitude' - assert out_coord.units == 'degrees_north' + assert out_coord is sample_cube.coord("latitude") + assert out_coord.standard_name == "latitude" + assert out_coord.var_name == "lat" + assert out_coord.long_name == "latitude" + assert out_coord.units == "degrees_north" np.testing.assert_allclose(out_coord.points, [180.0]) assert out_coord.bounds is None def test_fix_lat_metadata_from_str(sample_cube, fix): """Test ``fix_lat_metadata`` from string.""" - out_coord = fix.fix_lat_metadata(sample_cube, coord='latitude') - assert out_coord is sample_cube.coord('latitude') - assert out_coord.standard_name == 'latitude' - assert out_coord.var_name == 'lat' - assert out_coord.long_name == 'latitude' - assert out_coord.units == 'degrees_north' + out_coord = fix.fix_lat_metadata(sample_cube, coord="latitude") + assert out_coord is sample_cube.coord("latitude") + assert out_coord.standard_name == "latitude" + assert out_coord.var_name == "lat" + assert out_coord.long_name == "latitude" + assert out_coord.units == "degrees_north" np.testing.assert_allclose(out_coord.points, [180.0]) assert out_coord.bounds is None def test_fix_lat_metadata_from_coord(sample_cube, fix): """Test ``fix_lat_metadata`` from coordinate.""" - coord = AuxCoord([0.0], units='degrees') + coord = AuxCoord([0.0], units="degrees") out_coord = fix.fix_lat_metadata(sample_cube, coord=coord) assert out_coord is coord - assert out_coord.standard_name == 'latitude' - assert out_coord.var_name == 'lat' - assert out_coord.long_name == 'latitude' - assert out_coord.units == 'degrees_north' + assert out_coord.standard_name == "latitude" + assert out_coord.var_name == "lat" + assert out_coord.long_name == "latitude" + assert out_coord.units == "degrees_north" np.testing.assert_allclose(out_coord.points, [0.0]) assert out_coord.bounds is None @@ -568,35 +580,35 @@ def test_fix_lat_metadata_from_coord(sample_cube, fix): def test_fix_lon_metadata(sample_cube, fix): """Test ``fix_lon_metadata``.""" out_coord = fix.fix_lon_metadata(sample_cube) - assert out_coord is sample_cube.coord('longitude') - assert out_coord.standard_name == 'longitude' - assert out_coord.var_name == 'lon' - assert out_coord.long_name == 'longitude' - assert out_coord.units == 'degrees_east' + assert out_coord is sample_cube.coord("longitude") + assert out_coord.standard_name == "longitude" + assert out_coord.var_name == "lon" + assert out_coord.long_name == "longitude" + assert out_coord.units == "degrees_east" np.testing.assert_allclose(out_coord.points, [180.0]) assert out_coord.bounds is None def test_fix_lon_metadata_from_str(sample_cube, fix): """Test ``fix_lon_metadata`` from string.""" - out_coord = fix.fix_lon_metadata(sample_cube, coord='longitude') - assert out_coord is sample_cube.coord('longitude') - assert out_coord.standard_name == 'longitude' - assert out_coord.var_name == 'lon' - assert out_coord.long_name == 'longitude' - assert out_coord.units == 'degrees_east' + out_coord = fix.fix_lon_metadata(sample_cube, coord="longitude") + assert out_coord is sample_cube.coord("longitude") + assert out_coord.standard_name == "longitude" + assert out_coord.var_name == "lon" + assert out_coord.long_name == "longitude" + assert out_coord.units == "degrees_east"
np.testing.assert_allclose(out_coord.points, [180.0]) assert out_coord.bounds is None def test_fix_lon_metadata_from_coord(sample_cube, fix): """Test ``fix_lon_metadata`` from coordinate.""" - coord = AuxCoord([0.0], units='degrees') + coord = AuxCoord([0.0], units="degrees") out_coord = fix.fix_lon_metadata(sample_cube, coord=coord) assert out_coord is coord - assert out_coord.standard_name == 'longitude' - assert out_coord.var_name == 'lon' - assert out_coord.long_name == 'longitude' - assert out_coord.units == 'degrees_east' + assert out_coord.standard_name == "longitude" + assert out_coord.var_name == "lon" + assert out_coord.long_name == "longitude" + assert out_coord.units == "degrees_east" np.testing.assert_allclose(out_coord.points, [0.0]) assert out_coord.bounds is None diff --git a/tests/integration/cmor/_fixes/test_shared.py b/tests/integration/cmor/_fixes/test_shared.py index b51b3839c8..09e3657e01 100644 --- a/tests/integration/cmor/_fixes/test_shared.py +++ b/tests/integration/cmor/_fixes/test_shared.py @@ -1,4 +1,5 @@ """Tests for shared functions for fixes.""" + import dask.array as da import iris import iris.coords @@ -41,8 +42,9 @@ def test_altitude_to_pressure_func(): np.testing.assert_allclose(func(50.0), 100725.54298598564) np.testing.assert_allclose(func(80000.0), 0.88628) np.testing.assert_allclose(func(90000.0), 0.1576523580997673) - np.testing.assert_allclose(func(np.array([0.0, 100.0])), - [101325.0, 100129.0]) + np.testing.assert_allclose( + func(np.array([0.0, 100.0])), [101325.0, 100129.0] + ) def test_pressure_to_altitude_func(): @@ -55,38 +57,41 @@ np.testing.assert_allclose(func(1000.0), 31054.63120206961) np.testing.assert_allclose(func(75.9448), 50000) np.testing.assert_allclose(func(0.1), 91607.36011892557) - np.testing.assert_allclose(func(np.array([101325.0, 177687.0])), - [0.0, -5000.0], atol=1.0e-7) + np.testing.assert_allclose( + func(np.array([101325.0, 177687.0])), [0.0, -5000.0], atol=1.0e-7 + ) TEST_ADD_AUX_COORDS_FROM_CUBES = [ ({}, 1), - ({'x': ()}, 0), - ({'x': 1, 'a': ()}, 0), - ({'a': ()}, 1), - ({'a': (), 'b': 1}, 1), - ({'a': (), 'b': 1}, 1), - ({'c': 1}, 2), - ({'a': (), 'b': 1, 'c': 1}, 2), - ({'d': (0, 1)}, 1), - ({'a': (), 'b': 1, 'd': (0, 1)}, 1), + ({"x": ()}, 0), + ({"x": 1, "a": ()}, 0), + ({"a": ()}, 1), + ({"a": (), "b": 1}, 1), + ({"a": (), "b": 1}, 1), + ({"c": 1}, 2), + ({"a": (), "b": 1, "c": 1}, 2), + ({"d": (0, 1)}, 1), + ({"a": (), "b": 1, "d": (0, 1)}, 1), ] -@pytest.mark.parametrize('coord_dict,output', TEST_ADD_AUX_COORDS_FROM_CUBES) +@pytest.mark.parametrize("coord_dict,output", TEST_ADD_AUX_COORDS_FROM_CUBES) def test_add_aux_coords_from_cubes(coord_dict, output): """Test extraction of auxiliary coordinates from cubes.""" cube = iris.cube.Cube([[0.0]]) - cubes = iris.cube.CubeList([ - iris.cube.Cube(0.0, var_name='a'), - iris.cube.Cube([0.0], var_name='b'), - iris.cube.Cube([0.0], var_name='c'), - iris.cube.Cube([0.0], var_name='c'), - iris.cube.Cube([[0.0]], var_name='d'), - ]) + cubes = iris.cube.CubeList( + [ + iris.cube.Cube(0.0, var_name="a"), + iris.cube.Cube([0.0], var_name="b"), + iris.cube.Cube([0.0], var_name="c"), + iris.cube.Cube([0.0], var_name="c"), + iris.cube.Cube([[0.0]], var_name="d"), + ] + ) if output == 1: add_aux_coords_from_cubes(cube, cubes, coord_dict) - for (coord_name, coord_dims) in coord_dict.items(): + for coord_name, coord_dims in coord_dict.items(): coord = cube.coord(var_name=coord_name) if len(cube.coord_dims(coord)) == 1: assert
cube.coord_dims(coord)[0] == coord_dims @@ -187,30 +192,58 @@ def test_map_on_filled_da_mask_not_used(): np.testing.assert_equal(output.mask, [False, False, False]) -ALT_COORD = iris.coords.AuxCoord([0.0], bounds=[[-100.0, 500.0]], - standard_name='altitude', units='m', - var_name='alt', long_name='altitude') +ALT_COORD = iris.coords.AuxCoord( + [0.0], + bounds=[[-100.0, 500.0]], + standard_name="altitude", + units="m", + var_name="alt", + long_name="altitude", +) ALT_COORD_MASKED = ALT_COORD.copy(np.ma.masked_equal([0.0], 0.0)) -ALT_COORD_NB = iris.coords.AuxCoord([0.0], standard_name='altitude', units='m', - var_name='alt', long_name='altitude') -ALT_COORD_KM = iris.coords.AuxCoord([0.0], bounds=[[-0.1, 0.5]], - var_name='alt', long_name='altitude', - standard_name='altitude', units='km') -P_COORD = iris.coords.AuxCoord([101325.0], bounds=[[102532.0, 95460.8]], - standard_name='air_pressure', units='Pa', - var_name='plev', long_name='pressure') +ALT_COORD_NB = iris.coords.AuxCoord( + [0.0], + standard_name="altitude", + units="m", + var_name="alt", + long_name="altitude", +) +ALT_COORD_KM = iris.coords.AuxCoord( + [0.0], + bounds=[[-0.1, 0.5]], + var_name="alt", + long_name="altitude", + standard_name="altitude", + units="km", +) +P_COORD = iris.coords.AuxCoord( + [101325.0], + bounds=[[102532.0, 95460.8]], + standard_name="air_pressure", + units="Pa", + var_name="plev", + long_name="pressure", +) P_COORD_MASKED = P_COORD.copy(np.ma.masked_equal([0.0], 0.0)) -P_COORD_NB = iris.coords.AuxCoord([101325.0], standard_name='air_pressure', - units='Pa', var_name='plev', - long_name='pressure') -CUBE_ALT = iris.cube.Cube([1.0], var_name='x', - aux_coords_and_dims=[(ALT_COORD, 0)]) -CUBE_ALT_MASKED = iris.cube.Cube([1.0], var_name='x', - aux_coords_and_dims=[(ALT_COORD_MASKED, 0)]) -CUBE_ALT_NB = iris.cube.Cube([1.0], var_name='x', - aux_coords_and_dims=[(ALT_COORD_NB, 0)]) -CUBE_ALT_KM = iris.cube.Cube([1.0], var_name='x', - aux_coords_and_dims=[(ALT_COORD_KM, 0)]) +P_COORD_NB = iris.coords.AuxCoord( + [101325.0], + standard_name="air_pressure", + units="Pa", + var_name="plev", + long_name="pressure", +) +CUBE_ALT = iris.cube.Cube( + [1.0], var_name="x", aux_coords_and_dims=[(ALT_COORD, 0)] +) +CUBE_ALT_MASKED = iris.cube.Cube( + [1.0], var_name="x", aux_coords_and_dims=[(ALT_COORD_MASKED, 0)] +) +CUBE_ALT_NB = iris.cube.Cube( + [1.0], var_name="x", aux_coords_and_dims=[(ALT_COORD_NB, 0)] +) +CUBE_ALT_KM = iris.cube.Cube( + [1.0], var_name="x", aux_coords_and_dims=[(ALT_COORD_KM, 0)] +) TEST_ADD_PLEV_FROM_ALTITUDE = [ @@ -222,25 +255,27 @@ def test_map_on_filled_da_mask_not_used(): ] -@pytest.mark.parametrize('cube,output', TEST_ADD_PLEV_FROM_ALTITUDE) +@pytest.mark.parametrize("cube,output", TEST_ADD_PLEV_FROM_ALTITUDE) def test_add_plev_from_altitude(cube, output): """Test adding of pressure level coordinate.""" if output is None: with pytest.raises(ValueError) as err: add_plev_from_altitude(cube) - msg = ("Cannot add 'air_pressure' coordinate, 'altitude' coordinate " - "not available") + msg = ( + "Cannot add 'air_pressure' coordinate, 'altitude' coordinate " + "not available" + ) assert str(err.value) == msg return - assert not cube.coords('air_pressure') + assert not cube.coords("air_pressure") add_plev_from_altitude(cube) - air_pressure_coord = cube.coord('air_pressure') + air_pressure_coord = cube.coord("air_pressure") metadata_list = [ - 'var_name', - 'standard_name', - 'long_name', - 'units', - 'attributes', + "var_name", + "standard_name", + "long_name", + "units", + "attributes", ] 
for attr in metadata_list: assert getattr(air_pressure_coord, attr) == getattr(output, attr) @@ -253,21 +288,29 @@ def test_add_plev_from_altitude(cube, output): np.testing.assert_allclose( air_pressure_coord.bounds, output.bounds, rtol=1e-3 ) - assert cube.coords('altitude') + assert cube.coords("altitude") -P_COORD_HPA = iris.coords.AuxCoord([1013.25], bounds=[[1025.32, 954.60]], - var_name='plev', - standard_name='air_pressure', - long_name='pressure', units='hPa') -CUBE_PLEV = iris.cube.Cube([1.0], var_name='x', - aux_coords_and_dims=[(P_COORD, 0)]) -CUBE_PLEV_MASKED = iris.cube.Cube([1.0], var_name='x', - aux_coords_and_dims=[(P_COORD_MASKED, 0)]) -CUBE_PLEV_NB = iris.cube.Cube([1.0], var_name='x', - aux_coords_and_dims=[(P_COORD_NB, 0)]) -CUBE_PLEV_HPA = iris.cube.Cube([1.0], var_name='x', - aux_coords_and_dims=[(P_COORD_HPA, 0)]) +P_COORD_HPA = iris.coords.AuxCoord( + [1013.25], + bounds=[[1025.32, 954.60]], + var_name="plev", + standard_name="air_pressure", + long_name="pressure", + units="hPa", +) +CUBE_PLEV = iris.cube.Cube( + [1.0], var_name="x", aux_coords_and_dims=[(P_COORD, 0)] +) +CUBE_PLEV_MASKED = iris.cube.Cube( + [1.0], var_name="x", aux_coords_and_dims=[(P_COORD_MASKED, 0)] +) +CUBE_PLEV_NB = iris.cube.Cube( + [1.0], var_name="x", aux_coords_and_dims=[(P_COORD_NB, 0)] +) +CUBE_PLEV_HPA = iris.cube.Cube( + [1.0], var_name="x", aux_coords_and_dims=[(P_COORD_HPA, 0)] +) TEST_ADD_ALTITUDE_FROM_PLEV = [ @@ -279,25 +322,27 @@ def test_add_plev_from_altitude(cube, output): ] -@pytest.mark.parametrize('cube,output', TEST_ADD_ALTITUDE_FROM_PLEV) +@pytest.mark.parametrize("cube,output", TEST_ADD_ALTITUDE_FROM_PLEV) def test_add_altitude_from_plev(cube, output): """Test adding of altitude coordinate.""" if output is None: with pytest.raises(ValueError) as err: add_altitude_from_plev(cube) - msg = ("Cannot add 'altitude' coordinate, 'air_pressure' coordinate " - "not available") + msg = ( + "Cannot add 'altitude' coordinate, 'air_pressure' coordinate " + "not available" + ) assert str(err.value) == msg return - assert not cube.coords('altitude') + assert not cube.coords("altitude") add_altitude_from_plev(cube) - altitude_coord = cube.coord('altitude') + altitude_coord = cube.coord("altitude") metadata_list = [ - 'var_name', - 'standard_name', - 'long_name', - 'units', - 'attributes', + "var_name", + "standard_name", + "long_name", + "units", + "attributes", ] for attr in metadata_list: assert getattr(altitude_coord, attr) == getattr(output, attr) @@ -305,15 +350,16 @@ def test_add_altitude_from_plev(cube, output): if output.bounds is None: assert altitude_coord.bounds is None else: - np.testing.assert_allclose(altitude_coord.bounds, output.bounds, - rtol=1e-3) - assert cube.coords('air_pressure') + np.testing.assert_allclose( + altitude_coord.bounds, output.bounds, rtol=1e-3 + ) + assert cube.coords("air_pressure") -DIM_COORD = iris.coords.DimCoord([3.141592], - bounds=[[1.23, 4.567891011]], - standard_name='latitude') -CUBE_1 = iris.cube.Cube([1.0], standard_name='air_temperature') +DIM_COORD = iris.coords.DimCoord( + [3.141592], bounds=[[1.23, 4.567891011]], standard_name="latitude" +) +CUBE_1 = iris.cube.Cube([1.0], standard_name="air_temperature") CUBE_2 = iris.cube.Cube([3.0], dim_coords_and_dims=[(DIM_COORD, 0)]) TEST_ADD_SCALAR_COORD = [ (CUBE_1.copy(), None), @@ -324,158 +370,168 @@ def test_add_altitude_from_plev(cube, output): TEST_ADD_SCALAR_COORD_NO_VALS = [CUBE_1.copy(), CUBE_2.copy()] -@pytest.mark.parametrize('cube_in,depth', TEST_ADD_SCALAR_COORD) 
+@pytest.mark.parametrize("cube_in,depth", TEST_ADD_SCALAR_COORD) def test_add_scalar_depth_coord(cube_in, depth): """Test adding of scalar depth coordinate.""" cube_in = cube_in.copy() if depth is None: depth = 0.0 - depth_coord = iris.coords.AuxCoord(depth, - var_name='depth', - standard_name='depth', - long_name='depth', - units=Unit('m'), - attributes={'positive': 'down'}) + depth_coord = iris.coords.AuxCoord( + depth, + var_name="depth", + standard_name="depth", + long_name="depth", + units=Unit("m"), + attributes={"positive": "down"}, + ) with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube_in.coord('depth') + cube_in.coord("depth") if depth == 0.0: cube_out = add_scalar_depth_coord(cube_in) else: cube_out = add_scalar_depth_coord(cube_in, depth) assert cube_out is cube_in - coord = cube_in.coord('depth') + coord = cube_in.coord("depth") assert coord == depth_coord cube_out_2 = add_scalar_depth_coord(cube_out) assert cube_out_2 is cube_out - coord = cube_in.coord('depth') + coord = cube_in.coord("depth") assert coord == depth_coord -@pytest.mark.parametrize('cube_in,height', TEST_ADD_SCALAR_COORD) +@pytest.mark.parametrize("cube_in,height", TEST_ADD_SCALAR_COORD) def test_add_scalar_height_coord(cube_in, height): """Test adding of scalar height coordinate.""" cube_in = cube_in.copy() if height is None: height = 2.0 - height_coord = iris.coords.AuxCoord(height, - var_name='height', - standard_name='height', - long_name='height', - units=Unit('m'), - attributes={'positive': 'up'}) + height_coord = iris.coords.AuxCoord( + height, + var_name="height", + standard_name="height", + long_name="height", + units=Unit("m"), + attributes={"positive": "up"}, + ) with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube_in.coord('height') + cube_in.coord("height") if height == 2.0: cube_out = add_scalar_height_coord(cube_in) else: cube_out = add_scalar_height_coord(cube_in, height) assert cube_out is cube_in - coord = cube_in.coord('height') + coord = cube_in.coord("height") assert coord == height_coord cube_out_2 = add_scalar_height_coord(cube_out) assert cube_out_2 is cube_out - coord = cube_in.coord('height') + coord = cube_in.coord("height") assert coord == height_coord -@pytest.mark.parametrize('cube_in', TEST_ADD_SCALAR_COORD_NO_VALS) +@pytest.mark.parametrize("cube_in", TEST_ADD_SCALAR_COORD_NO_VALS) def test_add_scalar_lambda550nm_coord(cube_in): """Test adding of scalar lambda550nm coordinate.""" cube_in = cube_in.copy() lambda550nm_coord = iris.coords.AuxCoord( 550.0, - var_name='wavelength', - standard_name='radiation_wavelength', - long_name='Radiation Wavelength 550 nanometers', - units='nm', + var_name="wavelength", + standard_name="radiation_wavelength", + long_name="Radiation Wavelength 550 nanometers", + units="nm", ) with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube_in.coord('radiation_wavelength') + cube_in.coord("radiation_wavelength") cube_out = add_scalar_lambda550nm_coord(cube_in) assert cube_out is cube_in - coord = cube_in.coord('radiation_wavelength') + coord = cube_in.coord("radiation_wavelength") assert coord == lambda550nm_coord cube_out_2 = add_scalar_lambda550nm_coord(cube_out) assert cube_out_2 is cube_out - coord = cube_in.coord('radiation_wavelength') + coord = cube_in.coord("radiation_wavelength") assert coord == lambda550nm_coord -@pytest.mark.parametrize('cube_in,typeland', TEST_ADD_SCALAR_COORD) +@pytest.mark.parametrize("cube_in,typeland", TEST_ADD_SCALAR_COORD) def test_add_scalar_typeland_coord(cube_in, typeland): 
"""Test adding of scalar typeland coordinate.""" cube_in = cube_in.copy() if typeland is None: - typeland = 'default' - typeland_coord = iris.coords.AuxCoord(typeland, - var_name='type', - standard_name='area_type', - long_name='Land area type', - units=Unit('no unit')) + typeland = "default" + typeland_coord = iris.coords.AuxCoord( + typeland, + var_name="type", + standard_name="area_type", + long_name="Land area type", + units=Unit("no unit"), + ) with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube_in.coord('area_type') - if typeland == 'default': + cube_in.coord("area_type") + if typeland == "default": cube_out = add_scalar_typeland_coord(cube_in) else: cube_out = add_scalar_typeland_coord(cube_in, typeland) assert cube_out is cube_in - coord = cube_in.coord('area_type') + coord = cube_in.coord("area_type") assert coord == typeland_coord cube_out_2 = add_scalar_typeland_coord(cube_out) assert cube_out_2 is cube_out - coord = cube_in.coord('area_type') + coord = cube_in.coord("area_type") assert coord == typeland_coord -@pytest.mark.parametrize('cube_in,typesea', TEST_ADD_SCALAR_COORD) +@pytest.mark.parametrize("cube_in,typesea", TEST_ADD_SCALAR_COORD) def test_add_scalar_typesea_coord(cube_in, typesea): """Test adding of scalar typesea coordinate.""" cube_in = cube_in.copy() if typesea is None: - typesea = 'default' - typesea_coord = iris.coords.AuxCoord(typesea, - var_name='type', - standard_name='area_type', - long_name='Ocean area type', - units=Unit('no unit')) + typesea = "default" + typesea_coord = iris.coords.AuxCoord( + typesea, + var_name="type", + standard_name="area_type", + long_name="Ocean area type", + units=Unit("no unit"), + ) with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube_in.coord('area_type') - if typesea == 'default': + cube_in.coord("area_type") + if typesea == "default": cube_out = add_scalar_typesea_coord(cube_in) else: cube_out = add_scalar_typesea_coord(cube_in, typesea) assert cube_out is cube_in - coord = cube_in.coord('area_type') + coord = cube_in.coord("area_type") assert coord == typesea_coord cube_out_2 = add_scalar_typesea_coord(cube_out) assert cube_out_2 is cube_out - coord = cube_in.coord('area_type') + coord = cube_in.coord("area_type") assert coord == typesea_coord -@pytest.mark.parametrize('cube_in,typesi', TEST_ADD_SCALAR_COORD) +@pytest.mark.parametrize("cube_in,typesi", TEST_ADD_SCALAR_COORD) def test_add_scalar_typesi_coord(cube_in, typesi): """Test adding of scalar typesi coordinate.""" cube_in = cube_in.copy() if typesi is None: - typesi = 'sea_ice' - typesi_coord = iris.coords.AuxCoord(typesi, - var_name='type', - standard_name='area_type', - long_name='Sea Ice area type', - units=Unit('no unit')) + typesi = "sea_ice" + typesi_coord = iris.coords.AuxCoord( + typesi, + var_name="type", + standard_name="area_type", + long_name="Sea Ice area type", + units=Unit("no unit"), + ) with pytest.raises(iris.exceptions.CoordinateNotFoundError): - cube_in.coord('area_type') - if typesi == 'sea_ice': + cube_in.coord("area_type") + if typesi == "sea_ice": cube_out = add_scalar_typesi_coord(cube_in) else: cube_out = add_scalar_typesi_coord(cube_in, typesi) assert cube_out is cube_in - coord = cube_in.coord('area_type') + coord = cube_in.coord("area_type") assert coord == typesi_coord cube_out_2 = add_scalar_typesi_coord(cube_out) assert cube_out_2 is cube_out - coord = cube_in.coord('area_type') + coord = cube_in.coord("area_type") assert coord == typesi_coord @@ -483,10 +539,10 @@ def test_cube_to_aux_coord(): """Test 
converting cube to auxiliary coordinate.""" cube = iris.cube.Cube( np.ones((2, 2)), - standard_name='longitude', - long_name='longitude', - var_name='lon', - units='degrees_north', + standard_name="longitude", + long_name="longitude", + var_name="lon", + units="degrees_north", ) coord = cube_to_aux_coord(cube) assert coord.var_name == cube.var_name @@ -497,28 +553,30 @@ def test_cube_to_aux_coord(): TEST_GET_BOUNDS_CUBE = [ - ('x', None), - ('a', iris.cube.Cube(0.0, var_name='a_bnds')), - ('b', iris.cube.Cube([0.0], var_name='b_bounds')), - ('c', False), - ('d', iris.cube.Cube([[0.0]], var_name='d_bnds')), - ('e', False), + ("x", None), + ("a", iris.cube.Cube(0.0, var_name="a_bnds")), + ("b", iris.cube.Cube([0.0], var_name="b_bounds")), + ("c", False), + ("d", iris.cube.Cube([[0.0]], var_name="d_bnds")), + ("e", False), ] -@pytest.mark.parametrize('coord_name,output', TEST_GET_BOUNDS_CUBE) +@pytest.mark.parametrize("coord_name,output", TEST_GET_BOUNDS_CUBE) def test_get_bounds_cube(coord_name, output): """Test retrieving of bounds cube from list of cubes.""" - cubes = iris.cube.CubeList([ - iris.cube.Cube(0.0, var_name='a_bnds'), - iris.cube.Cube([0.0], var_name='b_bounds'), - iris.cube.Cube([0.0], var_name='c_bnds'), - iris.cube.Cube([0.0], var_name='c_bnds'), - iris.cube.Cube([[0.0]], var_name='d_bnds'), - iris.cube.Cube([[0.0]], var_name='d_bounds'), - iris.cube.Cube([[0.0]], var_name='e_bounds'), - iris.cube.Cube([[0.0]], var_name='e_bounds'), - ]) + cubes = iris.cube.CubeList( + [ + iris.cube.Cube(0.0, var_name="a_bnds"), + iris.cube.Cube([0.0], var_name="b_bounds"), + iris.cube.Cube([0.0], var_name="c_bnds"), + iris.cube.Cube([0.0], var_name="c_bnds"), + iris.cube.Cube([[0.0]], var_name="d_bnds"), + iris.cube.Cube([[0.0]], var_name="d_bounds"), + iris.cube.Cube([[0.0]], var_name="e_bounds"), + iris.cube.Cube([[0.0]], var_name="e_bounds"), + ] + ) if output is None: with pytest.raises(ValueError) as err: get_bounds_cube(cubes, coord_name) @@ -537,37 +595,39 @@ def test_get_bounds_cube(coord_name, output): TEST_FIX_BOUNDS = [ ([], [None, [[-3.0, 4.0]]]), - (['a'], [[[1.0, 2.0]], [[-3.0, 4.0]]]), - (['b'], [None, [[-3.0, 4.0]]]), - (['a', 'b'], [[[1.0, 2.0]], [[-3.0, 4.0]]]), + (["a"], [[[1.0, 2.0]], [[-3.0, 4.0]]]), + (["b"], [None, [[-3.0, 4.0]]]), + (["a", "b"], [[[1.0, 2.0]], [[-3.0, 4.0]]]), ] -@pytest.mark.parametrize('var_names,output', TEST_FIX_BOUNDS) +@pytest.mark.parametrize("var_names,output", TEST_FIX_BOUNDS) def test_fix_bounds(var_names, output): """Test retrieving of bounds cube from list of cubes.""" - a_coord = iris.coords.AuxCoord(1.5, var_name='a') - b_coord = iris.coords.AuxCoord(1.5, bounds=[-3.0, 4.0], var_name='b') + a_coord = iris.coords.AuxCoord(1.5, var_name="a") + b_coord = iris.coords.AuxCoord(1.5, bounds=[-3.0, 4.0], var_name="b") cube = iris.cube.Cube( 0.0, aux_coords_and_dims=[(a_coord, ()), (b_coord, ())], - var_name='x', + var_name="x", + ) + cubes = iris.cube.CubeList( + [ + iris.cube.Cube([1.0, 2.0], var_name="a_bnds"), + iris.cube.Cube([1.0, 2.0], var_name="b_bounds"), + iris.cube.Cube([1000.0, 2000.0], var_name="c_bounds"), + ] ) - cubes = iris.cube.CubeList([ - iris.cube.Cube([1.0, 2.0], var_name='a_bnds'), - iris.cube.Cube([1.0, 2.0], var_name='b_bounds'), - iris.cube.Cube([1000.0, 2000.0], var_name='c_bounds'), - ]) - assert cube.coord(var_name='a').bounds is None + assert cube.coord(var_name="a").bounds is None fix_bounds(cube, cubes, var_names) if output[0] is None: - assert cube.coord(var_name='a').bounds is None + assert 
cube.coord(var_name="a").bounds is None else: - np.testing.assert_allclose(cube.coord(var_name='a').bounds, output[0]) - np.testing.assert_allclose(cube.coord(var_name='b').bounds, output[1]) + np.testing.assert_allclose(cube.coord(var_name="a").bounds, output[0]) + np.testing.assert_allclose(cube.coord(var_name="b").bounds, output[1]) -DIM_COORD_NB = iris.coords.DimCoord([3.1415], standard_name='latitude') +DIM_COORD_NB = iris.coords.DimCoord([3.1415], standard_name="latitude") CUBE_3 = iris.cube.Cube([5.0], dim_coords_and_dims=[(DIM_COORD_NB, 0)]) COORD_3_DEC = DIM_COORD.copy([3.142], [[1.23, 4.568]]) COORD_5_DEC = DIM_COORD.copy([3.14159], [[1.23, 4.56789]]) @@ -581,13 +641,13 @@ def test_fix_bounds(var_names, output): ] -@pytest.mark.parametrize('cubes_in,decimals,out', TEST_ROUND) +@pytest.mark.parametrize("cubes_in,decimals,out", TEST_ROUND) def test_round_coordinate(cubes_in, decimals, out): """Test rounding of coordinates.""" - kwargs = {} if decimals is None else {'decimals': decimals} + kwargs = {} if decimals is None else {"decimals": decimals} cubes_out = round_coordinates(cubes_in, **kwargs) assert cubes_out is cubes_in - for (idx, cube) in enumerate(cubes_out): + for idx, cube in enumerate(cubes_out): coords = cube.coords(dim_coords=True) if out[idx] is None: assert not coords @@ -598,70 +658,75 @@ def test_round_coordinate(cubes_in, decimals, out): def test_round_coordinates_single_coord(): """Test rounding of specified coordinate.""" coords, bounds = [10.0001], [[9.0001, 11.0001]] - latcoord = iris.coords.DimCoord(coords.copy(), bounds=bounds.copy(), - standard_name='latitude') - loncoord = iris.coords.DimCoord(coords.copy(), bounds=bounds.copy(), - standard_name='longitude') - cube = iris.cube.Cube([[1.0]], standard_name='air_temperature', - dim_coords_and_dims=[(latcoord, 0), (loncoord, 1)]) + latcoord = iris.coords.DimCoord( + coords.copy(), bounds=bounds.copy(), standard_name="latitude" + ) + loncoord = iris.coords.DimCoord( + coords.copy(), bounds=bounds.copy(), standard_name="longitude" + ) + cube = iris.cube.Cube( + [[1.0]], + standard_name="air_temperature", + dim_coords_and_dims=[(latcoord, 0), (loncoord, 1)], + ) cubes = iris.cube.CubeList([cube]) - out = round_coordinates(cubes, decimals=3, coord_names=['latitude']) + out = round_coordinates(cubes, decimals=3, coord_names=["latitude"]) assert out is cubes - assert cubes[0].coord('longitude') is out[0].coord('longitude') - np.testing.assert_allclose(out[0].coord('latitude').points, [10]) - np.testing.assert_allclose(out[0].coord('latitude').bounds, [[9, 11]]) + assert cubes[0].coord("longitude") is out[0].coord("longitude") + np.testing.assert_allclose(out[0].coord("latitude").points, [10]) + np.testing.assert_allclose(out[0].coord("latitude").bounds, [[9, 11]]) def test_fix_ocean_depth_coord(): """Test `fix_ocean_depth_coord`.""" - z_coord = iris.coords.DimCoord(0.0, var_name='alt', - attributes={'positive': 'up'}) - cube = iris.cube.Cube([0.0], var_name='x', - dim_coords_and_dims=[(z_coord, 0)]) + z_coord = iris.coords.DimCoord( + 0.0, var_name="alt", attributes={"positive": "up"} + ) + cube = iris.cube.Cube( + [0.0], var_name="x", dim_coords_and_dims=[(z_coord, 0)] + ) fix_ocean_depth_coord(cube) - depth_coord = cube.coord('depth') - assert depth_coord.standard_name == 'depth' - assert depth_coord.var_name == 'lev' - assert depth_coord.units == 'm' - assert depth_coord.long_name == 'ocean depth coordinate' - assert depth_coord.attributes == {'positive': 'down'} + depth_coord = cube.coord("depth") + assert 
depth_coord.standard_name == "depth" + assert depth_coord.var_name == "lev" + assert depth_coord.units == "m" + assert depth_coord.long_name == "ocean depth coordinate" + assert depth_coord.attributes == {"positive": "down"} @pytest.fixture def time_coord(): """Time coordinate.""" time_coord = AuxCoord( - [15.0, 350.0], - standard_name='time', - units='days since 1850-01-01' + [15.0, 350.0], standard_name="time", units="days since 1850-01-01" ) return time_coord @pytest.mark.parametrize( - 'freq,expected_bounds', + "freq,expected_bounds", [ - ('mon', [[0, 31], [334, 365]]), - ('mo', [[0, 31], [334, 365]]), - ('monC', [[0, 31], [334, 365]]), - ('yr', [[0, 365], [0, 365]]), - ('yrPt', [[0, 365], [0, 365]]), - ('dec', [[-1826, 1826], [-1826, 1826]]), - ('day', [[14.5, 15.5], [349.5, 350.5]]), - ('24hr', [[14.5, 15.5], [349.5, 350.5]]), - ('12hr', [[14.75, 15.25], [349.75, 350.25]]), - ('8hr', [[14.83333333, 15.16666667], [349.83333333, 350.16666667]]), - ('6hr', [[14.875, 15.125], [349.875, 350.125]]), - ('6hrCM', [[14.875, 15.125], [349.875, 350.125]]), - ('4hr', [[14.91666667, 15.08333333], [349.91666667, 350.08333333]]), - ('3hr', [[14.9375, 15.0625], [349.9375, 350.0625]]), - ('3hrPt', [[14.9375, 15.0625], [349.9375, 350.0625]]), - ('2hr', [[14.95833333, 15.04166667], [349.95833333, 350.04166667]]), - ('1hr', [[14.97916666, 15.020833333], [349.97916666, 350.020833333]]), - ('1hrC', [[14.97916666, 15.020833333], [349.97916666, 350.020833333]]), - ('hr', [[14.97916666, 15.020833333], [349.97916666, 350.020833333]]), - ] + ("mon", [[0, 31], [334, 365]]), + ("mo", [[0, 31], [334, 365]]), + ("monC", [[0, 31], [334, 365]]), + ("yr", [[0, 365], [0, 365]]), + ("yrPt", [[0, 365], [0, 365]]), + ("dec", [[-1826, 1826], [-1826, 1826]]), + ("day", [[14.5, 15.5], [349.5, 350.5]]), + ("24hr", [[14.5, 15.5], [349.5, 350.5]]), + ("12hr", [[14.75, 15.25], [349.75, 350.25]]), + ("8hr", [[14.83333333, 15.16666667], [349.83333333, 350.16666667]]), + ("6hr", [[14.875, 15.125], [349.875, 350.125]]), + ("6hrCM", [[14.875, 15.125], [349.875, 350.125]]), + ("4hr", [[14.91666667, 15.08333333], [349.91666667, 350.08333333]]), + ("3hr", [[14.9375, 15.0625], [349.9375, 350.0625]]), + ("3hrPt", [[14.9375, 15.0625], [349.9375, 350.0625]]), + ("2hr", [[14.95833333, 15.04166667], [349.95833333, 350.04166667]]), + ("1hr", [[14.97916666, 15.020833333], [349.97916666, 350.020833333]]), + ("1hrC", [[14.97916666, 15.020833333], [349.97916666, 350.020833333]]), + ("hr", [[14.97916666, 15.020833333], [349.97916666, 350.020833333]]), + ], ) def test_get_time_bounds(time_coord, freq, expected_bounds): """Test ``get_time_bounds`.""" @@ -673,10 +738,10 @@ def test_get_time_bounds_invalid_freq_fail(time_coord): """Test ``get_time_bounds`.""" msg = "Cannot guess time bounds for frequency 'invalid_freq'" with pytest.raises(NotImplementedError, match=msg): - get_time_bounds(time_coord, 'invalid_freq') + get_time_bounds(time_coord, "invalid_freq") -@pytest.mark.parametrize('freq', ['5hr', '7hrPt', '9hrCM', '10hr', '21hrPt']) +@pytest.mark.parametrize("freq", ["5hr", "7hrPt", "9hrCM", "10hr", "21hrPt"]) def test_get_time_bounds_invalid_hr_fail(time_coord, freq): """Test ``get_time_bounds`.""" msg = f"For `n`-hourly data, `n` must be a divisor of 24, got '{freq}'" diff --git a/tests/integration/cmor/test_fix.py b/tests/integration/cmor/test_fix.py index e52dc0fd42..63b4767841 100644 --- a/tests/integration/cmor/test_fix.py +++ b/tests/integration/cmor/test_fix.py @@ -22,8 +22,8 @@ @pytest.fixture(autouse=True) def 
disable_fix_cmor_checker(mocker): """Disable the CMOR checker in fixes (will be default in v2.12).""" - class MockChecker: + class MockChecker: def __init__(self, cube): self._cube = cube @@ -33,7 +33,7 @@ def check_metadata(self): def check_data(self): return self._cube - mock = mocker.patch('esmvalcore.cmor.fix._get_cmor_checker') + mock = mocker.patch("esmvalcore.cmor.fix._get_cmor_checker") mock.return_value = MockChecker @@ -44,58 +44,58 @@ class TestGenericFix: def setup(self, mocker): """Setup tests.""" self.mock_debug = mocker.patch( - 'esmvalcore.cmor._fixes.fix.GenericFix._debug_msg', autospec=True + "esmvalcore.cmor._fixes.fix.GenericFix._debug_msg", autospec=True ) self.mock_warning = mocker.patch( - 'esmvalcore.cmor._fixes.fix.GenericFix._warning_msg', + "esmvalcore.cmor._fixes.fix.GenericFix._warning_msg", autospec=True, ) # Create sample data with CMOR errors time_coord = DimCoord( [15, 45], - standard_name='time', - var_name='time', - units=Unit('days since 1851-01-01', calendar='noleap'), - attributes={'test': 1, 'time_origin': 'will_be_removed'}, + standard_name="time", + var_name="time", + units=Unit("days since 1851-01-01", calendar="noleap"), + attributes={"test": 1, "time_origin": "will_be_removed"}, ) plev_coord_rev = DimCoord( [250, 500, 850], - standard_name='air_pressure', - var_name='plev', - units='hPa', + standard_name="air_pressure", + var_name="plev", + units="hPa", ) lev_coord_hybrid_height = DimCoord( [1.0, 0.5, 0.0], - standard_name='atmosphere_hybrid_height_coordinate', - var_name='lev', - units='m', + standard_name="atmosphere_hybrid_height_coordinate", + var_name="lev", + units="m", ) lev_coord_hybrid_pressure = DimCoord( [0.0, 0.5, 1.0], - standard_name='atmosphere_hybrid_sigma_pressure_coordinate', - var_name='lev', - units='1', + standard_name="atmosphere_hybrid_sigma_pressure_coordinate", + var_name="lev", + units="1", ) ap_coord = AuxCoord( [0.0, 0.0, 0.0], - var_name='ap', - units='Pa', + var_name="ap", + units="Pa", ) b_coord = AuxCoord( [0.0, 0.5, 1.0], - var_name='b', - units='1', + var_name="b", + units="1", ) ps_coord = AuxCoord( np.full((2, 2, 2), 10), - var_name='ps', - units='Pa', + var_name="ps", + units="Pa", ) orog_coord = AuxCoord( np.full((2, 2), 10), - var_name='orog', - units='m', + var_name="orog", + units="m", ) hybrid_height_factory = HybridHeightFactory( delta=lev_coord_hybrid_height, @@ -109,46 +109,46 @@ def setup(self, mocker): ) lat_coord = DimCoord( [0, 10], - standard_name='latitude', - var_name='lat', - units='degrees', + standard_name="latitude", + var_name="lat", + units="degrees", ) lat_coord_rev = DimCoord( [10, -10], - standard_name='latitude', - var_name='lat', - units='degrees', + standard_name="latitude", + var_name="lat", + units="degrees", ) lat_coord_2d = AuxCoord( [[10, -10]], - standard_name='latitude', - var_name='wrong_name', - units='degrees', + standard_name="latitude", + var_name="wrong_name", + units="degrees", ) lon_coord = DimCoord( [-180, 0], - standard_name='longitude', - var_name='lon', - units='degrees', + standard_name="longitude", + var_name="lon", + units="degrees", ) lon_coord_unstructured = AuxCoord( [-180, 0], bounds=[[-200, -180, -160], [-20, 0, 20]], - standard_name='longitude', - var_name='lon', - units='degrees', + standard_name="longitude", + var_name="lon", + units="degrees", ) lon_coord_2d = AuxCoord( [[370, 380]], - standard_name='longitude', - var_name='wrong_name', - units='degrees', + standard_name="longitude", + var_name="wrong_name", + units="degrees", ) height2m_coord = 
AuxCoord( 2.0, - standard_name='height', - var_name='height', - units='m', + standard_name="height", + var_name="height", + units="m", ) coord_spec_3d = [ @@ -158,10 +158,10 @@ def setup(self, mocker): ] self.cube_3d = Cube( da.arange(2 * 2 * 2, dtype=np.float32).reshape(2, 2, 2), - standard_name='air_pressure', - long_name='Air Pressure', - var_name='tas', - units='celsius', + standard_name="air_pressure", + long_name="Air Pressure", + var_name="tas", + units="celsius", dim_coords_and_dims=coord_spec_3d, aux_coords_and_dims=[(height2m_coord, ())], attributes={}, @@ -175,10 +175,10 @@ def setup(self, mocker): ] cube_4d = Cube( da.arange(2 * 3 * 2 * 2, dtype=np.float32).reshape(2, 3, 2, 2), - standard_name='air_pressure', - long_name='Air Pressure', - var_name='ta', - units='celsius', + standard_name="air_pressure", + long_name="Air Pressure", + var_name="ta", + units="celsius", dim_coords_and_dims=coord_spec_4d, attributes={}, ) @@ -196,10 +196,10 @@ def setup(self, mocker): ] cube_hybrid_height_4d = Cube( da.arange(2 * 3 * 2 * 2, dtype=np.float32).reshape(2, 3, 2, 2), - standard_name='air_pressure', - long_name='Air Pressure', - var_name='ta', - units='celsius', + standard_name="air_pressure", + long_name="Air Pressure", + var_name="ta", + units="celsius", dim_coords_and_dims=coord_spec_hybrid_height_4d, aux_coords_and_dims=aux_coord_spec_hybrid_height_4d, aux_factories=[hybrid_height_factory], @@ -219,10 +219,10 @@ def setup(self, mocker): ] cube_hybrid_pressure_4d = Cube( da.arange(2 * 3 * 2 * 2, dtype=np.float32).reshape(2, 3, 2, 2), - standard_name='air_pressure', - long_name='Air Pressure', - var_name='ta', - units='celsius', + standard_name="air_pressure", + long_name="Air Pressure", + var_name="ta", + units="celsius", dim_coords_and_dims=coord_spec_hybrid_pressure_4d, aux_coords_and_dims=aux_coord_spec_hybrid_pressure_4d, aux_factories=[hybrid_pressure_factory], @@ -237,10 +237,10 @@ def setup(self, mocker): ] cube_unstructured = Cube( da.zeros((2, 2)), - standard_name='air_pressure', - long_name='Air Pressure', - var_name='tas', - units='celsius', + standard_name="air_pressure", + long_name="Air Pressure", + var_name="tas", + units="celsius", dim_coords_and_dims=[(time_coord, 0)], aux_coords_and_dims=coord_spec_unstrucutred, attributes={}, @@ -254,10 +254,10 @@ def setup(self, mocker): ] cube_2d_latlon = Cube( da.zeros((2, 1, 2)), - standard_name='air_pressure', - long_name='Air Pressure', - var_name='tas', - units='celsius', + standard_name="air_pressure", + long_name="Air Pressure", + var_name="tas", + units="celsius", dim_coords_and_dims=[(time_coord, 0)], aux_coords_and_dims=coord_spec_2d, attributes={}, @@ -266,51 +266,52 @@ def setup(self, mocker): def assert_time_metadata(self, cube): """Assert time metadata is correct.""" - assert cube.coord('time').standard_name == 'time' - assert cube.coord('time').var_name == 'time' - assert cube.coord('time').units == Unit( - 'days since 1850-01-01', calendar='365_day' + assert cube.coord("time").standard_name == "time" + assert cube.coord("time").var_name == "time" + assert cube.coord("time").units == Unit( + "days since 1850-01-01", calendar="365_day" ) - assert cube.coord('time').attributes == {'test': 1} + assert cube.coord("time").attributes == {"test": 1} def assert_time_data(self, cube, time_has_bounds=True): """Assert time data is correct.""" - np.testing.assert_allclose(cube.coord('time').points, [380, 410]) + np.testing.assert_allclose(cube.coord("time").points, [380, 410]) if time_has_bounds: np.testing.assert_allclose( - 
cube.coord('time').bounds, [[365, 396], [396, 424]], + cube.coord("time").bounds, + [[365, 396], [396, 424]], ) else: - assert cube.coord('time').bounds is None + assert cube.coord("time").bounds is None def assert_plev_metadata(self, cube): """Assert plev metadata is correct.""" - assert cube.coord('air_pressure').standard_name == 'air_pressure' - assert cube.coord('air_pressure').var_name == 'plev' - assert cube.coord('air_pressure').units == 'Pa' - assert cube.coord('air_pressure').attributes == {} + assert cube.coord("air_pressure").standard_name == "air_pressure" + assert cube.coord("air_pressure").var_name == "plev" + assert cube.coord("air_pressure").units == "Pa" + assert cube.coord("air_pressure").attributes == {} def assert_lat_metadata(self, cube): """Assert lat metadata is correct.""" - assert cube.coord('latitude').standard_name == 'latitude' - assert cube.coord('latitude').var_name == 'lat' - assert str(cube.coord('latitude').units) == 'degrees_north' - assert cube.coord('latitude').attributes == {} + assert cube.coord("latitude").standard_name == "latitude" + assert cube.coord("latitude").var_name == "lat" + assert str(cube.coord("latitude").units) == "degrees_north" + assert cube.coord("latitude").attributes == {} def assert_lon_metadata(self, cube): """Assert lon metadata is correct.""" - assert cube.coord('longitude').standard_name == 'longitude' - assert cube.coord('longitude').var_name == 'lon' - assert str(cube.coord('longitude').units) == 'degrees_east' - assert cube.coord('longitude').attributes == {} + assert cube.coord("longitude").standard_name == "longitude" + assert cube.coord("longitude").var_name == "lon" + assert str(cube.coord("longitude").units) == "degrees_east" + assert cube.coord("longitude").attributes == {} def assert_ta_metadata(self, cube): """Assert ta metadata is correct.""" # Variable metadata - assert cube.standard_name == 'air_temperature' - assert cube.long_name == 'Air Temperature' - assert cube.var_name == 'ta' - assert cube.units == 'K' + assert cube.standard_name == "air_temperature" + assert cube.long_name == "Air Temperature" + assert cube.var_name == "ta" + assert cube.units == "K" assert cube.attributes == {} def assert_ta_data(self, cube, time_has_bounds=True): @@ -318,18 +319,18 @@ def assert_ta_data(self, cube, time_has_bounds=True): assert cube.has_lazy_data() np.testing.assert_allclose( cube.data, - [[[[284.15, 283.15], - [282.15, 281.15]], - [[280.15, 279.15], - [278.15, 277.15]], - [[276.15, 275.15], - [274.15, 273.15]]], - [[[296.15, 295.15], - [294.15, 293.15]], - [[292.15, 291.15], - [290.15, 289.15]], - [[288.15, 287.15], - [286.15, 285.15]]]], + [ + [ + [[284.15, 283.15], [282.15, 281.15]], + [[280.15, 279.15], [278.15, 277.15]], + [[276.15, 275.15], [274.15, 273.15]], + ], + [ + [[296.15, 295.15], [294.15, 293.15]], + [[292.15, 291.15], [290.15, 289.15]], + [[288.15, 287.15], [286.15, 285.15]], + ], + ], ) # Time @@ -337,50 +338,50 @@ def assert_ta_data(self, cube, time_has_bounds=True): # Air pressure np.testing.assert_allclose( - cube.coord('air_pressure').points, + cube.coord("air_pressure").points, [85000.0, 50000.0, 25000.0], atol=1e-8, ) - assert cube.coord('air_pressure').bounds is None + assert cube.coord("air_pressure").bounds is None # Latitude np.testing.assert_allclose( - cube.coord('latitude').points, [-10.0, 10.0] + cube.coord("latitude").points, [-10.0, 10.0] ) np.testing.assert_allclose( - cube.coord('latitude').bounds, [[-20.0, 0.0], [0.0, 20.0]] + cube.coord("latitude").bounds, [[-20.0, 0.0], [0.0, 20.0]] 
) # Longitude np.testing.assert_allclose( - cube.coord('longitude').points, [0.0, 180.0] + cube.coord("longitude").points, [0.0, 180.0] ) np.testing.assert_allclose( - cube.coord('longitude').bounds, [[-90.0, 90.0], [90.0, 270.0]] + cube.coord("longitude").bounds, [[-90.0, 90.0], [90.0, 270.0]] ) def assert_tas_metadata(self, cube): """Assert tas metadata is correct.""" - assert cube.standard_name == 'air_temperature' - assert cube.long_name == 'Near-Surface Air Temperature' - assert cube.var_name == 'tas' - assert cube.units == 'K' + assert cube.standard_name == "air_temperature" + assert cube.long_name == "Near-Surface Air Temperature" + assert cube.var_name == "tas" + assert cube.units == "K" assert cube.attributes == {} # Height 2m coordinate - assert cube.coord('height').standard_name == 'height' - assert cube.coord('height').var_name == 'height' - assert cube.coord('height').units == 'm' - assert cube.coord('height').attributes == {} - np.testing.assert_allclose(cube.coord('height').points, 2.0) - assert cube.coord('height').bounds is None + assert cube.coord("height").standard_name == "height" + assert cube.coord("height").var_name == "height" + assert cube.coord("height").units == "m" + assert cube.coord("height").attributes == {} + np.testing.assert_allclose(cube.coord("height").points, 2.0) + assert cube.coord("height").bounds is None def test_fix_metadata_amon_ta(self): """Test ``fix_metadata``.""" - short_name = 'ta' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'Amon' + short_name = "ta" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "Amon" fixed_cubes = fix_metadata( self.cubes_4d, @@ -407,13 +408,13 @@ def test_fix_metadata_amon_ta(self): def test_fix_metadata_amon_ta_wrong_lat_units(self): """Test ``fix_metadata``.""" - short_name = 'ta' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'Amon' + short_name = "ta" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "Amon" # Change units of latitude - self.cubes_4d[0].coord('latitude').units = 'K' + self.cubes_4d[0].coord("latitude").units = "K" fixed_cubes = fix_metadata( self.cubes_4d, @@ -433,7 +434,7 @@ def test_fix_metadata_amon_ta_wrong_lat_units(self): self.assert_ta_data(fixed_cube) # CMOR check will fail because of wrong latitude units - assert fixed_cube.coord('latitude').units == 'K' + assert fixed_cube.coord("latitude").units == "K" with pytest.raises(CMORCheckError): cmor_check_metadata(fixed_cube, project, mip, short_name) @@ -445,10 +446,10 @@ def test_fix_metadata_amon_ta_wrong_lat_units(self): def test_fix_metadata_cfmon_ta_hybrid_height(self): """Test ``fix_metadata`` with hybrid height coordinate.""" - short_name = 'ta' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'CFmon' + short_name = "ta" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "CFmon" fixed_cubes = fix_metadata( self.cubes_hybrid_height_4d, @@ -461,13 +462,13 @@ def test_fix_metadata_cfmon_ta_hybrid_height(self): assert len(fixed_cubes) == 1 fixed_cube = fixed_cubes[0] - hybrid_coord = fixed_cube.coord('atmosphere_hybrid_height_coordinate') - assert hybrid_coord.var_name == 'lev' + hybrid_coord = fixed_cube.coord("atmosphere_hybrid_height_coordinate") + assert hybrid_coord.var_name == "lev" assert hybrid_coord.long_name is None - assert hybrid_coord.units == 'm' + assert hybrid_coord.units == "m" np.testing.assert_allclose(hybrid_coord.points, [0.0, 0.5, 1.0]) - assert fixed_cube.coords('altitude') 
- assert fixed_cube.coord_dims('altitude') == (1, 2, 3) + assert fixed_cube.coords("altitude") + assert fixed_cube.coord_dims("altitude") == (1, 2, 3) self.assert_ta_metadata(fixed_cube) self.assert_time_metadata(fixed_cube) @@ -481,10 +482,10 @@ def test_fix_metadata_cfmon_ta_hybrid_height(self): def test_fix_metadata_cfmon_ta_hybrid_pressure(self): """Test ``fix_metadata`` with hybrid pressure coordinate.""" - short_name = 'ta' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'CFmon' + short_name = "ta" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "CFmon" fixed_cubes = fix_metadata( self.cubes_hybrid_pressure_4d, @@ -498,14 +499,14 @@ def test_fix_metadata_cfmon_ta_hybrid_pressure(self): fixed_cube = fixed_cubes[0] hybrid_coord = fixed_cube.coord( - 'atmosphere_hybrid_sigma_pressure_coordinate' + "atmosphere_hybrid_sigma_pressure_coordinate" ) - assert hybrid_coord.var_name == 'lev' + assert hybrid_coord.var_name == "lev" assert hybrid_coord.long_name is None - assert hybrid_coord.units == '1' + assert hybrid_coord.units == "1" np.testing.assert_allclose(hybrid_coord.points, [1.0, 0.5, 0.0]) - assert fixed_cube.coords('air_pressure') - assert fixed_cube.coord_dims('air_pressure') == (0, 1, 2, 3) + assert fixed_cube.coords("air_pressure") + assert fixed_cube.coord_dims("air_pressure") == (0, 1, 2, 3) self.assert_ta_metadata(fixed_cube) self.assert_time_metadata(fixed_cube) @@ -519,10 +520,10 @@ def test_fix_metadata_cfmon_ta_hybrid_pressure(self): def test_fix_metadata_cfmon_ta_alternative(self): """Test ``fix_metadata`` with alternative generic level coordinate.""" - short_name = 'ta' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'CFmon' + short_name = "ta" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "CFmon" fixed_cubes = fix_metadata( self.cubes_4d, @@ -549,13 +550,13 @@ def test_fix_metadata_cfmon_ta_alternative(self): def test_fix_metadata_cfmon_ta_no_alternative(self, mocker): """Test ``fix_metadata`` with no alternative coordinate.""" - short_name = 'ta' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'CFmon' + short_name = "ta" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "CFmon" # Remove alternative coordinate - self.cubes_4d[0].remove_coord('air_pressure') + self.cubes_4d[0].remove_coord("air_pressure") fixed_cubes = fix_metadata( self.cubes_4d, @@ -575,7 +576,7 @@ def test_fix_metadata_cfmon_ta_no_alternative(self, mocker): self.assert_lon_metadata(fixed_cube) # CMOR check will fail because of missing alevel coordinate - assert not fixed_cube.coords('air_pressure') + assert not fixed_cube.coords("air_pressure") with pytest.raises(CMORCheckError): cmor_check_metadata(fixed_cube, project, mip, short_name) @@ -584,14 +585,16 @@ def test_fix_metadata_cfmon_ta_no_alternative(self, mocker): def test_fix_metadata_e1hr_ta(self): """Test ``fix_metadata`` with plev3.""" - short_name = 'ta' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'E1hr' + short_name = "ta" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "E1hr" # Slightly adapt plev to test fixing of requested levels - self.cubes_4d[0].coord('air_pressure').points = [ - 250.0 + 9e-8, 500.0 + 9e-8, 850.0 + 9e-8 + self.cubes_4d[0].coord("air_pressure").points = [ + 250.0 + 9e-8, + 500.0 + 9e-8, + 850.0 + 9e-8, ] fixed_cubes = fix_metadata( @@ -600,7 +603,7 @@ def test_fix_metadata_e1hr_ta(self): project, dataset, mip, - 
frequency='mon', + frequency="mon", ) assert len(fixed_cubes) == 1 @@ -614,7 +617,7 @@ def test_fix_metadata_e1hr_ta(self): self.assert_ta_data(fixed_cube, time_has_bounds=False) cmor_check_metadata( - fixed_cube, project, mip, short_name, frequency='mon' + fixed_cube, project, mip, short_name, frequency="mon" ) assert self.mock_debug.call_count == 4 @@ -622,10 +625,10 @@ def test_fix_metadata_e1hr_ta(self): def test_fix_metadata_amon_tas_unstructured(self): """Test ``fix_metadata`` with unstructured grid.""" - short_name = 'tas' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'Amon' + short_name = "tas" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "Amon" fixed_cubes = fix_metadata( self.cubes_unstructured, @@ -645,16 +648,16 @@ def test_fix_metadata_amon_tas_unstructured(self): # Latitude np.testing.assert_allclose( - fixed_cube.coord('latitude').points, [10.0, -10.0] + fixed_cube.coord("latitude").points, [10.0, -10.0] ) - assert fixed_cube.coord('latitude').bounds is None + assert fixed_cube.coord("latitude").bounds is None # Longitude np.testing.assert_allclose( - fixed_cube.coord('longitude').points, [180.0, 0.0] + fixed_cube.coord("longitude").points, [180.0, 0.0] ) np.testing.assert_allclose( - fixed_cube.coord('longitude').bounds, + fixed_cube.coord("longitude").bounds, [[160.0, 180.0, 200.0], [340.0, 0.0, 20.0]], ) @@ -671,10 +674,10 @@ def test_fix_metadata_amon_tas_unstructured(self): def test_fix_metadata_amon_tas_2d_latlon(self): """Test ``fix_metadata`` with 2D latitude/longitude.""" - short_name = 'tas' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'Amon' + short_name = "tas" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "Amon" fixed_cubes = fix_metadata( self.cubes_2d_latlon, @@ -694,15 +697,15 @@ def test_fix_metadata_amon_tas_2d_latlon(self): # Latitude np.testing.assert_allclose( - fixed_cube.coord('latitude').points, [[10.0, -10.0]] + fixed_cube.coord("latitude").points, [[10.0, -10.0]] ) - assert fixed_cube.coord('latitude').bounds is None + assert fixed_cube.coord("latitude").bounds is None # Longitude np.testing.assert_allclose( - fixed_cube.coord('longitude').points, [[10.0, 20.0]] + fixed_cube.coord("longitude").points, [[10.0, 20.0]] ) - assert fixed_cube.coord('longitude').bounds is None + assert fixed_cube.coord("longitude").bounds is None # Variable data assert fixed_cube.has_lazy_data() @@ -717,17 +720,17 @@ def test_fix_metadata_amon_tas_2d_latlon(self): def test_fix_metadata_amon_tas_invalid_time_units(self): """Test ``fix_metadata`` with invalid time units.""" - short_name = 'tas' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'Amon' + short_name = "tas" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "Amon" - self.cubes_2d_latlon[0].remove_coord('time') + self.cubes_2d_latlon[0].remove_coord("time") aux_time_coord = AuxCoord( [1, 2], - standard_name='time', - var_name='time', - units='kg', + standard_name="time", + var_name="time", + units="kg", ) self.cubes_2d_latlon[0].add_aux_coord(aux_time_coord, 0) @@ -745,7 +748,7 @@ def test_fix_metadata_amon_tas_invalid_time_units(self): self.assert_lat_metadata(fixed_cube) self.assert_lon_metadata(fixed_cube) - assert fixed_cube.coord('time').units == 'kg' + assert fixed_cube.coord("time").units == "kg" # CMOR checks fail because calendar is not defined with pytest.raises(ValueError): @@ -756,15 +759,15 @@ def 
test_fix_metadata_amon_tas_invalid_time_units(self): def test_fix_metadata_amon_tas_invalid_time_attrs(self): """Test ``fix_metadata`` with invalid time attributes.""" - short_name = 'tas' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'Amon' + short_name = "tas" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "Amon" self.cubes_2d_latlon[0].attributes = { - 'parent_time_units': 'this is certainly not a unit', - 'branch_time_in_parent': 'BRANCH TIME IN PARENT', - 'branch_time_in_child': 'BRANCH TIME IN CHILD', + "parent_time_units": "this is certainly not a unit", + "branch_time_in_parent": "BRANCH TIME IN PARENT", + "branch_time_in_child": "BRANCH TIME IN CHILD", } fixed_cubes = fix_metadata( @@ -783,9 +786,9 @@ def test_fix_metadata_amon_tas_invalid_time_attrs(self): self.assert_lon_metadata(fixed_cube) assert fixed_cube.attributes == { - 'parent_time_units': 'this is certainly not a unit', - 'branch_time_in_parent': 'BRANCH TIME IN PARENT', - 'branch_time_in_child': 'BRANCH TIME IN CHILD', + "parent_time_units": "this is certainly not a unit", + "branch_time_in_parent": "BRANCH TIME IN PARENT", + "branch_time_in_child": "BRANCH TIME IN CHILD", } cmor_check_metadata(fixed_cube, project, mip, short_name) @@ -795,21 +798,21 @@ def test_fix_metadata_amon_tas_invalid_time_attrs(self): def test_fix_metadata_oimon_ssi(self): """Test ``fix_metadata`` with psu units.""" - short_name = 'ssi' - project = 'CMIP5' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'OImon' + short_name = "ssi" + project = "CMIP5" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "OImon" - self.cubes_2d_latlon[0].var_name = 'ssi' + self.cubes_2d_latlon[0].var_name = "ssi" self.cubes_2d_latlon[0].attributes = { - 'invalid_units': 'psu', - 'parent_time_units': 'no parent', + "invalid_units": "psu", + "parent_time_units": "no parent", } # Also test 2D longitude that already has bounds - self.cubes_2d_latlon[0].coord('latitude').var_name = 'lat' - self.cubes_2d_latlon[0].coord('longitude').var_name = 'lon' - self.cubes_2d_latlon[0].coord('longitude').bounds = [ + self.cubes_2d_latlon[0].coord("latitude").var_name = "lat" + self.cubes_2d_latlon[0].coord("longitude").var_name = "lon" + self.cubes_2d_latlon[0].coord("longitude").bounds = [ [[365.0, 375.0], [375.0, 400.0]] ] @@ -825,11 +828,11 @@ def test_fix_metadata_oimon_ssi(self): fixed_cube = fixed_cubes[0] # Variable metadata - assert fixed_cube.standard_name == 'sea_ice_salinity' - assert fixed_cube.long_name == 'Sea Ice Salinity' - assert fixed_cube.var_name == 'ssi' - assert fixed_cube.units == '1' - assert fixed_cube.attributes == {'parent_time_units': 'no parent'} + assert fixed_cube.standard_name == "sea_ice_salinity" + assert fixed_cube.long_name == "Sea Ice Salinity" + assert fixed_cube.var_name == "ssi" + assert fixed_cube.units == "1" + assert fixed_cube.attributes == {"parent_time_units": "no parent"} # Coordinates self.assert_time_metadata(fixed_cube) @@ -838,23 +841,24 @@ def test_fix_metadata_oimon_ssi(self): # Latitude np.testing.assert_allclose( - fixed_cube.coord('latitude').points, [[10.0, -10.0]] + fixed_cube.coord("latitude").points, [[10.0, -10.0]] ) - assert fixed_cube.coord('latitude').bounds is None + assert fixed_cube.coord("latitude").bounds is None # Longitude np.testing.assert_allclose( - fixed_cube.coord('longitude').points, [[10.0, 20.0]] + fixed_cube.coord("longitude").points, [[10.0, 20.0]] ) np.testing.assert_allclose( - fixed_cube.coord('longitude').bounds, + 
fixed_cube.coord("longitude").bounds, [[[5.0, 15.0], [15.0, 40.0]]], ) # Variable data assert fixed_cube.has_lazy_data() np.testing.assert_allclose( - fixed_cube.data, [[[0.0, 0.0]], [[0.0, 0.0]]], + fixed_cube.data, + [[[0.0, 0.0]], [[0.0, 0.0]]], ) cmor_check_metadata(fixed_cube, project, mip, short_name) @@ -864,10 +868,10 @@ def test_fix_metadata_oimon_ssi(self): def test_fix_data_amon_tas(self): """Test ``fix_data``.""" - short_name = 'tas' - project = 'CMIP6' - dataset = '__MODEL_WITH_NO_EXPLICIT_FIX__' - mip = 'Amon' + short_name = "tas" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "Amon" fixed_cube = fix_data( self.cube_3d, @@ -889,10 +893,10 @@ def test_deprecate_check_level_fix_metadata(self): with pytest.warns(ESMValCoreDeprecationWarning): fix_metadata( self.cubes_4d, - 'ta', - 'CMIP6', - 'MODEL', - 'Amon', + "ta", + "CMIP6", + "MODEL", + "Amon", check_level=CheckLevels.RELAXED, ) @@ -901,9 +905,9 @@ def test_deprecate_check_level_fix_data(self): with pytest.warns(ESMValCoreDeprecationWarning): fix_metadata( self.cubes_4d, - 'ta', - 'CMIP6', - 'MODEL', - 'Amon', + "ta", + "CMIP6", + "MODEL", + "Amon", check_level=CheckLevels.RELAXED, ) diff --git a/tests/integration/cmor/test_read_cmor_tables.py b/tests/integration/cmor/test_read_cmor_tables.py index a77b9b2946..70ae6f6dce 100644 --- a/tests/integration/cmor/test_read_cmor_tables.py +++ b/tests/integration/cmor/test_read_cmor_tables.py @@ -4,9 +4,8 @@ import pytest import yaml -from esmvalcore.cmor.table import CMOR_TABLES +from esmvalcore.cmor.table import CMOR_TABLES, read_cmor_tables from esmvalcore.cmor.table import __file__ as root -from esmvalcore.cmor.table import read_cmor_tables def test_read_cmor_tables_raiser(): @@ -19,35 +18,36 @@ def test_read_cmor_tables_raiser(): def test_read_cmor_tables(): """Test that the function `read_cmor_tables` loads the tables correctly.""" - table_path = Path(root).parent / 'tables' + table_path = Path(root).parent / "tables" - for project in 'CMIP5', 'CMIP6': + for project in "CMIP5", "CMIP6": table = CMOR_TABLES[project] - assert Path( - table._cmor_folder) == table_path / project.lower() / 'Tables' + assert ( + Path(table._cmor_folder) == table_path / project.lower() / "Tables" + ) assert table.strict is True - project = 'OBS' + project = "OBS" table = CMOR_TABLES[project] - assert Path(table._cmor_folder) == table_path / 'cmip5' / 'Tables' + assert Path(table._cmor_folder) == table_path / "cmip5" / "Tables" assert table.strict is False - project = 'OBS6' + project = "OBS6" table = CMOR_TABLES[project] - assert Path(table._cmor_folder) == table_path / 'cmip6' / 'Tables' + assert Path(table._cmor_folder) == table_path / "cmip6" / "Tables" assert table.strict is False - project = 'obs4MIPs' + project = "obs4MIPs" table = CMOR_TABLES[project] - assert Path(table._cmor_folder) == table_path / 'obs4mips' / 'Tables' + assert Path(table._cmor_folder) == table_path / "obs4mips" / "Tables" assert table.strict is False - project = 'custom' + project = "custom" table = CMOR_TABLES[project] - assert Path(table._cmor_folder) == table_path / 'custom' + assert Path(table._cmor_folder) == table_path / "custom" assert table._user_table_folder is None assert table.coords - assert table.tables['custom'] + assert table.tables["custom"] CMOR_NEWVAR_ENTRY = dedent( @@ -127,50 +127,49 @@ def test_read_cmor_tables(): def test_read_custom_cmor_tables(tmp_path): """Test reading of custom CMOR tables.""" - (tmp_path / 'CMOR_newvarfortesting.dat').write_text(CMOR_NEWVAR_ENTRY) - 
(tmp_path / 'CMOR_netcre.dat').write_text(CMOR_NETCRE_ENTRY) - (tmp_path / 'CMOR_coordinates.dat').write_text(CMOR_NEWCOORD_ENTRY) + (tmp_path / "CMOR_newvarfortesting.dat").write_text(CMOR_NEWVAR_ENTRY) + (tmp_path / "CMOR_netcre.dat").write_text(CMOR_NETCRE_ENTRY) + (tmp_path / "CMOR_coordinates.dat").write_text(CMOR_NEWCOORD_ENTRY) custom_cfg_developer = { - 'custom': {'cmor_path': str(tmp_path)}, - 'CMIP6': { - 'cmor_strict': True, - 'input_dir': {'default': '/'}, - 'input_file': '*.nc', - 'output_file': 'out.nc', - 'cmor_type': 'CMIP6', + "custom": {"cmor_path": str(tmp_path)}, + "CMIP6": { + "cmor_strict": True, + "input_dir": {"default": "/"}, + "input_file": "*.nc", + "output_file": "out.nc", + "cmor_type": "CMIP6", }, } - cfg_file = tmp_path / 'config-developer.yml' - with cfg_file.open('w', encoding='utf-8') as file: + cfg_file = tmp_path / "config-developer.yml" + with cfg_file.open("w", encoding="utf-8") as file: yaml.safe_dump(custom_cfg_developer, file) read_cmor_tables(cfg_file) assert len(CMOR_TABLES) == 2 - assert 'CMIP6' in CMOR_TABLES - assert 'custom' in CMOR_TABLES + assert "CMIP6" in CMOR_TABLES + assert "custom" in CMOR_TABLES - custom_table = CMOR_TABLES['custom'] - assert ( - custom_table._cmor_folder == - str(Path(root).parent / 'tables' / 'custom') + custom_table = CMOR_TABLES["custom"] + assert custom_table._cmor_folder == str( + Path(root).parent / "tables" / "custom" ) assert custom_table._user_table_folder == str(tmp_path) # Make sure that default tables have been read - assert 'alb' in custom_table.tables['custom'] - assert 'latitude' in custom_table.coords + assert "alb" in custom_table.tables["custom"] + assert "latitude" in custom_table.coords # Make sure that custom tables have been read - assert 'newvarfortesting' in custom_table.tables['custom'] - assert 'newcoordfortesting' in custom_table.coords - netcre = custom_table.get_variable('custom', 'netcre') - assert netcre.standard_name == 'air_temperature' - assert netcre.units == 'K' - assert netcre.long_name == 'This is New' - - cmip6_table = CMOR_TABLES['CMIP6'] + assert "newvarfortesting" in custom_table.tables["custom"] + assert "newcoordfortesting" in custom_table.coords + netcre = custom_table.get_variable("custom", "netcre") + assert netcre.standard_name == "air_temperature" + assert netcre.units == "K" + assert netcre.long_name == "This is New" + + cmip6_table = CMOR_TABLES["CMIP6"] assert cmip6_table.default is custom_table # Restore default tables diff --git a/tests/integration/cmor/test_table.py b/tests/integration/cmor/test_table.py index 385869bc5f..3c75631459 100644 --- a/tests/integration/cmor/test_table.py +++ b/tests/integration/cmor/test_table.py @@ -18,66 +18,69 @@ def test_update_cmor_facets(): facets = { - 'project': 'CMIP6', - 'mip': 'Amon', - 'short_name': 'tas', + "project": "CMIP6", + "mip": "Amon", + "short_name": "tas", } _update_cmor_facets(facets) expected = { - 'project': 'CMIP6', - 'mip': 'Amon', - 'short_name': 'tas', - 'original_short_name': 'tas', - 'standard_name': 'air_temperature', - 'long_name': 'Near-Surface Air Temperature', - 'units': 'K', - 'modeling_realm': ['atmos'], - 'frequency': 'mon', + "project": "CMIP6", + "mip": "Amon", + "short_name": "tas", + "original_short_name": "tas", + "standard_name": "air_temperature", + "long_name": "Near-Surface Air Temperature", + "units": "K", + "modeling_realm": ["atmos"], + "frequency": "mon", } assert facets == expected def test_update_cmor_facets_facet_not_in_table(mocker): facets = { - 'project': 'CMIP6', - 'mip': 
'Amon', - 'short_name': 'tas', + "project": "CMIP6", + "mip": "Amon", + "short_name": "tas", } mocker.patch.object( esmvalcore.cmor.table, - 'getattr', + "getattr", create_autospec=True, return_value=None, ) _update_cmor_facets(facets) expected = { - 'project': 'CMIP6', - 'mip': 'Amon', - 'short_name': 'tas', - 'original_short_name': 'tas', + "project": "CMIP6", + "mip": "Amon", + "short_name": "tas", + "original_short_name": "tas", } assert facets == expected class TestCMIP6Info(unittest.TestCase): """Test for the CMIP6 info class.""" + @classmethod def setUpClass(cls): """Set up tests. We read CMIP6Info once to keep tests times manageable """ - cls.variables_info = CMIP6Info('cmip6', - default=CustomInfo(), - strict=True, - alt_names=[ - ['sic', 'siconc'], - ['tro3', 'o3'], - ]) + cls.variables_info = CMIP6Info( + "cmip6", + default=CustomInfo(), + strict=True, + alt_names=[ + ["sic", "siconc"], + ["tro3", "o3"], + ], + ) def setUp(self): self.variables_info.strict = True @@ -85,87 +88,91 @@ def setUp(self): def test_custom_tables_location(self): """Test constructor with custom tables location.""" cmor_path = os.path.dirname(os.path.realpath(esmvalcore.cmor.__file__)) - cmor_tables_path = os.path.join(cmor_path, 'tables', 'cmip6') + cmor_tables_path = os.path.join(cmor_path, "tables", "cmip6") cmor_tables_path = os.path.abspath(cmor_tables_path) CMIP6Info(cmor_tables_path, default=None, strict=False) def test_get_table_frequency(self): """Test get table frequency.""" self.assertEqual( - self.variables_info.get_table('Amon').frequency, 'mon') - self.assertEqual(self.variables_info.get_table('day').frequency, 'day') + self.variables_info.get_table("Amon").frequency, "mon" + ) + self.assertEqual(self.variables_info.get_table("day").frequency, "day") def test_get_variable_tas(self): """Get tas variable.""" - var = self.variables_info.get_variable('Amon', 'tas') - self.assertEqual(var.short_name, 'tas') + var = self.variables_info.get_variable("Amon", "tas") + self.assertEqual(var.short_name, "tas") def test_get_variable_from_alt_names(self): """Get a variable from a known alt_names.""" - var = self.variables_info.get_variable('SImon', 'sic') - self.assertEqual(var.short_name, 'siconc') + var = self.variables_info.get_variable("SImon", "sic") + self.assertEqual(var.short_name, "siconc") def test_get_variable_derived(self): """Test that derived variable are looked up from other MIP tables.""" - var = self.variables_info.get_variable('3hr', 'sfcWind', derived=True) - self.assertEqual(var.short_name, 'sfcWind') + var = self.variables_info.get_variable("3hr", "sfcWind", derived=True) + self.assertEqual(var.short_name, "sfcWind") def test_get_variable_from_custom(self): """Get a variable from default.""" self.variables_info.strict = False - var = self.variables_info.get_variable('Amon', 'swcre') - self.assertEqual(var.short_name, 'swcre') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable("Amon", "swcre") + self.assertEqual(var.short_name, "swcre") + self.assertEqual(var.frequency, "mon") - var = self.variables_info.get_variable('day', 'swcre') - self.assertEqual(var.short_name, 'swcre') - self.assertEqual(var.frequency, 'day') + var = self.variables_info.get_variable("day", "swcre") + self.assertEqual(var.short_name, "swcre") + self.assertEqual(var.frequency, "day") def test_get_bad_variable(self): """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'ta')) + 
self.assertIsNone(self.variables_info.get_variable("Omon", "ta")) def test_omon_ta_fail_if_strict(self): """Get ta fails with Omon if strict.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'ta')) + self.assertIsNone(self.variables_info.get_variable("Omon", "ta")) def test_omon_ta_succes_if_strict(self): """Get ta does not fail with AERMonZ if not strict.""" self.variables_info.strict = False - var = self.variables_info.get_variable('Omon', 'ta') - self.assertEqual(var.short_name, 'ta') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable("Omon", "ta") + self.assertEqual(var.short_name, "ta") + self.assertEqual(var.frequency, "mon") def test_omon_toz_succes_if_strict(self): - """Get troz does not fail with Omon if not strict.""" + """Get toz does not fail with Omon if not strict.""" self.variables_info.strict = False - var = self.variables_info.get_variable('Omon', 'toz') - self.assertEqual(var.short_name, 'toz') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable("Omon", "toz") + self.assertEqual(var.short_name, "toz") + self.assertEqual(var.frequency, "mon") def test_get_institute_from_source(self): """Get institution for source ACCESS-CM2.""" - institute = self.variables_info.institutes['ACCESS-CM2'] - self.assertListEqual(institute, ['CSIRO-ARCCSS']) + institute = self.variables_info.institutes["ACCESS-CM2"] + self.assertListEqual(institute, ["CSIRO-ARCCSS"]) def test_get_activity_from_exp(self): """Get activity for experiment 1pctCO2.""" - activity = self.variables_info.activities['1pctCO2'] - self.assertListEqual(activity, ['CMIP']) + activity = self.variables_info.activities["1pctCO2"] + self.assertListEqual(activity, ["CMIP"]) class Testobs4mipsInfo(unittest.TestCase): """Test for the obs$mips info class.""" + @classmethod def setUpClass(cls): """Set up tests. 
We read CMIP6Info once to keep test times manageable """ - cls.variables_info = CMIP6Info(cmor_tables_path='obs4mips', - default=CustomInfo(), - strict=False, - default_table_prefix='obs4MIPs_') + cls.variables_info = CMIP6Info( + cmor_tables_path="obs4mips", + default=CustomInfo(), + strict=False, + default_table_prefix="obs4MIPs_", + ) def setUp(self): self.variables_info.strict = False @@ -173,13 +180,14 @@ def setUp(self): def test_get_table_frequency(self): """Test get table frequency.""" self.assertEqual( - self.variables_info.get_table('obs4MIPs_monStderr').frequency, - 'mon') + self.variables_info.get_table("obs4MIPs_monStderr").frequency, + "mon", + ) def test_custom_tables_location(self): """Test constructor with custom tables location.""" cmor_path = os.path.dirname(os.path.realpath(esmvalcore.cmor.__file__)) - cmor_tables_path = os.path.join(cmor_path, 'tables', 'cmip6') + cmor_tables_path = os.path.join(cmor_path, "tables", "cmip6") cmor_tables_path = os.path.abspath(cmor_tables_path) CMIP6Info(cmor_tables_path, None, True) @@ -188,61 +196,64 @@ def test_get_variable_ndvistderr(self): Note table name obs4MIPs_[mip] """ - var = self.variables_info.get_variable('obs4MIPs_monStderr', - 'ndviStderr') - self.assertEqual(var.short_name, 'ndviStderr') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable( + "obs4MIPs_monStderr", "ndviStderr" + ) + self.assertEqual(var.short_name, "ndviStderr") + self.assertEqual(var.frequency, "mon") def test_get_variable_hus(self): """Get hus variable.""" - var = self.variables_info.get_variable('obs4MIPs_Amon', 'hus') - self.assertEqual(var.short_name, 'hus') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable("obs4MIPs_Amon", "hus") + self.assertEqual(var.short_name, "hus") + self.assertEqual(var.frequency, "mon") def test_get_variable_hus_default_prefix(self): """Get hus variable using the default table prefix.""" - var = self.variables_info.get_variable('Amon', 'hus') - self.assertEqual(var.short_name, 'hus') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable("Amon", "hus") + self.assertEqual(var.short_name, "hus") + self.assertEqual(var.frequency, "mon") def test_get_variable_from_custom(self): """Get prStderr variable. 
Note table name obs4MIPs_[mip] """ - var = self.variables_info.get_variable('obs4MIPs_monStderr', - 'prStderr') - self.assertEqual(var.short_name, 'prStderr') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable( + "obs4MIPs_monStderr", "prStderr" + ) + self.assertEqual(var.short_name, "prStderr") + self.assertEqual(var.frequency, "mon") def test_get_variable_from_custom_deriving(self): """Get a variable from default.""" - var = self.variables_info.get_variable('obs4MIPs_Amon', - 'swcre', - derived=True) - self.assertEqual(var.short_name, 'swcre') - self.assertEqual(var.frequency, 'mon') - - var = self.variables_info.get_variable('obs4MIPs_Aday', - 'swcre', - derived=True) - self.assertEqual(var.short_name, 'swcre') - self.assertEqual(var.frequency, 'day') + var = self.variables_info.get_variable( + "obs4MIPs_Amon", "swcre", derived=True + ) + self.assertEqual(var.short_name, "swcre") + self.assertEqual(var.frequency, "mon") + + var = self.variables_info.get_variable( + "obs4MIPs_Aday", "swcre", derived=True + ) + self.assertEqual(var.short_name, "swcre") + self.assertEqual(var.frequency, "day") def test_get_bad_variable(self): """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'tras')) + self.assertIsNone(self.variables_info.get_variable("Omon", "tras")) class TestCMIP5Info(unittest.TestCase): """Test for the CMIP5 info class.""" + @classmethod def setUpClass(cls): """Set up tests. We read CMIP5Info once to keep testing times manageable """ - cls.variables_info = CMIP5Info('cmip5', CustomInfo(), strict=True) + cls.variables_info = CMIP5Info("cmip5", CustomInfo(), strict=True) def setUp(self): self.variables_info.strict = True @@ -250,68 +261,86 @@ def setUp(self): def test_custom_tables_location(self): """Test constructor with custom tables location.""" cmor_path = os.path.dirname(os.path.realpath(esmvalcore.cmor.__file__)) - cmor_tables_path = os.path.join(cmor_path, 'tables', 'cmip5') + cmor_tables_path = os.path.join(cmor_path, "tables", "cmip5") cmor_tables_path = os.path.abspath(cmor_tables_path) CMIP5Info(cmor_tables_path, None, True) def test_get_variable_tas(self): """Get tas variable.""" - var = self.variables_info.get_variable('Amon', 'tas') - self.assertEqual(var.short_name, 'tas') + var = self.variables_info.get_variable("Amon", "tas") + self.assertEqual(var.short_name, "tas") def test_get_variable_zg(self): """Get zg variable.""" - var = self.variables_info.get_variable('Amon', 'zg') - self.assertEqual(var.short_name, 'zg') - self.assertEqual(var.coordinates['plevs'].requested, [ - '100000.', '92500.', '85000.', '70000.', '60000.', '50000.', - '40000.', '30000.', '25000.', '20000.', '15000.', '10000.', - '7000.', '5000.', '3000.', '2000.', '1000.' 
- ]) + var = self.variables_info.get_variable("Amon", "zg") + self.assertEqual(var.short_name, "zg") + self.assertEqual( + var.coordinates["plevs"].requested, + [ + "100000.", + "92500.", + "85000.", + "70000.", + "60000.", + "50000.", + "40000.", + "30000.", + "25000.", + "20000.", + "15000.", + "10000.", + "7000.", + "5000.", + "3000.", + "2000.", + "1000.", + ], + ) def test_get_variable_from_custom(self): """Get a variable from default.""" self.variables_info.strict = False - var = self.variables_info.get_variable('Amon', 'swcre') - self.assertEqual(var.short_name, 'swcre') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable("Amon", "swcre") + self.assertEqual(var.short_name, "swcre") + self.assertEqual(var.frequency, "mon") - var = self.variables_info.get_variable('day', 'swcre') - self.assertEqual(var.short_name, 'swcre') - self.assertEqual(var.frequency, 'day') + var = self.variables_info.get_variable("day", "swcre") + self.assertEqual(var.short_name, "swcre") + self.assertEqual(var.frequency, "day") def test_get_bad_variable(self): """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'tas')) + self.assertIsNone(self.variables_info.get_variable("Omon", "tas")) def test_aermon_ta_fail_if_strict(self): """Get ta fails with Omon if strict.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'ta')) + self.assertIsNone(self.variables_info.get_variable("Omon", "ta")) def test_aermon_ta_succes_if_strict(self): """Get ta does not fail with Omon if not strict.""" self.variables_info.strict = False - var = self.variables_info.get_variable('Omon', 'ta') - self.assertEqual(var.short_name, 'ta') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable("Omon", "ta") + self.assertEqual(var.short_name, "ta") + self.assertEqual(var.frequency, "mon") def test_omon_toz_succes_if_strict(self): - """Get troz does not fail with Omon if not strict.""" + """Get toz does not fail with Omon if not strict.""" self.variables_info.strict = False - var = self.variables_info.get_variable('Omon', 'toz') - self.assertEqual(var.short_name, 'toz') - self.assertEqual(var.frequency, 'mon') + var = self.variables_info.get_variable("Omon", "toz") + self.assertEqual(var.short_name, "toz") + self.assertEqual(var.frequency, "mon") class TestCMIP3Info(unittest.TestCase): """Test for the CMIP3 info class.""" + @classmethod def setUpClass(cls): """Set up tests. 
We read CMIP3Info once to keep testing times manageable """ - cls.variables_info = CMIP3Info('cmip3', CustomInfo(), strict=True) + cls.variables_info = CMIP3Info("cmip3", CustomInfo(), strict=True) def setUp(self): self.variables_info.strict = True @@ -319,87 +348,106 @@ def setUp(self): def test_custom_tables_location(self): """Test constructor with custom tables location.""" cmor_path = os.path.dirname(os.path.realpath(esmvalcore.cmor.__file__)) - cmor_tables_path = os.path.join(cmor_path, 'tables', 'cmip3') + cmor_tables_path = os.path.join(cmor_path, "tables", "cmip3") cmor_tables_path = os.path.abspath(cmor_tables_path) CMIP3Info(cmor_tables_path, None, True) def test_get_variable_tas(self): """Get tas variable.""" - var = self.variables_info.get_variable('A1', 'tas') - self.assertEqual(var.short_name, 'tas') + var = self.variables_info.get_variable("A1", "tas") + self.assertEqual(var.short_name, "tas") def test_get_variable_zg(self): """Get zg variable.""" - var = self.variables_info.get_variable('A1', 'zg') - self.assertEqual(var.short_name, 'zg') - self.assertEqual(var.coordinates['pressure'].requested, [ - '100000.', '92500.', '85000.', '70000.', '60000.', '50000.', - '40000.', '30000.', '25000.', '20000.', '15000.', '10000.', - '7000.', '5000.', '3000.', '2000.', '1000.' - ]) + var = self.variables_info.get_variable("A1", "zg") + self.assertEqual(var.short_name, "zg") + self.assertEqual( + var.coordinates["pressure"].requested, + [ + "100000.", + "92500.", + "85000.", + "70000.", + "60000.", + "50000.", + "40000.", + "30000.", + "25000.", + "20000.", + "15000.", + "10000.", + "7000.", + "5000.", + "3000.", + "2000.", + "1000.", + ], + ) def test_get_variable_from_custom(self): """Get a variable from default.""" self.variables_info.strict = False - var = self.variables_info.get_variable('A1', 'swcre') - self.assertEqual(var.short_name, 'swcre') - self.assertEqual(var.frequency, '') + var = self.variables_info.get_variable("A1", "swcre") + self.assertEqual(var.short_name, "swcre") + self.assertEqual(var.frequency, "") - var = self.variables_info.get_variable('day', 'swcre') - self.assertEqual(var.short_name, 'swcre') - self.assertEqual(var.frequency, '') + var = self.variables_info.get_variable("day", "swcre") + self.assertEqual(var.short_name, "swcre") + self.assertEqual(var.frequency, "") def test_get_bad_variable(self): """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('O1', 'tas')) + self.assertIsNone(self.variables_info.get_variable("O1", "tas")) def test_aermon_ta_fail_if_strict(self): """Get ta fails with O1 if strict.""" - self.assertIsNone(self.variables_info.get_variable('O1', 'ta')) + self.assertIsNone(self.variables_info.get_variable("O1", "ta")) def test_aermon_ta_succes_if_strict(self): """Get ta does not fail with O1 if not strict.""" self.variables_info.strict = False - var = self.variables_info.get_variable('O1', 'ta') - self.assertEqual(var.short_name, 'ta') - self.assertEqual(var.frequency, '') + var = self.variables_info.get_variable("O1", "ta") + self.assertEqual(var.short_name, "ta") + self.assertEqual(var.frequency, "") def test_omon_toz_succes_if_strict(self): - """Get troz does not fail with Omon if not strict.""" + """Get toz does not fail with O1 if not strict.""" self.variables_info.strict = False - var = self.variables_info.get_variable('O1', 'toz') + 
self.assertEqual(var.short_name, "toz") + self.assertEqual(var.frequency, "") class TestCORDEXInfo(unittest.TestCase): """Test for the CORDEX info class.""" + @classmethod def setUpClass(cls): """Set up tests. We read CORDEX once to keep testing times manageable """ - cls.variables_info = CMIP5Info('cordex', default=CustomInfo()) + cls.variables_info = CMIP5Info("cordex", default=CustomInfo()) def test_custom_tables_location(self): """Test constructor with custom tables location.""" cmor_path = os.path.dirname(os.path.realpath(esmvalcore.cmor.__file__)) - cmor_tables_path = os.path.join(cmor_path, 'tables', 'cordex') + cmor_tables_path = os.path.join(cmor_path, "tables", "cordex") CMIP5Info(cmor_tables_path) def test_get_variable_tas(self): """Get tas variable.""" - var = self.variables_info.get_variable('mon', 'tas') - self.assertEqual(var.short_name, 'tas') + var = self.variables_info.get_variable("mon", "tas") + self.assertEqual(var.short_name, "tas") def test_get_bad_variable(self): """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'tas')) + self.assertIsNone(self.variables_info.get_variable("Omon", "tas")) class TestCustomInfo(unittest.TestCase): """Test for the custom info class.""" + @classmethod def setUpClass(cls): """Set up tests. @@ -413,88 +461,98 @@ def test_custom_tables_default_location(self): custom_info = CustomInfo() expected_cmor_folder = os.path.join( os.path.dirname(esmvalcore.cmor.__file__), - 'tables', - 'custom', + "tables", + "custom", ) self.assertEqual(custom_info._cmor_folder, expected_cmor_folder) - self.assertTrue(custom_info.tables['custom']) + self.assertTrue(custom_info.tables["custom"]) self.assertTrue(custom_info.coords) def test_custom_tables_location(self): """Test constructor with custom tables location.""" cmor_path = os.path.dirname(os.path.realpath(esmvalcore.cmor.__file__)) - default_cmor_tables_path = os.path.join(cmor_path, 'tables', 'custom') - cmor_tables_path = os.path.join(cmor_path, 'tables', 'cmip5') + default_cmor_tables_path = os.path.join(cmor_path, "tables", "custom") + cmor_tables_path = os.path.join(cmor_path, "tables", "cmip5") cmor_tables_path = os.path.abspath(cmor_tables_path) custom_info = CustomInfo(cmor_tables_path) self.assertEqual(custom_info._cmor_folder, default_cmor_tables_path) self.assertEqual(custom_info._user_table_folder, cmor_tables_path) - self.assertTrue(custom_info.tables['custom']) + self.assertTrue(custom_info.tables["custom"]) self.assertTrue(custom_info.coords) def test_custom_tables_invalid_location(self): """Test constructor with invalid custom tables location.""" with self.assertRaises(ValueError): - CustomInfo('this_file_does_not_exist.dat') + CustomInfo("this_file_does_not_exist.dat") def test_get_variable_netcre(self): """Get netcre variable.""" CustomInfo() - var = self.variables_info.get_variable('Amon', 'netcre') - self.assertEqual(var.short_name, 'netcre') + var = self.variables_info.get_variable("Amon", "netcre") + self.assertEqual(var.short_name, "netcre") def test_get_bad_variable(self): """Get none if a variable is not in the given table.""" - self.assertIsNone(self.variables_info.get_variable('Omon', 'badvar')) + self.assertIsNone(self.variables_info.get_variable("Omon", "badvar")) def test_get_variable_tasconf5(self): """Get tasConf5 variable.""" CustomInfo() - var = self.variables_info.get_variable('Amon', 'tasConf5') - self.assertEqual(var.short_name, 'tasConf5') - self.assertEqual(var.long_name, - 'Near-Surface Air Temperature 
Uncertainty Range') - self.assertEqual(var.units, 'K') + var = self.variables_info.get_variable("Amon", "tasConf5") + self.assertEqual(var.short_name, "tasConf5") + self.assertEqual( + var.long_name, "Near-Surface Air Temperature Uncertainty Range" + ) + self.assertEqual(var.units, "K") def test_get_variable_tasconf95(self): """Get tasConf95 variable.""" CustomInfo() - var = self.variables_info.get_variable('Amon', 'tasConf95') - self.assertEqual(var.short_name, 'tasConf95') - self.assertEqual(var.long_name, - 'Near-Surface Air Temperature Uncertainty Range') - self.assertEqual(var.units, 'K') + var = self.variables_info.get_variable("Amon", "tasConf95") + self.assertEqual(var.short_name, "tasConf95") + self.assertEqual( + var.long_name, "Near-Surface Air Temperature Uncertainty Range" + ) + self.assertEqual(var.units, "K") def test_get_variable_tasaga(self): """Get tasaga variable.""" CustomInfo() - var = self.variables_info.get_variable('Amon', 'tasaga') - self.assertEqual(var.short_name, 'tasaga') - self.assertEqual(var.long_name, - 'Global-mean Near-Surface Air Temperature Anomaly') - self.assertEqual(var.units, 'K') + var = self.variables_info.get_variable("Amon", "tasaga") + self.assertEqual(var.short_name, "tasaga") + self.assertEqual( + var.long_name, "Global-mean Near-Surface Air Temperature Anomaly" + ) + self.assertEqual(var.units, "K") def test_get_variable_ch4s(self): - """Get tas variable.""" + """Get ch4s variable.""" + CustomInfo() + var = self.variables_info.get_variable("Amon", "ch4s") + self.assertEqual(var.short_name, "ch4s") + self.assertEqual(var.long_name, "Atmosphere CH4 surface") + self.assertEqual(var.units, "1e-09") + + def test_get_variable_tosstderr(self): + """Get tosStderr variable.""" CustomInfo() - var = self.variables_info.get_variable('Amon', 'ch4s') - self.assertEqual(var.short_name, 'ch4s') - self.assertEqual(var.long_name, - 'Atmosphere CH4 surface') - self.assertEqual(var.units, '1e-09') + var = self.variables_info.get_variable("Omon", "tosStderr") + self.assertEqual(var.short_name, "tosStderr") + self.assertEqual(var.long_name, "Sea Surface Temperature Error") + self.assertEqual(var.units, "K") @pytest.mark.parametrize( - 'project,mip,short_name,frequency', + "project,mip,short_name,frequency", [ - ('CMIP5', 'Amon', 'tas', 'mon'), - ('CMIP5', 'day', 'tas', 'day'), - ('CMIP6', 'Amon', 'tas', 'mon'), - ('CMIP6', 'day', 'tas', 'day'), - ('CORDEX', '3hr', 'tas', '3hr'), - ] + ("CMIP5", "Amon", "tas", "mon"), + ("CMIP5", "day", "tas", "day"), + ("CMIP6", "Amon", "tas", "mon"), + ("CMIP6", "day", "tas", "day"), + ("CORDEX", "3hr", "tas", "3hr"), + ], ) def test_get_var_info(project, mip, short_name, frequency): """Test ``get_var_info``.""" @@ -505,15 +563,15 @@ def test_get_var_info(project, mip, short_name, frequency): @pytest.mark.parametrize( - 'mip,short_name', + "mip,short_name", [ - ('INVALID_MIP', 'tas'), - ('Amon', 'INVALID_VAR'), - ] + ("INVALID_MIP", "tas"), + ("Amon", "INVALID_VAR"), + ], ) def test_get_var_info_invalid_mip_short_name(mip, short_name): """Test ``get_var_info``.""" - var_info = get_var_info('CMIP6', mip, short_name) + var_info = get_var_info("CMIP6", mip, short_name) assert var_info is None @@ -521,4 +579,4 @@ def test_get_var_info_invalid_mip_short_name(mip, short_name): def test_get_var_info_invalid_project(): """Test ``get_var_info``.""" with pytest.raises(KeyError): - get_var_info('INVALID_PROJECT', 'Amon', 'tas') + get_var_info("INVALID_PROJECT", "Amon", "tas") diff --git a/tests/integration/conftest.py 
b/tests/integration/conftest.py index fbddec7fe0..8787b345ee 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -19,10 +19,10 @@ def session(tmp_path: Path, monkeypatch): CFG.clear() CFG.update(CFG_DEFAULT) - monkeypatch.setitem(CFG, 'rootpath', {'default': {tmp_path: 'default'}}) + monkeypatch.setitem(CFG, "rootpath", {"default": {tmp_path: "default"}}) - session = CFG.start_session('recipe_test') - session['output_dir'] = tmp_path / 'esmvaltool_output' + session = CFG.start_session("recipe_test") + session["output_dir"] = tmp_path / "esmvaltool_output" return session @@ -33,7 +33,7 @@ def create_test_file(filename, tracking_id=None): attributes = {} if tracking_id is not None: - attributes['tracking_id'] = tracking_id + attributes["tracking_id"] = tracking_id cube = iris.cube.Cube([]) cube.attributes.globals = attributes @@ -47,60 +47,60 @@ def _get_files(root_path, facets, tracking_id): return files for the two "models" AAA and BBB. """ - if facets['dataset'] == '*': + if facets["dataset"] == "*": all_facets = [ - {**facets, 'dataset': 'AAA', 'institute': 'A'}, - {**facets, 'dataset': 'BBB', 'institute': 'B'}, + {**facets, "dataset": "AAA", "institute": "A"}, + {**facets, "dataset": "BBB", "institute": "B"}, ] else: all_facets = [facets] # Globs without expanded facets - dir_template = _select_drs('input_dir', facets['project'], 'default') - file_template = _select_drs('input_file', facets['project'], 'default') + dir_template = _select_drs("input_dir", facets["project"], "default") + file_template = _select_drs("input_file", facets["project"], "default") dir_globs = _replace_tags(dir_template, facets) file_globs = _replace_tags(file_template, facets) globs = sorted( - root_path / 'input' / d / f for d in dir_globs for f in file_globs + root_path / "input" / d / f for d in dir_globs for f in file_globs ) files = [] for expanded_facets in all_facets: filenames = [] dir_template = _select_drs( - 'input_dir', expanded_facets['project'], 'default' + "input_dir", expanded_facets["project"], "default" ) file_template = _select_drs( - 'input_file', expanded_facets['project'], 'default' + "input_file", expanded_facets["project"], "default" ) dir_globs = _replace_tags(dir_template, expanded_facets) file_globs = _replace_tags(file_template, expanded_facets) - filename = ( - str(root_path / 'input' / dir_globs[0] / Path(file_globs[0]).name) + filename = str( + root_path / "input" / dir_globs[0] / Path(file_globs[0]).name ) - if filename.endswith('[_.]*nc'): + if filename.endswith("[_.]*nc"): # Restore when we support filenames with no dates # filenames.append(filename.replace('[_.]*nc', '.nc')) - filename = filename.replace('[_.]*nc', '_*.nc') + filename = filename.replace("[_.]*nc", "_*.nc") - if filename.endswith('*.nc'): - filename = filename[:-len('*.nc')] + '_' - if facets['frequency'] == 'fx': - intervals = [''] + if filename.endswith("*.nc"): + filename = filename[: -len("*.nc")] + "_" + if facets["frequency"] == "fx": + intervals = [""] else: intervals = [ - '1990_1999', - '2000_2009', - '2010_2019', + "1990_1999", + "2000_2009", + "2010_2019", ] for interval in intervals: - filenames.append(filename + interval + '.nc') + filenames.append(filename + interval + ".nc") else: filenames.append(filename) - if 'timerange' in facets: - filenames = _select_files(filenames, facets['timerange']) + if "timerange" in facets: + filenames = _select_files(filenames, facets["timerange"]) for filename in filenames: create_test_file(filename, next(tracking_id)) @@ -115,7 
+115,6 @@ def _get_files(root_path, facets, tracking_id): @pytest.fixture def patched_datafinder(tmp_path, monkeypatch): - def tracking_ids(i=0): while True: yield i @@ -129,7 +128,7 @@ def find_files(*, debug: bool = False, **facets): return files, file_globs return files - monkeypatch.setattr(esmvalcore.local, 'find_files', find_files) + monkeypatch.setattr(esmvalcore.local, "find_files", find_files) @pytest.fixture @@ -153,14 +152,14 @@ def tracking_ids(i=0): def find_files(*, debug: bool = False, **facets): files, file_globs = _get_files(tmp_path, facets, tracking_id) - if 'fx' == facets['frequency']: + if "fx" == facets["frequency"]: files = [] returned_files = [] for file in files: - if not ('AAA' in file.name and 'rsutcs' in file.name): + if not ("AAA" in file.name and "rsutcs" in file.name): returned_files.append(file) if debug: return returned_files, file_globs return returned_files - monkeypatch.setattr(esmvalcore.local, 'find_files', find_files) + monkeypatch.setattr(esmvalcore.local, "find_files", find_files) diff --git a/tests/integration/data_finder.yml b/tests/integration/data_finder.yml index 40e0c3e821..9b90bc7da6 100644 --- a/tests/integration/data_finder.yml +++ b/tests/integration/data_finder.yml @@ -452,7 +452,7 @@ get_input_filelist: - ta_Amon_HadGEM2-ES_historical_r1i1p1*.nc found_files: - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - + - drs: NCI variable: <<: *variable @@ -472,7 +472,7 @@ get_input_filelist: found_files: - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20120928/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20120928/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc - + - drs: NCI variable: <<: *variable @@ -593,7 +593,7 @@ get_input_filelist: found_files: - historical/atmos/mon/ta/HADGEM1/r1i1p1/ta_HADGEM1_195001-199912.nc - historical/atmos/mon/ta/HADGEM1/r1i1p1/ta_HADGEM1_200001-200112.nc - + - drs: NCI variable: variable_group: test diff --git a/tests/integration/dataset/test_dataset.py b/tests/integration/dataset/test_dataset.py index 0c94dc8c48..cdc8310ea0 100644 --- a/tests/integration/dataset/test_dataset.py +++ b/tests/integration/dataset/test_dataset.py @@ -11,17 +11,38 @@ @pytest.fixture def example_data(tmp_path, monkeypatch): cwd = Path(__file__).parent - tas_src = cwd / 'tas.nc' - areacella_src = cwd / 'areacella.nc' - - rootpath = tmp_path / 'climate_data' - tas_tgt = (rootpath / 'cmip5' / 'output1' / 'CCCma' / 'CanESM2' / - 'historical' / 'mon' / 'atmos' / 'Amon' / 'r1i1p1' / - 'v20120718' / - 'tas_Amon_CanESM2_historical_r1i1p1_185001-200512.nc') - areacella_tgt = (rootpath / 'cmip5' / 'output1' / 'CCCma' / 'CanESM2' / - 'historical' / 'fx' / 'atmos' / 'fx' / 'r0i0p0' / - 'v20120410' / 'areacella_fx_CanESM2_historical_r0i0p0.nc') + tas_src = cwd / "tas.nc" + areacella_src = cwd / "areacella.nc" + + rootpath = tmp_path / "climate_data" + tas_tgt = ( + rootpath + / "cmip5" + / "output1" + / "CCCma" + / "CanESM2" + / "historical" + / "mon" + / "atmos" + / "Amon" + / "r1i1p1" + / "v20120718" + / "tas_Amon_CanESM2_historical_r1i1p1_185001-200512.nc" + ) + areacella_tgt = ( + rootpath + / "cmip5" + / "output1" + / "CCCma" + / "CanESM2" + / "historical" + / "fx" + / "atmos" + / "fx" + / "r0i0p0" + / "v20120410" + / "areacella_fx_CanESM2_historical_r0i0p0.nc" + ) tas_tgt.parent.mkdir(parents=True, exist_ok=True) tas_tgt.symlink_to(tas_src) @@ -29,22 +50,22 @@ def example_data(tmp_path, monkeypatch): 
areacella_tgt.parent.mkdir(parents=True, exist_ok=True) areacella_tgt.symlink_to(areacella_src) - monkeypatch.setitem(CFG, 'rootpath', {'CMIP5': str(rootpath)}) - monkeypatch.setitem(CFG, 'drs', {'CMIP5': 'ESGF'}) - monkeypatch.setitem(CFG, 'output_dir', tmp_path / 'output_dir') + monkeypatch.setitem(CFG, "rootpath", {"CMIP5": str(rootpath)}) + monkeypatch.setitem(CFG, "drs", {"CMIP5": "ESGF"}) + monkeypatch.setitem(CFG, "output_dir", tmp_path / "output_dir") def test_load(example_data): tas = Dataset( - short_name='tas', - mip='Amon', - project='CMIP5', - dataset='CanESM2', - ensemble='r1i1p1', - exp='historical', - timerange='1850/185002', + short_name="tas", + mip="Amon", + project="CMIP5", + dataset="CanESM2", + ensemble="r1i1p1", + exp="historical", + timerange="1850/185002", ) - tas.add_supplementary(short_name='areacella', mip='fx', ensemble='r0i0p0') + tas.add_supplementary(short_name="areacella", mip="fx", ensemble="r0i0p0") tas.augment_facets() diff --git a/tests/integration/esgf/search_results/expected.yml b/tests/integration/esgf/search_results/expected.yml index 9e463c0cf7..24f02b9181 100644 --- a/tests/integration/esgf/search_results/expected.yml +++ b/tests/integration/esgf/search_results/expected.yml @@ -1,301 +1,301 @@ Amon_r1i1p1_historical,rcp85_INM-CM4_CMIP5_tas.json: -- checksums: - - - SHA256 - - 0c7cc5410d6f03b3a49b8de9e0ae8090249a66a8ebd27e6d17a78fba96eba3f9 - - - SHA256 - - 0c7cc5410d6f03b3a49b8de9e0ae8090249a66a8ebd27e6d17a78fba96eba3f9 - - - SHA256 - - 0c7cc5410d6f03b3a49b8de9e0ae8090249a66a8ebd27e6d17a78fba96eba3f9 - - - MD5 - - fc448373de679c8fcbfe031364049df1 - dataset: cmip5.output1.INM.inmcm4.historical.mon.atmos.Amon.r1i1p1.v20130207 - facets: - dataset: inmcm4 - ensemble: r1i1p1 - exp: historical - frequency: mon - institute: INM - mip: Amon - product: output1 - project: CMIP5 - modeling_realm: atmos - short_name: tas - version: v20130207 - local_file: cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/v20130207/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc - name: tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc - size: 161801172 - urls: - - http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/tas/1/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc - - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc - - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc - - http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc -- checksums: - - - SHA256 - - 4a39b0fb2698ab4305df9070240a1b9ef88c80422f379e3086f67a0dfc2b3047 - - - SHA256 - - 4a39b0fb2698ab4305df9070240a1b9ef88c80422f379e3086f67a0dfc2b3047 - - - SHA256 - - 4a39b0fb2698ab4305df9070240a1b9ef88c80422f379e3086f67a0dfc2b3047 - - - MD5 - - b8885d3860e66b036db76ca6a49e7c51 - dataset: cmip5.output1.INM.inmcm4.rcp85.mon.atmos.Amon.r1i1p1.v20130207 - facets: - dataset: inmcm4 - ensemble: r1i1p1 - exp: rcp85 - frequency: mon - institute: INM - mip: Amon - product: output1 - project: CMIP5 - modeling_realm: atmos - short_name: tas - version: v20130207 - local_file: cmip5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/v20130207/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc 
- name: tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc - size: 98538788 - urls: - - http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/cmip5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/tas/1/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc - - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc - - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc - - http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc + - checksums: + - - SHA256 + - 0c7cc5410d6f03b3a49b8de9e0ae8090249a66a8ebd27e6d17a78fba96eba3f9 + - - SHA256 + - 0c7cc5410d6f03b3a49b8de9e0ae8090249a66a8ebd27e6d17a78fba96eba3f9 + - - SHA256 + - 0c7cc5410d6f03b3a49b8de9e0ae8090249a66a8ebd27e6d17a78fba96eba3f9 + - - MD5 + - fc448373de679c8fcbfe031364049df1 + dataset: cmip5.output1.INM.inmcm4.historical.mon.atmos.Amon.r1i1p1.v20130207 + facets: + dataset: inmcm4 + ensemble: r1i1p1 + exp: historical + frequency: mon + institute: INM + mip: Amon + product: output1 + project: CMIP5 + modeling_realm: atmos + short_name: tas + version: v20130207 + local_file: cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/v20130207/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc + name: tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc + size: 161801172 + urls: + - http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/tas/1/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc + - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc + - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc + - http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc + - checksums: + - - SHA256 + - 4a39b0fb2698ab4305df9070240a1b9ef88c80422f379e3086f67a0dfc2b3047 + - - SHA256 + - 4a39b0fb2698ab4305df9070240a1b9ef88c80422f379e3086f67a0dfc2b3047 + - - SHA256 + - 4a39b0fb2698ab4305df9070240a1b9ef88c80422f379e3086f67a0dfc2b3047 + - - MD5 + - b8885d3860e66b036db76ca6a49e7c51 + dataset: cmip5.output1.INM.inmcm4.rcp85.mon.atmos.Amon.r1i1p1.v20130207 + facets: + dataset: inmcm4 + ensemble: r1i1p1 + exp: rcp85 + frequency: mon + institute: INM + mip: Amon + product: output1 + project: CMIP5 + modeling_realm: atmos + short_name: tas + version: v20130207 + local_file: cmip5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/v20130207/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc + name: tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc + size: 98538788 + urls: + - http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/cmip5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/tas/1/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc + - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc + - 
http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc + - http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/output1/INM/inmcm4/rcp85/mon/atmos/Amon/r1i1p1/v20130207/tas/tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc Amon_r1i1p1_historical_FIO-ESM_CMIP5_tas.json: -- checksums: - - - SHA256 - - b6d4b62ccba4cb8141e422d17a31d618a59161c72bd10476391811b693cbff6c - - - SHA256 - - b6d4b62ccba4cb8141e422d17a31d618a59161c72bd10476391811b693cbff6c - - - MD5 - - 970cc36b75466a30cf02b7ae0896a9b0 - - - SHA256 - - b6d4b62ccba4cb8141e422d17a31d618a59161c72bd10476391811b693cbff6c - dataset: cmip5.output1.FIO.FIO-ESM.historical.mon.atmos.Amon.r1i1p1.v20121010 - facets: - dataset: FIO-ESM - ensemble: r1i1p1 - exp: historical - frequency: mon - institute: FIO - mip: Amon - product: output1 - project: CMIP5 - modeling_realm: atmos - short_name: tas - version: v20121010 - local_file: cmip5/output1/FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc - name: tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc - size: 61398952 - urls: - - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc - - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc - - http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/output1/FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc - - http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/cmip5/output1/FIO/fio-esm/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc + - checksums: + - - SHA256 + - b6d4b62ccba4cb8141e422d17a31d618a59161c72bd10476391811b693cbff6c + - - SHA256 + - b6d4b62ccba4cb8141e422d17a31d618a59161c72bd10476391811b693cbff6c + - - MD5 + - 970cc36b75466a30cf02b7ae0896a9b0 + - - SHA256 + - b6d4b62ccba4cb8141e422d17a31d618a59161c72bd10476391811b693cbff6c + dataset: cmip5.output1.FIO.FIO-ESM.historical.mon.atmos.Amon.r1i1p1.v20121010 + facets: + dataset: FIO-ESM + ensemble: r1i1p1 + exp: historical + frequency: mon + institute: FIO + mip: Amon + product: output1 + project: CMIP5 + modeling_realm: atmos + short_name: tas + version: v20121010 + local_file: cmip5/output1/FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc + name: tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc + size: 61398952 + urls: + - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc + - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc + - http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/output1/FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc + - http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/cmip5/output1/FIO/fio-esm/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc Amon_r1i1p1_rcp85_HadGEM2-CC_CMIP5_tas.json: -- checksums: - 
- - SHA256 - - 10a94293f2ac844ab62496d5d5369ccc0e839c73882a323c21800d71d7780315 - - - SHA256 - - 10a94293f2ac844ab62496d5d5369ccc0e839c73882a323c21800d71d7780315 - dataset: cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1.v20120531 - facets: - dataset: HadGEM2-CC - ensemble: r1i1p1 - exp: rcp85 - frequency: mon - institute: MOHC - mip: Amon - product: output1 - project: CMIP5 - modeling_realm: atmos - short_name: tas - version: v20120531 - local_file: cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc - name: tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc - size: 33432040 - urls: - - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc - - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc -- checksums: - - - SHA256 - - 568e590103ed3bec8692aad85686b576466bd76dea872a9b0411c4c1941f44ad - - - SHA256 - - 568e590103ed3bec8692aad85686b576466bd76dea872a9b0411c4c1941f44ad - dataset: cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1.v20120531 - facets: - dataset: HadGEM2-CC - ensemble: r1i1p1 - exp: rcp85 - frequency: mon - institute: MOHC - mip: Amon - product: output1 - project: CMIP5 - modeling_realm: atmos - short_name: tas - version: v20120531 - local_file: cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc - name: tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc - size: 25523776 - urls: - - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc - - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc -- checksums: - - - SHA256 - - f718b1d91b25e5cc5acd3c0080dabb3762711676ad8efe0f54c0946f495f943a - - - SHA256 - - f718b1d91b25e5cc5acd3c0080dabb3762711676ad8efe0f54c0946f495f943a - dataset: cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1.v20120531 - facets: - dataset: HadGEM2-CC - ensemble: r1i1p1 - exp: rcp85 - frequency: mon - institute: MOHC - mip: Amon - product: output1 - project: CMIP5 - modeling_realm: atmos - short_name: tas - version: v20120531 - local_file: cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc - name: tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc - size: 1353448 - urls: - - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc - - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc + - checksums: + - - SHA256 + - 10a94293f2ac844ab62496d5d5369ccc0e839c73882a323c21800d71d7780315 + - - SHA256 + - 10a94293f2ac844ab62496d5d5369ccc0e839c73882a323c21800d71d7780315 + dataset: cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1.v20120531 + facets: + dataset: HadGEM2-CC + ensemble: r1i1p1 + exp: rcp85 + frequency: mon + institute: MOHC + mip: Amon + product: output1 + project: CMIP5 + modeling_realm: atmos + 
short_name: tas + version: v20120531 + local_file: cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc + name: tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc + size: 33432040 + urls: + - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc + - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc + - checksums: + - - SHA256 + - 568e590103ed3bec8692aad85686b576466bd76dea872a9b0411c4c1941f44ad + - - SHA256 + - 568e590103ed3bec8692aad85686b576466bd76dea872a9b0411c4c1941f44ad + dataset: cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1.v20120531 + facets: + dataset: HadGEM2-CC + ensemble: r1i1p1 + exp: rcp85 + frequency: mon + institute: MOHC + mip: Amon + product: output1 + project: CMIP5 + modeling_realm: atmos + short_name: tas + version: v20120531 + local_file: cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc + name: tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc + size: 25523776 + urls: + - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc + - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc + - checksums: + - - SHA256 + - f718b1d91b25e5cc5acd3c0080dabb3762711676ad8efe0f54c0946f495f943a + - - SHA256 + - f718b1d91b25e5cc5acd3c0080dabb3762711676ad8efe0f54c0946f495f943a + dataset: cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1.v20120531 + facets: + dataset: HadGEM2-CC + ensemble: r1i1p1 + exp: rcp85 + frequency: mon + institute: MOHC + mip: Amon + product: output1 + project: CMIP5 + modeling_realm: atmos + short_name: tas + version: v20120531 + local_file: cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc + name: tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc + size: 1353448 + urls: + - http://esgf-data1.ceda.ac.uk/thredds/fileServer/esg_dataroot/cmip5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc + - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP5/output1/MOHC/HadGEM2-CC/rcp85/mon/atmos/Amon/r1i1p1/v20120531/tas/tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc EUR-11_MOHC-HadGEM2-ES_r1i1p1_historical_CORDEX_RACMO22E_mon_tas.json: -- checksums: - - - SHA256 - - e27fb1414788529a714c27a7d11169136db9ece7247756ab26dcea70d1da53e3 - dataset: cordex.output.EUR-11.KNMI.MOHC-HadGEM2-ES.historical.r1i1p1.RACMO22E.v2.mon.tas.v20160620 - facets: - dataset: RACMO22E - domain: EUR-11 - driver: MOHC-HadGEM2-ES - ensemble: r1i1p1 - exp: historical - frequency: mon - institute: KNMI - product: output - project: CORDEX - rcm_version: v2 - short_name: tas - version: v20160620 - local_file: cordex/output/EUR-11/KNMI/MOHC-HadGEM2-ES/historical/r1i1p1/RACMO22E/v2/mon/tas/v20160620/tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195001-195012.nc - name: tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195001-195012.nc - size: 5982648 - urls: - - 
http://esgf1.dkrz.de/thredds/fileServer/cordex/cordex/output/EUR-11/KNMI/MOHC-HadGEM2-ES/historical/r1i1p1/KNMI-RACMO22E/v2/mon/tas/v20160620/tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195001-195012.nc -- checksums: - - - SHA256 - - f14160b5411dc0c7716f80709f309e14948736187087a4c50ec33e0aadcacf53 - dataset: cordex.output.EUR-11.KNMI.MOHC-HadGEM2-ES.historical.r1i1p1.RACMO22E.v2.mon.tas.v20160620 - facets: - dataset: RACMO22E - domain: EUR-11 - driver: MOHC-HadGEM2-ES - ensemble: r1i1p1 - exp: historical - frequency: mon - institute: KNMI - product: output - project: CORDEX - rcm_version: v2 - short_name: tas - version: v20160620 - local_file: cordex/output/EUR-11/KNMI/MOHC-HadGEM2-ES/historical/r1i1p1/RACMO22E/v2/mon/tas/v20160620/tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195101-196012.nc - name: tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195101-196012.nc - size: 41765410 - urls: - - http://esgf1.dkrz.de/thredds/fileServer/cordex/cordex/output/EUR-11/KNMI/MOHC-HadGEM2-ES/historical/r1i1p1/KNMI-RACMO22E/v2/mon/tas/v20160620/tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195101-196012.nc + - checksums: + - - SHA256 + - e27fb1414788529a714c27a7d11169136db9ece7247756ab26dcea70d1da53e3 + dataset: cordex.output.EUR-11.KNMI.MOHC-HadGEM2-ES.historical.r1i1p1.RACMO22E.v2.mon.tas.v20160620 + facets: + dataset: RACMO22E + domain: EUR-11 + driver: MOHC-HadGEM2-ES + ensemble: r1i1p1 + exp: historical + frequency: mon + institute: KNMI + product: output + project: CORDEX + rcm_version: v2 + short_name: tas + version: v20160620 + local_file: cordex/output/EUR-11/KNMI/MOHC-HadGEM2-ES/historical/r1i1p1/RACMO22E/v2/mon/tas/v20160620/tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195001-195012.nc + name: tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195001-195012.nc + size: 5982648 + urls: + - http://esgf1.dkrz.de/thredds/fileServer/cordex/cordex/output/EUR-11/KNMI/MOHC-HadGEM2-ES/historical/r1i1p1/KNMI-RACMO22E/v2/mon/tas/v20160620/tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195001-195012.nc + - checksums: + - - SHA256 + - f14160b5411dc0c7716f80709f309e14948736187087a4c50ec33e0aadcacf53 + dataset: cordex.output.EUR-11.KNMI.MOHC-HadGEM2-ES.historical.r1i1p1.RACMO22E.v2.mon.tas.v20160620 + facets: + dataset: RACMO22E + domain: EUR-11 + driver: MOHC-HadGEM2-ES + ensemble: r1i1p1 + exp: historical + frequency: mon + institute: KNMI + product: output + project: CORDEX + rcm_version: v2 + short_name: tas + version: v20160620 + local_file: cordex/output/EUR-11/KNMI/MOHC-HadGEM2-ES/historical/r1i1p1/RACMO22E/v2/mon/tas/v20160620/tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195101-196012.nc + name: tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195101-196012.nc + size: 41765410 + urls: + - http://esgf1.dkrz.de/thredds/fileServer/cordex/cordex/output/EUR-11/KNMI/MOHC-HadGEM2-ES/historical/r1i1p1/KNMI-RACMO22E/v2/mon/tas/v20160620/tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1_KNMI-RACMO22E_v2_mon_195101-196012.nc historical_gn_r4i1p1f1_CMIP6_CESM2_Amon_tas.json: -- checksums: - - - SHA256 - - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 - - - SHA256 - - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 - - - SHA256 - - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 - - - SHA256 - - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 - - - SHA256 - - 
5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 - - - SHA256 - - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 - dataset: CMIP6.CMIP.NCAR.CESM2.historical.r4i1p1f1.Amon.tas.gn.v20190308 - facets: - activity: CMIP - dataset: CESM2 - ensemble: r4i1p1f1 - exp: historical - grid: gn - institute: NCAR - mip: Amon - project: CMIP6 - short_name: tas - version: v20190308 - local_file: CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc - name: tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc - size: 243060396 - urls: - - http://aims3.llnl.gov/thredds/fileServer/css03_data/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc - - http://esgf-data.ucar.edu/thredds/fileServer/esg_dataroot/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc - - http://esgf-data04.diasjp.net/thredds/fileServer/esg_dataroot/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc - - https://esgf.ceda.ac.uk/thredds/fileServer/esg_cmip6/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc - - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc - - http://esgf3.dkrz.de/thredds/fileServer/cmip6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc + - checksums: + - - SHA256 + - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 + - - SHA256 + - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 + - - SHA256 + - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 + - - SHA256 + - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 + - - SHA256 + - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 + - - SHA256 + - 5f7cdf4fd94b995bfdbf0d9316555980f5b0d0d246c07e1cb6356cd7a4fbdce5 + dataset: CMIP6.CMIP.NCAR.CESM2.historical.r4i1p1f1.Amon.tas.gn.v20190308 + facets: + activity: CMIP + dataset: CESM2 + ensemble: r4i1p1f1 + exp: historical + grid: gn + institute: NCAR + mip: Amon + project: CMIP6 + short_name: tas + version: v20190308 + local_file: CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc + name: tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc + size: 243060396 + urls: + - http://aims3.llnl.gov/thredds/fileServer/css03_data/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc + - http://esgf-data.ucar.edu/thredds/fileServer/esg_dataroot/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc + - http://esgf-data04.diasjp.net/thredds/fileServer/esg_dataroot/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc + - https://esgf.ceda.ac.uk/thredds/fileServer/esg_cmip6/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc + - http://esgf.nci.org.au/thredds/fileServer/replica/CMIP6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc + - 
http://esgf3.dkrz.de/thredds/fileServer/cmip6/CMIP/NCAR/CESM2/historical/r4i1p1f1/Amon/tas/gn/v20190308/tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc obs4MIPs_CERES-EBAF_mon_rsutcs.json: -- checksums: - - - SHA256 - - db1434a04f3c65eb43e85c0f5d5f344dec5c7813989a7e3bfb5aab6ac3a39414 - dataset: obs4MIPs.CERES-EBAF.v20160610 - facets: - dataset: CERES-EBAF - frequency: mon - institute: NASA-LaRC - project: obs4MIPs - modeling_realm: atmos - short_name: rsutcs - version: v20160610 - local_file: obs4MIPs/CERES-EBAF/v20160610/rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201404.nc - name: rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201404.nc - size: 44090540 - urls: - - https://dpesgf03.nccs.nasa.gov/thredds/fileServer/obs4MIPs/NASA-LaRC/observations/atmos/rsutcs/mon/grid/NASA-LaRC/CERES-EBAF/v20140728/rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201404.nc + - checksums: + - - SHA256 + - db1434a04f3c65eb43e85c0f5d5f344dec5c7813989a7e3bfb5aab6ac3a39414 + dataset: obs4MIPs.CERES-EBAF.v20160610 + facets: + dataset: CERES-EBAF + frequency: mon + institute: NASA-LaRC + project: obs4MIPs + modeling_realm: atmos + short_name: rsutcs + version: v20160610 + local_file: obs4MIPs/CERES-EBAF/v20160610/rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201404.nc + name: rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201404.nc + size: 44090540 + urls: + - https://dpesgf03.nccs.nasa.gov/thredds/fileServer/obs4MIPs/NASA-LaRC/observations/atmos/rsutcs/mon/grid/NASA-LaRC/CERES-EBAF/v20140728/rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201404.nc obs4MIPs_GPCP-V2.3_pr.json: -- checksums: - - - SHA256 - - 4dd4678b79ef139446c8406da5aae4fed210abb2f2160ef95f6988bf83e4525b - dataset: obs4MIPs.GPCP-V2.3.v20180519 - facets: - dataset: GPCP-V2.3 - frequency: mon - institute: NASA-GSFC - project: obs4MIPs - short_name: pr - version: v20180519 - local_file: obs4MIPs/GPCP-V2.3/v20180519/pr_GPCP-SG_L3_v2.3_197901-201710.nc - name: pr_GPCP-SG_L3_v2.3_197901-201710.nc - size: 19348352 - urls: - - https://dpesgf03.nccs.nasa.gov/thredds/fileServer/obs4MIPs/observations/NASA-GSFC/Obs-GPCP/GPCP/V2.3/atmos/pr/pr_GPCP-SG_L3_v2.3_197901-201710.nc + - checksums: + - - SHA256 + - 4dd4678b79ef139446c8406da5aae4fed210abb2f2160ef95f6988bf83e4525b + dataset: obs4MIPs.GPCP-V2.3.v20180519 + facets: + dataset: GPCP-V2.3 + frequency: mon + institute: NASA-GSFC + project: obs4MIPs + short_name: pr + version: v20180519 + local_file: obs4MIPs/GPCP-V2.3/v20180519/pr_GPCP-SG_L3_v2.3_197901-201710.nc + name: pr_GPCP-SG_L3_v2.3_197901-201710.nc + size: 19348352 + urls: + - https://dpesgf03.nccs.nasa.gov/thredds/fileServer/obs4MIPs/observations/NASA-GSFC/Obs-GPCP/GPCP/V2.3/atmos/pr/pr_GPCP-SG_L3_v2.3_197901-201710.nc run1_historical_cccma_cgcm3_1_CMIP3_mon_tas.json: -- checksums: - - - SHA256 - - ee398fdd869ff702c525ebac091e79e6ff69cf4487e3d042cf8dc1e2f105fcb4 - dataset: cmip3.CCCma.cccma_cgcm3_1.historical.mon.atmos.run1.tas.v1 - facets: - dataset: cccma_cgcm3_1 - ensemble: run1 - exp: historical - frequency: mon - institute: CCCma - project: CMIP3 - modeling_realm: atmos - short_name: tas - version: v1 - local_file: cmip3/CCCma/cccma_cgcm3_1/historical/mon/atmos/run1/tas/v1/tas_a1_20c3m_1_cgcm3.1_t47_1850_2000.nc - name: tas_a1_20c3m_1_cgcm3.1_t47_1850_2000.nc - size: 33448576 - urls: - - http://aims3.llnl.gov/thredds/fileServer/cmip3_data/data2/20c3m/atm/mo/tas/cccma_cgcm3_1/run1/tas_a1_20c3m_1_cgcm3.1_t47_1850_2000.nc + - checksums: + - - SHA256 + - ee398fdd869ff702c525ebac091e79e6ff69cf4487e3d042cf8dc1e2f105fcb4 + dataset: cmip3.CCCma.cccma_cgcm3_1.historical.mon.atmos.run1.tas.v1 + facets: + dataset: 
cccma_cgcm3_1 + ensemble: run1 + exp: historical + frequency: mon + institute: CCCma + project: CMIP3 + modeling_realm: atmos + short_name: tas + version: v1 + local_file: cmip3/CCCma/cccma_cgcm3_1/historical/mon/atmos/run1/tas/v1/tas_a1_20c3m_1_cgcm3.1_t47_1850_2000.nc + name: tas_a1_20c3m_1_cgcm3.1_t47_1850_2000.nc + size: 33448576 + urls: + - http://aims3.llnl.gov/thredds/fileServer/cmip3_data/data2/20c3m/atm/mo/tas/cccma_cgcm3_1/run1/tas_a1_20c3m_1_cgcm3.1_t47_1850_2000.nc diff --git a/tests/integration/esgf/test_search_download.py b/tests/integration/esgf/test_search_download.py index ea34aab897..65f17b3ef1 100644 --- a/tests/integration/esgf/test_search_download.py +++ b/tests/integration/esgf/test_search_download.py @@ -8,91 +8,103 @@ from esmvalcore.esgf import _search, download, find_files -VARIABLES = [{ - 'dataset': 'cccma_cgcm3_1', - 'ensemble': 'run1', - 'exp': 'historical', - 'frequency': 'mon', - 'project': 'CMIP3', - 'short_name': 'tas', - 'version': 'v1', -}, { - 'dataset': 'inmcm4', - 'ensemble': 'r1i1p1', - 'exp': ['historical', 'rcp85'], - 'mip': 'Amon', - 'project': 'CMIP5', - 'short_name': 'tas', - 'version': 'v20130207', -}, { - 'dataset': 'FIO-ESM', - 'ensemble': 'r1i1p1', - 'exp': 'historical', - 'mip': 'Amon', - 'project': 'CMIP5', - 'short_name': 'tas', -}, { - 'dataset': 'HadGEM2-CC', - 'ensemble': 'r1i1p1', - 'exp': 'rcp85', - 'mip': 'Amon', - 'project': 'CMIP5', - 'short_name': 'tas', - 'timerange': '2080/2100', -}, { - 'dataset': 'EC-EARTH', - 'ensemble': 'r1i1p1', - 'exp': 'historical', - 'mip': 'Amon', - 'project': 'CMIP5', - 'short_name': 'tas', - 'start_year': 1990, # test legacy way of specifying timerange - 'end_year': 1999, -}, { - 'dataset': 'AWI-ESM-1-1-LR', - 'ensemble': 'r1i1p1f1', - 'exp': 'historical', - 'grid': 'gn', - 'mip': 'Amon', - 'project': 'CMIP6', - 'short_name': 'tas', - 'timerange': '2000/2001', - 'version': 'v20200212', -}, { - 'dataset': 'CESM2', - 'ensemble': 'r4i1p1f1', - 'exp': 'historical', - 'grid': 'gn', - 'mip': 'Amon', - 'project': 'CMIP6', - 'short_name': 'tas', - 'timerange': '2000/2001', -}, { - 'dataset': 'RACMO22E', - 'driver': 'MOHC-HadGEM2-ES', - 'domain': 'EUR-11', - 'ensemble': 'r1i1p1', - 'exp': 'historical', - 'frequency': 'mon', - 'project': 'CORDEX', - 'short_name': 'tas', - 'timerange': '1950/1952', - 'version': 'v20160620', -}, { - 'dataset': 'CERES-EBAF', - 'frequency': 'mon', - 'project': 'obs4MIPs', - 'short_name': 'rsutcs', - 'version': 'v20160610', -}, { - 'dataset': 'GPCP-V2.3', - 'project': 'obs4MIPs', - 'short_name': 'pr', -}] +VARIABLES = [ + { + "dataset": "cccma_cgcm3_1", + "ensemble": "run1", + "exp": "historical", + "frequency": "mon", + "project": "CMIP3", + "short_name": "tas", + "version": "v1", + }, + { + "dataset": "inmcm4", + "ensemble": "r1i1p1", + "exp": ["historical", "rcp85"], + "mip": "Amon", + "project": "CMIP5", + "short_name": "tas", + "version": "v20130207", + }, + { + "dataset": "FIO-ESM", + "ensemble": "r1i1p1", + "exp": "historical", + "mip": "Amon", + "project": "CMIP5", + "short_name": "tas", + }, + { + "dataset": "HadGEM2-CC", + "ensemble": "r1i1p1", + "exp": "rcp85", + "mip": "Amon", + "project": "CMIP5", + "short_name": "tas", + "timerange": "2080/2100", + }, + { + "dataset": "EC-EARTH", + "ensemble": "r1i1p1", + "exp": "historical", + "mip": "Amon", + "project": "CMIP5", + "short_name": "tas", + "start_year": 1990, # test legacy way of specifying timerange + "end_year": 1999, + }, + { + "dataset": "AWI-ESM-1-1-LR", + "ensemble": "r1i1p1f1", + "exp": "historical", + "grid": 
"gn", + "mip": "Amon", + "project": "CMIP6", + "short_name": "tas", + "timerange": "2000/2001", + "version": "v20200212", + }, + { + "dataset": "CESM2", + "ensemble": "r4i1p1f1", + "exp": "historical", + "grid": "gn", + "mip": "Amon", + "project": "CMIP6", + "short_name": "tas", + "timerange": "2000/2001", + }, + { + "dataset": "RACMO22E", + "driver": "MOHC-HadGEM2-ES", + "domain": "EUR-11", + "ensemble": "r1i1p1", + "exp": "historical", + "frequency": "mon", + "project": "CORDEX", + "short_name": "tas", + "timerange": "1950/1952", + "version": "v20160620", + }, + { + "dataset": "CERES-EBAF", + "frequency": "mon", + "project": "obs4MIPs", + "short_name": "rsutcs", + "version": "v20160610", + }, + { + "dataset": "GPCP-V2.3", + "project": "obs4MIPs", + "short_name": "pr", + }, +] def get_mock_connection(facets, results): """Create a mock pyesgf.search.SearchConnection instance.""" + class MockFileSearchContext: def search(self, **kwargs): return results @@ -105,32 +117,34 @@ def new_context(self, *args, **kwargs): return MockConnection() -@pytest.mark.parametrize('variable', VARIABLES) +@pytest.mark.parametrize("variable", VARIABLES) def test_mock_search(variable, mocker): - data_path = Path(__file__).parent / 'search_results' + data_path = Path(__file__).parent / "search_results" facets = _search.get_esgf_facets(variable) - json_file = '_'.join(str(facets[k]) for k in sorted(facets)) + '.json' + json_file = "_".join(str(facets[k]) for k in sorted(facets)) + ".json" raw_results = data_path / json_file if not raw_results.exists(): # Skip cases where the raw search results were too large to save. pytest.skip(f"Raw search results in {raw_results} not available.") - with raw_results.open('r', encoding='utf-8') as file: + with raw_results.open("r", encoding="utf-8") as file: search_results = [ FileResult(json=j, context=None) for j in json.load(file) ] conn = get_mock_connection(facets, search_results) - mocker.patch.object(_search.pyesgf.search, - 'SearchConnection', - autspec=True, - return_value=conn) + mocker.patch.object( + _search.pyesgf.search, + "SearchConnection", + autospec=True, + return_value=conn, + ) files = find_files(**variable) - expected_results_file = data_path / 'expected.yml' + expected_results_file = data_path / "expected.yml" if expected_results_file.exists(): - with expected_results_file.open(encoding='utf-8') as file: + with expected_results_file.open(encoding="utf-8") as file: expected_results = yaml.safe_load(file) else: expected_results = {} @@ -140,50 +154,52 @@ def test_mock_search(variable, mocker): else: expected_results[json_file] = [ { - 'checksums': file._checksums, - 'dataset': file.dataset, - 'facets': file.facets, - 'local_file': str(file.local_file(Path())), - 'name': file.name, - 'size': file.size, - 'urls': file.urls, + "checksums": file._checksums, + "dataset": file.dataset, + "facets": file.facets, + "local_file": str(file.local_file(Path())), + "name": file.name, + "size": file.size, + "urls": file.urls, } for file in files ] - with expected_results_file.open('w', encoding='utf-8') as file: + with expected_results_file.open("w", encoding="utf-8") as file: yaml.safe_dump(expected_results, file) - assert False, 'Wrote expected results, please check.' + assert False, "Wrote expected results, please check."
assert len(files) == len(expected_files) for found_file, expected in zip(files, expected_files): - assert found_file.name == expected['name'] - assert found_file.local_file(Path()) == Path(expected['local_file']) - assert found_file.dataset == expected['dataset'] - assert found_file.size == expected['size'] - assert found_file.facets == expected['facets'] - assert found_file.urls == expected['urls'] + assert found_file.name == expected["name"] + assert found_file.local_file(Path()) == Path(expected["local_file"]) + assert found_file.dataset == expected["dataset"] + assert found_file.size == expected["size"] + assert found_file.facets == expected["facets"] + assert found_file.urls == expected["urls"] assert found_file._checksums == [ - tuple(c) for c in expected['checksums'] + tuple(c) for c in expected["checksums"] ] def test_real_search(): """Test a real search for a single file.""" variable = { - 'project': 'CMIP6', - 'mip': 'Amon', - 'short_name': 'tas', - 'dataset': 'EC-Earth3', - 'exp': 'historical', - 'ensemble': 'r1i1p1f1', - 'grid': 'gr', - 'start_year': 1990, - 'end_year': 2000, + "project": "CMIP6", + "mip": "Amon", + "short_name": "tas", + "dataset": "EC-Earth3", + "exp": "historical", + "ensemble": "r1i1p1f1", + "grid": "gr", + "start_year": 1990, + "end_year": 2000, } files = find_files(**variable) - dataset = ('CMIP6.CMIP.EC-Earth-Consortium.EC-Earth3' - '.historical.r1i1p1f1.Amon.tas.gr') + dataset = ( + "CMIP6.CMIP.EC-Earth-Consortium.EC-Earth3" + ".historical.r1i1p1f1.Amon.tas.gr" + ) assert files for file in files: assert file.dataset.startswith(dataset) @@ -193,93 +209,94 @@ def test_real_search(): def test_real_search_many(): expected_files = [ [ - 'tas_a1_20c3m_1_cgcm3.1_t47_1850_2000.nc', + "tas_a1_20c3m_1_cgcm3.1_t47_1850_2000.nc", ], [ - 'tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc', - 'tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc', + "tas_Amon_inmcm4_historical_r1i1p1_185001-200512.nc", + "tas_Amon_inmcm4_rcp85_r1i1p1_200601-210012.nc", ], [ - 'tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc', + "tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc", ], [ - 'tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc', - 'tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc', - 'tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc', + "tas_Amon_HadGEM2-CC_rcp85_r1i1p1_205512-208011.nc", + "tas_Amon_HadGEM2-CC_rcp85_r1i1p1_208012-209912.nc", + "tas_Amon_HadGEM2-CC_rcp85_r1i1p1_210001-210012.nc", ], [ - 'tas_Amon_EC-EARTH_historical_r1i1p1_199001-199912.nc', + "tas_Amon_EC-EARTH_historical_r1i1p1_199001-199912.nc", ], [ - 'tas_Amon_AWI-ESM-1-1-LR_historical_' - 'r1i1p1f1_gn_200001-200012.nc', - 'tas_Amon_AWI-ESM-1-1-LR_historical_' - 'r1i1p1f1_gn_200101-200112.nc', + "tas_Amon_AWI-ESM-1-1-LR_historical_" + "r1i1p1f1_gn_200001-200012.nc", + "tas_Amon_AWI-ESM-1-1-LR_historical_" + "r1i1p1f1_gn_200101-200112.nc", ], [ - 'tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc', + "tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc", ], [ - 'tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1' - '_KNMI-RACMO22E_v2_mon_195001-195012.nc', - 'tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1' - '_KNMI-RACMO22E_v2_mon_195101-196012.nc', + "tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1" + "_KNMI-RACMO22E_v2_mon_195001-195012.nc", + "tas_EUR-11_MOHC-HadGEM2-ES_historical_r1i1p1" + "_KNMI-RACMO22E_v2_mon_195101-196012.nc", ], [ - 'rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201404.nc', + "rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201404.nc", ], [ - 'pr_GPCP-SG_L3_v2.3_197901-201710.nc', + 
"pr_GPCP-SG_L3_v2.3_197901-201710.nc", ], ] expected_datasets = [ [ - 'cmip3.CCCma.cccma_cgcm3_1.historical.mon.atmos.run1.tas.v1', + "cmip3.CCCma.cccma_cgcm3_1.historical.mon.atmos.run1.tas.v1", ], [ - 'cmip5.output1.INM.inmcm4.historical.mon.atmos.Amon.r1i1p1' - '.v20130207', - 'cmip5.output1.INM.inmcm4.rcp85.mon.atmos.Amon.r1i1p1.v20130207', + "cmip5.output1.INM.inmcm4.historical.mon.atmos.Amon.r1i1p1" + ".v20130207", + "cmip5.output1.INM.inmcm4.rcp85.mon.atmos.Amon.r1i1p1.v20130207", ], [ - 'cmip5.output1.FIO.FIO-ESM.historical.mon.atmos.Amon.r1i1p1' - '.v20121010', + "cmip5.output1.FIO.FIO-ESM.historical.mon.atmos.Amon.r1i1p1" + ".v20121010", ], [ - 'cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1' - '.v20120531', - 'cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1' - '.v20120531', - 'cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1' - '.v20120531', + "cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1" + ".v20120531", + "cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1" + ".v20120531", + "cmip5.output1.MOHC.HadGEM2-CC.rcp85.mon.atmos.Amon.r1i1p1" + ".v20120531", ], [ - 'cmip5.output1.ICHEC.EC-EARTH.historical.mon.atmos.Amon.r1i1p1' - '.v20131231', + "cmip5.output1.ICHEC.EC-EARTH.historical.mon.atmos.Amon.r1i1p1" + ".v20131231", ], [ - 'CMIP6.CMIP.AWI.AWI-ESM-1-1-LR.historical.r1i1p1f1.Amon.tas.gn' - '.v20200212', - 'CMIP6.CMIP.AWI.AWI-ESM-1-1-LR.historical.r1i1p1f1.Amon.tas.gn' - '.v20200212', + "CMIP6.CMIP.AWI.AWI-ESM-1-1-LR.historical.r1i1p1f1.Amon.tas.gn" + ".v20200212", + "CMIP6.CMIP.AWI.AWI-ESM-1-1-LR.historical.r1i1p1f1.Amon.tas.gn" + ".v20200212", ], [ - 'CMIP6.CMIP.NCAR.CESM2.historical.r4i1p1f1.Amon.tas.gn.v20190308', + "CMIP6.CMIP.NCAR.CESM2.historical.r4i1p1f1.Amon.tas.gn.v20190308", ], [ - 'cordex.output.EUR-11.KNMI.MOHC-HadGEM2-ES.historical.r1i1p1' - '.RACMO22E.v2.mon.tas.v20160620', - 'cordex.output.EUR-11.KNMI.MOHC-HadGEM2-ES.historical.r1i1p1' - '.RACMO22E.v2.mon.tas.v20160620', + "cordex.output.EUR-11.KNMI.MOHC-HadGEM2-ES.historical.r1i1p1" + ".RACMO22E.v2.mon.tas.v20160620", + "cordex.output.EUR-11.KNMI.MOHC-HadGEM2-ES.historical.r1i1p1" + ".RACMO22E.v2.mon.tas.v20160620", ], [ - 'obs4MIPs.CERES-EBAF.v20160610', + "obs4MIPs.CERES-EBAF.v20160610", ], - ['obs4MIPs.GPCP-V2.3.v20180519'], + ["obs4MIPs.GPCP-V2.3.v20180519"], ] - for variable, files, datasets in zip(VARIABLES, expected_files, - expected_datasets): + for variable, files, datasets in zip( + VARIABLES, expected_files, expected_datasets + ): result = find_files(**variable) found_files = [file.name for file in result] print(found_files) @@ -292,7 +309,7 @@ def test_real_search_many(): print(result[0].facets) for file in result: for key, value in variable.items(): - if key in ('start_year', 'end_year', 'timerange'): + if key in ("start_year", "end_year", "timerange"): continue if isinstance(value, list): assert file.facets.get(key) in value @@ -304,22 +321,24 @@ def test_real_search_many(): def test_real_download(): all_files = [] for variable in VARIABLES: - if variable.get('exp', '') == 'historical': - variable['start_year'] = 2000 - variable['end_year'] = 2001 + if variable.get("exp", "") == "historical": + variable["start_year"] = 2000 + variable["end_year"] = 2001 files = find_files(**variable) assert files all_files.extend(files) - dest_folder = Path.home() / 'esmvaltool_download_test' + dest_folder = Path.home() / "esmvaltool_download_test" download(all_files, dest_folder) print(f"Download of variable={variable} successful") -if __name__ == '__main__': - 
logging.basicConfig(format="%(asctime)s [%(process)d] %(levelname)-8s " - "%(name)s,%(lineno)s\t%(message)s") - logging.getLogger().setLevel('info'.upper()) +if __name__ == "__main__": + logging.basicConfig( + format="%(asctime)s [%(process)d] %(levelname)-8s " + "%(name)s,%(lineno)s\t%(message)s" + ) + logging.getLogger().setLevel("INFO") test_real_search_many() test_real_download() diff --git a/tests/integration/preprocessor/_derive/test_interface.py b/tests/integration/preprocessor/_derive/test_interface.py index 14ea1d6c9a..2cb216ac55 100644 --- a/tests/integration/preprocessor/_derive/test_interface.py +++ b/tests/integration/preprocessor/_derive/test_interface.py @@ -10,7 +10,7 @@ from esmvalcore.preprocessor._derive import get_required from esmvalcore.preprocessor._derive.ohc import DerivedVariable -SHORT_NAME = 'short_name' +SHORT_NAME = "short_name" @pytest.fixture @@ -22,34 +22,43 @@ def mock_cubes(): @pytest.fixture def patched_derive(mocker): """Fixture for mocked derivation scripts.""" - mocker.patch('iris.cube.CubeList', side_effect=lambda x: x) - mocker.patch.object(_derive, 'ALL_DERIVED_VARIABLES', autospec=True) - mocker.patch.object(_derive, 'logger', autospec=True) + mocker.patch("iris.cube.CubeList", side_effect=lambda x: x) + mocker.patch.object(_derive, "ALL_DERIVED_VARIABLES", autospec=True) + mocker.patch.object(_derive, "logger", autospec=True) -def mock_all_derived_variables(returned_units): +def mock_all_derived_variables(returned_units, fail_unit_conversion=False): """Mock the :obj:`dict` containing all derived variables accordingly.""" cube = mock.create_autospec(Cube, instance=True) cube.units = returned_units + if fail_unit_conversion: + cube.convert_units.side_effect = [ValueError] calculate_function = mock.Mock(return_value=cube) - derived_var = mock.Mock(name='DerivedVariable') + derived_var = mock.Mock(name="DerivedVariable") derived_var.return_value.calculate = calculate_function _derive.ALL_DERIVED_VARIABLES.__getitem__.return_value = derived_var def assert_derived_var_calc_called_once_with(*args): """Assert that derivation script of variable has been called.""" - (_derive.ALL_DERIVED_VARIABLES.__getitem__.return_value.return_value.
- calculate.assert_called_once_with(*args)) + ( + _derive.ALL_DERIVED_VARIABLES.__getitem__.return_value.return_value.calculate.assert_called_once_with( + *args + ) + ) @pytest.mark.usefixtures("patched_derive") def test_check_units_none(mock_cubes): """Test units after derivation if derivation scripts returns None.""" mock_all_derived_variables(None) - cube = derive(mock_cubes, SHORT_NAME, mock.sentinel.long_name, - mock.sentinel.units, - standard_name=mock.sentinel.standard_name) + cube = derive( + mock_cubes, + SHORT_NAME, + mock.sentinel.long_name, + mock.sentinel.units, + standard_name=mock.sentinel.standard_name, + ) assert_derived_var_calc_called_once_with(mock_cubes) assert cube.units == mock.sentinel.units assert cube.var_name == SHORT_NAME @@ -62,11 +71,16 @@ def test_check_units_none(mock_cubes): @pytest.mark.usefixtures("patched_derive") def test_check_units_equal(mock_cubes): """Test units after derivation if derivation scripts returns None.""" - mock_all_derived_variables(Unit('kg m2 s-2')) - cube = derive(mock_cubes, SHORT_NAME, mock.sentinel.long_name, 'J', - standard_name=mock.sentinel.standard_name) + mock_all_derived_variables(Unit("kg m2 s-2")) + cube = derive( + mock_cubes, + SHORT_NAME, + mock.sentinel.long_name, + "J", + standard_name=mock.sentinel.standard_name, + ) assert_derived_var_calc_called_once_with(mock_cubes) - assert cube.units == Unit('J') + assert cube.units == Unit("J") assert cube.var_name == SHORT_NAME assert cube.long_name == mock.sentinel.long_name assert cube.standard_name == mock.sentinel.standard_name @@ -77,47 +91,70 @@ def test_check_units_equal(mock_cubes): @pytest.mark.usefixtures("patched_derive") def test_check_units_nounit(mock_cubes): """Test units after derivation if derivation scripts returns None.""" - mock_all_derived_variables(Unit('no unit')) - cube = derive(mock_cubes, SHORT_NAME, mock.sentinel.long_name, 'J', - standard_name=mock.sentinel.standard_name) + mock_all_derived_variables(Unit("no unit")) + cube = derive( + mock_cubes, + SHORT_NAME, + mock.sentinel.long_name, + "J", + standard_name=mock.sentinel.standard_name, + ) assert_derived_var_calc_called_once_with(mock_cubes) - assert cube.units == Unit('J') + assert cube.units == Unit("J") assert cube.var_name == SHORT_NAME assert cube.long_name == mock.sentinel.long_name assert cube.standard_name == mock.sentinel.standard_name _derive.logger.warning.assert_called_once_with( "Units of cube after executing derivation script of '%s' are '%s', " "automatically setting them to '%s'. 
This might lead to incorrect " - "data", SHORT_NAME, Unit('no_unit'), 'J') + "data", + SHORT_NAME, + Unit("no_unit"), + "J", + ) cube.convert_units.assert_not_called() @pytest.mark.usefixtures("patched_derive") def test_check_units_unknown(mock_cubes): """Test units after derivation if derivation scripts returns None.""" - mock_all_derived_variables(Unit('unknown')) - cube = derive(mock_cubes, SHORT_NAME, mock.sentinel.long_name, 'J', - standard_name=mock.sentinel.standard_name) + mock_all_derived_variables(Unit("unknown")) + cube = derive( + mock_cubes, + SHORT_NAME, + mock.sentinel.long_name, + "J", + standard_name=mock.sentinel.standard_name, + ) assert_derived_var_calc_called_once_with(mock_cubes) - assert cube.units == Unit('J') + assert cube.units == Unit("J") assert cube.var_name == SHORT_NAME assert cube.long_name == mock.sentinel.long_name assert cube.standard_name == mock.sentinel.standard_name _derive.logger.warning.assert_called_once_with( "Units of cube after executing derivation script of '%s' are '%s', " "automatically setting them to '%s'. This might lead to incorrect " - "data", SHORT_NAME, Unit('unknown'), 'J') + "data", + SHORT_NAME, + Unit("unknown"), + "J", + ) cube.convert_units.assert_not_called() @pytest.mark.usefixtures("patched_derive") def test_check_units_convertible(mock_cubes): """Test units after derivation if derivation scripts returns None.""" - mock_all_derived_variables(Unit('kg s-1')) - cube = derive(mock_cubes, SHORT_NAME, mock.sentinel.long_name, 'g yr-1', - standard_name=mock.sentinel.standard_name) + mock_all_derived_variables(Unit("kg s-1")) + cube = derive( + mock_cubes, + SHORT_NAME, + mock.sentinel.long_name, + "g yr-1", + standard_name=mock.sentinel.standard_name, + ) assert_derived_var_calc_called_once_with(mock_cubes) - cube.convert_units.assert_called_once_with('g yr-1') + cube.convert_units.assert_called_once_with("g yr-1") assert cube.var_name == SHORT_NAME assert cube.long_name == mock.sentinel.long_name assert cube.standard_name == mock.sentinel.standard_name @@ -127,10 +164,15 @@ def test_check_units_convertible(mock_cubes): @pytest.mark.usefixtures("patched_derive") def test_check_units_fail(mock_cubes): """Test units after derivation if derivation scripts returns None.""" - mock_all_derived_variables(Unit('kg')) + mock_all_derived_variables(Unit("kg"), fail_unit_conversion=True) with pytest.raises(ValueError) as err: - derive(mock_cubes, SHORT_NAME, mock.sentinel.long_name, 'm', - standard_name=mock.sentinel.standard_name) + derive( + mock_cubes, + SHORT_NAME, + mock.sentinel.long_name, + "m", + standard_name=mock.sentinel.standard_name, + ) assert str(err.value) == ( "Units 'kg' after executing derivation script of 'short_name' cannot " "be converted to target units 'm'" @@ -140,14 +182,14 @@ def test_check_units_fail(mock_cubes): def test_get_required(): """Test getting required variables for derivation.""" - variables = get_required('alb', 'CMIP5') + variables = get_required("alb", "CMIP5") reference = [ { - 'short_name': 'rsdscs', + "short_name": "rsdscs", }, { - 'short_name': 'rsuscs', + "short_name": "rsuscs", }, ] @@ -156,11 +198,11 @@ def test_get_required(): def test_get_required_with_fx(): """Test getting required variables for derivation with fx variables.""" - variables = get_required('ohc', 'CMIP5') + variables = get_required("ohc", "CMIP5") reference = [ - {'short_name': 'thetao'}, - {'short_name': 'volcello', 'mip': 'fx'}, + {"short_name": "thetao"}, + {"short_name": "volcello", "mip": "fx"}, ] assert variables == 
reference @@ -168,17 +210,17 @@ def test_get_required_with_fx(): def test_derive_nonstandard_nofx(): """Test a specific derivation.""" - short_name = 'alb' - long_name = 'albedo at the surface' + short_name = "alb" + long_name = "albedo at the surface" units = 1 - standard_name = '' + standard_name = "" - rsdscs = Cube([2.]) - rsdscs.short_name = 'rsdscs' + rsdscs = Cube([2.0]) + rsdscs.short_name = "rsdscs" rsdscs.var_name = rsdscs.short_name - rsuscs = Cube([1.]) - rsuscs.short_name = 'rsuscs' + rsuscs = Cube([1.0]) + rsuscs.short_name = "rsuscs" rsuscs.var_name = rsuscs.short_name cubes = CubeList([rsdscs, rsuscs]) @@ -193,9 +235,9 @@ def test_derive_nonstandard_nofx(): def test_derive_noop(): """Test derivation when it is not necessary.""" - alb = Cube([1.]) - alb.var_name = 'alb' - alb.long_name = 'albedo at the surface' + alb = Cube([1.0]) + alb.var_name = "alb" + alb.long_name = "albedo at the surface" alb.units = 1 cube = derive([alb], alb.var_name, alb.long_name, alb.units) @@ -205,9 +247,9 @@ def test_derive_noop(): def test_derive_mixed_case_with_fx(monkeypatch): """Test derivation with fx file.""" - short_name = 'ohc' - long_name = 'Heat content in grid cell' - units = 'J' + short_name = "ohc" + long_name = "Heat content in grid cell" + units = "J" ohc_cube = Cube([]) @@ -216,7 +258,7 @@ def mock_calculate(_, cubes): assert cubes[0] == ohc_cube return Cube([]) - monkeypatch.setattr(DerivedVariable, 'calculate', mock_calculate) + monkeypatch.setattr(DerivedVariable, "calculate", mock_calculate) derive( [ohc_cube], diff --git a/tests/integration/preprocessor/_derive/test_sispeed.py b/tests/integration/preprocessor/_derive/test_sispeed.py index 9daee38954..7c3af33cac 100644 --- a/tests/integration/preprocessor/_derive/test_sispeed.py +++ b/tests/integration/preprocessor/_derive/test_sispeed.py @@ -16,43 +16,39 @@ def get_cube(name, lat=((0.5, 1.5), (2.5, 3.5)), lon=((0.5, 1.5), (2.5, 3.5))): lat_bounds = np.array((lat - 0.5, lat + 0.5)) lon_bounds = np.array((lon - 0.5, lon + 0.5)) cube = Cube(np.ones((2, 2, 2)), name) - cube.add_aux_coord(AuxCoord(lat, 'latitude', bounds=lat_bounds), (1, 2)) - cube.add_aux_coord(AuxCoord(lon, 'longitude', bounds=lon_bounds), (1, 2)) + cube.add_aux_coord(AuxCoord(lat, "latitude", bounds=lat_bounds), (1, 2)) + cube.add_aux_coord(AuxCoord(lon, "longitude", bounds=lon_bounds), (1, 2)) return cube @mock.patch( - 'esmvalcore.preprocessor._regrid_esmpy.ESMPyRegridder.__call__', + "esmvalcore.preprocessor._derive.sispeed.regrid", autospec=True, ) def test_sispeed_calculation(mock_regrid): """Test calculation of `sispeed.""" - siu = get_cube('sea_ice_x_velocity') - siv = get_cube('sea_ice_y_velocity') + siu = get_cube("sea_ice_x_velocity") + siv = get_cube("sea_ice_y_velocity") derived_var = DerivedVariable() sispeed = derived_var.calculate(CubeList((siu, siv))) - assert np.all( - sispeed.data == np.full_like(sispeed.data, 1 * math.sqrt(2)) - ) + assert np.all(sispeed.data == np.full_like(sispeed.data, 1 * math.sqrt(2))) assert mock_regrid.call_count == 0 @mock.patch( - 'esmvalcore.preprocessor._regrid_esmpy.ESMPyRegridder.__call__', + "esmvalcore.preprocessor._derive.sispeed.regrid", autospec=True, ) def test_sispeed_calculation_coord_differ(mock_regrid): """Test calculation of `sispeed.""" - siu = get_cube('sea_ice_x_velocity') + siu = get_cube("sea_ice_x_velocity") siv = get_cube( - 'sea_ice_y_velocity', + "sea_ice_y_velocity", lat=((0.25, 1.25), (2.25, 3.25)), - lon=((0.25, 1.25), (2.25, 3.25)) + lon=((0.25, 1.25), (2.25, 3.25)), ) 
mock_regrid.return_value = siu derived_var = DerivedVariable() sispeed = derived_var.calculate(CubeList((siu, siv))) - assert np.all( - sispeed.data == np.full_like(sispeed.data, 1 * math.sqrt(2)) - ) + assert np.all(sispeed.data == np.full_like(sispeed.data, 1 * math.sqrt(2))) assert mock_regrid.call_count == 1 diff --git a/tests/integration/preprocessor/_derive/test_sithick.py b/tests/integration/preprocessor/_derive/test_sithick.py index 9e23f49963..d0523d6d7d 100644 --- a/tests/integration/preprocessor/_derive/test_sithick.py +++ b/tests/integration/preprocessor/_derive/test_sithick.py @@ -9,26 +9,22 @@ def test_sispeed_calculation(): """Test calculation of `sithick`.""" - siconc = Cube(np.full((2, 2), 0.5), 'sea_ice_area_fraction', units='1.0') - sivol = Cube(np.full((2, 2), 0.5), 'sea_ice_thickness') + siconc = Cube(np.full((2, 2), 0.5), "sea_ice_area_fraction", units="1.0") + sivol = Cube(np.full((2, 2), 0.5), "sea_ice_thickness") derived_var = DerivedVariable() sispeed = derived_var.calculate(CubeList((siconc, sivol))) - assert np.all( - sispeed.data == np.ones_like(sispeed.data) - ) + assert np.all(sispeed.data == np.ones_like(sispeed.data)) def test_sispeed_calculation_percent(): """Test calculation of `sithick` with sit in %.""" - siconc = Cube(np.full((2, 2), 50.), 'sea_ice_area_fraction', units='%') - sivol = Cube(np.full((2, 2), 0.5), 'sea_ice_thickness') + siconc = Cube(np.full((2, 2), 50.0), "sea_ice_area_fraction", units="%") + sivol = Cube(np.full((2, 2), 0.5), "sea_ice_thickness") derived_var = DerivedVariable() sispeed = derived_var.calculate(CubeList((siconc, sivol))) - assert np.all( - sispeed.data == np.ones_like(sispeed.data) - ) + assert np.all(sispeed.data == np.ones_like(sispeed.data)) diff --git a/tests/integration/preprocessor/_extract_region/test_intersect.py b/tests/integration/preprocessor/_extract_region/test_intersect.py index 41442c9c01..833f89896b 100644 --- a/tests/integration/preprocessor/_extract_region/test_intersect.py +++ b/tests/integration/preprocessor/_extract_region/test_intersect.py @@ -4,6 +4,7 @@ Remove this test and test file after iris fixes this https://github.com/SciTools/iris/issues/5413 . """ + import iris import numpy as np from cf_units import Unit @@ -18,44 +19,45 @@ def make_cube(): coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS) data = np.ones((10, 192, 288), dtype=np.float32) time = iris.coords.DimCoord( - np.arange(0., 10., 1.), - standard_name='time', - units=Unit('days since 1950-01-01 00:00:00', - calendar='360_day')) + np.arange(0.0, 10.0, 1.0), + standard_name="time", + units=Unit("days since 1950-01-01 00:00:00", calendar="360_day"), + ) lons = iris.coords.DimCoord( - [i + .5 for i in range(288)], - standard_name='longitude', - bounds=[[i, i + 1.] for i in range(288)], - units='degrees_east', - coord_system=coord_sys) + [i + 0.5 for i in range(288)], + standard_name="longitude", + bounds=[[i, i + 1.0] for i in range(288)], + units="degrees_east", + coord_system=coord_sys, + ) lats = iris.coords.DimCoord( - [i + .5 for i in range(192)], - standard_name='latitude', - bounds=[[i, i + 1.] 
for i in range(192)], - units='degrees_north', + [i + 0.5 for i in range(192)], + standard_name="latitude", + bounds=[[i, i + 1.0] for i in range(192)], + units="degrees_north", coord_system=coord_sys, ) coords_spec = [(time, 0), (lats, 1), (lons, 2)] simple_cube = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) # add a cell measure - simple_cube = guess_bounds(simple_cube, ['longitude', 'latitude']) + simple_cube = guess_bounds(simple_cube, ["longitude", "latitude"]) grid_areas = iris.analysis.cartography.area_weights(simple_cube) measure = iris.coords.CellMeasure( - grid_areas, - standard_name='cell_area', - units='m2', - measure='area') + grid_areas, standard_name="cell_area", units="m2", measure="area" + ) simple_cube.add_cell_measure(measure, range(0, measure.ndim)) # add ancillary variable ancillary_var = iris.coords.AncillaryVariable( simple_cube.data, - standard_name='land_ice_area_fraction', - var_name='sftgif', - units='%') - simple_cube.add_ancillary_variable(ancillary_var, - range(0, simple_cube.ndim)) + standard_name="land_ice_area_fraction", + var_name="sftgif", + units="%", + ) + simple_cube.add_ancillary_variable( + ancillary_var, range(0, simple_cube.ndim) + ) return simple_cube @@ -66,20 +68,24 @@ def test_extract_region_cell_ancil(): # intersection cube loses cellmeas/ancillary variables # under normal (unpatched) conditions of extract_region - ex1 = extract_region(cube, - start_longitude=-90, - end_longitude=40, - start_latitude=20, - end_latitude=80) + ex1 = extract_region( + cube, + start_longitude=-90, + end_longitude=40, + start_latitude=20, + end_latitude=80, + ) # intersection cube doesn't lose cellmeas/ancillary variables # under normal (unpatched) conditions of extract_region # so duplication must be avoided - ex2 = extract_region(cube, - start_longitude=160, - end_longitude=280, - start_latitude=-5, - end_latitude=5) + ex2 = extract_region( + cube, + start_longitude=160, + end_longitude=280, + start_latitude=-5, + end_latitude=5, + ) expected_cm = cube.cell_measures()[0] result_cm = ex1.cell_measures() diff --git a/tests/integration/preprocessor/_io/test_concatenate.py b/tests/integration/preprocessor/_io/test_concatenate.py index d8d5d680c1..1fef5f9693 100644 --- a/tests/integration/preprocessor/_io/test_concatenate.py +++ b/tests/integration/preprocessor/_io/test_concatenate.py @@ -11,25 +11,31 @@ from esmvalcore.cmor.check import CheckLevels from esmvalcore.preprocessor import _io +from tests import assert_array_equal def get_hybrid_pressure_cube(): """Return cube with hybrid pressure coordinate.""" - ap_coord = AuxCoord([1.0], bounds=[[0.0, 2.0]], var_name='ap', units='Pa') - b_coord = AuxCoord([0.0], - bounds=[[-0.5, 1.5]], - var_name='b', - units=Unit('1')) - ps_coord = AuxCoord([[[100000]]], var_name='ps', units='Pa') + ap_coord = AuxCoord([1.0], bounds=[[0.0, 2.0]], var_name="ap", units="Pa") + b_coord = AuxCoord( + [0.0], bounds=[[-0.5, 1.5]], var_name="b", units=Unit("1") + ) + ps_coord = AuxCoord([[[100000]]], var_name="ps", units="Pa") x_coord = AuxCoord( 0.0, - var_name='x', - standard_name='atmosphere_hybrid_sigma_pressure_coordinate', + var_name="x", + standard_name="atmosphere_hybrid_sigma_pressure_coordinate", + ) + cube = Cube( + [[[[0.0]]]], + var_name="x", + aux_coords_and_dims=[ + (ap_coord, 1), + (b_coord, 1), + (ps_coord, (0, 2, 3)), + (x_coord, ()), + ], ) - cube = Cube([[[[0.0]]]], - var_name='x', - aux_coords_and_dims=[(ap_coord, 1), (b_coord, 1), - (ps_coord, (0, 2, 3)), (x_coord, ())]) return cube @@ -42,9 +48,9 @@ def 
get_hybrid_pressure_cube_list(): cubes = CubeList([cube_0, cube_1]) for cube in cubes: aux_factory = HybridPressureFactory( - delta=cube.coord(var_name='ap'), - sigma=cube.coord(var_name='b'), - surface_air_pressure=cube.coord(var_name='ps'), + delta=cube.coord(var_name="ap"), + sigma=cube.coord(var_name="b"), + surface_air_pressure=cube.coord(var_name="ps"), ) cube.add_aux_factory(aux_factory) return cubes @@ -52,10 +58,12 @@ def get_hybrid_pressure_cube_list(): def get_time_coord(time_point): """Time coordinate.""" - return DimCoord([time_point], - var_name='time', - standard_name='time', - units='days since 6453-2-1') + return DimCoord( + [time_point], + var_name="time", + standard_name="time", + units="days since 6453-2-1", + ) @pytest.fixture @@ -73,25 +81,28 @@ def real_hybrid_pressure_cube_list(): def test_concatenation_with_aux_factory(real_hybrid_pressure_cube_list): """Test actual concatenation of a cube with a derived coordinate.""" concatenated = _io.concatenate(real_hybrid_pressure_cube_list) - air_pressure_coord = concatenated.coord('air_pressure') + air_pressure_coord = concatenated.coord("air_pressure") expected_coord = AuxCoord( [[[[1.0]]], [[[1.0]]]], bounds=[[[[[-50000.0, 150002.0]]]], [[[[-50000.0, 150002.0]]]]], - standard_name='air_pressure', - units='Pa', + standard_name="air_pressure", + units="Pa", ) assert air_pressure_coord == expected_coord -@pytest.mark.parametrize('check_level', - [CheckLevels.RELAXED, CheckLevels.IGNORE]) +@pytest.mark.parametrize( + "check_level", [CheckLevels.RELAXED, CheckLevels.IGNORE] +) def test_relax_concatenation(check_level, caplog): - caplog.set_level('DEBUG') + caplog.set_level("DEBUG") cubes = get_hybrid_pressure_cube_list() _io.concatenate(cubes, check_level) - msg = ('Concatenation will be performed without checking ' - 'auxiliary coordinates, cell measures, ancillaries ' - 'and derived coordinates present in the cubes.') + msg = ( + "Concatenation will be performed without checking " + "auxiliary coordinates, cell measures, ancillaries " + "and derived coordinates present in the cubes." 
+ ) assert msg in caplog.text @@ -100,26 +111,32 @@ class TestConcatenate(unittest.TestCase): def setUp(self): """Start tests.""" - self._model_coord = DimCoord([1., 2.], - var_name='time', - standard_name='time', - units='days since 1950-01-01') + self._model_coord = DimCoord( + [1.0, 2.0], + var_name="time", + standard_name="time", + units="days since 1950-01-01", + ) self.raw_cubes = [] - self._add_cube([1., 2.], [1., 2.]) - self._add_cube([3., 4.], [3., 4.]) - self._add_cube([5., 6.], [5., 6.]) + self._add_cube([1.0, 2.0], [1.0, 2.0]) + self._add_cube([3.0, 4.0], [3.0, 4.0]) + self._add_cube([5.0, 6.0], [5.0, 6.0]) def _add_cube(self, data, coord): self.raw_cubes.append( - Cube(data, - var_name='sample', - dim_coords_and_dims=((self._model_coord.copy(coord), 0), ))) + Cube( + data, + var_name="sample", + dim_coords_and_dims=((self._model_coord.copy(coord), 0),), + ) + ) def test_concatenate(self): """Test concatenation of two cubes.""" concatenated = _io.concatenate(self.raw_cubes) np.testing.assert_array_equal( - concatenated.coord('time').points, np.array([1, 2, 3, 4, 5, 6])) + concatenated.coord("time").points, np.array([1, 2, 3, 4, 5, 6]) + ) def test_concatenate_empty_cubes(self): """Test concatenation with empty :class:`iris.cube.CubeList`.""" @@ -131,36 +148,49 @@ def test_concatenate_noop(self): """Test concatenation of a single cube.""" concatenated = _io.concatenate([self.raw_cubes[0]]) np.testing.assert_array_equal( - concatenated.coord('time').points, np.array([1, 2])) + concatenated.coord("time").points, np.array([1, 2]) + ) - def test_concatenate_with_overlap(self, ): + def test_concatenate_with_overlap( + self, + ): """Test concatenation of time overalapping cubes.""" - self._add_cube([6.5, 7.5], [6., 7.]) + self._add_cube([6.5, 7.5], [6.0, 7.0]) concatenated = _io.concatenate(self.raw_cubes) np.testing.assert_array_equal( - concatenated.coord('time').points, - np.array([1., 2., 3., 4., 5., 6., 7.])) - np.testing.assert_array_equal(concatenated.data, - np.array([1., 2., 3., 4., 5., 6.5, 7.5])) + concatenated.coord("time").points, + np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]), + ) + np.testing.assert_array_equal( + concatenated.data, np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.5, 7.5]) + ) def test_concatenate_with_overlap_2(self): """Test a more generic case.""" - self._add_cube([65., 75., 100.], [9., 10., 11.]) - self._add_cube([65., 75., 100.], [7., 8., 9.]) + self._add_cube([65.0, 75.0, 100.0], [9.0, 10.0, 11.0]) + self._add_cube([65.0, 75.0, 100.0], [7.0, 8.0, 9.0]) concatenated = _io.concatenate(self.raw_cubes) np.testing.assert_array_equal( - concatenated.coord('time').points, - np.array([1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11.])) + concatenated.coord("time").points, + np.array( + [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0] + ), + ) def test_concatenate_with_overlap_3(self): """Test a more generic case.""" - self._add_cube([65., 75., 100.], [9., 10., 11.]) - self._add_cube([65., 75., 100., 100., 100., 112.], - [7., 8., 9., 10., 11., 12.]) + self._add_cube([65.0, 75.0, 100.0], [9.0, 10.0, 11.0]) + self._add_cube( + [65.0, 75.0, 100.0, 100.0, 100.0, 112.0], + [7.0, 8.0, 9.0, 10.0, 11.0, 12.0], + ) concatenated = _io.concatenate(self.raw_cubes) np.testing.assert_array_equal( - concatenated.coord('time').points, - np.array([1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12.])) + concatenated.coord("time").points, + np.array( + [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0] + ), + ) def test_concatenate_with_overlap_same_start(self): 
"""Test a more generic case.""" @@ -168,141 +198,214 @@ def test_concatenate_with_overlap_same_start(self): raw_cubes = [ cube1, ] - time_coord = DimCoord([1., 7.], - var_name='time', - standard_name='time', - units='days since 1950-01-01') + time_coord = DimCoord( + [1.0, 7.0], + var_name="time", + standard_name="time", + units="days since 1950-01-01", + ) raw_cubes.append( - Cube([33., 55.], - var_name='sample', - dim_coords_and_dims=((time_coord, 0), ))) + Cube( + [33.0, 55.0], + var_name="sample", + dim_coords_and_dims=((time_coord, 0),), + ) + ) concatenated = _io.concatenate(raw_cubes) np.testing.assert_array_equal( - concatenated.coord('time').points, np.array([1., 7.])) + concatenated.coord("time").points, np.array([1.0, 7.0]) + ) raw_cubes.reverse() concatenated = _io.concatenate(raw_cubes) np.testing.assert_array_equal( - concatenated.coord('time').points, np.array([1., 7.])) + concatenated.coord("time").points, np.array([1.0, 7.0]) + ) def test_concatenate_with_iris_exception(self): """Test a more generic case.""" - time_coord_1 = DimCoord([1.5, 5., 7.], - var_name='time', - standard_name='time', - units='days since 1950-01-01') - cube1 = Cube([33., 55., 77.], - var_name='sample', - dim_coords_and_dims=((time_coord_1, 0), )) - time_coord_2 = DimCoord([1., 5., 7.], - var_name='time', - standard_name='time', - units='days since 1950-01-01') - cube2 = Cube([33., 55., 77.], - var_name='sample', - dim_coords_and_dims=((time_coord_2, 0), )) + time_coord_1 = DimCoord( + [1.5, 5.0, 7.0], + var_name="time", + standard_name="time", + units="days since 1950-01-01", + ) + cube1 = Cube( + [33.0, 55.0, 77.0], + var_name="sample", + dim_coords_and_dims=((time_coord_1, 0),), + ) + time_coord_2 = DimCoord( + [1.0, 5.0, 7.0], + var_name="time", + standard_name="time", + units="days since 1950-01-01", + ) + cube2 = Cube( + [33.0, 55.0, 77.0], + var_name="sample", + dim_coords_and_dims=((time_coord_2, 0),), + ) cubes_single_ovlp = [cube2, cube1] cubess = _io.concatenate(cubes_single_ovlp) # this tests the scalar to vector cube conversion too time_points = cubess.coord("time").core_points() - np.testing.assert_array_equal(time_points, [1., 1.5, 5., 7.]) + np.testing.assert_array_equal(time_points, [1.0, 1.5, 5.0, 7.0]) def test_concatenate_no_time_coords(self): """Test a more generic case.""" - time_coord_1 = DimCoord([1.5, 5., 7.], - var_name='time', - standard_name='time', - units='days since 1950-01-01') - cube1 = Cube([33., 55., 77.], - var_name='sample', - dim_coords_and_dims=((time_coord_1, 0), )) - ap_coord_2 = DimCoord([1., 5., 7.], - var_name='air_pressure', - standard_name='air_pressure', - units='m', - attributes={'positive': 'down'}) - cube2 = Cube([33., 55., 77.], - var_name='sample', - dim_coords_and_dims=((ap_coord_2, 0), )) + time_coord_1 = DimCoord( + [1.5, 5.0, 7.0], + var_name="time", + standard_name="time", + units="days since 1950-01-01", + ) + cube1 = Cube( + [33.0, 55.0, 77.0], + var_name="sample", + dim_coords_and_dims=((time_coord_1, 0),), + ) + ap_coord_2 = DimCoord( + [1.0, 5.0, 7.0], + var_name="air_pressure", + standard_name="air_pressure", + units="m", + attributes={"positive": "down"}, + ) + cube2 = Cube( + [33.0, 55.0, 77.0], + var_name="sample", + dim_coords_and_dims=((ap_coord_2, 0),), + ) with self.assertRaises(ValueError): _io.concatenate([cube1, cube2]) def test_concatenate_with_order(self): """Test a more generic case.""" - time_coord_1 = DimCoord([1.5, 2., 5., 7.], - var_name='time', - standard_name='time', - units='days since 1950-01-01') - cube1 = Cube([33., 
44., 55., 77.], - var_name='sample', - dim_coords_and_dims=((time_coord_1, 0), )) - time_coord_2 = DimCoord([1., 2., 5., 7., 100.], - var_name='time', - standard_name='time', - units='days since 1950-01-01') - cube2 = Cube([33., 44., 55., 77., 1000.], - var_name='sample', - dim_coords_and_dims=((time_coord_2, 0), )) + time_coord_1 = DimCoord( + [1.5, 2.0, 5.0, 7.0], + var_name="time", + standard_name="time", + units="days since 1950-01-01", + ) + cube1 = Cube( + [33.0, 44.0, 55.0, 77.0], + var_name="sample", + dim_coords_and_dims=((time_coord_1, 0),), + ) + time_coord_2 = DimCoord( + [1.0, 2.0, 5.0, 7.0, 100.0], + var_name="time", + standard_name="time", + units="days since 1950-01-01", + ) + cube2 = Cube( + [33.0, 44.0, 55.0, 77.0, 1000.0], + var_name="sample", + dim_coords_and_dims=((time_coord_2, 0),), + ) cubes_ordered = [cube2, cube1] concatenated = _io.concatenate(cubes_ordered) np.testing.assert_array_equal( - concatenated.coord('time').points, np.array([1., 2., 5., 7., - 100.])) + concatenated.coord("time").points, + np.array([1.0, 2.0, 5.0, 7.0, 100.0]), + ) cubes_reverse = [cube1, cube2] concatenated = _io.concatenate(cubes_reverse) np.testing.assert_array_equal( - concatenated.coord('time').points, np.array([1., 2., 5., 7., - 100.])) + concatenated.coord("time").points, + np.array([1.0, 2.0, 5.0, 7.0, 100.0]), + ) + + def test_concatenate_by_experiment_first(self): + """Test that data from experiments does not get mixed.""" + historical_1 = Cube( + np.zeros(2), + dim_coords_and_dims=( + [ + DimCoord( + np.arange(2), + var_name="time", + standard_name="time", + units="days since 1950-01-01", + ), + 0, + ], + ), + attributes={"experiment_id": "historical"}, + ) + historical_2 = historical_1.copy() + historical_2.coord("time").points = np.arange(2, 4) + historical_3 = historical_1.copy() + historical_3.coord("time").points = np.arange(4, 6) + ssp585_1 = historical_1.copy(np.ones(2)) + ssp585_1.coord("time").points = np.arange(3, 5) + ssp585_1.attributes["experiment_id"] = "ssp585" + ssp585_2 = ssp585_1.copy() + ssp585_2.coord("time").points = np.arange(5, 7) + result = _io.concatenate( + [historical_1, historical_2, historical_3, ssp585_1, ssp585_2] + ) + assert_array_equal(result.coord("time").points, np.arange(7)) + assert_array_equal(result.data, np.array([0, 0, 0, 1, 1, 1, 1])) def test_concatenate_differing_attributes(self): """Test concatenation of cubes with different attributes.""" cubes = CubeList(self.raw_cubes) - for (idx, cube) in enumerate(cubes): + for idx, cube in enumerate(cubes): cube.attributes = { - 'equal_attr': 1, - 'different_attr': 3 - idx, + "equal_attr": 1, + "different_attr": 3 - idx, } concatenated = _io.concatenate(cubes) np.testing.assert_array_equal( - concatenated.coord('time').points, np.array([1, 2, 3, 4, 5, 6])) + concatenated.coord("time").points, np.array([1, 2, 3, 4, 5, 6]) + ) self.assertEqual( concatenated.attributes, - { - 'equal_attr': 1, - 'different_attr': '1 2 3' - }, + {"equal_attr": 1, "different_attr": "1 2 3"}, ) def test_convert_calendar_concatenate_with_overlap(self): """Test compatible calendars get converted.""" - time_coord = DimCoord([4., 5.], - var_name='time', - standard_name='time', - units=Unit('days since 1950-01-01', - calendar='proleptic_gregorian')) + time_coord = DimCoord( + [4.0, 5.0], + var_name="time", + standard_name="time", + units=Unit( + "days since 1950-01-01", calendar="proleptic_gregorian" + ), + ) self.raw_cubes.append( - Cube([33., 55.], - var_name='sample', - dim_coords_and_dims=((time_coord, 0), ))) + 
Cube( + [33.0, 55.0], + var_name="sample", + dim_coords_and_dims=((time_coord, 0),), + ) + ) concatenated = _io.concatenate(self.raw_cubes) - assert concatenated.coord('time').units.calendar == 'standard' + assert concatenated.coord("time").units.calendar == "standard" def test_fail_on_calendar_concatenate_with_overlap(self): """Test fail of concatenation with overlap.""" - time_coord = DimCoord([3., 7000.], - var_name='time', - standard_name='time', - units=Unit('days since 1950-01-01', - calendar='360_day')) + time_coord = DimCoord( + [3.0, 7000.0], + var_name="time", + standard_name="time", + units=Unit("days since 1950-01-01", calendar="360_day"), + ) self.raw_cubes.append( - Cube([33., 55.], - var_name='sample', - dim_coords_and_dims=((time_coord, 0), ))) + Cube( + [33.0, 55.0], + var_name="sample", + dim_coords_and_dims=((time_coord, 0),), + ) + ) with self.assertRaises(TypeError): _io.concatenate(self.raw_cubes) def test_fail_metadata_differs(self): """Test exception raised if two cubes have different metadata.""" - self.raw_cubes[0].units = 'm' - self.raw_cubes[1].units = 'K' + self.raw_cubes[0].units = "m" + self.raw_cubes[1].units = "K" with self.assertRaises(ValueError): _io.concatenate(self.raw_cubes) diff --git a/tests/integration/preprocessor/_io/test_load.py b/tests/integration/preprocessor/_io/test_load.py index c0ad0bba88..1df7c3bb55 100644 --- a/tests/integration/preprocessor/_io/test_load.py +++ b/tests/integration/preprocessor/_io/test_load.py @@ -14,8 +14,8 @@ def _create_sample_cube(): - coord = DimCoord([1, 2], standard_name='latitude', units='degrees_north') - cube = Cube([1, 2], var_name='sample', dim_coords_and_dims=((coord, 0), )) + coord = DimCoord([1, 2], standard_name="latitude", units="degrees_north") + cube = Cube([1, 2], var_name="sample", dim_coords_and_dims=((coord, 0),)) return cube @@ -32,7 +32,7 @@ def tearDown(self): os.remove(temp_file) def _save_cube(self, cube): - descriptor, temp_file = tempfile.mkstemp('.nc') + descriptor, temp_file = tempfile.mkstemp(".nc") os.close(descriptor) iris.save(cube, temp_file) self.temp_files.append(temp_file) @@ -46,14 +46,15 @@ def test_load(self): cubes = load(temp_file) cube = cubes[0] self.assertEqual(1, len(cubes)) - self.assertEqual(temp_file, cube.attributes['source_file']) + self.assertEqual(temp_file, cube.attributes["source_file"]) self.assertTrue((cube.data == np.array([1, 2])).all()) - self.assertTrue((cube.coord('latitude').points == np.array([1, - 2])).all()) + self.assertTrue( + (cube.coord("latitude").points == np.array([1, 2])).all() + ) def test_callback_remove_attributes(self): """Test callback remove unwanted attributes.""" - attributes = ('history', 'creation_date', 'tracking_id', 'comment') + attributes = ("history", "creation_date", "tracking_id", "comment") for _ in range(2): cube = _create_sample_cube() for attr in attributes: @@ -65,13 +66,14 @@ def test_callback_remove_attributes(self): self.assertEqual(1, len(cubes)) self.assertTrue((cube.data == np.array([1, 2])).all()) self.assertTrue( - (cube.coord('latitude').points == np.array([1, 2])).all()) + (cube.coord("latitude").points == np.array([1, 2])).all() + ) for attr in attributes: self.assertTrue(attr not in cube.attributes) def test_callback_remove_attributes_from_coords(self): """Test callback remove unwanted attributes from coords.""" - attributes = ('history', ) + attributes = ("history",) for _ in range(2): cube = _create_sample_cube() for coord in cube.coords(): @@ -84,7 +86,8 @@ def 
test_callback_remove_attributes_from_coords(self): self.assertEqual(1, len(cubes)) self.assertTrue((cube.data == np.array([1, 2])).all()) self.assertTrue( - (cube.coord('latitude').points == np.array([1, 2])).all()) + (cube.coord("latitude").points == np.array([1, 2])).all() + ) for coord in cube.coords(): for attr in attributes: self.assertTrue(attr not in cube.attributes) @@ -98,50 +101,52 @@ def test_callback_fix_lat_units(self): cube = cubes[0] self.assertEqual(1, len(cubes)) self.assertTrue((cube.data == np.array([1, 2])).all()) - self.assertTrue((cube.coord('latitude').points == np.array([1, - 2])).all()) - self.assertEqual(cube.coord('latitude').units, 'degrees_north') + self.assertTrue( + (cube.coord("latitude").points == np.array([1, 2])).all() + ) + self.assertEqual(cube.coord("latitude").units, "degrees_north") - @unittest.mock.patch('iris.load_raw', autospec=True) + @unittest.mock.patch("iris.load_raw", autospec=True) def test_fail_empty_cubes(self, mock_load_raw): """Test that ValueError is raised when cubes are empty.""" mock_load_raw.return_value = CubeList([]) msg = "Can not load cubes from myfilename" with self.assertRaises(ValueError, msg=msg): - load('myfilename') + load("myfilename") @staticmethod def load_with_warning(*_, **__): """Mock load with a warning.""" - warnings.warn("This is a custom expected warning", - category=UserWarning) + warnings.warn( + "This is a custom expected warning", category=UserWarning + ) return CubeList([Cube(0)]) - @unittest.mock.patch('iris.load_raw', autospec=True) + @unittest.mock.patch("iris.load_raw", autospec=True) def test_do_not_ignore_warnings(self, mock_load_raw): """Test do not ignore specific warnings.""" mock_load_raw.side_effect = self.load_with_warning - ignore_warnings = [{'message': "non-relevant warning"}] + ignore_warnings = [{"message": "non-relevant warning"}] # Warning is not ignored -> assert warning has been issued with self.assertWarns(UserWarning): - cubes = load('myfilename', ignore_warnings=ignore_warnings) + cubes = load("myfilename", ignore_warnings=ignore_warnings) # Check output self.assertEqual(len(cubes), 1) - self.assertEqual(cubes[0].attributes, {'source_file': 'myfilename'}) + self.assertEqual(cubes[0].attributes, {"source_file": "myfilename"}) - @unittest.mock.patch('iris.load_raw', autospec=True) + @unittest.mock.patch("iris.load_raw", autospec=True) def test_ignore_warnings(self, mock_load_raw): """Test ignore specific warnings.""" mock_load_raw.side_effect = self.load_with_warning - ignore_warnings = [{'message': "This is a custom expected warning"}] + ignore_warnings = [{"message": "This is a custom expected warning"}] # Warning is ignored -> assert warning has not been issued with self.assertRaises(AssertionError): with self.assertWarns(UserWarning): - cubes = load('myfilename', ignore_warnings=ignore_warnings) + cubes = load("myfilename", ignore_warnings=ignore_warnings) # Check output self.assertEqual(len(cubes), 1) - self.assertEqual(cubes[0].attributes, {'source_file': 'myfilename'}) + self.assertEqual(cubes[0].attributes, {"source_file": "myfilename"}) diff --git a/tests/integration/preprocessor/_io/test_save.py b/tests/integration/preprocessor/_io/test_save.py index 3d8b127703..0bfd0c6958 100644 --- a/tests/integration/preprocessor/_io/test_save.py +++ b/tests/integration/preprocessor/_io/test_save.py @@ -1,4 +1,5 @@ """Integration tests for :func:`esmvalcore.preprocessor.save`""" + import iris import netCDF4 import numpy as np @@ -11,31 +12,31 @@ @pytest.fixture def filename(tmp_path): - 
return tmp_path / 'test.nc' + return tmp_path / "test.nc" @pytest.fixture def cube(): lat = DimCoord( np.asarray([1, 2], np.single), - standard_name='latitude', - units='degrees_north', + standard_name="latitude", + units="degrees_north", ) lon = DimCoord( np.asarray([1, 2], np.single), - standard_name='longitude', - units='degrees_east', + standard_name="longitude", + units="degrees_east", ) time = DimCoord( np.asarray([1, 2], np.single), - standard_name='time', - units='days since 2000-1-1', + standard_name="time", + units="days since 2000-1-1", ) cube = Cube( np.random.random_sample([2, 2, 2]), - var_name='sample', - units='1', + var_name="sample", + units="1", dim_coords_and_dims=((lat, 0), (lon, 1), (time, 2)), ) @@ -45,13 +46,14 @@ def cube(): def _compare_cubes(cube, loaded_cube): np.testing.assert_equal(cube.data, loaded_cube.data) for coord in cube.coords(): - np.testing.assert_equal(coord.points, - loaded_cube.coord(coord.name()).points) + np.testing.assert_equal( + coord.points, loaded_cube.coord(coord.name()).points + ) def _check_chunks(path, expected_chunks): - with netCDF4.Dataset(path, 'r') as handler: - chunking = handler.variables['sample'].chunking() + with netCDF4.Dataset(path, "r") as handler: + chunking = handler.variables["sample"].chunking() assert expected_chunks == chunking @@ -63,7 +65,7 @@ def test_save(cube, filename): def test_save_create_parent_dir(cube, tmp_path): - filename = tmp_path / 'preproc' / 'something' / 'test.nc' + filename = tmp_path / "preproc" / "something" / "test.nc" path = save([cube], filename) loaded_cube = iris.load_cube(path) _compare_cubes(cube, loaded_cube) @@ -71,10 +73,10 @@ def test_save_create_parent_dir(cube, tmp_path): def test_save_alias(cube, filename): """Test save.""" - path = save([cube], filename, alias='alias') + path = save([cube], filename, alias="alias") loaded_cube = iris.load_cube(path) _compare_cubes(cube, loaded_cube) - assert loaded_cube.var_name == 'alias' + assert loaded_cube.var_name == "alias" def test_save_zlib(cube, filename): @@ -82,11 +84,11 @@ def test_save_zlib(cube, filename): path = save([cube], filename, compress=True) loaded_cube = iris.load_cube(path) _compare_cubes(cube, loaded_cube) - with netCDF4.Dataset(path, 'r') as handler: - sample_filters = handler.variables['sample'].filters() - assert sample_filters['zlib'] is True - assert sample_filters['shuffle'] is True - assert sample_filters['complevel'] == 4 + with netCDF4.Dataset(path, "r") as handler: + sample_filters = handler.variables["sample"].filters() + assert sample_filters["zlib"] is True + assert sample_filters["shuffle"] is True + assert sample_filters["complevel"] == 4 def test_fail_empty_cubes(filename): @@ -104,7 +106,7 @@ def test_fail_without_filename(cube): def test_save_optimized_map(cube, filename): """Test save.""" - path = save([cube], filename, optimize_access='map') + path = save([cube], filename, optimize_access="map") loaded_cube = iris.load_cube(path) _compare_cubes(cube, loaded_cube) _check_chunks(path, [2, 2, 1]) @@ -112,7 +114,7 @@ def test_save_optimized_map(cube, filename): def test_save_optimized_timeseries(cube, filename): """Test save.""" - path = save([cube], filename, optimize_access='timeseries') + path = save([cube], filename, optimize_access="timeseries") loaded_cube = iris.load_cube(path) _compare_cubes(cube, loaded_cube) _check_chunks(path, [1, 1, 2]) @@ -120,7 +122,7 @@ def test_save_optimized_timeseries(cube, filename): def test_save_optimized_lat(cube, filename): """Test save.""" - path = save([cube], 
filename, optimize_access='latitude') + path = save([cube], filename, optimize_access="latitude") loaded_cube = iris.load_cube(path) _compare_cubes(cube, loaded_cube) expected_chunks = [2, 1, 1] @@ -129,7 +131,7 @@ def test_save_optimized_lat(cube, filename): def test_save_optimized_lon_time(cube, filename): """Test save.""" - path = save([cube], filename, optimize_access='longitude time') + path = save([cube], filename, optimize_access="longitude time") loaded_cube = iris.load_cube(path) _compare_cubes(cube, loaded_cube) _check_chunks(path, [1, 2, 2]) diff --git a/tests/integration/preprocessor/_mask/test_mask.py b/tests/integration/preprocessor/_mask/test_mask.py index 4e5e51167b..253864f385 100644 --- a/tests/integration/preprocessor/_mask/test_mask.py +++ b/tests/integration/preprocessor/_mask/test_mask.py @@ -2,12 +2,15 @@ Integration tests for the :func:`esmvalcore.preprocessor._mask` module. """ + from pathlib import Path +import dask.array as da import iris import iris.fileformats import numpy as np import pytest +from iris.coords import AuxCoord from esmvalcore.preprocessor import ( PreprocessorFile, @@ -26,93 +29,109 @@ class Test: def setUp(self): """Assemble a stock cube.""" fx_data = np.empty((3, 3)) - fx_data[:] = 60. - fx_data[1, 2] = 30. + fx_data[:] = 60.0 + fx_data[1, 2] = 30.0 self.new_cube_data = np.empty((2, 3, 3)) - self.new_cube_data[:] = 200. + self.new_cube_data[:] = 200.0 crd_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - self.lons = iris.coords.DimCoord([0, 1.5, 3], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_east', - coord_system=crd_sys) - self.lats = iris.coords.DimCoord([0, 1.5, 3], - standard_name='latitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_north', - coord_system=crd_sys) - self.zcoord = iris.coords.DimCoord([0.5, 5.], - long_name='zcoord', - bounds=[[0., 2.5], [2.5, 25.]], - units='m', - attributes={'positive': 'down'}) - self.times = iris.coords.DimCoord([0, 1.5, 2.5, 3.5], - standard_name='time', - bounds=[[0, 1], [1, 2], [2, 3], - [3, 4]], - units='hours') - self.time2 = iris.coords.DimCoord([0, 1.5, 2.5], - standard_name='time', - bounds=[[0, 1], [1, 2], [2, 3]], - units='hours') + self.lons = iris.coords.DimCoord( + [0, 1.5, 3], + standard_name="longitude", + bounds=[[0, 1], [1, 2], [2, 3]], + units="degrees_east", + coord_system=crd_sys, + ) + self.lats = iris.coords.DimCoord( + [0, 1.5, 3], + standard_name="latitude", + bounds=[[0, 1], [1, 2], [2, 3]], + units="degrees_north", + coord_system=crd_sys, + ) + self.zcoord = iris.coords.DimCoord( + [0.5, 5.0], + long_name="zcoord", + bounds=[[0.0, 2.5], [2.5, 25.0]], + units="m", + attributes={"positive": "down"}, + ) + self.times = iris.coords.DimCoord( + [0, 1.5, 2.5, 3.5], + standard_name="time", + bounds=[[0, 1], [1, 2], [2, 3], [3, 4]], + units="hours", + ) + self.time2 = iris.coords.DimCoord( + [0, 1.5, 2.5], + standard_name="time", + bounds=[[0, 1], [1, 2], [2, 3]], + units="hours", + ) self.fx_coords_spec = [(self.lats, 0), (self.lons, 1)] - self.cube_coords_spec = [(self.zcoord, 0), - (self.lats, 1), (self.lons, 2)] - self.fx_mask = iris.cube.Cube(fx_data, - dim_coords_and_dims=self.fx_coords_spec, - units='%') + self.cube_coords_spec = [ + (self.zcoord, 0), + (self.lats, 1), + (self.lons, 2), + ] + self.fx_mask = iris.cube.Cube( + fx_data, dim_coords_and_dims=self.fx_coords_spec, units="%" + ) self.mock_data = np.ma.empty((4, 3, 3)) - self.mock_data[:] = 10. 
+ self.mock_data[:] = 10.0 - def test_components_fx_var(self): + @pytest.mark.parametrize("lazy_fx", [True, False]) + @pytest.mark.parametrize("lazy", [True, False]) + def test_components_fx_var(self, lazy, lazy_fx): """Test compatibility of ancillary variables.""" - self.fx_mask.var_name = 'sftlf' - self.fx_mask.standard_name = 'land_area_fraction' + if lazy: + cube_data = da.array(self.new_cube_data) + else: + cube_data = self.new_cube_data + fx_cube = self.fx_mask.copy() + if lazy_fx: + fx_cube.data = fx_cube.lazy_data() + + # mask_landsea + fx_cube.var_name = "sftlf" + fx_cube.standard_name = "land_area_fraction" new_cube_land = iris.cube.Cube( - self.new_cube_data, - dim_coords_and_dims=self.cube_coords_spec - ) - new_cube_land = add_supplementary_variables( - new_cube_land, - [self.fx_mask], - ) - result_land = mask_landsea( - new_cube_land, - 'land', + cube_data, dim_coords_and_dims=self.cube_coords_spec ) + new_cube_land = add_supplementary_variables(new_cube_land, [fx_cube]) + result_land = mask_landsea(new_cube_land, "land") assert isinstance(result_land, iris.cube.Cube) + assert result_land.has_lazy_data() is (lazy or lazy_fx) - self.fx_mask.var_name = 'sftgif' - self.fx_mask.standard_name = 'land_ice_area_fraction' + # mask_landseaice + fx_cube.var_name = "sftgif" + fx_cube.standard_name = "land_ice_area_fraction" new_cube_ice = iris.cube.Cube( - self.new_cube_data, - dim_coords_and_dims=self.cube_coords_spec - ) - new_cube_ice = add_supplementary_variables( - new_cube_ice, - [self.fx_mask], - ) - result_ice = mask_landseaice( - new_cube_ice, - 'ice', + cube_data, dim_coords_and_dims=self.cube_coords_spec ) + new_cube_ice = add_supplementary_variables(new_cube_ice, [fx_cube]) + result_ice = mask_landseaice(new_cube_ice, "ice") assert isinstance(result_ice, iris.cube.Cube) + assert result_ice.has_lazy_data() is (lazy or lazy_fx) - def test_mask_landsea(self): + @pytest.mark.parametrize("lazy", [True, False]) + def test_mask_landsea(self, lazy): """Test mask_landsea func.""" - self.fx_mask.var_name = 'sftlf' - self.fx_mask.standard_name = 'land_area_fraction' + if lazy: + cube_data = da.array(self.new_cube_data) + else: + cube_data = self.new_cube_data + + self.fx_mask.var_name = "sftlf" + self.fx_mask.standard_name = "land_area_fraction" new_cube_land = iris.cube.Cube( - self.new_cube_data, - dim_coords_and_dims=self.cube_coords_spec + cube_data, dim_coords_and_dims=self.cube_coords_spec ) new_cube_land = add_supplementary_variables( new_cube_land, [self.fx_mask], ) new_cube_sea = iris.cube.Cube( - self.new_cube_data, - dim_coords_and_dims=self.cube_coords_spec + cube_data, dim_coords_and_dims=self.cube_coords_spec ) new_cube_sea = add_supplementary_variables( new_cube_sea, @@ -122,20 +141,22 @@ def test_mask_landsea(self): # mask with fx files result_land = mask_landsea( new_cube_land, - 'land', + "land", ) result_sea = mask_landsea( new_cube_sea, - 'sea', + "sea", ) + assert result_land.has_lazy_data() is lazy + assert result_sea.has_lazy_data() is lazy expected = np.ma.empty((2, 3, 3)) - expected.data[:] = 200. 
+        expected.data[:] = 200.0
         expected.mask = np.ones((2, 3, 3), bool)
         expected.mask[:, 1, 2] = False
         # set fillvalues so we are sure they are equal
-        np.ma.set_fill_value(result_land.data, 1e+20)
-        np.ma.set_fill_value(result_sea.data, 1e+20)
-        np.ma.set_fill_value(expected, 1e+20)
+        np.ma.set_fill_value(result_land.data, 1e20)
+        np.ma.set_fill_value(result_sea.data, 1e20)
+        np.ma.set_fill_value(expected, 1e20)
         assert_array_equal(result_land.data, expected)
         expected.mask = np.zeros((2, 3, 3), bool)
         expected.mask[:, 1, 2] = True
@@ -143,46 +164,127 @@ def test_mask_landsea(self):
 
         # mask with shp files
         new_cube_land = iris.cube.Cube(
-            self.new_cube_data,
-            dim_coords_and_dims=self.cube_coords_spec
+            cube_data, dim_coords_and_dims=self.cube_coords_spec
         )
         new_cube_sea = iris.cube.Cube(
-            self.new_cube_data,
-            dim_coords_and_dims=self.cube_coords_spec
+            cube_data, dim_coords_and_dims=self.cube_coords_spec
         )
-        result_land = mask_landsea(new_cube_land, 'land')
-        result_sea = mask_landsea(new_cube_sea, 'sea')
+        result_land = mask_landsea(new_cube_land, "land")
+        result_sea = mask_landsea(new_cube_sea, "sea")
         # bear in mind all points are in the ocean
-        np.ma.set_fill_value(result_land.data, 1e+20)
-        np.ma.set_fill_value(result_sea.data, 1e+20)
+        assert result_land.has_lazy_data() is lazy
+        assert result_sea.has_lazy_data() is lazy
+        np.ma.set_fill_value(result_land.data, 1e20)
+        np.ma.set_fill_value(result_sea.data, 1e20)
         expected.mask = np.zeros((3, 3), bool)
         assert_array_equal(result_land.data, expected)
         expected.mask = np.ones((3, 3), bool)
         assert_array_equal(result_sea.data, expected)
 
-    def test_mask_landseaice(self):
+    @pytest.mark.parametrize("lazy", [True, False])
+    def test_mask_landsea_transposed_fx(self, lazy):
+        """Test mask_landsea func on a transposed cube with an fx mask."""
+        if lazy:
+            cube_data = da.array(self.new_cube_data)
+        else:
+            cube_data = self.new_cube_data
+        cube = iris.cube.Cube(
+            cube_data, dim_coords_and_dims=self.cube_coords_spec
+        )
+        self.fx_mask.var_name = "sftlf"
+        self.fx_mask.standard_name = "land_area_fraction"
+        cube = add_supplementary_variables(cube, [self.fx_mask])
+        cube.transpose([2, 1, 0])
+
+        result = mask_landsea(cube, "land")
+
+        assert result.has_lazy_data() is lazy
+        expected = np.ma.array(
+            np.full((3, 3, 2), 200.0), mask=np.ones((3, 3, 2), bool)
+        )
+        expected.mask[2, 1, :] = False
+        assert_array_equal(result.data, expected)
+
+    @pytest.mark.parametrize("lazy", [True, False])
+    def test_mask_landsea_transposed_shp(self, lazy):
+        """Test mask_landsea func on a transposed cube with shapefiles."""
+        if lazy:
+            cube_data = da.array(self.new_cube_data)
+        else:
+            cube_data = self.new_cube_data
+        cube = iris.cube.Cube(
+            cube_data, dim_coords_and_dims=self.cube_coords_spec
+        )
+        cube.transpose([2, 1, 0])
+
+        result = mask_landsea(cube, "land")
+
+        assert result.has_lazy_data() is lazy
+        expected = np.ma.array(
+            np.full((3, 3, 2), 200.0), mask=np.zeros((3, 3, 2), bool)
+        )
+        assert_array_equal(result.data, expected)
+
+    def test_mask_landsea_multidim_fail(self):
+        """Test that mask_landsea func fails on irregular grids."""
+        lon_coord = AuxCoord(np.ones((3, 3)), standard_name="longitude")
+        cube = iris.cube.Cube(
+            self.new_cube_data,
+            dim_coords_and_dims=[(self.zcoord, 0), (self.lats, 1)],
+            aux_coords_and_dims=[(lon_coord, (1, 2))],
+        )
+
+        msg = (
+            "Use of shapefiles with irregular grids not yet implemented, "
+            "land-sea mask not applied."
+        )
+        with pytest.raises(ValueError, match=msg):
+            mask_landsea(cube, "land")
+
+    @pytest.mark.parametrize("lazy", [True, False])
+    def test_mask_landseaice(self, lazy):
         """Test mask_landseaice func."""
-        self.fx_mask.var_name = 'sftgif'
-        self.fx_mask.standard_name = 'land_ice_area_fraction'
+        if lazy:
+            cube_data = da.array(self.new_cube_data).rechunk((1, 3, 3))
+        else:
+            cube_data = self.new_cube_data
+
+        self.fx_mask.var_name = "sftgif"
+        self.fx_mask.standard_name = "land_ice_area_fraction"
         new_cube_ice = iris.cube.Cube(
-            self.new_cube_data,
-            dim_coords_and_dims=self.cube_coords_spec
+            cube_data, dim_coords_and_dims=self.cube_coords_spec
        )
         new_cube_ice = add_supplementary_variables(
             new_cube_ice,
             [self.fx_mask],
         )
-        result_ice = mask_landseaice(new_cube_ice, 'ice')
+        result_ice = mask_landseaice(new_cube_ice, "ice")
+        assert result_ice.has_lazy_data() is lazy
+        if lazy:
+            assert result_ice.lazy_data().chunksize == (1, 3, 3)
         expected = np.ma.empty((2, 3, 3))
-        expected.data[:] = 200.
+        expected.data[:] = 200.0
         expected.mask = np.ones((2, 3, 3), bool)
         expected.mask[:, 1, 2] = False
-        np.ma.set_fill_value(result_ice.data, 1e+20)
-        np.ma.set_fill_value(expected, 1e+20)
+        np.ma.set_fill_value(result_ice.data, 1e20)
+        np.ma.set_fill_value(expected, 1e20)
         assert_array_equal(result_ice.data, expected)
 
-    @pytest.mark.parametrize('lazy', [True, False])
+    def test_mask_landseaice_multidim_fail(self):
+        """Test that mask_landseaice func fails on irregular grids."""
+        lon_coord = AuxCoord(np.ones((3, 3)), standard_name="longitude")
+        cube = iris.cube.Cube(
+            self.new_cube_data,
+            dim_coords_and_dims=[(self.zcoord, 0), (self.lats, 1)],
+            aux_coords_and_dims=[(lon_coord, (1, 2))],
+        )
+
+        msg = "Landsea-ice mask could not be found. Stopping."
+        with pytest.raises(ValueError, match=msg):
+            mask_landseaice(cube, "ice")
+
+    @pytest.mark.parametrize("lazy", [True, False])
     def test_mask_fillvalues(self, mocker, lazy):
         """Test the fillvalues mask: func mask_fillvalues."""
         data_1 = data_2 = self.mock_data
@@ -193,8 +295,8 @@ def test_mask_fillvalues(self, mocker, lazy):
         if lazy:
             cube_1.data = cube_1.lazy_data().rechunk((2, None, None))
             cube_2.data = cube_2.lazy_data()
-        filename_1 = 'file1.nc'
-        filename_2 = 'file2.nc'
+        filename_1 = "file1.nc"
+        filename_2 = "file2.nc"
         product_1 = mocker.create_autospec(
             PreprocessorFile,
             spec_set=True,
@@ -209,10 +311,9 @@ def test_mask_fillvalues(self, mocker, lazy):
         )
         product_2.filename = filename_2
         product_2.cubes = [cube_2]
-        results = mask_fillvalues({product_1, product_2},
-                                  0.95,
-                                  min_value=-1.e10,
-                                  time_window=1)
+        results = mask_fillvalues(
+            {product_1, product_2}, 0.95, min_value=-1.0e10, time_window=1
+        )
         result_1, result_2 = None, None
         for product in results:
             if product.filename == filename_1:
@@ -228,7 +329,7 @@ def test_mask_fillvalues(self, mocker, lazy):
         assert_array_equal(result_2.data.mask, data_2.mask)
         assert_array_equal(result_1.data, data_1)
 
-    @pytest.mark.parametrize('lazy', [True, False])
+    @pytest.mark.parametrize("lazy", [True, False])
     def test_mask_fillvalues_zero_threshold(self, mocker, lazy):
         """Test the fillvalues mask: func mask_fillvalues for 0-threshold."""
         data_1 = self.mock_data
@@ -247,8 +348,8 @@ def test_mask_fillvalues_zero_threshold(self, mocker, lazy):
             cube_1.data = cube_1.lazy_data().rechunk((2, None, None))
             cube_2.data = cube_2.lazy_data()
 
-        filename_1 = Path('file1.nc')
-        filename_2 = Path('file2.nc')
+        filename_1 = Path("file1.nc")
+        filename_2 = Path("file2.nc")
         product_1 = mocker.create_autospec(
             PreprocessorFile,
             spec_set=True,
@@ -263,7 +364,9 @@ def test_mask_fillvalues_zero_threshold(self, mocker, lazy):
         )
         product_2.filename = filename_2
         product_2.cubes = [cube_2]
-        results = mask_fillvalues({product_1, product_2}, 0., min_value=-1.e20)
+        results = mask_fillvalues(
+            {product_1, product_2}, 0.0, min_value=-1.0e20
+        )
         result_1, result_2 = None, None
         for product in results:
             if product.filename == filename_1:
@@ -286,7 +389,7 @@ def test_mask_fillvalues_zero_threshold(self, mocker, lazy):
         assert_array_equal(result_1[1:2].data.mask, cumulative_mask)
         assert_array_equal(result_2[2:3].data.mask, cumulative_mask)
 
-    @pytest.mark.parametrize('lazy', [True, False])
+    @pytest.mark.parametrize("lazy", [True, False])
     def test_mask_fillvalues_min_value_none(self, mocker, lazy):
         """Test ``mask_fillvalues`` for min_value=None."""
         # We use non-masked data here and explicitly set some values to 0 here
@@ -304,8 +407,8 @@ def test_mask_fillvalues_min_value_none(self, mocker, lazy):
             cube_1.data = cube_1.lazy_data().rechunk((2, None, None))
             cube_2.data = cube_2.lazy_data()
 
-        filename_1 = Path('file1.nc')
-        filename_2 = Path('file2.nc')
+        filename_1 = Path("file1.nc")
+        filename_2 = Path("file2.nc")
 
         # Mock PreprocessorFile to avoid provenance errors
         product_1 = mocker.create_autospec(
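Note: the mask tests above all follow the same lazy/eager pattern: the payload is
wrapped in a dask array for the lazy case, and the assertions check that laziness
survives the preprocessor call. A minimal stand-alone sketch of that pattern,
assuming only iris and dask are installed (names are illustrative):

    import dask.array as da
    import iris.cube
    import numpy as np

    data = np.full((2, 3, 3), 200.0)
    for lazy in (True, False):
        # Wrap the payload in a dask array for the lazy case.
        cube = iris.cube.Cube(da.from_array(data) if lazy else data)
        assert cube.has_lazy_data() is lazy
        _ = cube.data  # touching .data realizes the payload
        assert not cube.has_lazy_data()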
diff --git a/tests/integration/preprocessor/_regrid/test_extract_coordinate_points.py b/tests/integration/preprocessor/_regrid/test_extract_coordinate_points.py
index 872b4aec2a..d5cad7c9ec 100644
--- a/tests/integration/preprocessor/_regrid/test_extract_coordinate_points.py
+++ b/tests/integration/preprocessor/_regrid/test_extract_coordinate_points.py
@@ -19,31 +19,34 @@ def setUp(self):
         """Prepare tests."""
         shape = (3, 4, 4)
         data = np.arange(np.prod(shape)).reshape(shape)
-        self.cube = _make_cube(data, dtype=np.float64, rotated=True)
+        self.cube = _make_cube(data, dtype=np.float64, grid="rotated")
         self.cs = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
 
     def test_extract_point__single_linear(self):
         """Test linear interpolation when extracting a single point"""
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 2.1, 'grid_longitude': 2.1},
-            scheme='linear')
+            {"grid_latitude": 2.1, "grid_longitude": 2.1},
+            scheme="linear",
+        )
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [5.5, 21.5, 37.5])
 
         # Exactly centred between grid points.
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 2.5, 'grid_longitude': 2.5},
-            scheme='linear')
+            {"grid_latitude": 2.5, "grid_longitude": 2.5},
+            scheme="linear",
+        )
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [7.5, 23.5, 39.5])
 
         # On a (edge) grid point.
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 4, 'grid_longitude': 4},
-            scheme='linear')
+            {"grid_latitude": 4, "grid_longitude": 4},
+            scheme="linear",
+        )
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [15, 31, 47])
 
@@ -52,16 +55,18 @@ def test_extract_point__single_linear(self):
         # schemes that way.
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': -1, 'grid_longitude': -1},
-            scheme='linear')
+            {"grid_latitude": -1, "grid_longitude": -1},
+            scheme="linear",
+        )
         self.assertEqual(point.shape, (3,))
         masked = np.ma.array([np.nan] * 3, mask=True)
         self.assert_array_equal(point.data, masked)
 
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 30, 'grid_longitude': 30},
-            scheme='linear')
+            {"grid_latitude": 30, "grid_longitude": 30},
+            scheme="linear",
+        )
         self.assertEqual(point.shape, (3,))
         self.assert_array_equal(point.data, masked)
 
@@ -70,31 +75,35 @@ def test_extract_point__single_nearest(self):
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 2.1, 'grid_longitude': 2.1},
-            scheme='nearest')
+            {"grid_latitude": 2.1, "grid_longitude": 2.1},
+            scheme="nearest",
+        )
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [5, 21, 37])
 
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 4, 'grid_longitude': 4},
-            scheme='nearest')
+            {"grid_latitude": 4, "grid_longitude": 4},
+            scheme="nearest",
+        )
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [15, 31, 47])
 
         # Test two points outside the valid area
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': -1, 'grid_longitude': -1},
-            scheme='nearest')
+            {"grid_latitude": -1, "grid_longitude": -1},
+            scheme="nearest",
+        )
         self.assertEqual(point.shape, (3,))
         masked = np.ma.array(np.empty(3, dtype=np.float64), mask=True)
         self.assert_array_equal(point.data, masked)
 
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 30, 'grid_longitude': 30},
-            scheme='nearest')
+            {"grid_latitude": 30, "grid_longitude": 30},
+            scheme="nearest",
+        )
         self.assertEqual(point.shape, (3,))
         self.assert_array_equal(point.data, masked)
 
@@ -108,44 +117,60 @@ def test_extract_point__multiple_linear(self):
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': [1, 1.1, 1.5, 2, 4],
-             'grid_longitude': 2},
-            scheme='linear')
+            {"grid_latitude": [1, 1.1, 1.5, 2, 4], "grid_longitude": 2},
+            scheme="linear",
+        )
         self.assertEqual(point.shape, (3, 5))
         # Longitude is not a dimension coordinate anymore.
-        self.assertEqual(['air_pressure', 'grid_latitude'], [
-            coord.standard_name for coord in point.coords(dim_coords=True)])
-        np.testing.assert_allclose(point.data, [[1, 1.4, 3, 5, 13],
-                                                [17, 17.4, 19., 21., 29],
-                                                [33, 33.4, 35, 37, 45]])
+        self.assertEqual(
+            ["air_pressure", "grid_latitude"],
+            [coord.standard_name for coord in point.coords(dim_coords=True)],
+        )
+        np.testing.assert_allclose(
+            point.data,
+            [
+                [1, 1.4, 3, 5, 13],
+                [17, 17.4, 19.0, 21.0, 29],
+                [33, 33.4, 35, 37, 45],
+            ],
+        )
 
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 4,
-             'grid_longitude': [1, 1.1, 1.5, 2, 4]},
-            scheme='linear')
+            {"grid_latitude": 4, "grid_longitude": [1, 1.1, 1.5, 2, 4]},
+            scheme="linear",
+        )
         self.assertEqual(point.shape, (3, 5))
-        self.assertEqual(['air_pressure', 'grid_longitude'], [
-            coord.standard_name for coord in point.coords(dim_coords=True)])
-        np.testing.assert_allclose(point.data, [[12, 12.1, 12.5, 13, 15],
-                                                [28, 28.1, 28.5, 29, 31],
-                                                [44, 44.1, 44.5, 45, 47]])
+        self.assertEqual(
+            ["air_pressure", "grid_longitude"],
+            [coord.standard_name for coord in point.coords(dim_coords=True)],
+        )
+        np.testing.assert_allclose(
+            point.data,
+            [
+                [12, 12.1, 12.5, 13, 15],
+                [28, 28.1, 28.5, 29, 31],
+                [44, 44.1, 44.5, 45, 47],
+            ],
+        )
 
         # Test latitude and longitude points outside the grid.
         # These should all be masked.
         coords = self.cube.coords(dim_coords=True)
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': [0, 10], 'grid_longitude': 3},
-            scheme='linear')
+            {"grid_latitude": [0, 10], "grid_longitude": 3},
+            scheme="linear",
+        )
         self.assertEqual(point.shape, (3, 2))
         masked = np.ma.array(np.empty((3, 2), dtype=np.float64), mask=True)
         self.assert_array_equal(point.data, masked)
         coords = self.cube.coords(dim_coords=True)
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 2, 'grid_longitude': [0, 10]},
-            scheme='linear')
+            {"grid_latitude": 2, "grid_longitude": [0, 10]},
+            scheme="linear",
+        )
         coords = point.coords(dim_coords=True)
         self.assertEqual(point.shape, (3, 2))
         self.assert_array_equal(point.data, masked)
 
@@ -155,39 +180,53 @@ def test_extract_point__multiple_nearest(self):
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': [1, 1.1, 1.5, 1.501, 2, 4],
-             'grid_longitude': 2},
-            scheme='nearest')
+            {"grid_latitude": [1, 1.1, 1.5, 1.501, 2, 4], "grid_longitude": 2},
+            scheme="nearest",
+        )
         self.assertEqual(point.shape, (3, 6))
-        self.assertEqual(['air_pressure', 'grid_latitude'], [
-            coord.standard_name for coord in point.coords(dim_coords=True)])
-        np.testing.assert_allclose(point.data, [[1, 1, 1, 5, 5, 13],
-                                                [17, 17, 17, 21, 21, 29],
-                                                [33, 33, 33, 37, 37, 45]])
+        self.assertEqual(
+            ["air_pressure", "grid_latitude"],
+            [coord.standard_name for coord in point.coords(dim_coords=True)],
+        )
+        np.testing.assert_allclose(
+            point.data,
+            [
+                [1, 1, 1, 5, 5, 13],
+                [17, 17, 17, 21, 21, 29],
+                [33, 33, 33, 37, 37, 45],
+            ],
+        )
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 4,
-             'grid_longitude': [1, 1.1, 1.5, 1.501, 2, 4]},
-            scheme='nearest')
+            {"grid_latitude": 4, "grid_longitude": [1, 1.1, 1.5, 1.501, 2, 4]},
+            scheme="nearest",
+        )
         self.assertEqual(point.shape, (3, 6))
-        self.assertEqual(['air_pressure', 'grid_longitude'], [
-            coord.standard_name for coord in point.coords(dim_coords=True)])
-        np.testing.assert_allclose(point.data, [[12, 12, 12, 13, 13, 15],
-                                                [28, 28, 28, 29, 29, 31],
-                                                [44, 44, 44, 45, 45, 47]])
+        self.assertEqual(
+            ["air_pressure", "grid_longitude"],
+            [coord.standard_name for coord in point.coords(dim_coords=True)],
+        )
+        np.testing.assert_allclose(
+            point.data,
+            [
+                [12, 12, 12, 13, 13, 15],
+                [28, 28, 28, 29, 29, 31],
+                [44, 44, 44, 45, 45, 47],
+            ],
+        )
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': [0, 10],
-             'grid_longitude': 3},
-            scheme='nearest')
+            {"grid_latitude": [0, 10], "grid_longitude": 3},
+            scheme="nearest",
+        )
         masked = np.ma.array(np.empty((3, 2), dtype=np.float64), mask=True)
         self.assertEqual(point.shape, (3, 2))
         self.assert_array_equal(point.data, masked)
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': 2,
-             'grid_longitude': [0, 10]},
-            scheme='nearest')
+            {"grid_latitude": 2, "grid_longitude": [0, 10]},
+            scheme="nearest",
+        )
         self.assertEqual(point.shape, (3, 2))
         self.assert_array_equal(point.data, masked)
 
@@ -196,8 +235,12 @@ def test_extract_point__multiple_both_linear(self):
         linear interpolation"""
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': [0, 1.1, 1.5, 1.51, 4, 5],
-             'grid_longitude': [0, 1.1, 1.5, 1.51, 4, 5]}, scheme='linear')
+            {
+                "grid_latitude": [0, 1.1, 1.5, 1.51, 4, 5],
+                "grid_longitude": [0, 1.1, 1.5, 1.51, 4, 5],
+            },
+            scheme="linear",
+        )
         self.assertEqual(point.data.shape, (3, 6, 6))
 
         result = np.ma.array(np.empty((3, 6, 6), dtype=np.float64), mask=True)
@@ -226,9 +269,12 @@ def test_extract_point__multiple_both_nearest(self):
         """Test for both latitude and longitude arrays, with nearest match"""
         point = extract_coordinate_points(
             self.cube,
-            {'grid_latitude': [0, 1.1, 1.5, 1.51, 4, 5],
-             'grid_longitude': [0, 1.1, 1.5, 1.51, 4, 5]},
-            scheme='nearest')
+            {
+                "grid_latitude": [0, 1.1, 1.5, 1.51, 4, 5],
+                "grid_longitude": [0, 1.1, 1.5, 1.51, 4, 5],
+            },
+            scheme="nearest",
+        )
         self.assertEqual(point.data.shape, (3, 6, 6))
 
         result = np.ma.array(np.empty((3, 6, 6), dtype=np.float64), mask=True)
@@ -255,8 +301,11 @@ def test_wrong_interpolation_scheme(self):
         self.assertRaises(
             ValueError,
             extract_coordinate_points,
-            self.cube, {'grid_latitude': 0.}, 'wrong')
+            self.cube,
+            {"grid_latitude": 0.0},
+            "wrong",
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
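Note: extract_coordinate_points above takes a mapping of coordinate names to
sample points plus a scheme name, and out-of-range points come back masked. A
rough sketch of the equivalent plain-iris call; the helper name
interpolate_points is illustrative, not part of any API, and the exact scheme
configuration is an assumption inferred from the masking behaviour the tests
check:

    import iris.analysis

    def interpolate_points(cube, points, scheme="linear"):
        """Interpolate ``cube`` at the coordinate/value pairs in ``points``."""
        schemes = {
            "linear": iris.analysis.Linear(extrapolation_mode="mask"),
            "nearest": iris.analysis.Nearest(extrapolation_mode="mask"),
        }
        if scheme not in schemes:
            raise ValueError(f"Unknown interpolation scheme: {scheme}")
        # The 'mask' extrapolation mode returns masked values for points
        # outside the grid, matching the out-of-range assertions above.
        return cube.interpolate(list(points.items()), schemes[scheme])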
diff --git a/tests/integration/preprocessor/_regrid/test_extract_levels.py b/tests/integration/preprocessor/_regrid/test_extract_levels.py
index 287f019146..000931637d 100644
--- a/tests/integration/preprocessor/_regrid/test_extract_levels.py
+++ b/tests/integration/preprocessor/_regrid/test_extract_levels.py
@@ -13,7 +13,6 @@
 
 
 class Test(tests.Test):
-
     def setUp(self):
         """Prepare tests."""
         shape = (3, 2, 2)
@@ -22,62 +21,64 @@ def setUp(self):
         cubes = iris.cube.CubeList()
         # Create first realization cube.
         cube = _make_cube(data)
-        coord = iris.coords.DimCoord(0, standard_name='realization')
+        coord = iris.coords.DimCoord(0, standard_name="realization")
         cube.add_aux_coord(coord)
         cubes.append(cube)
         # Create second realization cube.
         cube = _make_cube(data + np.prod(shape))
-        coord = iris.coords.DimCoord(1, standard_name='realization')
+        coord = iris.coords.DimCoord(1, standard_name="realization")
         cube.add_aux_coord(coord)
         cubes.append(cube)
         # Create a 4d synthetic test cube.
         self.cube = cubes.merge_cube()
-        coord = self.cube.coord(axis='z', dim_coords=True)
+        coord = self.cube.coord(axis="z", dim_coords=True)
         self.shape = list(self.cube.shape)
         [self.z_dim] = self.cube.coord_dims(coord)
 
     def test_nop__levels_match(self):
         vcoord = _make_vcoord(self.z)
-        self.assertEqual(self.cube.coord(axis='z', dim_coords=True), vcoord)
+        self.assertEqual(self.cube.coord(axis="z", dim_coords=True), vcoord)
         levels = vcoord.points
-        result = extract_levels(self.cube, levels, 'linear')
+        result = extract_levels(self.cube, levels, "linear")
         self.assertEqual(result, self.cube)
         self.assertEqual(id(result), id(self.cube))
 
     def test_levels_almost_match(self):
-        vcoord = self.cube.coord(axis='z', dim_coords=True)
+        vcoord = self.cube.coord(axis="z", dim_coords=True)
         levels = np.array(vcoord.points, dtype=float)
-        vcoord.points = vcoord.points + 1.e-7
-        result = extract_levels(self.cube, levels, 'linear')
+        vcoord.points = vcoord.points + 1.0e-7
+        result = extract_levels(self.cube, levels, "linear")
         self.assert_array_equal(vcoord.points, levels)
         self.assertTrue(result is self.cube)
 
     def test_interpolation__linear(self):
         levels = [0.5, 1.5]
-        scheme = 'linear'
+        scheme = "linear"
         result = extract_levels(self.cube, levels, scheme)
-        expected = np.ma.array([
-            [
-                [[2., 3.], [4., 5.]],
-                [[6., 7.], [8., 9.]],
-            ],
+        expected = np.ma.array(
             [
-                [[14., 15.], [16., 17.]],
-                [[18., 19.], [20., 21.]],
-            ],
-        ])
+                [
+                    [[2.0, 3.0], [4.0, 5.0]],
+                    [[6.0, 7.0], [8.0, 9.0]],
+                ],
+                [
+                    [[14.0, 15.0], [16.0, 17.0]],
+                    [[18.0, 19.0], [20.0, 21.0]],
+                ],
+            ]
+        )
         self.assert_array_equal(result.data, expected)
         self.shape[self.z_dim] = len(levels)
         self.assertEqual(result.shape, tuple(self.shape))
 
     def test_interpolation__linear_lazy(self):
         levels = [0.5, 1.5]
-        scheme = 'linear'
+        scheme = "linear"
         cube = self.cube.copy(self.cube.lazy_data())
-        coord_name = 'multidimensional_vertical_coord'
-        coord_points = (
-            cube.coord('air_pressure').core_points().reshape(3, 1, 1) *
-            np.ones((3, 2, 2)))
+        coord_name = "multidimensional_vertical_coord"
+        coord_points = cube.coord("air_pressure").core_points().reshape(
+            3, 1, 1
+        ) * np.ones((3, 2, 2))
         cube.add_aux_coord(
             iris.coords.AuxCoord(
                 da.asarray(coord_points),
@@ -88,56 +89,76 @@ def test_interpolation__linear_lazy(self):
         result = extract_levels(cube, levels, scheme, coordinate=coord_name)
         self.assertTrue(result.has_lazy_data())
         self.assertTrue(cube.coord(coord_name).has_lazy_points())
-        expected = np.ma.array([
-            [
-                [[2., 3.], [4., 5.]],
-                [[6., 7.], [8., 9.]],
-            ],
+        expected = np.ma.array(
             [
-                [[14., 15.], [16., 17.]],
-                [[18., 19.], [20., 21.]],
-            ],
-        ])
+                [
+                    [[2.0, 3.0], [4.0, 5.0]],
+                    [[6.0, 7.0], [8.0, 9.0]],
+                ],
+                [
+                    [[14.0, 15.0], [16.0, 17.0]],
+                    [[18.0, 19.0], [20.0, 21.0]],
+                ],
+            ]
+        )
         self.assert_array_equal(result.data, expected)
 
     def test_interpolation__nearest(self):
         levels = [0.49, 1.51]
-        scheme = 'nearest'
+        scheme = "nearest"
         result = extract_levels(self.cube, levels, scheme)
-        expected = np.ma.array([
-            [
-                [[0., 1.], [2., 3.]],
-                [[8., 9.], [10., 11.]],
-            ],
+        expected = np.ma.array(
             [
-                [[12., 13.], [14., 15.]],
-                [[20., 21.], [22., 23.]],
-            ],
-        ])
+                [
+                    [[0.0, 1.0], [2.0, 3.0]],
+                    [[8.0, 9.0], [10.0, 11.0]],
+                ],
+                [
+                    [[12.0, 13.0], [14.0, 15.0]],
+                    [[20.0, 21.0], [22.0, 23.0]],
+                ],
+            ]
+        )
         self.assert_array_equal(result.data, expected)
         self.shape[self.z_dim] = len(levels)
         self.assertEqual(result.shape, tuple(self.shape))
 
     def test_interpolation__extrapolated_nan_filling(self):
         levels = [-10, 1, 2, 10]
-        scheme = 'nearest'
+        scheme = "nearest"
         result = extract_levels(self.cube, levels, scheme)
-        expected = np.array([[[[_MDI, _MDI], [_MDI, _MDI]], [[4., 5.],
-                                                             [6., 7.]],
-                              [[8., 9.], [10., 11.]],
-                              [[_MDI, _MDI], [_MDI, _MDI]]],
-                             [[[_MDI, _MDI], [_MDI, _MDI]],
-                              [[16., 17.], [18., 19.]], [[20., 21.],
-                                                         [22., 23.]],
-                              [[_MDI, _MDI], [_MDI, _MDI]]]])
-        expected_mask = np.array([[[[True, True], [True, True]],
-                                   [[False, False], [False, False]],
-                                   [[False, False], [False, False]],
-                                   [[True, True], [True, True]]],
-                                  [[[True, True], [True, True]],
-                                   [[False, False], [False, False]],
-                                   [[False, False], [False, False]],
-                                   [[True, True], [True, True]]]])
+        expected = np.array(
+            [
+                [
+                    [[_MDI, _MDI], [_MDI, _MDI]],
+                    [[4.0, 5.0], [6.0, 7.0]],
+                    [[8.0, 9.0], [10.0, 11.0]],
+                    [[_MDI, _MDI], [_MDI, _MDI]],
+                ],
+                [
+                    [[_MDI, _MDI], [_MDI, _MDI]],
+                    [[16.0, 17.0], [18.0, 19.0]],
+                    [[20.0, 21.0], [22.0, 23.0]],
+                    [[_MDI, _MDI], [_MDI, _MDI]],
+                ],
+            ]
+        )
+        expected_mask = np.array(
+            [
+                [
+                    [[True, True], [True, True]],
+                    [[False, False], [False, False]],
+                    [[False, False], [False, False]],
+                    [[True, True], [True, True]],
+                ],
+                [
+                    [[True, True], [True, True]],
+                    [[False, False], [False, False]],
+                    [[False, False], [False, False]],
+                    [[True, True], [True, True]],
+                ],
+            ]
+        )
         expected = np.ma.array(expected, mask=expected_mask)
         self.assert_array_equal(result.data, expected)
         self.shape[self.z_dim] = len(levels)
@@ -145,41 +166,43 @@ def test_interpolation__extrapolated_nan_filling(self):
 
     def test_interpolation__scalar_collapse(self):
         level = 1
-        scheme = 'nearest'
+        scheme = "nearest"
         result = extract_levels(self.cube, level, scheme)
-        expected = np.array([[[4., 5.], [6., 7.]], [[16., 17.], [18., 19.]]])
+        expected = np.array(
+            [[[4.0, 5.0], [6.0, 7.0]], [[16.0, 17.0], [18.0, 19.0]]]
+        )
         self.assert_array_equal(result.data, expected)
         del self.shape[self.z_dim]
         self.assertEqual(result.shape, tuple(self.shape))
 
     def test_add_alt_coord(self):
-        assert self.cube.coords('air_pressure')
-        assert not self.cube.coords('altitude')
-        result = extract_levels(self.cube, [1, 2],
-                                'linear_extrapolate',
-                                coordinate='altitude')
-        assert not result.coords('air_pressure')
-        assert result.coords('altitude')
+        assert self.cube.coords("air_pressure")
+        assert not self.cube.coords("altitude")
+        result = extract_levels(
+            self.cube, [1, 2], "linear_extrapolate", coordinate="altitude"
+        )
+        assert not result.coords("air_pressure")
+        assert result.coords("altitude")
         assert result.shape == (2, 2, 2, 2)
-        np.testing.assert_allclose(result.coord('altitude').points,
-                                   [1.0, 2.0])
+        np.testing.assert_allclose(result.coord("altitude").points, [1.0, 2.0])
 
     def test_add_plev_coord(self):
-        self.cube.coord('air_pressure').standard_name = 'altitude'
-        self.cube.coord('altitude').var_name = 'alt'
-        self.cube.coord('altitude').long_name = 'altitude'
-        self.cube.coord('altitude').units = 'm'
-        assert not self.cube.coords('air_pressure')
-        assert self.cube.coords('altitude')
-        result = extract_levels(self.cube, [1, 2],
-                                'linear_extrapolate',
-                                coordinate='air_pressure')
-        assert result.coords('air_pressure')
-        assert not result.coords('altitude')
+        self.cube.coord("air_pressure").standard_name = "altitude"
+        self.cube.coord("altitude").var_name = "alt"
+        self.cube.coord("altitude").long_name = "altitude"
+        self.cube.coord("altitude").units = "m"
+        assert not self.cube.coords("air_pressure")
+        assert self.cube.coords("altitude")
+        result = extract_levels(
+            self.cube, [1, 2], "linear_extrapolate", coordinate="air_pressure"
+        )
+        assert result.coords("air_pressure")
+        assert not result.coords("altitude")
         assert result.shape == (2, 2, 2, 2)
-        np.testing.assert_allclose(result.coord('air_pressure').points,
-                                   [1.0, 2.0])
+        np.testing.assert_allclose(
+            result.coord("air_pressure").points, [1.0, 2.0]
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
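Note: test_interpolation__linear_lazy above builds a multidimensional vertical
coordinate from dask data and checks that both the result and the coordinate
points stay lazy. A minimal sketch of that construction, with shapes following
the test's (3, 2, 2) cube:

    import dask.array as da
    import iris.coords

    # One vertical-level value per (z, y, x) grid cell, kept lazy.
    coord_points = da.ones((3, 2, 2))
    vcoord = iris.coords.AuxCoord(
        coord_points, long_name="multidimensional_vertical_coord"
    )
    assert vcoord.has_lazy_points()
    # Attaching it along all three dimensions would look like:
    # cube.add_aux_coord(vcoord, (0, 1, 2))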
diff --git a/tests/integration/preprocessor/_regrid/test_extract_location.py b/tests/integration/preprocessor/_regrid/test_extract_location.py
index 15c1ffb417..b5c316bd39 100644
--- a/tests/integration/preprocessor/_regrid/test_extract_location.py
+++ b/tests/integration/preprocessor/_regrid/test_extract_location.py
@@ -1,4 +1,5 @@
 """Integration tests for :func:`esmvalcore.preprocessor.extract_location."""
+
 import ssl
 from unittest.mock import patch
 
@@ -18,11 +19,14 @@ def mocked_geopy_geocoders_nominatim(mocker):
     See https://github.com/ESMValGroup/ESMValCore/issues/1982.
     """
     mocked_nominatim = mocker.patch(
-        'esmvalcore.preprocessor._regrid.Nominatim', autospec=True)
-    geolocation_penacaballera = mocker.Mock(latitude=40.3442754,
-                                            longitude=-5.8606859)
+        "esmvalcore.preprocessor._regrid.Nominatim", autospec=True
+    )
+    geolocation_penacaballera = mocker.Mock(
+        latitude=40.3442754, longitude=-5.8606859
+    )
     mocked_nominatim.return_value.geocode.side_effect = (
-        lambda x: geolocation_penacaballera if x == 'Peñacaballera' else None)
+        lambda x: geolocation_penacaballera if x == "Peñacaballera" else None
+    )
 
 
 @pytest.fixture
@@ -33,39 +37,44 @@ def test_cube():
     z, y, x = shape
 
     # Create the cube.
-    cm = CellMethod(method='mean',
-                    coords='time',
-                    intervals='20 minutes',
-                    comments=None)
-    kwargs = dict(standard_name='air_temperature',
-                  long_name='Air Temperature',
-                  var_name='ta',
-                  units='K',
-                  attributes=dict(cube='attribute'),
-                  cell_methods=(cm, ))
+    cm = CellMethod(
+        method="mean", coords="time", intervals="20 minutes", comments=None
+    )
+    kwargs = dict(
+        standard_name="air_temperature",
+        long_name="Air Temperature",
+        var_name="ta",
+        units="K",
+        attributes=dict(cube="attribute"),
+        cell_methods=(cm,),
+    )
     cube = iris.cube.Cube(data, **kwargs)
 
     # Create a synthetic test latitude coordinate.
     data = np.linspace(-90, 90, y)
     cs = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
-    kwargs = dict(standard_name='latitude',
-                  long_name='Latitude',
-                  var_name='lat',
-                  units='degrees_north',
-                  attributes=dict(latitude='attribute'),
-                  coord_system=cs)
+    kwargs = dict(
+        standard_name="latitude",
+        long_name="Latitude",
+        var_name="lat",
+        units="degrees_north",
+        attributes=dict(latitude="attribute"),
+        coord_system=cs,
+    )
     ycoord = DimCoord(data, **kwargs)
     ycoord.guess_bounds()
     cube.add_dim_coord(ycoord, 1)
 
     # Create a synthetic test longitude coordinate.
     data = np.linspace(0, 360, x)
-    kwargs = dict(standard_name='longitude',
-                  long_name='Longitude',
-                  var_name='lon',
-                  units='degrees_east',
-                  attributes=dict(longitude='attribute'),
-                  coord_system=cs)
+    kwargs = dict(
+        standard_name="longitude",
+        long_name="Longitude",
+        var_name="lon",
+        units="degrees_east",
+        attributes=dict(longitude="attribute"),
+        coord_system=cs,
+    )
     xcoord = DimCoord(data, **kwargs)
     xcoord.guess_bounds()
     cube.add_dim_coord(xcoord, 2)
@@ -74,10 +83,10 @@ def test_cube():
 
 def test_extract_successful(test_cube):
     """Test only town name."""
-    point = extract_location(test_cube,
-                             scheme='nearest',
-                             location='Peñacaballera')
-    assert point.shape == (3, )
+    point = extract_location(
+        test_cube, scheme="nearest", location="Peñacaballera"
+    )
+    assert point.shape == (3,)
     np.testing.assert_equal(point.data, [1186, 2806, 4426])
 
 
@@ -85,25 +94,25 @@ def test_non_existing_location(test_cube):
     """Test town plus region plus country."""
     msg = "Requested location Minas Tirith,Gondor can not be found"
     with pytest.raises(ValueError, match=msg):
-        extract_location(test_cube,
-                         scheme='nearest',
-                         location='Minas Tirith,Gondor')
+        extract_location(
+            test_cube, scheme="nearest", location="Minas Tirith,Gondor"
+        )
 
 
 def test_no_location_parameter(test_cube):
     """Test if no location supplied."""
     msg = "Location needs to be specified."
     with pytest.raises(ValueError, match=msg):
-        extract_location(test_cube, scheme='nearest', location=None)
+        extract_location(test_cube, scheme="nearest", location=None)
 
 
 def test_no_scheme_parameter(test_cube):
     """Test if no scheme supplied."""
     msg = "Interpolation scheme needs to be specified."
     with pytest.raises(ValueError, match=msg):
-        extract_location(test_cube,
-                         scheme=None,
-                         location='Calvitero,Candelario')
+        extract_location(
+            test_cube, scheme=None, location="Calvitero,Candelario"
+        )
 
 
 @patch("esmvalcore.preprocessor._regrid.ssl.create_default_context")
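Note: for reference, the geopy call that the mocked fixture above stands in for
looks roughly like this; it hits the Nominatim web service, which is exactly why
the tests mock it. The user_agent string below is illustrative, not taken from
the codebase:

    from geopy.geocoders import Nominatim

    geolocator = Nominatim(user_agent="esmvalcore-example")  # illustrative
    location = geolocator.geocode("Peñacaballera")
    if location is not None:
        print(location.latitude, location.longitude)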
diff --git a/tests/integration/preprocessor/_regrid/test_extract_point.py b/tests/integration/preprocessor/_regrid/test_extract_point.py
index d3d93945b9..132d3fb8dd 100644
--- a/tests/integration/preprocessor/_regrid/test_extract_point.py
+++ b/tests/integration/preprocessor/_regrid/test_extract_point.py
@@ -25,33 +25,33 @@ def setUp(self):
 
     def test_extract_point__single_linear(self):
         """Test linear interpolation when extracting a single point"""
-        point = extract_point(self.cube, 2.1, 2.1, scheme='linear')
+        point = extract_point(self.cube, 2.1, 2.1, scheme="linear")
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [5.5, 21.5, 37.5])
 
         # Exactly centred between grid points.
-        point = extract_point(self.cube, 2.5, 2.5, scheme='linear')
+        point = extract_point(self.cube, 2.5, 2.5, scheme="linear")
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [7.5, 23.5, 39.5])
 
         # On a (edge) grid point.
-        point = extract_point(self.cube, 4, 4, scheme='linear')
+        point = extract_point(self.cube, 4, 4, scheme="linear")
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [15, 31, 47])
 
         # Test two points outside the valid area.
         # These should be masked, since we set up the interpolation
         # schemes that way.
-        point = extract_point(self.cube, -1, -1, scheme='linear')
+        point = extract_point(self.cube, -1, -1, scheme="linear")
         self.assertEqual(point.shape, (3,))
         masked = np.ma.array([np.nan] * 3, mask=True)
         self.assert_array_equal(point.data, masked)
 
-        point = extract_point(self.cube, 30, 30, scheme='linear')
+        point = extract_point(self.cube, 30, 30, scheme="linear")
         self.assertEqual(point.shape, (3,))
         self.assert_array_equal(point.data, masked)
 
-        point = extract_point(self.cube, 30, 30, scheme='nearest')
+        point = extract_point(self.cube, 30, 30, scheme="nearest")
         self.assertEqual(point.shape, (3,))
         # do it the proletarian way, back to basics is good sometimes
         assert np.ma.is_masked(point.data)
@@ -60,21 +60,21 @@ def test_extract_point__single_linear(self):
 
     def test_extract_point__single_nearest(self):
         """Test nearest match when extracting a single point"""
-        point = extract_point(self.cube, 2.1, 2.1, scheme='nearest')
+        point = extract_point(self.cube, 2.1, 2.1, scheme="nearest")
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [5, 21, 37])
 
-        point = extract_point(self.cube, 4, 4, scheme='nearest')
+        point = extract_point(self.cube, 4, 4, scheme="nearest")
         self.assertEqual(point.shape, (3,))
         np.testing.assert_allclose(point.data, [15, 31, 47])
 
         # Test two points outside the valid area
-        point = extract_point(self.cube, -1, -1, scheme='nearest')
+        point = extract_point(self.cube, -1, -1, scheme="nearest")
         self.assertEqual(point.shape, (3,))
         masked = np.ma.array(np.empty(3, dtype=np.float64), mask=True)
         self.assert_array_equal(point.data, masked)
 
-        point = extract_point(self.cube, 30, 30, scheme='nearest')
+        point = extract_point(self.cube, 30, 30, scheme="nearest")
         self.assertEqual(point.shape, (3,))
         self.assert_array_equal(point.data, masked)
 
@@ -86,36 +86,50 @@ def test_extract_point__multiple_linear(self):
         coords = self.cube.coords(dim_coords=True)
         print([coord.standard_name for coord in coords])
-        point = extract_point(self.cube, [1, 1.1, 1.5, 2, 4], 2,
-                              scheme='linear')
+        point = extract_point(
+            self.cube, [1, 1.1, 1.5, 2, 4], 2, scheme="linear"
+        )
         self.assertEqual(point.shape, (3, 5))
         # Longitude is not a dimension coordinate anymore.
-        self.assertEqual(['air_pressure', 'latitude'], [
-            coord.standard_name for coord in point.coords(dim_coords=True)])
-        np.testing.assert_allclose(point.data, [[1, 1.4, 3, 5, 13],
-                                                [17, 17.4, 19., 21., 29],
-                                                [33, 33.4, 35, 37, 45]])
-
-        point = extract_point(self.cube, 4, [1, 1.1, 1.5, 2, 4],
-                              scheme='linear')
+        self.assertEqual(
+            ["air_pressure", "latitude"],
+            [coord.standard_name for coord in point.coords(dim_coords=True)],
+        )
+        np.testing.assert_allclose(
+            point.data,
+            [
+                [1, 1.4, 3, 5, 13],
+                [17, 17.4, 19.0, 21.0, 29],
+                [33, 33.4, 35, 37, 45],
+            ],
+        )
+
+        point = extract_point(
+            self.cube, 4, [1, 1.1, 1.5, 2, 4], scheme="linear"
+        )
         self.assertEqual(point.shape, (3, 5))
-        self.assertEqual(['air_pressure', 'longitude'], [
-            coord.standard_name for coord in point.coords(dim_coords=True)])
-        np.testing.assert_allclose(point.data, [[12, 12.1, 12.5, 13, 15],
-                                                [28, 28.1, 28.5, 29, 31],
-                                                [44, 44.1, 44.5, 45, 47]])
+        self.assertEqual(
+            ["air_pressure", "longitude"],
+            [coord.standard_name for coord in point.coords(dim_coords=True)],
+        )
+        np.testing.assert_allclose(
+            point.data,
+            [
+                [12, 12.1, 12.5, 13, 15],
+                [28, 28.1, 28.5, 29, 31],
+                [44, 44.1, 44.5, 45, 47],
+            ],
+        )
 
         # Test latitude and longitude points outside the grid.
         # These should all be masked.
         coords = self.cube.coords(dim_coords=True)
-        point = extract_point(self.cube, [0, 10], 3,
-                              scheme='linear')
+        point = extract_point(self.cube, [0, 10], 3, scheme="linear")
         self.assertEqual(point.shape, (3, 2))
         masked = np.ma.array(np.empty((3, 2), dtype=np.float64), mask=True)
         self.assert_array_equal(point.data, masked)
         coords = self.cube.coords(dim_coords=True)
-        point = extract_point(self.cube, 2, [0, 10],
-                              scheme='linear')
+        point = extract_point(self.cube, 2, [0, 10], scheme="linear")
         coords = point.coords(dim_coords=True)
         self.assertEqual(point.shape, (3, 2))
         self.assert_array_equal(point.data, masked)
@@ -123,37 +137,55 @@ def test_extract_point__multiple_linear(self):
 
     def test_extract_point__multiple_nearest(self):
         """Test nearest match for an array of one coordinate"""
-        point = extract_point(self.cube, [1, 1.1, 1.5, 1.501, 2, 4], 2,
-                              scheme='nearest')
+        point = extract_point(
+            self.cube, [1, 1.1, 1.5, 1.501, 2, 4], 2, scheme="nearest"
+        )
         self.assertEqual(point.shape, (3, 6))
-        self.assertEqual(['air_pressure', 'latitude'], [
-            coord.standard_name for coord in point.coords(dim_coords=True)])
-        np.testing.assert_allclose(point.data, [[1, 1, 1, 5, 5, 13],
-                                                [17, 17, 17, 21, 21, 29],
-                                                [33, 33, 33, 37, 37, 45]])
-        point = extract_point(self.cube, 4, [1, 1.1, 1.5, 1.501, 2, 4],
-                              scheme='nearest')
+        self.assertEqual(
+            ["air_pressure", "latitude"],
+            [coord.standard_name for coord in point.coords(dim_coords=True)],
+        )
+        np.testing.assert_allclose(
+            point.data,
+            [
+                [1, 1, 1, 5, 5, 13],
+                [17, 17, 17, 21, 21, 29],
+                [33, 33, 33, 37, 37, 45],
+            ],
+        )
+        point = extract_point(
+            self.cube, 4, [1, 1.1, 1.5, 1.501, 2, 4], scheme="nearest"
+        )
         self.assertEqual(point.shape, (3, 6))
-        self.assertEqual(['air_pressure', 'longitude'], [
-            coord.standard_name for coord in point.coords(dim_coords=True)])
-        np.testing.assert_allclose(point.data, [[12, 12, 12, 13, 13, 15],
-                                                [28, 28, 28, 29, 29, 31],
-                                                [44, 44, 44, 45, 45, 47]])
-        point = extract_point(self.cube, [0, 10], 3,
-                              scheme='nearest')
+        self.assertEqual(
+            ["air_pressure", "longitude"],
+            [coord.standard_name for coord in point.coords(dim_coords=True)],
+        )
+        np.testing.assert_allclose(
+            point.data,
+            [
+                [12, 12, 12, 13, 13, 15],
+                [28, 28, 28, 29, 29, 31],
+                [44, 44, 44, 45, 45, 47],
+            ],
+        )
+        point = extract_point(self.cube, [0, 10], 3, scheme="nearest")
         masked = np.ma.array(np.empty((3, 2), dtype=np.float64), mask=True)
         self.assertEqual(point.shape, (3, 2))
         self.assert_array_equal(point.data, masked)
-        point = extract_point(self.cube, 2, [0, 10],
-                              scheme='nearest')
+        point = extract_point(self.cube, 2, [0, 10], scheme="nearest")
         self.assertEqual(point.shape, (3, 2))
         self.assert_array_equal(point.data, masked)
 
     def test_extract_point__multiple_both_linear(self):
         """Test for both latitude and longitude arrays, with
         linear interpolation"""
-        point = extract_point(self.cube, [0, 1.1, 1.5, 1.51, 4, 5],
-                              [0, 1.1, 1.5, 1.51, 4, 5], scheme='linear')
+        point = extract_point(
+            self.cube,
+            [0, 1.1, 1.5, 1.51, 4, 5],
+            [0, 1.1, 1.5, 1.51, 4, 5],
+            scheme="linear",
+        )
         self.assertEqual(point.data.shape, (3, 6, 6))
 
         result = np.ma.array(np.empty((3, 6, 6), dtype=np.float64), mask=True)
@@ -180,8 +212,12 @@ def test_extract_point__multiple_both_linear(self):
 
     def test_extract_point__multiple_both_nearest(self):
         """Test for both latitude and longitude arrays, with nearest match"""
-        point = extract_point(self.cube, [0, 1.1, 1.5, 1.51, 4, 5],
-                              [0, 1.1, 1.5, 1.51, 4, 5], scheme='nearest')
+        point = extract_point(
+            self.cube,
+            [0, 1.1, 1.5, 1.51, 4, 5],
+            [0, 1.1, 1.5, 1.51, 4, 5],
+            scheme="nearest",
+        )
         self.assertEqual(point.data.shape, (3, 6, 6))
 
         result = np.ma.array(np.empty((3, 6, 6), dtype=np.float64), mask=True)
@@ -204,5 +240,5 @@ def test_extract_point__multiple_both_nearest(self):
         np.testing.assert_allclose(point.data, result)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py b/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py
index b4f869d171..af18135070 100644
--- a/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py
+++ b/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py
@@ -11,41 +11,93 @@
 
 
 class TestGetCmorLevels(unittest.TestCase):
-
     def test_cmip6_alt40(self):
         self.assertListEqual(
-            _regrid.get_cmor_levels('CMIP6', 'alt40'), [
-                240.0, 720.0, 1200.0, 1680.0, 2160.0, 2640.0, 3120.0, 3600.0,
-                4080.0, 4560.0, 5040.0, 5520.0, 6000.0, 6480.0, 6960.0, 7440.0,
-                7920.0, 8400.0, 8880.0, 9360.0, 9840.0, 10320.0, 10800.0,
-                11280.0, 11760.0, 12240.0, 12720.0, 13200.0, 13680.0, 14160.0,
-                14640.0, 15120.0, 15600.0, 16080.0, 16560.0, 17040.0, 17520.0,
-                18000.0, 18480.0, 18960.0
-            ])
+            _regrid.get_cmor_levels("CMIP6", "alt40"),
+            [
+                240.0,
+                720.0,
+                1200.0,
+                1680.0,
+                2160.0,
+                2640.0,
+                3120.0,
+                3600.0,
+                4080.0,
+                4560.0,
+                5040.0,
+                5520.0,
+                6000.0,
+                6480.0,
+                6960.0,
+                7440.0,
+                7920.0,
+                8400.0,
+                8880.0,
+                9360.0,
+                9840.0,
+                10320.0,
+                10800.0,
+                11280.0,
+                11760.0,
+                12240.0,
+                12720.0,
+                13200.0,
+                13680.0,
+                14160.0,
+                14640.0,
+                15120.0,
+                15600.0,
+                16080.0,
+                16560.0,
+                17040.0,
+                17520.0,
+                18000.0,
+                18480.0,
+                18960.0,
+            ],
+        )
 
     def test_cmip6_p200(self):
         self.assertListEqual(
-            _regrid.get_cmor_levels('CMIP6', 'p200'), [20000.])
+            _regrid.get_cmor_levels("CMIP6", "p200"), [20000.0]
+        )
 
     def test_cmip5_alt40(self):
         self.assertListEqual(
-            _regrid.get_cmor_levels('CMIP5', 'plevs'), [
-                100000., 92500., 85000., 70000., 60000., 50000., 40000.,
-                30000., 25000., 20000., 15000., 10000., 7000., 5000., 3000.,
-                2000., 1000.
-            ])
+            _regrid.get_cmor_levels("CMIP5", "plevs"),
+            [
+                100000.0,
+                92500.0,
+                85000.0,
+                70000.0,
+                60000.0,
+                50000.0,
+                40000.0,
+                30000.0,
+                25000.0,
+                20000.0,
+                15000.0,
+                10000.0,
+                7000.0,
+                5000.0,
+                3000.0,
+                2000.0,
+                1000.0,
+            ],
+        )
 
     def test_cmip5_p500(self):
-        self.assertListEqual(_regrid.get_cmor_levels('CMIP5', 'p500'), [50000])
+        self.assertListEqual(_regrid.get_cmor_levels("CMIP5", "p500"), [50000])
 
     def test_not_values_in_coordinate(self):
         with self.assertRaises(ValueError):
-            _regrid.get_cmor_levels('CMIP6', 'time')
+            _regrid.get_cmor_levels("CMIP6", "time")
 
     def test_bad_table(self):
         with self.assertRaises(ValueError):
-            _regrid.get_cmor_levels('CMOCK', 'p500')
+            _regrid.get_cmor_levels("CMOCK", "p500")
 
     def test_bad_coordinate(self):
         with self.assertRaises(ValueError):
-            _regrid.get_cmor_levels('CMIP5', 'uglycoord')
+            _regrid.get_cmor_levels("CMIP5", "uglycoord")
diff --git a/tests/integration/preprocessor/_regrid/test_get_file_levels.py b/tests/integration/preprocessor/_regrid/test_get_file_levels.py
index 9ca33d7145..ed5069e2a9 100644
--- a/tests/integration/preprocessor/_regrid/test_get_file_levels.py
+++ b/tests/integration/preprocessor/_regrid/test_get_file_levels.py
@@ -1,5 +1,6 @@
 """Integration test for
 :func:`esmvalcore.preprocessor.regrid.get_reference_levels`."""
+
 import iris.coords
 import iris.cube
 import iris.util
@@ -12,9 +13,9 @@
 
 @pytest.fixture
 def test_cube():
-    cube = iris.cube.Cube(np.ones([2, 2, 2]), var_name='var')
-    coord = iris.coords.DimCoord(np.arange(0, 2), var_name='coord')
-    coord.attributes['positive'] = 'up'
+    cube = iris.cube.Cube(np.ones([2, 2, 2]), var_name="var")
+    coord = iris.coords.DimCoord(np.arange(0, 2), var_name="coord")
+    coord.attributes["positive"] = "up"
     cube.add_dim_coord(coord, 0)
     return cube
 
@@ -23,11 +24,11 @@ def test_get_file_levels_from_coord(mocker, test_cube):
     dataset = mocker.create_autospec(Dataset, spec_set=True, instance=True)
     dataset.copy.return_value.load.return_value = test_cube
     reference_levels = _regrid.get_reference_levels(dataset)
-    assert reference_levels == [0., 1]
+    assert reference_levels == [0.0, 1]
 
 
 def test_get_file_levels_from_coord_fail(mocker, test_cube):
-    test_cube.coord('coord').attributes.clear()
+    test_cube.coord("coord").attributes.clear()
     dataset = mocker.create_autospec(Dataset, spec_set=True, instance=True)
     dataset.copy.return_value.load.return_value = test_cube
     with pytest.raises(ValueError):
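Note: the regrid tests below pass schemes either as plain strings or as dicts
with a "reference" entry pointing at a module:attribute path. A simplified,
illustrative model of how such a reference could resolve; ESMValCore's actual
loader also validates the spec, and load_scheme here is not part of any API:

    import importlib

    def load_scheme(spec: dict):
        """Resolve a {'reference': 'module:attr', ...} scheme spec."""
        spec = dict(spec)  # do not mutate the caller's dict
        module_name, attr_name = spec.pop("reference").split(":")
        obj = importlib.import_module(module_name)
        for part in attr_name.split("."):
            obj = getattr(obj, part)
        # Any remaining keys become keyword arguments of the scheme.
        return obj(**spec)

    # e.g. load_scheme({"reference": "iris.analysis:Linear",
    #                   "extrapolation_mode": "extrapolate"})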
diff --git a/tests/integration/preprocessor/_regrid/test_regrid.py b/tests/integration/preprocessor/_regrid/test_regrid.py
index 7166cfbfe5..bf39ee9ff2 100644
--- a/tests/integration/preprocessor/_regrid/test_regrid.py
+++ b/tests/integration/preprocessor/_regrid/test_regrid.py
@@ -17,7 +17,6 @@
 
 
 class Test:
-
     @pytest.fixture(autouse=True)
     def setUp(self):
         """Prepare tests."""
@@ -27,39 +26,51 @@ def setUp(self):
         self.cs = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
 
         # Setup grid for linear regridding
-        data = np.empty((1, 1))
-        lons = iris.coords.DimCoord([1.5],
-                                    standard_name='longitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([1.5],
-                                    standard_name='latitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [1.5],
+            standard_name="longitude",
+            bounds=[[1, 2]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [1.5],
+            standard_name="latitude",
+            bounds=[[1, 2]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
-        grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
-        self.grid_for_linear = grid
+        self.grid_for_linear = iris.cube.Cube(
+            np.empty((1, 1)),
+            dim_coords_and_dims=coords_spec,
+        )
+
+        # Setup mesh cube
+        self.mesh_cube = _make_cube(data, grid="mesh")
 
         # Setup unstructured cube and grid
-        data = np.zeros((1, 1))
-        lons = iris.coords.DimCoord([1.6],
-                                    standard_name='longitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([1.6],
-                                    standard_name='latitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [1.6],
+            standard_name="longitude",
+            bounds=[[1, 2]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [1.6],
+            standard_name="latitude",
+            bounds=[[1, 2]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
         self.tgt_grid_for_unstructured = iris.cube.Cube(
-            data, dim_coords_and_dims=coords_spec)
+            np.zeros((1, 1)), dim_coords_and_dims=coords_spec
+        )
 
-        lons = self.cube.coord('longitude')
-        lats = self.cube.coord('latitude')
+        lons = self.cube.coord("longitude")
+        lats = self.cube.coord("latitude")
         x, y = np.meshgrid(lons.points, lats.points)
 
         lats = iris.coords.AuxCoord(
@@ -85,72 +96,78 @@ def setUp(self):
         )
 
         unstructured_data = np.ma.masked_less(
-            self.cube.data.reshape(3, 4).astype(np.float32), 3.5
+            self.cube.data.reshape(3, -1).astype(np.float32), 3.5
         )
         self.unstructured_grid_cube = iris.cube.Cube(
             unstructured_data,
-            dim_coords_and_dims=[(self.cube.coord('air_pressure'), 0)],
+            dim_coords_and_dims=[(self.cube.coord("air_pressure"), 0)],
             aux_coords_and_dims=[(lats, 1), (lons, 1)],
         )
         self.unstructured_grid_cube.metadata = self.cube.metadata
 
         # Setup irregular cube and grid
         lons_2d = iris.coords.AuxCoord(
-            [[0, 1]], standard_name='longitude', units='degrees_east'
+            [[0, 1]], standard_name="longitude", units="degrees_east"
         )
         lats_2d = iris.coords.AuxCoord(
-            [[0, 1]], standard_name='latitude', units='degrees_north'
+            [[0, 1]], standard_name="latitude", units="degrees_north"
         )
         self.irregular_grid = iris.cube.Cube(
             [[1, 1]],
             aux_coords_and_dims=[(lats_2d, (0, 1)), (lons_2d, (0, 1))],
         )
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__linear(self, cache_weights):
         result = regrid(
             self.cube,
             self.grid_for_linear,
-            'linear',
+            "linear",
             cache_weights=cache_weights,
         )
         expected = np.array([[[1.5]], [[5.5]], [[9.5]]])
         assert_array_equal(result.data, expected)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__linear_file(self, tmp_path, cache_weights):
         file = tmp_path / "file.nc"
         iris.save(self.grid_for_linear, target=file)
-        result = regrid(
-            self.cube, file, 'linear', cache_weights=cache_weights
-        )
+        result = regrid(self.cube, file, "linear", cache_weights=cache_weights)
         expected = np.array([[[1.5]], [[5.5]], [[9.5]]])
         assert_array_equal(result.data, expected)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__linear_dataset(self, monkeypatch, cache_weights):
-        monkeypatch.setattr(Dataset, 'files', ["file.nc"])
+        monkeypatch.setattr(Dataset, "files", ["file.nc"])
 
         def load(_):
             return self.grid_for_linear
 
-        monkeypatch.setattr(Dataset, 'load', load)
+        monkeypatch.setattr(Dataset, "load", load)
         dataset = Dataset(
-            short_name='tas',
+            short_name="tas",
         )
         result = regrid(
-            self.cube, dataset, 'linear', cache_weights=cache_weights
+            self.cube, dataset, "linear", cache_weights=cache_weights
         )
         expected = np.array([[[1.5]], [[5.5]], [[9.5]]])
         assert_array_equal(result.data, expected)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
-    def test_regrid__esmf_rectilinear(self, cache_weights):
-        scheme_name = 'esmf_regrid.schemes:regrid_rectilinear_to_rectilinear'
-        scheme = {
-            'reference': scheme_name
-        }
+    @pytest.mark.parametrize(
+        "scheme",
+        [
+            {
+                "reference": "esmf_regrid.schemes:regrid_rectilinear_to_rectilinear",
+            },
+            {
+                "reference": "esmvalcore.preprocessor.regrid_schemes:IrisESMFRegrid",
+                "method": "bilinear",
+            },
+        ],
+    )
+    @pytest.mark.parametrize("cache_weights", [True, False])
+    def test_regrid__esmf_rectilinear(self, scheme, cache_weights):
         result = regrid(
             self.cube,
             self.grid_for_linear,
@@ -160,36 +177,45 @@ def test_regrid__esmf_rectilinear(self, cache_weights):
         expected = np.array([[[1.5]], [[5.5]], [[9.5]]])
         np.testing.assert_array_almost_equal(result.data, expected, decimal=1)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    def test_regrid__esmf_mesh_to_regular(self):
+        result = regrid(self.mesh_cube, self.grid_for_linear, "linear")
+        expected = np.array([[[1.5]], [[5.5]], [[9.5]]])
+        np.testing.assert_array_almost_equal(result.data, expected, decimal=1)
+
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__regular_coordinates(self, cache_weights):
         data = np.ones((1, 1))
-        lons = iris.coords.DimCoord([1.50000000000001],
-                                    standard_name='longitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([1.50000000000001],
-                                    standard_name='latitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [1.50000000000001],
+            standard_name="longitude",
+            bounds=[[1, 2]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [1.50000000000001],
+            standard_name="latitude",
+            bounds=[[1, 2]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
         regular_grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
         result = regrid(
             regular_grid,
             self.grid_for_linear,
-            'linear',
+            "linear",
             cache_weights=cache_weights,
         )
         iris.common.resolve.Resolve(result, self.grid_for_linear)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__linear_do_not_preserve_dtype(self, cache_weights):
         self.cube.data = self.cube.data.astype(int)
         result = regrid(
             self.cube,
             self.grid_for_linear,
-            'linear',
+            "linear",
             cache_weights=cache_weights,
         )
         expected = np.array([[[1.5]], [[5.5]], [[9.5]]])
@@ -197,142 +223,175 @@ def test_regrid__linear_do_not_preserve_dtype(self, cache_weights):
         assert np.issubdtype(self.cube.dtype, np.integer)
         assert np.issubdtype(result.dtype, np.floating)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__linear_with_extrapolation(self, cache_weights):
         data = np.empty((3, 3))
-        lons = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='longitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='latitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [0, 1.5, 3],
+            standard_name="longitude",
+            bounds=[[0, 1], [1, 2], [2, 3]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [0, 1.5, 3],
+            standard_name="latitude",
+            bounds=[[0, 1], [1, 2], [2, 3]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
         grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
         scheme = {
-            'reference': 'iris.analysis:Linear',
-            'extrapolation_mode': 'extrapolate',
+            "reference": "iris.analysis:Linear",
+            "extrapolation_mode": "extrapolate",
         }
         result = regrid(self.cube, grid, scheme, cache_weights=cache_weights)
-        expected = [[[-3., -1.5, 0.], [0., 1.5, 3.], [3., 4.5, 6.]],
-                    [[1., 2.5, 4.], [4., 5.5, 7.], [7., 8.5, 10.]],
-                    [[5., 6.5, 8.], [8., 9.5, 11.], [11., 12.5, 14.]]]
+        expected = [
+            [[-3.0, -1.5, 0.0], [0.0, 1.5, 3.0], [3.0, 4.5, 6.0]],
+            [[1.0, 2.5, 4.0], [4.0, 5.5, 7.0], [7.0, 8.5, 10.0]],
+            [[5.0, 6.5, 8.0], [8.0, 9.5, 11.0], [11.0, 12.5, 14.0]],
+        ]
         assert_array_equal(result.data, expected)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__linear_with_mask(self, cache_weights):
         data = np.empty((3, 3))
         grid = iris.cube.Cube(data)
-        lons = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='longitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='latitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [0, 1.5, 3],
+            standard_name="longitude",
+            bounds=[[0, 1], [1, 2], [2, 3]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [0, 1.5, 3],
+            standard_name="latitude",
+            bounds=[[0, 1], [1, 2], [2, 3]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
         grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
-        result = regrid(self.cube, grid, 'linear', cache_weights=cache_weights)
+        result = regrid(self.cube, grid, "linear", cache_weights=cache_weights)
         expected = ma.empty((3, 3, 3))
         expected.mask = ma.masked
         expected[:, 1, 1] = np.array([1.5, 5.5, 9.5])
         assert_array_equal(result.data, expected)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__nearest(self, cache_weights):
         data = np.empty((1, 1))
-        lons = iris.coords.DimCoord([1.6],
-                                    standard_name='longitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([1.6],
-                                    standard_name='latitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [1.6],
+            standard_name="longitude",
+            bounds=[[1, 2]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [1.6],
+            standard_name="latitude",
+            bounds=[[1, 2]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
         grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
         result = regrid(
-            self.cube, grid, 'nearest', cache_weights=cache_weights
+            self.cube, grid, "nearest", cache_weights=cache_weights
         )
         expected = np.array([[[3]], [[7]], [[11]]])
         assert_array_equal(result.data, expected)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__nearest_extrapolate_with_mask(self, cache_weights):
         data = np.empty((3, 3))
-        lons = iris.coords.DimCoord([0, 1.6, 3],
-                                    standard_name='longitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([0, 1.6, 3],
-                                    standard_name='latitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [0, 1.6, 3],
+            standard_name="longitude",
+            bounds=[[0, 1], [1, 2], [2, 3]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [0, 1.6, 3],
+            standard_name="latitude",
+            bounds=[[0, 1], [1, 2], [2, 3]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
         grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
         result = regrid(
-            self.cube, grid, 'nearest', cache_weights=cache_weights
+            self.cube, grid, "nearest", cache_weights=cache_weights
         )
         expected = ma.empty((3, 3, 3))
         expected.mask = ma.masked
         expected[:, 1, 1] = np.array([3, 7, 11])
         assert_array_equal(result.data, expected)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid__area_weighted(self, cache_weights):
         data = np.empty((1, 1))
-        lons = iris.coords.DimCoord([1.6],
-                                    standard_name='longitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([1.6],
-                                    standard_name='latitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [1.6],
+            standard_name="longitude",
+            bounds=[[1, 2]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [1.6],
+            standard_name="latitude",
+            bounds=[[1, 2]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
         grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
         result = regrid(
-            self.cube, grid, 'area_weighted', cache_weights=cache_weights
+            self.cube, grid, "area_weighted", cache_weights=cache_weights
         )
         expected = np.array([1.499886, 5.499886, 9.499886])
         np.testing.assert_array_almost_equal(result.data, expected, decimal=6)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
-    def test_regrid__esmf_area_weighted(self, cache_weights):
+    @pytest.mark.parametrize(
+        "scheme",
+        [
+            {"reference": "esmf_regrid.schemes:ESMFAreaWeighted"},
+            {
+                "reference": "esmvalcore.preprocessor.regrid_schemes:IrisESMFRegrid",
+                "method": "conservative",
+            },
+        ],
+    )
+    @pytest.mark.parametrize("cache_weights", [True, False])
+    def test_regrid__esmf_area_weighted(self, scheme, cache_weights):
         data = np.empty((1, 1))
-        lons = iris.coords.DimCoord([1.6],
-                                    standard_name='longitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([1.6],
-                                    standard_name='latitude',
-                                    bounds=[[1, 2]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [1.6],
+            standard_name="longitude",
+            bounds=[[1, 2]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [1.6],
+            standard_name="latitude",
+            bounds=[[1, 2]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
         grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
-        scheme = {
-            'reference': 'esmf_regrid.schemes:ESMFAreaWeighted'
-        }
         result = regrid(self.cube, grid, scheme, cache_weights=cache_weights)
         expected = np.array([[[1.499886]], [[5.499886]], [[9.499886]]])
         np.testing.assert_array_almost_equal(result.data, expected, decimal=6)
 
-    @pytest.mark.parametrize('scheme', ['linear', 'nearest'])
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("scheme", ["linear", "nearest"])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid_unstructured_grid_float(self, cache_weights, scheme):
         """Test regridding with unstructured cube of floats."""
         result = regrid(
@@ -344,68 +403,70 @@ def test_regrid_unstructured_grid_float(self, cache_weights, scheme):
         assert self.unstructured_grid_cube.dtype == np.float32
         assert result.dtype == np.float32
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid_nearest_unstructured_grid_int(self, cache_weights):
         """Test nearest-neighbor regridding with unstructured cube of ints."""
         self.unstructured_grid_cube.data = np.ones((3, 4), dtype=int)
         result = regrid(
             self.unstructured_grid_cube,
             self.tgt_grid_for_unstructured,
-            'nearest',
+            "nearest",
             cache_weights=cache_weights,
         )
         assert self.unstructured_grid_cube.dtype == int
         assert result.dtype == int
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_regrid_linear_unstructured_grid_int(self, cache_weights):
         """Test linear regridding with unstructured cube of ints."""
         self.unstructured_grid_cube.data = np.ones((3, 4), dtype=int)
         result = regrid(
             self.unstructured_grid_cube,
             self.tgt_grid_for_unstructured,
-            'linear',
+            "linear",
             cache_weights=cache_weights,
         )
         assert self.unstructured_grid_cube.dtype == int
         assert result.dtype == np.float64
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_invalid_scheme_for_unstructured_grid(self, cache_weights):
         """Test invalid scheme for unstructured cube."""
         msg = (
-            "Regridding scheme 'invalid' does not support unstructured data, "
+            "Regridding scheme 'invalid' not available for unstructured data, "
+            "expected one of: linear, nearest"
         )
         with pytest.raises(ValueError, match=msg):
             regrid(
                 self.unstructured_grid_cube,
                 self.tgt_grid_for_unstructured,
-                'invalid',
+                "invalid",
                 cache_weights=cache_weights,
             )
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_invalid_scheme_for_irregular_grid(self, cache_weights):
         """Test invalid scheme for irregular cube."""
         msg = (
-            "Regridding scheme 'invalid' does not support irregular data, "
+            "Regridding scheme 'invalid' not available for irregular data, "
+            "expected one of: area_weighted, linear, nearest"
         )
         with pytest.raises(ValueError, match=msg):
             regrid(
                 self.irregular_grid,
                 self.tgt_grid_for_unstructured,
-                'invalid',
+                "invalid",
                 cache_weights=cache_weights,
            )
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_deprecate_unstrucured_nearest(self, cache_weights):
         """Test deprecation of `unstructured_nearest` regridding scheme."""
         with pytest.warns(ESMValCoreDeprecationWarning):
             result = regrid(
                 self.unstructured_grid_cube,
                 self.tgt_grid_for_unstructured,
-                'unstructured_nearest',
+                "unstructured_nearest",
                 cache_weights=cache_weights,
             )
         expected = np.ma.array(
@@ -415,20 +476,24 @@ def test_deprecate_unstrucured_nearest(self, cache_weights):
         np.testing.assert_array_equal(result.data.mask, expected.mask)
         np.testing.assert_array_almost_equal(result.data, expected, decimal=6)
 
-    @pytest.mark.parametrize('cache_weights', [True, False])
+    @pytest.mark.parametrize("cache_weights", [True, False])
     def test_deprecate_linear_extrapolate(self, cache_weights):
         """Test deprecation of `linear_extrapolate` regridding scheme."""
         data = np.empty((3, 3))
-        lons = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='longitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_east',
-                                    coord_system=self.cs)
-        lats = iris.coords.DimCoord([0, 1.5, 3],
-                                    standard_name='latitude',
-                                    bounds=[[0, 1], [1, 2], [2, 3]],
-                                    units='degrees_north',
-                                    coord_system=self.cs)
+        lons = iris.coords.DimCoord(
+            [0, 1.5, 3],
+            standard_name="longitude",
+            bounds=[[0, 1], [1, 2], [2, 3]],
+            units="degrees_east",
+            coord_system=self.cs,
+        )
+        lats = iris.coords.DimCoord(
+            [0, 1.5, 3],
+            standard_name="latitude",
+            bounds=[[0, 1], [1, 2], [2, 3]],
+            units="degrees_north",
+            coord_system=self.cs,
+        )
         coords_spec = [(lats, 0), (lons, 1)]
         grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
 
@@ -436,11 +501,13 @@ def test_deprecate_linear_extrapolate(self, cache_weights):
             result = regrid(
                 self.cube,
                 grid,
-                'linear_extrapolate',
+                "linear_extrapolate",
                 cache_weights=cache_weights,
             )
-        expected = [[[-3., -1.5, 0.], [0., 1.5, 3.], [3., 4.5, 6.]],
-                    [[1., 2.5, 4.], [4., 5.5, 7.], [7., 8.5, 10.]],
-                    [[5., 6.5, 8.], [8., 9.5, 11.], [11., 12.5, 14.]]]
+        expected = [
+            [[-3.0, -1.5, 0.0], [0.0, 1.5, 3.0], [3.0, 4.5, 6.0]],
+            [[1.0, 2.5, 4.0], [4.0, 5.5, 7.0], [7.0, 8.5, 10.0]],
+            [[5.0, 6.5, 8.0], [8.0, 9.5, 11.0], [11.0, 12.5, 14.0]],
+        ]
         assert_array_equal(result.data, expected)
diff --git a/tests/integration/preprocessor/_regrid/test_regrid_schemes.py b/tests/integration/preprocessor/_regrid/test_regrid_schemes.py
index b4dd039f44..2685d33207 100644
--- a/tests/integration/preprocessor/_regrid/test_regrid_schemes.py
+++ b/tests/integration/preprocessor/_regrid/test_regrid_schemes.py
@@ -1,4 +1,5 @@
 """Integration tests for regrid schemes."""
+
 import numpy as np
 import pytest
 from iris.cube import Cube
@@ -24,13 +25,13 @@ def generic_func_scheme():
 def test_generic_func_scheme_init(generic_func_scheme):
     """Test ``GenericFuncScheme``."""
     assert generic_func_scheme.func == set_data_to_const
-    assert generic_func_scheme.kwargs == {'const': 2}
+    assert generic_func_scheme.kwargs == {"const": 2}
 
 
 def test_generic_func_scheme_repr(generic_func_scheme):
     """Test ``GenericFuncScheme``."""
     repr = generic_func_scheme.__repr__()
-    assert repr == 'GenericFuncScheme(set_data_to_const, const=2)'
+    assert repr == "GenericFuncScheme(set_data_to_const, const=2)"
 
 
 def test_generic_func_scheme_regridder(generic_func_scheme, mocker):
@@ -43,13 +44,13 @@ def test_generic_func_scheme_regridder(generic_func_scheme, mocker):
     assert regridder.src_cube == mocker.sentinel.src_cube
     assert regridder.tgt_cube == mocker.sentinel.tgt_cube
     assert regridder.func == set_data_to_const
-    assert regridder.kwargs == {'const': 2}
+    assert regridder.kwargs == {"const": 2}
 
 
 def test_generic_func_scheme_regrid(generic_func_scheme, mocker):
     """Test ``GenericFuncScheme``."""
-    cube = Cube([0.0, 0.0], var_name='x')
+    cube = Cube([0.0, 0.0], var_name="x")
     result = cube.regrid(mocker.sentinel.tgt_grid, generic_func_scheme)
-    assert result == Cube([2, 2], var_name='x')
+    assert result == Cube([2, 2], var_name="x")
diff --git a/tests/integration/preprocessor/_regrid/test_regrid_unstructured.py b/tests/integration/preprocessor/_regrid/test_regrid_unstructured.py
index fee070863c..1455fd5065 100644
--- a/tests/integration/preprocessor/_regrid/test_regrid_unstructured.py
+++ b/tests/integration/preprocessor/_regrid/test_regrid_unstructured.py
@@ -1,4 +1,4
@@ -""" Integration tests for unstructured regridding.""" +"""Integration tests for unstructured regridding.""" import numpy as np import pytest @@ -16,32 +16,32 @@ def unstructured_grid_cube_2d(): """Sample 2D cube with unstructured grid.""" time = DimCoord( - [0.0, 1.0], standard_name='time', units='days since 1950-01-01' + [0.0, 1.0], standard_name="time", units="days since 1950-01-01" ) lat = AuxCoord( [-50.0, -50.0, 20.0, 20.0], - standard_name='latitude', - units='degrees_north', + standard_name="latitude", + units="degrees_north", ) lon = AuxCoord( [71.0, 250.0, 250.0, 71.0], - standard_name='longitude', - units='degrees_east', + standard_name="longitude", + units="degrees_east", ) - acoord_0 = AuxCoord([0, 0], var_name='aux0') - acoord_1 = AuxCoord([0, 0, 0, 0], var_name='aux1') + acoord_0 = AuxCoord([0, 0], var_name="aux0") + acoord_1 = AuxCoord([0, 0, 0, 0], var_name="aux1") cube = Cube( np.array( [[0.0, 1.0, 2.0, 3.0], [0.0, 0.0, 0.0, 0.0]], dtype=np.float32 ), - standard_name='air_temperature', - var_name='ta', - long_name='Air Temperature', - units='K', + standard_name="air_temperature", + var_name="ta", + long_name="Air Temperature", + units="K", dim_coords_and_dims=[(time, 0)], aux_coords_and_dims=[(acoord_0, 0), (acoord_1, 1), (lat, 1), (lon, 1)], - attributes={'test': '1'}, - cell_methods=(CellMethod('test', 'time'),), + attributes={"test": "1"}, + cell_methods=(CellMethod("test", "time"),), ) return cube @@ -50,28 +50,28 @@ def unstructured_grid_cube_2d(): def unstructured_grid_cube_3d(): """Sample 3D cube with unstructured grid.""" time = DimCoord( - [0.0, 1.0], standard_name='time', units='days since 1950-01-01' + [0.0, 1.0], standard_name="time", units="days since 1950-01-01" ) - alt = DimCoord([0.0, 1.0], standard_name='altitude', units='m') + alt = DimCoord([0.0, 1.0], standard_name="altitude", units="m") lat = AuxCoord( [-50.0, -50.0, 20.0, 20.0], - standard_name='latitude', - units='degrees_north', + standard_name="latitude", + units="degrees_north", ) lon = AuxCoord( [71.0, 250.0, 250.0, 71.0], - standard_name='longitude', - units='degrees_east', + standard_name="longitude", + units="degrees_east", ) - acoord = AuxCoord([0, 0], var_name='aux') + acoord = AuxCoord([0, 0], var_name="aux") cube = Cube( np.ma.masked_greater( np.arange(16, dtype=np.float32).reshape(2, 2, 4), 7.5 ), - standard_name='air_temperature', - var_name='ta', - long_name='Air Temperature', - units='K', + standard_name="air_temperature", + var_name="ta", + long_name="Air Temperature", + units="K", dim_coords_and_dims=[(time, 0), (alt, 1)], aux_coords_and_dims=[(acoord, 1), (lat, 2), (lon, 2)], ) @@ -81,7 +81,7 @@ def unstructured_grid_cube_3d(): @pytest.fixture def target_grid(): """Sample cube with regular grid.""" - return _global_stock_cube('120x60') + return _global_stock_cube("120x60") class TestUnstructuredNearest: @@ -95,18 +95,16 @@ def test_regridding(self, unstructured_grid_cube_2d, target_grid): assert src_cube == unstructured_grid_cube_2d assert result.shape == (2, 3, 3) - assert result.coord('time') == src_cube.coord('time') - assert result.coord('latitude') == target_grid.coord('latitude') - assert result.coord('longitude') == target_grid.coord('longitude') + assert result.coord("time") == src_cube.coord("time") + assert result.coord("latitude") == target_grid.coord("latitude") + assert result.coord("longitude") == target_grid.coord("longitude") assert result.dtype == np.float32 np.testing.assert_allclose( result.data, - [[[0.0, 1.0, 1.0], - [3.0, 2.0, 2.0], - [3.0, 2.0, 2.0]], - 
[[0.0, 0.0, 0.0], - [0.0, 0.0, 0.0], - [0.0, 0.0, 0.0]]], + [ + [[0.0, 1.0, 1.0], [3.0, 2.0, 2.0], [3.0, 2.0, 2.0]], + [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], + ], ) def test_regridding_with_dim_coord( @@ -118,8 +116,8 @@ def test_regridding_with_dim_coord( src_cube = unstructured_grid_cube_2d.copy() dim_coord = DimCoord( [0, 1, 2, 3], - var_name='x', - standard_name='grid_latitude', + var_name="x", + standard_name="grid_latitude", ) src_cube.add_dim_coord(dim_coord, 1) assert src_cube != unstructured_grid_cube_2d @@ -127,30 +125,32 @@ def test_regridding_with_dim_coord( result = src_cube.regrid(target_grid, UnstructuredNearest()) assert src_cube == unstructured_grid_cube_2d - assert not src_cube.coords('grid_latitude') + assert not src_cube.coords("grid_latitude") assert result.shape == (2, 3, 3) - assert result.coord('time') == src_cube.coord('time') - assert result.coord('latitude') == target_grid.coord('latitude') - assert result.coord('longitude') == target_grid.coord('longitude') + assert result.coord("time") == src_cube.coord("time") + assert result.coord("latitude") == target_grid.coord("latitude") + assert result.coord("longitude") == target_grid.coord("longitude") assert result.dtype == np.float32 np.testing.assert_allclose( result.data, - [[[0.0, 1.0, 1.0], - [3.0, 2.0, 2.0], - [3.0, 2.0, 2.0]], - [[0.0, 0.0, 0.0], - [0.0, 0.0, 0.0], - [0.0, 0.0, 0.0]]], + [ + [[0.0, 1.0, 1.0], [3.0, 2.0, 2.0], [3.0, 2.0, 2.0]], + [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], + ], ) class TestUnstructuredLinear: """Test ``UnstructuredLinear``.""" - @pytest.mark.parametrize('units', [None, 'rad']) - @pytest.mark.parametrize('lazy', [True, False]) + @pytest.mark.parametrize("units", [None, "rad"]) + @pytest.mark.parametrize("lazy", [True, False]) def test_regridding( - self, lazy, units, unstructured_grid_cube_2d, target_grid, + self, + lazy, + units, + unstructured_grid_cube_2d, + target_grid, ): """Test regridding.""" if lazy: @@ -158,10 +158,10 @@ def test_regridding( unstructured_grid_cube_2d.lazy_data() ) if units: - unstructured_grid_cube_2d.coord('latitude').convert_units(units) - unstructured_grid_cube_2d.coord('longitude').convert_units(units) - target_grid.coord('latitude').convert_units(units) - target_grid.coord('longitude').convert_units(units) + unstructured_grid_cube_2d.coord("latitude").convert_units(units) + unstructured_grid_cube_2d.coord("longitude").convert_units(units) + target_grid.coord("latitude").convert_units(units) + target_grid.coord("longitude").convert_units(units) src_cube = unstructured_grid_cube_2d.copy() result = src_cube.regrid(target_grid, UnstructuredLinear()) @@ -169,32 +169,39 @@ def test_regridding( assert src_cube == unstructured_grid_cube_2d assert result.metadata == src_cube.metadata - assert result.coord('time') == src_cube.coord('time') - assert result.coord('latitude') == target_grid.coord('latitude') - assert result.coord('longitude') == target_grid.coord('longitude') - assert result.coord('aux0') == src_cube.coord('aux0') - assert not result.coords('aux1') + assert result.coord("time") == src_cube.coord("time") + assert result.coord("latitude") == target_grid.coord("latitude") + assert result.coord("longitude") == target_grid.coord("longitude") + assert result.coord("aux0") == src_cube.coord("aux0") + assert not result.coords("aux1") assert result.shape == (2, 3, 3) assert result.has_lazy_data() is lazy assert result.dtype == np.float32 print(result.data) expected_data = np.ma.masked_invalid( - [[ - [np.nan, np.nan, np.nan], - 
[2.0820837020874023, 2.105347156524658, 1.4380426406860352], - [np.nan, np.nan, np.nan], - ], [ - [np.nan, np.nan, np.nan], - [0.0, 0.0, 0.0], - [np.nan, np.nan, np.nan], - ]] + [ + [ + [np.nan, np.nan, np.nan], + [ + 2.0820837020874023, + 2.105347156524658, + 1.4380426406860352, + ], + [np.nan, np.nan, np.nan], + ], + [ + [np.nan, np.nan, np.nan], + [0.0, 0.0, 0.0], + [np.nan, np.nan, np.nan], + ], + ] ) np.testing.assert_allclose(result.data, expected_data) np.testing.assert_array_equal(result.data.mask, expected_data.mask) - @pytest.mark.parametrize('units', [None, 'rad']) - @pytest.mark.parametrize('lazy', [True, False]) + @pytest.mark.parametrize("units", [None, "rad"]) + @pytest.mark.parametrize("lazy", [True, False]) def test_regridding_mask_and_transposed( self, units, lazy, unstructured_grid_cube_3d, target_grid ): @@ -207,10 +214,10 @@ def test_regridding_mask_and_transposed( unstructured_grid_cube_3d.lazy_data() ) if units: - unstructured_grid_cube_3d.coord('latitude').convert_units(units) - unstructured_grid_cube_3d.coord('longitude').convert_units(units) - target_grid.coord('latitude').convert_units(units) - target_grid.coord('longitude').convert_units(units) + unstructured_grid_cube_3d.coord("latitude").convert_units(units) + unstructured_grid_cube_3d.coord("longitude").convert_units(units) + target_grid.coord("latitude").convert_units(units) + target_grid.coord("longitude").convert_units(units) src_cube = unstructured_grid_cube_3d.copy() result = src_cube.regrid(target_grid, UnstructuredLinear()) @@ -218,11 +225,11 @@ def test_regridding_mask_and_transposed( assert src_cube == unstructured_grid_cube_3d assert result.metadata == src_cube.metadata - assert result.coord('time') == src_cube.coord('time') - assert result.coord('altitude') == src_cube.coord('altitude') - assert result.coord('latitude') == target_grid.coord('latitude') - assert result.coord('longitude') == target_grid.coord('longitude') - assert result.coord('aux') == src_cube.coord('aux') + assert result.coord("time") == src_cube.coord("time") + assert result.coord("altitude") == src_cube.coord("altitude") + assert result.coord("latitude") == target_grid.coord("latitude") + assert result.coord("longitude") == target_grid.coord("longitude") + assert result.coord("aux") == src_cube.coord("aux") assert result.shape == (2, 3, 3, 2) assert result.has_lazy_data() is lazy @@ -255,7 +262,7 @@ def test_invalid_tgt_cube(self, unstructured_grid_cube_2d): with pytest.raises(ValueError, match=msg): src_cube.regrid(src_cube, UnstructuredLinear()) - @pytest.mark.parametrize('units', [None, 'rad']) + @pytest.mark.parametrize("units", [None, "rad"]) def test_regridder_same_grid( self, units, @@ -265,29 +272,29 @@ def test_regridder_same_grid( ): """Test regridding.""" if units: - unstructured_grid_cube_2d.coord('latitude').convert_units(units) - unstructured_grid_cube_2d.coord('longitude').convert_units(units) - unstructured_grid_cube_3d.coord('latitude').convert_units(units) - unstructured_grid_cube_3d.coord('longitude').convert_units(units) - target_grid.coord('latitude').convert_units(units) - target_grid.coord('longitude').convert_units(units) + unstructured_grid_cube_2d.coord("latitude").convert_units(units) + unstructured_grid_cube_2d.coord("longitude").convert_units(units) + unstructured_grid_cube_3d.coord("latitude").convert_units(units) + unstructured_grid_cube_3d.coord("longitude").convert_units(units) + target_grid.coord("latitude").convert_units(units) + target_grid.coord("longitude").convert_units(units) cube = 
unstructured_grid_cube_3d.copy() regridder = UnstructuredLinear().regridder( unstructured_grid_cube_2d, target_grid ) result = regridder(cube) assert result.shape == (2, 2, 3, 3) - assert result.coord('time') == cube.coord('time') - assert result.coord('altitude') == cube.coord('altitude') - assert result.coord('latitude') == target_grid.coord('latitude') - assert result.coord('longitude') == target_grid.coord('longitude') + assert result.coord("time") == cube.coord("time") + assert result.coord("altitude") == cube.coord("altitude") + assert result.coord("latitude") == target_grid.coord("latitude") + assert result.coord("longitude") == target_grid.coord("longitude") def test_regridder_different_grid( self, unstructured_grid_cube_2d, unstructured_grid_cube_3d, target_grid ): """Test regridding.""" cube = unstructured_grid_cube_3d.copy() - cube.coord('latitude').points = [0.0, 0.0, 0.0, 0.0] + cube.coord("latitude").points = [0.0, 0.0, 0.0, 0.0] regridder = UnstructuredLinear().regridder( unstructured_grid_cube_2d, target_grid ) diff --git a/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py b/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py index d4a9b0b217..42dee35f62 100644 --- a/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py +++ b/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py @@ -3,6 +3,7 @@ Integration tests for the :func:`esmvalcore.preprocessor._supplementary_vars` module. """ + import iris import iris.fileformats import numpy as np @@ -18,104 +19,148 @@ class Test: """Test class.""" + @pytest.fixture(autouse=True) def setUp(self): """Assemble a stock cube.""" fx_area_data = np.ones((3, 3)) fx_volume_data = np.ones((3, 3, 3)) self.new_cube_data = np.empty((3, 3)) - self.new_cube_data[:] = 200. + self.new_cube_data[:] = 200.0 self.new_cube_3D_data = np.empty((3, 3, 3)) - self.new_cube_3D_data[:] = 200. 
+ self.new_cube_3D_data[:] = 200.0 crd_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - self.lons = iris.coords.DimCoord([0, 1.5, 3], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_east', - coord_system=crd_sys) - self.lats = iris.coords.DimCoord([0, 1.5, 3], - standard_name='latitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_north', - coord_system=crd_sys) - self.depth = iris.coords.DimCoord([0, 1.5, 3], - standard_name='depth', - bounds=[[0, 1], [1, 2], [2, 3]], - units='m', - long_name='ocean depth coordinate') + self.lons = iris.coords.DimCoord( + [0, 1.5, 3], + standard_name="longitude", + bounds=[[0, 1], [1, 2], [2, 3]], + units="degrees_east", + coord_system=crd_sys, + ) + self.lats = iris.coords.DimCoord( + [0, 1.5, 3], + standard_name="latitude", + bounds=[[0, 1], [1, 2], [2, 3]], + units="degrees_north", + coord_system=crd_sys, + ) + self.depth = iris.coords.DimCoord( + [0, 1.5, 3], + standard_name="depth", + bounds=[[0, 1], [1, 2], [2, 3]], + units="m", + long_name="ocean depth coordinate", + ) self.monthly_times = iris.coords.DimCoord( - [15.5, 45, 74.5, 105, 135.5, 166, - 196.5, 227.5, 258, 288.5, 319, 349.5], - standard_name='time', - var_name='time', - bounds=[[0, 31], [31, 59], [59, 90], - [90, 120], [120, 151], [151, 181], - [181, 212], [212, 243], [243, 273], - [273, 304], [304, 334], [334, 365]], - units='days since 1950-01-01 00:00:00') + [ + 15.5, + 45, + 74.5, + 105, + 135.5, + 166, + 196.5, + 227.5, + 258, + 288.5, + 319, + 349.5, + ], + standard_name="time", + var_name="time", + bounds=[ + [0, 31], + [31, 59], + [59, 90], + [90, 120], + [120, 151], + [151, 181], + [181, 212], + [212, 243], + [243, 273], + [273, 304], + [304, 334], + [334, 365], + ], + units="days since 1950-01-01 00:00:00", + ) self.yearly_times = iris.coords.DimCoord( [182.5, 547.5], - standard_name='time', + standard_name="time", bounds=[[0, 365], [365, 730]], - units='days since 1950-01-01 00:00') + units="days since 1950-01-01 00:00", + ) self.coords_spec = [(self.lats, 0), (self.lons, 1)] - self.fx_area = iris.cube.Cube(fx_area_data, - dim_coords_and_dims=self.coords_spec) - self.fx_volume = iris.cube.Cube(fx_volume_data, - dim_coords_and_dims=[ - (self.depth, 0), - (self.lats, 1), - (self.lons, 2) - ]) - self.monthly_volume = iris.cube.Cube(np.ones((12, 3, 3, 3)), - dim_coords_and_dims=[ - (self.monthly_times, 0), - (self.depth, 1), - (self.lats, 2), - (self.lons, 3) - ]) + self.fx_area = iris.cube.Cube( + fx_area_data, dim_coords_and_dims=self.coords_spec + ) + self.fx_volume = iris.cube.Cube( + fx_volume_data, + dim_coords_and_dims=[ + (self.depth, 0), + (self.lats, 1), + (self.lons, 2), + ], + ) + self.monthly_volume = iris.cube.Cube( + np.ones((12, 3, 3, 3)), + dim_coords_and_dims=[ + (self.monthly_times, 0), + (self.depth, 1), + (self.lats, 2), + (self.lons, 3), + ], + ) - @pytest.mark.parametrize('var_name', ['areacella', 'areacello']) + @pytest.mark.parametrize("var_name", ["areacella", "areacello"]) def test_add_cell_measure_area(self, var_name): """Test add area fx variables as cell measures.""" self.fx_area.var_name = var_name - self.fx_area.standard_name = 'cell_area' - self.fx_area.units = 'm2' - cube = iris.cube.Cube(self.new_cube_data, - dim_coords_and_dims=self.coords_spec) + self.fx_area.standard_name = "cell_area" + self.fx_area.units = "m2" + cube = iris.cube.Cube( + self.new_cube_data, dim_coords_and_dims=self.coords_spec + ) cube = add_supplementary_variables(cube, [self.fx_area]) assert 
cube.cell_measure(self.fx_area.standard_name) is not None def test_add_cell_measure_volume(self): """Test add volume as cell measure.""" - self.fx_volume.var_name = 'volcello' - self.fx_volume.standard_name = 'ocean_volume' - self.fx_volume.units = 'm3' - cube = iris.cube.Cube(self.new_cube_3D_data, - dim_coords_and_dims=[ - (self.depth, 0), - (self.lats, 1), - (self.lons, 2)]) + self.fx_volume.var_name = "volcello" + self.fx_volume.standard_name = "ocean_volume" + self.fx_volume.units = "m3" + cube = iris.cube.Cube( + self.new_cube_3D_data, + dim_coords_and_dims=[ + (self.depth, 0), + (self.lats, 1), + (self.lons, 2), + ], + ) cube = add_supplementary_variables(cube, [self.fx_volume]) assert cube.cell_measure(self.fx_volume.standard_name) is not None def test_no_cell_measure(self): """Test no cell measure is added.""" - cube = iris.cube.Cube(self.new_cube_3D_data, - dim_coords_and_dims=[ - (self.depth, 0), - (self.lats, 1), - (self.lons, 2)]) + cube = iris.cube.Cube( + self.new_cube_3D_data, + dim_coords_and_dims=[ + (self.depth, 0), + (self.lats, 1), + (self.lons, 2), + ], + ) cube = add_supplementary_variables(cube, []) assert cube.cell_measures() == [] def test_add_supplementary_vars(self): """Test invalid variable is not added as cell measure.""" - self.fx_area.var_name = 'sftlf' + self.fx_area.var_name = "sftlf" self.fx_area.standard_name = "land_area_fraction" - self.fx_area.units = '%' - cube = iris.cube.Cube(self.new_cube_data, - dim_coords_and_dims=self.coords_spec) + self.fx_area.units = "%" + cube = iris.cube.Cube( + self.new_cube_data, dim_coords_and_dims=self.coords_spec + ) cube = add_supplementary_variables(cube, [self.fx_area]) assert cube.ancillary_variable(self.fx_area.standard_name) is not None @@ -124,38 +169,48 @@ def test_wrong_shape(self, monkeypatch): volume_data = np.ones((2, 3, 3, 3)) volume_cube = iris.cube.Cube( volume_data, - dim_coords_and_dims=[(self.yearly_times, 0), - (self.depth, 1), - (self.lats, 2), - (self.lons, 3)]) - volume_cube.standard_name = 'ocean_volume' - volume_cube.var_name = 'volcello' - volume_cube.units = 'm3' + dim_coords_and_dims=[ + (self.yearly_times, 0), + (self.depth, 1), + (self.lats, 2), + (self.lons, 3), + ], + ) + volume_cube.standard_name = "ocean_volume" + volume_cube.var_name = "volcello" + volume_cube.units = "m3" data = np.ones((12, 3, 3, 3)) cube = iris.cube.Cube( data, - dim_coords_and_dims=[(self.monthly_times, 0), - (self.depth, 1), - (self.lats, 2), - (self.lons, 3)]) - cube.var_name = 'thetao' + dim_coords_and_dims=[ + (self.monthly_times, 0), + (self.depth, 1), + (self.lats, 2), + (self.lons, 3), + ], + ) + cube.var_name = "thetao" with pytest.raises(iris.exceptions.CannotAddError): add_supplementary_variables(cube, [volume_cube]) def test_remove_supplementary_vars(self): """Test supplementary variables are removed from cube.""" - cube = iris.cube.Cube(self.new_cube_3D_data, - dim_coords_and_dims=[(self.depth, 0), - (self.lats, 1), - (self.lons, 2)]) - self.fx_area.var_name = 'areacella' - self.fx_area.standard_name = 'cell_area' - self.fx_area.units = 'm2' - add_cell_measure(cube, self.fx_area, measure='area') + cube = iris.cube.Cube( + self.new_cube_3D_data, + dim_coords_and_dims=[ + (self.depth, 0), + (self.lats, 1), + (self.lons, 2), + ], + ) + self.fx_area.var_name = "areacella" + self.fx_area.standard_name = "cell_area" + self.fx_area.units = "m2" + add_cell_measure(cube, self.fx_area, measure="area") assert cube.cell_measure(self.fx_area.standard_name) is not None - self.fx_area.var_name = 'sftlf' + 
self.fx_area.var_name = "sftlf" self.fx_area.standard_name = "land_area_fraction" - self.fx_area.units = '%' + self.fx_area.units = "%" add_ancillary_variable(cube, self.fx_area) assert cube.ancillary_variable(self.fx_area.standard_name) is not None cube = remove_supplementary_variables(cube) diff --git a/tests/integration/preprocessor/_supplementary_vars/test_register.py b/tests/integration/preprocessor/_supplementary_vars/test_register.py index cfe6d5b7da..9512b5067e 100644 --- a/tests/integration/preprocessor/_supplementary_vars/test_register.py +++ b/tests/integration/preprocessor/_supplementary_vars/test_register.py @@ -8,21 +8,21 @@ def test_register(monkeypatch): registered = {} monkeypatch.setattr( _supplementary_vars, - 'PREPROCESSOR_SUPPLEMENTARIES', + "PREPROCESSOR_SUPPLEMENTARIES", registered, ) @_supplementary_vars.register_supplementaries( - ['areacella'], - required='require_at_least_one', + ["areacella"], + required="require_at_least_one", ) def test_func(): pass assert registered == { - 'test_func': { - 'required': 'require_at_least_one', - 'variables': ['areacella'], + "test_func": { + "required": "require_at_least_one", + "variables": ["areacella"], } } @@ -32,8 +32,8 @@ def test_register_invalid_fails(): with pytest.raises(NotImplementedError): @_supplementary_vars.register_supplementaries( - ['areacella'], - required='invalid', + ["areacella"], + required="invalid", ) def test_func(): pass diff --git a/tests/integration/preprocessor/_time/test_time.py b/tests/integration/preprocessor/_time/test_time.py index f1da8d45e7..d283e04515 100644 --- a/tests/integration/preprocessor/_time/test_time.py +++ b/tests/integration/preprocessor/_time/test_time.py @@ -24,43 +24,41 @@ def easy_2d_cube(): time = DimCoord( [2.0, 3.0], bounds=[[-0.5, 2.5], [2.5, 3.5]], - standard_name='time', - units='days since 2000-01-01', - ) - lat = DimCoord( - [0.0, 1.0], standard_name='latitude', units='degrees' + standard_name="time", + units="days since 2000-01-01", ) + lat = DimCoord([0.0, 1.0], standard_name="latitude", units="degrees") cube = Cube( np.arange(4, dtype=np.float32).reshape(2, 2), - standard_name='air_temperature', - units='K', + standard_name="air_temperature", + units="K", dim_coords_and_dims=[(time, 0), (lat, 1)], ) return cube @pytest.mark.parametrize( - 'operator,kwargs,expected_data,expected_units', + "operator,kwargs,expected_data,expected_units", [ - ('gmean', {}, [0.0, 1.7320509], 'K'), - ('hmean', {}, [0.0, 1.5], 'K'), - ('max', {}, [2.0, 3.0], 'K'), - ('mean', {}, [0.5, 1.5], 'K'), - ('mean', {'weights': False}, [1.0, 2.0], 'K'), - ('median', {}, [1.0, 2.0], 'K'), - ('min', {}, [0.0, 1.0], 'K'), - ('peak', {}, [2.0, 3.0], 'K'), - ('percentile', {'percent': 0.0}, [0.0, 1.0], 'K'), - ('rms', {}, [1.0, 1.7320509], 'K'), - ('rms', {'weights': False}, [1.414214, 2.236068], 'K'), - ('std_dev', {}, [1.414214, 1.414214], 'K'), - ('std_dev', {'ddof': 0}, [1.0, 1.0], 'K'), - ('sum', {}, [2.0, 6.0], 'K day'), - ('sum', {'weights': False}, [2.0, 4.0], 'K'), - ('variance', {}, [2.0, 2.0], 'K2'), - ('variance', {'ddof': 0}, [1.0, 1.0], 'K2'), - ('wpercentile', {'percent': 50.0}, [0.5, 1.5], 'K'), - ] + ("gmean", {}, [0.0, 1.7320509], "K"), + ("hmean", {}, [0.0, 1.5], "K"), + ("max", {}, [2.0, 3.0], "K"), + ("mean", {}, [0.5, 1.5], "K"), + ("mean", {"weights": False}, [1.0, 2.0], "K"), + ("median", {}, [1.0, 2.0], "K"), + ("min", {}, [0.0, 1.0], "K"), + ("peak", {}, [2.0, 3.0], "K"), + ("percentile", {"percent": 0.0}, [0.0, 1.0], "K"), + ("rms", {}, [1.0, 1.7320509], "K"), + ("rms", 
{"weights": False}, [1.414214, 2.236068], "K"), + ("std_dev", {}, [1.414214, 1.414214], "K"), + ("std_dev", {"ddof": 0}, [1.0, 1.0], "K"), + ("sum", {}, [2.0, 6.0], "K day"), + ("sum", {"weights": False}, [2.0, 4.0], "K"), + ("variance", {}, [2.0, 2.0], "K2"), + ("variance", {"ddof": 0}, [1.0, 1.0], "K2"), + ("wpercentile", {"percent": 50.0}, [0.5, 1.5], "K"), + ], ) def test_statistical_operators( operator, kwargs, expected_data, expected_units, easy_2d_cube @@ -73,8 +71,8 @@ def test_statistical_operators( assert res.standard_name == easy_2d_cube.standard_name assert res.attributes == easy_2d_cube.attributes assert res.units == expected_units - assert res.coord('latitude') == easy_2d_cube.coord('latitude') - assert res.coord('time').shape == (1, ) + assert res.coord("latitude") == easy_2d_cube.coord("latitude") + assert res.coord("time").shape == (1,) np.testing.assert_allclose(res.data, expected_data, atol=1e-6, rtol=1e-6) @@ -83,47 +81,47 @@ def realistic_4d_cube(): """Create realistic 4D cube.""" time = DimCoord( [11.0, 12.0], - standard_name='time', - units=Unit('hours since 1851-01-01', calendar='360_day'), + standard_name="time", + units=Unit("hours since 1851-01-01", calendar="360_day"), ) - plev = DimCoord([50000], standard_name='air_pressure', units='Pa') - lat = DimCoord([0.0, 1.0], standard_name='latitude', units='degrees') + plev = DimCoord([50000], standard_name="air_pressure", units="Pa") + lat = DimCoord([0.0, 1.0], standard_name="latitude", units="degrees") lon = DimCoord( - [0.0, 20.0, 345.0], standard_name='longitude', units='degrees' + [0.0, 20.0, 345.0], standard_name="longitude", units="degrees" ) aux_2d_data = np.arange(2 * 3).reshape(2, 3) aux_2d_bounds = np.stack( (aux_2d_data - 1, aux_2d_data, aux_2d_data + 1), axis=-1 ) - aux_2d = AuxCoord(aux_2d_data, var_name='aux_2d') + aux_2d = AuxCoord(aux_2d_data, var_name="aux_2d") aux_2d_with_bnds = AuxCoord( - aux_2d_data, bounds=aux_2d_bounds, var_name='aux_2d_with_bnds' + aux_2d_data, bounds=aux_2d_bounds, var_name="aux_2d_with_bnds" ) - aux_time = AuxCoord(['Jan', 'Jan'], var_name='aux_time') - aux_lon = AuxCoord([0, 1, 2], var_name='aux_lon') + aux_time = AuxCoord(["Jan", "Jan"], var_name="aux_time") + aux_lon = AuxCoord([0, 1, 2], var_name="aux_lon") cell_area = CellMeasure( np.arange(2 * 2 * 3).reshape(2, 2, 3) + 10, - standard_name='cell_area', - units='m2', - measure='area', + standard_name="cell_area", + units="m2", + measure="area", ) type_var = AncillaryVariable( - [['sea', 'land', 'lake'], ['lake', 'sea', 'land']], - var_name='type', - units='no_unit', + [["sea", "land", "lake"], ["lake", "sea", "land"]], + var_name="type", + units="no_unit", ) cube = Cube( np.ma.masked_inside( np.arange(2 * 1 * 2 * 3).reshape(2, 1, 2, 3), 1, 3 ), - var_name='ta', - standard_name='air_temperature', - long_name='Air Temperature', - units='K', - cell_methods=[CellMethod('mean', 'time')], + var_name="ta", + standard_name="air_temperature", + long_name="Air Temperature", + units="K", + cell_methods=[CellMethod("mean", "time")], dim_coords_and_dims=[(time, 0), (plev, 1), (lat, 2), (lon, 3)], aux_coords_and_dims=[ (aux_2d, (0, 3)), @@ -133,7 +131,7 @@ def realistic_4d_cube(): ], cell_measures_and_dims=[(cell_area, (0, 2, 3))], ancillary_variables_and_dims=[(type_var, (0, 3))], - attributes={'test': 1}, + attributes={"test": 1}, ) return cube @@ -148,48 +146,46 @@ def test_local_solar_time_regular(realistic_4d_cube): assert result.metadata == input_cube.metadata assert result.shape == input_cube.shape - assert result.coord('time') 
!= input_cube.coord('time') - assert result.coord('air_pressure') == input_cube.coord('air_pressure') - assert result.coord('latitude') == input_cube.coord('latitude') - assert result.coord('longitude') == input_cube.coord('longitude') - - assert result.coord('time').standard_name == 'time' - assert result.coord('time').var_name is None - assert result.coord('time').long_name == 'Local Solar Time' - assert result.coord('time').units == Unit( - 'hours since 1850-01-01', calendar='360_day' - ) - assert result.coord('time').attributes == {} - np.testing.assert_allclose( - result.coord('time').points, [8651.0, 8652.0] - ) + assert result.coord("time") != input_cube.coord("time") + assert result.coord("air_pressure") == input_cube.coord("air_pressure") + assert result.coord("latitude") == input_cube.coord("latitude") + assert result.coord("longitude") == input_cube.coord("longitude") + + assert result.coord("time").standard_name == "time" + assert result.coord("time").var_name is None + assert result.coord("time").long_name == "Local Solar Time" + assert result.coord("time").units == Unit( + "hours since 1850-01-01", calendar="360_day" + ) + assert result.coord("time").attributes == {} + np.testing.assert_allclose(result.coord("time").points, [8651.0, 8652.0]) np.testing.assert_allclose( - result.coord('time').bounds, [[8650.5, 8651.5], [8651.5, 8652.5]] + result.coord("time").bounds, [[8650.5, 8651.5], [8651.5, 8652.5]] ) - assert result.coord('aux_time') == input_cube.coord('aux_time') - assert result.coord('aux_lon') == input_cube.coord('aux_lon') + assert result.coord("aux_time") == input_cube.coord("aux_time") + assert result.coord("aux_lon") == input_cube.coord("aux_lon") assert ( - result.coord('aux_2d').metadata == input_cube.coord('aux_2d').metadata + result.coord("aux_2d").metadata == input_cube.coord("aux_2d").metadata ) - assert not result.coord('aux_2d').has_lazy_points() + assert not result.coord("aux_2d").has_lazy_points() assert_array_equal( - result.coord('aux_2d').points, + result.coord("aux_2d").points, np.ma.masked_equal([[0, 99, 5], [3, 1, 99]], 99), ) - assert not result.coord('aux_2d').has_bounds() + assert not result.coord("aux_2d").has_bounds() assert ( - result.coord('aux_2d_with_bnds').metadata == - input_cube.coord('aux_2d_with_bnds').metadata + result.coord("aux_2d_with_bnds").metadata + == input_cube.coord("aux_2d_with_bnds").metadata ) - assert not result.coord('aux_2d_with_bnds').has_lazy_points() + assert not result.coord("aux_2d_with_bnds").has_lazy_points() assert_array_equal( - result.coord('aux_2d_with_bnds').points, + result.coord("aux_2d_with_bnds").points, np.ma.masked_equal([[0, 99, 5], [3, 1, 99]], 99), ) - assert not result.coord('aux_2d_with_bnds').has_lazy_bounds() + assert not result.coord("aux_2d_with_bnds").has_lazy_bounds() assert_array_equal( - result.coord('aux_2d_with_bnds').bounds, + result.coord("aux_2d_with_bnds").bounds, np.ma.masked_equal( [ [[-1, 0, 1], [99, 99, 99], [4, 5, 6]], @@ -200,12 +196,12 @@ def test_local_solar_time_regular(realistic_4d_cube): ) assert ( - result.cell_measure('cell_area').metadata == - input_cube.cell_measure('cell_area').metadata + result.cell_measure("cell_area").metadata + == input_cube.cell_measure("cell_area").metadata ) - assert not result.cell_measure('cell_area').has_lazy_data() + assert not result.cell_measure("cell_area").has_lazy_data() assert_array_equal( - result.cell_measure('cell_area').data, + result.cell_measure("cell_area").data, np.ma.masked_equal( [ [[10, 99, 18], [13, 99, 21]], @@ -215,14 
+211,14 @@ def test_local_solar_time_regular(realistic_4d_cube): ), ) assert ( - result.ancillary_variable('type').metadata == - input_cube.ancillary_variable('type').metadata + result.ancillary_variable("type").metadata + == input_cube.ancillary_variable("type").metadata ) - assert not result.ancillary_variable('type').has_lazy_data() + assert not result.ancillary_variable("type").has_lazy_data() assert_array_equal( - result.ancillary_variable('type').data, + result.ancillary_variable("type").data, np.ma.masked_equal( - [['sea', 'miss', 'land'], ['lake', 'land', 'miss']], 'miss' + [["sea", "miss", "land"], ["lake", "land", "miss"]], "miss" ), ) @@ -249,63 +245,63 @@ def test_local_solar_time_1_time_step(realistic_4d_cube): assert result.metadata == input_cube.metadata assert result.shape == input_cube.shape - assert result.coord('time') != input_cube.coord('time') - assert result.coord('air_pressure') == input_cube.coord('air_pressure') - assert result.coord('latitude') == input_cube.coord('latitude') - assert result.coord('longitude') == input_cube.coord('longitude') - - assert result.coord('time').standard_name == 'time' - assert result.coord('time').var_name is None - assert result.coord('time').long_name == 'Local Solar Time' - assert result.coord('time').units == Unit( - 'hours since 1850-01-01', calendar='360_day' - ) - assert result.coord('time').attributes == {} - np.testing.assert_allclose(result.coord('time').points, [8651.0]) - np.testing.assert_allclose(result.coord('time').bounds, [[8650.5, 8651.5]]) - - assert result.coord('aux_time') == input_cube.coord('aux_time') - assert result.coord('aux_lon') == input_cube.coord('aux_lon') + assert result.coord("time") != input_cube.coord("time") + assert result.coord("air_pressure") == input_cube.coord("air_pressure") + assert result.coord("latitude") == input_cube.coord("latitude") + assert result.coord("longitude") == input_cube.coord("longitude") + + assert result.coord("time").standard_name == "time" + assert result.coord("time").var_name is None + assert result.coord("time").long_name == "Local Solar Time" + assert result.coord("time").units == Unit( + "hours since 1850-01-01", calendar="360_day" + ) + assert result.coord("time").attributes == {} + np.testing.assert_allclose(result.coord("time").points, [8651.0]) + np.testing.assert_allclose(result.coord("time").bounds, [[8650.5, 8651.5]]) + + assert result.coord("aux_time") == input_cube.coord("aux_time") + assert result.coord("aux_lon") == input_cube.coord("aux_lon") assert ( - result.coord('aux_2d').metadata == input_cube.coord('aux_2d').metadata + result.coord("aux_2d").metadata == input_cube.coord("aux_2d").metadata ) - assert not result.coord('aux_2d').has_lazy_points() + assert not result.coord("aux_2d").has_lazy_points() assert_array_equal( - result.coord('aux_2d').points, np.ma.masked_equal([[0, 99, 99]], 99) + result.coord("aux_2d").points, np.ma.masked_equal([[0, 99, 99]], 99) ) - assert not result.coord('aux_2d').has_bounds() + assert not result.coord("aux_2d").has_bounds() assert ( - result.coord('aux_2d_with_bnds').metadata == - input_cube.coord('aux_2d_with_bnds').metadata + result.coord("aux_2d_with_bnds").metadata + == input_cube.coord("aux_2d_with_bnds").metadata ) - assert not result.coord('aux_2d_with_bnds').has_lazy_points() + assert not result.coord("aux_2d_with_bnds").has_lazy_points() assert_array_equal( - result.coord('aux_2d_with_bnds').points, + result.coord("aux_2d_with_bnds").points, np.ma.masked_equal([[0, 99, 99]], 99), ) - assert not 
result.coord('aux_2d_with_bnds').has_lazy_bounds() + assert not result.coord("aux_2d_with_bnds").has_lazy_bounds() assert_array_equal( - result.coord('aux_2d_with_bnds').bounds, + result.coord("aux_2d_with_bnds").bounds, np.ma.masked_equal([[[-1, 0, 1], [99, 99, 99], [99, 99, 99]]], 99), ) assert ( - result.cell_measure('cell_area').metadata == - input_cube.cell_measure('cell_area').metadata + result.cell_measure("cell_area").metadata + == input_cube.cell_measure("cell_area").metadata ) - assert not result.cell_measure('cell_area').has_lazy_data() + assert not result.cell_measure("cell_area").has_lazy_data() assert_array_equal( - result.cell_measure('cell_area').data, + result.cell_measure("cell_area").data, np.ma.masked_equal([[[10, 99, 99], [13, 99, 99]]], 99), ) assert ( - result.ancillary_variable('type').metadata == - input_cube.ancillary_variable('type').metadata + result.ancillary_variable("type").metadata + == input_cube.ancillary_variable("type").metadata ) - assert not result.ancillary_variable('type').has_lazy_data() + assert not result.ancillary_variable("type").has_lazy_data() assert_array_equal( - result.ancillary_variable('type').data, - np.ma.masked_equal([['sea', 'miss', 'miss']], 'miss'), + result.ancillary_variable("type").data, + np.ma.masked_equal([["sea", "miss", "miss"]], "miss"), ) assert not result.has_lazy_data() @@ -321,51 +317,55 @@ def realistic_unstructured_cube(): time = DimCoord( [0.0, 6.0, 12.0, 18.0, 24.0], bounds=[ - [-3.0, 3.0], [3.0, 9.0], [9.0, 15.0], [15.0, 21.0], [21.0, 27.0] + [-3.0, 3.0], + [3.0, 9.0], + [9.0, 15.0], + [15.0, 21.0], + [21.0, 27.0], ], - var_name='time', - standard_name='time', - long_name='time', - units=Unit('hours since 1851-01-01'), + var_name="time", + standard_name="time", + long_name="time", + units=Unit("hours since 1851-01-01"), ) lat = AuxCoord( [0.0, 0.0, 0.0, 0.0], - var_name='lat', - standard_name='latitude', - long_name='latitude', - units='degrees_north', + var_name="lat", + standard_name="latitude", + long_name="latitude", + units="degrees_north", ) lon = AuxCoord( [0.0, 80 * np.pi / 180.0, -120 * np.pi / 180.0, 160 * np.pi / 180.0], - var_name='lon', - standard_name='longitude', - long_name='longitude', - units='rad', + var_name="lon", + standard_name="longitude", + long_name="longitude", + units="rad", ) aux_2d_data = da.ma.masked_inside(da.arange(4 * 5).reshape(4, 5), 3, 10) aux_2d_bounds = da.stack((aux_2d_data - 1, aux_2d_data + 1), axis=-1) - aux_2d = AuxCoord(aux_2d_data, var_name='aux_2d') + aux_2d = AuxCoord(aux_2d_data, var_name="aux_2d") aux_2d_with_bnds = AuxCoord( - aux_2d_data, bounds=aux_2d_bounds, var_name='aux_2d_with_bnds' + aux_2d_data, bounds=aux_2d_bounds, var_name="aux_2d_with_bnds" ) - aux_0d = AuxCoord([0], var_name='aux_0d') + aux_0d = AuxCoord([0], var_name="aux_0d") cell_measure_2d = CellMeasure( da.ma.masked_inside(da.arange(4 * 5).reshape(4, 5), 3, 10), - var_name='cell_measure', + var_name="cell_measure", ) anc_var_2d = AncillaryVariable( da.ma.masked_inside(da.arange(4 * 5).reshape(4, 5), 3, 10), - var_name='anc_var', + var_name="anc_var", ) cube = Cube( da.arange(4 * 5).reshape(4, 5), - var_name='ta', - standard_name='air_temperature', - long_name='Air Temperature', - units='K', + var_name="ta", + standard_name="air_temperature", + long_name="Air Temperature", + units="K", dim_coords_and_dims=[(time, 1)], aux_coords_and_dims=[ (lat, 0), @@ -390,20 +390,20 @@ def test_local_solar_time_unstructured(realistic_unstructured_cube): assert result.metadata == input_cube.metadata assert 
result.shape == input_cube.shape - assert result.coord('time') != input_cube.coord('time') - assert result.coord('latitude') == input_cube.coord('latitude') - assert result.coord('longitude') == input_cube.coord('longitude') - - assert result.coord('time').standard_name == 'time' - assert result.coord('time').var_name == 'time' - assert result.coord('time').long_name == 'Local Solar Time' - assert result.coord('time').units == 'hours since 1850-01-01' - assert result.coord('time').attributes == {} + assert result.coord("time") != input_cube.coord("time") + assert result.coord("latitude") == input_cube.coord("latitude") + assert result.coord("longitude") == input_cube.coord("longitude") + + assert result.coord("time").standard_name == "time" + assert result.coord("time").var_name == "time" + assert result.coord("time").long_name == "Local Solar Time" + assert result.coord("time").units == "hours since 1850-01-01" + assert result.coord("time").attributes == {} np.testing.assert_allclose( - result.coord('time').points, [8760.0, 8766.0, 8772.0, 8778.0, 8784.0] + result.coord("time").points, [8760.0, 8766.0, 8772.0, 8778.0, 8784.0] ) np.testing.assert_allclose( - result.coord('time').bounds, + result.coord("time").bounds, [ [8757.0, 8763.0], [8763.0, 8769.0], @@ -413,13 +413,13 @@ def test_local_solar_time_unstructured(realistic_unstructured_cube): ], ) - assert result.coord('aux_0d') == input_cube.coord('aux_0d') + assert result.coord("aux_0d") == input_cube.coord("aux_0d") assert ( - result.coord('aux_2d').metadata == input_cube.coord('aux_2d').metadata + result.coord("aux_2d").metadata == input_cube.coord("aux_2d").metadata ) - assert result.coord('aux_2d').has_lazy_points() + assert result.coord("aux_2d").has_lazy_points() assert_array_equal( - result.coord('aux_2d').points, + result.coord("aux_2d").points, np.ma.masked_equal( [ [0, 1, 2, 99, 99], @@ -430,14 +430,14 @@ def test_local_solar_time_unstructured(realistic_unstructured_cube): 99, ), ) - assert not result.coord('aux_2d').has_bounds() + assert not result.coord("aux_2d").has_bounds() assert ( - result.coord('aux_2d_with_bnds').metadata == - input_cube.coord('aux_2d_with_bnds').metadata + result.coord("aux_2d_with_bnds").metadata + == input_cube.coord("aux_2d_with_bnds").metadata ) - assert result.coord('aux_2d_with_bnds').has_lazy_points() + assert result.coord("aux_2d_with_bnds").has_lazy_points() assert_array_equal( - result.coord('aux_2d_with_bnds').points, + result.coord("aux_2d_with_bnds").points, np.ma.masked_equal( [ [0, 1, 2, 99, 99], @@ -448,9 +448,9 @@ def test_local_solar_time_unstructured(realistic_unstructured_cube): 99, ), ) - assert result.coord('aux_2d_with_bnds').has_lazy_bounds() + assert result.coord("aux_2d_with_bnds").has_lazy_bounds() assert_array_equal( - result.coord('aux_2d_with_bnds').bounds, + result.coord("aux_2d_with_bnds").bounds, np.ma.masked_equal( [ [[-1, 1], [0, 2], [1, 3], [99, 99], [99, 99]], @@ -463,12 +463,12 @@ def test_local_solar_time_unstructured(realistic_unstructured_cube): ) assert ( - result.cell_measure('cell_measure').metadata == - input_cube.cell_measure('cell_measure').metadata + result.cell_measure("cell_measure").metadata + == input_cube.cell_measure("cell_measure").metadata ) - assert result.cell_measure('cell_measure').has_lazy_data() + assert result.cell_measure("cell_measure").has_lazy_data() assert_array_equal( - result.cell_measure('cell_measure').data, + result.cell_measure("cell_measure").data, np.ma.masked_equal( [ [0, 1, 2, 99, 99], @@ -480,12 +480,12 @@ def 
test_local_solar_time_unstructured(realistic_unstructured_cube): ), ) assert ( - result.ancillary_variable('anc_var').metadata == - input_cube.ancillary_variable('anc_var').metadata + result.ancillary_variable("anc_var").metadata + == input_cube.ancillary_variable("anc_var").metadata ) - assert result.ancillary_variable('anc_var').has_lazy_data() + assert result.ancillary_variable("anc_var").has_lazy_data() assert_array_equal( - result.ancillary_variable('anc_var').data, + result.ancillary_variable("anc_var").data, np.ma.masked_equal( [ [0, 1, 2, 99, 99], @@ -514,8 +514,8 @@ def test_local_solar_time_unstructured(realistic_unstructured_cube): def test_local_solar_time_no_time_fail(realistic_4d_cube): """Test ``local_solar_time``.""" - realistic_4d_cube.remove_coord('time') - msg = 'needs a dimensional coordinate `time`' + realistic_4d_cube.remove_coord("time") + msg = "needs a dimensional coordinate `time`" with pytest.raises(CoordinateNotFoundError, match=msg): local_solar_time(realistic_4d_cube) @@ -523,7 +523,7 @@ def test_local_solar_time_no_time_fail(realistic_4d_cube): def test_local_solar_time_scalar_time_fail(realistic_4d_cube): """Test ``local_solar_time``.""" input_cube = realistic_4d_cube[0] - msg = 'needs a dimensional coordinate `time`' + msg = "needs a dimensional coordinate `time`" with pytest.raises(CoordinateNotFoundError, match=msg): local_solar_time(input_cube) @@ -531,15 +531,15 @@ def test_local_solar_time_scalar_time_fail(realistic_4d_cube): def test_local_solar_time_time_decreasing_fail(realistic_4d_cube): """Test ``local_solar_time``.""" input_cube = realistic_4d_cube[::-1] - msg = '`time` coordinate must be monotonically increasing' + msg = "`time` coordinate must be monotonically increasing" with pytest.raises(ValueError, match=msg): local_solar_time(input_cube) def test_local_solar_time_no_lon_fail(realistic_4d_cube): """Test ``local_solar_time``.""" - realistic_4d_cube.remove_coord('longitude') - msg = 'needs a coordinate `longitude`' + realistic_4d_cube.remove_coord("longitude") + msg = "needs a coordinate `longitude`" with pytest.raises(CoordinateNotFoundError, match=msg): local_solar_time(realistic_4d_cube) @@ -547,15 +547,15 @@ def test_local_solar_time_no_lon_fail(realistic_4d_cube): def test_local_solar_time_scalar_lon_fail(realistic_4d_cube): """Test ``local_solar_time``.""" input_cube = realistic_4d_cube[..., 0] - msg = 'needs a 1D coordinate `longitude`, got 0D' + msg = "needs a 1D coordinate `longitude`, got 0D" with pytest.raises(CoordinateMultiDimError, match=msg): local_solar_time(input_cube) def test_local_solar_time_2d_lon_fail(easy_2d_cube): """Test ``local_solar_time``.""" - lon_coord = AuxCoord(easy_2d_cube.data, standard_name='longitude') + lon_coord = AuxCoord(easy_2d_cube.data, standard_name="longitude") easy_2d_cube.add_aux_coord(lon_coord, (0, 1)) - msg = 'needs a 1D coordinate `longitude`, got 2D' + msg = "needs a 1D coordinate `longitude`, got 2D" with pytest.raises(CoordinateMultiDimError, match=msg): local_solar_time(easy_2d_cube) diff --git a/tests/integration/preprocessor/test_preprocessing_task.py b/tests/integration/preprocessor/test_preprocessing_task.py index 3b33e8f44e..6b3023f1d2 100644 --- a/tests/integration/preprocessor/test_preprocessing_task.py +++ b/tests/integration/preprocessor/test_preprocessing_task.py @@ -1,4 +1,5 @@ """Tests for `esmvalcore.preprocessor.PreprocessingTask`.""" + import iris import iris.cube from prov.model import ProvDocument @@ -11,26 +12,28 @@ def test_load_save_task(tmp_path): """Test that a task 
that just loads and saves a file.""" # Prepare a test dataset - cube = iris.cube.Cube(data=[273.], var_name='tas', units='K') - in_file = tmp_path / 'tas_in.nc' + cube = iris.cube.Cube(data=[273.0], var_name="tas", units="K") + in_file = tmp_path / "tas_in.nc" iris.save(cube, in_file) - dataset = Dataset(short_name='tas') + dataset = Dataset(short_name="tas") dataset.files = [in_file] dataset.load = lambda: cube.copy() # Create task - task = PreprocessingTask([ - PreprocessorFile( - filename=tmp_path / 'tas_out.nc', - settings={}, - datasets=[dataset], - ), - ]) + task = PreprocessingTask( + [ + PreprocessorFile( + filename=tmp_path / "tas_out.nc", + settings={}, + datasets=[dataset], + ), + ] + ) # Create an 'activity' representing a run of the tool provenance = ProvDocument() - provenance.add_namespace('software', uri='https://example.com/software') - activity = provenance.activity('software:esmvalcore') + provenance.add_namespace("software", uri="https://example.com/software") + activity = provenance.activity("software:esmvalcore") task.initialize_provenance(activity) task.run() @@ -46,59 +49,61 @@ def test_load_save_task(tmp_path): def test_load_save_and_other_task(tmp_path, monkeypatch): """Test that a task just copies one file and preprocesses another file.""" # Prepare test datasets - in_cube = iris.cube.Cube(data=[0.], var_name='tas', units='degrees_C') - (tmp_path / 'climate_data').mkdir() - file1 = tmp_path / 'climate_data' / 'tas_dataset1.nc' - file2 = tmp_path / 'climate_data' / 'tas_dataset2.nc' + in_cube = iris.cube.Cube(data=[0.0], var_name="tas", units="degrees_C") + (tmp_path / "climate_data").mkdir() + file1 = tmp_path / "climate_data" / "tas_dataset1.nc" + file2 = tmp_path / "climate_data" / "tas_dataset2.nc" # Save cubes for reading global attributes into provenance iris.save(in_cube, target=file1) iris.save(in_cube, target=file2) - dataset1 = Dataset(short_name='tas', dataset='dataset1') + dataset1 = Dataset(short_name="tas", dataset="dataset1") dataset1.files = [file1] dataset1.load = lambda: in_cube.copy() - dataset2 = Dataset(short_name='tas', dataset='dataset1') + dataset2 = Dataset(short_name="tas", dataset="dataset1") dataset2.files = [file2] dataset2.load = lambda: in_cube.copy() # Create some mock preprocessor functions and patch # `esmvalcore.preprocessor` so it uses them. def single_preproc_func(cube): - cube.data = cube.core_data() + 1. + cube.data = cube.core_data() + 1.0 return cube def multi_preproc_func(products): for product in products: cube = product.cubes[0] - cube.data = cube.core_data() + 1. 
+ cube.data = cube.core_data() + 1.0 product.cubes = [cube] return products monkeypatch.setattr( esmvalcore.preprocessor, - 'single_preproc_func', + "single_preproc_func", single_preproc_func, raising=False, ) monkeypatch.setattr( esmvalcore.preprocessor, - 'multi_preproc_func', + "multi_preproc_func", multi_preproc_func, raising=False, ) monkeypatch.setattr( esmvalcore.preprocessor, - 'MULTI_MODEL_FUNCTIONS', - {'multi_preproc_func'}, + "MULTI_MODEL_FUNCTIONS", + {"multi_preproc_func"}, + ) + default_order = ( + esmvalcore.preprocessor.INITIAL_STEPS + + ("single_preproc_func", "multi_preproc_func") + + esmvalcore.preprocessor.FINAL_STEPS ) - default_order = (esmvalcore.preprocessor.INITIAL_STEPS + - ('single_preproc_func', 'multi_preproc_func') + - esmvalcore.preprocessor.FINAL_STEPS) monkeypatch.setattr( esmvalcore.preprocessor, - 'DEFAULT_ORDER', + "DEFAULT_ORDER", default_order, ) @@ -106,19 +111,19 @@ def multi_preproc_func(products): task = PreprocessingTask( [ PreprocessorFile( - filename=tmp_path / 'tas_dataset1.nc', + filename=tmp_path / "tas_dataset1.nc", settings={}, datasets=[dataset1], - attributes={'dataset': 'dataset1'}, + attributes={"dataset": "dataset1"}, ), PreprocessorFile( - filename=tmp_path / 'tas_dataset2.nc', + filename=tmp_path / "tas_dataset2.nc", settings={ - 'single_preproc_func': {}, - 'multi_preproc_func': {}, + "single_preproc_func": {}, + "multi_preproc_func": {}, }, datasets=[dataset2], - attributes={'dataset': 'dataset2'}, + attributes={"dataset": "dataset2"}, ), ], order=default_order, @@ -126,8 +131,8 @@ def multi_preproc_func(products): # Create an 'activity' representing a run of the tool provenance = ProvDocument() - provenance.add_namespace('software', uri='https://example.com/software') - activity = provenance.activity('software:esmvalcore') + provenance.add_namespace("software", uri="https://example.com/software") + activity = provenance.activity("software:esmvalcore") task.initialize_provenance(activity) task.run() @@ -140,9 +145,9 @@ def multi_preproc_func(products): assert product.filename.exists() out_cube = iris.load_cube(product.filename) print(out_cube.data) - if product.attributes['dataset'] == 'dataset1': - assert out_cube.data.tolist() == [0.] - elif product.attributes['dataset'] == 'dataset2': - assert out_cube.data.tolist() == [2.] 
+ if product.attributes["dataset"] == "dataset1": + assert out_cube.data.tolist() == [0.0] + elif product.attributes["dataset"] == "dataset2": + assert out_cube.data.tolist() == [2.0] else: assert False, "unexpected product" diff --git a/tests/integration/recipe/test_check.py b/tests/integration/recipe/test_check.py index 5694cd2eec..b118162c15 100644 --- a/tests/integration/recipe/test_check.py +++ b/tests/integration/recipe/test_check.py @@ -1,4 +1,5 @@ """Integration tests for :mod:`esmvalcore._recipe.check`.""" + import os.path from pathlib import Path from typing import Any, List @@ -14,44 +15,51 @@ from esmvalcore.exceptions import RecipeError from esmvalcore.preprocessor import PreprocessorFile -ERR_ALL = 'Looked for files matching%s' -ERR_RANGE = 'No input data available for years {} in files:\n{}' +ERR_ALL = "Looked for files matching%s" +ERR_RANGE = "No input data available for years {} in files:\n{}" VAR = { - 'frequency': 'mon', - 'short_name': 'tas', - 'timerange': '2020/2025', - 'alias': 'alias', - 'start_year': 2020, - 'end_year': 2025 + "frequency": "mon", + "short_name": "tas", + "timerange": "2020/2025", + "alias": "alias", + "start_year": 2020, + "end_year": 2025, } FX_VAR = { - 'frequency': 'fx', - 'short_name': 'areacella', + "frequency": "fx", + "short_name": "areacella", } FILES = [ - 'a/b/c_20200101-20201231', - 'a/b/c_20210101-20211231', - 'a/b/c_20220101-20221231', - 'a/b/c_20230101-20231231', - 'a/b/c_20240101-20241231', - 'a/b/c_20250101-20251231', + "a/b/c_20200101-20201231", + "a/b/c_20210101-20211231", + "a/b/c_20220101-20221231", + "a/b/c_20230101-20231231", + "a/b/c_20240101-20241231", + "a/b/c_20250101-20251231", ] DATA_AVAILABILITY_DATA = [ (FILES, dict(VAR), None), (FILES, dict(FX_VAR), None), - (FILES[1:], dict(VAR), ERR_RANGE.format('2020', "\n".join(FILES[1:]))), - (FILES[:-1], dict(VAR), ERR_RANGE.format('2025', "\n".join(FILES[:-1]))), - (FILES[:-3], dict(VAR), ERR_RANGE.format('2023-2025', - "\n".join(FILES[:-3]))), - ([FILES[1]] + [FILES[3]], dict(VAR), - ERR_RANGE.format('2020, 2022, 2024-2025', - "\n".join([FILES[1]] + [FILES[3]]))), + (FILES[1:], dict(VAR), ERR_RANGE.format("2020", "\n".join(FILES[1:]))), + (FILES[:-1], dict(VAR), ERR_RANGE.format("2025", "\n".join(FILES[:-1]))), + ( + FILES[:-3], + dict(VAR), + ERR_RANGE.format("2023-2025", "\n".join(FILES[:-3])), + ), + ( + [FILES[1]] + [FILES[3]], + dict(VAR), + ERR_RANGE.format( + "2020, 2022, 2024-2025", "\n".join([FILES[1]] + [FILES[3]]) + ), + ), ] -@pytest.mark.parametrize('input_files,var,error', DATA_AVAILABILITY_DATA) -@mock.patch('esmvalcore._recipe.check.logger', autospec=True) +@pytest.mark.parametrize("input_files,var,error", DATA_AVAILABILITY_DATA) +@mock.patch("esmvalcore._recipe.check.logger", autospec=True) def test_data_availability_data(mock_logger, input_files, var, error): """Test check for data when data is present.""" dataset = Dataset(**var) @@ -68,37 +76,37 @@ def test_data_availability_data(mock_logger, input_files, var, error): DATA_AVAILABILITY_NO_DATA: List[Any] = [ ([], [], None), - ([''], ['a*.nc'], (ERR_ALL, ': a*.nc')), - ([''], ['a*.nc', 'b*.nc'], (ERR_ALL, '\na*.nc\nb*.nc')), - (['1'], ['a'], (ERR_ALL, ': 1/a')), - (['1'], ['a', 'b'], (ERR_ALL, '\n1/a\n1/b')), - (['1', '2'], ['a'], (ERR_ALL, '\n1/a\n2/a')), - (['1', '2'], ['a', 'b'], (ERR_ALL, '\n1/a\n1/b\n2/a\n2/b')), + ([""], ["a*.nc"], (ERR_ALL, ": a*.nc")), + ([""], ["a*.nc", "b*.nc"], (ERR_ALL, "\na*.nc\nb*.nc")), + (["1"], ["a"], (ERR_ALL, ": 1/a")), + (["1"], ["a", "b"], (ERR_ALL, "\n1/a\n1/b")), + 
(["1", "2"], ["a"], (ERR_ALL, "\n1/a\n2/a")), + (["1", "2"], ["a", "b"], (ERR_ALL, "\n1/a\n1/b\n2/a\n2/b")), ] -@pytest.mark.parametrize('dirnames,filenames,error', DATA_AVAILABILITY_NO_DATA) -@mock.patch('esmvalcore._recipe.check.logger', autospec=True) +@pytest.mark.parametrize("dirnames,filenames,error", DATA_AVAILABILITY_NO_DATA) +@mock.patch("esmvalcore._recipe.check.logger", autospec=True) def test_data_availability_no_data(mock_logger, dirnames, filenames, error): """Test check for data when no data is present.""" facets = { - 'frequency': 'mon', - 'short_name': 'tas', - 'timerange': '2020/2025', - 'alias': 'alias', - 'start_year': 2020, - 'end_year': 2025 + "frequency": "mon", + "short_name": "tas", + "timerange": "2020/2025", + "alias": "alias", + "start_year": 2020, + "end_year": 2025, } dataset = Dataset(**facets) dataset.files = [] dataset._file_globs = [ os.path.join(d, f) for d in dirnames for f in filenames ] - error_first = ('No input files found for %s', dataset) - error_last = ("Set 'log_level' to 'debug' to get more information", ) + error_first = ("No input files found for %s", dataset) + error_last = ("Set 'log_level' to 'debug' to get more information",) with pytest.raises(RecipeError) as rec_err: check.data_availability(dataset) - assert str(rec_err.value) == 'Missing data for Dataset: tas' + assert str(rec_err.value) == "Missing data for Dataset: tas" if error is None: assert mock_logger.error.call_count == 2 errors = [error_first, error_last] @@ -111,50 +119,57 @@ def test_data_availability_no_data(mock_logger, dirnames, filenames, error): GOOD_TIMERANGES = [ - '*', - '1990/1992', - '19900101/19920101', - '19900101T12H00M00S/19920101T12H00M00', - '1990/*', - '*/1992', - '1990/P2Y', - '19900101/P2Y2M1D', - '19900101TH00M00S/P2Y2M1DT12H00M00S', - 'P2Y/1992', - 'P2Y2M1D/19920101', - 'P2Y2M1D/19920101T12H00M00S', - 'P2Y/*', - 'P2Y2M1D/*', - 'P2Y21DT12H00M00S/*', - '*/P2Y', - '*/P2Y2M1D', - '*/P2Y21DT12H00M00S', - '1/301', - '1/*', - '*/301', + "*", + "1990/1992", + "19900101/19920101", + "19900101T12H00M00S/19920101T12H00M00", + "1990/*", + "*/1992", + "1990/P2Y", + "19900101/P2Y2M1D", + "19900101TH00M00S/P2Y2M1DT12H00M00S", + "P2Y/1992", + "P2Y2M1D/19920101", + "P2Y2M1D/19920101T12H00M00S", + "P2Y/*", + "P2Y2M1D/*", + "P2Y21DT12H00M00S/*", + "*/P2Y", + "*/P2Y2M1D", + "*/P2Y21DT12H00M00S", + "1/301", + "1/*", + "*/301", ] -@pytest.mark.parametrize('timerange', GOOD_TIMERANGES) +@pytest.mark.parametrize("timerange", GOOD_TIMERANGES) def test_valid_time_selection(timerange): """Check that good definitions do not raise anything.""" check.valid_time_selection(timerange) BAD_TIMERANGES = [ - ('randomnonsense', - 'Invalid value encountered for `timerange`. Valid values must be ' - "separated by `/`. Got ['randomnonsense'] instead."), - ('199035345/19923463164526', - 'Invalid value encountered for `timerange`. Valid value must follow ' - "ISO 8601 standard for dates and duration periods, or be set to '*' " - "to load available years. Got ['199035345', '19923463164526'] instead."), - ('P11Y/P42Y', 'Invalid value encountered for `timerange`. Cannot set both ' - 'the beginning and the end as duration periods.'), + ( + "randomnonsense", + "Invalid value encountered for `timerange`. Valid values must be " + "separated by `/`. Got ['randomnonsense'] instead.", + ), + ( + "199035345/19923463164526", + "Invalid value encountered for `timerange`. Valid value must follow " + "ISO 8601 standard for dates and duration periods, or be set to '*' " + "to load available years. 
Got ['199035345', '19923463164526'] instead.", + ), + ( + "P11Y/P42Y", + "Invalid value encountered for `timerange`. Cannot set both " + "the beginning and the end as duration periods.", + ), ] -@pytest.mark.parametrize('timerange,message', BAD_TIMERANGES) +@pytest.mark.parametrize("timerange,message", BAD_TIMERANGES) def test_valid_time_selection_rejections(timerange, message): """Check that bad definitions raise RecipeError.""" with pytest.raises(check.RecipeError) as rec_err: @@ -164,21 +179,14 @@ def test_valid_time_selection_rejections(timerange, message): def test_differing_timeranges(caplog): timeranges = set() - timeranges.add('1950/1951') - timeranges.add('1950/1952') + timeranges.add("1950/1951") + timeranges.add("1950/1952") required_variables = [ - { - 'short_name': 'rsdscs', - 'timerange': '1950/1951' - }, - { - 'short_name': 'rsuscs', - 'timerange': '1950/1952' - }, + {"short_name": "rsdscs", "timerange": "1950/1951"}, + {"short_name": "rsuscs", "timerange": "1950/1952"}, ] with pytest.raises(ValueError) as exc: - check.differing_timeranges( - timeranges, required_variables) + check.differing_timeranges(timeranges, required_variables) expected_log = ( f"Differing timeranges with values {timeranges} " "found for required variables " @@ -192,20 +200,20 @@ def test_differing_timeranges(caplog): def test_data_availability_nonexistent(tmp_path): var = { - 'dataset': 'ABC', - 'short_name': 'tas', - 'frequency': 'mon', - 'timerange': '1990/1992', - 'start_year': 1990, - 'end_year': 1992 + "dataset": "ABC", + "short_name": "tas", + "frequency": "mon", + "timerange": "1990/1992", + "start_year": 1990, + "end_year": 1992, } result = pyesgf.search.results.FileResult( json={ - 'dataset_id': 'CMIP6.ABC.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 10, - 'title': 'tas_1990-1992.nc', + "dataset_id": "CMIP6.ABC.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 10, + "title": "tas_1990-1992.nc", }, context=None, ) @@ -230,13 +238,13 @@ def test_reference_for_bias_preproc_one_ref(): """Test ``reference_for_bias_preproc`` with one reference.""" products = { PreprocessorFile(filename=90), - PreprocessorFile(filename=10, - settings={'bias': {}}), - PreprocessorFile(filename=20, - settings={'bias': {}}), - PreprocessorFile(filename=30, - settings={'bias': {}}, - attributes={'reference_for_bias': True}), + PreprocessorFile(filename=10, settings={"bias": {}}), + PreprocessorFile(filename=20, settings={"bias": {}}), + PreprocessorFile( + filename=30, + settings={"bias": {}}, + attributes={"reference_for_bias": True}, + ), } check.reference_for_bias_preproc(products) @@ -245,69 +253,78 @@ def test_reference_for_bias_preproc_no_ref(): """Test ``reference_for_bias_preproc`` with no reference.""" products = { PreprocessorFile(filename=90), - PreprocessorFile(filename=10, - settings={'bias': {}}), - PreprocessorFile(filename=20, - settings={'bias': {}}), - PreprocessorFile(filename=30, - settings={'bias': {}}) + PreprocessorFile(filename=10, settings={"bias": {}}), + PreprocessorFile(filename=20, settings={"bias": {}}), + PreprocessorFile(filename=30, settings={"bias": {}}), } with pytest.raises(RecipeError) as rec_err: check.reference_for_bias_preproc(products) # Note: checking the message directly does not work due to the unknown # (machine-dependent) ordering of products in the set - assert ("Expected exactly 1 dataset with 'reference_for_bias: true' in " - "products\n[") in 
str(rec_err.value) - assert '10' in str(rec_err.value) - assert '20' in str(rec_err.value) - assert '30' in str(rec_err.value) - assert '90' not in str(rec_err.value) - assert ("],\nfound 0. Please also ensure that the reference dataset is " - "not excluded with the 'exclude' option") in str(rec_err.value) + assert ( + "Expected exactly 1 dataset with 'reference_for_bias: true' in " + "products\n[" + ) in str(rec_err.value) + assert "10" in str(rec_err.value) + assert "20" in str(rec_err.value) + assert "30" in str(rec_err.value) + assert "90" not in str(rec_err.value) + assert ( + "],\nfound 0. Please also ensure that the reference dataset is " + "not excluded with the 'exclude' option" + ) in str(rec_err.value) def test_reference_for_bias_preproc_two_refs(): """Test ``reference_for_bias_preproc`` with two references.""" products = { PreprocessorFile(filename=90), - PreprocessorFile(filename=10, settings={'bias': {}}), - PreprocessorFile(filename=20, - attributes={'reference_for_bias': True}, - settings={'bias': {}}), - PreprocessorFile(filename=30, - attributes={'reference_for_bias': True}, - settings={'bias': {}}) + PreprocessorFile(filename=10, settings={"bias": {}}), + PreprocessorFile( + filename=20, + attributes={"reference_for_bias": True}, + settings={"bias": {}}, + ), + PreprocessorFile( + filename=30, + attributes={"reference_for_bias": True}, + settings={"bias": {}}, + ), } with pytest.raises(RecipeError) as rec_err: check.reference_for_bias_preproc(products) # Note: checking the message directly does not work due to the unknown # (machine-dependent) ordering of products in the set - assert ("Expected exactly 1 dataset with 'reference_for_bias: true' in " - "products\n[") in str(rec_err.value) - assert '10' in str(rec_err.value) - assert '20' in str(rec_err.value) - assert '30' in str(rec_err.value) - assert '90' not in str(rec_err.value) + assert ( + "Expected exactly 1 dataset with 'reference_for_bias: true' in " + "products\n[" + ) in str(rec_err.value) + assert "10" in str(rec_err.value) + assert "20" in str(rec_err.value) + assert "30" in str(rec_err.value) + assert "90" not in str(rec_err.value) assert "],\nfound 2:\n[" in str(rec_err.value) - assert ("].\nPlease also ensure that the reference dataset is " - "not excluded with the 'exclude' option") in str(rec_err.value) + assert ( + "].\nPlease also ensure that the reference dataset is " + "not excluded with the 'exclude' option" + ) in str(rec_err.value) INVALID_MM_SETTINGS = { - 'wrong_parametre': 'wrong', - 'statistics': ['wrong'], - 'span': 'wrong', - 'groupby': 'wrong', - 'keep_input_datasets': 'wrong', - 'ignore_scalar_coords': 'wrong', - } + "wrong_parametre": "wrong", + "statistics": ["wrong"], + "span": "wrong", + "groupby": "wrong", + "keep_input_datasets": "wrong", + "ignore_scalar_coords": "wrong", +} def test_invalid_multi_model_span(): with pytest.raises(RecipeError) as rec_err: - check._verify_span_value(INVALID_MM_SETTINGS['span']) + check._verify_span_value(INVALID_MM_SETTINGS["span"]) assert str(rec_err.value) == ( "Invalid value encountered for `span` in preprocessor " "`multi_model_statistics`. Valid values are ('overlap', 'full')." 
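The tests that follow assert the exact wording raised for invalid multi_model_statistics settings. A short stand-in sketch of the validation pattern being exercised; `RecipeError` and `verify_span_value` here are local stand-ins for illustration, not the actual esmvalcore._recipe.check implementation:

class RecipeError(Exception):
    """Local stand-in for esmvalcore.exceptions.RecipeError."""

def verify_span_value(span):
    # Mirrors the contract the test asserts: anything outside the two valid
    # values raises RecipeError with the exact message checked above.
    valid = ("overlap", "full")
    if span not in valid:
        raise RecipeError(
            "Invalid value encountered for `span` in preprocessor "
            f"`multi_model_statistics`. Valid values are {valid}."
        )

verify_span_value("overlap")  # valid values pass silently
try:
    verify_span_value("wrong")
except RecipeError as exc:
    assert "Valid values are ('overlap', 'full')" in str(exc)

Asserting on the full message, as these tests do, pins down both the validation logic and the user-facing wording in one place.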
@@ -317,27 +334,31 @@ def test_invalid_multi_model_span(): def test_invalid_multi_model_groupy(): with pytest.raises(RecipeError) as rec_err: - check._verify_groupby(INVALID_MM_SETTINGS['groupby']) + check._verify_groupby(INVALID_MM_SETTINGS["groupby"]) assert str(rec_err.value) == ( - 'Invalid value encountered for `groupby` in preprocessor ' - '`multi_model_statistics`.`groupby` must be defined ' - 'as a list. Got wrong.' + "Invalid value encountered for `groupby` in preprocessor " + "`multi_model_statistics`.`groupby` must be defined " + "as a list. Got wrong." ) def test_invalid_multi_model_keep_input(): with pytest.raises(RecipeError) as rec_err: check._verify_keep_input_datasets( - INVALID_MM_SETTINGS['keep_input_datasets']) + INVALID_MM_SETTINGS["keep_input_datasets"] + ) assert str(rec_err.value) == ( - 'Invalid value encountered for `keep_input_datasets`.' - 'Must be defined as a boolean (true or false). Got wrong.') + "Invalid value encountered for `keep_input_datasets`." + "Must be defined as a boolean (true or false). Got wrong." + ) def test_invalid_multi_model_ignore_scalar_coords(): with pytest.raises(RecipeError) as rec_err: check._verify_ignore_scalar_coords( - INVALID_MM_SETTINGS['ignore_scalar_coords']) + INVALID_MM_SETTINGS["ignore_scalar_coords"] + ) assert str(rec_err.value) == ( - 'Invalid value encountered for `ignore_scalar_coords`.' - 'Must be defined as a boolean (true or false). Got wrong.') + "Invalid value encountered for `ignore_scalar_coords`." + "Must be defined as a boolean (true or false). Got wrong." + ) diff --git a/tests/integration/recipe/test_recipe.py b/tests/integration/recipe/test_recipe.py index d8133fc2b7..75bed13e9f 100644 --- a/tests/integration/recipe/test_recipe.py +++ b/tests/integration/recipe/test_recipe.py @@ -30,63 +30,63 @@ from tests.integration.test_provenance import check_provenance TAGS_FOR_TESTING = { - 'authors': { - 'andela_bouwe': { - 'name': 'Bouwe, Andela', + "authors": { + "andela_bouwe": { + "name": "Bouwe, Andela", }, }, - 'projects': { - 'c3s-magic': 'C3S MAGIC project', + "projects": { + "c3s-magic": "C3S MAGIC project", }, - 'themes': { - 'phys': 'physics', + "themes": { + "phys": "physics", }, - 'realms': { - 'atmos': 'atmosphere', + "realms": { + "atmos": "atmosphere", }, - 'statistics': { - 'mean': 'mean', - 'var': 'variability', + "statistics": { + "mean": "mean", + "var": "variability", }, - 'domains': { - 'et': 'extra tropics', - 'trop': 'tropics', + "domains": { + "et": "extra tropics", + "trop": "tropics", }, - 'plot_types': { - 'zonal': 'zonal', + "plot_types": { + "zonal": "zonal", }, } MANDATORY_DATASET_KEYS = ( - 'dataset', - 'diagnostic', - 'frequency', - 'institute', - 'long_name', - 'mip', - 'modeling_realm', - 'preprocessor', - 'project', - 'short_name', - 'standard_name', - 'timerange', - 'units', + "dataset", + "diagnostic", + "frequency", + "institute", + "long_name", + "mip", + "modeling_realm", + "preprocessor", + "project", + "short_name", + "standard_name", + "timerange", + "units", ) MANDATORY_SCRIPT_SETTINGS_KEYS = ( - 'log_level', - 'script', - 'plot_dir', - 'run_dir', - 'work_dir', + "log_level", + "script", + "plot_dir", + "run_dir", + "work_dir", ) DEFAULT_PREPROCESSOR_STEPS = ( - 'remove_supplementary_variables', - 'save', + "remove_supplementary_variables", + "save", ) -INITIALIZATION_ERROR_MSG = 'Could not create all tasks' +INITIALIZATION_ERROR_MSG = "Could not create all tasks" def create_test_file(filename, tracking_id=None): @@ -96,7 +96,7 @@ def create_test_file(filename, 
tracking_id=None): attributes = {} if tracking_id is not None: - attributes['tracking_id'] = tracking_id + attributes["tracking_id"] = tracking_id cube = iris.cube.Cube([]) cube.attributes.globals = attributes @@ -106,36 +106,29 @@ def create_test_file(filename, tracking_id=None): def _get_default_settings_for_chl(save_filename): """Get default preprocessor settings for chl.""" defaults = { - 'remove_supplementary_variables': {}, - 'save': { - 'compress': False, - 'filename': save_filename, - } + "remove_supplementary_variables": {}, + "save": { + "compress": False, + "filename": save_filename, + }, } return defaults @pytest.fixture def patched_tas_derivation(monkeypatch): - def get_required(short_name, _): - if short_name != 'tas': + if short_name != "tas": assert False required = [ - { - 'short_name': 'pr' - }, - { - 'short_name': 'areacella', - 'mip': 'fx', - 'optional': True - }, + {"short_name": "pr"}, + {"short_name": "areacella", "mip": "fx", "optional": True}, ] return required monkeypatch.setattr( esmvalcore._recipe.to_datasets, - 'get_required', + "get_required", get_required, ) @@ -156,7 +149,7 @@ def get_required(short_name, _): def get_recipe(tempdir: Path, content: str, session: Session): """Save and load recipe content.""" - recipe_file = tempdir / 'recipe_test.yml' + recipe_file = tempdir / "recipe_test.yml" # Add mandatory documentation section content = str(DEFAULT_DOCUMENTATION + content) recipe_file.write_text(content) @@ -181,7 +174,7 @@ def test_recipe_missing_scripts(tmp_path, session): ensemble: r1i1p1 timerange: 1999/2002 """) - exc_message = ("Missing scripts section in diagnostic 'diagnostic_name'.") + exc_message = "Missing scripts section in diagnostic 'diagnostic_name'." with pytest.raises(RecipeError) as exc: get_recipe(tmp_path, content, session) assert str(exc.value) == exc_message @@ -206,9 +199,11 @@ def test_recipe_duplicate_var_script_name(tmp_path, session): ta: script: tmp_path / 'diagnostic.py' """) - exc_message = ("Invalid script name 'ta' encountered in diagnostic " - "'diagnostic_name': scripts cannot have the same " - "name as variables.") + exc_message = ( + "Invalid script name 'ta' encountered in diagnostic " + "'diagnostic_name': scripts cannot have the same " + "name as variables." + ) with pytest.raises(RecipeError) as exc: get_recipe(tmp_path, content, session) assert str(exc.value) == exc_message @@ -233,8 +228,10 @@ def test_recipe_no_script(tmp_path, session): script_name: argument: 1 """) - exc_message = ("No script defined for script 'script_name' in " - "diagnostic 'diagnostic_name'.") + exc_message = ( + "No script defined for script 'script_name' in " + "diagnostic 'diagnostic_name'." + ) with pytest.raises(RecipeError) as exc: get_recipe(tmp_path, content, session) assert str(exc.value) == exc_message @@ -254,9 +251,11 @@ def test_recipe_no_datasets(tmp_path, session): end_year: 2002 scripts: null """) - exc_message = ("You have not specified any dataset " - "or additional_dataset groups for variable " - "'ta' in diagnostic 'diagnostic_name'.") + exc_message = ( + "You have not specified any dataset " + "or additional_dataset groups for variable " + "'ta' in diagnostic 'diagnostic_name'." 
+ ) with pytest.raises(RecipeError) as exc: get_recipe(tmp_path, content, session) assert str(exc.value) == exc_message @@ -279,8 +278,10 @@ def test_recipe_duplicated_datasets(tmp_path, session): timerange: 1999/2002 scripts: null """) - exc_message = ("Duplicate dataset\n{'dataset': 'bcc-csm1-1'}\n" - "for variable 'ta' in diagnostic 'diagnostic_name'.") + exc_message = ( + "Duplicate dataset\n{'dataset': 'bcc-csm1-1'}\n" + "for variable 'ta' in diagnostic 'diagnostic_name'." + ) with pytest.raises(RecipeError) as exc: get_recipe(tmp_path, content, session) assert str(exc.value) == exc_message @@ -301,17 +302,19 @@ def test_recipe_var_missing_args(tmp_path, session): timerange: 1999/2002 scripts: null """) - exc_message = ("Missing keys {'mip'} in\n{'dataset': 'bcc-csm1-1'," - "\n 'ensemble': 'r1i1p1',\n 'exp': 'historical',\n" - " 'project': 'CMIP5',\n 'short_name': 'ta',\n " - "'timerange': '1999/2002'}\nfor variable 'ta' " - "in diagnostic 'diagnostic_name'.") + exc_message = ( + "Missing keys {'mip'} in\n{'dataset': 'bcc-csm1-1'," + "\n 'ensemble': 'r1i1p1',\n 'exp': 'historical',\n" + " 'project': 'CMIP5',\n 'short_name': 'ta',\n " + "'timerange': '1999/2002'}\nfor variable 'ta' " + "in diagnostic 'diagnostic_name'." + ) with pytest.raises(RecipeError) as exc: get_recipe(tmp_path, content, session) assert str(exc.value) == exc_message -@pytest.mark.parametrize('skip_nonexistent', [True, False]) +@pytest.mark.parametrize("skip_nonexistent", [True, False]) def test_recipe_no_data(tmp_path, session, skip_nonexistent): content = dedent(""" datasets: @@ -329,18 +332,20 @@ def test_recipe_no_data(tmp_path, session, skip_nonexistent): end_year: 2002 scripts: null """) - session['skip_nonexistent'] = skip_nonexistent + session["skip_nonexistent"] = skip_nonexistent with pytest.raises(RecipeError) as error: get_recipe(tmp_path, content, session) if skip_nonexistent: - msg = ("Did not find any input data for task diagnostic_name/ta") + msg = "Did not find any input data for task diagnostic_name/ta" else: - msg = ("Missing data for preprocessor diagnostic_name/ta:\n" - "- Missing data for Dataset: .*") + msg = ( + "Missing data for preprocessor diagnostic_name/ta:\n" + "- Missing data for Dataset: .*" + ) assert re.match(msg, error.value.failed_tasks[0].message) -@pytest.mark.parametrize('script_file', ['diagnostic.py', 'diagnostic.ncl']) +@pytest.mark.parametrize("script_file", ["diagnostic.py", "diagnostic.ncl"]) def test_simple_recipe( tmp_path, patched_datafinder, @@ -348,20 +353,20 @@ def test_simple_recipe( script_file, monkeypatch, ): - def ncl_version(): - return '6.5' + return "6.5" - monkeypatch.setattr(esmvalcore._recipe.check, 'ncl_version', ncl_version) + monkeypatch.setattr(esmvalcore._recipe.check, "ncl_version", ncl_version) def which(interpreter): return interpreter - monkeypatch.setattr(esmvalcore._task, 'which', which) + monkeypatch.setattr(esmvalcore._task, "which", which) script = tmp_path / script_file - script.write_text('') - content = dedent(""" + script.write_text("") + content = dedent( + """ datasets: - dataset: bcc-csm1-1 @@ -389,7 +394,8 @@ def which(interpreter): script_name: script: {} custom_setting: 1 - """.format(script)) + """.format(script) + ) recipe = get_recipe(tmp_path, content, session) # Check that datasets have been read and updated @@ -410,14 +416,16 @@ def which(interpreter): assert task.order == list(DEFAULT_ORDER) for product in task.products: dataset = [ - d for d in datasets if _get_output_file( - d.facets, session.preproc_dir) == 
product.filename + d + for d in datasets + if _get_output_file(d.facets, session.preproc_dir) + == product.filename ][0] assert product.datasets == [dataset] attributes = dict(dataset.facets) - attributes['filename'] = product.filename - attributes['start_year'] = 1999 - attributes['end_year'] = 2002 + attributes["filename"] = product.filename + attributes["start_year"] = 1999 + attributes["end_year"] = 2002 assert product.attributes == attributes for step in DEFAULT_PREPROCESSOR_STEPS: assert step in product.settings @@ -430,17 +438,18 @@ def which(interpreter): assert task.script == str(script) for key in MANDATORY_SCRIPT_SETTINGS_KEYS: assert key in task.settings and task.settings[key] - assert task.settings['custom_setting'] == 1 + assert task.settings["custom_setting"] == 1 # Check that NCL interface is enabled for NCL scripts. - write_ncl_interface = script.suffix == '.ncl' - assert datasets[0].session['write_ncl_interface'] == write_ncl_interface + write_ncl_interface = script.suffix == ".ncl" + assert datasets[0].session["write_ncl_interface"] == write_ncl_interface def test_write_filled_recipe(tmp_path, patched_datafinder, session): - script = tmp_path / 'diagnostic.py' - script.write_text('') - content = dedent(""" + script = tmp_path / "diagnostic.py" + script.write_text("") + content = dedent( + """ datasets: - dataset: bcc-csm1-1 @@ -469,28 +478,29 @@ def test_write_filled_recipe(tmp_path, patched_datafinder, session): script_name: script: {} custom_setting: 1 - """.format(script)) + """.format(script) + ) recipe = get_recipe(tmp_path, content, session) session.run_dir.mkdir(parents=True) esmvalcore._recipe.recipe.Recipe.write_filled_recipe(recipe) - recipe_file = session.run_dir / 'recipe_test_filled.yml' + recipe_file = session.run_dir / "recipe_test_filled.yml" assert recipe_file.is_file() updated_recipe_object = read_recipe_file(recipe_file, session) updated_recipe = updated_recipe_object._raw_recipe print(pformat(updated_recipe)) - assert get_occurrence_of_value(updated_recipe, value='*') == 0 - assert get_occurrence_of_value(updated_recipe, value='1990/2019') == 2 - assert get_occurrence_of_value(updated_recipe, value='1990/P2Y') == 1 + assert get_occurrence_of_value(updated_recipe, value="*") == 0 + assert get_occurrence_of_value(updated_recipe, value="1990/2019") == 2 + assert get_occurrence_of_value(updated_recipe, value="1990/P2Y") == 1 assert len(updated_recipe_object.datasets) == 3 def test_fx_preproc_error(tmp_path, patched_datafinder, session): - script = tmp_path / 'diagnostic.py' - script.write_text('') + script = tmp_path / "diagnostic.py" + script.write_text("") content = dedent(""" datasets: - dataset: bcc-csm1-1 @@ -515,8 +525,10 @@ def test_fx_preproc_error(tmp_path, patched_datafinder, session): - dataset: MPI-ESM-LR scripts: null """) - msg = ("Time coordinate preprocessor step(s) ['extract_season'] not " - "permitted on fx vars, please remove them from recipe") + msg = ( + "Time coordinate preprocessor step(s) ['extract_season'] not " + "permitted on fx vars, please remove them from recipe" + ) with pytest.raises(Exception) as rec_err_exp: get_recipe(tmp_path, content, session) assert str(rec_err_exp.value) == INITIALIZATION_ERROR_MSG @@ -553,8 +565,9 @@ def test_default_preprocessor(tmp_path, patched_datafinder, session): assert product.settings == defaults -def test_default_preprocessor_custom_order(tmp_path, patched_datafinder, - session): +def test_default_preprocessor_custom_order( + tmp_path, patched_datafinder, session +): """Test if default 
settings are used when ``custom_order`` is ``True``.""" content = dedent(""" @@ -650,7 +663,7 @@ def test_disable_preprocessor_function(tmp_path, patched_datafinder, session): task = recipe.tasks.pop() assert len(task.products) == 1 product = task.products.pop() - assert 'remove_supplementary_variables' not in product.settings + assert "remove_supplementary_variables" not in product.settings def test_default_fx_preprocessor(tmp_path, patched_datafinder, session): @@ -678,11 +691,11 @@ def test_default_fx_preprocessor(tmp_path, patched_datafinder, session): assert preproc_dir.startswith(str(tmp_path)) defaults = { - 'remove_supplementary_variables': {}, - 'save': { - 'compress': False, - 'filename': product.filename, - } + "remove_supplementary_variables": {}, + "save": { + "compress": False, + "filename": product.filename, + }, } assert product.settings == defaults @@ -710,37 +723,42 @@ def test_empty_variable(tmp_path, patched_datafinder, session): task = recipe.tasks.pop() assert len(task.products) == 1 product = task.products.pop() - assert product.attributes['short_name'] == 'pr' - assert product.attributes['dataset'] == 'CanESM2' + assert product.attributes["short_name"] == "pr" + assert product.attributes["dataset"] == "CanESM2" TEST_ISO_TIMERANGE = [ - ('*', '1990-2019'), - ('1990/1992', '1990-1992'), - ('19900101/19920101', '19900101-19920101'), - ('19900101T12H00M00S/19920101T12H00M00', - '19900101T12H00M00S-19920101T12H00M00'), - ('1990/*', '1990-2019'), - ('*/1992', '1990-1992'), - ('1990/P2Y', '1990-P2Y'), - ('19900101/P2Y2M1D', '19900101-P2Y2M1D'), - ('19900101TH00M00S/P2Y2M1DT12H00M00S', - '19900101TH00M00S-P2Y2M1DT12H00M00S'), - ('P2Y/1992', 'P2Y-1992'), - ('P1Y2M1D/19920101', 'P1Y2M1D-19920101'), - ('P1Y2M1D/19920101T12H00M00S', 'P1Y2M1D-19920101T12H00M00S'), - ('P2Y/*', 'P2Y-2019'), - ('P2Y2M1D/*', 'P2Y2M1D-2019'), - ('P2Y21DT12H00M00S/*', 'P2Y21DT12H00M00S-2019'), - ('*/P2Y', '1990-P2Y'), - ('*/P2Y2M1D', '1990-P2Y2M1D'), - ('*/P2Y21DT12H00M00S', '1990-P2Y21DT12H00M00S'), + ("*", "1990-2019"), + ("1990/1992", "1990-1992"), + ("19900101/19920101", "19900101-19920101"), + ( + "19900101T12H00M00S/19920101T12H00M00", + "19900101T12H00M00S-19920101T12H00M00", + ), + ("1990/*", "1990-2019"), + ("*/1992", "1990-1992"), + ("1990/P2Y", "1990-P2Y"), + ("19900101/P2Y2M1D", "19900101-P2Y2M1D"), + ( + "19900101TH00M00S/P2Y2M1DT12H00M00S", + "19900101TH00M00S-P2Y2M1DT12H00M00S", + ), + ("P2Y/1992", "P2Y-1992"), + ("P1Y2M1D/19920101", "P1Y2M1D-19920101"), + ("P1Y2M1D/19920101T12H00M00S", "P1Y2M1D-19920101T12H00M00S"), + ("P2Y/*", "P2Y-2019"), + ("P2Y2M1D/*", "P2Y2M1D-2019"), + ("P2Y21DT12H00M00S/*", "P2Y21DT12H00M00S-2019"), + ("*/P2Y", "1990-P2Y"), + ("*/P2Y2M1D", "1990-P2Y2M1D"), + ("*/P2Y21DT12H00M00S", "1990-P2Y21DT12H00M00S"), ] -@pytest.mark.parametrize('input_time,output_time', TEST_ISO_TIMERANGE) -def test_recipe_iso_timerange(tmp_path, patched_datafinder, session, - input_time, output_time): +@pytest.mark.parametrize("input_time,output_time", TEST_ISO_TIMERANGE) +def test_recipe_iso_timerange( + tmp_path, patched_datafinder, session, input_time, output_time +): """Test recipe with timerange tag.""" content = dedent(f""" diagnostics: @@ -762,26 +780,30 @@ def test_recipe_iso_timerange(tmp_path, patched_datafinder, session, recipe = get_recipe(tmp_path, content, session) assert len(recipe.tasks) == 2 - pr_task = [t for t in recipe.tasks if t.name.endswith('pr')][0] + pr_task = [t for t in recipe.tasks if t.name.endswith("pr")][0] assert len(pr_task.products) == 1 pr_product = 
pr_task.products.pop() - filename = ('CMIP6_HadGEM3-GC31-LL_3hr_historical_r2i1p1f1_' - f'pr_gn_{output_time}.nc') + filename = ( + "CMIP6_HadGEM3-GC31-LL_3hr_historical_r2i1p1f1_" + f"pr_gn_{output_time}.nc" + ) assert pr_product.filename.name == filename - areacella_task = [t for t in recipe.tasks - if t.name.endswith('areacella')][0] + areacella_task = [t for t in recipe.tasks if t.name.endswith("areacella")][ + 0 + ] assert len(areacella_task.products) == 1 areacella_product = areacella_task.products.pop() - filename = 'CMIP6_HadGEM3-GC31-LL_fx_historical_r2i1p1f1_areacella_gn.nc' + filename = "CMIP6_HadGEM3-GC31-LL_fx_historical_r2i1p1f1_areacella_gn.nc" assert areacella_product.filename.name == filename -@pytest.mark.parametrize('input_time,output_time', TEST_ISO_TIMERANGE) -def test_recipe_iso_timerange_as_dataset(tmp_path, patched_datafinder, session, - input_time, output_time): +@pytest.mark.parametrize("input_time,output_time", TEST_ISO_TIMERANGE) +def test_recipe_iso_timerange_as_dataset( + tmp_path, patched_datafinder, session, input_time, output_time +): """Test recipe with timerange tag in the datasets section.""" content = dedent(f""" datasets: @@ -808,24 +830,28 @@ def test_recipe_iso_timerange_as_dataset(tmp_path, patched_datafinder, session, task = recipe.tasks.pop() assert len(task.products) == 1 product = task.products.pop() - filename = ('CMIP6_HadGEM3-GC31-LL_3hr_historical_r2i1p1f1_' - f'pr_gn_{output_time}.nc') + filename = ( + "CMIP6_HadGEM3-GC31-LL_3hr_historical_r2i1p1f1_" + f"pr_gn_{output_time}.nc" + ) assert product.filename.name == filename assert len(product.datasets) == 1 dataset = product.datasets[0] assert len(dataset.supplementaries) == 1 supplementary_ds = dataset.supplementaries[0] - assert supplementary_ds.facets['short_name'] == 'areacella' - assert 'timerange' not in supplementary_ds.facets + assert supplementary_ds.facets["short_name"] == "areacella" + assert "timerange" not in supplementary_ds.facets def test_reference_dataset(tmp_path, patched_datafinder, session, monkeypatch): levels = [100] get_reference_levels = create_autospec( - esmvalcore._recipe.recipe.get_reference_levels, return_value=levels) - monkeypatch.setattr(esmvalcore._recipe.recipe, 'get_reference_levels', - get_reference_levels) + esmvalcore._recipe.recipe.get_reference_levels, return_value=levels + ) + monkeypatch.setattr( + esmvalcore._recipe.recipe, "get_reference_levels", get_reference_levels + ) content = dedent(""" preprocessors: @@ -871,28 +897,34 @@ def test_reference_dataset(tmp_path, patched_datafinder, session, monkeypatch): assert len(recipe.tasks) == 2 # Check that the reference dataset has been used - task = next(t for t in recipe.tasks - if t.name == 'diagnostic_name' + TASKSEP + 'ta') + task = next( + t for t in recipe.tasks if t.name == "diagnostic_name" + TASKSEP + "ta" + ) assert len(task.products) == 2 - product = next(p for p in task.products - if p.attributes['dataset'] == 'GFDL-CM3') - reference = next(p for p in task.products - if p.attributes['dataset'] == 'MPI-ESM-LR') + product = next( + p for p in task.products if p.attributes["dataset"] == "GFDL-CM3" + ) + reference = next( + p for p in task.products if p.attributes["dataset"] == "MPI-ESM-LR" + ) - assert product.settings['regrid']['target_grid'] == reference.datasets[0] - assert product.settings['extract_levels']['levels'] == levels + assert product.settings["regrid"]["target_grid"] == reference.datasets[0] + assert product.settings["extract_levels"]["levels"] == levels 
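For context on what test_reference_dataset drives: in a recipe, `reference_dataset` names one of the variable's datasets, and the `regrid` and `extract_levels` steps then resolve their target grid and levels from that dataset's files, while the reference product itself gets neither step. An illustrative fragment of such a recipe follows; the key names mirror the recipes embedded in these tests, but this exact snippet is hypothetical:

from textwrap import dedent

import yaml

# Hypothetical recipe fragment; illustrative, not taken from the diff.
recipe_snippet = yaml.safe_load(dedent("""
    preprocessors:
      test_from_reference:
        regrid:
          target_grid: reference_dataset
          scheme: linear
        extract_levels:
          levels: reference_dataset
          scheme: linear
"""))

settings = recipe_snippet["preprocessors"]["test_from_reference"]
assert settings["regrid"]["target_grid"] == "reference_dataset"
assert settings["extract_levels"]["levels"] == "reference_dataset"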
get_reference_levels.assert_called_once_with(reference.datasets[0]) - assert 'regrid' not in reference.settings - assert 'extract_levels' not in reference.settings + assert "regrid" not in reference.settings + assert "extract_levels" not in reference.settings # Check that levels have been read from CMOR table - task = next(t for t in recipe.tasks - if t.name == 'diagnostic_name' + TASKSEP + 'ch4') + task = next( + t + for t in recipe.tasks + if t.name == "diagnostic_name" + TASKSEP + "ch4" + ) assert len(task.products) == 1 product = next(iter(task.products)) - assert product.settings['extract_levels']['levels'] == [ + assert product.settings["extract_levels"]["levels"] == [ 0, 250, 750, @@ -939,8 +971,10 @@ def test_reference_dataset_undefined(tmp_path, monkeypatch, session): """) with pytest.raises(RecipeError) as error: get_recipe(tmp_path, content, session) - msg = ("Preprocessor 'test_from_reference' uses 'reference_dataset', but " - "'reference_dataset' is not defined") + msg = ( + "Preprocessor 'test_from_reference' uses 'reference_dataset', but " + "'reference_dataset' is not defined" + ) assert msg in error.value.failed_tasks[0].message @@ -999,29 +1033,32 @@ def test_custom_preproc_order(tmp_path, patched_datafinder, session): assert len(recipe.tasks) == 4 for task in recipe.tasks: - if task.name == 'diagnostic_name/chl_default': - assert task.order.index('area_statistics') < task.order.index( - 'multi_model_statistics') - elif task.name == 'diagnostic_name/chl_custom': - assert task.order.index('area_statistics') > task.order.index( - 'multi_model_statistics') - elif task.name == 'diagnostic_name/chl_empty_custom': + if task.name == "diagnostic_name/chl_default": + assert task.order.index("area_statistics") < task.order.index( + "multi_model_statistics" + ) + elif task.name == "diagnostic_name/chl_custom": + assert task.order.index("area_statistics") > task.order.index( + "multi_model_statistics" + ) + elif task.name == "diagnostic_name/chl_empty_custom": assert len(task.products) == 1 product = list(task.products)[0] - assert set( - product.settings.keys()) == set(DEFAULT_PREPROCESSOR_STEPS) - elif task.name == 'diagnostic_name/chl_with_extract_time': + assert set(product.settings.keys()) == set( + DEFAULT_PREPROCESSOR_STEPS + ) + elif task.name == "diagnostic_name/chl_with_extract_time": assert len(task.products) == 1 product = list(task.products)[0] - steps = set(DEFAULT_PREPROCESSOR_STEPS + tuple(['extract_time'])) + steps = set(DEFAULT_PREPROCESSOR_STEPS + tuple(["extract_time"])) assert set(product.settings.keys()) == steps - assert product.settings['extract_time'] == { - 'start_year': 2001, - 'start_month': 3, - 'start_day': 14, - 'end_year': 2002, - 'end_month': 6, - 'end_day': 28, + assert product.settings["extract_time"] == { + "start_year": 2001, + "start_month": 3, + "start_day": 14, + "end_year": 2002, + "end_month": 6, + "end_day": 28, } else: assert False, f"invalid task {task.name}" @@ -1050,17 +1087,17 @@ def test_derive(tmp_path, patched_datafinder, session): # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name' + TASKSEP + 'toz' + assert task.name == "diagnostic_name" + TASKSEP + "toz" # Check product content of tasks assert len(task.products) == 1 product = task.products.pop() - assert 'derive' in product.settings - assert product.attributes['short_name'] == 'toz' + assert "derive" in product.settings + assert product.attributes["short_name"] == "toz" assert len(product.datasets) == 2 - 
input_variables = {d.facets['short_name'] for d in product.datasets} - assert input_variables == {'ps', 'tro3'} + input_variables = {d.facets["short_name"] for d in product.datasets} + assert input_variables == {"ps", "tro3"} def test_derive_not_needed(tmp_path, patched_datafinder, session): @@ -1086,17 +1123,17 @@ def test_derive_not_needed(tmp_path, patched_datafinder, session): # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name/toz' + assert task.name == "diagnostic_name/toz" # Check product content of tasks assert len(task.products) == 1 product = task.products.pop() - assert 'derive' not in product.settings + assert "derive" not in product.settings # Check dataset assert len(product.datasets) == 1 dataset = product.datasets[0] - assert dataset.facets['short_name'] == 'toz' + assert dataset.facets["short_name"] == "toz" assert dataset.files @@ -1126,30 +1163,33 @@ def test_derive_with_fx_ohc(tmp_path, patched_datafinder, session): # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name' + TASKSEP + 'ohc' + assert task.name == "diagnostic_name" + TASKSEP + "ohc" # Check products assert len(task.products) == 3 for product in task.products: - assert 'derive' in product.settings - assert product.attributes['short_name'] == 'ohc' + assert "derive" in product.settings + assert product.attributes["short_name"] == "ohc" # Check datasets assert len(product.datasets) == 2 - thetao_ds = next(d for d in product.datasets - if d.facets['short_name'] == 'thetao') - assert thetao_ds.facets['mip'] == 'Omon' - volcello_ds = next(d for d in product.datasets - if d.facets['short_name'] == 'volcello') - if volcello_ds.facets['project'] == 'CMIP6': - mip = 'Ofx' + thetao_ds = next( + d for d in product.datasets if d.facets["short_name"] == "thetao" + ) + assert thetao_ds.facets["mip"] == "Omon" + volcello_ds = next( + d for d in product.datasets if d.facets["short_name"] == "volcello" + ) + if volcello_ds.facets["project"] == "CMIP6": + mip = "Ofx" else: - mip = 'fx' - assert volcello_ds.facets['mip'] == mip + mip = "fx" + assert volcello_ds.facets["mip"] == mip -def test_derive_with_fx_ohc_fail(tmp_path, patched_failing_datafinder, - session): +def test_derive_with_fx_ohc_fail( + tmp_path, patched_failing_datafinder, session +): content = dedent(""" diagnostics: diagnostic_name: @@ -1174,8 +1214,9 @@ def test_derive_with_fx_ohc_fail(tmp_path, patched_failing_datafinder, get_recipe(tmp_path, content, session) -def test_derive_with_optional_var(tmp_path, patched_datafinder, - patched_tas_derivation, session): +def test_derive_with_optional_var( + tmp_path, patched_datafinder, patched_tas_derivation, session +): content = dedent(""" diagnostics: diagnostic_name: @@ -1201,26 +1242,31 @@ def test_derive_with_optional_var(tmp_path, patched_datafinder, # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name' + TASKSEP + 'tas' + assert task.name == "diagnostic_name" + TASKSEP + "tas" # Check products assert len(task.products) == 3 for product in task.products: - assert 'derive' in product.settings - assert product.attributes['short_name'] == 'tas' + assert "derive" in product.settings + assert product.attributes["short_name"] == "tas" assert len(product.datasets) == 2 - pr_ds = next(d for d in product.datasets - if d.facets['short_name'] == 'pr') - assert pr_ds.facets['mip'] == 'Amon' - assert pr_ds.facets['timerange'] == 
'2000/2005' - areacella_ds = next(d for d in product.datasets - if d.facets['short_name'] == 'areacella') - assert areacella_ds.facets['mip'] == 'fx' - assert 'timerange' not in areacella_ds.facets - - -def test_derive_with_optional_var_nodata(tmp_path, patched_failing_datafinder, - patched_tas_derivation, session): + pr_ds = next( + d for d in product.datasets if d.facets["short_name"] == "pr" + ) + assert pr_ds.facets["mip"] == "Amon" + assert pr_ds.facets["timerange"] == "2000/2005" + areacella_ds = next( + d + for d in product.datasets + if d.facets["short_name"] == "areacella" + ) + assert areacella_ds.facets["mip"] == "fx" + assert "timerange" not in areacella_ds.facets + + +def test_derive_with_optional_var_nodata( + tmp_path, patched_failing_datafinder, patched_tas_derivation, session +): content = dedent(""" diagnostics: diagnostic_name: @@ -1246,17 +1292,17 @@ def test_derive_with_optional_var_nodata(tmp_path, patched_failing_datafinder, # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name' + TASKSEP + 'tas' + assert task.name == "diagnostic_name" + TASKSEP + "tas" # Check products assert len(task.products) == 3 for product in task.products: - assert 'derive' in product.settings - assert product.attributes['short_name'] == 'tas' + assert "derive" in product.settings + assert product.attributes["short_name"] == "tas" # Check datasets assert len(product.datasets) == 1 - assert product.datasets[0].facets['short_name'] == 'pr' + assert product.datasets[0].facets["short_name"] == "pr" def test_derive_contains_start_end_year(tmp_path, patched_datafinder, session): @@ -1285,17 +1331,17 @@ def test_derive_contains_start_end_year(tmp_path, patched_datafinder, session): # Check that start_year and end_year are present in attributes assert len(task.products) == 1 product = task.products.pop() - assert 'derive' in product.settings - assert product.attributes['short_name'] == 'toz' - assert product.attributes['timerange'] == '2000/2005' - assert product.attributes['start_year'] == 2000 - assert product.attributes['end_year'] == 2005 + assert "derive" in product.settings + assert product.attributes["short_name"] == "toz" + assert product.attributes["timerange"] == "2000/2005" + assert product.attributes["start_year"] == 2000 + assert product.attributes["end_year"] == 2005 -@pytest.mark.parametrize('force_derivation', [True, False]) -def test_derive_timerange_wildcard(tmp_path, patched_datafinder, session, - force_derivation): - +@pytest.mark.parametrize("force_derivation", [True, False]) +def test_derive_timerange_wildcard( + tmp_path, patched_datafinder, session, force_derivation +): content = dedent(f""" diagnostics: diagnostic_name: @@ -1323,26 +1369,26 @@ def test_derive_timerange_wildcard(tmp_path, patched_datafinder, session, assert len(task.products) == 1 product = task.products.pop() if force_derivation: - assert 'derive' in product.settings - assert product.attributes['short_name'] == 'toz' - assert product.attributes['timerange'] == '1990/2019' - assert product.attributes['start_year'] == 1990 - assert product.attributes['end_year'] == 2019 + assert "derive" in product.settings + assert product.attributes["short_name"] == "toz" + assert product.attributes["timerange"] == "1990/2019" + assert product.attributes["start_year"] == 1990 + assert product.attributes["end_year"] == 2019 def create_test_image(basename, cfg): """Get a valid path for saving a diagnostic plot.""" - image = Path(cfg['plot_dir']) / (basename + '.' 
+ cfg['output_file_type']) + image = Path(cfg["plot_dir"]) / (basename + "." + cfg["output_file_type"]) image.parent.mkdir(parents=True) - Image.new('RGB', (1, 1)).save(image) + Image.new("RGB", (1, 1)).save(image) return str(image) -def get_diagnostic_filename(basename, cfg, extension='nc'): +def get_diagnostic_filename(basename, cfg, extension="nc"): """Get a valid path for saving a diagnostic data file.""" return os.path.join( - cfg['work_dir'], - basename + '.' + extension, + cfg["work_dir"], + basename + "." + extension, ) @@ -1361,21 +1407,21 @@ def simulate_diagnostic_run(diagnostic_task): p.filename for a in diagnostic_task.ancestors for p in a.products ] record = { - 'caption': 'Test figure', - 'statistics': ['mean', 'var'], - 'domains': ['trop', 'et'], - 'plot_types': ['zonal'], - 'authors': ['andela_bouwe'], - 'references': ['acknow_project'], - 'ancestors': input_files, + "caption": "Test figure", + "statistics": ["mean", "var"], + "domains": ["trop", "et"], + "plot_types": ["zonal"], + "authors": ["andela_bouwe"], + "references": ["acknow_project"], + "ancestors": input_files, } - diagnostic_file = get_diagnostic_filename('test', cfg) + diagnostic_file = get_diagnostic_filename("test", cfg) create_test_file(diagnostic_file) - plot_file = create_test_image('test', cfg) - provenance = os.path.join(cfg['run_dir'], 'diagnostic_provenance.yml') - os.makedirs(cfg['run_dir']) - with open(provenance, 'w', encoding='utf-8') as file: + plot_file = create_test_image("test", cfg) + provenance = os.path.join(cfg["run_dir"], "diagnostic_provenance.yml") + os.makedirs(cfg["run_dir"]) + with open(provenance, "w", encoding="utf-8") as file: yaml.safe_dump({diagnostic_file: record, plot_file: record}, file) diagnostic_task._collect_provenance() @@ -1387,12 +1433,13 @@ def test_diagnostic_task_provenance( patched_datafinder, session, ): - script = tmp_path / 'diagnostic.py' - script.write_text('') + script = tmp_path / "diagnostic.py" + script.write_text("") TAGS.set_tag_values(TAGS_FOR_TESTING) - content = dedent(""" + content = dedent( + """ diagnostics: diagnostic_name: themes: @@ -1415,11 +1462,13 @@ def test_diagnostic_task_provenance( script_name2: script: {script} ancestors: [script_name] - """.format(script=script)) + """.format(script=script) + ) recipe = get_recipe(tmp_path, content, session) - preproc_task = next(t for t in recipe.tasks.flatten() - if isinstance(t, PreprocessingTask)) + preproc_task = next( + t for t in recipe.tasks.flatten() if isinstance(t, PreprocessingTask) + ) simulate_preprocessor_run(preproc_task) diagnostic_task = recipe.tasks.pop() @@ -1432,41 +1481,47 @@ def test_diagnostic_task_provenance( for product in diagnostic_task.products: product.restore_provenance() check_provenance(product) - assert product.attributes['caption'] == record['caption'] - assert product.entity.get_attribute( - 'attribute:' + 'caption').pop() == record['caption'] + assert product.attributes["caption"] == record["caption"] + assert ( + product.entity.get_attribute("attribute:" + "caption").pop() + == record["caption"] + ) # Check that diagnostic script tags have been added - for key in ('statistics', 'domains', 'authors'): - assert product.attributes[key] == tuple(TAGS[key][k] - for k in record[key]) + for key in ("statistics", "domains", "authors"): + assert product.attributes[key] == tuple( + TAGS[key][k] for k in record[key] + ) # Check that recipe diagnostic tags have been added src = yaml.safe_load(DEFAULT_DOCUMENTATION + content) - for key in ('realms', 'themes'): - value = 
src['diagnostics']['diagnostic_name'][key] - assert product.attributes[key] == tuple(TAGS[key][k] - for k in value) + for key in ("realms", "themes"): + value = src["diagnostics"]["diagnostic_name"][key] + assert product.attributes[key] == tuple( + TAGS[key][k] for k in value + ) # Check that recipe tags have been added - recipe_record = product.provenance.get_record('recipe:recipe_test.yml') + recipe_record = product.provenance.get_record("recipe:recipe_test.yml") assert len(recipe_record) == 1 - for key in ('description', 'references'): - value = src['documentation'][key] - if key == 'references': - value = str(src['documentation'][key]) - assert recipe_record[0].get_attribute('attribute:' + - key).pop() == value + for key in ("description", "references"): + value = src["documentation"][key] + if key == "references": + value = str(src["documentation"][key]) + assert ( + recipe_record[0].get_attribute("attribute:" + key).pop() + == value + ) # Test that provenance was saved to xml and info embedded in netcdf product = next( - iter(p for p in diagnostic_task.products - if p.filename.endswith('.nc'))) + iter(p for p in diagnostic_task.products if p.filename.endswith(".nc")) + ) cube = iris.load_cube(product.filename) - assert cube.attributes['software'].startswith("Created with ESMValTool v") - assert cube.attributes['caption'] == record['caption'] - prefix = os.path.splitext(product.filename)[0] + '_provenance' - assert os.path.exists(prefix + '.xml') + assert cube.attributes["software"].startswith("Created with ESMValTool v") + assert cube.attributes["caption"] == record["caption"] + prefix = os.path.splitext(product.filename)[0] + "_provenance" + assert os.path.exists(prefix + ".xml") def test_alias_generation(tmp_path, patched_datafinder, session): @@ -1502,45 +1557,45 @@ def test_alias_generation(tmp_path, patched_datafinder, session): - {project: CORDEX, driver: ICHEC-EC-EARTH, dataset: RCA4, ensemble: r1, mip: mon, institute: SMHI} - {project: CORDEX, driver: MIROC-MIROC5, dataset: RCA4, ensemble: r1, mip: mon, institute: SMHI} scripts: null - """) # noqa: + """) recipe = get_recipe(tmp_path, content, session) assert len(recipe.datasets) == 14 for dataset in recipe.datasets: - if dataset['project'] == 'CMIP5': - if dataset['dataset'] == 'GFDL-CM3': - assert dataset['alias'] == 'CMIP5_GFDL-CM3' - elif dataset['dataset'] == 'FGOALS-g3': - if dataset['sub_experiment'] == 's1960': - assert dataset['alias'] == 'CMIP5_FGOALS-g3_s1960' + if dataset["project"] == "CMIP5": + if dataset["dataset"] == "GFDL-CM3": + assert dataset["alias"] == "CMIP5_GFDL-CM3" + elif dataset["dataset"] == "FGOALS-g3": + if dataset["sub_experiment"] == "s1960": + assert dataset["alias"] == "CMIP5_FGOALS-g3_s1960" else: - assert dataset['alias'] == 'CMIP5_FGOALS-g3_s1961' + assert dataset["alias"] == "CMIP5_FGOALS-g3_s1961" else: - if dataset['ensemble'] == 'r1i1p1': - assert dataset['alias'] == 'CMIP5_EC-EARTH_r1i1p1' - elif dataset['ensemble'] == 'r2i1p1': - assert dataset['alias'] == 'CMIP5_EC-EARTH_r2i1p1' + if dataset["ensemble"] == "r1i1p1": + assert dataset["alias"] == "CMIP5_EC-EARTH_r1i1p1" + elif dataset["ensemble"] == "r2i1p1": + assert dataset["alias"] == "CMIP5_EC-EARTH_r2i1p1" else: - assert dataset['alias'] == 'my_alias' - elif dataset['project'] == 'CMIP6': - if dataset['dataset'] == 'GF3': - assert dataset['alias'] == 'CMIP6_CMP_GF3' - elif dataset['dataset'] == 'GF2': - assert dataset['alias'] == 'CMIP6_CMP_GF2' - elif dataset['dataset'] == 'EC': - assert dataset['alias'] == 'CMIP6_HRMP_EC' + 
assert dataset["alias"] == "my_alias" + elif dataset["project"] == "CMIP6": + if dataset["dataset"] == "GF3": + assert dataset["alias"] == "CMIP6_CMP_GF3" + elif dataset["dataset"] == "GF2": + assert dataset["alias"] == "CMIP6_CMP_GF2" + elif dataset["dataset"] == "EC": + assert dataset["alias"] == "CMIP6_HRMP_EC" else: - assert dataset['alias'] == 'CMIP6_HRMP_HA' - elif dataset['project'] == 'CORDEX': - if dataset['driver'] == 'ICHEC-EC-EARTH': - assert dataset['alias'] == 'CORDEX_ICHEC-EC-EARTH' + assert dataset["alias"] == "CMIP6_HRMP_HA" + elif dataset["project"] == "CORDEX": + if dataset["driver"] == "ICHEC-EC-EARTH": + assert dataset["alias"] == "CORDEX_ICHEC-EC-EARTH" else: - assert dataset['alias'] == 'CORDEX_MIROC-MIROC5' + assert dataset["alias"] == "CORDEX_MIROC-MIROC5" else: - if dataset['version'] == 1: - assert dataset['alias'] == 'OBS_1' + if dataset["version"] == 1: + assert dataset["alias"] == "OBS_1" else: - assert dataset['alias'] == 'OBS_2' + assert dataset["alias"] == "OBS_2" def test_concatenation(tmp_path, patched_datafinder, session): @@ -1570,10 +1625,10 @@ def test_concatenation(tmp_path, patched_datafinder, session): recipe = get_recipe(tmp_path, content, session) assert len(recipe.datasets) == 2 for dataset in recipe.datasets: - if dataset['exp'] == 'historical': - assert dataset['alias'] == 'historical' + if dataset["exp"] == "historical": + assert dataset["alias"] == "historical" else: - assert dataset['alias'] == 'historical-rcp85' + assert dataset["alias"] == "historical-rcp85" def test_ensemble_expansion(tmp_path, patched_datafinder, session): @@ -1599,9 +1654,9 @@ def test_ensemble_expansion(tmp_path, patched_datafinder, session): recipe = get_recipe(tmp_path, content, session) assert len(recipe.datasets) == 3 - assert recipe.datasets[0]['ensemble'] == 'r1i1p1' - assert recipe.datasets[1]['ensemble'] == 'r2i1p1' - assert recipe.datasets[2]['ensemble'] == 'r3i1p1' + assert recipe.datasets[0]["ensemble"] == "r1i1p1" + assert recipe.datasets[1]["ensemble"] == "r2i1p1" + assert recipe.datasets[2]["ensemble"] == "r3i1p1" def test_extract_shape(tmp_path, patched_datafinder, session): @@ -1629,7 +1684,7 @@ def test_extract_shape(tmp_path, patched_datafinder, session): scripts: null """) # Create shapefile - shapefile = session['auxiliary_data_dir'] / Path('test.shp') + shapefile = session["auxiliary_data_dir"] / Path("test.shp") shapefile.parent.mkdir(parents=True, exist_ok=True) shapefile.touch() @@ -1639,17 +1694,19 @@ def test_extract_shape(tmp_path, patched_datafinder, session): task = recipe.tasks.pop() assert len(task.products) == 1 product = task.products.pop() - assert product.settings['extract_shape']['shapefile'] == shapefile + assert product.settings["extract_shape"]["shapefile"] == shapefile -@pytest.mark.parametrize('invalid_arg', - ['shapefile', 'method', 'crop', 'decomposed']) -def test_extract_shape_raises(tmp_path, patched_datafinder, session, - invalid_arg): +@pytest.mark.parametrize( + "invalid_arg", ["shapefile", "method", "crop", "decomposed"] +) +def test_extract_shape_raises( + tmp_path, patched_datafinder, session, invalid_arg +): TAGS.set_tag_values(TAGS_FOR_TESTING) # Create shapefile - shapefile = session['auxiliary_data_dir'] / Path('test.shp') + shapefile = session["auxiliary_data_dir"] / Path("test.shp") shapefile.parent.mkdir(parents=True, exist_ok=True) shapefile.touch() @@ -1679,14 +1736,14 @@ def test_extract_shape_raises(tmp_path, patched_datafinder, session, # Add invalid argument recipe = yaml.safe_load(content) - 
recipe['preprocessors']['test']['extract_shape'][invalid_arg] = 'x' + recipe["preprocessors"]["test"]["extract_shape"][invalid_arg] = "x" content = yaml.safe_dump(recipe) with pytest.raises(RecipeError) as exc: get_recipe(tmp_path, content, session) assert str(exc.value) == INITIALIZATION_ERROR_MSG - assert 'extract_shape' in exc.value.failed_tasks[0].message + assert "extract_shape" in exc.value.failed_tasks[0].message assert invalid_arg in exc.value.failed_tasks[0].message @@ -1696,7 +1753,7 @@ def _test_output_product_consistency(products, preprocessor, statistics): for i, product in enumerate(products): settings = product.settings.get(preprocessor) if settings: - output_products = settings['output_products'] + output_products = settings["output_products"] for identifier, statistic_out in output_products.items(): for statistic, preproc_file in statistic_out.items(): @@ -1705,16 +1762,16 @@ def _test_output_product_consistency(products, preprocessor, statistics): # Make sure that output products are consistent for (identifier, statistic), value in product_out.items(): assert statistic in statistics - assert len(set(value)) == 1, 'Output products are not equal' + assert len(set(value)) == 1, "Output products are not equal" return product_out def test_ensemble_statistics(tmp_path, patched_datafinder, session): - statistics = ['mean', 'max'] - diagnostic = 'diagnostic_name' - variable = 'pr' - preprocessor = 'ensemble_statistics' + statistics = ["mean", "max"] + diagnostic = "diagnostic_name" + variable = "pr" + preprocessor = "ensemble_statistics" content = dedent(f""" preprocessors: @@ -1743,12 +1800,13 @@ def test_ensemble_statistics(tmp_path, patched_datafinder, session): """) recipe = get_recipe(tmp_path, content, session) - datasets = set([ds['dataset'] for ds in recipe.datasets]) + datasets = set([ds["dataset"] for ds in recipe.datasets]) task = next(iter(recipe.tasks)) products = task.products - product_out = _test_output_product_consistency(products, preprocessor, - statistics) + product_out = _test_output_product_consistency( + products, preprocessor, statistics + ) assert len(product_out) == len(datasets) * len(statistics) @@ -1757,10 +1815,10 @@ def test_ensemble_statistics(tmp_path, patched_datafinder, session): def test_multi_model_statistics(tmp_path, patched_datafinder, session): - statistics = ['mean', 'max'] - diagnostic = 'diagnostic_name' - variable = 'pr' - preprocessor = 'multi_model_statistics' + statistics = ["mean", "max"] + diagnostic = "diagnostic_name" + variable = "pr" + preprocessor = "multi_model_statistics" content = dedent(f""" preprocessors: @@ -1793,8 +1851,9 @@ def test_multi_model_statistics(tmp_path, patched_datafinder, session): task = next(iter(recipe.tasks)) products = task.products - product_out = _test_output_product_consistency(products, preprocessor, - statistics) + product_out = _test_output_product_consistency( + products, preprocessor, statistics + ) assert len(product_out) == len(statistics) @@ -1803,10 +1862,10 @@ def test_multi_model_statistics(tmp_path, patched_datafinder, session): def test_multi_model_statistics_exclude(tmp_path, patched_datafinder, session): - statistics = ['mean', 'max'] - diagnostic = 'diagnostic_name' - variable = 'pr' - preprocessor = 'multi_model_statistics' + statistics = ["mean", "max"] + diagnostic = "diagnostic_name" + variable = "pr" + preprocessor = "multi_model_statistics" content = dedent(f""" preprocessors: @@ -1843,28 +1902,29 @@ def test_multi_model_statistics_exclude(tmp_path, patched_datafinder, 
session): task = next(iter(recipe.tasks)) products = task.products - product_out = _test_output_product_consistency(products, preprocessor, - statistics) + product_out = _test_output_product_consistency( + products, preprocessor, statistics + ) assert len(product_out) == len(statistics) - assert 'OBS' not in product_out + assert "OBS" not in product_out for id, prods in product_out: - assert id != 'OBS' - assert id == 'CMIP5' + assert id != "OBS" + assert id == "CMIP5" task._initialize_product_provenance() assert next(iter(products)).provenance is not None def test_groupby_combined_statistics(tmp_path, patched_datafinder, session): - diagnostic = 'diagnostic_name' - variable = 'pr' + diagnostic = "diagnostic_name" + variable = "pr" - mm_statistics = ['mean', 'max'] - mm_preprocessor = 'multi_model_statistics' - ens_statistics = ['mean', 'median'] - ens_preprocessor = 'ensemble_statistics' + mm_statistics = ["mean", "max"] + mm_preprocessor = "multi_model_statistics" + ens_statistics = ["mean", "median"] + ens_preprocessor = "ensemble_statistics" - groupby = [ens_preprocessor, 'tag'] + groupby = [ens_preprocessor, "tag"] content = dedent(f""" preprocessors: @@ -1898,7 +1958,7 @@ def test_groupby_combined_statistics(tmp_path, patched_datafinder, session): """) recipe = get_recipe(tmp_path, content, session) - datasets = set([ds['dataset'] for ds in recipe.datasets]) + datasets = set([ds["dataset"] for ds in recipe.datasets]) products = next(iter(recipe.tasks)).products @@ -1915,8 +1975,9 @@ def test_groupby_combined_statistics(tmp_path, patched_datafinder, session): ) assert len(ens_products) == len(datasets) * len(ens_statistics) - assert len( - mm_products) == len(mm_statistics) * len(ens_statistics) * len(groupby) + assert len(mm_products) == len(mm_statistics) * len(ens_statistics) * len( + groupby + ) def test_weighting_landsea_fraction(tmp_path, patched_datafinder, session): @@ -1953,23 +2014,24 @@ def test_weighting_landsea_fraction(tmp_path, patched_datafinder, session): # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name' + TASKSEP + 'gpp' + assert task.name == "diagnostic_name" + TASKSEP + "gpp" # Check weighting assert len(task.products) == 2 for product in task.products: - assert 'weighting_landsea_fraction' in product.settings - settings = product.settings['weighting_landsea_fraction'] + assert "weighting_landsea_fraction" in product.settings + settings = product.settings["weighting_landsea_fraction"] assert len(settings) == 1 - assert settings['area_type'] == 'land' + assert settings["area_type"] == "land" assert len(product.datasets) == 1 dataset = product.datasets[0] assert len(dataset.supplementaries) == 1 - assert dataset.supplementaries[0].facets['short_name'] == 'sftlf' + assert dataset.supplementaries[0].facets["short_name"] == "sftlf" -def test_weighting_landsea_fraction_no_fx(tmp_path, patched_failing_datafinder, - session): +def test_weighting_landsea_fraction_no_fx( + tmp_path, patched_failing_datafinder, session +): content = dedent(""" preprocessors: landfrac_weighting: @@ -1999,8 +2061,9 @@ def test_weighting_landsea_fraction_no_fx(tmp_path, patched_failing_datafinder, get_recipe(tmp_path, content, session) -def test_weighting_landsea_fraction_exclude(tmp_path, patched_datafinder, - session): +def test_weighting_landsea_fraction_exclude( + tmp_path, patched_datafinder, session +): content = dedent(""" preprocessors: landfrac_weighting: @@ -2032,23 +2095,24 @@ def 
test_weighting_landsea_fraction_exclude(tmp_path, patched_datafinder, # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name' + TASKSEP + 'gpp' + assert task.name == "diagnostic_name" + TASKSEP + "gpp" # Check weighting assert len(task.products) == 3 for product in task.products: - if product.attributes['dataset'] != 'TEST': - assert 'weighting_landsea_fraction' not in product.settings + if product.attributes["dataset"] != "TEST": + assert "weighting_landsea_fraction" not in product.settings continue - assert 'weighting_landsea_fraction' in product.settings - settings = product.settings['weighting_landsea_fraction'] + assert "weighting_landsea_fraction" in product.settings + settings = product.settings["weighting_landsea_fraction"] assert len(settings) == 1 - assert 'exclude' not in settings - assert settings['area_type'] == 'land' + assert "exclude" not in settings + assert settings["area_type"] == "land" -def test_weighting_landsea_fraction_exclude_fail(tmp_path, patched_datafinder, - session): +def test_weighting_landsea_fraction_exclude_fail( + tmp_path, patched_datafinder, session +): content = dedent(""" preprocessors: landfrac_weighting: @@ -2079,7 +2143,8 @@ def test_weighting_landsea_fraction_exclude_fail(tmp_path, patched_datafinder, assert str(exc_info.value.failed_tasks[0].message) == ( "Preprocessor 'landfrac_weighting' uses 'alternative_dataset', but " "'alternative_dataset' is not defined for variable 'gpp' of " - "diagnostic 'diagnostic_name'.") + "diagnostic 'diagnostic_name'." + ) def test_area_statistics(tmp_path, patched_datafinder, session): @@ -2114,19 +2179,19 @@ def test_area_statistics(tmp_path, patched_datafinder, session): # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name' + TASKSEP + 'gpp' + assert task.name == "diagnostic_name" + TASKSEP + "gpp" # Check area_statistics assert len(task.products) == 2 for product in task.products: - assert 'area_statistics' in product.settings - settings = product.settings['area_statistics'] + assert "area_statistics" in product.settings + settings = product.settings["area_statistics"] assert len(settings) == 1 - assert settings['operator'] == 'mean' + assert settings["operator"] == "mean" assert len(product.datasets) == 1 dataset = product.datasets[0] assert len(dataset.supplementaries) == 1 - assert dataset.supplementaries[0].facets['short_name'] == 'areacella' + assert dataset.supplementaries[0].facets["short_name"] == "areacella" def test_landmask(tmp_path, patched_datafinder, session): @@ -2161,19 +2226,19 @@ def test_landmask(tmp_path, patched_datafinder, session): # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name' + TASKSEP + 'gpp' + assert task.name == "diagnostic_name" + TASKSEP + "gpp" # Check weighting assert len(task.products) == 2 for product in task.products: - assert 'mask_landsea' in product.settings - settings = product.settings['mask_landsea'] + assert "mask_landsea" in product.settings + settings = product.settings["mask_landsea"] assert len(settings) == 1 - assert settings['mask_out'] == 'sea' + assert settings["mask_out"] == "sea" assert len(product.datasets) == 1 dataset = product.datasets[0] assert len(dataset.supplementaries) == 1 - assert dataset.supplementaries[0].facets['short_name'] == 'sftlf' + assert dataset.supplementaries[0].facets["short_name"] == "sftlf" def test_landmask_no_fx(tmp_path, 
patched_failing_datafinder, session): @@ -2207,15 +2272,15 @@ def test_landmask_no_fx(tmp_path, patched_failing_datafinder, session): # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == 'diagnostic_name' + TASKSEP + 'gpp' + assert task.name == "diagnostic_name" + TASKSEP + "gpp" # Check masking assert len(task.products) == 3 for product in task.products: - assert 'mask_landsea' in product.settings - settings = product.settings['mask_landsea'] + assert "mask_landsea" in product.settings + settings = product.settings["mask_landsea"] assert len(settings) == 1 - assert settings['mask_out'] == 'sea' + assert settings["mask_out"] == "sea" assert len(product.datasets) == 1 dataset = product.datasets[0] assert dataset.supplementaries == [] @@ -2242,8 +2307,10 @@ def test_wrong_project(tmp_path, patched_datafinder, session): - {dataset: CanESM2} scripts: null """) - msg = ("Unable to load CMOR table (project) 'CMIP7' for variable 'tos' " - "with mip 'Omon'") + msg = ( + "Unable to load CMOR table (project) 'CMIP7' for variable 'tos' " + "with mip 'Omon'" + ) with pytest.raises(RecipeError) as wrong_proj: get_recipe(tmp_path, content, session) assert str(wrong_proj.value) == msg @@ -2278,13 +2345,13 @@ def test_multimodel_mask(tmp_path, patched_datafinder, session): # Check generated tasks assert len(recipe.tasks) == 1 task = recipe.tasks.pop() - assert task.name == f'diagnostic_name{TASKSEP}tas' + assert task.name == f"diagnostic_name{TASKSEP}tas" # Check mask_multimodel assert len(task.products) == 3 for product in task.products: - assert 'mask_multimodel' in product.settings - assert product.settings['mask_multimodel'] == {} + assert "mask_multimodel" in product.settings + assert product.settings["mask_multimodel"] == {} def test_obs4mips_case_correct(tmp_path, patched_datafinder, session): @@ -2307,7 +2374,7 @@ def test_obs4mips_case_correct(tmp_path, patched_datafinder, session): """) recipe = get_recipe(tmp_path, content, session) dataset = recipe.datasets[0] - assert dataset['project'] == 'obs4MIPs' + assert dataset["project"] == "obs4MIPs" def test_recipe_run(tmp_path, patched_datafinder, session, mocker): @@ -2324,12 +2391,12 @@ def test_recipe_run(tmp_path, patched_datafinder, session, mocker): - {dataset: BNU-ESM} scripts: null """) - session['download_dir'] = tmp_path / 'download_dir' - session['search_esgf'] = 'when_missing' + session["download_dir"] = tmp_path / "download_dir" + session["search_esgf"] = "when_missing" - mocker.patch.object(esmvalcore._recipe.recipe.esgf, - 'download', - create_autospec=True) + mocker.patch.object( + esmvalcore._recipe.recipe.esgf, "download", create_autospec=True + ) recipe = get_recipe(tmp_path, content, session) @@ -2339,9 +2406,11 @@ def test_recipe_run(tmp_path, patched_datafinder, session, mocker): recipe.run() esmvalcore._recipe.recipe.esgf.download.assert_called_once_with( - set(), session['download_dir']) + set(), session["download_dir"] + ) recipe.tasks.run.assert_called_once_with( - max_parallel_tasks=session['max_parallel_tasks']) + max_parallel_tasks=session["max_parallel_tasks"] + ) recipe.write_filled_recipe.assert_called_once() recipe.write_html_summary.assert_called_once() @@ -2349,15 +2418,15 @@ def test_recipe_run(tmp_path, patched_datafinder, session, mocker): def test_representative_dataset_regular_var(patched_datafinder, session): """Test ``_representative_dataset`` with regular variable.""" variable = { - 'dataset': 'ICON', - 'exp': 'atm_amip-rad_R2B4_r1i1p1f1', - 'frequency': 
'mon', - 'mip': 'Amon', - 'original_short_name': 'tas', - 'project': 'ICON', - 'short_name': 'tas', - 'timerange': '1990/2000', - 'var_type': 'atm_2d_ml', + "dataset": "ICON", + "exp": "atm_amip-rad_R2B4_r1i1p1f1", + "frequency": "mon", + "mip": "Amon", + "original_short_name": "tas", + "project": "ICON", + "short_name": "tas", + "timerange": "1990/2000", + "var_type": "atm_2d_ml", } dataset = Dataset(**variable) dataset.session = session @@ -2365,25 +2434,26 @@ def test_representative_dataset_regular_var(patched_datafinder, session): assert len(datasets) == 1 filename = datasets[0].files[0] path = Path(filename) - assert path.name == 'atm_amip-rad_R2B4_r1i1p1f1_atm_2d_ml_1990_1999.nc' + assert path.name == "atm_amip-rad_R2B4_r1i1p1f1_atm_2d_ml_1990_1999.nc" -@pytest.mark.parametrize('force_derivation', [True, False]) -def test_representative_dataset_derived_var(patched_datafinder, session, - force_derivation): +@pytest.mark.parametrize("force_derivation", [True, False]) +def test_representative_dataset_derived_var( + patched_datafinder, session, force_derivation +): """Test ``_representative_dataset`` with derived variable.""" variable = { - 'dataset': 'ICON', - 'derive': True, - 'exp': 'atm_amip-rad_R2B4_r1i1p1f1', - 'force_derivation': force_derivation, - 'frequency': 'mon', - 'mip': 'Amon', - 'original_short_name': 'alb', - 'project': 'ICON', - 'short_name': 'alb', - 'timerange': '1990/2000', - 'var_type': 'atm_2d_ml', + "dataset": "ICON", + "derive": True, + "exp": "atm_amip-rad_R2B4_r1i1p1f1", + "force_derivation": force_derivation, + "frequency": "mon", + "mip": "Amon", + "original_short_name": "alb", + "project": "ICON", + "short_name": "alb", + "timerange": "1990/2000", + "var_type": "atm_2d_ml", } dataset = Dataset(**variable) dataset.session = session @@ -2391,39 +2461,39 @@ def test_representative_dataset_derived_var(patched_datafinder, session, expected_facets = { # Already present in variable - 'dataset': 'ICON', - 'derive': True, - 'exp': 'atm_amip-rad_R2B4_r1i1p1f1', - 'force_derivation': force_derivation, - 'frequency': 'mon', - 'mip': 'Amon', - 'project': 'ICON', - 'timerange': '1990/2000', + "dataset": "ICON", + "derive": True, + "exp": "atm_amip-rad_R2B4_r1i1p1f1", + "force_derivation": force_derivation, + "frequency": "mon", + "mip": "Amon", + "project": "ICON", + "timerange": "1990/2000", # Added by _add_cmor_info - 'modeling_realm': ['atmos'], - 'units': 'W m-2', + "modeling_realm": ["atmos"], + "units": "W m-2", # Added by _add_extra_facets - 'var_type': 'atm_2d_ml', + "var_type": "atm_2d_ml", } if force_derivation: expected_datasets = [ Dataset( - short_name='rsdscs', - long_name='Surface Downwelling Clear-Sky Shortwave Radiation', - original_short_name='rsdscs', + short_name="rsdscs", + long_name="Surface Downwelling Clear-Sky Shortwave Radiation", + original_short_name="rsdscs", standard_name=( - 'surface_downwelling_shortwave_flux_in_air_assuming_clear_' - 'sky' + "surface_downwelling_shortwave_flux_in_air_assuming_clear_" + "sky" ), **expected_facets, ), Dataset( - short_name='rsuscs', - long_name='Surface Upwelling Clear-Sky Shortwave Radiation', - original_short_name='rsuscs', + short_name="rsuscs", + long_name="Surface Upwelling Clear-Sky Shortwave Radiation", + original_short_name="rsuscs", standard_name=( - 'surface_upwelling_shortwave_flux_in_air_assuming_clear_' - 'sky' + "surface_upwelling_shortwave_flux_in_air_assuming_clear_" + "sky" ), **expected_facets, ), @@ -2439,66 +2509,64 @@ def test_representative_dataset_derived_var(patched_datafinder, 
session, def test_get_derive_input_variables(patched_datafinder, session): """Test ``_get_derive_input_variables``.""" alb_facets = { - 'dataset': 'ICON', - 'derive': True, - 'exp': 'atm_amip-rad_R2B4_r1i1p1f1', - 'force_derivation': True, - 'frequency': 'mon', - 'mip': 'Amon', - 'original_short_name': 'alb', - 'project': 'ICON', - 'short_name': 'alb', - 'timerange': '1990/2000', + "dataset": "ICON", + "derive": True, + "exp": "atm_amip-rad_R2B4_r1i1p1f1", + "force_derivation": True, + "frequency": "mon", + "mip": "Amon", + "original_short_name": "alb", + "project": "ICON", + "short_name": "alb", + "timerange": "1990/2000", } alb = Dataset(**alb_facets) alb.session = session rsdscs_facets = { # Added by get_required - 'short_name': 'rsdscs', + "short_name": "rsdscs", # Already present in variables - 'dataset': 'ICON', - 'derive': True, - 'exp': 'atm_amip-rad_R2B4_r1i1p1f1', - 'force_derivation': True, - 'frequency': 'mon', - 'mip': 'Amon', - 'project': 'ICON', - 'timerange': '1990/2000', + "dataset": "ICON", + "derive": True, + "exp": "atm_amip-rad_R2B4_r1i1p1f1", + "force_derivation": True, + "frequency": "mon", + "mip": "Amon", + "project": "ICON", + "timerange": "1990/2000", # Added by _add_cmor_info - 'standard_name': - 'surface_downwelling_shortwave_flux_in_air_assuming_clear_sky', - 'long_name': 'Surface Downwelling Clear-Sky Shortwave Radiation', - 'modeling_realm': ['atmos'], - 'original_short_name': 'rsdscs', - 'units': 'W m-2', + "standard_name": "surface_downwelling_shortwave_flux_in_air_assuming_clear_sky", + "long_name": "Surface Downwelling Clear-Sky Shortwave Radiation", + "modeling_realm": ["atmos"], + "original_short_name": "rsdscs", + "units": "W m-2", # Added by _add_extra_facets - 'var_type': 'atm_2d_ml', + "var_type": "atm_2d_ml", } rsdscs = Dataset(**rsdscs_facets) rsdscs.session = session rsuscs_facets = { # Added by get_required - 'short_name': 'rsuscs', + "short_name": "rsuscs", # Already present in variables - 'dataset': 'ICON', - 'derive': True, - 'exp': 'atm_amip-rad_R2B4_r1i1p1f1', - 'force_derivation': True, - 'frequency': 'mon', - 'mip': 'Amon', - 'project': 'ICON', - 'timerange': '1990/2000', + "dataset": "ICON", + "derive": True, + "exp": "atm_amip-rad_R2B4_r1i1p1f1", + "force_derivation": True, + "frequency": "mon", + "mip": "Amon", + "project": "ICON", + "timerange": "1990/2000", # Added by _add_cmor_info - 'standard_name': - 'surface_upwelling_shortwave_flux_in_air_assuming_clear_sky', - 'long_name': 'Surface Upwelling Clear-Sky Shortwave Radiation', - 'modeling_realm': ['atmos'], - 'original_short_name': 'rsuscs', - 'units': 'W m-2', + "standard_name": "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", + "long_name": "Surface Upwelling Clear-Sky Shortwave Radiation", + "modeling_realm": ["atmos"], + "original_short_name": "rsuscs", + "units": "W m-2", # Added by _add_extra_facets - 'var_type': 'atm_2d_ml', + "var_type": "atm_2d_ml", } rsuscs = Dataset(**rsuscs_facets) rsuscs.session = session @@ -2508,32 +2576,35 @@ def test_get_derive_input_variables(patched_datafinder, session): TEST_DIAG_SELECTION = [ - (None, {'d1/tas', 'd1/s1', 'd2/s1', 'd3/s1', 'd3/s2', 'd4/s1'}), - ({''}, set()), - ({'wrong_diag/*'}, set()), - ({'d1/*'}, {'d1/tas', 'd1/s1'}), - ({'d2/*'}, {'d1/tas', 'd1/s1', 'd2/s1'}), - ({'d3/*'}, {'d1/tas', 'd1/s1', 'd2/s1', 'd3/s1', 'd3/s2'}), - ({'d4/*'}, {'d1/tas', 'd1/s1', 'd2/s1', 'd3/s2', 'd4/s1'}), - ({'wrong_diag/*', 'd1/*'}, {'d1/tas', 'd1/s1'}), - ({'d1/tas'}, {'d1/tas'}), - ({'d1/tas', 'd2/*'}, {'d1/tas', 'd1/s1', 
'd2/s1'}), - ({'d1/tas', 'd3/s1'}, {'d1/tas', 'd3/s1', 'd1/s1'}), - ({'d4/*', - 'd3/s1'}, {'d1/tas', 'd1/s1', 'd2/s1', 'd3/s1', 'd3/s2', 'd4/s1'}), + (None, {"d1/tas", "d1/s1", "d2/s1", "d3/s1", "d3/s2", "d4/s1"}), + ({""}, set()), + ({"wrong_diag/*"}, set()), + ({"d1/*"}, {"d1/tas", "d1/s1"}), + ({"d2/*"}, {"d1/tas", "d1/s1", "d2/s1"}), + ({"d3/*"}, {"d1/tas", "d1/s1", "d2/s1", "d3/s1", "d3/s2"}), + ({"d4/*"}, {"d1/tas", "d1/s1", "d2/s1", "d3/s2", "d4/s1"}), + ({"wrong_diag/*", "d1/*"}, {"d1/tas", "d1/s1"}), + ({"d1/tas"}, {"d1/tas"}), + ({"d1/tas", "d2/*"}, {"d1/tas", "d1/s1", "d2/s1"}), + ({"d1/tas", "d3/s1"}, {"d1/tas", "d3/s1", "d1/s1"}), + ( + {"d4/*", "d3/s1"}, + {"d1/tas", "d1/s1", "d2/s1", "d3/s1", "d3/s2", "d4/s1"}, + ), ] -@pytest.mark.parametrize('diags_to_run,tasks_run', TEST_DIAG_SELECTION) -def test_diag_selection(tmp_path, patched_datafinder, session, diags_to_run, - tasks_run): +@pytest.mark.parametrize("diags_to_run,tasks_run", TEST_DIAG_SELECTION) +def test_diag_selection( + tmp_path, patched_datafinder, session, diags_to_run, tasks_run +): """Test selection of individual diagnostics via --diagnostics option.""" TAGS.set_tag_values(TAGS_FOR_TESTING) - script = tmp_path / 'diagnostic.py' - script.write_text('') + script = tmp_path / "diagnostic.py" + script.write_text("") if diags_to_run is not None: - session['diagnostics'] = diags_to_run + session["diagnostics"] = diags_to_run content = dedent(""" diagnostics: @@ -2583,7 +2654,7 @@ def test_diag_selection(tmp_path, patched_datafinder, session, diags_to_run, @pytest.mark.parametrize( - 'preproc', ['multi_model_statistics', 'ensemble_statistics'] + "preproc", ["multi_model_statistics", "ensemble_statistics"] ) def test_mm_stats_invalid_arg(preproc, tmp_path, patched_datafinder, session): content = dedent(f""" @@ -2614,7 +2685,7 @@ def test_mm_stats_invalid_arg(preproc, tmp_path, patched_datafinder, session): @pytest.mark.parametrize( - 'preproc', ['multi_model_statistics', 'ensemble_statistics'] + "preproc", ["multi_model_statistics", "ensemble_statistics"] ) def test_mm_stats_missing_arg(preproc, tmp_path, patched_datafinder, session): content = dedent(f""" @@ -2642,7 +2713,7 @@ def test_mm_stats_missing_arg(preproc, tmp_path, patched_datafinder, session): @pytest.mark.parametrize( - 'preproc', ['multi_model_statistics', 'ensemble_statistics'] + "preproc", ["multi_model_statistics", "ensemble_statistics"] ) def test_mm_stats_invalid_stats( preproc, tmp_path, patched_datafinder, session @@ -2681,20 +2752,20 @@ def test_mm_stats_invalid_stats( @pytest.mark.parametrize( - 'statistics', + "statistics", [ - {'invalid_value': 1}, - {'percent': 10, 'invalid_value': 1}, - {'percent': 10, 'weights': False}, - ] + {"invalid_value": 1}, + {"percent": 10, "invalid_value": 1}, + {"percent": 10, "weights": False}, + ], ) @pytest.mark.parametrize( - 'preproc', ['multi_model_statistics', 'ensemble_statistics'] + "preproc", ["multi_model_statistics", "ensemble_statistics"] ) def test_mm_stats_invalid_stat_kwargs( preproc, statistics, tmp_path, patched_datafinder, session ): - statistics['operator'] = 'wpercentile' + statistics["operator"] = "wpercentile" content = dedent(f""" preprocessors: test: @@ -2728,15 +2799,15 @@ def test_mm_stats_invalid_stat_kwargs( @pytest.mark.parametrize( - 'preproc', + "preproc", [ - 'area_statistics', - 'axis_statistics', - 'meridional_statistics', - 'volume_statistics', - 'zonal_statistics', - 'rolling_window_statistics', - ] + "area_statistics", + "axis_statistics", + "meridional_statistics", + 
"volume_statistics", + "zonal_statistics", + "rolling_window_statistics", + ], ) def test_statistics_missing_operator_no_default_fail( preproc, tmp_path, patched_datafinder, session @@ -2767,16 +2838,16 @@ def test_statistics_missing_operator_no_default_fail( @pytest.mark.parametrize( - 'preproc,option', + "preproc,option", [ - ('annual_statistics', ''), - ('climate_statistics', ''), - ('daily_statistics', ''), - ('decadal_statistics', ''), - ('hourly_statistics', 'hours: 1'), - ('monthly_statistics', ''), - ('seasonal_statistics', ''), - ] + ("annual_statistics", ""), + ("climate_statistics", ""), + ("daily_statistics", ""), + ("decadal_statistics", ""), + ("hourly_statistics", "hours: 1"), + ("monthly_statistics", ""), + ("seasonal_statistics", ""), + ], ) def test_statistics_missing_operator_with_default( preproc, option, tmp_path, patched_datafinder, session @@ -2806,27 +2877,27 @@ def test_statistics_missing_operator_with_default( @pytest.mark.parametrize( - 'preproc,preproc_kwargs', + "preproc,preproc_kwargs", [ - ('annual_statistics', {'invalid_value': 1}), - ('area_statistics', {'percent': 10, 'invalid_value': 1}), - ('axis_statistics', {'percent': 10, 'weights': False}), - ('climate_statistics', {'invalid_value': 1}), - ('daily_statistics', {'percent': 10, 'invalid_value': 1}), - ('decadal_statistics', {'percent': 10, 'weights': False}), - ('hourly_statistics', {'invalid_value': 1, 'hours': 2}), - ('meridional_statistics', {'percent': 10, 'invalid_value': 1}), - ('monthly_statistics', {'percent': 10, 'weights': False}), - ('seasonal_statistics', {'invalid_value': 1}), - ('volume_statistics', {'percent': 10, 'weights': False}), - ('zonal_statistics', {'invalid_value': 1}), - ('rolling_window_statistics', {'percent': 10, 'invalid_value': 1}), - ] + ("annual_statistics", {"invalid_value": 1}), + ("area_statistics", {"percent": 10, "invalid_value": 1}), + ("axis_statistics", {"percent": 10, "weights": False}), + ("climate_statistics", {"invalid_value": 1}), + ("daily_statistics", {"percent": 10, "invalid_value": 1}), + ("decadal_statistics", {"percent": 10, "weights": False}), + ("hourly_statistics", {"invalid_value": 1, "hours": 2}), + ("meridional_statistics", {"percent": 10, "invalid_value": 1}), + ("monthly_statistics", {"percent": 10, "weights": False}), + ("seasonal_statistics", {"invalid_value": 1}), + ("volume_statistics", {"percent": 10, "weights": False}), + ("zonal_statistics", {"invalid_value": 1}), + ("rolling_window_statistics", {"percent": 10, "invalid_value": 1}), + ], ) def test_statistics_invalid_kwargs( preproc, preproc_kwargs, tmp_path, patched_datafinder, session ): - preproc_kwargs['operator'] = 'wpercentile' + preproc_kwargs["operator"] = "wpercentile" content = dedent(f""" preprocessors: test: @@ -3019,7 +3090,7 @@ def test_invalid_bias_type(tmp_path, patched_datafinder, session): def test_invalid_builtin_regridding_scheme( - tmp_path, patched_datafinder, session + tmp_path, patched_datafinder, session ): content = dedent(""" preprocessors: @@ -3039,9 +3110,7 @@ def test_invalid_builtin_regridding_scheme( ensemble: r1i1p1} scripts: null """) - msg = ( - "Got invalid built-in regridding scheme 'INVALID', expected one of " - ) + msg = "Got invalid built-in regridding scheme 'INVALID', expected one of " with pytest.raises(RecipeError) as rec_err_exp: get_recipe(tmp_path, content, session) assert str(rec_err_exp.value) == INITIALIZATION_ERROR_MSG @@ -3049,7 +3118,7 @@ def test_invalid_builtin_regridding_scheme( def test_generic_regridding_scheme_no_ref( - tmp_path, 
patched_datafinder, session + tmp_path, patched_datafinder, session ): content = dedent(""" preprocessors: @@ -3081,7 +3150,7 @@ def test_generic_regridding_scheme_no_ref( def test_invalid_generic_regridding_scheme( - tmp_path, patched_datafinder, session + tmp_path, patched_datafinder, session ): content = dedent(""" preprocessors: @@ -3161,9 +3230,7 @@ def test_deprecated_unstructured_nearest_scheme( get_recipe(tmp_path, content, session) -def test_wildcard_derived_var( - tmp_path, patched_failing_datafinder, session -): +def test_wildcard_derived_var(tmp_path, patched_failing_datafinder, session): content = dedent(""" diagnostics: diagnostic_name: @@ -3182,9 +3249,9 @@ def test_wildcard_derived_var( assert len(recipe.datasets) == 1 dataset = recipe.datasets[0] - assert dataset.facets['dataset'] == 'BBB' - assert dataset.facets['institute'] == 'B' - assert dataset.facets['short_name'] == 'swcre' + assert dataset.facets["dataset"] == "BBB" + assert dataset.facets["institute"] == "B" + assert dataset.facets["short_name"] == "swcre" def test_distance_metric_no_ref(tmp_path, patched_datafinder, session): diff --git a/tests/integration/test_citation.py b/tests/integration/test_citation.py index 339cd253a0..195e9ec8af 100644 --- a/tests/integration/test_citation.py +++ b/tests/integration/test_citation.py @@ -1,11 +1,15 @@ """Test _citation.py.""" + import textwrap from prov.model import ProvDocument import esmvalcore -from esmvalcore._citation import (CMIP6_URL_STEM, ESMVALTOOL_PAPER, - _write_citation_files) +from esmvalcore._citation import ( + CMIP6_URL_STEM, + ESMVALTOOL_PAPER, + _write_citation_files, +) from esmvalcore._provenance import ESMVALTOOL_URI_PREFIX @@ -13,35 +17,36 @@ def test_references(tmp_path, monkeypatch): """Test1: references are replaced with bibtex.""" # Create fake provenance provenance = ProvDocument() - provenance.add_namespace('file', uri=ESMVALTOOL_URI_PREFIX + 'file') - provenance.add_namespace('attribute', - uri=ESMVALTOOL_URI_PREFIX + 'attribute') - filename = str(tmp_path / 'output.nc') + provenance.add_namespace("file", uri=ESMVALTOOL_URI_PREFIX + "file") + provenance.add_namespace( + "attribute", uri=ESMVALTOOL_URI_PREFIX + "attribute" + ) + filename = str(tmp_path / "output.nc") attributes = { - 'attribute:references': 'test_tag', - 'attribute:script_file': 'diagnostics.py' + "attribute:references": "test_tag", + "attribute:script_file": "diagnostics.py", } - provenance.entity('file:' + filename, attributes) + provenance.entity("file:" + filename, attributes) # Create fake bibtex references tag file - references_path = tmp_path / 'references' + references_path = tmp_path / "references" references_path.mkdir() - monkeypatch.setattr(esmvalcore._citation.DIAGNOSTICS, 'path', tmp_path) - fake_bibtex_file = references_path / 'test_tag.bibtex' + monkeypatch.setattr(esmvalcore._citation.DIAGNOSTICS, "path", tmp_path) + fake_bibtex_file = references_path / "test_tag.bibtex" fake_bibtex = "Fake bibtex file content\n" fake_bibtex_file.write_text(fake_bibtex) _write_citation_files(filename, provenance) - citation_file = tmp_path / 'output_citation.bibtex' - citation = citation_file.read_text(encoding='utf-8') - assert citation == '\n'.join([ESMVALTOOL_PAPER, fake_bibtex]) + citation_file = tmp_path / "output_citation.bibtex" + citation = citation_file.read_text(encoding="utf-8") + assert citation == "\n".join([ESMVALTOOL_PAPER, fake_bibtex]) def mock_get_response(url): """Mock _get_response() function.""" json_data = False - if url.lower().startswith('https'): - 
json_data = {'titles': ['title is found']} + if url.lower().startswith("https"): + json_data = {"titles": ["title is found"]} return json_data @@ -49,31 +54,33 @@ def test_cmip6_data_citation(tmp_path, monkeypatch): """Test2: CMIP6 citation info is retrieved from ES-DOC.""" # Create fake provenance provenance = ProvDocument() - provenance.add_namespace('file', uri=ESMVALTOOL_URI_PREFIX + 'file') - provenance.add_namespace('attribute', - uri=ESMVALTOOL_URI_PREFIX + 'attribute') + provenance.add_namespace("file", uri=ESMVALTOOL_URI_PREFIX + "file") + provenance.add_namespace( + "attribute", uri=ESMVALTOOL_URI_PREFIX + "attribute" + ) attributes = { - 'attribute:mip_era': 'CMIP6', - 'attribute:activity_id': 'activity', - 'attribute:institution_id': 'institution', - 'attribute:source_id': 'source', - 'attribute:experiment_id': 'experiment', + "attribute:mip_era": "CMIP6", + "attribute:activity_id": "activity", + "attribute:institution_id": "institution", + "attribute:source_id": "source", + "attribute:experiment_id": "experiment", } - filename = str(tmp_path / 'output.nc') - provenance.entity('file:' + filename, attributes) + filename = str(tmp_path / "output.nc") + provenance.entity("file:" + filename, attributes) - monkeypatch.setattr(esmvalcore._citation, '_get_response', - mock_get_response) + monkeypatch.setattr( + esmvalcore._citation, "_get_response", mock_get_response + ) _write_citation_files(filename, provenance) - citation_file = tmp_path / 'output_citation.bibtex' + citation_file = tmp_path / "output_citation.bibtex" # Create fake bibtex entry - url = 'url not found' - title = 'title is found' - publisher = 'publisher not found' - year = 'publicationYear not found' - authors = 'creators not found' - doi = 'doi not found' + url = "url not found" + title = "title is found" + publisher = "publisher not found" + year = "publicationYear not found" + authors = "creators not found" + doi = "doi not found" fake_bibtex_entry = textwrap.dedent(f""" @misc{{{url}, \turl = {{{url}}}, @@ -84,34 +91,38 @@ def test_cmip6_data_citation(tmp_path, monkeypatch): \tdoi = {{{doi}}}, }} """).lstrip() - assert citation_file.read_text(encoding='utf-8') == '\n'.join( - [ESMVALTOOL_PAPER, fake_bibtex_entry]) + assert citation_file.read_text(encoding="utf-8") == "\n".join( + [ESMVALTOOL_PAPER, fake_bibtex_entry] + ) def test_cmip6_data_citation_url(tmp_path): """Test3: CMIP6 info_url is retrieved from ES-DOC.""" # Create fake provenance provenance = ProvDocument() - provenance.add_namespace('file', uri=ESMVALTOOL_URI_PREFIX + 'file') - provenance.add_namespace('attribute', - uri=ESMVALTOOL_URI_PREFIX + 'attribute') + provenance.add_namespace("file", uri=ESMVALTOOL_URI_PREFIX + "file") + provenance.add_namespace( + "attribute", uri=ESMVALTOOL_URI_PREFIX + "attribute" + ) attributes = { - 'attribute:mip_era': 'CMIP6', - 'attribute:activity_id': 'activity', - 'attribute:institution_id': 'institution', - 'attribute:source_id': 'source', - 'attribute:experiment_id': 'experiment', + "attribute:mip_era": "CMIP6", + "attribute:activity_id": "activity", + "attribute:institution_id": "institution", + "attribute:source_id": "source", + "attribute:experiment_id": "experiment", } - filename = str(tmp_path / 'output.nc') - provenance.entity('file:' + filename, attributes) + filename = str(tmp_path / "output.nc") + provenance.entity("file:" + filename, attributes) _write_citation_files(filename, provenance) - citation_url = tmp_path / 'output_data_citation_info.txt' + citation_url = tmp_path / 
"output_data_citation_info.txt" # Create fake info url - fake_url_prefix = '.'.join(attributes.values()) - text = '\n'.join([ - "Follow the links below to find more information about CMIP6 data:", - f"- {CMIP6_URL_STEM}/cmip6?input={fake_url_prefix}", - '', - ]) - assert citation_url.read_text(encoding='utf-8') == text + fake_url_prefix = ".".join(attributes.values()) + text = "\n".join( + [ + "Follow the links below to find more information about CMIP6 data:", + f"- {CMIP6_URL_STEM}/cmip6?input={fake_url_prefix}", + "", + ] + ) + assert citation_url.read_text(encoding="utf-8") == text diff --git a/tests/integration/test_deprecated_config.py b/tests/integration/test_deprecated_config.py index 8dec085134..cf50f2ea4c 100644 --- a/tests/integration/test_deprecated_config.py +++ b/tests/integration/test_deprecated_config.py @@ -9,16 +9,16 @@ def test_no_deprecation_default_cfg(): """Test that default config does not raise any deprecation warnings.""" with warnings.catch_warnings(): - warnings.simplefilter('error', category=ESMValCoreDeprecationWarning) + warnings.simplefilter("error", category=ESMValCoreDeprecationWarning) CFG.reload() - CFG.start_session('my_session') + CFG.start_session("my_session") def test_no_deprecation_user_cfg(): """Test that user config does not raise any deprecation warnings.""" - config_file = Path(esmvalcore.__file__).parent / 'config-user.yml' + config_file = Path(esmvalcore.__file__).parent / "config-user.yml" with warnings.catch_warnings(): - warnings.simplefilter('error', category=ESMValCoreDeprecationWarning) + warnings.simplefilter("error", category=ESMValCoreDeprecationWarning) cfg = Config(CFG.copy()) cfg.load_from_file(config_file) - cfg.start_session('my_session') + cfg.start_session("my_session") diff --git a/tests/integration/test_diagnostic_run.py b/tests/integration/test_diagnostic_run.py index 7816508458..e66cd925c2 100644 --- a/tests/integration/test_diagnostic_run.py +++ b/tests/integration/test_diagnostic_run.py @@ -1,4 +1,5 @@ """Test diagnostic script runs.""" + import contextlib import shutil import sys @@ -23,24 +24,24 @@ def get_distributed_client(): monkeypatch.setattr( esmvalcore._task, - 'get_distributed_client', + "get_distributed_client", get_distributed_client, ) def write_config_user_file(dirname): - config_file = dirname / 'config-user.yml' + config_file = dirname / "config-user.yml" cfg = { - 'output_dir': str(dirname / 'output_dir'), - 'auxiliary_data_dir': str(dirname / 'extra_data'), - 'rootpath': { - 'default': str(dirname / 'input_dir'), + "output_dir": str(dirname / "output_dir"), + "auxiliary_data_dir": str(dirname / "extra_data"), + "rootpath": { + "default": str(dirname / "input_dir"), }, - 'drs': { - 'CMIP5': 'BADC', + "drs": { + "CMIP5": "BADC", }, - 'log_level': 'debug', - 'profile_diagnostic': False, + "log_level": "debug", + "profile_diagnostic": False, } config_file.write_text(yaml.safe_dump(cfg, encoding=None)) return str(config_file) @@ -56,27 +57,26 @@ def arguments(*args): def check(result_file): """Check the results.""" - result = yaml.safe_load(result_file.read_text(encoding='utf-8')) + result = yaml.safe_load(result_file.read_text(encoding="utf-8")) required_keys = { - 'input_files', - 'log_level', - 'plot_dir', - 'run_dir', - 'work_dir', + "input_files", + "log_level", + "plot_dir", + "run_dir", + "work_dir", } missing = required_keys - set(result) assert not missing unwanted_keys = [ - 'profile_diagnostic', + "profile_diagnostic", ] for unwanted_key in unwanted_keys: assert unwanted_key not in result SCRIPTS = 
{ - 'diagnostic.py': - dedent(""" + "diagnostic.py": dedent(""" import yaml import shutil @@ -85,8 +85,7 @@ def check(result_file): shutil.copy("settings.yml", settings["setting_name"]) """), - 'diagnostic.ncl': - dedent(""" + "diagnostic.ncl": dedent(""" begin print("INFO Loading settings from " + getenv("settings")) loadscript("$settings") @@ -101,8 +100,7 @@ def check(result_file): system("echo '" + result + "' > " + diag_script_info@setting_name) """), - 'diagnostic.R': - dedent(""" + "diagnostic.R": dedent(""" library(yaml) args <- commandArgs(trailingOnly = TRUE) @@ -130,39 +128,46 @@ def check(result_file): def interpreter_not_installed(script): """Check if an interpreter is installed for script.""" interpreters = { - '.jl': 'julia', - '.ncl': 'ncl', - '.py': 'python', - '.R': 'Rscript', + ".jl": "julia", + ".ncl": "ncl", + ".py": "python", + ".R": "Rscript", } ext = Path(script).suffix interpreter = interpreters[ext] return shutil.which(interpreter) is None -@pytest.mark.parametrize('script_file, script', [ - pytest.param( - script_file, - script, - marks=[ - pytest.mark.installation, - pytest.mark.xfail(interpreter_not_installed(script_file), - run=False, - reason="Interpreter not available"), - ], - ) for script_file, script in SCRIPTS.items() if script_file != 'null' -]) +@pytest.mark.parametrize( + "script_file, script", + [ + pytest.param( + script_file, + script, + marks=[ + pytest.mark.installation, + pytest.mark.xfail( + interpreter_not_installed(script_file), + run=False, + reason="Interpreter not available", + ), + ], + ) + for script_file, script in SCRIPTS.items() + if script_file != "null" + ], +) def test_diagnostic_run(tmp_path, script_file, script): - - recipe_file = tmp_path / 'recipe_test.yml' + recipe_file = tmp_path / "recipe_test.yml" script_file = tmp_path / script_file - result_file = tmp_path / 'result.yml' + result_file = tmp_path / "result.yml" # Write script to file script_file.write_text(str(script)) # Create recipe - recipe = dedent(""" + recipe = dedent( + """ documentation: title: Recipe without data description: Recipe with no data. 
@@ -174,7 +179,8 @@ def test_diagnostic_run(tmp_path, script_file, script): script_name: script: {} setting_name: {} - """.format(script_file, result_file)) + """.format(script_file, result_file) + ) recipe_file.write_text(str(recipe)) # ensure that tags are cleared @@ -182,11 +188,11 @@ def test_diagnostic_run(tmp_path, script_file, script): config_user_file = write_config_user_file(tmp_path) with arguments( - 'esmvaltool', - 'run', - '--config_file', - config_user_file, - str(recipe_file), + "esmvaltool", + "run", + "--config_file", + config_user_file, + str(recipe_file), ): run() diff --git a/tests/integration/test_local.py b/tests/integration/test_local.py index 74830130c0..d67116ddcb 100644 --- a/tests/integration/test_local.py +++ b/tests/integration/test_local.py @@ -1,4 +1,5 @@ """Tests for `esmvalcore.local`.""" + import os import pprint from pathlib import Path @@ -10,9 +11,10 @@ from esmvalcore.local import LocalFile, _get_output_file, find_files # Load test configuration -with open(os.path.join(os.path.dirname(__file__), - 'data_finder.yml'), - encoding='utf-8') as file: +with open( + os.path.join(os.path.dirname(__file__), "data_finder.yml"), + encoding="utf-8", +) as file: CONFIG = yaml.safe_load(file) @@ -20,9 +22,9 @@ def print_path(path): """Print path.""" txt = path if os.path.isdir(path): - txt += '/' + txt += "/" if os.path.islink(path): - txt += ' -> ' + os.readlink(path) + txt += " -> " + os.readlink(path) print(txt) @@ -42,7 +44,7 @@ def create_file(filename): if not os.path.exists(dirname): os.makedirs(dirname) - with open(filename, 'a', encoding='utf-8'): + with open(filename, "a", encoding="utf-8"): pass @@ -52,15 +54,15 @@ def create_tree(path, filenames=None, symlinks=None): create_file(os.path.join(path, filename)) for symlink in symlinks or []: - link_name = os.path.join(path, symlink['link_name']) - os.symlink(symlink['target'], link_name) + link_name = os.path.join(path, symlink["link_name"]) + os.symlink(symlink["target"], link_name) -@pytest.mark.parametrize('cfg', CONFIG['get_output_file']) +@pytest.mark.parametrize("cfg", CONFIG["get_output_file"]) def test_get_output_file(cfg): """Test getting output name for preprocessed files.""" - output_file = _get_output_file(cfg['variable'], cfg['preproc_dir']) - expected = Path(cfg['output_file']) + output_file = _get_output_file(cfg["variable"], cfg["preproc_dir"]) + expected = Path(cfg["output_file"]) assert output_file == expected @@ -73,23 +75,26 @@ def root(tmp_path): tree(dirname) -@pytest.mark.parametrize('cfg', CONFIG['get_input_filelist']) +@pytest.mark.parametrize("cfg", CONFIG["get_input_filelist"]) def test_find_files(monkeypatch, root, cfg): """Test retrieving input filelist.""" - print(f"Testing DRS {cfg['drs']} with variable:\n", - pprint.pformat(cfg['variable'])) - project = cfg['variable']['project'] - monkeypatch.setitem(CFG, 'drs', {project: cfg['drs']}) - monkeypatch.setitem(CFG, 'rootpath', {project: root}) - create_tree(root, cfg.get('available_files'), - cfg.get('available_symlinks')) + print( + f"Testing DRS {cfg['drs']} with variable:\n", + pprint.pformat(cfg["variable"]), + ) + project = cfg["variable"]["project"] + monkeypatch.setitem(CFG, "drs", {project: cfg["drs"]}) + monkeypatch.setitem(CFG, "rootpath", {project: root}) + create_tree( + root, cfg.get("available_files"), cfg.get("available_symlinks") + ) # Find files - input_filelist, globs = find_files(debug=True, **cfg['variable']) + input_filelist, globs = find_files(debug=True, **cfg["variable"]) # Test result - ref_files = 
[Path(root, file) for file in cfg['found_files']] + ref_files = [Path(root, file) for file in cfg["found_files"]] ref_globs = [ - Path(root, d, f) for d in cfg['dirs'] for f in cfg['file_patterns'] + Path(root, d, f) for d in cfg["dirs"] for f in cfg["file_patterns"] ] assert [Path(f) for f in input_filelist] == sorted(ref_files) assert [Path(g) for g in globs] == sorted(ref_globs) @@ -97,20 +102,21 @@ def test_find_files_with_facets(monkeypatch, root): """Test that a LocalFile with populated `facets` is returned.""" - for cfg in CONFIG['get_input_filelist']: - if cfg['drs'] != 'default': + for cfg in CONFIG["get_input_filelist"]: + if cfg["drs"] != "default": break - project = cfg['variable']['project'] - monkeypatch.setitem(CFG, 'drs', {project: cfg['drs']}) - monkeypatch.setitem(CFG, 'rootpath', {project: root}) + project = cfg["variable"]["project"] + monkeypatch.setitem(CFG, "drs", {project: cfg["drs"]}) + monkeypatch.setitem(CFG, "rootpath", {project: root}) - create_tree(root, cfg.get('available_files'), - cfg.get('available_symlinks')) + create_tree( + root, cfg.get("available_files"), cfg.get("available_symlinks") + ) # Find files - input_filelist = find_files(**cfg['variable']) - ref_files = [Path(root, file) for file in cfg['found_files']] + input_filelist = find_files(**cfg["variable"]) + ref_files = [Path(root, file) for file in cfg["found_files"]] assert sorted([Path(f) for f in input_filelist]) == sorted(ref_files) assert isinstance(input_filelist[0], LocalFile) assert input_filelist[0].facets diff --git a/tests/integration/test_main.py index 94d209ffb5..e0838fd3e2 100644 --- a/tests/integration/test_main.py +++ b/tests/integration/test_main.py @@ -2,6 +2,7 @@ Includes a context manager to temporarily modify sys.argv """ + import contextlib import copy import functools @@ -23,7 +24,7 @@ def wrapper(f): @functools.wraps(f) def empty(*args, **kwargs): if kwargs: - raise ValueError(f'Parameters not supported: {kwargs}') + raise ValueError(f"Parameters not supported: {kwargs}") return True return empty @@ -39,25 +40,25 @@ def arguments(*args): def test_setargs(): original = copy.deepcopy(sys.argv) - with arguments('testing', 'working', 'with', 'sys.argv'): - assert sys.argv == ['testing', 'working', 'with', 'sys.argv'] + with arguments("testing", "working", "with", "sys.argv"): + assert sys.argv == ["testing", "working", "with", "sys.argv"] assert sys.argv == original -@patch('esmvalcore._main.ESMValTool.version', new=wrapper(ESMValTool.version)) +@patch("esmvalcore._main.ESMValTool.version", new=wrapper(ESMValTool.version)) def test_version(): """Test version command.""" - with arguments('esmvaltool', 'version'): + with arguments("esmvaltool", "version"): run() - with arguments('esmvaltool', 'version', '--extra_parameter=asterisk'): + with arguments("esmvaltool", "version", "--extra_parameter=asterisk"): with pytest.raises(FireExit): run() -@patch('esmvalcore._main.ESMValTool.run', new=wrapper(ESMValTool.run)) +@patch("esmvalcore._main.ESMValTool.run", new=wrapper(ESMValTool.run)) def test_run(): """Test run command.""" - with arguments('esmvaltool', 'run', 'recipe.yml'): + with arguments("esmvaltool", "run", "recipe.yml"): run() @@ -79,143 +80,166 @@ def test_empty_run(tmp_path): """) recipe_file.write_text(content) Config.get_config_user(path=tmp_path) - log_dir = f'{tmp_path}/esmvaltool_output' + log_dir = f"{tmp_path}/esmvaltool_output" config_file = f"{tmp_path}/config-user.yml" - with
open(config_file, 'r+', encoding='utf-8') as file: + with open(config_file, "r+", encoding="utf-8") as file: config = yaml.safe_load(file) - config['output_dir'] = log_dir + config["output_dir"] = log_dir yaml.safe_dump(config, file, sort_keys=False) with pytest.raises(RecipeError) as exc: ESMValTool().run( - recipe_file, config_file=f"{tmp_path}/config-user.yml") - assert str(exc.value) == 'The given recipe does not have any diagnostic.' - log_file = os.path.join(log_dir, - os.listdir(log_dir)[0], 'run', 'main_log.txt') + recipe_file, config_file=f"{tmp_path}/config-user.yml" + ) + assert str(exc.value) == "The given recipe does not have any diagnostic." + log_file = os.path.join( + log_dir, os.listdir(log_dir)[0], "run", "main_log.txt" + ) filled_recipe = os.path.exists( - log_dir + '/' + os.listdir(log_dir)[0] + '/run/recipe_filled.yml') + log_dir + "/" + os.listdir(log_dir)[0] + "/run/recipe_filled.yml" + ) shutil.rmtree(log_dir) assert log_file assert not filled_recipe -@patch('esmvalcore._main.ESMValTool.run', new=wrapper(ESMValTool.run)) +@patch("esmvalcore._main.ESMValTool.run", new=wrapper(ESMValTool.run)) def test_run_with_config(): - with arguments('esmvaltool', 'run', 'recipe.yml', '--config_file', - 'config.yml'): + with arguments( + "esmvaltool", "run", "recipe.yml", "--config_file", "config.yml" + ): run() -@patch('esmvalcore._main.ESMValTool.run', new=wrapper(ESMValTool.run)) +@patch("esmvalcore._main.ESMValTool.run", new=wrapper(ESMValTool.run)) def test_run_with_max_years(): - with arguments('esmvaltool', 'run', 'recipe.yml', - '--config_file=config.yml', '--max_years=2'): + with arguments( + "esmvaltool", + "run", + "recipe.yml", + "--config_file=config.yml", + "--max_years=2", + ): run() -@patch('esmvalcore._main.ESMValTool.run', new=wrapper(ESMValTool.run)) +@patch("esmvalcore._main.ESMValTool.run", new=wrapper(ESMValTool.run)) def test_run_with_max_datasets(): - with arguments('esmvaltool', 'run', 'recipe.yml', '--max_datasets=2'): + with arguments("esmvaltool", "run", "recipe.yml", "--max_datasets=2"): run() -@patch('esmvalcore._main.ESMValTool.run', new=wrapper(ESMValTool.run)) +@patch("esmvalcore._main.ESMValTool.run", new=wrapper(ESMValTool.run)) def test_run_with_search_esgf(): - with arguments('esmvaltool', 'run', 'recipe.yml', '--search_esgf=always'): + with arguments("esmvaltool", "run", "recipe.yml", "--search_esgf=always"): run() -@patch('esmvalcore._main.ESMValTool.run', new=wrapper(ESMValTool.run)) +@patch("esmvalcore._main.ESMValTool.run", new=wrapper(ESMValTool.run)) def test_run_with_check_level(): - with arguments('esmvaltool', 'run', 'recipe.yml', '--check_level=default'): + with arguments("esmvaltool", "run", "recipe.yml", "--check_level=default"): run() -@patch('esmvalcore._main.ESMValTool.run', new=wrapper(ESMValTool.run)) +@patch("esmvalcore._main.ESMValTool.run", new=wrapper(ESMValTool.run)) def test_run_with_skip_nonexistent(): - with arguments('esmvaltool', 'run', 'recipe.yml', - '--skip_nonexistent=True'): + with arguments( + "esmvaltool", "run", "recipe.yml", "--skip_nonexistent=True" + ): run() -@patch('esmvalcore._main.ESMValTool.run', new=wrapper(ESMValTool.run)) +@patch("esmvalcore._main.ESMValTool.run", new=wrapper(ESMValTool.run)) def test_run_with_diagnostics(): - with arguments('esmvaltool', 'run', 'recipe.yml', '--diagnostics=[badt]'): + with arguments("esmvaltool", "run", "recipe.yml", "--diagnostics=[badt]"): run() -@patch('esmvalcore._main.ESMValTool.run', new=wrapper(ESMValTool.run)) +@patch("esmvalcore._main.ESMValTool.run", 
new=wrapper(ESMValTool.run)) def test_run_fails_with_other_params(): - with arguments('esmvaltool', 'run', 'recipe.yml', '--extra_param=dfa'): + with arguments("esmvaltool", "run", "recipe.yml", "--extra_param=dfa"): with pytest.raises(SystemExit): run() def test_recipes_get(tmp_path, monkeypatch): """Test recipes get command.""" - src_recipe = tmp_path / 'recipe.yml' + src_recipe = tmp_path / "recipe.yml" src_recipe.touch() - tgt_dir = tmp_path / 'test' + tgt_dir = tmp_path / "test" tgt_dir.mkdir() monkeypatch.chdir(tgt_dir) - with arguments('esmvaltool', 'recipes', 'get', str(src_recipe)): + with arguments("esmvaltool", "recipes", "get", str(src_recipe)): run() - assert (tgt_dir / 'recipe.yml').is_file() + assert (tgt_dir / "recipe.yml").is_file() -@patch('esmvalcore._main.Recipes.list', new=wrapper(Recipes.list)) +@patch("esmvalcore._main.Recipes.list", new=wrapper(Recipes.list)) def test_recipes_list(): """Test recipes list command.""" - with arguments('esmvaltool', 'recipes', 'list'): + with arguments("esmvaltool", "recipes", "list"): run() -@patch('esmvalcore._main.Recipes.list', new=wrapper(Recipes.list)) +@patch("esmvalcore._main.Recipes.list", new=wrapper(Recipes.list)) def test_recipes_list_do_not_admit_parameters(): """Test that recipes list does not accept parameters.""" - with arguments('esmvaltool', 'recipes', 'list', 'parameter'): + with arguments("esmvaltool", "recipes", "list", "parameter"): with pytest.raises(FireExit): run() -@patch('esmvalcore._main.Config.get_config_developer', - new=wrapper(Config.get_config_developer)) +@patch( + "esmvalcore._main.Config.get_config_developer", + new=wrapper(Config.get_config_developer), +) def test_get_config_developer(): """Test config get_config_developer command.""" - with arguments('esmvaltool', 'config', 'get_config_developer'): + with arguments("esmvaltool", "config", "get_config_developer"): run() -@patch('esmvalcore._main.Config.get_config_user', - new=wrapper(Config.get_config_user)) +@patch( + "esmvalcore._main.Config.get_config_user", + new=wrapper(Config.get_config_user), +) def test_get_config_user(): """Test config get_config_user command.""" - with arguments('esmvaltool', 'config', 'get_config_user'): + with arguments("esmvaltool", "config", "get_config_user"): run() def test_get_config_user_path(tmp_path): """Test config get_config_user command with --path.""" - with arguments('esmvaltool', 'config', 'get_config_user', - f'--path={tmp_path}'): + with arguments( + "esmvaltool", "config", "get_config_user", f"--path={tmp_path}" + ): run() - assert (tmp_path / 'config-user.yml').is_file() + assert (tmp_path / "config-user.yml").is_file() def test_get_config_user_overwrite(tmp_path): """Test config get_config_user command with --overwrite.""" - config_user = tmp_path / 'config-user.yml' + config_user = tmp_path / "config-user.yml" config_user.touch() - with arguments('esmvaltool', 'config', 'get_config_user', - f'--path={tmp_path}', '--overwrite'): + with arguments( + "esmvaltool", + "config", + "get_config_user", + f"--path={tmp_path}", + "--overwrite", + ): run() -@patch('esmvalcore._main.Config.get_config_user', - new=wrapper(Config.get_config_user)) +@patch( + "esmvalcore._main.Config.get_config_user", + new=wrapper(Config.get_config_user), +) def test_get_config_user_bad_option_fails(): """Test that config get_config_user rejects a bad option.""" - with arguments('esmvaltool', 'config', 'get_config_user', - '--bad_option=path'): + with arguments( + "esmvaltool", "config", "get_config_user", "--bad_option=path" ): with pytest.raises(FireExit): run() diff --git
a/tests/integration/test_provenance.py +++ b/tests/integration/test_provenance.py @@ -3,7 +3,7 @@ def get_file_record(prov, filename): - records = prov.get_record(f'file:{filename}') + records = prov.get_record(f"file:{filename}") assert records return records[0] @@ -19,7 +19,7 @@ def check_provenance(product): def check_product_wasderivedfrom(product): """Check that product.filename was derived from product._ancestors.""" - print('checking provenance of file', product.filename) + print("checking provenance of file", product.filename) prov = product.provenance def get_identifier(filename): @@ -35,13 +35,14 @@ def get_identifier(filename): for record in relations: if input_identifier == record.get_attribute(PROV_ATTR_USED_ENTITY): assert identifier == record.get_attribute( - PROV_ATTR_GENERATED_ENTITY) + PROV_ATTR_GENERATED_ENTITY + ) break else: assert False if not product._ancestors: - assert 'tracking_id' in product.attributes + assert "tracking_id" in product.attributes else: for ancestor in product._ancestors: check_product_wasderivedfrom(ancestor) diff --git a/tests/integration/test_task.py b/tests/integration/test_task.py index 7ea9964585..2fb56b2cc4 100644 --- a/tests/integration/test_task.py +++ b/tests/integration/test_task.py @@ -6,6 +6,7 @@ from multiprocessing.pool import ThreadPool import pytest +import yaml import esmvalcore from esmvalcore._task import ( @@ -20,13 +21,13 @@ class MockBaseTask(BaseTask): - def _run(self, input_files): tmp_path = self._tmp_path output_file = tmp_path / self.name - msg = ('running {} in thread {}, using input {}, generating {}'.format( - self.name, os.getpid(), input_files, output_file)) + msg = "running {} in thread {}, using input {}, generating {}".format( + self.name, os.getpid(), input_files, output_file + ) print(msg) # Check that the output is created just once @@ -50,9 +51,9 @@ def example_tasks(tmp_path): tasks = TaskSet() for i in range(3): task = MockBaseTask( - name=f'task{i}', + name=f"task{i}", ancestors=[ - MockBaseTask(name=f'task{i}-ancestor{j}') for j in range(3) + MockBaseTask(name=f"task{i}-ancestor{j}") for j in range(3) ], ) for task0 in task.flatten(): @@ -73,22 +74,26 @@ def get_distributed_client(): return get_distributed_client -@pytest.mark.parametrize(['mpmethod', 'max_parallel_tasks'], [ - ('fork', 1), - ('fork', 2), - ('fork', 15), - ('fork', None), - ('spawn', 2), -]) +@pytest.mark.parametrize( + ["mpmethod", "max_parallel_tasks"], + [ + ("fork", 1), + ("fork", 2), + ("fork", 15), + ("fork", None), + ("spawn", 2), + ], +) def test_run_tasks(monkeypatch, max_parallel_tasks, example_tasks, mpmethod): """Check that tasks are run correctly.""" monkeypatch.setattr( esmvalcore._task, - 'get_distributed_client', + "get_distributed_client", get_distributed_client_mock(None), ) - monkeypatch.setattr(esmvalcore._task, 'Pool', - multiprocessing.get_context(mpmethod).Pool) + monkeypatch.setattr( + esmvalcore._task, "Pool", multiprocessing.get_context(mpmethod).Pool + ) example_tasks.run(max_parallel_tasks=max_parallel_tasks) for task in example_tasks: @@ -102,49 +107,52 @@ def test_diag_task_updated_with_address(monkeypatch, mocker, tmp_path): client = mocker.Mock() monkeypatch.setattr( esmvalcore._task, - 'get_distributed_client', + "get_distributed_client", get_distributed_client_mock(client), ) # Create a task - mocker.patch.object(DiagnosticTask, '_initialize_cmd') + mocker.patch.object(DiagnosticTask, "_initialize_cmd") task = DiagnosticTask( - script='test.py', - settings={'run_dir': tmp_path / 'run'}, - 
output_dir=tmp_path / 'work', + script="test.py", + settings={"run_dir": tmp_path / "run"}, + output_dir=tmp_path / "work", ) # Create a taskset - mocker.patch.object(TaskSet, '_run_sequential') + mocker.patch.object(TaskSet, "_run_sequential") tasks = TaskSet() tasks.add(task) tasks.run(max_parallel_tasks=1) # Check that the scheduler address was added to the # diagnostic task settings. - assert 'scheduler_address' in task.settings - assert task.settings['scheduler_address'] is client.scheduler.address - - -@pytest.mark.parametrize('runner', [ - TaskSet._run_sequential, - partial( - TaskSet._run_parallel, - scheduler_address=None, - max_parallel_tasks=1, - ), -]) + assert "scheduler_address" in task.settings + assert task.settings["scheduler_address"] is client.scheduler.address + + +@pytest.mark.parametrize( + "runner", + [ + TaskSet._run_sequential, + partial( + TaskSet._run_parallel, + scheduler_address=None, + max_parallel_tasks=1, + ), + ], +) def test_runner_uses_priority(monkeypatch, runner, example_tasks): """Check that the runner tries to respect task priority.""" order = [] def _run(self, input_files): - print(f'running task {self.name} with priority {self.priority}') + print(f"running task {self.name} with priority {self.priority}") order.append(self.priority) - return [f'{self.name}_test.nc'] + return [f"{self.name}_test.nc"] - monkeypatch.setattr(MockBaseTask, '_run', _run) - monkeypatch.setattr(esmvalcore._task, 'Pool', ThreadPool) + monkeypatch.setattr(MockBaseTask, "_run", _run) + monkeypatch.setattr(esmvalcore._task, "Pool", ThreadPool) runner(example_tasks) print(order) @@ -152,10 +160,10 @@ def _run(self, input_files): assert order == sorted(order) -@pytest.mark.parametrize('address', [None, 'localhost:1234']) +@pytest.mark.parametrize("address", [None, "localhost:1234"]) def test_run_task(mocker, address): # Set up mock Dask distributed client - mocker.patch.object(esmvalcore._task, 'Client') + mocker.patch.object(esmvalcore._task, "Client") task = mocker.create_autospec(DiagnosticTask, instance=True) task.products = mocker.Mock() @@ -170,36 +178,36 @@ def test_run_task(mocker, address): def test_py2ncl(): """Test for _py2ncl func.""" - ncl_text = _py2ncl(None, 'tas') - assert ncl_text == 'tas = _Missing' - ncl_text = _py2ncl('cow', 'tas') + ncl_text = _py2ncl(None, "tas") + assert ncl_text == "tas = _Missing" + ncl_text = _py2ncl("cow", "tas") assert ncl_text == 'tas = "cow"' - ncl_text = _py2ncl([1, 2], 'tas') - assert ncl_text == 'tas = (/1, 2/)' - ncl_text = _py2ncl({'cow': 22}, 'tas') - assert ncl_text == 'tas = True\ntas@cow = 22\n' + ncl_text = _py2ncl([1, 2], "tas") + assert ncl_text == "tas = (/1, 2/)" + ncl_text = _py2ncl({"cow": 22}, "tas") + assert ncl_text == "tas = True\ntas@cow = 22\n" with pytest.raises(ValueError) as ex_err: - _py2ncl([1, "cow"], 'tas') - assert 'NCL array cannot be mixed type:' in str(ex_err.value) + _py2ncl([1, "cow"], "tas") + assert "NCL array cannot be mixed type:" in str(ex_err.value) with pytest.raises(ValueError) as ex_err: _py2ncl({"a": {"cow": 22}}) - assert 'NCL does not support nested dicts:' in str(ex_err.value) + assert "NCL does not support nested dicts:" in str(ex_err.value) def _get_single_base_task(): """Test BaseTask basic attributes.""" task = BaseTask( - name='task0', - ancestors=[BaseTask(name=f'task0-ancestor{j}') for j in range(2)], + name="task0", + ancestors=[BaseTask(name=f"task0-ancestor{j}") for j in range(2)], ) return task def test_base_task_names(): task = _get_single_base_task() - assert task.name == 
'task0' + assert task.name == "task0" ancestor_names = [anc.name for anc in task.ancestors] - assert ancestor_names == ['task0-ancestor0', 'task0-ancestor1'] + assert ancestor_names == ["task0-ancestor0", "task0-ancestor1"] def test_individual_base_task_attrs(): @@ -212,16 +220,16 @@ def _get_single_diagnostic_task(tmp_path, diag_script, write_diag=True): """Assemble a simple DiagnosticTask object.""" - diag_output_dir = tmp_path / 'mydiag' - diag_run_dir = diag_output_dir / 'run_dir' - diag_settings = {'run_dir': diag_run_dir, 'profile_diagnostic': False} + diag_output_dir = tmp_path / "mydiag" + diag_run_dir = diag_output_dir / "run_dir" + diag_settings = {"run_dir": diag_run_dir, "profile_diagnostic": False} if write_diag: - with open(diag_script, "w", encoding='utf-8') as fil: + with open(diag_script, "w", encoding="utf-8") as fil: fil.write("import os\n\nprint(os.getcwd())") task = DiagnosticTask( - name='task0', - ancestors=[BaseTask(name=f'task0-ancestor{j}') for j in range(2)], + name="task0", + ancestors=[BaseTask(name=f"task0-ancestor{j}") for j in range(2)], script=diag_script, settings=diag_settings, output_dir=diag_output_dir, @@ -232,52 +240,66 @@ def _get_single_diagnostic_task(tmp_path, diag_script, write_diag=True): def test_py_diagnostic_task_constructor(tmp_path): """Test DiagnosticTask basic attributes.""" - diag_script = tmp_path / 'diag_cow.py' + diag_script = tmp_path / "diag_cow.py" task = _get_single_diagnostic_task(tmp_path, diag_script) - assert task.name == 'task0' + assert task.name == "task0" ancestor_names = [anc.name for anc in task.ancestors] - assert ancestor_names == ['task0-ancestor0', 'task0-ancestor1'] + assert ancestor_names == ["task0-ancestor0", "task0-ancestor1"] assert task.script == diag_script assert task.settings == { - 'run_dir': tmp_path / 'mydiag' / 'run_dir', - 'profile_diagnostic': False + "run_dir": tmp_path / "mydiag" / "run_dir", + "profile_diagnostic": False, } - assert task.output_dir == tmp_path / 'mydiag' + assert task.output_dir == tmp_path / "mydiag" + + +def test_py_diagnostic_task_write_settings(tmp_path): + """Test DiagnosticTask writes settings in the user's order.""" + diag_script = tmp_path / "diag_cow.py" + task = _get_single_diagnostic_task(tmp_path, diag_script) + my_arg_dict = {"b": [1], "a": 3.0, "c": False} + task.settings.update(my_arg_dict) + settings = task.write_settings() + with open(settings, "r") as stream: + settings_data = yaml.safe_load(stream) + + assert list(settings_data) == ["run_dir", "b", "a", "c"] def test_diagnostic_diag_script_none(tmp_path): """Test case when diagnostic script doesn't exist.""" - diag_script = tmp_path / 'diag_cow.py' + diag_script = tmp_path / "diag_cow.py" with pytest.raises(DiagnosticError) as err_msg: _get_single_diagnostic_task(tmp_path, diag_script, write_diag=False) diagnostics_root = DIAGNOSTICS.scripts script_file = os.path.abspath(os.path.join(diagnostics_root, diag_script)) - ept = ("Cannot execute script '{}' " - "({}): file does not exist.".format(script_file, script_file)) + ept = "Cannot execute script '{}' ({}): file does not exist.".format( + script_file, script_file + ) assert ept == str(err_msg.value) def _get_diagnostic_tasks(tmp_path, diagnostic_text, extension): """Assemble a DiagnosticTask for the given script extension.""" - diag = 'diag_cow.' + extension + diag = "diag_cow."
+ extension diag_script = tmp_path / diag - diag_output_dir = tmp_path / 'mydiag' - diag_run_dir = diag_output_dir / 'run_dir' - diag_plot_dir = diag_output_dir / 'plot_dir' - diag_work_dir = diag_output_dir / 'work_dir' + diag_output_dir = tmp_path / "mydiag" + diag_run_dir = diag_output_dir / "run_dir" + diag_plot_dir = diag_output_dir / "plot_dir" + diag_work_dir = diag_output_dir / "work_dir" diag_settings = { - 'run_dir': diag_run_dir.as_posix(), - 'plot_dir': diag_plot_dir.as_posix(), - 'work_dir': diag_work_dir.as_posix(), - 'profile_diagnostic': False, - 'exit_on_ncl_warning': False + "run_dir": diag_run_dir.as_posix(), + "plot_dir": diag_plot_dir.as_posix(), + "work_dir": diag_work_dir.as_posix(), + "profile_diagnostic": False, + "exit_on_ncl_warning": False, } - with open(diag_script, "w", encoding='utf-8') as fil: + with open(diag_script, "w", encoding="utf-8") as fil: fil.write(diagnostic_text) task = DiagnosticTask( - name='task0', + name="task0", ancestors=None, script=diag_script.as_posix(), settings=diag_settings, @@ -288,50 +310,57 @@ def _get_diagnostic_tasks(tmp_path, diagnostic_text, extension): # skip if no exec -no_ncl = pytest.mark.skipif(shutil.which('ncl') is None, - reason="ncl is not installed") -no_rscript = pytest.mark.skipif(shutil.which('Rscript') is None, - reason="Rscript is not installed") +no_ncl = pytest.mark.skipif( + shutil.which("ncl") is None, reason="ncl is not installed" +) +no_rscript = pytest.mark.skipif( + shutil.which("Rscript") is None, reason="Rscript is not installed" +) CMD_diag = { - ('ncl', 'ncl'): _py2ncl({'cow': 22}, 'tas'), - ('Rscript', 'R'): 'var0 <- "zg"', - ('python', 'py'): "import os\n\nprint('cow')" + ("ncl", "ncl"): _py2ncl({"cow": 22}, "tas"), + ("Rscript", "R"): 'var0 <- "zg"', + ("python", "py"): "import os\n\nprint('cow')", } CMD_diag_fail = { - ('ncl', 'ncl'): ("cows on the [river]", - "An error occurred during execution of NCL script"), - ('python', 'py'): - ("import os\n\nprint(cow)", "diag_cow.py failed with return code 1") + ("ncl", "ncl"): ( + "cows on the [river]", + "An error occurred during execution of NCL script", + ), + ("python", "py"): ( + "import os\n\nprint(cow)", + "diag_cow.py failed with return code 1", + ), } -@pytest.mark.parametrize('executable,diag_text', CMD_diag.items()) +@pytest.mark.parametrize("executable,diag_text", CMD_diag.items()) @no_ncl @no_rscript def test_diagnostic_run_task(monkeypatch, executable, diag_text, tmp_path): """Run DiagnosticTask that will not fail.""" def _run(self, input_filesi=[]): - print(f'running task {self.name}') + print(f"running task {self.name}") task = _get_diagnostic_tasks(tmp_path, diag_text, executable[1]) - monkeypatch.setattr(BaseTask, '_run', _run) + monkeypatch.setattr(BaseTask, "_run", _run) task.run() -@pytest.mark.parametrize('executable,diag_text', CMD_diag_fail.items()) +@pytest.mark.parametrize("executable,diag_text", CMD_diag_fail.items()) @no_ncl -def test_diagnostic_run_task_fail(monkeypatch, executable, diag_text, - tmp_path): +def test_diagnostic_run_task_fail( + monkeypatch, executable, diag_text, tmp_path +): """Run DiagnosticTask that will fail.""" def _run(self, input_filesi=[]): - print(f'running task {self.name}') + print(f"running task {self.name}") task = _get_diagnostic_tasks(tmp_path, diag_text[0], executable[1]) - monkeypatch.setattr(BaseTask, '_run', _run) + monkeypatch.setattr(BaseTask, "_run", _run) with pytest.raises(DiagnosticError) as err_mssg: task.run() assert diag_text[1] in str(err_mssg.value) diff --git 
a/tests/parse_pymon.py b/tests/parse_pymon.py index e63da518cc..3f05929703 100644 --- a/tests/parse_pymon.py +++ b/tests/parse_pymon.py @@ -5,6 +5,7 @@ Lots of other metrics can be read from the file via sqlite parsing., currently just MEM_USAGE (RES memory, in MB). """ + import sqlite3 import sys from operator import itemgetter @@ -13,12 +14,12 @@ def _get_big_mem_tests(cur): """Find out which tests are heavy on memory.""" big_mem_tests = [] - for row in cur.execute('select ITEM, MEM_USAGE from TEST_METRICS;'): + for row in cur.execute("select ITEM, MEM_USAGE from TEST_METRICS;"): test_name, memory_used = row[0], row[1] - if memory_used > 1000.: # test result in RES mem in MB + if memory_used > 1000.0: # test result in RES mem in MB print("Test name / memory (MB)") print(test_name, memory_used) - elif memory_used > 4000.: + elif memory_used > 4000.0: big_mem_tests.append((test_name, memory_used)) return big_mem_tests @@ -27,11 +28,16 @@ def _get_big_mem_tests(cur): def _get_slow_tests(cur): """Find out which tests are slow.""" timed_tests = [] - sq_command = \ - 'select ITEM, ITEM_VARIANT, ITEM_PATH, TOTAL_TIME from TEST_METRICS;' + sq_command = ( + "select ITEM, ITEM_VARIANT, ITEM_PATH, TOTAL_TIME from TEST_METRICS;" + ) for row in cur.execute(sq_command): - test_name, test_var, test_path, time_used = \ - row[0], row[1], row[2], row[3] + test_name, test_var, test_path, time_used = ( + row[0], + row[1], + row[2], + row[3], + ) timed_tests.append((test_name, test_var, test_path, time_used)) timed_tests = sorted(timed_tests, reverse=True, key=itemgetter(3)) @@ -60,7 +66,7 @@ def _parse_pymon_database(): big_mem_tests = _get_big_mem_tests(cur) # then look at total time (in seconds) - # (user time is availbale too via USER_TIME, kernel time via KERNEL_TIME) + # (user time is available too via USER_TIME, kernel time via KERNEL_TIME) _get_slow_tests(cur) # Be sure to close the connection @@ -73,5 +79,5 @@ def _parse_pymon_database(): sys.exit(1) -if __name__ == '__main__': +if __name__ == "__main__": _parse_pymon_database() diff --git a/tests/sample_data/experimental/test_run_recipe.py b/tests/sample_data/experimental/test_run_recipe.py index 771c572a77..2abdd22197 100644 --- a/tests/sample_data/experimental/test_run_recipe.py +++ b/tests/sample_data/experimental/test_run_recipe.py @@ -25,11 +25,11 @@ esmvaltool_sample_data = pytest.importorskip("esmvaltool_sample_data") AUTHOR_TAGS = { - 'authors': { - 'doe_john': { - 'name': 'Doe, John', - 'institute': 'Testing', - 'orcid': 'https://orcid.org/0000-0000-0000-0000', + "authors": { + "doe_john": { + "name": "Doe, John", + "institute": "Testing", + "orcid": "https://orcid.org/0000-0000-0000-0000", } } } @@ -45,20 +45,20 @@ def get_distributed_client(): monkeypatch.setattr( esmvalcore._task, - 'get_distributed_client', + "get_distributed_client", get_distributed_client, ) @pytest.fixture def recipe(): - recipe = get_recipe(Path(__file__).with_name('recipe_api_test.yml')) + recipe = get_recipe(Path(__file__).with_name("recipe_api_test.yml")) return recipe @pytest.mark.use_sample_data -@pytest.mark.parametrize('ssh', (True, False)) -@pytest.mark.parametrize('task', (None, 'example/ta')) +@pytest.mark.parametrize("ssh", (True, False)) +@pytest.mark.parametrize("task", (None, "example/ta")) def test_run_recipe(monkeypatch, task, ssh, recipe, tmp_path, caplog): """Test running a basic recipe using sample data. 
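Reviewer note on tests/parse_pymon.py: in _get_big_mem_tests, the first branch (memory_used > 1000.0) also matches every value above 4000.0, so the elif memory_used > 4000.0 branch is unreachable and the returned big_mem_tests list always stays empty; the reformatting above preserves this pre-existing quirk. A sketch of the presumed intent, with the larger threshold checked first (a hypothetical fix, not part of this diff):

    def _get_big_mem_tests(cur):
        """Find out which tests are heavy on memory."""
        big_mem_tests = []
        for test_name, memory_used in cur.execute(
            "select ITEM, MEM_USAGE from TEST_METRICS;"
        ):
            if memory_used > 4000.0:  # collect the really heavy tests
                big_mem_tests.append((test_name, memory_used))
            elif memory_used > 1000.0:  # only report moderately heavy ones
                print("Test name / memory (MB)")
                print(test_name, memory_used)
        return big_mem_tests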
@@ -67,9 +67,9 @@ def test_run_recipe(monkeypatch, task, ssh, recipe, tmp_path, caplog): caplog.set_level(logging.INFO) caplog.clear() if ssh: - monkeypatch.setitem(os.environ, 'SSH_CONNECTION', '0.0 0 1.1 1') + monkeypatch.setitem(os.environ, "SSH_CONNECTION", "0.0 0 1.1 1") else: - monkeypatch.delitem(os.environ, 'SSH_CONNECTION', raising=False) + monkeypatch.delitem(os.environ, "SSH_CONNECTION", raising=False) TAGS.set_tag_values(AUTHOR_TAGS) @@ -77,20 +77,20 @@ def test_run_recipe(monkeypatch, task, ssh, recipe, tmp_path, caplog): assert isinstance(recipe._repr_html_(), str) sample_data_config = esmvaltool_sample_data.get_rootpaths() - monkeypatch.setitem(CFG, 'rootpath', sample_data_config['rootpath']) - monkeypatch.setitem(CFG, 'drs', {'CMIP6': 'SYNDA'}) + monkeypatch.setitem(CFG, "rootpath", sample_data_config["rootpath"]) + monkeypatch.setitem(CFG, "drs", {"CMIP6": "SYNDA"}) session = CFG.start_session(recipe.path.stem) session.clear() session.update(CFG_DEFAULT) - session['output_dir'] = tmp_path / 'esmvaltool_output' - session['max_parallel_tasks'] = 1 - session['remove_preproc_dir'] = False + session["output_dir"] = tmp_path / "esmvaltool_output" + session["max_parallel_tasks"] = 1 + session["remove_preproc_dir"] = False output = recipe.run(task=task, session=session) assert len(output) > 0 assert isinstance(output, RecipeOutput) - assert (output.session.session_dir / 'index.html').exists() + assert (output.session.session_dir / "index.html").exists() assert (output.session.run_dir / output.info.filename).exists() assert isinstance(output.read_main_log(), str) @@ -122,10 +122,10 @@ def test_run_recipe_diagnostic_failing(monkeypatch, recipe, tmp_path): """ TAGS.set_tag_values(AUTHOR_TAGS) - monkeypatch.setitem(CFG, 'output_dir', tmp_path) + monkeypatch.setitem(CFG, "output_dir", tmp_path) session = CFG.start_session(recipe.path.stem) with pytest.raises(RecipeError): - task = 'example/non-existant' + task = "example/non-existent" _ = recipe.run(task, session) diff --git a/tests/sample_data/multimodel_statistics/test_multimodel.py b/tests/sample_data/multimodel_statistics/test_multimodel.py index 4c40d94875..3a9223c15f 100644 --- a/tests/sample_data/multimodel_statistics/test_multimodel.py +++ b/tests/sample_data/multimodel_statistics/test_multimodel.py @@ -20,7 +20,7 @@ # Increase this number anytime you change the cached input data to the tests. TEST_REVISION = 1 -SPAN_PARAMS = ('overlap', 'full') +SPAN_PARAMS = ("overlap", "full") def assert_array_almost_equal(this, other, rtol=1e-7): @@ -51,7 +51,7 @@ def assert_metadata_equal(this, other): def fix_metadata(cubes): """Fix metadata.""" for cube in cubes: - cube.coord('air_pressure').bounds = None + cube.coord("air_pressure").bounds = None def preprocess_data(cubes, time_slice: Optional[dict] = None): @@ -64,8 +64,8 @@ def preprocess_data(cubes, time_slice: Optional[dict] = None): # regrid to first cube regrid_kwargs = { - 'grid': first_cube, - 'scheme': iris.analysis.Nearest(), + "grid": first_cube, + "scheme": iris.analysis.Nearest(), } cubes = [cube.regrid(**regrid_kwargs) for cube in cubes] @@ -81,9 +81,11 @@ def get_cache_key(value): clear`. 
""" py_version = platform.python_version() - return (f'{value}_iris-{iris.__version__}_' - f'numpy-{np.__version__}_python-{py_version}' - f'rev-{TEST_REVISION}') + return ( + f"{value}_iris-{iris.__version__}_" + f"numpy-{np.__version__}_python-{py_version}" + f"rev-{TEST_REVISION}" + ) @pytest.fixture(scope="module") @@ -94,23 +96,24 @@ def timeseries_cubes_month(request): data = request.config.cache.get(cache_key, None) if data: - cubes = pickle.loads(data.encode('latin1')) + cubes = pickle.loads(data.encode("latin1")) else: # Increase TEST_REVISION anytime you make changes here. time_slice = { - 'start_year': 1985, - 'end_year': 1987, - 'start_month': 12, - 'end_month': 2, - 'start_day': 1, - 'end_day': 1, + "start_year": 1985, + "end_year": 1987, + "start_month": 12, + "end_month": 2, + "start_day": 1, + "end_day": 1, } - cubes = esmvaltool_sample_data.load_timeseries_cubes(mip_table='Amon') + cubes = esmvaltool_sample_data.load_timeseries_cubes(mip_table="Amon") cubes = preprocess_data(cubes, time_slice=time_slice) # cubes are not serializable via json, so we must go via pickle - request.config.cache.set(cache_key, - pickle.dumps(cubes).decode('latin1')) + request.config.cache.set( + cache_key, pickle.dumps(cubes).decode("latin1") + ) fix_metadata(cubes) @@ -125,29 +128,30 @@ def timeseries_cubes_day(request): data = request.config.cache.get(cache_key, None) if data: - cubes = pickle.loads(data.encode('latin1')) + cubes = pickle.loads(data.encode("latin1")) else: # Increase TEST_REVISION anytime you make changes here. time_slice = { - 'start_year': 2001, - 'end_year': 2002, - 'start_month': 12, - 'end_month': 2, - 'start_day': 1, - 'end_day': 1, + "start_year": 2001, + "end_year": 2002, + "start_month": 12, + "end_month": 2, + "start_day": 1, + "end_day": 1, } - cubes = esmvaltool_sample_data.load_timeseries_cubes(mip_table='day') + cubes = esmvaltool_sample_data.load_timeseries_cubes(mip_table="day") cubes = preprocess_data(cubes, time_slice=time_slice) # cubes are not serializable via json, so we must go via pickle - request.config.cache.set(cache_key, - pickle.dumps(cubes).decode('latin1')) + request.config.cache.set( + cache_key, pickle.dumps(cubes).decode("latin1") + ) fix_metadata(cubes) def calendar(cube): - return cube.coord('time').units.calendar + return cube.coord("time").units.calendar # groupby requires sorted list grouped = groupby(sorted(cubes, key=calendar), key=calendar) @@ -161,10 +165,9 @@ def multimodel_test(cubes, statistic, span, **kwargs): """Run multimodel test with some simple checks.""" statistics = [statistic] - result = multi_model_statistics(products=cubes, - statistics=statistics, - span=span, - **kwargs) + result = multi_model_statistics( + products=cubes, statistics=statistics, span=span, **kwargs + ) assert isinstance(result, dict) assert statistic in result @@ -180,11 +183,11 @@ def multimodel_regression_test(cubes, span, name): fail the first time with a RuntimeError, because the reference data are being written. """ - statistic = 'mean' + statistic = "mean" result = multimodel_test(cubes, statistic=statistic, span=span) result_cube = result[statistic] - filename = Path(__file__).with_name(f'{name}-{span}-{statistic}.nc') + filename = Path(__file__).with_name(f"{name}-{span}-{statistic}.nc") if filename.exists(): reference_cube = iris.load_cube(str(filename)) @@ -195,11 +198,11 @@ def multimodel_regression_test(cubes, span, name): else: # The test will fail if no regression data are available. 
iris.save(result_cube, filename) - raise RuntimeError(f'Wrote reference data to {filename.absolute()}') + raise RuntimeError(f"Wrote reference data to {filename.absolute()}") @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_regression_month(timeseries_cubes_month, span): """Test statistic fail due to differing input coordinates (pressure). @@ -207,7 +210,7 @@ def test_multimodel_regression_month(timeseries_cubes_month, span): """ cubes = timeseries_cubes_month - name = 'timeseries_monthly' + name = "timeseries_monthly" msg = ( "Multi-model statistics failed to merge input cubes into a single " "array" @@ -217,61 +220,61 @@ def test_multimodel_regression_month(timeseries_cubes_month, span): @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_regression_day_standard(timeseries_cubes_day, span): """Test statistic.""" - calendar = 'standard' if cf_units.__version__ >= '3.1' else 'gregorian' + calendar = "standard" if cf_units.__version__ >= "3.1" else "gregorian" cubes = timeseries_cubes_day[calendar] - name = f'timeseries_daily_{calendar}' + name = f"timeseries_daily_{calendar}" multimodel_regression_test(name=name, span=span, cubes=cubes) @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_regression_day_365_day(timeseries_cubes_day, span): """Test statistic.""" - calendar = '365_day' + calendar = "365_day" cubes = timeseries_cubes_day[calendar] - name = f'timeseries_daily_{calendar}' + name = f"timeseries_daily_{calendar}" multimodel_regression_test(name=name, span=span, cubes=cubes) @pytest.mark.skip( - reason='Cannot calculate statistics with single cube in list' + reason="Cannot calculate statistics with single cube in list" ) @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_regression_day_360_day(timeseries_cubes_day, span): """Test statistic.""" - calendar = '360_day' + calendar = "360_day" cubes = timeseries_cubes_day[calendar] - name = f'timeseries_daily_{calendar}' + name = f"timeseries_daily_{calendar}" multimodel_regression_test(name=name, span=span, cubes=cubes) @pytest.mark.skip( - reason='Cannot calculate statistics with single cube in list' + reason="Cannot calculate statistics with single cube in list" ) @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_regression_day_julian(timeseries_cubes_day, span): """Test statistic.""" - calendar = 'julian' + calendar = "julian" cubes = timeseries_cubes_day[calendar] - name = f'timeseries_daily_{calendar}' + name = f"timeseries_daily_{calendar}" multimodel_regression_test(name=name, span=span, cubes=cubes) @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_regression_day_proleptic_gregorian( timeseries_cubes_day, span, ): """Test statistic.""" - calendar = 'proleptic_gregorian' + calendar = "proleptic_gregorian" cubes = timeseries_cubes_day[calendar] - name = f'timeseries_daily_{calendar}' + name = f"timeseries_daily_{calendar}" msg = ( "Multi-model statistics failed to merge input cubes into a single " "array" @@ -281,15 +284,15 @@ def test_multimodel_regression_day_proleptic_gregorian( 
@pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_no_vertical_dimension(timeseries_cubes_month, span): """Test statistic without vertical dimension using monthly data.""" cubes = [cube[:, 0] for cube in timeseries_cubes_month] - multimodel_test(cubes, span=span, statistic='mean') + multimodel_test(cubes, span=span, statistic="mean") @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_merge_error(timeseries_cubes_month, span): """Test statistic with slightly different vertical coordinates. @@ -302,19 +305,19 @@ def test_multimodel_merge_error(timeseries_cubes_month, span): "array" ) with pytest.raises(ValueError, match=msg): - multimodel_test(cubes, span=span, statistic='mean') + multimodel_test(cubes, span=span, statistic="mean") @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_only_time_dimension(timeseries_cubes_month, span): """Test statistic without only the time dimension using monthly data.""" cubes = [cube[:, 0, 0, 0] for cube in timeseries_cubes_month] - multimodel_test(cubes, span=span, statistic='mean') + multimodel_test(cubes, span=span, statistic="mean") @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_no_time_dimension(timeseries_cubes_month, span): """Test statistic without time dimension using monthly data. @@ -326,23 +329,23 @@ def test_multimodel_no_time_dimension(timeseries_cubes_month, span): """ cubes = [cube[0, 0] for cube in timeseries_cubes_month] - result = multimodel_test(cubes, span=span, statistic='mean')['mean'] + result = multimodel_test(cubes, span=span, statistic="mean")["mean"] assert result.shape == (3, 2) @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_scalar_cubes(timeseries_cubes_month, span): """Test statistic with scalar cubes.""" cubes = [cube[0, 0, 0, 0] for cube in timeseries_cubes_month] - result = multimodel_test(cubes, span=span, statistic='mean')['mean'] + result = multimodel_test(cubes, span=span, statistic="mean")["mean"] assert result.shape == () - assert result.coord('time').bounds is None + assert result.coord("time").bounds is None @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_0d_1d_time_no_ignore_scalars(timeseries_cubes_month, span): """Test statistic fail on 0D and 1D time dimension using monthly data. @@ -357,11 +360,11 @@ def test_multimodel_0d_1d_time_no_ignore_scalars(timeseries_cubes_month, span): msg = "Tried to align cubes in multi-model statistics, but failed for cube" with pytest.raises(ValueError, match=msg): - multimodel_test(cubes, span=span, statistic='mean') + multimodel_test(cubes, span=span, statistic="mean") @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_0d_1d_time_ignore_scalars(timeseries_cubes_month, span): """Test statistic fail on 0D and 1D time dimension using monthly data. 
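Reviewer note: pytest.mark.parametrize(...) returns a reusable decorator object, so the span marker repeated on almost every test in this file could be defined once next to SPAN_PARAMS. A hypothetical consolidation, not part of this diff:

    import pytest

    SPAN_PARAMS = ("overlap", "full")
    parametrize_span = pytest.mark.parametrize("span", SPAN_PARAMS)

    @pytest.mark.use_sample_data
    @parametrize_span
    def test_example(span):
        assert span in SPAN_PARAMS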
@@ -381,12 +384,12 @@ def test_multimodel_0d_1d_time_ignore_scalars(timeseries_cubes_month, span): ) with pytest.raises(ValueError, match=msg): multimodel_test( - cubes, span=span, statistic='mean', ignore_scalar_coords=True + cubes, span=span, statistic="mean", ignore_scalar_coords=True ) @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_only_some_time_dimensions(timeseries_cubes_month, span): """Test statistic fail if only some cubes have time dimension. @@ -400,7 +403,7 @@ def test_multimodel_only_some_time_dimensions(timeseries_cubes_month, span): # Remove time dimension for one cube cubes[1] = cubes[1][0] - cubes[1].remove_coord('time') + cubes[1].remove_coord("time") msg = ( "Multi-model statistics failed to merge input cubes into a single " @@ -408,11 +411,11 @@ def test_multimodel_only_some_time_dimensions(timeseries_cubes_month, span): "dimension." ) with pytest.raises(ValueError, match=msg): - multimodel_test(cubes, span=span, statistic='mean') + multimodel_test(cubes, span=span, statistic="mean") @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_diff_scalar_time_fail(timeseries_cubes_month, span): """Test statistic fail on different scalar time dimensions. @@ -425,16 +428,16 @@ def test_multimodel_diff_scalar_time_fail(timeseries_cubes_month, span): cubes = [cube[0, 0] for cube in timeseries_cubes_month] # Use different scalar time point and bounds for one cube - cubes[1].coord('time').points = 20.0 - cubes[1].coord('time').bounds = [0.0, 40.0] + cubes[1].coord("time").points = 20.0 + cubes[1].coord("time").bounds = [0.0, 40.0] msg = "Tried to align cubes in multi-model statistics, but failed for cube" with pytest.raises(ValueError, match=msg): - multimodel_test(cubes, span=span, statistic='mean') + multimodel_test(cubes, span=span, statistic="mean") @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_diff_scalar_time_ignore(timeseries_cubes_month, span): """Ignore different scalar time dimensions. @@ -447,17 +450,17 @@ def test_multimodel_diff_scalar_time_ignore(timeseries_cubes_month, span): cubes = [cube[0, 0] for cube in timeseries_cubes_month] # Use different scalar time point and bounds for one cube - cubes[1].coord('time').points = 20.0 - cubes[1].coord('time').bounds = [0.0, 40.0] + cubes[1].coord("time").points = 20.0 + cubes[1].coord("time").bounds = [0.0, 40.0] result = multimodel_test( - cubes, span=span, statistic='mean', ignore_scalar_coords=True - )['mean'] + cubes, span=span, statistic="mean", ignore_scalar_coords=True + )["mean"] assert result.shape == (3, 2) @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_ignore_scalar_coords(timeseries_cubes_month, span): """Test statistic does not fail on different scalar coords when ignored. 
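Reviewer note: the tests in this block all exercise one behaviour of multi_model_statistics, namely that mismatching scalar coordinates (for example, differing scalar time points) break cube alignment unless they are explicitly ignored, and that ignoring them leaves the input cubes untouched. A hedged usage sketch of that option, assuming the public esmvalcore.preprocessor entry point and a list of otherwise compatible iris cubes:

    from esmvalcore.preprocessor import multi_model_statistics

    def mmm_mean(cubes, span="overlap"):
        """Multi-model mean that ignores mismatching scalar coords."""
        result = multi_model_statistics(
            products=cubes,
            statistics=["mean"],
            span=span,
            # Drop scalar coords during alignment only; the tests above
            # verify that the input cubes keep their scalar coords.
            ignore_scalar_coords=True,
        )
        return result["mean"]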
@@ -468,22 +471,22 @@ """ cubes = [cube[0, 0] for cube in timeseries_cubes_month] - for (idx, cube) in enumerate(cubes): - aux_coord = AuxCoord(0.0, var_name=f'name_{idx}') + for idx, cube in enumerate(cubes): + aux_coord = AuxCoord(0.0, var_name=f"name_{idx}") cube.add_aux_coord(aux_coord, ()) result = multimodel_test( - cubes, span=span, statistic='mean', ignore_scalar_coords=True - )['mean'] + cubes, span=span, statistic="mean", ignore_scalar_coords=True + )["mean"] assert result.shape == (3, 2) # Make sure that the input cubes still contain the scalar coords - for (idx, cube) in enumerate(cubes): - assert cube.coord(var_name=f'name_{idx}', dimensions=()) + for idx, cube in enumerate(cubes): + assert cube.coord(var_name=f"name_{idx}", dimensions=()) @pytest.mark.use_sample_data -@pytest.mark.parametrize('span', SPAN_PARAMS) +@pytest.mark.parametrize("span", SPAN_PARAMS) def test_multimodel_do_not_ignore_scalar_coords(timeseries_cubes_month, span): """Test statistic fail on different scalar coords. @@ -494,8 +497,8 @@ def test_multimodel_do_not_ignore_scalar_coords(timeseries_cubes_month, span): """ cubes = [cube[0, 0] for cube in timeseries_cubes_month] - for (idx, cube) in enumerate(cubes): - aux_coord = AuxCoord(0.0, var_name=f'name_{idx}') + for idx, cube in enumerate(cubes): + aux_coord = AuxCoord(0.0, var_name=f"name_{idx}") cube.add_aux_coord(aux_coord, ()) msg = ( @@ -503,4 +506,4 @@ def test_multimodel_do_not_ignore_scalar_coords(timeseries_cubes_month, span): "array" ) with pytest.raises(ValueError, match=msg): - multimodel_test(cubes, span=span, statistic='mean') + multimodel_test(cubes, span=span, statistic="mean") diff --git a/tests/unit/cmor/test_cmor_check.py b/tests/unit/cmor/test_cmor_check.py index 957136fd42..331f3b6273 100644 --- a/tests/unit/cmor/test_cmor_check.py +++ b/tests/unit/cmor/test_cmor_check.py @@ -29,29 +29,29 @@ class VariableInfoMock: """Mock for the variables definition.""" def __init__(self): - self.table_type = 'CMIP5' - self.short_name = 'short_name' - self.standard_name = 'age_of_sea_ice' # Iris don't accept fakes ... - self.long_name = 'Long Name' - self.units = 'years' # ... nor in the units - self.valid_min = '0' - self.valid_max = '100' - self.frequency = 'day' - self.positive = '' - - generic_level = CoordinateInfoMock('depth') + self.table_type = "CMIP5" + self.short_name = "short_name" + self.standard_name = "age_of_sea_ice" # Iris doesn't accept fakes ... + self.long_name = "Long Name" + self.units = "years" # ...
nor in the units + self.valid_min = "0" + self.valid_max = "100" + self.frequency = "day" + self.positive = "" + + generic_level = CoordinateInfoMock("depth") generic_level.generic_level = True - generic_level.axis = 'Z' + generic_level.axis = "Z" - requested = CoordinateInfoMock('air_pressure') + requested = CoordinateInfoMock("air_pressure") requested.requested = [str(number) for number in range(20)] self.coordinates = { - 'time': CoordinateInfoMock('time'), - 'lat': CoordinateInfoMock('lat'), - 'lon': CoordinateInfoMock('lon'), - 'air_pressure': requested, - 'depth': generic_level, + "time": CoordinateInfoMock("time"), + "lat": CoordinateInfoMock("lat"), + "lon": CoordinateInfoMock("lon"), + "air_pressure": requested, + "depth": generic_level, } @@ -64,7 +64,7 @@ def __init__(self, name): self.axis = "" self.value = "" - standard_names = {'lat': 'latitude', 'lon': 'longitude'} + standard_names = {"lat": "latitude", "lon": "longitude"} if name in standard_names: self.standard_name = standard_names[name] else: @@ -74,9 +74,9 @@ def __init__(self, name): self.var_name = self.name units = { - 'lat': 'degrees_north', - 'lon': 'degrees_east', - 'time': 'days since 1950-01-01 00:00:00' + "lat": "degrees_north", + "lon": "degrees_east", + "time": "days since 1950-01-01 00:00:00", } if name in units: self.units = units[name] @@ -89,7 +89,7 @@ def __init__(self, name): self.generic_lev_coords = {} self.generic_lev_name = "" - valid_limits = {'lat': ('-90', '90'), 'lon': ('0', '360')} + valid_limits = {"lat": ("-90", "90"), "lon": ("0", "360")} if name in valid_limits: self.valid_min = valid_limits[name][0] self.valid_max = valid_limits[name][1] @@ -110,70 +110,77 @@ def test_report_error(self): """Test report error function.""" checker = CMORCheck(self.cube, self.var_info) self.assertFalse(checker.has_errors()) - checker.report_critical('New error: {}', 'something failed') + checker.report_critical("New error: {}", "something failed") self.assertTrue(checker.has_errors()) def test_fail_on_error(self): """Test exception is raised if fail_on_error is activated.""" checker = CMORCheck(self.cube, self.var_info, fail_on_error=True) with self.assertRaises(CMORCheckError): - checker.report_critical('New error: {}', 'something failed') + checker.report_critical("New error: {}", "something failed") def test_report_warning(self): """Test report warning function.""" checker = CMORCheck(self.cube, self.var_info) self.assertFalse(checker.has_errors()) - checker.report_warning('New error: {}', 'something failed') + checker.report_warning("New error: {}", "something failed") self.assertTrue(checker.has_warnings()) def test_warning_fail_on_error(self): """Test report warning function with fail_on_error.""" checker = CMORCheck(self.cube, self.var_info, fail_on_error=True) - with self.assertLogs(level='WARNING') as cm: - checker.report_warning('New error: {}', 'something failed') + with self.assertLogs(level="WARNING") as cm: + checker.report_warning("New error: {}", "something failed") self.assertEqual( cm.output, - ['WARNING:esmvalcore.cmor.check:New error: something failed', ] + [ "WARNING:esmvalcore.cmor.check:New error: something failed", ], ) def test_report_debug_message(self): - """"Test report debug message function""" + """Test report debug message function.""" checker = CMORCheck(self.cube, self.var_info) self.assertFalse(checker.has_debug_messages()) - checker.report_debug_message('New debug message') + checker.report_debug_message("New debug message") self.assertTrue(checker.has_debug_messages())
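Reviewer note: the four report tests above pin down a small contract for the checker's message plumbing: critical messages accumulate, or raise immediately when fail_on_error is set, while warnings in that mode are only logged. A minimal sketch of that contract (a toy stand-in, not the actual CMORCheck implementation):

    import logging

    logger = logging.getLogger(__name__)

    class ToyChecker:
        """Mirror of the reporting behaviour the tests assert."""

        def __init__(self, fail_on_error=False):
            self.fail_on_error = fail_on_error
            self._errors = []
            self._warnings = []

        def report_critical(self, message, *args):
            msg = message.format(*args)
            if self.fail_on_error:
                raise RuntimeError(msg)  # stands in for CMORCheckError
            self._errors.append(msg)

        def report_warning(self, message, *args):
            msg = message.format(*args)
            if self.fail_on_error:
                logger.warning(msg)  # logged, never raised
            else:
                self._warnings.append(msg)

        def has_errors(self):
            return bool(self._errors)

        def has_warnings(self):
            return bool(self._warnings)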
def test_check(self): """Test checks succeeds for a good cube.""" self._check_cube() - def _check_cube(self, frequency=None, - check_level=CheckLevels.DEFAULT): + def _check_cube(self, frequency=None, check_level=CheckLevels.DEFAULT): """Apply checks to self.cube.""" + def checker(cube): return CMORCheck( cube, self.var_info, frequency=frequency, - check_level=check_level) + check_level=check_level, + ) self.cube = checker(self.cube).check_metadata() self.cube = checker(self.cube).check_data() - def _check_cube_metadata(self, frequency=None, - check_level=CheckLevels.DEFAULT): + def _check_cube_metadata( + self, frequency=None, check_level=CheckLevels.DEFAULT + ): """Apply checks to self.cube.""" + def checker(cube): return CMORCheck( cube, self.var_info, frequency=frequency, - check_level=check_level) + check_level=check_level, + ) self.cube = checker(self.cube).check_metadata() def test_check_with_custom_logger(self): """Test checks with custom logger.""" + def checker(cube): return CMORCheck(cube, self.var_info) @@ -182,86 +189,87 @@ def checker(cube): def test_check_with_month_number(self): """Test checks succeeds for a good cube with month number.""" - iris.coord_categorisation.add_month_number(self.cube, 'time') + iris.coord_categorisation.add_month_number(self.cube, "time") self._check_cube() def test_check_with_day_of_month(self): """Test checks succeeds for a good cube with day of month.""" - iris.coord_categorisation.add_day_of_month(self.cube, 'time') + iris.coord_categorisation.add_day_of_month(self.cube, "time") self._check_cube() def test_check_with_day_of_year(self): """Test checks succeeds for a good cube with day of year.""" - iris.coord_categorisation.add_day_of_year(self.cube, 'time') + iris.coord_categorisation.add_day_of_year(self.cube, "time") self._check_cube() def test_check_with_year(self): """Test checks succeeds for a good cube with year.""" - iris.coord_categorisation.add_year(self.cube, 'time') + iris.coord_categorisation.add_year(self.cube, "time") self._check_cube() def test_check_no_multiple_coords_same_stdname(self): """Test checks fails if two coords have the same standard_name.""" self.cube.add_aux_coord( iris.coords.AuxCoord( - np.reshape(np.linspace(-90, 90, num=20*20), (20, 20)), - var_name='bad_name', - standard_name='latitude', - units='degrees_north' + np.reshape(np.linspace(-90, 90, num=20 * 20), (20, 20)), + var_name="bad_name", + standard_name="latitude", + units="degrees_north", ), - (1, 2) + (1, 2), ) self._check_fails_in_metadata() def test_check_bad_standard_name(self): """Test check fails for a bad short_name.""" - self.cube.standard_name = 'wind_speed' + self.cube.standard_name = "wind_speed" self._check_fails_in_metadata() def test_check_bad_long_name(self): """Test check fails for a bad short_name.""" - self.cube.long_name = 'bad_name' + self.cube.long_name = "bad_name" self._check_fails_in_metadata() def test_check_bad_units(self): """Test check fails for bad units.""" - self.cube.units = 'days' + self.cube.units = "days" self._check_fails_in_metadata() def test_check_with_positive(self): """Check variable with positive attribute.""" - self.var_info.positive = 'up' + self.var_info.positive = "up" self.cube = self.get_cube(self.var_info) self._check_cube() def test_check_with_no_positive_cmip5(self): """Check CMIP5 variable with no positive attribute report warning.""" self.cube = self.get_cube(self.var_info) - self.var_info.positive = 'up' + self.var_info.positive = "up" self._check_warnings_on_metadata() def 
test_check_with_no_positive_cmip6(self): """Check CMIP6 variable with no positive attribute report warning.""" - self.var_info.positive = 'up' - self.var_info.table_type = 'CMIP6' + self.var_info.positive = "up" + self.var_info.table_type = "CMIP6" self._check_warnings_on_metadata() def test_invalid_rank(self): """Test check fails in metadata step when rank is not correct.""" - lat = iris.coords.AuxCoord.from_coord(self.cube.coord('latitude')) - self.cube.remove_coord('latitude') - self.cube.add_aux_coord(lat, self.cube.coord_dims('longitude')) + lat = iris.coords.AuxCoord.from_coord(self.cube.coord("latitude")) + self.cube.remove_coord("latitude") + self.cube.add_aux_coord(lat, self.cube.coord_dims("longitude")) self._check_fails_in_metadata() def test_rank_with_aux_coords(self): """Check succeeds even if a required coordinate is an aux coord.""" - iris.util.demote_dim_coord_to_aux_coord(self.cube, 'latitude') + iris.util.demote_dim_coord_to_aux_coord(self.cube, "latitude") self._check_cube() def test_rank_with_scalar_coords(self): """Check succeeds even if a required coordinate is a scalar coord.""" self.cube = self.cube.extract( - iris.Constraint(time=self.cube.coord('time').cell(0))) + iris.Constraint(time=self.cube.coord("time").cell(0)) + ) self._check_cube() def test_rank_unstructured_grid(self): @@ -272,15 +280,16 @@ def test_rank_unstructured_grid(self): def test_bad_generic_level(self): """Test check fails in metadata if generic level coord has wrong var_name.""" - depth_coord = CoordinateInfoMock('depth') - depth_coord.axis = 'Z' - depth_coord.generic_lev_name = 'olevel' - depth_coord.out_name = 'lev' - depth_coord.name = 'depth_coord' - depth_coord.long_name = 'ocean depth coordinate' - self.var_info.coordinates['depth'].generic_lev_coords = { - 'depth_coord': depth_coord} - self.var_info.coordinates['depth'].out_name = "" + depth_coord = CoordinateInfoMock("depth") + depth_coord.axis = "Z" + depth_coord.generic_lev_name = "olevel" + depth_coord.out_name = "lev" + depth_coord.name = "depth_coord" + depth_coord.long_name = "ocean depth coordinate" + self.var_info.coordinates["depth"].generic_lev_coords = { + "depth_coord": depth_coord + } + self.var_info.coordinates["depth"].out_name = "" self._check_fails_in_metadata() def test_valid_generic_level(self): @@ -301,51 +310,52 @@ def test_valid_generic_level_automatic_fixes(self): def test_invalid_generic_level(self): """Test invalid generic level coordinate.""" self._setup_generic_level_var() - self.cube.remove_coord('atmosphere_sigma_coordinate') + self.cube.remove_coord("atmosphere_sigma_coordinate") self._check_fails_in_metadata() def test_generic_level_alternative_cmip3(self): """Test valid alternative for generic level coords (CMIP3).""" - self.var_info.table_type = 'CMIP3' + self.var_info.table_type = "CMIP3" self._setup_generic_level_var() - self.var_info.coordinates['zlevel'] = self.var_info.coordinates.pop( - 'alevel') + self.var_info.coordinates["zlevel"] = self.var_info.coordinates.pop( + "alevel" + ) self._add_plev_to_cube() self._check_warnings_on_metadata() def test_generic_level_alternative_cmip5(self): """Test valid alternative for generic level coords (CMIP5).""" - self.var_info.table_type = 'CMIP5' + self.var_info.table_type = "CMIP5" self._setup_generic_level_var() self._add_plev_to_cube() self._check_warnings_on_metadata() def test_generic_level_alternative_cmip6(self): """Test valid alternative for generic level coords (CMIP6).""" - self.var_info.table_type = 'CMIP6' + self.var_info.table_type = "CMIP6" 
self._setup_generic_level_var() self._add_plev_to_cube() self._check_warnings_on_metadata() def test_generic_level_alternative_obs4mips(self): """Test valid alternative for generic level coords (obs4MIPs).""" - self.var_info.table_type = 'obs4MIPs' + self.var_info.table_type = "obs4MIPs" self._setup_generic_level_var() self._add_plev_to_cube() self._check_warnings_on_metadata() def test_generic_level_invalid_alternative(self): """Test invalid alternative for generic level coords.""" - self.var_info.table_type = 'CMIP6' + self.var_info.table_type = "CMIP6" self._setup_generic_level_var() self._add_plev_to_cube() - self.cube.coord('air_pressure').standard_name = 'altitude' + self.cube.coord("air_pressure").standard_name = "altitude" self._check_fails_in_metadata() def test_check_bad_var_standard_name_strict_flag(self): """Test check fails for a bad variable standard_name with --cmor-check strict.""" - self.cube.standard_name = 'wind_speed' + self.cube.standard_name = "wind_speed" self._check_fails_in_metadata() def test_check_bad_var_long_name_strict_flag(self): @@ -366,54 +376,54 @@ def test_check_bad_attributes_strict_flag(self): self.var_info.standard_name = "surface_upward_latent_heat_flux" self.var_info.positive = "up" self.cube = self.get_cube(self.var_info) - self.cube.attributes['positive'] = "Wrong attribute" + self.cube.attributes["positive"] = "Wrong attribute" self._check_fails_in_metadata() def test_check_bad_rank_strict_flag(self): """Test check fails for a bad variable rank with --cmor-check strict.""" - lat = iris.coords.AuxCoord.from_coord(self.cube.coord('latitude')) - self.cube.remove_coord('latitude') - self.cube.add_aux_coord(lat, self.cube.coord_dims('longitude')) + lat = iris.coords.AuxCoord.from_coord(self.cube.coord("latitude")) + self.cube.remove_coord("latitude") + self.cube.add_aux_coord(lat, self.cube.coord_dims("longitude")) self._check_fails_in_metadata() def test_check_bad_coord_var_name_strict_flag(self): """Test check fails for bad coord var_name with --cmor-check strict""" - self.var_info.table_type = 'CMIP5' - self.cube.coord('longitude').var_name = 'bad_name' + self.var_info.table_type = "CMIP5" + self.cube.coord("longitude").var_name = "bad_name" self._check_fails_in_metadata() def test_check_missing_lon_strict_flag(self): """Test check fails for missing longitude with --cmor-check strict""" - self.var_info.table_type = 'CMIP5' - self.cube.remove_coord('longitude') + self.var_info.table_type = "CMIP5" + self.cube.remove_coord("longitude") self._check_fails_in_metadata() def test_check_missing_lat_strict_flag(self): """Test check fails for missing latitude with --cmor-check strict""" - self.var_info.table_type = 'CMIP5' - self.cube.remove_coord('latitude') + self.var_info.table_type = "CMIP5" + self.cube.remove_coord("latitude") self._check_fails_in_metadata() def test_check_missing_time_strict_flag(self): """Test check fails for missing time with --cmor-check strict""" - self.var_info.table_type = 'CMIP5' - self.cube.remove_coord('time') + self.var_info.table_type = "CMIP5" + self.cube.remove_coord("time") self._check_fails_in_metadata() def test_check_missing_coord_strict_flag(self): """Test check fails for missing coord other than lat and lon - with --cmor-check strict""" + with --cmor-check strict""" self.var_info.coordinates.update( - {'height2m': CoordinateInfoMock('height2m')} + {"height2m": CoordinateInfoMock("height2m")} ) self._check_fails_in_metadata() def test_check_bad_var_standard_name_relaxed_flag(self): """Test check reports warning for a 
bad variable standard_name with --cmor-check relaxed.""" - self.cube.standard_name = 'wind_speed' + self.cube.standard_name = "wind_speed" self._check_warnings_on_metadata(check_level=CheckLevels.RELAXED) def test_check_bad_var_long_name_relaxed_flag(self): @@ -434,54 +444,54 @@ def test_check_bad_attributes_relaxed_flag(self): self.var_info.standard_name = "surface_upward_latent_heat_flux" self.var_info.positive = "up" self.cube = self.get_cube(self.var_info) - self.cube.attributes['positive'] = "Wrong attribute" + self.cube.attributes["positive"] = "Wrong attribute" self._check_warnings_on_metadata(check_level=CheckLevels.RELAXED) def test_check_bad_rank_relaxed_flag(self): """Test check report warnings for a bad variable rank with --cmor-check relaxed.""" - lat = iris.coords.AuxCoord.from_coord(self.cube.coord('latitude')) - self.cube.remove_coord('latitude') - self.cube.add_aux_coord(lat, self.cube.coord_dims('longitude')) + lat = iris.coords.AuxCoord.from_coord(self.cube.coord("latitude")) + self.cube.remove_coord("latitude") + self.cube.add_aux_coord(lat, self.cube.coord_dims("longitude")) self._check_warnings_on_metadata(check_level=CheckLevels.RELAXED) def test_check_bad_coord_standard_name_relaxed_flag(self): """Test check reports warning for bad coord var_name with --cmor-check relaxed""" - self.var_info.table_type = 'CMIP5' - self.cube.coord('longitude').var_name = 'bad_name' + self.var_info.table_type = "CMIP5" + self.cube.coord("longitude").var_name = "bad_name" self._check_warnings_on_metadata(check_level=CheckLevels.RELAXED) def test_check_missing_lon_relaxed_flag(self): """Test check fails for missing longitude with --cmor-check relaxed""" - self.var_info.table_type = 'CMIP5' - self.cube.remove_coord('longitude') + self.var_info.table_type = "CMIP5" + self.cube.remove_coord("longitude") self._check_fails_in_metadata(check_level=CheckLevels.RELAXED) def test_check_missing_lat_relaxed_flag(self): """Test check fails for missing latitude with --cmor-check relaxed""" - self.var_info.table_type = 'CMIP5' - self.cube.remove_coord('latitude') + self.var_info.table_type = "CMIP5" + self.cube.remove_coord("latitude") self._check_fails_in_metadata(check_level=CheckLevels.RELAXED) def test_check_missing_time_relaxed_flag(self): """Test check fails for missing latitude with --cmor-check relaxed""" - self.var_info.table_type = 'CMIP5' - self.cube.remove_coord('time') + self.var_info.table_type = "CMIP5" + self.cube.remove_coord("time") self._check_fails_in_metadata(check_level=CheckLevels.RELAXED) def test_check_missing_coord_relaxed_flag(self): """Test check reports warning for missing coord other than lat and lon with --cmor-check relaxed""" self.var_info.coordinates.update( - {'height2m': CoordinateInfoMock('height2m')} + {"height2m": CoordinateInfoMock("height2m")} ) self._check_warnings_on_metadata(check_level=CheckLevels.RELAXED) def test_check_bad_var_standard_name_none_flag(self): """Test check reports warning for a bad variable standard_name with --cmor-check ignore.""" - self.cube.standard_name = 'wind_speed' + self.cube.standard_name = "wind_speed" self._check_warnings_on_metadata(check_level=CheckLevels.IGNORE) def test_check_bad_var_long_name_none_flag(self): @@ -502,83 +512,84 @@ def test_check_bad_attributes_none_flag(self): self.var_info.standard_name = "surface_upward_latent_heat_flux" self.var_info.positive = "up" self.cube = self.get_cube(self.var_info) - self.cube.attributes['positive'] = "Wrong attribute" + self.cube.attributes["positive"] = "Wrong attribute" 
self._check_warnings_on_metadata(check_level=CheckLevels.IGNORE) def test_check_bad_rank_none_flag(self): """Test check reports warning for a bad variable rank with --cmor-check ignore.""" - lat = iris.coords.AuxCoord.from_coord(self.cube.coord('latitude')) - self.cube.remove_coord('latitude') - self.cube.add_aux_coord(lat, self.cube.coord_dims('longitude')) + lat = iris.coords.AuxCoord.from_coord(self.cube.coord("latitude")) + self.cube.remove_coord("latitude") + self.cube.add_aux_coord(lat, self.cube.coord_dims("longitude")) self._check_warnings_on_metadata(check_level=CheckLevels.IGNORE) def test_check_bad_coord_standard_name_none_flag(self): """Test check reports warning for bad coord var_name with --cmor-check ignore.""" - self.var_info.table_type = 'CMIP5' - self.cube.coord('longitude').var_name = 'bad_name' + self.var_info.table_type = "CMIP5" + self.cube.coord("longitude").var_name = "bad_name" self._check_warnings_on_metadata(check_level=CheckLevels.IGNORE) def test_check_missing_lon_none_flag(self): """Test check reports warning for missing longitude with --cmor-check ignore""" - self.var_info.table_type = 'CMIP5' - self.cube.remove_coord('longitude') + self.var_info.table_type = "CMIP5" + self.cube.remove_coord("longitude") self._check_warnings_on_metadata(check_level=CheckLevels.IGNORE) def test_check_missing_lat_none_flag(self): """Test check reports warning for missing latitude with --cmor-check ignore""" - self.var_info.table_type = 'CMIP5' - self.cube.remove_coord('latitude') + self.var_info.table_type = "CMIP5" + self.cube.remove_coord("latitude") self._check_warnings_on_metadata(check_level=CheckLevels.IGNORE) def test_check_missing_time_none_flag(self): """Test check reports warning for missing time with --cmor-check ignore""" - self.var_info.table_type = 'CMIP5' - self.cube.remove_coord('time') + self.var_info.table_type = "CMIP5" + self.cube.remove_coord("time") self._check_warnings_on_metadata(check_level=CheckLevels.IGNORE) def test_check_missing_coord_none_flag(self): """Test check reports warning for missing coord other than lat, lon and time with --cmor-check ignore""" self.var_info.coordinates.update( - {'height2m': CoordinateInfoMock('height2m')} + {"height2m": CoordinateInfoMock("height2m")} ) self._check_warnings_on_metadata(check_level=CheckLevels.IGNORE) def test_check_lazy(self): """Test checker does not realise data or aux_coords.""" self.cube.data = self.cube.lazy_data() - self.cube.remove_coord('latitude') - self.cube.remove_coord('longitude') + self.cube.remove_coord("latitude") + self.cube.remove_coord("longitude") self.cube.add_aux_coord( iris.coords.AuxCoord( - da.reshape(da.linspace(-90, 90, num=20*20), (20, 20)), - var_name='lat', - standard_name='latitude', - units='degrees_north' + da.reshape(da.linspace(-90, 90, num=20 * 20), (20, 20)), + var_name="lat", + standard_name="latitude", + units="degrees_north", ), - (1, 2) + (1, 2), ) self.cube.add_aux_coord( iris.coords.AuxCoord( - da.reshape(da.linspace(0, 360, num=20*20), (20, 20)), - var_name='lon', - standard_name='longitude', - units='degrees_east' + da.reshape(da.linspace(0, 360, num=20 * 20), (20, 20)), + var_name="lon", + standard_name="longitude", + units="degrees_east", ), - (1, 2) + (1, 2), ) self._check_cube() - self.assertTrue(self.cube.coord('latitude').has_lazy_points()) - self.assertTrue(self.cube.coord('longitude').has_lazy_points()) + self.assertTrue(self.cube.coord("latitude").has_lazy_points()) + self.assertTrue(self.cube.coord("longitude").has_lazy_points()) 
self.assertTrue(self.cube.has_lazy_data()) - def _check_fails_in_metadata(self, frequency=None, - check_level=CheckLevels.DEFAULT): + def _check_fails_in_metadata( + self, frequency=None, check_level=CheckLevels.DEFAULT + ): checker = CMORCheck( self.cube, self.var_info, @@ -589,15 +600,14 @@ def _check_fails_in_metadata(self, frequency=None, checker.check_metadata() def _check_warnings_on_metadata(self, check_level=CheckLevels.DEFAULT): - checker = CMORCheck( - self.cube, self.var_info, check_level=check_level - ) + checker = CMORCheck(self.cube, self.var_info, check_level=check_level) checker.check_metadata() self.assertTrue(checker.has_warnings()) def _check_debug_messages_on_metadata(self): checker = CMORCheck( - self.cube, self.var_info, + self.cube, + self.var_info, ) checker.check_metadata() self.assertTrue(checker.has_debug_messages()) @@ -609,7 +619,7 @@ def test_non_requested(self): Check issue a warning if a values requested for a coordinate are not correct in the metadata step """ - coord = self.cube.coord('air_pressure') + coord = self.cube.coord("air_pressure") values = np.linspace(0, 40, len(coord.points)) self._update_coordinate_values(self.cube, coord, values) checker = CMORCheck(self.cube, self.var_info) @@ -623,29 +633,33 @@ def test_requested_str_values(self): Check issue a warning if a values requested for a coordinate are not correct in the metadata step """ - region_coord = CoordinateInfoMock('basin') - region_coord.standard_name = 'region' - region_coord.units = '' + region_coord = CoordinateInfoMock("basin") + region_coord.standard_name = "region" + region_coord.units = "" region_coord.requested = [ "atlantic_arctic_ocean", "indian_pacific_ocean", - "global_ocean" + "global_ocean", ] - self.var_info.coordinates['region'] = region_coord + self.var_info.coordinates["region"] = region_coord self.cube = self.get_cube(self.var_info) self._check_cube() def test_requested_non_1d(self): """Warning if requested values in non-1d cannot be checked.""" - coord = self.cube.coord('air_pressure') + coord = self.cube.coord("air_pressure") values = np.linspace(0, 40, len(coord.points)) values = np.broadcast_to(values, (20, 20)) bounds = np.moveaxis(np.stack((values - 0.01, values + 0.01)), 0, -1) new_plev_coord = iris.coords.AuxCoord( - values, bounds=bounds, var_name=coord.var_name, - standard_name=coord.standard_name, long_name=coord.long_name, - units=coord.units) - self.cube.remove_coord('air_pressure') + values, + bounds=bounds, + var_name=coord.var_name, + standard_name=coord.standard_name, + long_name=coord.long_name, + units=coord.units, + ) + self.cube.remove_coord("air_pressure") self.cube.add_aux_coord(new_plev_coord, (2, 3)) checker = CMORCheck(self.cube, self.var_info) checker.check_metadata() @@ -654,37 +668,35 @@ def test_requested_non_1d(self): def test_non_increasing(self): """Fail in metadata if increasing coordinate is decreasing.""" - coord = self.cube.coord('latitude') + coord = self.cube.coord("latitude") values = np.linspace( - coord.points[-1], - coord.points[0], - len(coord.points) + coord.points[-1], coord.points[0], len(coord.points) ) self._update_coordinate_values(self.cube, coord, values) self._check_fails_in_metadata() def test_non_decreasing(self): """Fail in metadata if decreasing coordinate is increasing.""" - self.var_info.coordinates['lat'].stored_direction = 'decreasing' + self.var_info.coordinates["lat"].stored_direction = "decreasing" self._check_fails_in_metadata() # TODO: remove in v2.12 def test_non_decreasing_automatic_fix_metadata(self): 
"""Automatic fix for decreasing coordinate.""" - self.var_info.coordinates['lat'].stored_direction = 'decreasing' + self.var_info.coordinates["lat"].stored_direction = "decreasing" checker = CMORCheck(self.cube, self.var_info, automatic_fixes=True) checker.check_metadata() # TODO: remove in v2.12 def test_non_decreasing_automatic_fix_data(self): """Automatic fix for decreasing coordinate.""" - self.var_info.coordinates['lat'].stored_direction = 'decreasing' + self.var_info.coordinates["lat"].stored_direction = "decreasing" checker = CMORCheck(self.cube, self.var_info, automatic_fixes=True) checker.check_data() def test_lat_non_monotonic(self): """Test fail for non monotonic latitude.""" - lat = self.cube.coord('latitude') + lat = self.cube.coord("latitude") points = np.array(lat.points) points[-1] = points[0] dims = self.cube.coord_dims(lat) @@ -695,43 +707,39 @@ def test_lat_non_monotonic(self): def test_not_bounds(self): """Warning if bounds are not available.""" - self.cube.coord('longitude').bounds = None + self.cube.coord("longitude").bounds = None self._check_warnings_on_metadata() - self.assertFalse(self.cube.coord('longitude').has_bounds()) + self.assertFalse(self.cube.coord("longitude").has_bounds()) def test_not_correct_lons(self): """Fail if longitudes are not correct in metadata step.""" - self.cube = self.cube.intersection(longitude=(-180., 180.)) + self.cube = self.cube.intersection(longitude=(-180.0, 180.0)) self._check_fails_in_metadata() def test_high_lons(self): """Test bad longitudes.""" - self.cube = self.cube.intersection(longitude=(720., 1080.)) + self.cube = self.cube.intersection(longitude=(720.0, 1080.0)) self._check_fails_in_metadata() def test_low_lons(self): """Test bad longitudes.""" - self.cube = self.cube.intersection(longitude=(-720., -360.)) + self.cube = self.cube.intersection(longitude=(-720.0, -360.0)) self._check_fails_in_metadata() def test_not_valid_min(self): """Fail if coordinate values below valid_min.""" - coord = self.cube.coord('latitude') + coord = self.cube.coord("latitude") values = np.linspace( - coord.points[0] - 1, - coord.points[-1], - len(coord.points) + coord.points[0] - 1, coord.points[-1], len(coord.points) ) self._update_coordinate_values(self.cube, coord, values) self._check_fails_in_metadata() def test_not_valid_max(self): """Fail if coordinate values above valid_max.""" - coord = self.cube.coord('latitude') + coord = self.cube.coord("latitude") values = np.linspace( - coord.points[0], - coord.points[-1] + 1, - len(coord.points) + coord.points[0], coord.points[-1] + 1, len(coord.points) ) self._update_coordinate_values(self.cube, coord, values) self._check_fails_in_metadata() @@ -745,43 +753,45 @@ def _update_coordinate_values(cube, coord, values): standard_name=coord.standard_name, long_name=coord.long_name, var_name=coord.var_name, - units=coord.units) + units=coord.units, + ) cube.add_dim_coord(new_coord, dimension) def test_bad_units(self): """Fail if coordinates have bad units.""" - self.cube.coord('latitude').units = 'degrees_n' + self.cube.coord("latitude").units = "degrees_n" self._check_fails_in_metadata() def test_non_convertible_units(self): """Test fail for incompatible coordinate units.""" - self.cube.coord('latitude').units = 'degC' + self.cube.coord("latitude").units = "degC" self._check_fails_in_metadata() def test_bad_time(self): """Fail if time have bad units.""" - self.cube.coord('time').units = 'days' + self.cube.coord("time").units = "days" self._check_fails_in_metadata() def test_wrong_parent_time_unit(self): 
"""Test fail for wrong parent time units.""" - self.cube.coord('time').units = 'days since 1860-1-1 00:00:00' - self.cube.attributes['parent_time_units'] = 'days since ' \ - '1860-1-1-00-00-00' - self.cube.attributes['branch_time_in_parent'] = 0. - self.cube.attributes['branch_time_in_child'] = 0. + self.cube.coord("time").units = "days since 1860-1-1 00:00:00" + self.cube.attributes["parent_time_units"] = ( + "days since 1860-1-1-00-00-00" + ) + self.cube.attributes["branch_time_in_parent"] = 0.0 + self.cube.attributes["branch_time_in_child"] = 0.0 self._check_warnings_on_metadata() - assert self.cube.attributes['branch_time_in_parent'] == 0. - assert self.cube.attributes['branch_time_in_child'] == 0 + assert self.cube.attributes["branch_time_in_parent"] == 0.0 + assert self.cube.attributes["branch_time_in_child"] == 0 def test_time_non_time_units(self): """Test fail for incompatible time units.""" - self.cube.coord('time').units = 'K' + self.cube.coord("time").units = "K" self._check_fails_in_metadata() def test_time_non_monotonic(self): """Test fail for non monotonic times.""" - time = self.cube.coord('time') + time = self.cube.coord("time") points = np.array(time.points) points[-1] = points[0] dims = self.cube.coord_dims(time) @@ -792,124 +802,124 @@ def test_time_non_monotonic(self): def test_bad_standard_name(self): """Fail if coordinates have bad standard names at metadata step.""" - self.cube.coord('time').standard_name = 'region' + self.cube.coord("time").standard_name = "region" self._check_fails_in_metadata() def test_bad_out_name_region_area_type(self): """Debug message if region/area_type AuxCoord has bad var_name at metadata.""" - region_coord = CoordinateInfoMock('basin') - region_coord.standard_name = 'region' - self.var_info.coordinates['region'] = region_coord + region_coord = CoordinateInfoMock("basin") + region_coord.standard_name = "region" + self.var_info.coordinates["region"] = region_coord self.cube = self.get_cube(self.var_info) - self.cube.coord("region").var_name = 'sector' + self.cube.coord("region").var_name = "sector" self._check_debug_messages_on_metadata() def test_bad_out_name_onedim_latitude(self): """Warning if onedimensional lat has bad var_name at metadata""" - self.var_info.table_type = 'CMIP6' - self.cube.coord('latitude').var_name = 'bad_name' + self.var_info.table_type = "CMIP6" + self.cube.coord("latitude").var_name = "bad_name" self._check_fails_in_metadata() def test_bad_out_name_onedim_longitude(self): """Warning if onedimensional lon has bad var_name at metadata""" - self.var_info.table_type = 'CMIP6' - self.cube.coord('longitude').var_name = 'bad_name' + self.var_info.table_type = "CMIP6" + self.cube.coord("longitude").var_name = "bad_name" self._check_fails_in_metadata() def test_bad_out_name_other(self): """Warning if general coordinate has bad var_name at metadata""" - self.var_info.table_type = 'CMIP6' - self.cube.coord('time').var_name = 'bad_name' + self.var_info.table_type = "CMIP6" + self.cube.coord("time").var_name = "bad_name" self._check_fails_in_metadata() def test_bad_out_name(self): """Fail if coordinates have bad short names at metadata step.""" - self.cube.coord('latitude').var_name = 'region' + self.cube.coord("latitude").var_name = "region" self._check_fails_in_metadata() def test_bad_data_units(self): """Fail if data has bad units at metadata step.""" - self.cube.units = 'hPa' + self.cube.units = "hPa" self._check_fails_in_metadata() def test_bad_positive(self): """Fail if positive value is incorrect at metadata step.""" - 
self.cube.attributes['positive'] = 'up' - self.var_info.positive = 'down' + self.cube.attributes["positive"] = "up" + self.var_info.positive = "down" self._check_fails_in_metadata() def test_bad_standard_name_genlevel(self): """Check if generic level has a different.""" - self.cube.coord('depth').standard_name = None + self.cube.coord("depth").standard_name = None self._check_cube() def test_frequency_month_not_same_day(self): """Fail at metadata if frequency (day) not matches data frequency.""" - self.cube = self.get_cube(self.var_info, frequency='mon') - time = self.cube.coord('time') + self.cube = self.get_cube(self.var_info, frequency="mon") + time = self.cube.coord("time") points = np.array(time.points) points[1] = points[1] + 12 dims = self.cube.coord_dims(time) self.cube.remove_coord(time) self.cube.add_dim_coord(time.copy(points), dims) - self._check_cube(frequency='mon') + self._check_cube(frequency="mon") def test_check_pt_freq(self): """Test checks succeeds for a good Pt frequency.""" - self.var_info.frequency = 'dayPt' + self.var_info.frequency = "dayPt" self._check_cube() def test_check_pt_lowercase_freq(self): """Test checks succeeds for a good Pt frequency.""" - self.var_info.frequency = 'daypt' + self.var_info.frequency = "daypt" self._check_cube() def test_bad_frequency_day(self): """Fail at metadata if frequency (day) not matches data frequency.""" - self.cube = self.get_cube(self.var_info, frequency='mon') - self._check_fails_in_metadata(frequency='day') + self.cube = self.get_cube(self.var_info, frequency="mon") + self._check_fails_in_metadata(frequency="day") def test_bad_frequency_subhr(self): """Fail at metadata if frequency (subhr) not matches data frequency.""" - self._check_fails_in_metadata(frequency='subhr') + self._check_fails_in_metadata(frequency="subhr") def test_bad_frequency_dec(self): """Fail at metadata if frequency (dec) not matches data frequency.""" - self._check_fails_in_metadata(frequency='d') + self._check_fails_in_metadata(frequency="d") def test_bad_frequency_yr(self): """Fail at metadata if frequency (yr) not matches data frequency.""" - self._check_fails_in_metadata(frequency='yr') + self._check_fails_in_metadata(frequency="yr") def test_bad_frequency_mon(self): """Fail at metadata if frequency (mon) not matches data frequency.""" - self._check_fails_in_metadata(frequency='mon') + self._check_fails_in_metadata(frequency="mon") def test_bad_frequency_hourly(self): """Fail at metadata if frequency (3hr) not matches data frequency.""" - self._check_fails_in_metadata(frequency='3hr') + self._check_fails_in_metadata(frequency="3hr") def test_frequency_not_supported(self): """Fail at metadata if frequency is not supported.""" - self._check_fails_in_metadata(frequency='wrong_freq') + self._check_fails_in_metadata(frequency="wrong_freq") def test_hr_mip_cordex(self): """Test hourly CORDEX tables are found.""" - checker = _get_cmor_checker('CORDEX', '3hr', 'tas', '3hr') - assert checker(self.cube)._cmor_var.short_name == 'tas' - assert checker(self.cube)._cmor_var.frequency == '3hr' + checker = _get_cmor_checker("CORDEX", "3hr", "tas", "3hr") + assert checker(self.cube)._cmor_var.short_name == "tas" + assert checker(self.cube)._cmor_var.frequency == "3hr" def test_custom_variable(self): - checker = _get_cmor_checker('OBS', 'Amon', 'uajet', 'mon') - assert checker(self.cube)._cmor_var.short_name == 'uajet' + checker = _get_cmor_checker("OBS", "Amon", "uajet", "mon") + assert checker(self.cube)._cmor_var.short_name == "uajet" assert 
checker(self.cube)._cmor_var.long_name == ( - 'Jet position expressed as latitude of maximum meridional wind ' - 'speed' + "Jet position expressed as latitude of maximum meridional wind " + "speed" ) - assert checker(self.cube)._cmor_var.units == 'degrees' + assert checker(self.cube)._cmor_var.units == "degrees" def _check_fails_on_data(self): checker = CMORCheck(self.cube, self.var_info) @@ -923,10 +933,12 @@ def _check_warnings_on_data(self): checker.check_data() self.assertTrue(checker.has_warnings()) - def get_cube(self, - var_info, - set_time_units="days since 1850-1-1 00:00:00", - frequency=None): + def get_cube( + self, + var_info, + set_time_units="days since 1850-1-1 00:00:00", + frequency=None, + ): """ Create a cube based on a specification. @@ -951,7 +963,8 @@ def get_cube(self, frequency = var_info.frequency for dim_spec in var_info.coordinates.values(): coord = self._create_coord_from_spec( - dim_spec, set_time_units, frequency) + dim_spec, set_time_units, frequency + ) if dim_spec.value: scalar_coords.append(coord) else: @@ -959,12 +972,13 @@ def get_cube(self, index += 1 valid_min, valid_max = self._get_valid_limits(var_info) - var_data = (np.ones(len(coords) * [20], 'f') * - (valid_min + (valid_max - valid_min) / 2)) + var_data = np.ones(len(coords) * [20], "f") * ( + valid_min + (valid_max - valid_min) / 2 + ) - if var_info.units == 'psu': + if var_info.units == "psu": units = None - attributes = {'invalid_units': 'psu'} + attributes = {"invalid_units": "psu"} else: units = var_info.units attributes = None @@ -978,7 +992,7 @@ def get_cube(self, attributes=attributes, ) if var_info.positive: - cube.attributes['positive'] = var_info.positive + cube.attributes["positive"] = var_info.positive for coord, i in coords: if isinstance(coord, iris.coords.DimCoord): @@ -997,86 +1011,108 @@ def _get_unstructed_grid_cube(self, n_bounds=2): cube = self.get_cube(self.var_info) cube = cube.extract( - iris.Constraint(latitude=cube.coord('latitude').points[0])) - lat_points = cube.coord('longitude').points - lat_points = lat_points / 3.0 - 50. 
- cube.remove_coord('latitude') - iris.util.demote_dim_coord_to_aux_coord(cube, 'longitude') + iris.Constraint(latitude=cube.coord("latitude").points[0]) + ) + lat_points = cube.coord("longitude").points + lat_points = lat_points / 3.0 - 50.0 + cube.remove_coord("latitude") + iris.util.demote_dim_coord_to_aux_coord(cube, "longitude") lat_points = np.concatenate( ( - cube.coord('longitude').points[0:10] / 4, - cube.coord('longitude').points[0:10] / 4 + cube.coord("longitude").points[0:10] / 4, + cube.coord("longitude").points[0:10] / 4, ), - axis=0 + axis=0, ) lat_bounds = np.concatenate( ( - cube.coord('longitude').bounds[0:10] / 4, - cube.coord('longitude').bounds[0:10] / 4 + cube.coord("longitude").bounds[0:10] / 4, + cube.coord("longitude").bounds[0:10] / 4, ), - axis=0 + axis=0, ) new_lat = iris.coords.AuxCoord( points=lat_points, bounds=lat_bounds, - var_name='lat', - standard_name='latitude', - long_name='Latitude', - units='degrees_north', + var_name="lat", + standard_name="latitude", + long_name="Latitude", + units="degrees_north", ) cube.add_aux_coord(new_lat, 1) # Add additional bound if desired if n_bounds == 3: - for coord_name in ('latitude', 'longitude'): + for coord_name in ("latitude", "longitude"): coord = cube.coord(coord_name) - new_bounds = np.stack(( - coord.bounds[:, 0], - 0.5 * (coord.bounds[:, 0] + coord.bounds[:, 1]), - coord.bounds[:, 1], - )) + new_bounds = np.stack( + ( + coord.bounds[:, 0], + 0.5 * (coord.bounds[:, 0] + coord.bounds[:, 1]), + coord.bounds[:, 1], + ) + ) coord.bounds = np.swapaxes(new_bounds, 0, 1) return cube def _setup_generic_level_var(self): """Setup var_info and cube with generic alevel coordinate.""" - self.var_info.coordinates.pop('depth') - self.var_info.coordinates.pop('air_pressure') + self.var_info.coordinates.pop("depth") + self.var_info.coordinates.pop("air_pressure") # Create cube with sigma coordinate - sigma_coord = CoordinateInfoMock('standard_sigma') - sigma_coord.axis = 'Z' - sigma_coord.out_name = 'lev' - sigma_coord.standard_name = 'atmosphere_sigma_coordinate' - sigma_coord.long_name = 'sigma coordinate' - sigma_coord.generic_lev_name = 'alevel' + sigma_coord = CoordinateInfoMock("standard_sigma") + sigma_coord.axis = "Z" + sigma_coord.out_name = "lev" + sigma_coord.standard_name = "atmosphere_sigma_coordinate" + sigma_coord.long_name = "sigma coordinate" + sigma_coord.generic_lev_name = "alevel" var_info_for_cube = deepcopy(self.var_info) - var_info_for_cube.coordinates['standard_sigma'] = sigma_coord + var_info_for_cube.coordinates["standard_sigma"] = sigma_coord self.cube = self.get_cube(var_info_for_cube) # Create var_info with alevel coord that contains sigma coordinate in # generic_lev_coords dict (just like it is the case for the true CMOR # tables) - gen_lev_coord = CoordinateInfoMock('alevel') + gen_lev_coord = CoordinateInfoMock("alevel") gen_lev_coord.standard_name = None gen_lev_coord.generic_level = True - gen_lev_coord.generic_lev_coords = {'standard_sigma': sigma_coord} - self.var_info.coordinates['alevel'] = gen_lev_coord + gen_lev_coord.generic_lev_coords = {"standard_sigma": sigma_coord} + self.var_info.coordinates["alevel"] = gen_lev_coord def _add_plev_to_cube(self): """Add plev coordinate to cube.""" - if self.cube.coords('atmosphere_sigma_coordinate'): - self.cube.remove_coord('atmosphere_sigma_coordinate') - plevs = [100000.0, 92500.0, 85000.0, 70000.0, 60000.0, 50000.0, - 40000.0, 30000.0, 25000.0, 20000.0, 15000.0, 10000.0, 7000.0, - 5000.0, 3000.0, 2000.0, 1000.0, 900.0, 800.0, 700.0] + if 
self.cube.coords("atmosphere_sigma_coordinate"): + self.cube.remove_coord("atmosphere_sigma_coordinate") + plevs = [ + 100000.0, + 92500.0, + 85000.0, + 70000.0, + 60000.0, + 50000.0, + 40000.0, + 30000.0, + 25000.0, + 20000.0, + 15000.0, + 10000.0, + 7000.0, + 5000.0, + 3000.0, + 2000.0, + 1000.0, + 900.0, + 800.0, + 700.0, + ] coord = iris.coords.DimCoord( plevs, - var_name='plev', - standard_name='air_pressure', - units='Pa', - attributes={'positive': 'down'}, + var_name="plev", + standard_name="air_pressure", + units="Pa", + attributes={"positive": "down"}, ) coord.guess_bounds() self.cube.add_dim_coord(coord, 3) @@ -1101,7 +1137,8 @@ def _construct_scalar_coord(coord_spec): long_name=coord_spec.long_name, var_name=coord_spec.out_name, units=coord_spec.units, - attributes=None) + attributes=None, + ) def _create_coord_from_spec(self, coord_spec, set_time_units, frequency): if coord_spec.units.startswith("days since "): @@ -1121,12 +1158,12 @@ def _create_coord_from_spec(self, coord_spec, set_time_units, frequency): def _construct_array_coord(self, dim_spec, aux=False): if dim_spec.units.startswith("days since "): values = self._get_time_values(dim_spec) - unit = Unit(dim_spec.units, calendar='360_day') + unit = Unit(dim_spec.units, calendar="360_day") else: values = self._get_values(dim_spec) unit = Unit(dim_spec.units) # Set up attributes dictionary - coord_atts = {'stored_direction': dim_spec.stored_direction} + coord_atts = {"stored_direction": dim_spec.stored_direction} if aux: coord = iris.coords.AuxCoord( values, @@ -1155,7 +1192,8 @@ def _get_values(dim_spec): float(dim_spec.requested[0]) except ValueError: return dim_spec.requested + [ - f'Value{x}' for x in range(len(dim_spec.requested), 20)] + f"Value{x}" for x in range(len(dim_spec.requested), 20) + ] valid_min = dim_spec.valid_min if valid_min: valid_min = float(valid_min) @@ -1166,14 +1204,12 @@ def _get_values(dim_spec): valid_max = float(valid_max) else: valid_max = 100.0 - decreasing = dim_spec.stored_direction == 'decreasing' - endpoint = not dim_spec.standard_name == 'longitude' + decreasing = dim_spec.stored_direction == "decreasing" + endpoint = not dim_spec.standard_name == "longitude" if decreasing: - values = np.linspace( - valid_max, valid_min, 20, endpoint=endpoint) + values = np.linspace(valid_max, valid_min, 20, endpoint=endpoint) else: - values = np.linspace( - valid_min, valid_max, 20, endpoint=endpoint) + values = np.linspace(valid_min, valid_max, 20, endpoint=endpoint) values = np.array(values) if dim_spec.requested: requested = [float(val) for val in dim_spec.requested] @@ -1182,10 +1218,12 @@ def _get_values(dim_spec): values[j] = request if decreasing: extra_values = np.linspace( - len(requested), valid_min, 20 - len(requested)) + len(requested), valid_min, 20 - len(requested) + ) else: extra_values = np.linspace( - len(requested), valid_max, 20 - len(requested)) + len(requested), valid_max, 20 - len(requested) + ) for j in range(len(requested), 20): values[j] = extra_values[j - len(requested)] @@ -1195,20 +1233,20 @@ def _get_values(dim_spec): @staticmethod def _get_time_values(dim_spec): frequency = dim_spec.frequency - if frequency == 'mon': + if frequency == "mon": delta = 30 - elif frequency == 'day': + elif frequency == "day": delta = 1 - elif frequency == 'yr': + elif frequency == "yr": delta = 360 - elif frequency == 'dec': + elif frequency == "dec": delta = 3600 - elif frequency.endswith('hr'): - if frequency == 'hr': - frequency = '1hr' + elif frequency.endswith("hr"): + if frequency == "hr": 
+ frequency = "1hr" delta = float(frequency[:-2]) / 24 else: - raise Exception('Frequency {} not supported'.format(frequency)) + raise Exception("Frequency {} not supported".format(frequency)) start = 0 end = start + delta * 20 return np.arange(start, end, step=delta) @@ -1217,13 +1255,13 @@ def _get_time_values(dim_spec): def test_get_cmor_checker_invalid_project_fail(): """Test ``_get_cmor_checker`` with invalid project.""" with pytest.raises(KeyError): - _get_cmor_checker('INVALID_PROJECT', 'mip', 'short_name', 'frequency') + _get_cmor_checker("INVALID_PROJECT", "mip", "short_name", "frequency") def test_deprecate_automatic_fixes(): """Test deprecation of automatic_fixes.""" with pytest.warns(ESMValCoreDeprecationWarning): - CMORCheck('cube', 'var_info', 'frequency', automatic_fixes=True) + CMORCheck("cube", "var_info", "frequency", automatic_fixes=True) if __name__ == "__main__": diff --git a/tests/unit/cmor/test_fix.py b/tests/unit/cmor/test_fix.py index d6a06b2c11..01038d2786 100644 --- a/tests/unit/cmor/test_fix.py +++ b/tests/unit/cmor/test_fix.py @@ -8,64 +8,67 @@ from esmvalcore.cmor.fix import Fix, fix_data, fix_file, fix_metadata -class TestFixFile(): +class TestFixFile: """Fix file tests.""" @pytest.fixture(autouse=True) def setUp(self): """Prepare for testing.""" - self.filename = 'filename' + self.filename = "filename" self.mock_fix = Mock() - self.mock_fix.fix_file.return_value = 'new_filename' + self.mock_fix.fix_file.return_value = "new_filename" self.expected_get_fixes_call = { - 'project': 'project', - 'dataset': 'model', - 'mip': 'mip', - 'short_name': 'short_name', - 'extra_facets': { - 'project': 'project', - 'dataset': 'model', - 'mip': 'mip', - 'short_name': 'short_name', - 'frequency': 'frequency', + "project": "project", + "dataset": "model", + "mip": "mip", + "short_name": "short_name", + "extra_facets": { + "project": "project", + "dataset": "model", + "mip": "mip", + "short_name": "short_name", + "frequency": "frequency", }, - 'session': sentinel.session, - 'frequency': 'frequency', + "session": sentinel.session, + "frequency": "frequency", } def test_fix(self): """Check that the returned fix is applied.""" - with patch('esmvalcore.cmor._fixes.fix.Fix.get_fixes', - return_value=[self.mock_fix]) as mock_get_fixes: + with patch( + "esmvalcore.cmor._fixes.fix.Fix.get_fixes", + return_value=[self.mock_fix], + ) as mock_get_fixes: file_returned = fix_file( - file='filename', - short_name='short_name', - project='project', - dataset='model', - mip='mip', - output_dir=Path('output_dir'), + file="filename", + short_name="short_name", + project="project", + dataset="model", + mip="mip", + output_dir=Path("output_dir"), session=sentinel.session, - frequency='frequency', + frequency="frequency", ) assert file_returned != self.filename - assert file_returned == 'new_filename' + assert file_returned == "new_filename" mock_get_fixes.assert_called_once_with( **self.expected_get_fixes_call ) def test_nofix(self): """Check that the same file is returned if no fix is available.""" - with patch('esmvalcore.cmor._fixes.fix.Fix.get_fixes', - return_value=[]) as mock_get_fixes: + with patch( + "esmvalcore.cmor._fixes.fix.Fix.get_fixes", return_value=[] + ) as mock_get_fixes: file_returned = fix_file( - file='filename', - short_name='short_name', - project='project', - dataset='model', - mip='mip', - output_dir=Path('output_dir'), + file="filename", + short_name="short_name", + project="project", + dataset="model", + mip="mip", + output_dir=Path("output_dir"), 
session=sentinel.session, - frequency='frequency', + frequency="frequency", ) assert file_returned == self.filename mock_get_fixes.assert_called_once_with( @@ -73,19 +76,19 @@ def test_nofix(self): ) -class TestGetCube(): +class TestGetCube: """Test get cube by var_name method.""" @pytest.fixture(autouse=True) def setUp(self): """Prepare for testing.""" self.cube_1 = Mock() - self.cube_1.var_name = 'cube1' + self.cube_1.var_name = "cube1" self.cube_2 = Mock() - self.cube_2.var_name = 'cube2' + self.cube_2.var_name = "cube2" self.cubes = [self.cube_1, self.cube_2] vardef = Mock() - vardef.short_name = 'fix' + vardef.short_name = "fix" self.fix = Fix(vardef) def test_get_first_cube(self): @@ -103,11 +106,11 @@ def test_get_default_raises(self): def test_get_default(self): """Check that the default return the cube (fix is a cube).""" - self.cube_1.var_name = 'fix' + self.cube_1.var_name = "fix" assert self.cube_1 is self.fix.get_cube_from_list(self.cubes) -class TestFixMetadata(): +class TestFixMetadata: """Fix metadata tests.""" @pytest.fixture(autouse=True) @@ -121,47 +124,49 @@ def setUp(self): self.checker = Mock() self.check_metadata = self.checker.return_value.check_metadata self.expected_get_fixes_call = { - 'project': 'project', - 'dataset': 'model', - 'mip': 'mip', - 'short_name': 'short_name', - 'extra_facets': { - 'project': 'project', - 'dataset': 'model', - 'mip': 'mip', - 'short_name': 'short_name', - 'frequency': 'frequency', + "project": "project", + "dataset": "model", + "mip": "mip", + "short_name": "short_name", + "extra_facets": { + "project": "project", + "dataset": "model", + "mip": "mip", + "short_name": "short_name", + "frequency": "frequency", }, - 'session': sentinel.session, - 'frequency': 'frequency', + "session": sentinel.session, + "frequency": "frequency", } @staticmethod - def _create_mock_cube(var_name='short_name'): + def _create_mock_cube(var_name="short_name"): cube = Mock() cube.var_name = var_name - cube.attributes = {'source_file': 'source_file'} + cube.attributes = {"source_file": "source_file"} return cube def test_fix(self): """Check that the returned fix is applied.""" self.check_metadata.side_effect = lambda: self.fixed_cube - with patch('esmvalcore.cmor._fixes.fix.Fix.get_fixes', - return_value=[self.mock_fix]) as mock_get_fixes: - with patch('esmvalcore.cmor.fix._get_cmor_checker', - return_value=self.checker): + with patch( + "esmvalcore.cmor._fixes.fix.Fix.get_fixes", + return_value=[self.mock_fix], + ) as mock_get_fixes: + with patch( + "esmvalcore.cmor.fix._get_cmor_checker", + return_value=self.checker, + ): cube_returned = fix_metadata( cubes=[self.cube], - short_name='short_name', - project='project', - dataset='model', - mip='mip', - frequency='frequency', + short_name="short_name", + project="project", + dataset="model", + mip="mip", + frequency="frequency", session=sentinel.session, )[0] - self.checker.assert_called_once_with( - self.intermediate_cube - ) + self.checker.assert_called_once_with(self.intermediate_cube) self.check_metadata.assert_called_once_with() assert cube_returned is not self.cube assert cube_returned is not self.intermediate_cube @@ -173,17 +178,20 @@ def test_fix(self): def test_nofix(self): """Check that the same cube is returned if no fix is available.""" self.check_metadata.side_effect = lambda: self.cube - with patch('esmvalcore.cmor._fixes.fix.Fix.get_fixes', - return_value=[]) as mock_get_fixes: - with patch('esmvalcore.cmor.fix._get_cmor_checker', - return_value=self.checker): + with patch( + 
"esmvalcore.cmor._fixes.fix.Fix.get_fixes", return_value=[] + ) as mock_get_fixes: + with patch( + "esmvalcore.cmor.fix._get_cmor_checker", + return_value=self.checker, + ): cube_returned = fix_metadata( cubes=[self.cube], - short_name='short_name', - project='project', - dataset='model', - mip='mip', - frequency='frequency', + short_name="short_name", + project="project", + dataset="model", + mip="mip", + frequency="frequency", session=sentinel.session, )[0] self.checker.assert_called_once_with(self.cube) @@ -198,16 +206,19 @@ def test_nofix(self): def test_select_var(self): """Check that the same cube is returned if no fix is available.""" self.check_metadata.side_effect = lambda: self.cube - with patch('esmvalcore.cmor._fixes.fix.Fix.get_fixes', - return_value=[]): - with patch('esmvalcore.cmor.fix._get_cmor_checker', - return_value=self.checker): + with patch( + "esmvalcore.cmor._fixes.fix.Fix.get_fixes", return_value=[] + ): + with patch( + "esmvalcore.cmor.fix._get_cmor_checker", + return_value=self.checker, + ): cube_returned = fix_metadata( - cubes=[self.cube, self._create_mock_cube('extra')], - short_name='short_name', - project='CMIP6', - dataset='model', - mip='mip', + cubes=[self.cube, self._create_mock_cube("extra")], + short_name="short_name", + project="CMIP6", + dataset="model", + mip="mip", )[0] self.checker.assert_called_once_with(self.cube) self.check_metadata.assert_called_once_with() @@ -219,17 +230,17 @@ def test_select_var_failed_if_bad_var_name(self): with pytest.raises(ValueError, match=msg): fix_metadata( cubes=[ - self._create_mock_cube('not_me'), - self._create_mock_cube('me_neither') + self._create_mock_cube("not_me"), + self._create_mock_cube("me_neither"), ], - short_name='tas', - project='CMIP6', - dataset='model', - mip='Amon', + short_name="tas", + project="CMIP6", + dataset="model", + mip="Amon", ) -class TestFixData(): +class TestFixData: """Fix data tests.""" @pytest.fixture(autouse=True) @@ -243,40 +254,42 @@ def setUp(self): self.checker = Mock() self.check_data = self.checker.return_value.check_data self.expected_get_fixes_call = { - 'project': 'project', - 'dataset': 'model', - 'mip': 'mip', - 'short_name': 'short_name', - 'extra_facets': { - 'project': 'project', - 'dataset': 'model', - 'mip': 'mip', - 'short_name': 'short_name', - 'frequency': 'frequency', + "project": "project", + "dataset": "model", + "mip": "mip", + "short_name": "short_name", + "extra_facets": { + "project": "project", + "dataset": "model", + "mip": "mip", + "short_name": "short_name", + "frequency": "frequency", }, - 'session': sentinel.session, - 'frequency': 'frequency', + "session": sentinel.session, + "frequency": "frequency", } def test_fix(self): """Check that the returned fix is applied.""" self.check_data.side_effect = lambda: self.fixed_cube - with patch('esmvalcore.cmor._fixes.fix.Fix.get_fixes', - return_value=[self.mock_fix]) as mock_get_fixes: - with patch('esmvalcore.cmor.fix._get_cmor_checker', - return_value=self.checker): + with patch( + "esmvalcore.cmor._fixes.fix.Fix.get_fixes", + return_value=[self.mock_fix], + ) as mock_get_fixes: + with patch( + "esmvalcore.cmor.fix._get_cmor_checker", + return_value=self.checker, + ): cube_returned = fix_data( self.cube, - short_name='short_name', - project='project', - dataset='model', - mip='mip', - frequency='frequency', + short_name="short_name", + project="project", + dataset="model", + mip="mip", + frequency="frequency", session=sentinel.session, ) - self.checker.assert_called_once_with( - self.intermediate_cube - 
) + self.checker.assert_called_once_with(self.intermediate_cube) self.check_data.assert_called_once_with() assert cube_returned is not self.cube assert cube_returned is not self.intermediate_cube @@ -288,17 +301,20 @@ def test_fix(self): def test_nofix(self): """Check that the same cube is returned if no fix is available.""" self.check_data.side_effect = lambda: self.cube - with patch('esmvalcore.cmor._fixes.fix.Fix.get_fixes', - return_value=[]) as mock_get_fixes: - with patch('esmvalcore.cmor.fix._get_cmor_checker', - return_value=self.checker): + with patch( + "esmvalcore.cmor._fixes.fix.Fix.get_fixes", return_value=[] + ) as mock_get_fixes: + with patch( + "esmvalcore.cmor.fix._get_cmor_checker", + return_value=self.checker, + ): cube_returned = fix_data( self.cube, - short_name='short_name', - project='project', - dataset='model', - mip='mip', - frequency='frequency', + short_name="short_name", + project="project", + dataset="model", + mip="mip", + frequency="frequency", session=sentinel.session, ) self.checker.assert_called_once_with(self.cube) diff --git a/tests/unit/cmor/test_fixes.py b/tests/unit/cmor/test_fixes.py index c1a87e1bc1..be7d9d49c2 100644 --- a/tests/unit/cmor/test_fixes.py +++ b/tests/unit/cmor/test_fixes.py @@ -1,16 +1,20 @@ """Test individual fix functions.""" + import pytest import esmvalcore.cmor._fixes.shared as shared import esmvalcore.cmor.fixes as fixes -@pytest.mark.parametrize('func', [ - 'add_altitude_from_plev', - 'add_plev_from_altitude', - 'get_next_month', - 'get_time_bounds', -]) +@pytest.mark.parametrize( + "func", + [ + "add_altitude_from_plev", + "add_plev_from_altitude", + "get_next_month", + "get_time_bounds", + ], +) def test_imports(func): assert func in fixes.__all__ fn_in_shared = getattr(shared, func) diff --git a/tests/unit/cmor/test_generic_fix.py b/tests/unit/cmor/test_generic_fix.py index 4294fe0752..e1ab742619 100644 --- a/tests/unit/cmor/test_generic_fix.py +++ b/tests/unit/cmor/test_generic_fix.py @@ -13,15 +13,15 @@ @pytest.fixture def generic_fix(): """Generic fix object.""" - vardef = get_var_info('CMIP6', 'CFmon', 'ta') - extra_facets = {'short_name': 'ta', 'project': 'CMIP6', 'dataset': 'MODEL'} + vardef = get_var_info("CMIP6", "CFmon", "ta") + extra_facets = {"short_name": "ta", "project": "CMIP6", "dataset": "MODEL"} return GenericFix(vardef, extra_facets=extra_facets) def test_generic_fix_empty_long_name(generic_fix, monkeypatch): """Test ``GenericFix``.""" # Artificially set long_name to empty string for test - monkeypatch.setattr(generic_fix.vardef, 'long_name', '') + monkeypatch.setattr(generic_fix.vardef, "long_name", "") cube = generic_fix._fix_long_name(sentinel.cube) @@ -31,8 +31,8 @@ def test_generic_fix_empty_long_name(generic_fix, monkeypatch): def test_generic_fix_empty_units(generic_fix, monkeypatch): """Test ``GenericFix``.""" # Artificially set latitude units to empty string for test - coord_info = generic_fix.vardef.coordinates['latitude'] - monkeypatch.setattr(coord_info, 'units', '') + coord_info = generic_fix.vardef.coordinates["latitude"] + monkeypatch.setattr(coord_info, "units", "") ret = generic_fix._fix_coord_units( sentinel.cube, coord_info, sentinel.cube_coord @@ -45,7 +45,7 @@ def test_generic_fix_no_generic_lev_coords(generic_fix, monkeypatch): """Test ``GenericFix``.""" # Artificially remove generic_lev_coords monkeypatch.setattr( - generic_fix.vardef.coordinates['alevel'], 'generic_lev_coords', {} + generic_fix.vardef.coordinates["alevel"], "generic_lev_coords", {} ) cube = 
generic_fix._fix_alternative_generic_level_coords(sentinel.cube) @@ -55,7 +55,7 @@ def test_generic_fix_no_generic_lev_coords(generic_fix, monkeypatch): def test_requested_levels_2d_coord(generic_fix, mocker): """Test ``GenericFix``.""" - cube_coord = AuxCoord([[0]], standard_name='latitude', units='rad') + cube_coord = AuxCoord([[0]], standard_name="latitude", units="rad") cmor_coord = mocker.Mock(requested=True) ret = generic_fix._fix_requested_coord_values( @@ -67,8 +67,8 @@ def test_requested_levels_2d_coord(generic_fix, mocker): def test_requested_levels_invalid_arr(generic_fix, mocker): """Test ``GenericFix``.""" - cube_coord = AuxCoord([0], standard_name='latitude', units='rad') - cmor_coord = mocker.Mock(requested=['a', 'b']) + cube_coord = AuxCoord([0], standard_name="latitude", units="rad") + cmor_coord = mocker.Mock(requested=["a", "b"]) ret = generic_fix._fix_requested_coord_values( sentinel.cube, cmor_coord, cube_coord @@ -80,7 +80,7 @@ def test_requested_levels_invalid_arr(generic_fix, mocker): def test_lon_no_fix_needed(generic_fix): """Test ``GenericFix``.""" cube_coord = AuxCoord( - [0.0, 180.0, 360.0], standard_name='longitude', units='rad' + [0.0, 180.0, 360.0], standard_name="longitude", units="rad" ) ret = generic_fix._fix_longitude_0_360( @@ -93,7 +93,7 @@ def test_lon_no_fix_needed(generic_fix): def test_lon_too_low_to_fix(generic_fix): """Test ``GenericFix``.""" cube_coord = AuxCoord( - [-370.0, 0.0], standard_name='longitude', units='rad' + [-370.0, 0.0], standard_name="longitude", units="rad" ) ret = generic_fix._fix_longitude_0_360( @@ -105,7 +105,7 @@ def test_lon_too_low_to_fix(generic_fix): def test_lon_too_high_to_fix(generic_fix): """Test ``GenericFix``.""" - cube_coord = AuxCoord([750.0, 0.0], standard_name='longitude', units='rad') + cube_coord = AuxCoord([750.0, 0.0], standard_name="longitude", units="rad") ret = generic_fix._fix_longitude_0_360( sentinel.cube, sentinel.cmor_coord, cube_coord @@ -116,7 +116,7 @@ def test_lon_too_high_to_fix(generic_fix): def test_fix_direction_2d_coord(generic_fix): """Test ``GenericFix``.""" - cube_coord = AuxCoord([[0]], standard_name='latitude', units='rad') + cube_coord = AuxCoord([[0]], standard_name="latitude", units="rad") ret = generic_fix._fix_coord_direction( sentinel.cube, sentinel.cmor_coord, cube_coord @@ -127,7 +127,7 @@ def test_fix_direction_2d_coord(generic_fix): def test_fix_direction_string_coord(generic_fix): """Test ``GenericFix``.""" - cube_coord = AuxCoord(['a'], standard_name='latitude', units='rad') + cube_coord = AuxCoord(["a"], standard_name="latitude", units="rad") ret = generic_fix._fix_coord_direction( sentinel.cube, sentinel.cmor_coord, cube_coord @@ -139,8 +139,8 @@ def test_fix_direction_string_coord(generic_fix): def test_fix_direction_no_stored_direction(generic_fix, mocker): """Test ``GenericFix``.""" cube = Cube(0) - cube_coord = AuxCoord([0, 1], standard_name='latitude', units='rad') - cmor_coord = mocker.Mock(stored_direction='') + cube_coord = AuxCoord([0, 1], standard_name="latitude", units="rad") + cmor_coord = mocker.Mock(stored_direction="") ret = generic_fix._fix_coord_direction(cube, cmor_coord, cube_coord) @@ -154,16 +154,16 @@ def test_fix_metadata_not_fail_with_empty_cube(generic_fix): assert isinstance(fixed_cubes, CubeList) assert len(fixed_cubes) == 1 assert fixed_cubes[0] == Cube( - 0, standard_name='air_temperature', long_name='Air Temperature' + 0, standard_name="air_temperature", long_name="Air Temperature" ) @pytest.mark.parametrize( - 'extra_facets', [{}, 
{'project': 'P', 'dataset': 'D'}] + "extra_facets", [{}, {"project": "P", "dataset": "D"}] ) def test_fix_metadata_multiple_cubes_fail(extra_facets): """Generic fixes should fail when multiple invalid cubes are given.""" - vardef = get_var_info('CMIP6', 'Amon', 'ta') + vardef = get_var_info("CMIP6", "Amon", "ta") fix = GenericFix(vardef, extra_facets=extra_facets) with pytest.raises(ValueError): fix.fix_metadata([Cube(0), Cube(0)]) @@ -171,14 +171,14 @@ def test_fix_metadata_multiple_cubes_fail(extra_facets): def test_fix_metadata_no_extra_facets(): """Generic fixes should not fail when no extra facets are given.""" - vardef = get_var_info('CMIP6', 'Amon', 'ta') + vardef = get_var_info("CMIP6", "Amon", "ta") fix = GenericFix(vardef) fixed_cubes = fix.fix_metadata([Cube(0)]) assert isinstance(fixed_cubes, CubeList) assert len(fixed_cubes) == 1 assert fixed_cubes[0] == Cube( - 0, standard_name='air_temperature', long_name='Air Temperature' + 0, standard_name="air_temperature", long_name="Air Temperature" ) @@ -192,7 +192,7 @@ def test_fix_data_not_fail_with_empty_cube(generic_fix): def test_fix_data_no_extra_facets(): """Generic fixes should not fail when no extra facets are given.""" - vardef = get_var_info('CMIP6', 'Amon', 'ta') + vardef = get_var_info("CMIP6", "Amon", "ta") fix = GenericFix(vardef) fixed_cube = fix.fix_data(Cube(0)) diff --git a/tests/unit/cmor/test_table.py b/tests/unit/cmor/test_table.py index f7dcc880cd..155933d958 100644 --- a/tests/unit/cmor/test_table.py +++ b/tests/unit/cmor/test_table.py @@ -10,57 +10,57 @@ class TestVariableInfo(unittest.TestCase): def setUp(self): """Prepare for testing.""" - self.info = VariableInfo('table_type', 'var') - self.value = 'value' + self.info = VariableInfo("table_type", "var") + self.value = "value" self.coords = { - 'dim0': CoordinateInfo('dim0'), - 'dim1': CoordinateInfo('dim1'), - 'dim2': CoordinateInfo('dim2'), + "dim0": CoordinateInfo("dim0"), + "dim1": CoordinateInfo("dim1"), + "dim2": CoordinateInfo("dim2"), } def test_constructor(self): """Test basic constructor.""" - self.assertEqual('table_type', self.info.table_type) - self.assertEqual('var', self.info.short_name) + self.assertEqual("table_type", self.info.table_type) + self.assertEqual("var", self.info.short_name) def test_read_empty_dictionary(self): """Test read empty dict.""" - self.info.read_json({}, '') - self.assertEqual('', self.info.standard_name) + self.info.read_json({}, "") + self.assertEqual("", self.info.standard_name) def test_read_standard_name(self): """Test standard_name.""" - self.info.read_json({'standard_name': self.value}, '') + self.info.read_json({"standard_name": self.value}, "") self.assertEqual(self.info.standard_name, self.value) def test_read_long_name(self): """Test long_name.""" - self.info.read_json({'long_name': self.value}, '') + self.info.read_json({"long_name": self.value}, "") self.assertEqual(self.info.long_name, self.value) def test_read_units(self): """Test units.""" - self.info.read_json({'units': self.value}, '') + self.info.read_json({"units": self.value}, "") self.assertEqual(self.info.units, self.value) def test_read_valid_min(self): """Test valid_min.""" - self.info.read_json({'valid_min': self.value}, '') + self.info.read_json({"valid_min": self.value}, "") self.assertEqual(self.info.valid_min, self.value) def test_read_valid_max(self): """Test valid_max.""" - self.info.read_json({'valid_max': self.value}, '') + self.info.read_json({"valid_max": self.value}, "") self.assertEqual(self.info.valid_max, self.value) def 
test_read_positive(self): """Test positive.""" - self.info.read_json({'positive': self.value}, '') + self.info.read_json({"positive": self.value}, "") self.assertEqual(self.info.positive, self.value) def test_read_frequency(self): """Test frequency.""" - self.info.read_json({'frequency': self.value}, '') + self.info.read_json({"frequency": self.value}, "") self.assertEqual(self.info.frequency, self.value) def test_read_default_frequency(self): @@ -70,25 +70,25 @@ def test_read_default_frequency(self): def test_has_coord_with_standard_name_empty(self): """Test `has_coord_with_standard_name`.""" - assert self.info.has_coord_with_standard_name('time') is False + assert self.info.has_coord_with_standard_name("time") is False def test_has_coord_with_standard_name_false(self): """Test `has_coord_with_standard_name`.""" self.info.coordinates = self.coords - assert self.info.has_coord_with_standard_name('time') is False + assert self.info.has_coord_with_standard_name("time") is False def test_has_coord_with_standard_name_true(self): """Test `has_coord_with_standard_name`.""" self.info.coordinates = self.coords - self.info.coordinates['dim0'].standard_name = 'time' - assert self.info.has_coord_with_standard_name('time') is True + self.info.coordinates["dim0"].standard_name = "time" + assert self.info.has_coord_with_standard_name("time") is True def test_has_coord_with_standard_name_multiple(self): """Test `has_coord_with_standard_name`.""" self.info.coordinates = self.coords - self.info.coordinates['dim1'].standard_name = 'time' - self.info.coordinates['dim2'].standard_name = 'time' - assert self.info.has_coord_with_standard_name('time') is True + self.info.coordinates["dim1"].standard_name = "time" + self.info.coordinates["dim2"].standard_name = "time" + assert self.info.has_coord_with_standard_name("time") is True class TestCoordinateInfo(unittest.TestCase): @@ -96,64 +96,64 @@ class TestCoordinateInfo(unittest.TestCase): def setUp(self): """Prepare for testing.""" - self.value = 'value' + self.value = "value" def test_constructor(self): """Test constructor.""" - info = CoordinateInfo('var') - self.assertEqual('var', info.name) + info = CoordinateInfo("var") + self.assertEqual("var", info.name) def test_read_empty_dictionary(self): """Test empty dict.""" - info = CoordinateInfo('var') + info = CoordinateInfo("var") info.read_json({}) - self.assertEqual('', info.standard_name) + self.assertEqual("", info.standard_name) def test_read_standard_name(self): """Test standard_name.""" - info = CoordinateInfo('var') - info.read_json({'standard_name': self.value}) + info = CoordinateInfo("var") + info.read_json({"standard_name": self.value}) self.assertEqual(info.standard_name, self.value) def test_read_var_name(self): """Test var_name.""" - info = CoordinateInfo('var') - info.read_json({'var_name': self.value}) + info = CoordinateInfo("var") + info.read_json({"var_name": self.value}) self.assertEqual(info.var_name, self.value) def test_read_out_name(self): """Test out_name.""" - info = CoordinateInfo('var') - info.read_json({'out_name': self.value}) + info = CoordinateInfo("var") + info.read_json({"out_name": self.value}) self.assertEqual(info.out_name, self.value) def test_read_units(self): """Test units.""" - info = CoordinateInfo('var') - info.read_json({'units': self.value}) + info = CoordinateInfo("var") + info.read_json({"units": self.value}) self.assertEqual(info.units, self.value) def test_read_valid_min(self): """Test valid_min.""" - info = CoordinateInfo('var') - info.read_json({'valid_min': 
self.value}) + info = CoordinateInfo("var") + info.read_json({"valid_min": self.value}) self.assertEqual(info.valid_min, self.value) def test_read_valid_max(self): """Test valid_max.""" - info = CoordinateInfo('var') - info.read_json({'valid_max': self.value}) + info = CoordinateInfo("var") + info.read_json({"valid_max": self.value}) self.assertEqual(info.valid_max, self.value) def test_read_value(self): """Test value.""" - info = CoordinateInfo('var') - info.read_json({'value': self.value}) + info = CoordinateInfo("var") + info.read_json({"value": self.value}) self.assertEqual(info.value, self.value) def test_read_requested(self): """Test requested.""" - value = ['value1', 'value2'] - info = CoordinateInfo('var') - info.read_json({'requested': value}) + value = ["value1", "value2"] + info = CoordinateInfo("var") + info.read_json({"requested": value}) self.assertEqual(info.requested, value) diff --git a/tests/unit/cmor/test_utils.py b/tests/unit/cmor/test_utils.py index f373524a7d..b21fa5ccba 100644 --- a/tests/unit/cmor/test_utils.py +++ b/tests/unit/cmor/test_utils.py @@ -7,54 +7,58 @@ @pytest.mark.parametrize( - 'cubes', [[Cube(0)], [Cube(0, var_name='x')], [Cube(0, var_name='y')]] + "cubes", [[Cube(0)], [Cube(0, var_name="x")], [Cube(0, var_name="y")]] ) def test_get_single_cube_one_cube(cubes, caplog): """Test ``_get_single_cube``.""" - single_cube = _get_single_cube(cubes, 'x') + single_cube = _get_single_cube(cubes, "x") assert single_cube == cubes[0] assert not caplog.records @pytest.mark.parametrize( - 'dataset_str,msg', [ + "dataset_str,msg", + [ (None, "Found variable x, but"), - ('XYZ', "Found variable x in XYZ, but"), - ] + ("XYZ", "Found variable x in XYZ, but"), + ], ) @pytest.mark.parametrize( - 'cubes', [ - [Cube(0), Cube(0, var_name='x')], - [Cube(0, var_name='x'), Cube(0)], - [Cube(0, var_name='x'), Cube(0, var_name='x')], - [Cube(0), Cube(0), Cube(0, var_name='x')], - ] + "cubes", + [ + [Cube(0), Cube(0, var_name="x")], + [Cube(0, var_name="x"), Cube(0)], + [Cube(0, var_name="x"), Cube(0, var_name="x")], + [Cube(0), Cube(0), Cube(0, var_name="x")], + ], ) def test_get_single_cube_multiple_cubes(cubes, dataset_str, msg, caplog): """Test ``_get_single_cube``.""" - single_cube = _get_single_cube(cubes, 'x', dataset_str=dataset_str) - assert single_cube == Cube(0, var_name='x') + single_cube = _get_single_cube(cubes, "x", dataset_str=dataset_str) + assert single_cube == Cube(0, var_name="x") assert len(caplog.records) == 1 log = caplog.records[0] - assert log.levelname == 'WARNING' + assert log.levelname == "WARNING" assert msg in log.message @pytest.mark.parametrize( - 'dataset_str,msg', [ + "dataset_str,msg", + [ (None, "More than one cube found for variable x but"), - ('XYZ', "More than one cube found for variable x in XYZ but"), - ] + ("XYZ", "More than one cube found for variable x in XYZ but"), + ], ) @pytest.mark.parametrize( - 'cubes', [ + "cubes", + [ [Cube(0), Cube(0)], - [Cube(0, var_name='y'), Cube(0)], - [Cube(0, var_name='y'), Cube(0, var_name='z')], - [Cube(0), Cube(0), Cube(0, var_name='z')], - ] + [Cube(0, var_name="y"), Cube(0)], + [Cube(0, var_name="y"), Cube(0, var_name="z")], + [Cube(0), Cube(0), Cube(0, var_name="z")], + ], ) def test_get_single_cube_no_cubes_fail(cubes, dataset_str, msg): """Test ``_get_single_cube``.""" with pytest.raises(ValueError, match=msg): - _get_single_cube(cubes, 'x', dataset_str=dataset_str) + _get_single_cube(cubes, "x", dataset_str=dataset_str) diff --git a/tests/unit/config/test_config.py 
b/tests/unit/config/test_config.py index ec88e281e3..513fd20595 100644 --- a/tests/unit/config/test_config.py +++ b/tests/unit/config/test_config.py @@ -20,14 +20,17 @@ TEST_DEEP_UPDATE = [ ([{}], {}), ([dict(a=1, b=2), dict(a=3)], dict(a=3, b=2)), - ([ - dict(a=dict(b=1, c=dict(d=2)), e=dict(f=4, g=5)), - dict(a=dict(b=2, c=3)), - ], dict(a=dict(b=2, c=3), e=dict(f=4, g=5))), + ( + [ + dict(a=dict(b=1, c=dict(d=2)), e=dict(f=4, g=5)), + dict(a=dict(b=2, c=3)), + ], + dict(a=dict(b=2, c=3), e=dict(f=4, g=5)), + ), ] -@pytest.mark.parametrize('dictionaries, expected_merged', TEST_DEEP_UPDATE) +@pytest.mark.parametrize("dictionaries, expected_merged", TEST_DEEP_UPDATE) def test_deep_update(dictionaries, expected_merged): merged = dictionaries[0] for update in dictionaries[1:]: @@ -35,36 +38,60 @@ def test_deep_update(dictionaries, expected_merged): assert expected_merged == merged -BASE_PATH = importlib_files('tests') -BASE_PATH /= Path('sample_data') / Path('extra_facets') # type: ignore +BASE_PATH = importlib_files("tests") +BASE_PATH /= Path("sample_data") / Path("extra_facets") # type: ignore TEST_LOAD_EXTRA_FACETS = [ - ('test-nonexistent', tuple(), {}), - ('test-nonexistent', (BASE_PATH / 'simple', ), {}), # type: ignore + ("test-nonexistent", tuple(), {}), + ("test-nonexistent", (BASE_PATH / "simple",), {}), # type: ignore ( - 'test6', - (BASE_PATH / 'simple', ), # type: ignore - dict(PROJECT1=dict(Amon=dict( - tas=dict(cds_var_name='2m_temperature', source_var_name='2t'), - psl=dict(cds_var_name='mean_sea_level_pressure', - source_var_name='msl'))))), + "test6", + (BASE_PATH / "simple",), # type: ignore + dict( + PROJECT1=dict( + Amon=dict( + tas=dict( + cds_var_name="2m_temperature", source_var_name="2t" + ), + psl=dict( + cds_var_name="mean_sea_level_pressure", + source_var_name="msl", + ), + ) + ) + ), + ), ( - 'test6', - (BASE_PATH / 'simple', BASE_PATH / 'override'), # type: ignore - dict(PROJECT1=dict(Amon=dict( - tas=dict(cds_var_name='temperature_2m', source_var_name='t2m'), - psl=dict(cds_var_name='mean_sea_level_pressure', - source_var_name='msl'), - uas=dict(cds_var_name='10m_u-component_of_neutral_wind', - source_var_name='u10n'), - vas=dict(cds_var_name='v-component_of_neutral_wind_at_10m', - source_var_name='10v'), - )))), + "test6", + (BASE_PATH / "simple", BASE_PATH / "override"), # type: ignore + dict( + PROJECT1=dict( + Amon=dict( + tas=dict( + cds_var_name="temperature_2m", source_var_name="t2m" + ), + psl=dict( + cds_var_name="mean_sea_level_pressure", + source_var_name="msl", + ), + uas=dict( + cds_var_name="10m_u-component_of_neutral_wind", + source_var_name="u10n", + ), + vas=dict( + cds_var_name="v-component_of_neutral_wind_at_10m", + source_var_name="10v", + ), + ) + ) + ), + ), ] -@pytest.mark.parametrize('project, extra_facets_dir, expected', - TEST_LOAD_EXTRA_FACETS) +@pytest.mark.parametrize( + "project, extra_facets_dir, expected", TEST_LOAD_EXTRA_FACETS +) def test_load_extra_facets(project, extra_facets_dir, expected): extra_facets = _load_extra_facets(project, extra_facets_dir) assert extra_facets == expected @@ -73,11 +100,12 @@ def test_load_extra_facets(project, extra_facets_dir, expected): def test_get_extra_facets(tmp_path): dataset = Dataset( **{ - 'project': 'test_project', - 'mip': 'test_mip', - 'dataset': 'test_dataset', - 'short_name': 'test_short_name', - }) + "project": "test_project", + "mip": "test_mip", + "dataset": "test_dataset", + "short_name": "test_short_name", + } + ) extra_facets_file = tmp_path / f"{dataset['project']}-test.yml" 
extra_facets_file.write_text( textwrap.dedent(""" @@ -85,55 +113,61 @@ def test_get_extra_facets(tmp_path): {mip}: {short_name}: key: value - """).strip().format(**dataset.facets)) + """) + .strip() + .format(**dataset.facets) + ) - extra_facets = get_extra_facets(dataset, extra_facets_dir=(tmp_path, )) + extra_facets = get_extra_facets(dataset, extra_facets_dir=(tmp_path,)) - assert extra_facets == {'key': 'value'} + assert extra_facets == {"key": "value"} def test_get_extra_facets_cmip3(): - dataset = Dataset(**{ - 'project': 'CMIP3', - 'mip': 'A1', - 'short_name': 'tas', - 'dataset': 'CM3', - }) + dataset = Dataset( + **{ + "project": "CMIP3", + "mip": "A1", + "short_name": "tas", + "dataset": "CM3", + } + ) extra_facets = get_extra_facets(dataset, extra_facets_dir=tuple()) - assert extra_facets == {'institute': ['CNRM', 'INM', 'CNRM_CERFACS']} + assert extra_facets == {"institute": ["CNRM", "INM", "CNRM_CERFACS"]} def test_get_extra_facets_cmip5(): dataset = Dataset( **{ - 'project': 'CMIP5', - 'mip': 'Amon', - 'short_name': 'tas', - 'dataset': 'ACCESS1-0', - }) + "project": "CMIP5", + "mip": "Amon", + "short_name": "tas", + "dataset": "ACCESS1-0", + } + ) extra_facets = get_extra_facets(dataset, extra_facets_dir=tuple()) assert extra_facets == { - 'institute': ['CSIRO-BOM'], - 'product': ['output1', 'output2'] + "institute": ["CSIRO-BOM"], + "product": ["output1", "output2"], } def test_get_project_config(mocker): mock_result = mocker.Mock() - mocker.patch.object(_config, 'CFG', {'CMIP6': mock_result}) + mocker.patch.object(_config, "CFG", {"CMIP6": mock_result}) # Check valid result - result = _config.get_project_config('CMIP6') + result = _config.get_project_config("CMIP6") assert result == mock_result # Check error with pytest.raises(RecipeError): - _config.get_project_config('non-existent-project') + _config.get_project_config("non-existent-project") -CONFIG_USER_FILE = importlib_files('esmvalcore') / 'config-user.yml' +CONFIG_USER_FILE = importlib_files("esmvalcore") / "config-user.yml" @pytest.fixture @@ -149,52 +183,50 @@ def default_config(): def test_load_default_config(monkeypatch, default_config): """Test that the default configuration can be loaded.""" project_cfg = {} - monkeypatch.setattr(_config, 'CFG', project_cfg) - default_dev_file = importlib_files('esmvalcore') / 'config-developer.yml' - cfg = CFG.start_session('recipe_example') + monkeypatch.setattr(_config, "CFG", project_cfg) + default_dev_file = importlib_files("esmvalcore") / "config-developer.yml" + cfg = CFG.start_session("recipe_example") default_cfg = { - 'auxiliary_data_dir': Path.home() / 'auxiliary_data', - 'check_level': CheckLevels.DEFAULT, - 'compress_netcdf': False, - 'config_developer_file': default_dev_file, - 'config_file': CONFIG_USER_FILE, - 'diagnostics': None, - 'download_dir': Path.home() / 'climate_data', - 'drs': { - 'CMIP3': 'ESGF', - 'CMIP5': 'ESGF', - 'CMIP6': 'ESGF', - 'CORDEX': 'ESGF', - 'obs4MIPs': 'ESGF' - }, - 'exit_on_warning': False, - 'extra_facets_dir': tuple(), - 'log_level': 'info', - 'max_datasets': None, - 'max_parallel_tasks': None, - 'max_years': None, - 'output_dir': Path.home() / 'esmvaltool_output', - 'output_file_type': 'png', - 'profile_diagnostic': False, - 'remove_preproc_dir': True, - 'resume_from': [], - 'rootpath': { - 'default': [Path.home() / 'climate_data'] + "auxiliary_data_dir": Path.home() / "auxiliary_data", + "check_level": CheckLevels.DEFAULT, + "compress_netcdf": False, + "config_developer_file": default_dev_file, + "config_file": CONFIG_USER_FILE, + 
"diagnostics": None, + "download_dir": Path.home() / "climate_data", + "drs": { + "CMIP3": "ESGF", + "CMIP5": "ESGF", + "CMIP6": "ESGF", + "CORDEX": "ESGF", + "obs4MIPs": "ESGF", }, - 'run_diagnostic': True, - 'search_esgf': 'never', - 'skip_nonexistent': False, - 'save_intermediary_cubes': False, + "exit_on_warning": False, + "extra_facets_dir": tuple(), + "log_level": "info", + "max_datasets": None, + "max_parallel_tasks": None, + "max_years": None, + "output_dir": Path.home() / "esmvaltool_output", + "output_file_type": "png", + "profile_diagnostic": False, + "remove_preproc_dir": True, + "resume_from": [], + "rootpath": {"default": [Path.home() / "climate_data"]}, + "run_diagnostic": True, + "search_esgf": "never", + "skip_nonexistent": False, + "save_intermediary_cubes": False, } directory_attrs = { - 'session_dir', - 'plot_dir', - 'preproc_dir', - 'run_dir', - 'work_dir', - 'config_dir', + "session_dir", + "plot_dir", + "preproc_dir", + "run_dir", + "work_dir", + "config_dir", } # Check that only allowed keys are in it assert set(default_cfg) == set(cfg) @@ -208,10 +240,11 @@ def test_load_default_config(monkeypatch, default_config): # Check output directories assert str(cfg.session_dir).startswith( - str(Path.home() / 'esmvaltool_output' / 'recipe_example')) - for path in ('preproc', 'work', 'run'): - assert getattr(cfg, path + '_dir') == cfg.session_dir / path - assert cfg.plot_dir == cfg.session_dir / 'plots' + str(Path.home() / "esmvaltool_output" / "recipe_example") + ) + for path in ("preproc", "work", "run"): + assert getattr(cfg, path + "_dir") == cfg.session_dir / path + assert cfg.plot_dir == cfg.session_dir / "plots" assert cfg.config_dir == Path(esmvalcore.__file__).parent # Check that projects were configured @@ -220,60 +253,60 @@ def test_load_default_config(monkeypatch, default_config): def test_rootpath_obs4mips_case_correction(default_config): """Test that the name of the obs4MIPs project is correct in rootpath.""" - CFG['rootpath'] = {'obs4mips': '/path/to/data'} - assert 'obs4mips' not in CFG['rootpath'] - assert CFG['rootpath']['obs4MIPs'] == [Path('/path/to/data')] + CFG["rootpath"] = {"obs4mips": "/path/to/data"} + assert "obs4mips" not in CFG["rootpath"] + assert CFG["rootpath"]["obs4MIPs"] == [Path("/path/to/data")] def test_drs_obs4mips_case_correction(default_config): """Test that the name of the obs4MIPs project is correct in rootpath.""" - CFG['drs'] = {'obs4mips': 'ESGF'} - assert 'obs4mips' not in CFG['drs'] - assert CFG['drs']['obs4MIPs'] == 'ESGF' + CFG["drs"] = {"obs4mips": "ESGF"} + assert "obs4mips" not in CFG["drs"] + assert CFG["drs"]["obs4MIPs"] == "ESGF" def test_project_obs4mips_case_correction(tmp_path, monkeypatch, mocker): - monkeypatch.setattr(_config, 'CFG', {}) - mocker.patch.object(_config, 'read_cmor_tables', autospec=True) - cfg_file = tmp_path / 'config-developer.yml' - project_cfg = {'input_dir': {'default': '/'}} + monkeypatch.setattr(_config, "CFG", {}) + mocker.patch.object(_config, "read_cmor_tables", autospec=True) + cfg_file = tmp_path / "config-developer.yml" + project_cfg = {"input_dir": {"default": "/"}} cfg_dev = { - 'obs4mips': project_cfg, + "obs4mips": project_cfg, } - with cfg_file.open('w', encoding='utf-8') as file: + with cfg_file.open("w", encoding="utf-8") as file: yaml.safe_dump(cfg_dev, file) _config.load_config_developer(cfg_file) - assert 'obs4mips' not in _config.CFG - assert _config.CFG['obs4MIPs'] == project_cfg + assert "obs4mips" not in _config.CFG + assert _config.CFG["obs4MIPs"] == project_cfg # Restore 
config-developer file _config_validators.validate_config_developer(None) def test_load_config_developer_custom(tmp_path, monkeypatch, mocker): - monkeypatch.setattr(_config, 'CFG', {}) - mocker.patch.object(_config, 'read_cmor_tables', autospec=True) - cfg_file = tmp_path / 'config-developer.yml' - cfg_dev = {'custom': {'cmor_path': '/path/to/tables'}} - with cfg_file.open('w', encoding='utf-8') as file: + monkeypatch.setattr(_config, "CFG", {}) + mocker.patch.object(_config, "read_cmor_tables", autospec=True) + cfg_file = tmp_path / "config-developer.yml" + cfg_dev = {"custom": {"cmor_path": "/path/to/tables"}} + with cfg_file.open("w", encoding="utf-8") as file: yaml.safe_dump(cfg_dev, file) _config.load_config_developer(cfg_file) - assert 'custom' in _config.CFG + assert "custom" in _config.CFG # Restore config-developer file _config_validators.validate_config_developer(None) @pytest.mark.parametrize( - 'project,step', + "project,step", [ - ('invalid_project', 'load'), - ('CMIP6', 'load'), - ('EMAC', 'save'), + ("invalid_project", "load"), + ("CMIP6", "load"), + ("EMAC", "save"), ], ) def test_get_ignored_warnings_none(project, step): @@ -283,6 +316,6 @@ def test_get_ignored_warnings_none(project, step): def test_get_ignored_warnings_emac(): """Test ``get_ignored_warnings``.""" - ignored_warnings = get_ignored_warnings('EMAC', 'load') + ignored_warnings = get_ignored_warnings("EMAC", "load") assert isinstance(ignored_warnings, list) assert ignored_warnings diff --git a/tests/unit/config/test_config_object.py b/tests/unit/config/test_config_object.py index 16084effc4..ac301fb43e 100644 --- a/tests/unit/config/test_config_object.py +++ b/tests/unit/config/test_config_object.py @@ -25,46 +25,43 @@ def environment(**kwargs): def test_config_class(): config = { - 'log_level': 'info', - 'exit_on_warning': False, - 'output_file_type': 'png', - 'output_dir': './esmvaltool_output', - 'auxiliary_data_dir': './auxiliary_data', - 'save_intermediary_cubes': False, - 'remove_preproc_dir': True, - 'max_parallel_tasks': None, - 'profile_diagnostic': False, - 'rootpath': { - 'CMIP6': '~/data/CMIP6' - }, - 'drs': { - 'CMIP6': 'default' - }, + "log_level": "info", + "exit_on_warning": False, + "output_file_type": "png", + "output_dir": "./esmvaltool_output", + "auxiliary_data_dir": "./auxiliary_data", + "save_intermediary_cubes": False, + "remove_preproc_dir": True, + "max_parallel_tasks": None, + "profile_diagnostic": False, + "rootpath": {"CMIP6": "~/data/CMIP6"}, + "drs": {"CMIP6": "default"}, } cfg = Config(config) - assert isinstance(cfg['output_dir'], Path) - assert isinstance(cfg['auxiliary_data_dir'], Path) + assert isinstance(cfg["output_dir"], Path) + assert isinstance(cfg["auxiliary_data_dir"], Path) from esmvalcore.config._config import CFG as CFG_DEV + assert CFG_DEV def test_config_update(): - config = Config({'output_dir': 'directory'}) - fail_dict = {'output_dir': 123} + config = Config({"output_dir": "directory"}) + fail_dict = {"output_dir": 123} with pytest.raises(InvalidConfigParameter): config.update(fail_dict) def test_set_bad_item(): - config = Config({'output_dir': 'config'}) + config = Config({"output_dir": "config"}) with pytest.raises(InvalidConfigParameter) as err_exc: - config['bad_item'] = 47 + config["bad_item"] = 47 - assert str(err_exc.value) == '`bad_item` is not a valid config parameter.' + assert str(err_exc.value) == "`bad_item` is not a valid config parameter." 
def test_config_init(): @@ -73,7 +70,7 @@ def test_config_init(): def test_load_from_file(monkeypatch): - default_config_file = Path(esmvalcore.__file__).parent / 'config-user.yml' + default_config_file = Path(esmvalcore.__file__).parent / "config-user.yml" config = Config() assert not config config.load_from_file(default_config_file) @@ -85,17 +82,17 @@ def test_load_from_file_filenotfound(monkeypatch): config = Config() assert not config - expected_path = Path.home() / '.esmvaltool' / 'not_existent_file.yml' + expected_path = Path.home() / ".esmvaltool" / "not_existent_file.yml" msg = f"Config file '{expected_path}' does not exist" with pytest.raises(FileNotFoundError, match=msg): - config.load_from_file('not_existent_file.yml') + config.load_from_file("not_existent_file.yml") def test_load_from_file_invalidconfigparameter(monkeypatch, tmp_path): """Test `Config.load_from_file`.""" monkeypatch.chdir(tmp_path) - cfg_path = tmp_path / 'test.yml' - cfg_path.write_text('invalid_param: 42') + cfg_path = tmp_path / "test.yml" + cfg_path.write_text("invalid_param: 42") config = Config() assert not config @@ -111,15 +108,15 @@ def test_load_from_file_invalidconfigparameter(monkeypatch, tmp_path): def test_config_key_error(): config = Config() with pytest.raises(KeyError): - config['invalid_key'] + config["invalid_key"] def test_reload(): """Test `Config.reload`.""" - cfg_path = Path(esmvalcore.__file__).parent / 'config-user.yml' + cfg_path = Path(esmvalcore.__file__).parent / "config-user.yml" config = Config(config_file=cfg_path) config.reload() - assert config['config_file'] == cfg_path + assert config["config_file"] == cfg_path def test_reload_fail(): @@ -134,141 +131,141 @@ def test_reload_fail(): def test_session(): - config = Config({'output_dir': 'config'}) + config = Config({"output_dir": "config"}) - session = config.start_session('recipe_name') + session = config.start_session("recipe_name") assert session == config - session['output_dir'] = 'session' + session["output_dir"] = "session" assert session != config def test_session_key_error(): session = Session({}) with pytest.raises(KeyError): - session['invalid_key'] + session["invalid_key"] TEST_GET_CFG_PATH = [ - (None, None, None, '~/.esmvaltool/config-user.yml', False), + (None, None, None, "~/.esmvaltool/config-user.yml", False), ( None, None, - ('any_other_module', '--config_file=cli.yml'), - '~/.esmvaltool/config-user.yml', + ("any_other_module", "--config_file=cli.yml"), + "~/.esmvaltool/config-user.yml", False, ), ( None, None, - ('esmvaltool', 'run', '--max-parallel-tasks=4'), - '~/.esmvaltool/config-user.yml', + ("esmvaltool", "run", "--max-parallel-tasks=4"), + "~/.esmvaltool/config-user.yml", True, ), ( None, None, - ('esmvaltool', '--config_file'), - '~/.esmvaltool/config-user.yml', + ("esmvaltool", "--config_file"), + "~/.esmvaltool/config-user.yml", True, ), ( None, None, - ('esmvaltool', 'run', '--config_file=/cli.yml'), - '/cli.yml', + ("esmvaltool", "run", "--config_file=/cli.yml"), + "/cli.yml", True, ), ( None, None, - ('esmvaltool', 'run', '--config_file=/cli.yml'), - '/cli.yml', + ("esmvaltool", "run", "--config_file=/cli.yml"), + "/cli.yml", True, ), ( None, None, - ('esmvaltool', 'run', '--config-file', '/cli.yml'), - '/cli.yml', + ("esmvaltool", "run", "--config-file", "/cli.yml"), + "/cli.yml", True, ), ( None, None, - ('esmvaltool', 'run', '--config-file=/cli.yml'), - '/cli.yml', + ("esmvaltool", "run", "--config-file=/cli.yml"), + "/cli.yml", True, ), ( None, None, - ('esmvaltool', 'run', 
'--config-file=relative_cli.yml'), - '~/.esmvaltool/relative_cli.yml', + ("esmvaltool", "run", "--config-file=relative_cli.yml"), + "~/.esmvaltool/relative_cli.yml", True, ), ( None, None, - ('esmvaltool', 'run', '--config-file=existing_cfg.yml'), - 'existing_cfg.yml', + ("esmvaltool", "run", "--config-file=existing_cfg.yml"), + "existing_cfg.yml", True, ), ( None, - {'_ESMVALTOOL_USER_CONFIG_FILE_': '/env.yml'}, - ('esmvaltool', 'run', '--config-file=/cli.yml'), - '/env.yml', + {"_ESMVALTOOL_USER_CONFIG_FILE_": "/env.yml"}, + ("esmvaltool", "run", "--config-file=/cli.yml"), + "/env.yml", True, ), ( None, - {'_ESMVALTOOL_USER_CONFIG_FILE_': '/env.yml'}, + {"_ESMVALTOOL_USER_CONFIG_FILE_": "/env.yml"}, None, - '/env.yml', + "/env.yml", True, ), ( None, - {'_ESMVALTOOL_USER_CONFIG_FILE_': 'existing_cfg.yml'}, - ('esmvaltool', 'run', '--config-file=/cli.yml'), - 'existing_cfg.yml', + {"_ESMVALTOOL_USER_CONFIG_FILE_": "existing_cfg.yml"}, + ("esmvaltool", "run", "--config-file=/cli.yml"), + "existing_cfg.yml", True, ), ( - '/filename.yml', - {'_ESMVALTOOL_USER_CONFIG_FILE_': '/env.yml'}, - ('esmvaltool', 'run', '--config-file=/cli.yml'), - '/filename.yml', + "/filename.yml", + {"_ESMVALTOOL_USER_CONFIG_FILE_": "/env.yml"}, + ("esmvaltool", "run", "--config-file=/cli.yml"), + "/filename.yml", True, ), ( - '/filename.yml', + "/filename.yml", None, - ('esmvaltool', 'run', '--config-file=/cli.yml'), - '/filename.yml', + ("esmvaltool", "run", "--config-file=/cli.yml"), + "/filename.yml", True, ), - ('/filename.yml', None, None, '/filename.yml', False), + ("/filename.yml", None, None, "/filename.yml", False), ( - 'filename.yml', + "filename.yml", None, None, - '~/.esmvaltool/filename.yml', + "~/.esmvaltool/filename.yml", False, ), ( - 'existing_cfg.yml', - {'_ESMVALTOOL_USER_CONFIG_FILE_': '/env.yml'}, - ('esmvaltool', 'run', '--config-file=/cli.yml'), - 'existing_cfg.yml', + "existing_cfg.yml", + {"_ESMVALTOOL_USER_CONFIG_FILE_": "/env.yml"}, + ("esmvaltool", "run", "--config-file=/cli.yml"), + "existing_cfg.yml", True, ), ] @pytest.mark.parametrize( - 'filename,env,cli_args,output,env_var_set', TEST_GET_CFG_PATH + "filename,env,cli_args,output,env_var_set", TEST_GET_CFG_PATH ) def test_get_config_user_path( filename, env, cli_args, output, env_var_set, monkeypatch, tmp_path @@ -276,41 +273,41 @@ def test_get_config_user_path( """Test `Config._get_config_user_path`.""" # Create empty test file monkeypatch.chdir(tmp_path) - (tmp_path / 'existing_cfg.yml').write_text('') + (tmp_path / "existing_cfg.yml").write_text("") if env is None: env = {} if cli_args is None: cli_args = sys.argv - if output == 'existing_cfg.yml': - output = tmp_path / 'existing_cfg.yml' + if output == "existing_cfg.yml": + output = tmp_path / "existing_cfg.yml" else: output = Path(output).expanduser() with environment(**env), arguments(*cli_args): config_path = Config._get_config_user_path(filename) if env_var_set: - assert os.environ['_ESMVALTOOL_USER_CONFIG_FILE_'] == str(output) + assert os.environ["_ESMVALTOOL_USER_CONFIG_FILE_"] == str(output) else: - assert '_ESMVALTOOL_USER_CONFIG_FILE_' not in os.environ + assert "_ESMVALTOOL_USER_CONFIG_FILE_" not in os.environ assert isinstance(config_path, Path) assert config_path == output def test_load_user_config_filenotfound(): """Test `Config._load_user_config`.""" - expected_path = Path.home() / '.esmvaltool' / 'not_existent_file.yml' + expected_path = Path.home() / ".esmvaltool" / "not_existent_file.yml" msg = f"Config file '{expected_path}' does not exist" with 
pytest.raises(FileNotFoundError, match=msg): - Config._load_user_config('not_existent_file.yml') + Config._load_user_config("not_existent_file.yml") def test_load_user_config_invalidconfigparameter(monkeypatch, tmp_path): """Test `Config._load_user_config`.""" monkeypatch.chdir(tmp_path) - cfg_path = tmp_path / 'test.yml' - cfg_path.write_text('invalid_param: 42') + cfg_path = tmp_path / "test.yml" + cfg_path.write_text("invalid_param: 42") msg = ( f"Failed to parse user configuration file {cfg_path}: `invalid_param` " diff --git a/tests/unit/config/test_config_validator.py b/tests/unit/config/test_config_validator.py index 36dcd763fb..1a8283ce4b 100644 --- a/tests/unit/config/test_config_validator.py +++ b/tests/unit/config/test_config_validator.py @@ -40,91 +40,99 @@ def generate_validator_testcases(valid): validation_tests = ( { - 'validator': validate_bool, - 'success': ((True, True), (False, False)), - 'fail': ((_, ValueError) for _ in ('fail', 2, -1, [])) + "validator": validate_bool, + "success": ((True, True), (False, False)), + "fail": ((_, ValueError) for _ in ("fail", 2, -1, [])), }, { - 'validator': validate_check_level, - 'success': ( + "validator": validate_check_level, + "success": ( (1, 1), (5, 5), - ('dEBUG', 1), - ('default', 3), + ("dEBUG", 1), + ("default", 3), ), - 'fail': ( + "fail": ( (6, ValueError), (0, ValueError), - ('fail', ValueError), + ("fail", ValueError), ), }, { - 'validator': - validate_diagnostics, - 'success': ( - ('/', {'/'}), - ('a ', {'a/*'}), - ('/ a ', {'/', 'a/*'}), - ('/ a a', {'/', 'a/*'}), - (('/', 'a'), {'/', 'a/*'}), + "validator": validate_diagnostics, + "success": ( + ("/", {"/"}), + ("a ", {"a/*"}), + ("/ a ", {"/", "a/*"}), + ("/ a a", {"/", "a/*"}), + (("/", "a"), {"/", "a/*"}), ([], set()), ), - 'fail': ( + "fail": ( (1, TypeError), ([1, 2], TypeError), ), }, { - 'validator': - _listify_validator(validate_float, n_items=2), - 'success': - ((_, [1.5, 2.5]) - for _ in ('1.5, 2.5', [1.5, 2.5], [1.5, 2.5], (1.5, 2.5), - np.array((1.5, 2.5)))), - 'fail': ((_, ValueError) for _ in ('fail', ('a', 1), (1, 2, 3))) + "validator": _listify_validator(validate_float, n_items=2), + "success": ( + (_, [1.5, 2.5]) + for _ in ( + "1.5, 2.5", + [1.5, 2.5], + [1.5, 2.5], + (1.5, 2.5), + np.array((1.5, 2.5)), + ) + ), + "fail": ((_, ValueError) for _ in ("fail", ("a", 1), (1, 2, 3))), }, { - 'validator': - _listify_validator(validate_float, n_items=2), - 'success': - ((_, [1.5, 2.5]) - for _ in ('1.5, 2.5', [1.5, 2.5], [1.5, 2.5], (1.5, 2.5), - np.array((1.5, 2.5)))), - 'fail': ((_, ValueError) for _ in ('fail', ('a', 1), (1, 2, 3))) + "validator": _listify_validator(validate_float, n_items=2), + "success": ( + (_, [1.5, 2.5]) + for _ in ( + "1.5, 2.5", + [1.5, 2.5], + [1.5, 2.5], + (1.5, 2.5), + np.array((1.5, 2.5)), + ) + ), + "fail": ((_, ValueError) for _ in ("fail", ("a", 1), (1, 2, 3))), }, { - 'validator': - _listify_validator(validate_int, n_items=2), - 'success': - ((_, [1, 2]) - for _ in ('1, 2', [1.5, 2.5], [1, 2], (1, 2), np.array((1, 2)))), - 'fail': ((_, ValueError) for _ in ('fail', ('a', 1), (1, 2, 3))) + "validator": _listify_validator(validate_int, n_items=2), + "success": ( + (_, [1, 2]) + for _ in ("1, 2", [1.5, 2.5], [1, 2], (1, 2), np.array((1, 2))) + ), + "fail": ((_, ValueError) for _ in ("fail", ("a", 1), (1, 2, 3))), }, { - 'validator': validate_bool_or_none, - 'success': ((None, None), (True, True), (False, False)), - 'fail': (('A', ValueError), (1, ValueError)), + "validator": validate_bool_or_none, + "success": ((None, None), 
(True, True), (False, False)), + "fail": (("A", ValueError), (1, ValueError)), }, { - 'validator': validate_int_or_none, - 'success': ((None, None), ), - 'fail': (), + "validator": validate_int_or_none, + "success": ((None, None),), + "fail": (), }, { - 'validator': validate_int_positive_or_none, - 'success': ((None, None), ), - 'fail': (), + "validator": validate_int_positive_or_none, + "success": ((None, None),), + "fail": (), }, { - 'validator': - validate_path, - 'success': ( - ('a/b/c', Path.cwd() / 'a' / 'b' / 'c'), - ('/a/b/c/', Path('/', 'a', 'b', 'c')), - ('~/', Path.home()), + "validator": validate_path, + "success": ( + ("a/b/c", Path.cwd() / "a" / "b" / "c"), + ("/a/b/c/", Path("/", "a", "b", "c")), + ("~/", Path.home()), (Path.home(), Path.home()), ), - 'fail': ( + "fail": ( (None, ValueError), (123, ValueError), (False, ValueError), @@ -132,152 +140,140 @@ def generate_validator_testcases(valid): ), }, { - 'validator': validate_path_or_none, - 'success': ( - ('a/b/c', Path.cwd() / 'a' / 'b' / 'c'), - ('/a/b/c/', Path('/', 'a', 'b', 'c')), - ('~/', Path.home()), + "validator": validate_path_or_none, + "success": ( + ("a/b/c", Path.cwd() / "a" / "b" / "c"), + ("/a/b/c/", Path("/", "a", "b", "c")), + ("~/", Path.home()), (None, None), ), - 'fail': ( + "fail": ( (123, ValueError), (False, ValueError), ([], ValueError), ), }, { - 'validator': - validate_rootpath, - 'success': ( + "validator": validate_rootpath, + "success": ( # Test a single path - ({ - 'default': '/a' - }, { - 'default': [Path('/a')] - }), - ({ - 'default': Path('/a') - }, { - 'default': [Path('/a')] - }), + ({"default": "/a"}, {"default": [Path("/a")]}), + ({"default": Path("/a")}, {"default": [Path("/a")]}), # Test a list of paths - ({ - 'CMIP6': ['/a', '/b'] - }, { - 'CMIP6': [Path('/a'), Path('/b')] - }), - ({ - 'CMIP6': [Path('/a'), Path('/b')] - }, { - 'CMIP6': [Path('/a'), Path('/b')] - }), + ({"CMIP6": ["/a", "/b"]}, {"CMIP6": [Path("/a"), Path("/b")]}), + ( + {"CMIP6": [Path("/a"), Path("/b")]}, + {"CMIP6": [Path("/a"), Path("/b")]}, + ), # Test a dict of paths ( { - 'CMIP6': { - '/a': 'DKRZ', - '/b': 'ESGF', + "CMIP6": { + "/a": "DKRZ", + "/b": "ESGF", }, }, { - 'CMIP6': { - Path('/a'): 'DKRZ', - Path('/b'): 'ESGF', + "CMIP6": { + Path("/a"): "DKRZ", + Path("/b"): "ESGF", }, }, - )), - 'fail': (), + ), + ), + "fail": (), }, { - 'validator': validate_positive, - 'success': ( + "validator": validate_positive, + "success": ( (0.1, 0.1), (1, 1), (1.5, 1.5), ), - 'fail': ( + "fail": ( (0, ValueError), (-1, ValueError), - ('fail', TypeError), + ("fail", TypeError), ), }, { - 'validator': - _listify_validator(validate_string), - 'success': ( - ('', []), - ('a,b', ['a', 'b']), - ('abc', ['abc']), - ('abc, ', ['abc']), - ('abc, ,', ['abc']), - (['a', 'b'], ['a', 'b']), - (('a', 'b'), ['a', 'b']), - (iter(['a', 'b']), ['a', 'b']), - (np.array(['a', 'b']), ['a', 'b']), - ((1, 2), ['1', '2']), - (np.array([1, 2]), ['1', '2']), + "validator": _listify_validator(validate_string), + "success": ( + ("", []), + ("a,b", ["a", "b"]), + ("abc", ["abc"]), + ("abc, ", ["abc"]), + ("abc, ,", ["abc"]), + (["a", "b"], ["a", "b"]), + (("a", "b"), ["a", "b"]), + (iter(["a", "b"]), ["a", "b"]), + (np.array(["a", "b"]), ["a", "b"]), + ((1, 2), ["1", "2"]), + (np.array([1, 2]), ["1", "2"]), ), - 'fail': ( + "fail": ( (set(), ValueError), (1, ValueError), - ) + ), }, { - 'validator': validate_string_or_none, - 'success': ((None, None), ), - 'fail': (), + "validator": validate_string_or_none, + "success": ((None, None),), + "fail": 
(), }, { - 'validator': validate_search_esgf, - 'success': ( - ('never', 'never'), - ('NEVER', 'never'), - ('when_missing', 'when_missing'), - ('WhEN_MIssIng', 'when_missing'), - ('always', 'always'), - ('Always', 'always'), + "validator": validate_search_esgf, + "success": ( + ("never", "never"), + ("NEVER", "never"), + ("when_missing", "when_missing"), + ("WhEN_MIssIng", "when_missing"), + ("always", "always"), + ("Always", "always"), ), - 'fail': ( + "fail": ( (0, ValueError), (3.14, ValueError), (True, ValueError), - ('fail', ValueError), + ("fail", ValueError), ), }, ) for validator_dict in validation_tests: - validator = validator_dict['validator'] + validator = validator_dict["validator"] if valid: - for arg, target in validator_dict['success']: + for arg, target in validator_dict["success"]: yield validator, arg, target else: - for arg, error_type in validator_dict['fail']: + for arg, error_type in validator_dict["fail"]: yield validator, arg, error_type -@pytest.mark.parametrize('validator, arg, target', - generate_validator_testcases(True)) +@pytest.mark.parametrize( + "validator, arg, target", generate_validator_testcases(True) +) def test_validator_valid(validator, arg, target): res = validator(arg) assert res == target -@pytest.mark.parametrize('validator, arg, exception_type', - generate_validator_testcases(False)) +@pytest.mark.parametrize( + "validator, arg, exception_type", generate_validator_testcases(False) +) def test_validator_invalid(validator, arg, exception_type): with pytest.raises(exception_type): validator(arg) -@pytest.mark.parametrize('remove_version', (current_version, '0.0.1', '9.9.9')) +@pytest.mark.parametrize("remove_version", (current_version, "0.0.1", "9.9.9")) def test_handle_deprecation(remove_version): """Test ``_handle_deprecation``.""" - option = 'test_var' - deprecated_version = '2.7.0' - more_info = ' More information on this is not available.' + option = "test_var" + deprecated_version = "2.7.0" + more_info = " More information on this is not available." 
- if remove_version != '9.9.9': + if remove_version != "9.9.9": msg = ( r"The configuration option or command line argument `test_var` " r"has been removed in ESMValCore version .* More information on " @@ -302,32 +298,32 @@ def test_handle_deprecation(remove_version): def test_validate_config_developer_none(): """Test ``validate_config_developer``.""" path = validate_config_developer(None) - assert path == Path(esmvalcore.__file__).parent / 'config-developer.yml' + assert path == Path(esmvalcore.__file__).parent / "config-developer.yml" def test_validate_config_developer(tmp_path): """Test ``validate_config_developer``.""" custom_table_path = ( - Path(esmvalcore.__file__).parent / 'cmor' / 'tables' / 'custom' + Path(esmvalcore.__file__).parent / "cmor" / "tables" / "custom" ) cfg_dev = { - 'custom': {'cmor_path': custom_table_path}, - 'CMIP3': {'input_dir': {'default': '/'}}, - 'CMIP5': {'input_dir': {'default': '/'}}, - 'CMIP6': {'input_dir': {'default': '/'}}, - 'CORDEX': {'input_dir': {'default': '/'}}, - 'OBS': {'input_dir': {'default': '/'}}, - 'OBS6': {'input_dir': {'default': '/'}}, - 'obs4MIPs': {'input_dir': {'default': '/'}}, - 'ana4mips': {'input_dir': {'default': '/'}}, - 'native6': {'input_dir': {'default': '/'}}, - 'EMAC': {'input_dir': {'default': '/'}}, - 'IPSLCM': {'input_dir': {'default': '/'}}, - 'ICON': {'input_dir': {'default': '/'}}, - 'CESM': {'input_dir': {'default': '/'}}, + "custom": {"cmor_path": custom_table_path}, + "CMIP3": {"input_dir": {"default": "/"}}, + "CMIP5": {"input_dir": {"default": "/"}}, + "CMIP6": {"input_dir": {"default": "/"}}, + "CORDEX": {"input_dir": {"default": "/"}}, + "OBS": {"input_dir": {"default": "/"}}, + "OBS6": {"input_dir": {"default": "/"}}, + "obs4MIPs": {"input_dir": {"default": "/"}}, + "ana4mips": {"input_dir": {"default": "/"}}, + "native6": {"input_dir": {"default": "/"}}, + "EMAC": {"input_dir": {"default": "/"}}, + "IPSLCM": {"input_dir": {"default": "/"}}, + "ICON": {"input_dir": {"default": "/"}}, + "CESM": {"input_dir": {"default": "/"}}, } - cfg_dev_file = tmp_path / 'cfg-developer.yml' - with open(cfg_dev_file, mode='w', encoding='utf-8') as file: + cfg_dev_file = tmp_path / "cfg-developer.yml" + with open(cfg_dev_file, mode="w", encoding="utf-8") as file: yaml.safe_dump(cfg_dev, file) path = validate_config_developer(cfg_dev_file) diff --git a/tests/unit/config/test_dask.py b/tests/unit/config/test_dask.py index 22e7735628..e965c90a2e 100644 --- a/tests/unit/config/test_dask.py +++ b/tests/unit/config/test_dask.py @@ -5,72 +5,72 @@ def test_get_no_distributed_client(mocker, tmp_path): - mocker.patch.object(_dask, 'CONFIG_FILE', tmp_path / 'nonexistent.yml') + mocker.patch.object(_dask, "CONFIG_FILE", tmp_path / "nonexistent.yml") with _dask.get_distributed_client() as client: assert client is None -@pytest.mark.parametrize('warn_unused_args', [False, True]) +@pytest.mark.parametrize("warn_unused_args", [False, True]) def test_get_distributed_client_external(mocker, tmp_path, warn_unused_args): # Create mock client configuration. 
cfg = { - 'client': { - 'address': 'tcp://127.0.0.1:42021', + "client": { + "address": "tcp://127.0.0.1:42021", }, } if warn_unused_args: - cfg['cluster'] = {'n_workers': 2} - cfg_file = tmp_path / 'dask.yml' - with cfg_file.open('w', encoding='utf-8') as file: + cfg["cluster"] = {"n_workers": 2} + cfg_file = tmp_path / "dask.yml" + with cfg_file.open("w", encoding="utf-8") as file: yaml.safe_dump(cfg, file) - mocker.patch.object(_dask, 'CONFIG_FILE', cfg_file) + mocker.patch.object(_dask, "CONFIG_FILE", cfg_file) # Create mock distributed.Client mock_client = mocker.Mock() - mocker.patch.object(_dask, - 'Client', - create_autospec=True, - return_value=mock_client) + mocker.patch.object( + _dask, "Client", create_autospec=True, return_value=mock_client + ) with _dask.get_distributed_client() as client: assert client is mock_client - _dask.Client.assert_called_with(**cfg['client']) + _dask.Client.assert_called_with(**cfg["client"]) mock_client.close.assert_called() def test_get_distributed_client_slurm(mocker, tmp_path): cfg = { - 'cluster': { - 'type': 'dask_jobqueue.SLURMCluster', - 'queue': 'interactive', - 'cores': '8', - 'memory': '16GiB', + "cluster": { + "type": "dask_jobqueue.SLURMCluster", + "queue": "interactive", + "cores": "8", + "memory": "16GiB", }, } - cfg_file = tmp_path / 'dask.yml' - with cfg_file.open('w', encoding='utf-8') as file: + cfg_file = tmp_path / "dask.yml" + with cfg_file.open("w", encoding="utf-8") as file: yaml.safe_dump(cfg, file) - mocker.patch.object(_dask, 'CONFIG_FILE', cfg_file) + mocker.patch.object(_dask, "CONFIG_FILE", cfg_file) # Create mock distributed.Client mock_client = mocker.Mock() - mocker.patch.object(_dask, - 'Client', - create_autospec=True, - return_value=mock_client) + mocker.patch.object( + _dask, "Client", create_autospec=True, return_value=mock_client + ) mock_module = mocker.Mock() mock_cluster_cls = mocker.Mock() mock_module.SLURMCluster = mock_cluster_cls - mocker.patch.object(_dask.importlib, - 'import_module', - create_autospec=True, - return_value=mock_module) + mocker.patch.object( + _dask.importlib, + "import_module", + create_autospec=True, + return_value=mock_module, + ) with _dask.get_distributed_client() as client: assert client is mock_client mock_client.close.assert_called() mock_cluster = mock_cluster_cls.return_value _dask.Client.assert_called_with(address=mock_cluster.scheduler_address) - args = {k: v for k, v in cfg['cluster'].items() if k != 'type'} + args = {k: v for k, v in cfg["cluster"].items() if k != "type"} mock_cluster_cls.assert_called_with(**args) mock_cluster.close.assert_called() diff --git a/tests/unit/config/test_diagnostic.py b/tests/unit/config/test_diagnostic.py index e7c836d283..ea25385db1 100644 --- a/tests/unit/config/test_diagnostic.py +++ b/tests/unit/config/test_diagnostic.py @@ -1,4 +1,5 @@ """Test Diagnostics and TagsManager.""" + import pytest from esmvalcore.config._diagnostics import Diagnostics, TagsManager @@ -12,68 +13,70 @@ def test_diagnostics_class(): path = diagnostics.path - assert diagnostics.recipes == path / 'recipes' - assert diagnostics.references == path / 'references' - assert diagnostics.tags_config == path / 'config-references.yml' - assert diagnostics.scripts == path / 'diag_scripts' + assert diagnostics.recipes == path / "recipes" + assert diagnostics.references == path / "references" + assert diagnostics.tags_config == path / "config-references.yml" + assert diagnostics.scripts == path / "diag_scripts" assert isinstance(diagnostics.load_tags(), TagsManager) def 
test_tags_manager_setters(): """Test TagsManager setters.""" tags = TagsManager() - tags.set_tag_value('section', 'tag1', 'value1') - assert tags.get_tag_value('section', 'tag1') == 'value1' - - tags.set_tag_values({ - 'section': { - 'tag2': 'value2', - }, - 'other': { - 'tag1': 'value1', - 'tag2': 'value2', - }, - }) - - assert tags.get_tag_value('section', 'tag1') == 'value1' - assert tags.get_tag_value('section', 'tag2') == 'value2' - assert tags.get_tag_value('other', 'tag1') == 'value1' - assert tags.get_tag_value('other', 'tag2') == 'value2' + tags.set_tag_value("section", "tag1", "value1") + assert tags.get_tag_value("section", "tag1") == "value1" + + tags.set_tag_values( + { + "section": { + "tag2": "value2", + }, + "other": { + "tag1": "value1", + "tag2": "value2", + }, + } + ) + + assert tags.get_tag_value("section", "tag1") == "value1" + assert tags.get_tag_value("section", "tag2") == "value2" + assert tags.get_tag_value("other", "tag1") == "value1" + assert tags.get_tag_value("other", "tag2") == "value2" def test_tags_manager(): """Test TagsManager functionality.""" - tags = TagsManager({'section': {'tag1': 123, 'tag2': 345}}) + tags = TagsManager({"section": {"tag1": 123, "tag2": 345}}) - ret = tags.get_tag_value('section', 'tag1') + ret = tags.get_tag_value("section", "tag1") assert ret == 123 - ret = tags.get_tag_values('section', ('tag1', 'tag2')) + ret = tags.get_tag_values("section", ("tag1", "tag2")) assert ret == (123, 345) - dict_with_tags = {'section': ['tag1', 'tag2']} + dict_with_tags = {"section": ["tag1", "tag2"]} tags.replace_tags_in_dict(dict_with_tags) - assert dict_with_tags == {'section': (123, 345)} + assert dict_with_tags == {"section": (123, 345)} def test_tags_manager_fails(): """Test TagsManager fails.""" - tags = TagsManager({'section': {'tag1': 123, 'tag2': 345}}) + tags = TagsManager({"section": {"tag1": 123, "tag2": 345}}) with pytest.raises(ValueError): - tags.get_tag_value(section='undefined', tag='tag1') + tags.get_tag_value(section="undefined", tag="tag1") with pytest.raises(ValueError): - tags.get_tag_value(section='section', tag='undefined') + tags.get_tag_value(section="section", tag="undefined") with pytest.raises(ValueError): - dict_with_undefined_tags = {'section': ['tag1', 'undefined']} + dict_with_undefined_tags = {"section": ["tag1", "undefined"]} tags.replace_tags_in_dict(dict_with_undefined_tags) def test_load_tags_from_non_existent_file(): """Test fallback if no diagnostics are installed.""" - tags = TagsManager.from_file('non-existent') + tags = TagsManager.from_file("non-existent") assert isinstance(tags, TagsManager) assert tags == {} diff --git a/tests/unit/config/test_esgf_pyclient.py b/tests/unit/config/test_esgf_pyclient.py index 2c1a028f7b..f23813bf71 100644 --- a/tests/unit/config/test_esgf_pyclient.py +++ b/tests/unit/config/test_esgf_pyclient.py @@ -1,5 +1,3 @@ -import copy -from collections import defaultdict from pathlib import Path import pytest @@ -8,91 +6,37 @@ from esmvalcore.config import _esgf_pyclient DEFAULT_CONFIG: dict = { - 'logon': { - 'interactive': False, - 'bootstrap': True, - }, - 'search_connection': { - 'urls': [ - 'https://esgf.ceda.ac.uk/esg-search', - 'https://esgf-node.llnl.gov/esg-search', - 'https://esgf-data.dkrz.de/esg-search', - 'https://esgf-node.ipsl.upmc.fr/esg-search', - 'https://esg-dn1.nsc.liu.se/esg-search', - 'https://esgf.nci.org.au/esg-search', - 'https://esgf.nccs.nasa.gov/esg-search', - 'https://esgdata.gfdl.noaa.gov/esg-search', + "search_connection": { + "urls": [ + 
"https://esgf.ceda.ac.uk/esg-search", + "https://esgf-node.llnl.gov/esg-search", + "https://esgf-data.dkrz.de/esg-search", + "https://esgf-node.ipsl.upmc.fr/esg-search", + "https://esg-dn1.nsc.liu.se/esg-search", + "https://esgf.nci.org.au/esg-search", + "https://esgf.nccs.nasa.gov/esg-search", + "https://esgdata.gfdl.noaa.gov/esg-search", ], - 'distrib': - True, - 'timeout': - 120, - 'cache': - Path.home() / '.esmvaltool' / 'cache' / 'pyesgf-search-results', - 'expire_after': - 86400, + "distrib": True, + "timeout": 120, + "cache": Path.home() + / ".esmvaltool" + / "cache" + / "pyesgf-search-results", + "expire_after": 86400, }, } -CREDENTIALS = { - 'hostname': 'esgf-data.dkrz.de', - 'username': 'cookiemonster', - 'password': 'Welcome01', -} - - -class MockKeyring: - """Mock keyring module.""" - - def __init__(self): - self.items = defaultdict(dict) - - def set_password(self, service_name, username, password): - self.items[service_name][username] = password - - def get_password(self, service_name, username): - return self.items[service_name][username] - - -def test_get_keyring_credentials(monkeypatch): - """Test function get_keyring_credentials.""" - keyring = MockKeyring() - for key, value in CREDENTIALS.items(): - keyring.set_password("ESGF", key, value) - monkeypatch.setattr(_esgf_pyclient, 'keyring', keyring) - - credentials = _esgf_pyclient.get_keyring_credentials() - - assert credentials == CREDENTIALS - - -def test_get_keyring_credentials_no_keyring(mocker): - - mocker.patch.object(_esgf_pyclient, 'keyring', None) - credentials = _esgf_pyclient.get_keyring_credentials() - assert credentials == {} - - -def test_get_keyring_credentials_no_backend(mocker): - - keyring = mocker.patch.object(_esgf_pyclient, 'keyring') - keyring.errors.NoKeyringError = Exception - keyring.get_password.side_effect = keyring.errors.NoKeyringError - credentials = _esgf_pyclient.get_keyring_credentials() - assert credentials == {} - def test_read_config_file(monkeypatch, tmp_path): """Test function read_config_file.""" - cfg_file = tmp_path / 'esgf-pyclient.yml' - monkeypatch.setattr(_esgf_pyclient, 'CONFIG_FILE', cfg_file) + cfg_file = tmp_path / "esgf-pyclient.yml" + monkeypatch.setattr(_esgf_pyclient, "CONFIG_FILE", cfg_file) reference = { - 'logon': { - 'interactive': True - }, + "logon": {"interactive": True}, } - with cfg_file.open('w', encoding='utf-8') as file: + with cfg_file.open("w", encoding="utf-8") as file: yaml.safe_dump(reference, file) cfg = _esgf_pyclient.read_config_file() @@ -105,21 +49,19 @@ def test_read_v25_config_file(monkeypatch, tmp_path): For v2.5 and earlier, the config-file contained a single `url` instead of a list of `urls` to specify the ESGF index node. 
""" - cfg_file = tmp_path / 'esgf-pyclient.yml' - monkeypatch.setattr(_esgf_pyclient, 'CONFIG_FILE', cfg_file) + cfg_file = tmp_path / "esgf-pyclient.yml" + monkeypatch.setattr(_esgf_pyclient, "CONFIG_FILE", cfg_file) cfg_file_content = { - 'search_connection': { - 'url': 'https://some.host/path' - }, + "search_connection": {"url": "https://some.host/path"}, } - with cfg_file.open('w', encoding='utf-8') as file: + with cfg_file.open("w", encoding="utf-8") as file: yaml.safe_dump(cfg_file_content, file) reference = { - 'search_connection': { - 'urls': [ - 'https://some.host/path', + "search_connection": { + "urls": [ + "https://some.host/path", ] } } @@ -128,23 +70,12 @@ def test_read_v25_config_file(monkeypatch, tmp_path): assert cfg == reference -@pytest.mark.parametrize('with_keyring_creds', [True, False]) +@pytest.mark.parametrize("with_keyring_creds", [True, False]) def test_default_config(monkeypatch, mocker, tmp_path, with_keyring_creds): """Test that load_esgf_pyclient_config returns the default config.""" - monkeypatch.setattr(_esgf_pyclient, 'CONFIG_FILE', - tmp_path / 'non-existent.yml') - - credentials = CREDENTIALS if with_keyring_creds else {} - mocker.patch.object( - _esgf_pyclient, - 'get_keyring_credentials', - autospec=True, - return_value=credentials, + monkeypatch.setattr( + _esgf_pyclient, "CONFIG_FILE", tmp_path / "non-existent.yml" ) cfg = _esgf_pyclient.load_esgf_pyclient_config() - - expected = copy.deepcopy(DEFAULT_CONFIG) - expected['logon'].update(credentials) - - assert cfg == expected + assert cfg == DEFAULT_CONFIG diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 9b312d26d3..edc9340fb9 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -10,5 +10,5 @@ def session(tmp_path, monkeypatch): for key, value in CFG_DEFAULT.items(): monkeypatch.setitem(CFG, key, copy.deepcopy(value)) - monkeypatch.setitem(CFG, 'output_dir', tmp_path / 'esmvaltool_output') - return CFG.start_session('recipe_test') + monkeypatch.setitem(CFG, "output_dir", tmp_path / "esmvaltool_output") + return CFG.start_session("recipe_test") diff --git a/tests/unit/documentation/test_changelog.py b/tests/unit/documentation/test_changelog.py index 3ca052c311..e702fb6f8a 100644 --- a/tests/unit/documentation/test_changelog.py +++ b/tests/unit/documentation/test_changelog.py @@ -1,24 +1,33 @@ """Tests for the changelog.""" + import collections import os import re def test_duplications_in_changelog(): - changelog_path = os.path.join(os.path.dirname(__file__), '../../..', - 'doc/changelog.rst') - with open(changelog_path, 'r', encoding='utf-8') as changelog: + changelog_path = os.path.join( + os.path.dirname(__file__), "../../..", "doc/changelog.rst" + ) + with open(changelog_path, "r", encoding="utf-8") as changelog: changelog = changelog.read() # Find all pull requests pr_links = re.compile( - "") + "" + ) links = pr_links.findall(changelog) # Check for duplicates if len(links) != len(set(links)): - print('The following PR are duplicated in the changelog:') - print('\n'.join((link - for link, count in collections.Counter(links).items() - if count > 1))) + print("The following PR are duplicated in the changelog:") + print( + "\n".join( + ( + link + for link, count in collections.Counter(links).items() + if count > 1 + ) + ) + ) assert False diff --git a/tests/unit/esgf/test_download.py b/tests/unit/esgf/test_download.py index 6ba258fac0..8af6d3057e 100644 --- a/tests/unit/esgf/test_download.py +++ b/tests/unit/esgf/test_download.py @@ -1,4 +1,5 @@ """Test 
 """Test `esmvalcore.esgf._download`."""
+
 import datetime
 import logging
 import os
@@ -16,60 +17,61 @@
 def test_log_speed(monkeypatch, tmp_path):
-    hosts_file = tmp_path / '.esmvaltool' / 'cache' / 'esgf-hosts.yml'
-    monkeypatch.setattr(_download, 'HOSTS_FILE', hosts_file)
+    hosts_file = tmp_path / ".esmvaltool" / "cache" / "esgf-hosts.yml"
+    monkeypatch.setattr(_download, "HOSTS_FILE", hosts_file)
 
     megabyte = 10**6
-    _download.log_speed('http://somehost.org/some_file.nc', 100 * megabyte, 10)
-    _download.log_speed('http://somehost.org/some_other_file.nc',
-                        200 * megabyte, 16)
-    _download.log_speed('http://otherhost.org/other_file.nc', 4 * megabyte, 1)
+    _download.log_speed("http://somehost.org/some_file.nc", 100 * megabyte, 10)
+    _download.log_speed(
+        "http://somehost.org/some_other_file.nc", 200 * megabyte, 16
+    )
+    _download.log_speed("http://otherhost.org/other_file.nc", 4 * megabyte, 1)
 
-    with hosts_file.open('r', encoding='utf-8') as file:
+    with hosts_file.open("r", encoding="utf-8") as file:
         result = yaml.safe_load(file)
 
     expected = {
-        'somehost.org': {
-            'speed (MB/s)': 11.5,
-            'duration (s)': 26,
-            'size (bytes)': 300 * megabyte,
-            'error': False,
+        "somehost.org": {
+            "speed (MB/s)": 11.5,
+            "duration (s)": 26,
+            "size (bytes)": 300 * megabyte,
+            "error": False,
         },
-        'otherhost.org': {
-            'speed (MB/s)': 4,
-            'duration (s)': 1,
-            'size (bytes)': 4 * megabyte,
-            'error': False,
+        "otherhost.org": {
+            "speed (MB/s)": 4,
+            "duration (s)": 1,
+            "size (bytes)": 4 * megabyte,
+            "error": False,
         },
     }
     assert result == expected
 
 
 def test_error(monkeypatch, tmp_path):
-    hosts_file = tmp_path / '.esmvaltool' / 'cache' / 'esgf-hosts.yml'
-    monkeypatch.setattr(_download, 'HOSTS_FILE', hosts_file)
+    hosts_file = tmp_path / ".esmvaltool" / "cache" / "esgf-hosts.yml"
+    monkeypatch.setattr(_download, "HOSTS_FILE", hosts_file)
 
     megabyte = 10**6
-    _download.log_speed('http://somehost.org/some_file.nc', 3 * megabyte, 2)
-    _download.log_error('http://somehost.org/some_file.nc')
+    _download.log_speed("http://somehost.org/some_file.nc", 3 * megabyte, 2)
+    _download.log_error("http://somehost.org/some_file.nc")
 
-    with hosts_file.open('r', encoding='utf-8') as file:
+    with hosts_file.open("r", encoding="utf-8") as file:
         result = yaml.safe_load(file)
 
     expected = {
-        'somehost.org': {
-            'speed (MB/s)': 1.5,
-            'duration (s)': 2,
-            'size (bytes)': 3 * megabyte,
-            'error': True,
+        "somehost.org": {
+            "speed (MB/s)": 1.5,
+            "duration (s)": 2,
+            "size (bytes)": 3 * megabyte,
+            "error": True,
         }
     }
     assert result == expected
 
 
-@pytest.mark.parametrize('age_in_hours', [0.5, 2])
+@pytest.mark.parametrize("age_in_hours", [0.5, 2])
 def test_get_preferred_hosts(monkeypatch, tmp_path, age_in_hours):
-    hosts_file = tmp_path / 'esgf-hosts.yml'
+    hosts_file = tmp_path / "esgf-hosts.yml"
     content = textwrap.dedent("""
     aims3.llnl.gov:
       duration (s): 8
@@ -101,7 +103,7 @@ def test_get_preferred_hosts(monkeypatch, tmp_path, age_in_hours):
     now = datetime.datetime.now().timestamp()
     file_age = now - age_in_hours * 3600
    os.utime(hosts_file, (file_age, file_age))
-    monkeypatch.setattr(_download, 'HOSTS_FILE', hosts_file)
+    monkeypatch.setattr(_download, "HOSTS_FILE", hosts_file)
 
     preferred_hosts = _download.get_preferred_hosts()
 
     # hosts should be sorted by download speed
@@ -109,26 +111,26 @@
     # host with a recent error last
     if age_in_hours < 1:
         expected = [
-            'aims3.llnl.gov',
-            'esgf.ichec.ie',
-            'esgf.nci.org.au',
-            'esg.lasg.ac.cn',
-            'esgdata.gfdl.noaa.gov',
+            "aims3.llnl.gov",
+ "esgf.ichec.ie", + "esgf.nci.org.au", + "esg.lasg.ac.cn", + "esgdata.gfdl.noaa.gov", ] else: expected = [ - 'aims3.llnl.gov', - 'esgdata.gfdl.noaa.gov', - 'esgf.ichec.ie', - 'esgf.nci.org.au', - 'esg.lasg.ac.cn', + "aims3.llnl.gov", + "esgdata.gfdl.noaa.gov", + "esgf.ichec.ie", + "esgf.nci.org.au", + "esg.lasg.ac.cn", ] assert preferred_hosts == expected def test_get_preferred_hosts_only_zeros(monkeypatch, tmp_path): """Test ``get_preferred_hosts`` when speed is zero for all entries.""" - hosts_file = tmp_path / 'esgf-hosts.yml' + hosts_file = tmp_path / "esgf-hosts.yml" content = textwrap.dedent(""" aims3.llnl.gov: duration (s): 0 @@ -142,36 +144,40 @@ def test_get_preferred_hosts_only_zeros(monkeypatch, tmp_path): speed (MB/s): 0.0 """).lstrip() hosts_file.write_text(content) - monkeypatch.setattr(_download, 'HOSTS_FILE', hosts_file) + monkeypatch.setattr(_download, "HOSTS_FILE", hosts_file) preferred_hosts = _download.get_preferred_hosts() # The following assert is safe since "the built-in sorted() function is # guaranteed to be stable" # (https://docs.python.org/3/library/functions.html) - expected = ['aims3.llnl.gov', 'esg.lasg.ac.cn'] + expected = ["aims3.llnl.gov", "esg.lasg.ac.cn"] assert preferred_hosts == expected def test_sort_hosts(mocker): """Test that hosts are sorted according to priority by sort_hosts.""" urls = [ - 'http://esgf.nci.org.au/abc.nc', - 'http://esgf2.dkrz.de/abc.nc', - 'http://esgf-data1.ceda.ac.uk/abc.nc', + "http://esgf.nci.org.au/abc.nc", + "http://esgf2.dkrz.de/abc.nc", + "http://esgf-data1.ceda.ac.uk/abc.nc", ] preferred_hosts = [ - 'esgf2.dkrz.de', 'esgf-data1.ceda.ac.uk', 'aims3.llnl.gov' + "esgf2.dkrz.de", + "esgf-data1.ceda.ac.uk", + "aims3.llnl.gov", ] - mocker.patch.object(_download, - 'get_preferred_hosts', - autospec=True, - return_value=preferred_hosts) + mocker.patch.object( + _download, + "get_preferred_hosts", + autospec=True, + return_value=preferred_hosts, + ) sorted_urls = _download.sort_hosts(urls) assert sorted_urls == [ - 'http://esgf.nci.org.au/abc.nc', - 'http://esgf2.dkrz.de/abc.nc', - 'http://esgf-data1.ceda.ac.uk/abc.nc', + "http://esgf.nci.org.au/abc.nc", + "http://esgf2.dkrz.de/abc.nc", + "http://esgf-data1.ceda.ac.uk/abc.nc", ] @@ -179,48 +185,47 @@ def test_get_dataset_id_noop(): file_results = [ FileResult( json={ - 'project': ['CMIP6'], - 'source_id': ['ABC'], - 'dataset_id': 'ABC.v1|hostname.org', + "project": ["CMIP6"], + "source_id": ["ABC"], + "dataset_id": "ABC.v1|hostname.org", }, context=None, ) ] dataset_id = _download.ESGFFile._get_dataset_id(file_results) - assert dataset_id == 'ABC.v1' + assert dataset_id == "ABC.v1" def test_get_dataset_id_obs4mips(): file_results = [ FileResult( json={ - 'project': ['obs4MIPs'], - 'source_id': ['CERES-EBAF'], - 'dataset_id': - 'obs4MIPs.NASA-LaRC.CERES-EBAF.atmos.mon.v20160610|abc.org', + "project": ["obs4MIPs"], + "source_id": ["CERES-EBAF"], + "dataset_id": "obs4MIPs.NASA-LaRC.CERES-EBAF.atmos.mon.v20160610|abc.org", }, context=None, ) ] dataset_id = _download.ESGFFile._get_dataset_id(file_results) - assert dataset_id == 'obs4MIPs.CERES-EBAF.v20160610' + assert dataset_id == "obs4MIPs.CERES-EBAF.v20160610" def test_init(): """Test ESGFFile.__init__().""" - filename = 'tas_ABC_2000-2001.nc' - url = f'http://something.org/ABC/v1/{filename}' + filename = "tas_ABC_2000-2001.nc" + url = f"http://something.org/ABC/v1/{filename}" result = FileResult( json={ - 'dataset_id': 'CMIP6.ABC.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 
'size': 10, - 'source_id': ['ABC'], - 'checksum_type': ['MD5'], - 'checksum': ['abc'], - 'title': filename, - 'url': [url + '|application/netcdf|HTTPServer'] + "dataset_id": "CMIP6.ABC.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 10, + "source_id": ["ABC"], + "checksum_type": ["MD5"], + "checksum": ["abc"], + "title": filename, + "url": [url + "|application/netcdf|HTTPServer"], }, context=None, ) @@ -229,82 +234,82 @@ def test_init(): assert file.name == filename assert file.size == 10 assert file.urls == [url] - assert file._checksums == [('MD5', 'abc')] + assert file._checksums == [("MD5", "abc")] assert file.facets == { - 'dataset': 'ABC', - 'project': 'CMIP6', - 'short_name': 'tas', - 'version': 'v1', + "dataset": "ABC", + "project": "CMIP6", + "short_name": "tas", + "version": "v1", } txt = f"ESGFFile:CMIP6/ABC/v1/{filename} on hosts ['something.org']" assert repr(file) == txt - assert hash(file) == hash(('CMIP6.ABC.v1', filename)) + assert hash(file) == hash(("CMIP6.ABC.v1", filename)) def test_from_results(): """Test ESGFFile._from_results().""" facets = { - 'project': 'CMIP6', - 'variable': 'tas', + "project": "CMIP6", + "variable": "tas", } results = [] for i in range(2): - filename = f'tas_ABC{i}_2000-2001.nc' - url = f'http://something.org/ABC/v1/{filename}' + filename = f"tas_ABC{i}_2000-2001.nc" + url = f"http://something.org/ABC/v1/{filename}" result = FileResult( json={ - 'dataset_id': f'CMIP6.ABC{i}.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 10, - 'source_id': [f'ABC{i}'], - 'title': filename, - 'url': [url + '|application/netcdf|HTTPServer'] + "dataset_id": f"CMIP6.ABC{i}.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 10, + "source_id": [f"ABC{i}"], + "title": filename, + "url": [url + "|application/netcdf|HTTPServer"], }, context=None, ) results.append(result) # Append an invalid result - wrong_var_filename = 'zg_ABC0_2000-2001.nc' + wrong_var_filename = "zg_ABC0_2000-2001.nc" results.append( FileResult( json={ - 'dataset_id': f'CMIP6.ABC{i}.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 10, - 'source_id': [f'ABC{i}'], - 'title': wrong_var_filename, + "dataset_id": f"CMIP6.ABC{i}.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 10, + "source_id": [f"ABC{i}"], + "title": wrong_var_filename, }, context=None, - )) + ) + ) files = _download.ESGFFile._from_results(results, facets) assert len(files) == 2 for i in range(2): - assert files[i].name == f'tas_ABC{i}_2000-2001.nc' + assert files[i].name == f"tas_ABC{i}_2000-2001.nc" def test_sorting(): - result1 = FileResult( json={ - 'dataset_id': 'CMIP6.ABC.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 1, - 'title': 'abc_2000-2001.nc', + "dataset_id": "CMIP6.ABC.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 1, + "title": "abc_2000-2001.nc", }, context=None, ) result2 = FileResult( json={ - 'dataset_id': 'CMIP6.ABC.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 1, - 'title': 'abc_2001-2002.nc', + "dataset_id": "CMIP6.ABC.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 1, 
+ "title": "abc_2001-2002.nc", }, context=None, ) @@ -319,71 +324,82 @@ def test_sorting(): def test_local_file(): - local_path = '/path/to/somewhere' - filename = 'abc_2000-2001.nc' + local_path = "/path/to/somewhere" + filename = "abc_2000-2001.nc" result = FileResult( json={ - 'dataset_id': 'CMIP6.ABC.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 10, - 'source_id': ['ABC'], - 'title': filename, + "dataset_id": "CMIP6.ABC.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 10, + "source_id": ["ABC"], + "title": filename, }, context=None, ) file = _download.ESGFFile([result]) print(file.dataset) - reference_path = Path(local_path) / 'CMIP6' / 'ABC' / 'v1' / filename + reference_path = Path(local_path) / "CMIP6" / "ABC" / "v1" / filename assert file.local_file(local_path) == reference_path def test_merge_datasets(): - filename = 'tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc' + filename = "tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc" url0 = ( - 'http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/output1/' - 'FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/' - 'tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc') + "http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/output1/" + "FIO/FIO-ESM/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/" + "tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc" + ) url1 = ( - 'http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/cmip5/' - 'output1/FIO/fio-esm/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/' - 'tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc') + "http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/cmip5/" + "output1/FIO/fio-esm/historical/mon/atmos/Amon/r1i1p1/v20121010/tas/" + "tas_Amon_FIO-ESM_historical_r1i1p1_185001-200512.nc" + ) - dataset0 = ('cmip5.output1.FIO.FIO-ESM.historical.' - 'mon.atmos.Amon.r1i1p1.v20121010') - dataset1 = ('cmip5.output1.FIO.fio-esm.historical.' - 'mon.atmos.Amon.r1i1p1.v20121010') + dataset0 = ( + "cmip5.output1.FIO.FIO-ESM.historical." + "mon.atmos.Amon.r1i1p1.v20121010" + ) + dataset1 = ( + "cmip5.output1.FIO.fio-esm.historical." + "mon.atmos.Amon.r1i1p1.v20121010" + ) cmip5_template = ( - 'cmip5.%(product)s.%(valid_institute)s.%(model)s.' - '%(experiment)s.%(time_frequency)s.%(realm)s.%(cmor_table)s.' - '%(ensemble)s') + "cmip5.%(product)s.%(valid_institute)s.%(model)s." + "%(experiment)s.%(time_frequency)s.%(realm)s.%(cmor_table)s." 
+ "%(ensemble)s" + ) results = [ FileResult( { - 'dataset_id': dataset0 + '|esgf2.dkrz.de', - 'dataset_id_template_': [cmip5_template], - 'project': ['CMIP5'], - 'size': 200, - 'title': filename, - 'url': [ - url0 + '|application/netcdf|HTTPServer', + "dataset_id": dataset0 + "|esgf2.dkrz.de", + "dataset_id_template_": [cmip5_template], + "project": ["CMIP5"], + "size": 200, + "title": filename, + "url": [ + url0 + "|application/netcdf|HTTPServer", ], - }, None), + }, + None, + ), FileResult( { - 'dataset_id': dataset1 + '|aims3.llnl.gov', - 'dataset_id_template_': [cmip5_template], - 'project': ['CMIP5'], - 'size': 200, - 'title': filename, - 'url': [ - url1 + '|application/netcdf|HTTPServer', + "dataset_id": dataset1 + "|aims3.llnl.gov", + "dataset_id_template_": [cmip5_template], + "project": ["CMIP5"], + "size": 200, + "title": filename, + "url": [ + url1 + "|application/netcdf|HTTPServer", ], - }, None) + }, + None, + ), ] file = _download.ESGFFile(results) @@ -394,68 +410,62 @@ def test_merge_datasets(): assert file.urls == [url0, url1] -@pytest.mark.parametrize('checksum', ['yes', 'no', 'wrong']) +@pytest.mark.parametrize("checksum", ["yes", "no", "wrong"]) def test_single_download(mocker, tmp_path, checksum): - hosts_file = tmp_path / '.esmvaltool' / 'cache' / 'esgf-hosts.yml' - mocker.patch.object(_download, 'HOSTS_FILE', hosts_file) - - credentials = '/path/to/creds.pem' - mocker.patch.object(_download, - 'get_credentials', - autospec=True, - return_value=credentials) - - response = mocker.create_autospec(requests.Response, - spec_set=True, - instance=True) - response.iter_content.return_value = [b'chunk1', b'chunk2'] - get = mocker.patch.object(_download.requests, - 'get', - autospec=True, - return_value=response) + hosts_file = tmp_path / ".esmvaltool" / "cache" / "esgf-hosts.yml" + mocker.patch.object(_download, "HOSTS_FILE", hosts_file) + + response = mocker.create_autospec( + requests.Response, spec_set=True, instance=True + ) + response.iter_content.return_value = [b"chunk1", b"chunk2"] + get = mocker.patch.object( + _download.requests, "get", autospec=True, return_value=response + ) dest_folder = tmp_path - filename = 'abc_2000-2001.nc' - url = f'http://something.org/CMIP6/ABC/v1/{filename}' + filename = "abc_2000-2001.nc" + url = f"http://something.org/CMIP6/ABC/v1/{filename}" json = { - 'dataset_id': 'CMIP6.ABC.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 12, - 'source_id': ['ABC'], - 'title': filename, - 'url': [url + '|application/netcdf|HTTPServer'], + "dataset_id": "CMIP6.ABC.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 12, + "source_id": ["ABC"], + "title": filename, + "url": [url + "|application/netcdf|HTTPServer"], } - if checksum == 'yes': - json['checksum'] = ['097c42989a9e5d9dcced7b35ec4b0486'] - json['checksum_type'] = ['MD5'] - if checksum == 'wrong': - json['checksum'] = ['123'] - json['checksum_type'] = ['MD5'] + if checksum == "yes": + json["checksum"] = ["097c42989a9e5d9dcced7b35ec4b0486"] + json["checksum_type"] = ["MD5"] + if checksum == "wrong": + json["checksum"] = ["123"] + json["checksum_type"] = ["MD5"] file = _download.ESGFFile([FileResult(json=json, context=None)]) - if checksum == 'wrong': - with pytest.raises(_download.DownloadError, - match='Wrong MD5 checksum'): + if checksum == "wrong": + with pytest.raises( + _download.DownloadError, match="Wrong MD5 checksum" + ): file.download(dest_folder) return # Add a second 
url and check that it is not used. - file.urls.append('http://wrong_url.com') + file.urls.append("http://wrong_url.com") local_file = file.download(dest_folder) assert local_file.exists() - reference_path = dest_folder / 'CMIP6' / 'ABC' / 'v1' / filename + reference_path = dest_folder / "CMIP6" / "ABC" / "v1" / filename assert local_file == reference_path # File was downloaded only once get.assert_called_once() # From the correct URL - get.assert_called_with(url, stream=True, timeout=300, cert=credentials) + get.assert_called_with(url, stream=True, timeout=300) # We checked for a valid response response.raise_for_status.assert_called_once() # And requested a reasonable chunk size @@ -463,16 +473,16 @@ def test_single_download(mocker, tmp_path, checksum): def test_download_skip_existing(tmp_path, caplog): - filename = 'test.nc' - dataset = 'dataset' + filename = "test.nc" + dataset = "dataset" dest_folder = tmp_path json = { - 'dataset_id': f'CMIP6.{dataset}.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 12, - 'title': filename, + "dataset_id": f"CMIP6.{dataset}.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 12, + "title": filename, } file = _download.ESGFFile([FileResult(json=json, context=None)]) @@ -489,61 +499,62 @@ def test_download_skip_existing(tmp_path, caplog): def test_single_download_fail(mocker, tmp_path): - hosts_file = tmp_path / '.esmvaltool' / 'cache' / 'esgf-hosts.yml' - mocker.patch.object(_download, 'HOSTS_FILE', hosts_file) + hosts_file = tmp_path / ".esmvaltool" / "cache" / "esgf-hosts.yml" + mocker.patch.object(_download, "HOSTS_FILE", hosts_file) - response = mocker.create_autospec(requests.Response, - spec_set=True, - instance=True) + response = mocker.create_autospec( + requests.Response, spec_set=True, instance=True + ) response.raise_for_status.side_effect = ( - requests.exceptions.RequestException("test error")) - mocker.patch.object(_download.requests, - 'get', - autospec=True, - return_value=response) - - filename = 'test.nc' - dataset = 'dataset' + requests.exceptions.RequestException("test error") + ) + mocker.patch.object( + _download.requests, "get", autospec=True, return_value=response + ) + + filename = "test.nc" + dataset = "dataset" dest_folder = tmp_path - url = f'http://something.org/CMIP6/ABC/v1/{filename}' + url = f"http://something.org/CMIP6/ABC/v1/{filename}" json = { - 'dataset_id': f'CMIP6.{dataset}.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 12, - 'title': filename, - 'url': [url + '|application/netcdf|HTTPServer'], + "dataset_id": f"CMIP6.{dataset}.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 12, + "title": filename, + "url": [url + "|application/netcdf|HTTPServer"], } file = _download.ESGFFile([FileResult(json=json, context=None)]) local_file = file.local_file(dest_folder) - msg = (f"Failed to download file {local_file}, errors:" - "\n" + f"{url}: test error") + msg = ( + f"Failed to download file {local_file}, errors:" + "\n" + f"{url}: test error" + ) with pytest.raises(_download.DownloadError, match=re.escape(msg)): file.download(dest_folder) def test_get_download_message(): - result1 = FileResult( json={ - 'dataset_id': 'CMIP6.ABC.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 4 * 10**9, - 'title': 'abc_1850-1900.nc', - 
'url': ['http://xyz.org/file1.nc|application/netcdf|HTTPServer'], + "dataset_id": "CMIP6.ABC.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 4 * 10**9, + "title": "abc_1850-1900.nc", + "url": ["http://xyz.org/file1.nc|application/netcdf|HTTPServer"], }, context=None, ) result2 = FileResult( json={ - 'dataset_id': 'CMIP6.ABC.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 6 * 10**9, - 'title': 'abc_1900-1950.nc', - 'url': ['http://abc.com/file2.nc|application/netcdf|HTTPServer'], + "dataset_id": "CMIP6.ABC.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 6 * 10**9, + "title": "abc_1900-1950.nc", + "url": ["http://abc.com/file2.nc|application/netcdf|HTTPServer"], }, context=None, ) @@ -567,7 +578,7 @@ def test_download(mocker, tmp_path, caplog): for _ in range(5) ] for i, file in enumerate(test_files): - file.__str__.return_value = f'file{i}.nc' + file.__str__.return_value = f"file{i}.nc" file.local_file.return_value.exists.return_value = False file.size = 200 * 10**6 file.__lt__.return_value = False @@ -590,7 +601,7 @@ def test_download_fail(mocker, tmp_path, caplog): for _ in range(5) ] for i, file in enumerate(test_files): - file.__str__.return_value = f'file{i}.nc' + file.__str__.return_value = f"file{i}.nc" file.local_file.return_value.exists.return_value = False file.size = 100 * 10**6 file.__lt__.return_value = False @@ -615,9 +626,8 @@ def test_download_fail(mocker, tmp_path, caplog): def test_download_noop(caplog): """Test downloading no files.""" - caplog.set_level('DEBUG') - esmvalcore.esgf.download([], dest_folder='/does/not/exist') + caplog.set_level("DEBUG") + esmvalcore.esgf.download([], dest_folder="/does/not/exist") - msg = ("All required data is available locally," - " not downloading anything.") + msg = "All required data is available locally, not downloading anything." 
assert msg in caplog.text diff --git a/tests/unit/esgf/test_facet.py b/tests/unit/esgf/test_facet.py index ea53651261..2d27c23867 100644 --- a/tests/unit/esgf/test_facet.py +++ b/tests/unit/esgf/test_facet.py @@ -1,4 +1,5 @@ """Test `esmvalcore.esgf.facets`.""" + import pyesgf.search from esmvalcore.esgf import facets @@ -6,7 +7,7 @@ def test_create_dataset_map(monkeypatch, mocker): """Test `esmvalcore.esgf.facets.create_dataset_map`.""" - monkeypatch.setattr(facets, 'FACETS', {'CMIP5': facets.FACETS['CMIP5']}) + monkeypatch.setattr(facets, "FACETS", {"CMIP5": facets.FACETS["CMIP5"]}) conn = mocker.create_autospec( pyesgf.search.SearchConnection, @@ -19,32 +20,29 @@ def test_create_dataset_map(monkeypatch, mocker): instance=True, ) ctx.facet_counts = { - 'model': { - 'ACCESS1.0': 10, - 'BNU-ESM': 20 - }, + "model": {"ACCESS1.0": 10, "BNU-ESM": 20}, } ids = [ - 'cmip5.output1.CSIRO-BOM.ACCESS1-0.1pctCO2.3hr.atmos.3hr.r1i1p1.v1' - '|aims3.llnl.gov', - 'cmip5.output1.BNU.BNU-ESM.rcp45.mon.atmos.Amon.r1i1p1.v20120510' - '|aims3.llnl.gov', + "cmip5.output1.CSIRO-BOM.ACCESS1-0.1pctCO2.3hr.atmos.3hr.r1i1p1.v1" + "|aims3.llnl.gov", + "cmip5.output1.BNU.BNU-ESM.rcp45.mon.atmos.Amon.r1i1p1.v20120510" + "|aims3.llnl.gov", ] results = [ pyesgf.search.results.DatasetResult( - json={ - 'id': id_, - 'score': 1.0 - }, + json={"id": id_, "score": 1.0}, context=None, - ) for id_ in ids + ) + for id_ in ids ] ctx.search.side_effect = [[r] for r in results] conn.new_context.return_value = ctx - mocker.patch.object(facets.pyesgf.search, - 'SearchConnection', - autospec=True, - return_value=conn) + mocker.patch.object( + facets.pyesgf.search, + "SearchConnection", + autospec=True, + return_value=conn, + ) dataset_map = facets.create_dataset_map() - assert dataset_map == {'CMIP5': {'ACCESS1-0': 'ACCESS1.0'}} + assert dataset_map == {"CMIP5": {"ACCESS1-0": "ACCESS1.0"}} diff --git a/tests/unit/esgf/test_logon.py b/tests/unit/esgf/test_logon.py deleted file mode 100644 index c70c4e98d3..0000000000 --- a/tests/unit/esgf/test_logon.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Test the `esmvalcore.esgf._logon` module.""" -import logging - -import pyesgf.logon -import pyesgf.search -import pytest - -from esmvalcore.esgf import _logon - - -def test_get_manager(): - manager = _logon.get_manager() - assert isinstance(manager, pyesgf.logon.LogonManager) - - -@pytest.mark.parametrize('credentials', [ - { - 'interactive': True - }, - { - 'hostname': 'esgf-data.dkrz.de', - 'username': 'cookiemonster', - 'password': 'Welcome01', - 'interactive': False, - }, -]) -def test_logon(mocker, caplog, credentials): - - cfg = {'logon': credentials} - mocker.patch.object(_logon, - 'get_esgf_config', - autospec=True, - return_value=cfg) - manager = mocker.create_autospec(pyesgf.logon.LogonManager, - spec_set=True, - instance=True) - manager.is_logged_on.side_effect = False, True - mocker.patch.object(_logon, 'get_manager', return_value=manager) - - caplog.set_level(logging.INFO) - - _logon.logon() - - manager.logon.assert_called_with(**cfg['logon']) - assert "Logged on to ESGF" in caplog.text - - -def test_logon_fail_message(mocker, caplog): - cfg = {'logon': {'interactive': True}} - mocker.patch.object(_logon, - 'get_esgf_config', - autospec=True, - return_value=cfg) - manager = mocker.create_autospec(pyesgf.logon.LogonManager, - spec_set=True, - instance=True) - manager.is_logged_on.return_value = False - mocker.patch.object(_logon, 'get_manager', return_value=manager) - - _logon.logon() - - manager.logon.assert_called_with(**cfg['logon']) - 
assert "Failed to log on to ESGF" in caplog.text - - -@pytest.mark.parametrize('logged_on', [True, False]) -def test_get_credentials(mocker, logged_on): - - manager = mocker.create_autospec(pyesgf.logon.LogonManager, - spec_set=False, - instance=True) - manager.is_logged_on.return_value = logged_on - manager.esgf_credentials = '/path/to/creds.pem' - mocker.patch.object(_logon, 'logon', return_value=manager) - - creds = _logon.get_credentials() - - if logged_on: - assert creds == '/path/to/creds.pem' - else: - assert creds is None diff --git a/tests/unit/esgf/test_search.py b/tests/unit/esgf/test_search.py index 046067cb7c..65cd53f1cc 100644 --- a/tests/unit/esgf/test_search.py +++ b/tests/unit/esgf/test_search.py @@ -1,4 +1,5 @@ """Test 1esmvalcore.esgf._search`.""" + import copy import textwrap @@ -11,110 +12,111 @@ OUR_FACETS = ( { - 'dataset': 'cccma_cgcm3_1', - 'ensemble': 'run1', - 'exp': 'historical', - 'frequency': 'mon', - 'project': 'CMIP3', - 'short_name': 'tas', + "dataset": "cccma_cgcm3_1", + "ensemble": "run1", + "exp": "historical", + "frequency": "mon", + "project": "CMIP3", + "short_name": "tas", }, { - 'dataset': 'inmcm4', - 'ensemble': 'r1i1p1', - 'exp': ['historical', 'rcp85'], - 'mip': 'Amon', - 'project': 'CMIP5', - 'short_name': 'tas', + "dataset": "inmcm4", + "ensemble": "r1i1p1", + "exp": ["historical", "rcp85"], + "mip": "Amon", + "project": "CMIP5", + "short_name": "tas", }, { - 'dataset': 'AWI-ESM-1-1-LR', - 'ensemble': 'r1i1p1f1', - 'exp': 'historical', - 'grid': 'gn', - 'mip': 'Amon', - 'project': 'CMIP6', - 'short_name': 'tas', - 'start_year': 2000, - 'end_year': 2001, + "dataset": "AWI-ESM-1-1-LR", + "ensemble": "r1i1p1f1", + "exp": "historical", + "grid": "gn", + "mip": "Amon", + "project": "CMIP6", + "short_name": "tas", + "start_year": 2000, + "end_year": 2001, }, { - 'dataset': 'RACMO22E', - 'driver': 'MOHC-HadGEM2-ES', - 'domain': 'EUR-11', - 'ensemble': 'r1i1p1', - 'exp': 'historical', - 'frequency': 'mon', - 'project': 'CORDEX', - 'short_name': 'tas', - 'start_year': 1950, - 'end_year': 1952, + "dataset": "RACMO22E", + "driver": "MOHC-HadGEM2-ES", + "domain": "EUR-11", + "ensemble": "r1i1p1", + "exp": "historical", + "frequency": "mon", + "project": "CORDEX", + "short_name": "tas", + "start_year": 1950, + "end_year": 1952, }, { - 'dataset': 'CERES-EBAF', - 'frequency': 'mon', - 'project': 'obs4MIPs', - 'short_name': 'rsutcs', + "dataset": "CERES-EBAF", + "frequency": "mon", + "project": "obs4MIPs", + "short_name": "rsutcs", }, { - 'dataset': 'CERES-EBAF', - 'frequency': '*', - 'project': 'obs4MIPs', - 'short_name': 'rsutcs', + "dataset": "CERES-EBAF", + "frequency": "*", + "project": "obs4MIPs", + "short_name": "rsutcs", }, ) ESGF_FACETS = ( { - 'project': 'CMIP3', - 'model': 'cccma_cgcm3_1', - 'ensemble': 'run1', - 'experiment': 'historical', - 'time_frequency': 'mon', - 'variable': 'tas', + "project": "CMIP3", + "model": "cccma_cgcm3_1", + "ensemble": "run1", + "experiment": "historical", + "time_frequency": "mon", + "variable": "tas", }, { - 'project': 'CMIP5', - 'model': 'INM-CM4', - 'ensemble': 'r1i1p1', - 'experiment': 'historical,rcp85', - 'cmor_table': 'Amon', - 'variable': 'tas', + "project": "CMIP5", + "model": "INM-CM4", + "ensemble": "r1i1p1", + "experiment": "historical,rcp85", + "cmor_table": "Amon", + "variable": "tas", }, { - 'project': 'CMIP6', - 'source_id': 'AWI-ESM-1-1-LR', - 'member_id': 'r1i1p1f1', - 'experiment_id': 'historical', - 'grid_label': 'gn', - 'table_id': 'Amon', - 'variable': 'tas', + "project": "CMIP6", + "source_id": 
"AWI-ESM-1-1-LR", + "member_id": "r1i1p1f1", + "experiment_id": "historical", + "grid_label": "gn", + "table_id": "Amon", + "variable": "tas", }, { - 'project': 'CORDEX', - 'rcm_name': 'RACMO22E', - 'driving_model': 'MOHC-HadGEM2-ES', - 'domain': 'EUR-11', - 'ensemble': 'r1i1p1', - 'experiment': 'historical', - 'time_frequency': 'mon', - 'variable': 'tas', + "project": "CORDEX", + "rcm_name": "RACMO22E", + "driving_model": "MOHC-HadGEM2-ES", + "domain": "EUR-11", + "ensemble": "r1i1p1", + "experiment": "historical", + "time_frequency": "mon", + "variable": "tas", }, { - 'project': 'obs4MIPs', - 'source_id': 'CERES-EBAF', - 'time_frequency': 'mon', - 'variable': 'rsutcs', + "project": "obs4MIPs", + "source_id": "CERES-EBAF", + "time_frequency": "mon", + "variable": "rsutcs", }, { - 'project': 'obs4MIPs', - 'source_id': 'CERES-EBAF', - 'variable': 'rsutcs', + "project": "obs4MIPs", + "source_id": "CERES-EBAF", + "variable": "rsutcs", }, ) -@pytest.mark.parametrize('our_facets, esgf_facets', - zip(OUR_FACETS, ESGF_FACETS)) +@pytest.mark.parametrize( + "our_facets, esgf_facets", zip(OUR_FACETS, ESGF_FACETS) +) def test_get_esgf_facets(our_facets, esgf_facets): """Test that facet translation by get_esgf_facets works as expected.""" our_facets = copy.deepcopy(our_facets) @@ -128,15 +130,15 @@ def test_get_esgf_facets(our_facets, esgf_facets): def get_mock_connection(mocker, search_results): """Create a mock pyesgf.search.SearchConnection class.""" cfg = { - 'search_connection': { - 'urls': [ - 'https://esgf-index1.example.com/esg-search', - 'https://esgf-index2.example.com/esg-search', + "search_connection": { + "urls": [ + "https://esgf-index1.example.com/esg-search", + "https://esgf-index2.example.com/esg-search", ] }, } mocker.patch.object(_search, "get_esgf_config", return_value=cfg) - mocker.patch.object(_search, 'FIRST_ONLINE_INDEX_NODE', None) + mocker.patch.object(_search, "FIRST_ONLINE_INDEX_NODE", None) ctx = mocker.create_autospec( pyesgf.search.context.FileSearchContext, @@ -146,7 +148,7 @@ def get_mock_connection(mocker, search_results): ctx.search.side_effect = search_results conn_cls = mocker.patch.object( _search.pyesgf.search, - 'SearchConnection', + "SearchConnection", autospec=True, ) conn_cls.return_value.new_context.return_value = ctx @@ -154,80 +156,95 @@ def get_mock_connection(mocker, search_results): def test_esgf_search_files(mocker): - # Set up some fake FileResults - dataset_id = ('cmip5.output1.INM.inmcm4.historical' - '.mon.atmos.Amon.r1i1p1.v20130207') + dataset_id = ( + "cmip5.output1.INM.inmcm4.historical" + ".mon.atmos.Amon.r1i1p1.v20130207" + ) dataset_id_template = ( - 'cmip5.%(product)s.%(valid_institute)s.%(model)s.' - '%(experiment)s.%(time_frequency)s.%(realm)s.%(cmor_table)s.' - '%(ensemble)s' + "cmip5.%(product)s.%(valid_institute)s.%(model)s." + "%(experiment)s.%(time_frequency)s.%(realm)s.%(cmor_table)s." 
+ "%(ensemble)s" + ) + filename0 = "tas_Amon_inmcm4_historical_r1i1p1_185001-189912.nc" + filename1 = "tas_Amon_inmcm4_historical_r1i1p1_190001-200512.nc" + + aims_url0 = ( + "http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/" + "cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/" + "tas/1/" + filename0 + ) + aims_url1 = ( + "http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/" + "cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/" + "tas/1/" + filename1 + ) + dkrz_url = ( + "http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/" + "output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/" + "v20130207/tas/" + filename0 ) - filename0 = 'tas_Amon_inmcm4_historical_r1i1p1_185001-189912.nc' - filename1 = 'tas_Amon_inmcm4_historical_r1i1p1_190001-200512.nc' - - aims_url0 = ('http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/' - 'cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/' - 'tas/1/' + filename0) - aims_url1 = ('http://aims3.llnl.gov/thredds/fileServer/cmip5_css02_data/' - 'cmip5/output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/' - 'tas/1/' + filename1) - dkrz_url = ('http://esgf2.dkrz.de/thredds/fileServer/lta_dataroot/cmip5/' - 'output1/INM/inmcm4/historical/mon/atmos/Amon/r1i1p1/' - 'v20130207/tas/' + filename0) file_aims0 = FileResult( { - 'checksum': ['123'], - 'checksum_type': ['SHA256'], - 'dataset_id': dataset_id + '|aims3.llnl.gov', - 'dataset_id_template_': [dataset_id_template], - 'project': ['CMIP5'], - 'size': 100, - 'title': filename0, - 'url': [ - aims_url0 + '|application/netcdf|HTTPServer', + "checksum": ["123"], + "checksum_type": ["SHA256"], + "dataset_id": dataset_id + "|aims3.llnl.gov", + "dataset_id_template_": [dataset_id_template], + "project": ["CMIP5"], + "size": 100, + "title": filename0, + "url": [ + aims_url0 + "|application/netcdf|HTTPServer", ], - }, None) + }, + None, + ) file_aims1 = FileResult( { - 'dataset_id': dataset_id + '|aims3.llnl.gov', - 'dataset_id_template_': [dataset_id_template], - 'project': ['CMIP5'], - 'size': 200, - 'title': filename1, - 'url': [ - aims_url1 + '|application/netcdf|HTTPServer', + "dataset_id": dataset_id + "|aims3.llnl.gov", + "dataset_id_template_": [dataset_id_template], + "project": ["CMIP5"], + "size": 200, + "title": filename1, + "url": [ + aims_url1 + "|application/netcdf|HTTPServer", ], - }, None) + }, + None, + ) file_dkrz = FileResult( { - 'checksum': ['456'], - 'checksum_type': ['MD5'], - 'dataset_id': dataset_id + '|esgf2.dkrz.de', - 'dataset_id_template_': [dataset_id_template], - 'project': ['CMIP5'], - 'size': 100, - 'title': filename0, - 'url': [dkrz_url + '|application/netcdf|HTTPServer'], - }, None) + "checksum": ["456"], + "checksum_type": ["MD5"], + "dataset_id": dataset_id + "|esgf2.dkrz.de", + "dataset_id_template_": [dataset_id_template], + "project": ["CMIP5"], + "size": 100, + "title": filename0, + "url": [dkrz_url + "|application/netcdf|HTTPServer"], + }, + None, + ) facets = { - 'project': 'CMIP5', - 'model': 'inmcm4', - 'variable': 'tas', + "project": "CMIP5", + "model": "inmcm4", + "variable": "tas", } file_results = [file_aims0, file_aims1, file_dkrz] SearchConnection, context = get_mock_connection( # noqa: N806 - mocker, search_results=[file_results]) + mocker, search_results=[file_results] + ) files = _search.esgf_search_files(facets) SearchConnection.assert_called_once_with( - url='https://esgf-index1.example.com/esg-search') + url="https://esgf-index1.example.com/esg-search" + ) connection = SearchConnection.return_value 
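# The assertions below depend on one behaviour set up above: file_aims0 and
# file_dkrz describe the same file (filename0) hosted on two ESGF nodes, and
# esgf_search_files is expected to merge them into a single ESGFFile carrying
# both download URLs and both checksums. A minimal, self-contained sketch of
# that grouping idea follows as an illustration only — it is not esmvalcore's
# actual implementation, and merge_by_title is a hypothetical helper name.
from collections import defaultdict

def merge_by_title(results):
    """Group pyesgf FileResults by filename, collecting URLs and checksums."""
    merged = defaultdict(lambda: {"urls": [], "checksums": []})
    for res in results:
        entry = merged[res.json["title"]]
        # ESGF encodes each URL entry as a "url|mime_type|service_type" triple.
        entry["urls"].append(res.json["url"][0].split("|")[0])
        if "checksum" in res.json:  # file_aims1 carries no checksum above
            entry["checksums"].append(
                (res.json["checksum_type"][0], res.json["checksum"][0])
            )
    return merged

# Usage sketch: merging the three results above yields two files, and
# filename0 ends up with the aims3.llnl.gov and esgf2.dkrz.de URLs plus both
# checksums, which is the shape the assertions on file0 below verify:
# merged = merge_by_title([file_aims0, file_aims1, file_dkrz])
# assert len(merged) == 2 and len(merged[filename0]["urls"]) == 2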
connection.new_context.assert_called_with( pyesgf.search.context.FileSearchContext, @@ -245,7 +262,7 @@ def test_esgf_search_files(mocker): assert file0.name == filename0 assert file0.dataset == dataset_id assert file0.size == 100 - assert file0._checksums == [('SHA256', '123'), ('MD5', '456')] + assert file0._checksums == [("SHA256", "123"), ("MD5", "456")] urls = sorted(file0.urls) assert len(urls) == 2 assert urls[0] == aims_url0 @@ -269,11 +286,12 @@ def test_esgf_search_uses_second_index_node(mocker): search_result, ] SearchConnection, context = get_mock_connection( # noqa: N806 - mocker, search_results) + mocker, search_results + ) result = _search._search_index_nodes(facets={}) - second_index_node = 'https://esgf-index2.example.com/esg-search' + second_index_node = "https://esgf-index2.example.com/esg-search" assert _search.FIRST_ONLINE_INDEX_NODE == second_index_node assert result == search_result @@ -285,7 +303,8 @@ def test_esgf_search_fails(mocker): requests.exceptions.ConnectTimeout("Timeout error message 2"), ] SearchConnection, context = get_mock_connection( # noqa: N806 - mocker, search_results) + mocker, search_results + ) with pytest.raises(FileNotFoundError) as excinfo: _search.esgf_search_files(facets={}) @@ -300,44 +319,50 @@ def test_esgf_search_fails(mocker): def test_select_latest_versions_filenotfound(mocker): """Test `select_latest_versions` raises FileNotFoundError.""" file = mocker.create_autospec(ESGFFile, instance=True) - file.name = 'ta.nc' - file.dataset = 'CMIP6.MODEL.v1' - file.facets = {'version': 'v1'} - file.__repr__ = lambda _: 'ESGFFile:CMIP6/MODEL/v1/ta.nc' - result = _search.select_latest_versions(files=[file], versions='v2') + file.name = "ta.nc" + file.dataset = "CMIP6.MODEL.v1" + file.facets = {"version": "v1"} + file.__repr__ = lambda _: "ESGFFile:CMIP6/MODEL/v1/ta.nc" + result = _search.select_latest_versions(files=[file], versions="v2") assert result == [] -@pytest.mark.parametrize('timerange,selection', [ - ('1851/1852', slice(1, 3)), - ('1851/P1Y', slice(1, 3)), - ('*', slice(None)), -]) +@pytest.mark.parametrize( + "timerange,selection", + [ + ("1851/1852", slice(1, 3)), + ("1851/P1Y", slice(1, 3)), + ("*", slice(None)), + ], +) def test_select_by_time(timerange, selection): - dataset_id = ('CMIP6.CMIP.AWI.AWI-ESM-1-1-LR.historical' - '.r1i1p1f1.Amon.tas.gn.v20200212') + dataset_id = ( + "CMIP6.CMIP.AWI.AWI-ESM-1-1-LR.historical" + ".r1i1p1f1.Amon.tas.gn.v20200212" + ) dataset_id_template = ( - '%(mip_era)s.%(activity_drs)s.%(institution_id)s.' - '%(source_id)s.%(experiment_id)s.%(member_id)s.%(table_id)s.' - '%(variable_id)s.%(grid_label)s' + "%(mip_era)s.%(activity_drs)s.%(institution_id)s." + "%(source_id)s.%(experiment_id)s.%(member_id)s.%(table_id)s." 
+ "%(variable_id)s.%(grid_label)s" ) filenames = [ - 'tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_185001-185012.nc', - 'tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_185101-185112.nc', - 'tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_185201-185212.nc', - 'tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_185301-185312.nc', + "tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_185001-185012.nc", + "tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_185101-185112.nc", + "tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_185201-185212.nc", + "tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_185301-185312.nc", ] results = [ FileResult( json={ - 'title': filename, - 'dataset_id': dataset_id + '|xyz.com', - 'dataset_id_template_': [dataset_id_template], - 'project': ['CMIP5'], - 'size': 100, + "title": filename, + "dataset_id": dataset_id + "|xyz.com", + "dataset_id_template_": [dataset_id_template], + "project": ["CMIP5"], + "size": 100, }, context=None, - ) for filename in filenames + ) + for filename in filenames ] files = [ESGFFile([r]) for r in results] @@ -347,50 +372,50 @@ def test_select_by_time(timerange, selection): def test_select_by_time_nodate(): - dataset_id = ( - 'cmip3.MIROC.miroc3_2_hires.historical.mon.atmos.run1.tas.v1') + dataset_id = "cmip3.MIROC.miroc3_2_hires.historical.mon.atmos.run1.tas.v1" dataset_id_template = ( - '%(project)s.%(institute)s.%(model)s.%(experiment)s.' - '%(time_frequency)s.%(realm)s.%(ensemble)s.%(variable)s' + "%(project)s.%(institute)s.%(model)s.%(experiment)s." + "%(time_frequency)s.%(realm)s.%(ensemble)s.%(variable)s" ) - filenames = ['tas_A1.nc'] + filenames = ["tas_A1.nc"] results = [ FileResult( json={ - 'title': filename, - 'dataset_id': dataset_id + '|xyz.com', - 'dataset_id_template_': [dataset_id_template], - 'project': ['CMIP5'], - 'size': 100, + "title": filename, + "dataset_id": dataset_id + "|xyz.com", + "dataset_id_template_": [dataset_id_template], + "project": ["CMIP5"], + "size": 100, }, context=None, - ) for filename in filenames + ) + for filename in filenames ] files = [ESGFFile([r]) for r in results] - result = _search.select_by_time(files, '1851/1852') + result = _search.select_by_time(files, "1851/1852") assert result == files def test_invalid_dataset_id_template(): - dataset_id = ( - 'obs4MIPs.IUP.XCH4_CRDP3.xch4.mon.v100') + dataset_id = "obs4MIPs.IUP.XCH4_CRDP3.xch4.mon.v100" dataset_id_template = ( - '%(project)s.%(institute)s.%(source_id)s.%(time_frequency)s' + "%(project)s.%(institute)s.%(source_id)s.%(time_frequency)s" ) - filenames = ['xch4_ghgcci_l3_v100_200301_201412.nc'] + filenames = ["xch4_ghgcci_l3_v100_200301_201412.nc"] results = [ FileResult( json={ - 'title': filename, - 'dataset_id': dataset_id + '|esgf.ceda.ac.uk', - 'dataset_id_template_': [dataset_id_template], - 'project': ['obs4MIPs'], - 'size': 100, - 'source_id': 'XCH4_CRDP3', + "title": filename, + "dataset_id": dataset_id + "|esgf.ceda.ac.uk", + "dataset_id_template_": [dataset_id_template], + "project": ["obs4MIPs"], + "size": 100, + "source_id": "XCH4_CRDP3", }, context=None, - ) for filename in filenames + ) + for filename in filenames ] file = ESGFFile(results) @@ -398,8 +423,10 @@ def test_invalid_dataset_id_template(): def test_search_unknown_project(): - project = 'Unknown' - msg = (f"Unable to download from ESGF, because project {project} is not on" - " it or is not supported by the esmvalcore.esgf module.") + project = "Unknown" + msg = ( + f"Unable to download from ESGF, because project {project} is not on" + " it or is not supported by the 
esmvalcore.esgf module." + ) with pytest.raises(ValueError, match=msg): - find_files(project=project, dataset='', short_name='') + find_files(project=project, dataset="", short_name="") diff --git a/tests/unit/experimental/test_output_file.py b/tests/unit/experimental/test_output_file.py index 03c0597418..46ed1edd12 100644 --- a/tests/unit/experimental/test_output_file.py +++ b/tests/unit/experimental/test_output_file.py @@ -7,17 +7,17 @@ def test_output_file_create(): """Test creation of output file objects.""" - image_file = OutputFile.create('some/image.png') + image_file = OutputFile.create("some/image.png") assert isinstance(image_file, ImageFile) - data_file = OutputFile.create('some/data.nc') + data_file = OutputFile.create("some/data.nc") assert isinstance(data_file, DataFile) def test_output_file_locations(): """Test methods for location output files.""" - file = OutputFile('output/drc/file.suffix') + file = OutputFile("output/drc/file.suffix") - assert file.citation_file.name.endswith('_citation.bibtex') - assert file.data_citation_file.name.endswith('_data_citation_info.txt') - assert file.provenance_xml_file.name.endswith('_provenance.xml') + assert file.citation_file.name.endswith("_citation.bibtex") + assert file.data_citation_file.name.endswith("_data_citation_info.txt") + assert file.provenance_xml_file.name.endswith("_provenance.xml") diff --git a/tests/unit/experimental/test_recipe.py b/tests/unit/experimental/test_recipe.py index 32fc22f214..62f2c6a38a 100644 --- a/tests/unit/experimental/test_recipe.py +++ b/tests/unit/experimental/test_recipe.py @@ -4,18 +4,19 @@ from esmvalcore.experimental import get_recipe pytest.importorskip( - 'esmvaltool', - reason='The behaviour of these tests depends on what ``DIAGNOSTICS.path``' - 'points to. This is defined by a forward-reference to ESMValTool, which' - 'is not installed in the CI, but likely to be available in a developer' - 'or user installation.') + "esmvaltool", + reason="The behaviour of these tests depends on what ``DIAGNOSTICS.path``" + "points to. 
This is defined by a forward-reference to ESMValTool, which" + "is not installed in the CI, but likely to be available in a developer" + "or user installation.", +) def test_recipe(): """Coverage test for Recipe.""" TAGS.set_tag_values(DIAGNOSTICS.load_tags()) - recipe = get_recipe('examples/recipe_python') + recipe = get_recipe("examples/recipe_python") assert isinstance(repr(recipe), str) assert isinstance(str(recipe), str) diff --git a/tests/unit/experimental/test_recipe_info.py b/tests/unit/experimental/test_recipe_info.py index 5b6989caa7..0da598395f 100644 --- a/tests/unit/experimental/test_recipe_info.py +++ b/tests/unit/experimental/test_recipe_info.py @@ -15,29 +15,31 @@ def test_contributor(): """Coverage test for Contributor.""" - TAGS.set_tag_value(section='authors', - tag='doe_john', - value={ - 'name': 'Doe, John', - 'institute': 'Testing', - 'orcid': 'https://orcid.org/0000-0000-0000-0000', - }) - - contributor = Contributor.from_tag('doe_john') - - assert contributor.name == 'John Doe' - assert contributor.institute == 'Testing' - assert contributor.orcid == 'https://orcid.org/0000-0000-0000-0000' + TAGS.set_tag_value( + section="authors", + tag="doe_john", + value={ + "name": "Doe, John", + "institute": "Testing", + "orcid": "https://orcid.org/0000-0000-0000-0000", + }, + ) + + contributor = Contributor.from_tag("doe_john") + + assert contributor.name == "John Doe" + assert contributor.institute == "Testing" + assert contributor.orcid == "https://orcid.org/0000-0000-0000-0000" assert isinstance(repr(contributor), str) assert isinstance(str(contributor), str) def test_contributor_from_dict(): """Test Contributor init from dict.""" - name = 'John Doe' - institute = 'Testing' - orcid = 'https://orcid.org/0000-0000-0000-0000' - attributes = {'name': name, 'institute': institute, 'orcid': orcid} + name = "John Doe" + institute = "Testing" + orcid = "https://orcid.org/0000-0000-0000-0000" + attributes = {"name": name, "institute": institute, "orcid": orcid} author = Contributor.from_dict(attributes=attributes) assert author.name == name assert author.institute == institute @@ -46,39 +48,40 @@ def test_contributor_from_dict(): def test_reference(monkeypatch): """Coverage test for Reference.""" - monkeypatch.setattr(esmvalcore.experimental.recipe_metadata, 'DIAGNOSTICS', - DIAGNOSTICS) + monkeypatch.setattr( + esmvalcore.experimental.recipe_metadata, "DIAGNOSTICS", DIAGNOSTICS + ) - reference = Reference.from_tag('doe2021') + reference = Reference.from_tag("doe2021") assert isinstance(repr(reference), str) assert isinstance(str(reference), str) - assert isinstance(reference.render('markdown'), str) + assert isinstance(reference.render("markdown"), str) - assert str(reference) == 'J. Doe. Test free or fail hard. 2021. doi:0.' + assert str(reference) == "J. Doe. Test free or fail hard. 2021. doi:0." def test_project(): """Coverage test for Project.""" - TAGS.set_tag_value('projects', 'test_project', 'Test Project') + TAGS.set_tag_value("projects", "test_project", "Test Project") - project = Project.from_tag('test_project') + project = Project.from_tag("test_project") assert isinstance(repr(project), str) assert isinstance(str(project), str) - assert project.project == 'Test Project' + assert project.project == "Test Project" def test_recipe_info_str(): """Test `RecipeInfo.__str__`.""" data = { - 'documentation': { - 'title': 'Test recipe', - 'description': 'This is a very empty test recipe.' 
+ "documentation": { + "title": "Test recipe", + "description": "This is a very empty test recipe.", } } - recipe = RecipeInfo(data, filename='/path/to/recipe_test.yml') + recipe = RecipeInfo(data, filename="/path/to/recipe_test.yml") text = textwrap.dedent(""" ## Test recipe diff --git a/tests/unit/experimental/test_recipe_output.py b/tests/unit/experimental/test_recipe_output.py index 756cfaad7f..7d64935596 100644 --- a/tests/unit/experimental/test_recipe_output.py +++ b/tests/unit/experimental/test_recipe_output.py @@ -10,12 +10,12 @@ def test_diagnostic_output_repr(mocker): mocker.create_autospec(recipe_output.TaskOutput, instance=True), ] for i, task in enumerate(tasks): - task.__str__.return_value = f'Task-{i}' + task.__str__.return_value = f"Task-{i}" diagnostic = recipe_output.DiagnosticOutput( - name='diagnostic_name', - title='This is a diagnostic', - description='With a description', + name="diagnostic_name", + title="This is a diagnostic", + description="With a description", task_output=tasks, ) @@ -36,12 +36,15 @@ def test_recipe_output_add_to_filters(): filters = {} valid_attr = recipe_output.RecipeOutput.FILTER_ATTRS[0] - recipe_output.RecipeOutput._add_to_filters(filters, - {valid_attr: "single value"}) recipe_output.RecipeOutput._add_to_filters( - filters, {valid_attr: ["list value 1", "repeated list value"]}) + filters, {valid_attr: "single value"} + ) + recipe_output.RecipeOutput._add_to_filters( + filters, {valid_attr: ["list value 1", "repeated list value"]} + ) recipe_output.RecipeOutput._add_to_filters( - filters, {valid_attr: ["list value 2", "repeated list value"]}) + filters, {valid_attr: ["list value 2", "repeated list value"]} + ) assert len(filters) == 1 assert valid_attr in filters @@ -66,8 +69,10 @@ def test_recipe_output_add_to_filters_no_valid_attributes(): filters = {} invalid = "invalid_attribute" recipe_output.RecipeOutput._add_to_filters(filters, {invalid: "value"}) - assert (invalid not in recipe_output.RecipeOutput.FILTER_ATTRS - and len(filters) == 0) + assert ( + invalid not in recipe_output.RecipeOutput.FILTER_ATTRS + and len(filters) == 0 + ) def test_recipe_output_sort_filters(): @@ -77,6 +82,7 @@ def test_recipe_output_sort_filters(): valid_attr = recipe_output.RecipeOutput.FILTER_ATTRS[0] unsorted_attributes = ["1", "2", "4", "value", "3"] recipe_output.RecipeOutput._add_to_filters( - filters, {valid_attr: unsorted_attributes}) + filters, {valid_attr: unsorted_attributes} + ) filters = recipe_output.RecipeOutput._sort_filters(filters) assert filters[valid_attr] == sorted(unsorted_attributes) diff --git a/tests/unit/experimental/test_utils.py b/tests/unit/experimental/test_utils.py index 54e1a835ac..aad9e3d627 100644 --- a/tests/unit/experimental/test_utils.py +++ b/tests/unit/experimental/test_utils.py @@ -9,16 +9,17 @@ ) pytest.importorskip( - 'esmvaltool', - reason='The behaviour of these tests depends on what ``DIAGNOSTICS.path``' - 'points to. This is defined by a forward-reference to ESMValTool, which' - 'is not installed in the CI, but likely to be available in a developer' - 'or user installation.') + "esmvaltool", + reason="The behaviour of these tests depends on what ``DIAGNOSTICS.path``" + "points to. 
This is defined by a forward-reference to ESMValTool, which" + "is not installed in the CI, but likely to be available in a developer" + "or user installation.", +) def test_get_recipe(): """Get single recipe.""" - recipe = get_recipe('examples/recipe_python.yml') + recipe = get_recipe("examples/recipe_python.yml") assert isinstance(recipe, Recipe) @@ -32,10 +33,10 @@ def test_recipe_list_find(): """Get all recipes.""" TAGS.set_tag_values(DIAGNOSTICS.load_tags()) - recipes = get_all_recipes(subdir='examples') + recipes = get_all_recipes(subdir="examples") assert isinstance(recipes, RecipeList) - result = recipes.find('python') + result = recipes.find("python") assert isinstance(result, RecipeList) diff --git a/tests/unit/local/test_facets.py b/tests/unit/local/test_facets.py index a2d1044072..4406a27f53 100644 --- a/tests/unit/local/test_facets.py +++ b/tests/unit/local/test_facets.py @@ -6,32 +6,32 @@ @pytest.mark.parametrize( - 'path,drs,expected', + "path,drs,expected", [ ( - '/climate_data/value1/value2/filename.nc', - '{facet1}/{facet2.lower}', + "/climate_data/value1/value2/filename.nc", + "{facet1}/{facet2.lower}", { - 'facet1': 'value1', - 'facet2': 'value2', + "facet1": "value1", + "facet2": "value2", }, ), ( - '/climate_data/value1/value1-value2/filename.nc', - '{facet1}/{facet1}-{facet2}', + "/climate_data/value1/value1-value2/filename.nc", + "{facet1}/{facet1}-{facet2}", { - 'facet1': 'value1', - 'facet2': 'value2', + "facet1": "value1", + "facet2": "value2", }, ), ( - '/climate_data/value-1/value-1-value-2/filename.nc', - '{facet1}/{facet1}-{facet2}', + "/climate_data/value-1/value-1-value-2/filename.nc", + "{facet1}/{facet1}-{facet2}", { - 'facet1': 'value-1', - 'facet2': 'value-2', + "facet1": "value-1", + "facet2": "value-2", }, - ) + ), ], ) def test_path2facets(path, drs, expected): @@ -43,7 +43,7 @@ def test_path2facets(path, drs, expected): def test_localfile(): - file = LocalFile('/a/b.nc') - file.facets = {'a': 'A'} - assert Path(file) == Path('/a/b.nc') - assert file.facets == {'a': 'A'} + file = LocalFile("/a/b.nc") + file.facets = {"a": "A"} + assert Path(file) == Path("/a/b.nc") + assert file.facets == {"a": "A"} diff --git a/tests/unit/local/test_get_data_sources.py b/tests/unit/local/test_get_data_sources.py index 3def03462b..cef6d49891 100644 --- a/tests/unit/local/test_get_data_sources.py +++ b/tests/unit/local/test_get_data_sources.py @@ -7,45 +7,36 @@ from esmvalcore.local import DataSource, _get_data_sources -@pytest.mark.parametrize('rootpath_drs', [ - ( - { - 'CMIP6': { - '/climate_data': 'ESGF' - } - }, - {}, - ), - ( - { - 'CMIP6': ['/climate_data'] - }, - { - 'CMIP6': 'ESGF' - }, - ), - ( - { - 'default': ['/climate_data'] - }, - { - 'CMIP6': 'ESGF' - }, - ), -]) +@pytest.mark.parametrize( + "rootpath_drs", + [ + ( + {"CMIP6": {"/climate_data": "ESGF"}}, + {}, + ), + ( + {"CMIP6": ["/climate_data"]}, + {"CMIP6": "ESGF"}, + ), + ( + {"default": ["/climate_data"]}, + {"CMIP6": "ESGF"}, + ), + ], +) def test_get_data_sources(monkeypatch, rootpath_drs): # Make sure that default config-developer file is used validate_config_developer(None) rootpath, drs = rootpath_drs - monkeypatch.setitem(CFG, 'rootpath', rootpath) - monkeypatch.setitem(CFG, 'drs', drs) - sources = _get_data_sources('CMIP6') + monkeypatch.setitem(CFG, "rootpath", rootpath) + monkeypatch.setitem(CFG, "drs", drs) + sources = _get_data_sources("CMIP6") source = sources[0] assert isinstance(source, DataSource) - assert source.rootpath == Path('/climate_data') - assert '{project}' in 
source.dirname_template - assert '{short_name}' in source.filename_template + assert source.rootpath == Path("/climate_data") + assert "{project}" in source.dirname_template + assert "{short_name}" in source.filename_template def test_get_data_sources_nodefault(monkeypatch): @@ -54,12 +45,10 @@ def test_get_data_sources_nodefault(monkeypatch): monkeypatch.setitem( CFG, - 'rootpath', + "rootpath", { - 'CMIP5': { - '/climate_data': 'default' - }, + "CMIP5": {"/climate_data": "default"}, }, ) with pytest.raises(KeyError): - _get_data_sources('CMIP6') + _get_data_sources("CMIP6") diff --git a/tests/unit/local/test_replace_tags.py b/tests/unit/local/test_replace_tags.py index 38d0f63f75..a9dfc79ead 100644 --- a/tests/unit/local/test_replace_tags.py +++ b/tests/unit/local/test_replace_tags.py @@ -1,4 +1,5 @@ """Tests for `_replace_tags` in `esmvalcore.local`.""" + from pathlib import Path import pytest @@ -7,42 +8,46 @@ from esmvalcore.local import _replace_tags VARIABLE = { - 'project': 'CMIP6', - 'dataset': 'ACCURATE-MODEL', - 'activity': 'act', - 'exp': 'experiment', - 'institute': 'HMA', - 'ensemble': 'r1i1p1f1', - 'mip': 'Amon', - 'short_name': 'tas', - 'grid': 'gr', + "project": "CMIP6", + "dataset": "ACCURATE-MODEL", + "activity": "act", + "exp": "experiment", + "institute": "HMA", + "ensemble": "r1i1p1f1", + "mip": "Amon", + "short_name": "tas", + "grid": "gr", } def test_replace_tags(): """Tests for `_replace_tags` function.""" path = _replace_tags( - '{activity}/{institute}/{dataset}/{exp}/{ensemble}/{mip}/{short_name}/' - '{grid}/{version}', VARIABLE) + "{activity}/{institute}/{dataset}/{exp}/{ensemble}/{mip}/{short_name}/" + "{grid}/{version}", + VARIABLE, + ) input_file = _replace_tags( - '{short_name}_{mip}_{dataset}_{exp}_{ensemble}_{grid}*.nc', VARIABLE) + "{short_name}_{mip}_{dataset}_{exp}_{ensemble}_{grid}*.nc", VARIABLE + ) output_file = _replace_tags( - '{project}_{dataset}_{mip}_{exp}_{ensemble}_{short_name}', VARIABLE) + "{project}_{dataset}_{mip}_{exp}_{ensemble}_{short_name}", VARIABLE + ) assert path == [ - Path('act/HMA/ACCURATE-MODEL/experiment/r1i1p1f1/Amon/tas/gr/*') + Path("act/HMA/ACCURATE-MODEL/experiment/r1i1p1f1/Amon/tas/gr/*") ] assert input_file == [ - Path('tas_Amon_ACCURATE-MODEL_experiment_r1i1p1f1_gr*.nc') + Path("tas_Amon_ACCURATE-MODEL_experiment_r1i1p1f1_gr*.nc") ] assert output_file == [ - Path('CMIP6_ACCURATE-MODEL_Amon_experiment_r1i1p1f1_tas') + Path("CMIP6_ACCURATE-MODEL_Amon_experiment_r1i1p1f1_tas") ] def test_replace_tags_missing_facet(): """Check that a RecipeError is raised if a required facet is missing.""" - paths = ['{short_name}_{missing}_*.nc'] - variable = {'short_name': 'tas'} + paths = ["{short_name}_{missing}_*.nc"] + variable = {"short_name": "tas"} with pytest.raises(RecipeError) as exc: _replace_tags(paths, variable) @@ -51,37 +56,42 @@ def test_replace_tags_missing_facet(): def test_replace_tags_list_of_str(): paths = [ - 'folder/subfolder/{short_name}', - 'folder2/{short_name}', - 'subfolder/{short_name}', + "folder/subfolder/{short_name}", + "folder2/{short_name}", + "subfolder/{short_name}", ] reference = [ - Path('folder/subfolder/tas'), - Path('folder2/tas'), - Path('subfolder/tas'), + Path("folder/subfolder/tas"), + Path("folder2/tas"), + Path("subfolder/tas"), ] assert sorted(_replace_tags(paths, VARIABLE)) == reference def test_replace_tags_with_subexperiment(): """Tests for `_replace_tags` function.""" - variable = {'sub_experiment': '199411', **VARIABLE} + variable = {"sub_experiment": "199411", **VARIABLE} paths = 
_replace_tags( - '{activity}/{institute}/{dataset}/{exp}/{ensemble}/{mip}/{short_name}/' - '{grid}/{version}', variable) + "{activity}/{institute}/{dataset}/{exp}/{ensemble}/{mip}/{short_name}/" + "{grid}/{version}", + variable, + ) input_file = _replace_tags( - '{short_name}_{mip}_{dataset}_{exp}_{ensemble}_{grid}*.nc', variable) + "{short_name}_{mip}_{dataset}_{exp}_{ensemble}_{grid}*.nc", variable + ) output_file = _replace_tags( - '{project}_{dataset}_{mip}_{exp}_{ensemble}_{short_name}', variable) + "{project}_{dataset}_{mip}_{exp}_{ensemble}_{short_name}", variable + ) expected_paths = [ Path( - 'act/HMA/ACCURATE-MODEL/experiment/199411-r1i1p1f1/Amon/tas/gr/*'), - Path('act/HMA/ACCURATE-MODEL/experiment/r1i1p1f1/Amon/tas/gr/*'), + "act/HMA/ACCURATE-MODEL/experiment/199411-r1i1p1f1/Amon/tas/gr/*" + ), + Path("act/HMA/ACCURATE-MODEL/experiment/r1i1p1f1/Amon/tas/gr/*"), ] assert sorted(paths) == expected_paths assert input_file == [ - Path('tas_Amon_ACCURATE-MODEL_experiment_199411-r1i1p1f1_gr*.nc') + Path("tas_Amon_ACCURATE-MODEL_experiment_199411-r1i1p1f1_gr*.nc") ] assert output_file == [ - Path('CMIP6_ACCURATE-MODEL_Amon_experiment_199411-r1i1p1f1_tas') + Path("CMIP6_ACCURATE-MODEL_Amon_experiment_199411-r1i1p1f1_tas") ] diff --git a/tests/unit/local/test_select_files.py b/tests/unit/local/test_select_files.py index d2beeb74cd..674912060c 100644 --- a/tests/unit/local/test_select_files.py +++ b/tests/unit/local/test_select_files.py @@ -4,7 +4,6 @@ def test_select_files(): - files = [ "pr_Amon_MPI-ESM1-2-HR_historical_r1i1p1f1_gn_195501-195912.nc", "pr_Amon_MPI-ESM1-2-HR_historical_r1i1p1f1_gn_196001-196412.nc", @@ -12,7 +11,7 @@ def test_select_files(): "pr_Amon_MPI-ESM1-2-HR_historical_r1i1p1f1_gn_197001-197412.nc", ] - result = _select_files(files, '1962/1967') + result = _select_files(files, "1962/1967") expected = [ "pr_Amon_MPI-ESM1-2-HR_historical_r1i1p1f1_gn_196001-196412.nc", @@ -22,9 +21,8 @@ def test_select_files(): assert result == expected -@pytest.mark.parametrize('timerange', ['196201/1967', '1962/196706']) +@pytest.mark.parametrize("timerange", ["196201/1967", "1962/196706"]) def test_select_files_different_length_start_end(timerange): - files = [ "pr_Amon_MPI-ESM1-2-HR_historical_r1i1p1f1_gn_195501-195912.nc", "pr_Amon_MPI-ESM1-2-HR_historical_r1i1p1f1_gn_196001-196412.nc", @@ -51,7 +49,7 @@ def test_select_files_monthly_resolution(): "pr_Amon_EC-Earth3_dcppA-hindcast_s1960-r1i1p1f1_gr_196311-196410.nc", ] - result = _select_files(files, '196201/196205') + result = _select_files(files, "196201/196205") expected = [ "pr_Amon_EC-Earth3_dcppA-hindcast_s1960-r1i1p1f1_gr_196111-196210.nc" @@ -67,10 +65,10 @@ def test_select_files_daily_resolution(): files = [ filename + "19601101-19611031.nc", filename + "19611101-19621031.nc", - filename + "19621101-19631031.nc" + filename + "19621101-19631031.nc", ] - result = _select_files(files, '19600101/19611215') + result = _select_files(files, "19600101/19611215") expected = [ filename + "19601101-19611031.nc", @@ -96,11 +94,11 @@ def test_select_files_sub_daily_resolution(): ] result_no_separator = _select_files( - files_no_separator, - '19600101T0900/19610101T09HH00MM') + files_no_separator, "19600101T0900/19610101T09HH00MM" + ) result_separator = _select_files( - files_separator, - '19600101T0900/19610101T0900') + files_separator, "19600101T0900/19610101T0900" + ) expected_no_separator = [ filename + "196011010900-196110312100.nc", @@ -119,7 +117,8 @@ def test_select_files_time_period(): various resolution.""" filename_date = 
"pr_Amon_EC-Earth3_dcppA-hindcast_s1960-r1i1p1f1_gr_" filename_datetime = ( - "psl_6hrPlev_EC-Earth3_dcppA-hindcast_s1960-r1i1p1f1_gr_") + "psl_6hrPlev_EC-Earth3_dcppA-hindcast_s1960-r1i1p1f1_gr_" + ) files_date = [ filename_date + "196011-196110.nc", @@ -135,8 +134,8 @@ def test_select_files_time_period(): filename_datetime + "196211010300-196310312100.nc", ] - result_date = _select_files(files_date, '196211/P2Y5M') - result_datetime = _select_files(files_datetime, '19601101T1300/P1Y0M0DT6H') + result_date = _select_files(files_date, "196211/P2Y5M") + result_datetime = _select_files(files_datetime, "19601101T1300/P1Y0M0DT6H") expected_date = [ filename_date + "196211-196310.nc", @@ -164,9 +163,9 @@ def test_select_files_varying_format(): filename + "196211010300-196310312100.nc", ] - result_yearly = _select_files(files, '1960/1962') - result_monthly = _select_files(files, '196011/196210') - result_daily = _select_files(files, '19601101/19601105') + result_yearly = _select_files(files, "1960/1962") + result_monthly = _select_files(files, "196011/196210") + result_daily = _select_files(files, "19601101/19601105") assert result_yearly == files assert result_monthly == files[0:2] diff --git a/tests/unit/local/test_time.py b/tests/unit/local/test_time.py index a01f1b4d05..31afbedee3 100644 --- a/tests/unit/local/test_time.py +++ b/tests/unit/local/test_time.py @@ -1,4 +1,5 @@ """Unit tests for time related functions in `esmvalcore.local`.""" + from pathlib import Path import iris @@ -20,11 +21,11 @@ def _get_esgf_file(path): """Get ESGFFile object.""" result = pyesgf.search.results.FileResult( json={ - 'dataset_id': 'CMIP6.ABC.v1|something.org', - 'dataset_id_template_': ["%(mip_era)s.%(source_id)s"], - 'project': ['CMIP6'], - 'size': 10, - 'title': path, + "dataset_id": "CMIP6.ABC.v1|something.org", + "dataset_id_template_": ["%(mip_era)s.%(source_id)s"], + "project": ["CMIP6"], + "size": 10, + "title": path, }, context=None, ) @@ -32,68 +33,71 @@ def _get_esgf_file(path): FILENAME_CASES = [ - ['var_whatever_1980-1981', 1980, 1981], - ['var_whatever_1980.nc', 1980, 1980], - ['a.b.x_yz_185001-200512.nc', 1850, 2005], - ['var_whatever_19800101-19811231.nc1', 1980, 1981], - ['var_whatever_19800101.nc', 1980, 1980], - ['1980-1981_var_whatever.nc', 1980, 1981], - ['1980_var_whatever.nc', 1980, 1980], - ['var_control-1980_whatever.nc', 1980, 1980], - ['19800101-19811231_var_whatever.nc', 1980, 1981], - ['19800101_var_whatever.nc', 1980, 1980], - ['var_control-19800101_whatever.nc', 1980, 1980], - ['19800101_var_control-1950_whatever.nc', 1980, 1980], - ['var_control-1950_whatever_19800101.nc', 1980, 1980], - ['CM61-LR-hist-03.1950_18500101_19491231_1M_concbc.nc', 1850, 1949], + ["var_whatever_1980-1981", 1980, 1981], + ["var_whatever_1980.nc", 1980, 1980], + ["a.b.x_yz_185001-200512.nc", 1850, 2005], + ["var_whatever_19800101-19811231.nc1", 1980, 1981], + ["var_whatever_19800101.nc", 1980, 1980], + ["1980-1981_var_whatever.nc", 1980, 1981], + ["1980_var_whatever.nc", 1980, 1980], + ["var_control-1980_whatever.nc", 1980, 1980], + ["19800101-19811231_var_whatever.nc", 1980, 1981], + ["19800101_var_whatever.nc", 1980, 1980], + ["var_control-19800101_whatever.nc", 1980, 1980], + ["19800101_var_control-1950_whatever.nc", 1980, 1980], + ["var_control-1950_whatever_19800101.nc", 1980, 1980], + ["CM61-LR-hist-03.1950_18500101_19491231_1M_concbc.nc", 1850, 1949], [ - 'icon-2.6.1_atm_amip_R2B5_r1v1i1p1l1f1_phy_3d_ml_20150101T000000Z.nc', - 2015, 2015 + 
"icon-2.6.1_atm_amip_R2B5_r1v1i1p1l1f1_phy_3d_ml_20150101T000000Z.nc", + 2015, + 2015, ], - ['pr_A1.186101-200012.nc', 1861, 2000], - ['tas_A1.20C3M_1.CCSM.atmm.1990-01_cat_1999-12.nc', 1990, 1999], - ['E5sf00_1M_1940_032.grb', 1940, 1940], - ['E5sf00_1D_1998-04_167.grb', 1998, 1998], - ['E5sf00_1H_1986-04-11_167.grb', 1986, 1986], - ['E5sf00_1M_1940-1941_032.grb', 1940, 1941], - ['E5sf00_1D_1998-01_1999-12_167.grb', 1998, 1999], - ['E5sf00_1H_2000-01-01_2001-12-31_167.grb', 2000, 2001], + ["pr_A1.186101-200012.nc", 1861, 2000], + ["tas_A1.20C3M_1.CCSM.atmm.1990-01_cat_1999-12.nc", 1990, 1999], + ["E5sf00_1M_1940_032.grb", 1940, 1940], + ["E5sf00_1D_1998-04_167.grb", 1998, 1998], + ["E5sf00_1H_1986-04-11_167.grb", 1986, 1986], + ["E5sf00_1M_1940-1941_032.grb", 1940, 1941], + ["E5sf00_1D_1998-01_1999-12_167.grb", 1998, 1999], + ["E5sf00_1H_2000-01-01_2001-12-31_167.grb", 2000, 2001], ] FILENAME_DATE_CASES = [ - ['var_whatever_1980-1981', '1980', '1981'], - ['var_whatever_1980.nc', '1980', '1980'], - ['a.b.x_yz_185001-200512.nc', '185001', '200512'], - ['var_whatever_19800101-19811231.nc1', '19800101', '19811231'], - ['var_whatever_19800101.nc', '19800101', '19800101'], - ['1980-1981_var_whatever.nc', '1980', '1981'], - ['1980_var_whatever.nc', '1980', '1980'], - ['var_control-1980_whatever.nc', '1980', '1980'], - ['19800101-19811231_var_whatever.nc', '19800101', '19811231'], - ['19800101_var_whatever.nc', '19800101', '19800101'], - ['var_control-19800101_whatever.nc', '19800101', '19800101'], - ['19800101_var_control-1950_whatever.nc', '19800101', '19800101'], - ['var_control-1950_whatever_19800101.nc', '19800101', '19800101'], + ["var_whatever_1980-1981", "1980", "1981"], + ["var_whatever_1980.nc", "1980", "1980"], + ["a.b.x_yz_185001-200512.nc", "185001", "200512"], + ["var_whatever_19800101-19811231.nc1", "19800101", "19811231"], + ["var_whatever_19800101.nc", "19800101", "19800101"], + ["1980-1981_var_whatever.nc", "1980", "1981"], + ["1980_var_whatever.nc", "1980", "1980"], + ["var_control-1980_whatever.nc", "1980", "1980"], + ["19800101-19811231_var_whatever.nc", "19800101", "19811231"], + ["19800101_var_whatever.nc", "19800101", "19800101"], + ["var_control-19800101_whatever.nc", "19800101", "19800101"], + ["19800101_var_control-1950_whatever.nc", "19800101", "19800101"], + ["var_control-1950_whatever_19800101.nc", "19800101", "19800101"], [ - 'CM61-LR-hist-03.1950_18500101_19491231_1M_concbc.nc', '18500101', - '19491231' + "CM61-LR-hist-03.1950_18500101_19491231_1M_concbc.nc", + "18500101", + "19491231", ], [ - 'icon-2.6.1_atm_amip_R2B5_r1v1i1p1l1f1_phy_3d_ml_20150101T000000Z.nc', - '20150101T000000Z', '20150101T000000Z' + "icon-2.6.1_atm_amip_R2B5_r1v1i1p1l1f1_phy_3d_ml_20150101T000000Z.nc", + "20150101T000000Z", + "20150101T000000Z", ], - ['pr_A1.186101-200012.nc', '186101', '200012'], - ['tas_A1.20C3M_1.CCSM.atmm.1990-01_cat_1999-12.nc', '199001', '199912'], - ['E5sf00_1M_1940_032.grb', '1940', '1940'], - ['E5sf00_1D_1998-04_167.grb', '199804', '199804'], - ['E5sf00_1H_1986-04-11_167.grb', '19860411', '19860411'], - ['E5sf00_1M_1940-1941_032.grb', '1940', '1941'], - ['E5sf00_1D_1998-01_1999-12_167.grb', '199801', '199912'], - ['E5sf00_1H_2000-01-01_2001-12-31_167.grb', '20000101', '20011231'], + ["pr_A1.186101-200012.nc", "186101", "200012"], + ["tas_A1.20C3M_1.CCSM.atmm.1990-01_cat_1999-12.nc", "199001", "199912"], + ["E5sf00_1M_1940_032.grb", "1940", "1940"], + ["E5sf00_1D_1998-04_167.grb", "199804", "199804"], + ["E5sf00_1H_1986-04-11_167.grb", "19860411", "19860411"], + 
["E5sf00_1M_1940-1941_032.grb", "1940", "1941"], + ["E5sf00_1D_1998-01_1999-12_167.grb", "199801", "199912"], + ["E5sf00_1H_2000-01-01_2001-12-31_167.grb", "20000101", "20011231"], ] -@pytest.mark.parametrize('case', FILENAME_CASES) +@pytest.mark.parametrize("case", FILENAME_CASES) def test_get_start_end_year(case): """Tests for _get_start_end_year function.""" filename, case_start, case_end = case @@ -125,7 +129,7 @@ def test_get_start_end_year(case): assert case_end == end -@pytest.mark.parametrize('case', FILENAME_DATE_CASES) +@pytest.mark.parametrize("case", FILENAME_DATE_CASES) def test_get_start_end_date(case): """Tests for _get_start_end_date function.""" filename, case_start, case_end = case @@ -160,11 +164,11 @@ def test_get_start_end_date(case): def test_read_years_from_cube(monkeypatch, tmp_path): """Try to get years from cube if no date in filename.""" monkeypatch.chdir(tmp_path) - temp_file = LocalFile('test.nc') - cube = iris.cube.Cube([0, 0], var_name='var') - time = iris.coords.DimCoord([0, 366], - 'time', - units='days since 1990-01-01') + temp_file = LocalFile("test.nc") + cube = iris.cube.Cube([0, 0], var_name="var") + time = iris.coords.DimCoord( + [0, 366], "time", units="days since 1990-01-01" + ) cube.add_dim_coord(time, 0) iris.save(cube, temp_file) start, end = _get_start_end_year(temp_file) @@ -175,23 +179,23 @@ def test_read_years_from_cube(monkeypatch, tmp_path): def test_read_datetime_from_cube(monkeypatch, tmp_path): """Try to get datetime from cube if no date in filename.""" monkeypatch.chdir(tmp_path) - temp_file = 'test.nc' - cube = iris.cube.Cube([0, 0], var_name='var') - time = iris.coords.DimCoord([0, 366], - 'time', - units='days since 1990-01-01') + temp_file = "test.nc" + cube = iris.cube.Cube([0, 0], var_name="var") + time = iris.coords.DimCoord( + [0, 366], "time", units="days since 1990-01-01" + ) cube.add_dim_coord(time, 0) iris.save(cube, temp_file) start, end = _get_start_end_date(temp_file) - assert start == '19900101' - assert end == '19910102' + assert start == "19900101" + assert end == "19910102" def test_raises_if_unable_to_deduce(monkeypatch, tmp_path): """Try to get time from cube if no date in filename.""" monkeypatch.chdir(tmp_path) - temp_file = 'test.nc' - cube = iris.cube.Cube([0, 0], var_name='var') + temp_file = "test.nc" + cube = iris.cube.Cube([0, 0], var_name="var") iris.save(cube, temp_file) with pytest.raises(ValueError): _get_start_end_date(temp_file) @@ -202,79 +206,80 @@ def test_raises_if_unable_to_deduce(monkeypatch, tmp_path): def test_fails_if_no_date_present(): """Test raises if no date is present.""" with pytest.raises((ValueError)): - _get_start_end_date('var_whatever') + _get_start_end_date("var_whatever") with pytest.raises((ValueError)): - _get_start_end_year('var_whatever') + _get_start_end_year("var_whatever") def test_get_timerange_from_years(): """Test a `timerange` tag with value `start_year/end_year` can be built from tags `start_year` and `end_year`.""" - variable = {'start_year': 2000, 'end_year': 2002} + variable = {"start_year": 2000, "end_year": 2002} _replace_years_with_timerange(variable) - assert 'start_year' not in variable - assert 'end_year' not in variable - assert variable['timerange'] == '2000/2002' + assert "start_year" not in variable + assert "end_year" not in variable + assert variable["timerange"] == "2000/2002" def test_get_timerange_from_start_year(): """Test a `timerange` tag with value `start_year/start_year` can be built from tag `start_year` when an `end_year` is not given.""" - 
variable = {'start_year': 2000} + variable = {"start_year": 2000} _replace_years_with_timerange(variable) - assert 'start_year' not in variable - assert variable['timerange'] == '2000/2000' + assert "start_year" not in variable + assert variable["timerange"] == "2000/2000" def test_get_timerange_from_end_year(): """Test a `timerange` tag with value `end_year/end_year` can be built from tag `end_year` when a `start_year` is not given.""" - variable = {'end_year': 2002} + variable = {"end_year": 2002} _replace_years_with_timerange(variable) - assert 'end_year' not in variable - assert variable['timerange'] == '2002/2002' + assert "end_year" not in variable + assert variable["timerange"] == "2002/2002" TEST_DATES_TO_TIMERANGE = [ - (2000, 2000, '2000/2000'), - (1, 2000, '0001/2000'), - (2000, 1, '2000/0001'), - (1, 2, '0001/0002'), - ('2000', '2000', '2000/2000'), - ('1', '2000', '0001/2000'), - (2000, '1', '2000/0001'), - ('1', 2, '0001/0002'), - ('*', '*', '*/*'), - (2000, '*', '2000/*'), - ('2000', '*', '2000/*'), - (1, '*', '0001/*'), - ('1', '*', '0001/*'), - ('*', 2000, '*/2000'), - ('*', '2000', '*/2000'), - ('*', 1, '*/0001'), - ('*', '1', '*/0001'), - ('P5Y', 'P5Y', 'P5Y/P5Y'), - (2000, 'P5Y', '2000/P5Y'), - ('2000', 'P5Y', '2000/P5Y'), - (1, 'P5Y', '0001/P5Y'), - ('1', 'P5Y', '0001/P5Y'), - ('P5Y', 2000, 'P5Y/2000'), - ('P5Y', '2000', 'P5Y/2000'), - ('P5Y', 1, 'P5Y/0001'), - ('P5Y', '1', 'P5Y/0001'), - ('*', 'P5Y', '*/P5Y'), - ('P5Y', '*', 'P5Y/*'), + (2000, 2000, "2000/2000"), + (1, 2000, "0001/2000"), + (2000, 1, "2000/0001"), + (1, 2, "0001/0002"), + ("2000", "2000", "2000/2000"), + ("1", "2000", "0001/2000"), + (2000, "1", "2000/0001"), + ("1", 2, "0001/0002"), + ("*", "*", "*/*"), + (2000, "*", "2000/*"), + ("2000", "*", "2000/*"), + (1, "*", "0001/*"), + ("1", "*", "0001/*"), + ("*", 2000, "*/2000"), + ("*", "2000", "*/2000"), + ("*", 1, "*/0001"), + ("*", "1", "*/0001"), + ("P5Y", "P5Y", "P5Y/P5Y"), + (2000, "P5Y", "2000/P5Y"), + ("2000", "P5Y", "2000/P5Y"), + (1, "P5Y", "0001/P5Y"), + ("1", "P5Y", "0001/P5Y"), + ("P5Y", 2000, "P5Y/2000"), + ("P5Y", "2000", "P5Y/2000"), + ("P5Y", 1, "P5Y/0001"), + ("P5Y", "1", "P5Y/0001"), + ("*", "P5Y", "*/P5Y"), + ("P5Y", "*", "P5Y/*"), ] -@pytest.mark.parametrize('start_date,end_date,expected_timerange', - TEST_DATES_TO_TIMERANGE) +@pytest.mark.parametrize( + "start_date,end_date,expected_timerange", TEST_DATES_TO_TIMERANGE +) def test_dates_to_timerange(start_date, end_date, expected_timerange): """Test ``_dates_to_timerange``.""" timerange = _dates_to_timerange(start_date, end_date) @@ -282,21 +287,21 @@ def test_dates_to_timerange(start_date, end_date, expected_timerange): TEST_TRUNCATE_DATES = [ - ('2000', '2000', (2000, 2000)), - ('200001', '2000', (2000, 2000)), - ('2000', '200001', (2000, 2000)), - ('200001', '2000', (2000, 2000)), - ('200001', '200001', (200001, 200001)), - ('20000102', '200001', (200001, 200001)), - ('200001', '20000102', (200001, 200001)), - ('20000102', '20000102', (20000102, 20000102)), - ('20000102T23:59:59', '20000102', (20000102, 20000102)), - ('20000102', '20000102T23:59:59', (20000102, 20000102)), - ('20000102T235959', '20000102T01:02:03', (20000102235959, 20000102010203)), + ("2000", "2000", (2000, 2000)), + ("200001", "2000", (2000, 2000)), + ("2000", "200001", (2000, 2000)), + ("200001", "2000", (2000, 2000)), + ("200001", "200001", (200001, 200001)), + ("20000102", "200001", (200001, 200001)), + ("200001", "20000102", (200001, 200001)), + ("20000102", "20000102", (20000102, 20000102)), + 
("20000102T23:59:59", "20000102", (20000102, 20000102)), + ("20000102", "20000102T23:59:59", (20000102, 20000102)), + ("20000102T235959", "20000102T01:02:03", (20000102235959, 20000102010203)), ] -@pytest.mark.parametrize('date,date_file,expected_output', TEST_TRUNCATE_DATES) +@pytest.mark.parametrize("date,date_file,expected_output", TEST_TRUNCATE_DATES) def test_truncate_dates(date, date_file, expected_output): """Test ``_truncate_dates``.""" output = _truncate_dates(date, date_file) diff --git a/tests/unit/main/test_esmvaltool.py b/tests/unit/main/test_esmvaltool.py index ec3e0596e0..b6a5b96599 100644 --- a/tests/unit/main/test_esmvaltool.py +++ b/tests/unit/main/test_esmvaltool.py @@ -26,11 +26,11 @@ def cfg(mocker, tmp_path): session.__getitem__.side_effect = cfg_dict.__getitem__ session.__setitem__.side_effect = cfg_dict.__setitem__ - output_dir = tmp_path / 'esmvaltool_output' - session.session_dir = output_dir / 'recipe_test' - session.run_dir = session.session_dir / 'run_dir' - session.preproc_dir = session.session_dir / 'preproc_dir' - session._fixed_file_dir = session.preproc_dir / 'fixed_files' + output_dir = tmp_path / "esmvaltool_output" + session.session_dir = output_dir / "recipe_test" + session.run_dir = session.session_dir / "run_dir" + session.preproc_dir = session.session_dir / "preproc_dir" + session._fixed_file_dir = session.preproc_dir / "fixed_files" cfg = mocker.Mock() cfg.start_session.return_value = session @@ -43,29 +43,32 @@ def session(cfg): return cfg.start_session.return_value -@pytest.mark.parametrize('argument,value', [ - ('max_datasets', 2), - ('max_years', 2), - ('skip_nonexistent', True), - ('search_esgf', 'when_missing'), - ('diagnostics', 'diagnostic_name/group_name'), - ('check_level', 'strict'), -]) +@pytest.mark.parametrize( + "argument,value", + [ + ("max_datasets", 2), + ("max_years", 2), + ("skip_nonexistent", True), + ("search_esgf", "when_missing"), + ("diagnostics", "diagnostic_name/group_name"), + ("check_level", "strict"), + ], +) def test_run_command_line_config(mocker, cfg, argument, value): """Check that the configuration is updated from the command line.""" mocker.patch.object( esmvalcore.config, - 'CFG', + "CFG", cfg, ) session = cfg.start_session.return_value program = ESMValTool() - recipe_file = '/path/to/recipe_test.yml' - config_file = '/path/to/config-user.yml' + recipe_file = "/path/to/recipe_test.yml" + config_file = "/path/to/config-user.yml" - mocker.patch.object(program, '_get_recipe', return_value=Path(recipe_file)) - mocker.patch.object(program, '_run') + mocker.patch.object(program, "_get_recipe", return_value=Path(recipe_file)) + mocker.patch.object(program, "_run") program.run(recipe_file, config_file, **{argument: value}) @@ -77,41 +80,36 @@ def test_run_command_line_config(mocker, cfg, argument, value): assert session[argument] == value -@pytest.mark.parametrize('search_esgf', ['never', 'when_missing', 'always']) +@pytest.mark.parametrize("search_esgf", ["never", "when_missing", "always"]) def test_run(mocker, session, search_esgf): - session['search_esgf'] = search_esgf - session['log_level'] = 'default' - session['config_file'] = '/path/to/config-user.yml' - session['remove_preproc_dir'] = True - session['save_intermediary_cubes'] = False - session.cmor_log.read_text.return_value = 'WARNING: attribute not present' + session["search_esgf"] = search_esgf + session["log_level"] = "default" + session["config_file"] = "/path/to/config-user.yml" + session["remove_preproc_dir"] = True + session["save_intermediary_cubes"] = 
False + session.cmor_log.read_text.return_value = "WARNING: attribute not present" - recipe = Path('/recipe_dir/recipe_test.yml') + recipe = Path("/recipe_dir/recipe_test.yml") # Patch every imported function mocker.patch.object( esmvalcore.config._logging, - 'configure_logging', + "configure_logging", create_autospec=True, ) mocker.patch.object( esmvalcore.config._diagnostics, - 'DIAGNOSTICS', + "DIAGNOSTICS", create_autospec=True, ) mocker.patch.object( esmvalcore._task, - 'resource_usage_logger', - create_autospec=True, - ) - mocker.patch.object( - esmvalcore.esgf._logon, - 'logon', + "resource_usage_logger", create_autospec=True, ) mocker.patch.object( esmvalcore._main, - 'process_recipe', + "process_recipe", create_autospec=True, ) @@ -120,17 +118,12 @@ def test_run(mocker, session, search_esgf): # Check that the correct functions have been called esmvalcore.config._logging.configure_logging.assert_called_once_with( output_dir=session.run_dir, - console_log_level=session['log_level'], + console_log_level=session["log_level"], ) - if search_esgf == 'never': - esmvalcore.esgf._logon.logon.assert_not_called() - else: - esmvalcore.esgf._logon.logon.assert_called_once() - esmvalcore._task.resource_usage_logger.assert_called_once_with( pid=os.getpid(), - filename=session.run_dir / 'resource_usage.txt', + filename=session.run_dir / "resource_usage.txt", ) esmvalcore._main.process_recipe.assert_called_once_with( recipe_file=recipe, @@ -149,7 +142,7 @@ def test_run_session_dir_exists(session): def test_run_session_dir_exists_alternative_fails(mocker, session): mocker.patch.object( esmvalcore._main.Path, - 'mkdir', + "mkdir", side_effect=FileExistsError, ) program = ESMValTool() @@ -160,8 +153,8 @@ def test_run_session_dir_exists_alternative_fails(mocker, session): def test_clean_preproc_dir(session): session.preproc_dir.mkdir(parents=True) session._fixed_file_dir.mkdir(parents=True) - session['remove_preproc_dir'] = True - session['save_intermediary_cubes'] = False + session["remove_preproc_dir"] = True + session["save_intermediary_cubes"] = False program = ESMValTool() program._clean_preproc(session) assert not session.preproc_dir.exists() @@ -171,67 +164,65 @@ def test_clean_preproc_dir(session): def test_do_not_clean_preproc_dir(session): session.preproc_dir.mkdir(parents=True) session._fixed_file_dir.mkdir(parents=True) - session['remove_preproc_dir'] = False - session['save_intermediary_cubes'] = True + session["remove_preproc_dir"] = False + session["save_intermediary_cubes"] = True program = ESMValTool() program._clean_preproc(session) assert session.preproc_dir.exists() assert session._fixed_file_dir.exists() -@mock.patch('esmvalcore._main.entry_points') +@mock.patch("esmvalcore._main.entry_points") def test_header(mock_entry_points, caplog): - entry_point = mock.Mock() - entry_point.dist.name = 'MyEntry' - entry_point.dist.version = 'v42.42.42' - entry_point.name = 'Entry name' + entry_point.dist.name = "MyEntry" + entry_point.dist.version = "v42.42.42" + entry_point.name = "Entry name" mock_entry_points.return_value = [entry_point] with caplog.at_level(logging.INFO): ESMValTool()._log_header( - 'path_to_config_file', - ['path_to_log_file1', 'path_to_log_file2'], + "path_to_config_file", + ["path_to_log_file1", "path_to_log_file2"], ) assert len(caplog.messages) == 8 assert caplog.messages[0] == HEADER - assert caplog.messages[1] == 'Package versions' - assert caplog.messages[2] == '----------------' - assert caplog.messages[3] == f'ESMValCore: {__version__}' - assert 
caplog.messages[4] == 'MyEntry: v42.42.42' - assert caplog.messages[5] == '----------------' - assert caplog.messages[6] == 'Using config file path_to_config_file' + assert caplog.messages[1] == "Package versions" + assert caplog.messages[2] == "----------------" + assert caplog.messages[3] == f"ESMValCore: {__version__}" + assert caplog.messages[4] == "MyEntry: v42.42.42" + assert caplog.messages[5] == "----------------" + assert caplog.messages[6] == "Using config file path_to_config_file" assert caplog.messages[7] == ( - 'Writing program log files to:\n' - 'path_to_log_file1\n' - 'path_to_log_file2' + "Writing program log files to:\n" + "path_to_log_file1\n" + "path_to_log_file2" ) -@mock.patch('os.path.isfile') +@mock.patch("os.path.isfile") def test_get_recipe(is_file): """Test get recipe.""" is_file.return_value = True - recipe = ESMValTool()._get_recipe('/recipe.yaml') - assert recipe == Path('/recipe.yaml') + recipe = ESMValTool()._get_recipe("/recipe.yaml") + assert recipe == Path("/recipe.yaml") -@mock.patch('os.path.isfile') -@mock.patch('esmvalcore.config._diagnostics.DIAGNOSTICS') +@mock.patch("os.path.isfile") +@mock.patch("esmvalcore.config._diagnostics.DIAGNOSTICS") def test_get_installed_recipe(diagnostics, is_file): - def encountered(path): - return Path(path) == Path('/install_folder/recipe.yaml') + return Path(path) == Path("/install_folder/recipe.yaml") is_file.side_effect = encountered - diagnostics.recipes = Path('/install_folder') - recipe = ESMValTool()._get_recipe('recipe.yaml') - assert recipe == Path('/install_folder/recipe.yaml') + diagnostics.recipes = Path("/install_folder") + recipe = ESMValTool()._get_recipe("recipe.yaml") + assert recipe == Path("/install_folder/recipe.yaml") -@mock.patch('os.path.isfile') +@mock.patch("os.path.isfile") def test_get_recipe_not_found(is_file): """Test get recipe.""" is_file.return_value = False - recipe = ESMValTool()._get_recipe('/recipe.yaml') - assert recipe == Path('/recipe.yaml') + recipe = ESMValTool()._get_recipe("/recipe.yaml") + assert recipe == Path("/recipe.yaml") diff --git a/tests/unit/main/test_main.py b/tests/unit/main/test_main.py index 546115156a..e0012fe3fa 100644 --- a/tests/unit/main/test_main.py +++ b/tests/unit/main/test_main.py @@ -10,16 +10,16 @@ def test_run_recipe_error(mocker, caplog): """Test a run of the tool with a mistake in the recipe.""" program = mocker.patch.object( fire, - 'Fire', + "Fire", create_autospec=True, instance=True, ) msg = "A mistake in the recipe" program.side_effect = RecipeError(msg) - exit_ = mocker.patch.object(sys, 'exit', create_autspec=True) + exit_ = mocker.patch.object(sys, "exit", create_autspec=True) - caplog.set_level('DEBUG') + caplog.set_level("DEBUG") _main.run() print(caplog.text) @@ -27,9 +27,9 @@ def test_run_recipe_error(mocker, caplog): exit_.assert_called_once_with(1) # Check that only the RecipeError is logged above DEBUG level - errors = [r for r in caplog.records if r.levelname != 'DEBUG'] + errors = [r for r in caplog.records if r.levelname != "DEBUG"] assert len(errors) == 1 assert errors[0].message == msg # Check that the stack trace is logged - assert 'Traceback' in caplog.text + assert "Traceback" in caplog.text diff --git a/tests/unit/main/test_parse_resume.py b/tests/unit/main/test_parse_resume.py index 06e936f4ca..19659142c1 100644 --- a/tests/unit/main/test_parse_resume.py +++ b/tests/unit/main/test_parse_resume.py @@ -5,21 +5,21 @@ def create_previous_run(path, suffix): """Create a mock previous run of the tool.""" - prev_run = path / 
-    prev_recipe = prev_run / 'run' / 'recipe_test.yml'
+    prev_run = path / f"recipe_test_{suffix}"
+    prev_recipe = prev_run / "run" / "recipe_test.yml"
     prev_recipe.parent.mkdir(parents=True)
-    prev_recipe.write_text('test')
+    prev_recipe.write_text("test")
     return prev_run


 def test_parse_resume(tmp_path):
     """Test `esmvalcore._main.parse_resume`."""
-    prev_run1 = create_previous_run(tmp_path, '20210923_112001')
-    prev_run2 = create_previous_run(tmp_path, '20210924_123553')
+    prev_run1 = create_previous_run(tmp_path, "20210923_112001")
+    prev_run2 = create_previous_run(tmp_path, "20210924_123553")

-    recipe = tmp_path / 'recipe_test.yml'
-    recipe.write_text('test')
+    recipe = tmp_path / "recipe_test.yml"
+    recipe.write_text("test")

     resume_dirs = parse_resume(f"{prev_run1} {prev_run2}", recipe)
     assert resume_dirs == [prev_run1, prev_run2]
@@ -30,7 +30,7 @@ def test_noop(tmp_path):

     Test that not using the resume option works.
     """
-    recipe = tmp_path / 'recipe_test.yml'
+    recipe = tmp_path / "recipe_test.yml"
     resume_dirs = parse_resume(None, recipe)
     assert resume_dirs == []
@@ -40,10 +40,10 @@ def test_fail_on_different_recipe(tmp_path):

     Test that trying to resume a different recipe fails.
     """
-    prev_run = create_previous_run(tmp_path, '20210924_123553')
+    prev_run = create_previous_run(tmp_path, "20210924_123553")

-    recipe = tmp_path / 'recipe_test.yml'
-    recipe.write_text('something else')
+    recipe = tmp_path / "recipe_test.yml"
+    recipe.write_text("something else")

     with pytest.raises(ValueError):
         parse_resume(str(prev_run), recipe)
diff --git a/tests/unit/main/test_recipes.py b/tests/unit/main/test_recipes.py
index 6b95561c9a..598dc4643d 100644
--- a/tests/unit/main/test_recipes.py
+++ b/tests/unit/main/test_recipes.py
@@ -1,4 +1,5 @@
 """Test the `Recipe` class implementing the `esmvaltool recipes` command."""
+
 import textwrap

 import esmvalcore.config._diagnostics
@@ -8,15 +9,15 @@
 def test_list(mocker, tmp_path, capsys):
     """Test the command `esmvaltool recipes list`."""
     recipe_dir = tmp_path
-    recipe1 = recipe_dir / 'recipe_test1.yml'
-    recipe2 = recipe_dir / 'subdir' / 'recipe_test2.yml'
+    recipe1 = recipe_dir / "recipe_test1.yml"
+    recipe2 = recipe_dir / "subdir" / "recipe_test2.yml"
     recipe1.touch()
     recipe2.parent.mkdir()
     recipe2.touch()

     diagnostics = mocker.patch.object(
         esmvalcore.config._diagnostics,
-        'DIAGNOSTICS',
+        "DIAGNOSTICS",
         create_autospec=True,
     )
     diagnostics.recipes = recipe_dir
@@ -38,12 +39,12 @@ def test_list(mocker, tmp_path, capsys):
 def test_show(mocker, tmp_path, capsys):
     """Test the command `esmvaltool recipes show`."""
     recipe_dir = tmp_path
-    recipe = recipe_dir / 'recipe_test.yml'
+    recipe = recipe_dir / "recipe_test.yml"
     recipe.write_text("example")

     diagnostics = mocker.patch.object(
         esmvalcore.config._diagnostics,
-        'DIAGNOSTICS',
+        "DIAGNOSTICS",
         create_autospec=True,
     )
     diagnostics.recipes = recipe_dir
diff --git a/tests/unit/preprocessor/_area/test_area.py b/tests/unit/preprocessor/_area/test_area.py
index e7c3f998c5..9e88002aaa 100644
--- a/tests/unit/preprocessor/_area/test_area.py
+++ b/tests/unit/preprocessor/_area/test_area.py
@@ -1,4 +1,5 @@
 """Unit tests for the :func:`esmvalcore.preprocessor._area` module."""
+
 import unittest
 from pathlib import Path

@@ -31,78 +32,79 @@
 class Test(tests.Test):
     """Test class for the :func:`esmvalcore.preprocessor._area` module."""
+
     def setUp(self):
         """Prepare tests."""
         self.coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
         data = np.ones((2, 5, 5), dtype=np.float32)

         times = iris.coords.DimCoord(
             [0, 1],
-            standard_name='time',
-            units='days since 2000-01-01',
+            standard_name="time",
+            units="days since 2000-01-01",
         )
         lons = iris.coords.DimCoord(
-            [i + .5 for i in range(5)],
-            standard_name='longitude',
-            bounds=[[i, i + 1.] for i in range(5)],  # [0,1] to [4,5]
-            units='degrees_east',
-            coord_system=self.coord_sys)
+            [i + 0.5 for i in range(5)],
+            standard_name="longitude",
+            bounds=[[i, i + 1.0] for i in range(5)],  # [0,1] to [4,5]
+            units="degrees_east",
+            coord_system=self.coord_sys,
+        )
         lats = iris.coords.DimCoord(
-            [i + .5 for i in range(5)],
-            standard_name='latitude',
-            bounds=[[i, i + 1.] for i in range(5)],
-            units='degrees_north',
+            [i + 0.5 for i in range(5)],
+            standard_name="latitude",
+            bounds=[[i, i + 1.0] for i in range(5)],
+            units="degrees_north",
             coord_system=self.coord_sys,
         )
         coords_spec = [(times, 0), (lats, 1), (lons, 2)]
         self.grid = iris.cube.Cube(
             data,
             dim_coords_and_dims=coords_spec,
-            units='kg m-2 s-1',
+            units="kg m-2 s-1",
         )

         ndata = np.ones((6, 6))
         nlons = iris.coords.DimCoord(
             [i - 2.5 for i in range(6)],
-            standard_name='longitude',
-            bounds=[[i - 3., i - 2.] for i in range(6)],  # [3,2] to [4,5]
-            units='degrees_east',
-            coord_system=self.coord_sys)
+            standard_name="longitude",
+            bounds=[[i - 3.0, i - 2.0] for i in range(6)],  # [-3,-2] to [2,3]
+            units="degrees_east",
+            coord_system=self.coord_sys,
+        )
         nlats = iris.coords.DimCoord(
             [i - 2.5 for i in range(6)],
-            standard_name='latitude',
-            bounds=[[i - 3., i - 2.] for i in range(6)],
-            units='degrees_north',
+            standard_name="latitude",
+            bounds=[[i - 3.0, i - 2.0] for i in range(6)],
+            units="degrees_north",
             coord_system=self.coord_sys,
         )
         coords_spec = [(nlats, 0), (nlons, 1)]
         self.negative_grid = iris.cube.Cube(
             ndata,
             dim_coords_and_dims=coords_spec,
-            units='kg m-2 s-1',
+            units="kg m-2 s-1",
         )

     def _add_cell_measure_to_grid(self):
         """Add cell_area to self.grid."""
-        cube = guess_bounds(self.grid, ['longitude', 'latitude'])
+        cube = guess_bounds(self.grid, ["longitude", "latitude"])
         grid_areas = iris.analysis.cartography.area_weights(cube)[0]
         measure = iris.coords.CellMeasure(
-            grid_areas,
-            standard_name='cell_area',
-            units='m2',
-            measure='area')
+            grid_areas, standard_name="cell_area", units="m2", measure="area"
+        )
         self.grid.add_cell_measure(measure, (1, 2))

     def test_area_statistics_mean(self):
         """Test for area average of a 2D field."""
-        self.assertFalse(self.grid.cell_measures('cell_area'))
+        self.assertFalse(self.grid.cell_measures("cell_area"))

-        result = area_statistics(self.grid, 'mean')
+        result = area_statistics(self.grid, "mean")

-        expected = np.ma.array([1., 1.], dtype=np.float32)
+        expected = np.ma.array([1.0, 1.0], dtype=np.float32)
         self.assert_array_equal(result.data, expected)
-        self.assertEqual(result.units, 'kg m-2 s-1')
-        self.assertFalse(self.grid.cell_measures('cell_area'))
-        self.assertFalse(result.cell_measures('cell_area'))
+        self.assertEqual(result.units, "kg m-2 s-1")
+        self.assertFalse(self.grid.cell_measures("cell_area"))
+        self.assertFalse(result.cell_measures("cell_area"))

     def test_area_statistics_cell_measure_mean(self):
         """Test for area average of a 2D field.
@@ -110,49 +112,49 @@ def test_area_statistics_cell_measure_mean(self):

         The area measure is pre-loaded in the cube.
""" self._add_cell_measure_to_grid() - result = area_statistics(self.grid, 'mean') - expected = np.ma.array([1., 1.], dtype=np.float32) + result = area_statistics(self.grid, "mean") + expected = np.ma.array([1.0, 1.0], dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') - self.assertTrue(self.grid.cell_measures('cell_area')) - self.assertFalse(result.cell_measures('cell_area')) + self.assertEqual(result.units, "kg m-2 s-1") + self.assertTrue(self.grid.cell_measures("cell_area")) + self.assertFalse(result.cell_measures("cell_area")) def test_area_statistics_min(self): """Test for area average of a 2D field.""" - result = area_statistics(self.grid, 'min') - expected = np.ma.array([1., 1.], dtype=np.float32) + result = area_statistics(self.grid, "min") + expected = np.ma.array([1.0, 1.0], dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') + self.assertEqual(result.units, "kg m-2 s-1") def test_area_statistics_max(self): """Test for area average of a 2D field.""" - result = area_statistics(self.grid, 'max') - expected = np.ma.array([1., 1.], dtype=np.float32) + result = area_statistics(self.grid, "max") + expected = np.ma.array([1.0, 1.0], dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') + self.assertEqual(result.units, "kg m-2 s-1") def test_area_statistics_median(self): """Test for area average of a 2D field.""" - result = area_statistics(self.grid, 'median') - expected = np.ma.array([1., 1.], dtype=np.float32) + result = area_statistics(self.grid, "median") + expected = np.ma.array([1.0, 1.0], dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') + self.assertEqual(result.units, "kg m-2 s-1") def test_area_statistics_std_dev(self): """Test for area average of a 2D field.""" - result = area_statistics(self.grid, 'std_dev') - expected = np.ma.array([0., 0.], dtype=np.float32) + result = area_statistics(self.grid, "std_dev") + expected = np.ma.array([0.0, 0.0], dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') + self.assertEqual(result.units, "kg m-2 s-1") def test_area_statistics_sum(self): """Test for sum of a 2D field.""" - result = area_statistics(self.grid, 'sum') + result = area_statistics(self.grid, "sum") grid_areas = iris.analysis.cartography.area_weights(self.grid) grid_sum = np.sum(grid_areas[0]) expected = np.array([grid_sum, grid_sum]).astype(np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg s-1') + self.assertEqual(result.units, "kg s-1") def test_area_statistics_cell_measure_sum(self): """Test for area sum of a 2D field. 
@@ -161,45 +163,45 @@ def test_area_statistics_cell_measure_sum(self): """ self._add_cell_measure_to_grid() grid_areas = iris.analysis.cartography.area_weights(self.grid) - result = area_statistics(self.grid, 'sum') + result = area_statistics(self.grid, "sum") grid_sum = np.sum(grid_areas[0]) expected = np.array([grid_sum, grid_sum]).astype(np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg s-1') + self.assertEqual(result.units, "kg s-1") def test_area_statistics_variance(self): """Test for area average of a 2D field.""" - result = area_statistics(self.grid, 'variance') - expected = np.ma.array([0., 0.], dtype=np.float32) + result = area_statistics(self.grid, "variance") + expected = np.ma.array([0.0, 0.0], dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg2 m-4 s-2') + self.assertEqual(result.units, "kg2 m-4 s-2") def test_area_statistics_neg_lon(self): """Test for area average of a 2D field.""" - result = area_statistics(self.negative_grid, 'mean') - expected = np.array([1.], dtype=np.float32) + result = area_statistics(self.negative_grid, "mean") + expected = np.array([1.0], dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') + self.assertEqual(result.units, "kg m-2 s-1") def test_area_statistics_rms(self): """Test for area rms of a 2D field.""" - result = area_statistics(self.grid, 'rms') - expected = np.ma.array([1., 1.], dtype=np.float32) + result = area_statistics(self.grid, "rms") + expected = np.ma.array([1.0, 1.0], dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') + self.assertEqual(result.units, "kg m-2 s-1") def test_area_statistics_subtract_mean(self): """Test for area average of a 2D field.""" input_data = self.grid.copy() - self.assertFalse(input_data.cell_measures('cell_area')) + self.assertFalse(input_data.cell_measures("cell_area")) - result = area_statistics(input_data, 'mean', normalize='subtract') + result = area_statistics(input_data, "mean", normalize="subtract") self.assertEqual(input_data, self.grid) self.assertEqual(result.shape, input_data.shape) expected = np.ma.zeros((2, 5, 5), dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertFalse(result.cell_measures('cell_area')) + self.assertFalse(result.cell_measures("cell_area")) self.assertEqual(result.metadata, self.grid.metadata) for coord in self.grid.coords(): self.assertEqual(result.coord(coord.name()), coord) @@ -212,13 +214,13 @@ def test_area_statistics_cell_measure_subtract_mean(self): self._add_cell_measure_to_grid() input_data = self.grid.copy() - result = area_statistics(input_data, 'mean', normalize='subtract') + result = area_statistics(input_data, "mean", normalize="subtract") self.assertEqual(input_data, self.grid) self.assertEqual(result.shape, input_data.shape) expected = np.ma.zeros((2, 5, 5), dtype=np.float32) self.assert_array_equal(result.data, expected) - self.assertFalse(result.cell_measures('cell_area')) + self.assertFalse(result.cell_measures("cell_area")) self.assertEqual(result.metadata, self.grid.metadata) for coord in self.grid.coords(): self.assertEqual(result.coord(coord.name()), coord) @@ -233,20 +235,18 @@ def test_extract_region(self): def test_extract_region_mean(self): """Test for extracting a region and performing the area mean of a 2D field.""" - cube = guess_bounds(self.grid, ['longitude', 'latitude']) + cube = guess_bounds(self.grid, 
["longitude", "latitude"]) grid_areas = iris.analysis.cartography.area_weights(cube) measure = iris.coords.CellMeasure( - grid_areas, - standard_name='cell_area', - units='m2', - measure='area') + grid_areas, standard_name="cell_area", units="m2", measure="area" + ) self.grid.add_cell_measure(measure, range(0, measure.ndim)) region = extract_region(self.grid, 1.5, 2.5, 1.5, 2.5) # expected outcome expected = np.ones((2, 2, 2)) self.assert_array_equal(region.data, expected) - result = area_statistics(region, 'mean') - expected_mean = np.ma.array([1., 1.]) + result = area_statistics(region, "mean") + expected_mean = np.ma.array([1.0, 1.0]) self.assert_array_equal(result.data, expected_mean) def test_extract_region_neg_lon(self): @@ -259,20 +259,20 @@ def test_extract_named_region(self): """Test for extracting a named region.""" # tests: # Create a cube with regions - times = np.array([15., 45., 75.]) - bounds = np.array([[0., 30.], [30., 60.], [60., 90.]]) + times = np.array([15.0, 45.0, 75.0]) + bounds = np.array([[0.0, 30.0], [30.0, 60.0], [60.0, 90.0]]) time = iris.coords.DimCoord( times, bounds=bounds, - standard_name='time', - units=Unit('days since 1950-01-01', calendar='gregorian'), + standard_name="time", + units=Unit("days since 1950-01-01", calendar="gregorian"), ) - regions = ['region1', 'region2', 'region3'] + regions = ["region1", "region2", "region3"] region = iris.coords.AuxCoord( regions, - standard_name='region', - units='1', + standard_name="region", + units="1", ) data = np.ones((3, 3)) @@ -283,39 +283,38 @@ def test_extract_named_region(self): ) # test string region - result1 = extract_named_regions(region_cube, 'region1') - expected = np.ones((3, )) + result1 = extract_named_regions(region_cube, "region1") + expected = np.ones((3,)) self.assert_array_equal(result1.data, expected) # test list of regions - result2 = extract_named_regions(region_cube, ['region1', 'region2']) + result2 = extract_named_regions(region_cube, ["region1", "region2"]) expected = np.ones((3, 2)) self.assert_array_equal(result2.data, expected) # test for expected failures: with self.assertRaises(ValueError): - extract_named_regions(region_cube, 'reg_A') - extract_named_regions(region_cube, ['region1', 'reg_A']) + extract_named_regions(region_cube, "reg_A") + extract_named_regions(region_cube, ["region1", "reg_A"]) def create_irregular_grid_cube(data, lons, lats): """Create test cube on irregular grid.""" - times = iris.coords.DimCoord(np.array([10, 20], dtype=np.float64), - standard_name='time', - units=Unit('days since 1950-01-01', - calendar='gregorian')) + times = iris.coords.DimCoord( + np.array([10, 20], dtype=np.float64), + standard_name="time", + units=Unit("days since 1950-01-01", calendar="gregorian"), + ) # Construct cube - nlat = iris.coords.DimCoord(range(data.shape[1]), var_name='nlat') - nlon = iris.coords.DimCoord(range(data.shape[2]), var_name='nlon') - lat = iris.coords.AuxCoord(lats, - var_name='lat', - standard_name='latitude', - units='degrees') - lon = iris.coords.AuxCoord(lons, - var_name='lon', - standard_name='longitude', - units='degrees') + nlat = iris.coords.DimCoord(range(data.shape[1]), var_name="nlat") + nlon = iris.coords.DimCoord(range(data.shape[2]), var_name="nlon") + lat = iris.coords.AuxCoord( + lats, var_name="lat", standard_name="latitude", units="degrees" + ) + lon = iris.coords.AuxCoord( + lons, var_name="lon", standard_name="longitude", units="degrees" + ) dim_coord_spec = [ (times, 0), (nlat, 1), @@ -327,8 +326,8 @@ def create_irregular_grid_cube(data, lons, 
lats): ] cube = iris.cube.Cube( data, - var_name='tos', - units='K', + var_name="tos", + units="K", dim_coords_and_dims=dim_coord_spec, aux_coords_and_dims=aux_coord_spec, ) @@ -337,62 +336,65 @@ def create_irregular_grid_cube(data, lons, lats): IRREGULAR_EXTRACT_REGION_TESTS = [ { - 'region': (100, 140, -10, 90), - 'mask': np.array( + "region": (100, 140, -10, 90), + "mask": np.array( [ [False], [False], ], dtype=bool, ), - 'data': np.arange(18, dtype=np.float32).reshape((2, 3, 3))[:, 1:3, 1:2] + "data": np.arange(18, dtype=np.float32).reshape((2, 3, 3))[ + :, 1:3, 1:2 + ], }, { - 'region': (100, 360, -60, 0), - 'mask': np.array( + "region": (100, 360, -60, 0), + "mask": np.array( [ [True, False], [False, False], ], dtype=bool, ), - 'data': np.arange(18, dtype=np.float32).reshape((2, 3, 3))[:, 0:2, 1:3] + "data": np.arange(18, dtype=np.float32).reshape((2, 3, 3))[ + :, 0:2, 1:3 + ], }, { - 'region': (10, 360, 0, 90), - 'mask': np.array( + "region": (10, 360, 0, 90), + "mask": np.array( [ [True, False], [False, False], ], dtype=bool, ), - 'data': np.arange(18, dtype=np.float32).reshape((2, 3, 3))[:, 1:, 1:] + "data": np.arange(18, dtype=np.float32).reshape((2, 3, 3))[:, 1:, 1:], }, { - 'region': (0, 360, -90, -30), - 'mask': np.array( + "region": (0, 360, -90, -30), + "mask": np.array( [ [False, False, False], ], dtype=bool, ), - 'data': np.arange(18, dtype=np.float32).reshape((2, 3, 3))[:, :1, :] + "data": np.arange(18, dtype=np.float32).reshape((2, 3, 3))[:, :1, :], }, { - 'region': (200, 10, -90, -60), - 'mask': np.array( + "region": (200, 10, -90, -60), + "mask": np.array( [ [False, True, False], ], dtype=bool, ), - 'data': np.arange(18, dtype=np.float32).reshape((2, 3, 3))[:, :1, :] + "data": np.arange(18, dtype=np.float32).reshape((2, 3, 3))[:, :1, :], }, { - 'region': (-150, 50, 50, -50), - 'mask': - np.array( + "region": (-150, 50, 50, -50), + "mask": np.array( [ [False, True, False], [True, True, True], @@ -400,17 +402,10 @@ def create_irregular_grid_cube(data, lons, lats): ], dtype=bool, ), - 'data': - np.arange(18, dtype=np.float32).reshape((2, 3, 3)) - }, - { - 'region': (0, 0, -100, 0), - 'raises': "Invalid start_latitude: -100" - }, - { - 'region': (0, 0, 0, 100), - 'raises': "Invalid end_latitude: 100" + "data": np.arange(18, dtype=np.float32).reshape((2, 3, 3)), }, + {"region": (0, 0, -100, 0), "raises": "Invalid start_latitude: -100"}, + {"region": (0, 0, 0, 100), "raises": "Invalid end_latitude: 100"}, ] @@ -428,7 +423,7 @@ def irregular_extract_region_cube(): ) lats = np.array( [ - [-60, -61., -60], + [-60, -61.0, -60], [0, -1, 0], [60, 60, 60], ], @@ -438,11 +433,11 @@ def irregular_extract_region_cube(): return cube -@pytest.mark.parametrize('case', IRREGULAR_EXTRACT_REGION_TESTS) +@pytest.mark.parametrize("case", IRREGULAR_EXTRACT_REGION_TESTS) def test_extract_region_irregular(irregular_extract_region_cube, case): """Test `extract_region` with data on an irregular grid.""" - start_lon, end_lon, start_lat, end_lat = case['region'] - if 'raises' not in case: + start_lon, end_lon, start_lat, end_lat = case["region"] + if "raises" not in case: cube = extract_region( irregular_extract_region_cube, start_longitude=start_lon, @@ -452,10 +447,10 @@ def test_extract_region_irregular(irregular_extract_region_cube, case): ) for i in range(2): - np.testing.assert_array_equal(cube.data[i].mask, case['mask']) - np.testing.assert_array_equal(cube.data.data, case['data']) + np.testing.assert_array_equal(cube.data[i].mask, case["mask"]) + np.testing.assert_array_equal(cube.data.data, 
case["data"])
     else:
-        with pytest.raises(ValueError, match=case['raises']):
+        with pytest.raises(ValueError, match=case["raises"]):
             extract_region(
                 irregular_extract_region_cube,
                 start_longitude=start_lon,
@@ -480,34 +475,46 @@ def create_rotated_grid_cube(data):
     )

     coord_sys_rotated = iris.coord_systems.RotatedGeogCS(
-        grid_north_pole_latitude, grid_north_pole_longitude)
-    grid_lat = iris.coords.DimCoord(grid_lats,
-                                    var_name='rlon',
-                                    standard_name='grid_latitude',
-                                    units='degrees',
-                                    coord_system=coord_sys_rotated)
-    grid_lon = iris.coords.DimCoord(grid_lons,
-                                    var_name='rlon',
-                                    standard_name='grid_longitude',
-                                    units='degrees',
-                                    coord_system=coord_sys_rotated)
+        grid_north_pole_latitude, grid_north_pole_longitude
+    )
+    grid_lat = iris.coords.DimCoord(
+        grid_lats,
+        var_name="rlat",
+        standard_name="grid_latitude",
+        units="degrees",
+        coord_system=coord_sys_rotated,
+    )
+    grid_lon = iris.coords.DimCoord(
+        grid_lons,
+        var_name="rlon",
+        standard_name="grid_longitude",
+        units="degrees",
+        coord_system=coord_sys_rotated,
+    )

     coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
     glon, glat = np.meshgrid(grid_lons, grid_lats)
     lons, lats = iris.analysis.cartography.unrotate_pole(
-        np.deg2rad(glon), np.deg2rad(glat), grid_north_pole_longitude,
-        grid_north_pole_latitude)
-
-    lat = iris.coords.AuxCoord(lats,
-                               var_name='lat',
-                               standard_name='latitude',
-                               units='degrees',
-                               coord_system=coord_sys)
-    lon = iris.coords.AuxCoord(lons,
-                               var_name='lon',
-                               standard_name='longitude',
-                               units='degrees',
-                               coord_system=coord_sys)
+        np.deg2rad(glon),
+        np.deg2rad(glat),
+        grid_north_pole_longitude,
+        grid_north_pole_latitude,
+    )
+
+    lat = iris.coords.AuxCoord(
+        lats,
+        var_name="lat",
+        standard_name="latitude",
+        units="degrees",
+        coord_system=coord_sys,
+    )
+    lon = iris.coords.AuxCoord(
+        lons,
+        var_name="lon",
+        standard_name="longitude",
+        units="degrees",
+        coord_system=coord_sys,
+    )
     dim_coord_spec = [
         (grid_lat, 0),
         (grid_lon, 1),
@@ -518,8 +525,8 @@ def create_rotated_grid_cube(data):
     ]
     cube = iris.cube.Cube(
         data,
-        var_name='tos',
-        units='K',
+        var_name="tos",
+        units="K",
         dim_coords_and_dims=dim_coord_spec,
         aux_coords_and_dims=aux_coord_spec,
     )
@@ -528,59 +535,59 @@

 ROTATED_AREA_STATISTICS_TEST = [
     {
-        'operator': 'mean',
-        'data': np.ones(9, dtype=np.float32).reshape((3, 3)),
-        'expected': np.array([1.]),
+        "operator": "mean",
+        "data": np.ones(9, dtype=np.float32).reshape((3, 3)),
+        "expected": np.array([1.0]),
     },
     {
-        'operator': 'median',
-        'data': np.ones(9, dtype=np.float32).reshape((3, 3)),
-        'expected': np.array([1.]),
+        "operator": "median",
+        "data": np.ones(9, dtype=np.float32).reshape((3, 3)),
+        "expected": np.array([1.0]),
     },
     {
-        'operator': 'std_dev',
-        'data': np.ones(9, dtype=np.float32).reshape((3, 3)),
-        'expected': np.array([0.]),
+        "operator": "std_dev",
+        "data": np.ones(9, dtype=np.float32).reshape((3, 3)),
+        "expected": np.array([0.0]),
     },
     {
-        'operator': 'sum',
-        'data': np.ones(9, dtype=np.float32).reshape((3, 3)),
+        "operator": "sum",
+        "data": np.ones(9, dtype=np.float32).reshape((3, 3)),
     },
     {
-        'operator': 'variance',
-        'data': np.ones(9, dtype=np.float32).reshape((3, 3)),
-        'expected': np.array([0.]),
+        "operator": "variance",
+        "data": np.ones(9, dtype=np.float32).reshape((3, 3)),
+        "expected": np.array([0.0]),
     },
     {
-        'operator': 'min',
-        'data': np.arange(9, dtype=np.float32).reshape((3, 3)),
-        'expected': np.array([0.]),
+        "operator": "min",
+        "data": np.arange(9, dtype=np.float32).reshape((3, 3)),
+        "expected": np.array([0.0]),
     },
     {
-        'operator': 'max',
-        'data': np.arange(9, dtype=np.float32).reshape((3, 3)),
-        'expected': np.array([8.]),
+        "operator": "max",
+        "data": np.arange(9, dtype=np.float32).reshape((3, 3)),
+        "expected": np.array([8.0]),
     },
 ]


-@pytest.mark.parametrize('case', ROTATED_AREA_STATISTICS_TEST)
+@pytest.mark.parametrize("case", ROTATED_AREA_STATISTICS_TEST)
 def test_area_statistics_rotated(case):
     """Test `area_statistics` with data on a rotated grid."""
-    rotated_cube = create_rotated_grid_cube(case['data'])
-    operator = case['operator']
+    rotated_cube = create_rotated_grid_cube(case["data"])
+    operator = case["operator"]
     cube = area_statistics(
         rotated_cube,
         operator,
     )
-    if operator != 'sum':
-        np.testing.assert_array_equal(cube.data, case['expected'])
+    if operator != "sum":
+        np.testing.assert_array_equal(cube.data, case["expected"])
     else:
         cube_tmp = rotated_cube.copy()
-        cube_tmp.remove_coord('latitude')
-        cube_tmp.coord('grid_latitude').rename('latitude')
-        cube_tmp.remove_coord('longitude')
-        cube_tmp.coord('grid_longitude').rename('longitude')
+        cube_tmp.remove_coord("latitude")
+        cube_tmp.coord("grid_latitude").rename("latitude")
+        cube_tmp.remove_coord("longitude")
+        cube_tmp.coord("grid_longitude").rename("longitude")
         grid_areas = iris.analysis.cartography.area_weights(cube_tmp)
         expected = np.sum(grid_areas).astype(np.float32)
         np.testing.assert_array_equal(cube.data, expected)
@@ -589,15 +596,21 @@ def test_area_statistics_rotated(case):
 def create_unstructured_grid_cube():
     """Create test cube with unstructured grid."""
     lat = iris.coords.AuxCoord(
-        [0, 1, 2], var_name='lat', standard_name='latitude', units='degrees',
+        [0, 1, 2],
+        var_name="lat",
+        standard_name="latitude",
+        units="degrees",
     )
     lon = iris.coords.AuxCoord(
-        [0, 1, 2], var_name='lon', standard_name='longitude', units='degrees',
+        [0, 1, 2],
+        var_name="lon",
+        standard_name="longitude",
+        units="degrees",
     )
     cube = iris.cube.Cube(
         [0, 10, 20],
-        var_name='tas',
-        units='K',
+        var_name="tas",
+        units="K",
         aux_coords_and_dims=[(lat, 0), (lon, 0)],
     )
     return cube
@@ -607,7 +620,7 @@ def test_area_statistics_max_irregular_grid():
     """Test ``area_statistics``."""
     values = np.arange(12).reshape(2, 2, 3)
     cube = create_irregular_grid_cube(values, values[0, ...], values[0, ...])
-    result = area_statistics(cube, 'max')
+    result = area_statistics(cube, "max")
     assert isinstance(result, Cube)
     np.testing.assert_array_equal(result.data, [5, 11])
@@ -615,7 +628,7 @@ def test_area_statistics_max_unstructured_grid():
 def test_area_statistics_max_unstructured_grid():
     """Test ``area_statistics``."""
     cube = create_unstructured_grid_cube()
-    result = area_statistics(cube, 'max')
+    result = area_statistics(cube, "max")
     assert isinstance(result, Cube)
     np.testing.assert_array_equal(result.data, 20)
@@ -625,14 +638,14 @@ def test_area_statistics_sum_irregular_grid_fail():
     values = np.arange(12).reshape(2, 2, 3)
     cube = create_irregular_grid_cube(values, values[0, ...], values[0, ...])
     with pytest.raises(CoordinateMultiDimError):
-        area_statistics(cube, 'sum')
+        area_statistics(cube, "sum")


 def test_area_statistics_sum_unstructured_grid_fail():
     """Test ``area_statistics``."""
     cube = create_unstructured_grid_cube()
     with pytest.raises(CoordinateMultiDimError):
-        area_statistics(cube, 'sum')
+        area_statistics(cube, "sum")


 @pytest.fixture
@@ -641,16 +654,19 @@ def make_testcube():
     coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
     data = np.ones((5, 5), dtype=np.float32)
     lons = iris.coords.DimCoord(
-        [i + .5 for i in range(5)],
-        
standard_name='longitude', - bounds=[[i, i + 1.] for i in range(5)], # [0,1] to [4,5] - units='degrees_east', - coord_system=coord_sys) - lats = iris.coords.DimCoord([i + .5 for i in range(5)], - standard_name='latitude', - bounds=[[i, i + 1.] for i in range(5)], - units='degrees_north', - coord_system=coord_sys) + [i + 0.5 for i in range(5)], + standard_name="longitude", + bounds=[[i, i + 1.0] for i in range(5)], # [0,1] to [4,5] + units="degrees_east", + coord_system=coord_sys, + ) + lats = iris.coords.DimCoord( + [i + 0.5 for i in range(5)], + standard_name="latitude", + bounds=[[i, i + 1.0] for i in range(5)], + units="degrees_north", + coord_system=coord_sys, + ) coords_spec = [(lats, 0), (lons, 1)] return iris.cube.Cube(data, dim_coords_and_dims=coords_spec) @@ -659,32 +675,30 @@ def write_shapefile(shape, path, negative_bounds=False): """Write (a) shape(s) to a shapefile.""" # Define a polygon feature geometry with one attribute schema = { - 'geometry': 'Polygon', - 'properties': { - 'id': 'int' - }, + "geometry": "Polygon", + "properties": {"id": "int"}, } if not isinstance(shape, list): shape = [shape] # Write a new Shapefile - with fiona.open(path, 'w', 'ESRI Shapefile', schema) as file: + with fiona.open(path, "w", "ESRI Shapefile", schema) as file: for id_, s in enumerate(shape): if not negative_bounds: - file.write({ - 'geometry': mapping(s), - 'properties': { - 'id': id_ - }, - }) + file.write( + { + "geometry": mapping(s), + "properties": {"id": id_}, + } + ) else: - file.write({ - 'geometry': mapping(s), - 'properties': { - 'id': id_ - }, - 'bounds': [-180, 180, -90, 90], - }) + file.write( + { + "geometry": mapping(s), + "properties": {"id": id_}, + "bounds": [-180, 180, -90, 90], + } + ) @pytest.fixture(params=[(2, 2), (1, 3), (9, 2)]) @@ -692,23 +706,27 @@ def square_shape(request, tmp_path): # Define polygons to test extract_shape slat = request.param[0] slon = request.param[1] - polyg = Polygon([ - (1.0, 1.0 + slat), - (1.0, 1.0), - (1.0 + slon, 1.0), - (1.0 + slon, 1.0 + slat), - ]) - - write_shapefile(polyg, tmp_path / 'test_shape.shp') - write_shapefile(polyg, - tmp_path / 'test_shape_negative_bounds.shp', - negative_bounds=True) + polyg = Polygon( + [ + (1.0, 1.0 + slat), + (1.0, 1.0), + (1.0 + slon, 1.0), + (1.0 + slon, 1.0 + slat), + ] + ) + + write_shapefile(polyg, tmp_path / "test_shape.shp") + write_shapefile( + polyg, + tmp_path / "test_shape_negative_bounds.shp", + negative_bounds=True, + ) # Make corresponding expected masked array (slat, slon) = np.ceil([slat, slon]).astype(int) vals = np.ones((min(slat + 2, 5), min(slon + 2, 5))) mask = vals.copy() - mask[1:1 + slat, 1:1 + slon] = 0 + mask[1 : 1 + slat, 1 : 1 + slon] = 0 return np.ma.masked_array(vals, mask) @@ -721,45 +739,54 @@ def square_composite_shape(request, tmp_path): polyg = [] for n in range(nshape): polyg.append( - Polygon([(1.0 + n, 1.0 + slat), (1.0 + n, 1.0), - (1.0 + n + slon, 1.0), (1.0 + n + slon, 1.0 + slat)])) - write_shapefile(polyg, tmp_path / 'test_shape.shp') - write_shapefile(polyg, - tmp_path / 'test_shape_negative_bounds.shp', - negative_bounds=True) + Polygon( + [ + (1.0 + n, 1.0 + slat), + (1.0 + n, 1.0), + (1.0 + n + slon, 1.0), + (1.0 + n + slon, 1.0 + slat), + ] + ) + ) + write_shapefile(polyg, tmp_path / "test_shape.shp") + write_shapefile( + polyg, + tmp_path / "test_shape_negative_bounds.shp", + negative_bounds=True, + ) # Make corresponding expected masked array (slat, slon) = np.ceil([slat, slon]).astype(int) vals = np.ones((nshape, min(slat + 2, 5), min(slon + 1 + 
nshape, 5))) mask = vals.copy() for n in range(nshape): - mask[n, 1:1 + slat, 1 + n:1 + n + slon] = 0 + mask[n, 1 : 1 + slat, 1 + n : 1 + n + slon] = 0 return np.ma.masked_array(vals, mask) def _create_sample_full_cube(): - cube = Cube(np.zeros((4, 180, 360)), var_name='co2', units='J') + cube = Cube(np.zeros((4, 180, 360)), var_name="co2", units="J") cube.add_dim_coord( iris.coords.DimCoord( - np.array([10., 40., 70., 110.]), - standard_name='time', - units=Unit('days since 1950-01-01 00:00:00', calendar='gregorian'), + np.array([10.0, 40.0, 70.0, 110.0]), + standard_name="time", + units=Unit("days since 1950-01-01 00:00:00", calendar="gregorian"), ), 0, ) cube.add_dim_coord( iris.coords.DimCoord( - np.arange(-90., 90., 1.), - standard_name='latitude', - units='degrees', + np.arange(-90.0, 90.0, 1.0), + standard_name="latitude", + units="degrees", ), 1, ) cube.add_dim_coord( iris.coords.DimCoord( - np.arange(0., 360., 1.), - standard_name='longitude', - units='degrees', + np.arange(0.0, 360.0, 1.0), + standard_name="longitude", + units="degrees", ), 2, ) @@ -773,7 +800,7 @@ def _create_sample_full_cube(): def test_crop_cube(make_testcube, square_shape, tmp_path): """Test for cropping a cube by shape bounds.""" - with fiona.open(tmp_path / 'test_shape.shp') as geometries: + with fiona.open(tmp_path / "test_shape.shp") as geometries: result = _crop_cube(make_testcube, *geometries.bounds) expected = square_shape.data np.testing.assert_array_equal(result.data, expected) @@ -782,11 +809,13 @@ def test_crop_cube(make_testcube, square_shape, tmp_path): def test_crop_cube_with_ne_file_imitation(): """Test for cropping a cube by shape bounds.""" cube = _create_sample_full_cube() - bounds = [-10., -99., 370., 100.] + bounds = [-10.0, -99.0, 370.0, 100.0] result = _crop_cube(cube, *tuple(bounds)) - result = (result.coord("latitude").points[-1], - result.coord("longitude").points[-1]) - expected = (89., 359.) + result = ( + result.coord("latitude").points[-1], + result.coord("longitude").points[-1], + ) + expected = (89.0, 359.0) np.testing.assert_allclose(result, expected) @@ -803,14 +832,16 @@ def test_crop_cube_with_ne_file(ne_ocean_shapefile): with fiona.open(ne_ocean_shapefile) as geometries: cube = _create_sample_full_cube() result = _crop_cube(cube, *geometries.bounds, cmor_coords=False) - result = (result.coord("latitude").points[-1], - result.coord("longitude").points[-1]) - expected = (89., 179.) 
+    result = (
+        result.coord("latitude").points[-1],
+        result.coord("longitude").points[-1],
+    )
+    expected = (89.0, 179.0)
     np.testing.assert_allclose(result, expected)


-@pytest.mark.parametrize('crop', [True, False])
-@pytest.mark.parametrize('ids', [None, [0]])
+@pytest.mark.parametrize("crop", [True, False])
+@pytest.mark.parametrize("ids", [None, [0]])
 def test_extract_shape(make_testcube, square_shape, tmp_path, crop, ids):
     """Test for extracting a region with shapefile."""
     expected = square_shape
@@ -818,12 +849,11 @@ def test_extract_shape(make_testcube, square_shape, tmp_path, crop, ids):
     # If cropping is not used, embed expected in the original test array
     original = np.ma.ones((5, 5))
     original.mask = np.ones_like(original, dtype=bool)
-    original[:expected.shape[0], :expected.shape[1]] = expected
+    original[: expected.shape[0], : expected.shape[1]] = expected
     expected = original
-    result = extract_shape(make_testcube,
-                           tmp_path / 'test_shape.shp',
-                           crop=crop,
-                           ids=ids)
+    result = extract_shape(
+        make_testcube, tmp_path / "test_shape.shp", crop=crop, ids=ids
+    )

     np.testing.assert_array_equal(result.data.data, expected.data)
     np.testing.assert_array_equal(result.data.mask, expected.mask)
@@ -843,16 +873,19 @@ def test_extract_shape_fx(make_testcube, ne_ocean_shapefile):
     """Test for extracting a shape from NE file."""
     expected = np.ones((5, 5))
     cube = make_testcube
-    measure = iris.coords.CellMeasure(cube.data,
-                                      standard_name='cell_area',
-                                      var_name='areacello',
-                                      units='m2',
-                                      measure='area')
+    measure = iris.coords.CellMeasure(
+        cube.data,
+        standard_name="cell_area",
+        var_name="areacello",
+        units="m2",
+        measure="area",
+    )
     ancillary_var = iris.coords.AncillaryVariable(
         cube.data,
-        standard_name='land_ice_area_fraction',
-        var_name='sftgif',
-        units='%')
+        standard_name="land_ice_area_fraction",
+        var_name="sftgif",
+        units="%",
+    )
     cube.add_cell_measure(measure, (0, 1))
     cube.add_ancillary_variable(ancillary_var, (0, 1))
     result = extract_shape(
@@ -863,12 +896,13 @@ def test_extract_shape_fx(make_testcube, ne_ocean_shapefile):

     np.testing.assert_array_equal(result.data.data, expected)
     assert result.cell_measures()
-    result_measure = result.cell_measure('cell_area').data
+    result_measure = result.cell_measure("cell_area").data
     np.testing.assert_array_equal(measure.data, result_measure)

     assert result.ancillary_variables()
     result_ancillary_var = result.ancillary_variable(
-        'land_ice_area_fraction').data
+        "land_ice_area_fraction"
+    ).data
     np.testing.assert_array_equal(ancillary_var.data, result_ancillary_var)
@@ -879,18 +913,19 @@ def test_extract_shape_ne_check_nans(ne_ocean_shapefile):
     assert not result[:, 90, 180].data.mask.all()


-@pytest.mark.parametrize('crop', [True, False])
-def test_extract_shape_negative_bounds(make_testcube, square_shape, tmp_path,
-                                       crop):
+@pytest.mark.parametrize("crop", [True, False])
+def test_extract_shape_negative_bounds(
+    make_testcube, square_shape, tmp_path, crop
+):
     """Test for extr a reg with shapefile w/neg bounds ie (-180, 180)."""
     expected = square_shape
     if not crop:
         # If cropping is not used, embed expected in the original test array
         original = np.ma.ones((5, 5))
         original.mask = np.ones_like(original, dtype=bool)
-        original[:expected.shape[0], :expected.shape[1]] = expected
+        original[: expected.shape[0], : expected.shape[1]] = expected
         expected = original
-    negative_bounds_shapefile = tmp_path / 'test_shape_negative_bounds.shp'
+    negative_bounds_shapefile = tmp_path / "test_shape_negative_bounds.shp"
    result = 
extract_shape(make_testcube, negative_bounds_shapefile, crop=crop) np.testing.assert_array_equal(result.data.data, expected.data) np.testing.assert_array_equal(result.data.mask, expected.mask) @@ -899,38 +934,41 @@ def test_extract_shape_negative_bounds(make_testcube, square_shape, tmp_path, def test_extract_shape_neg_lon(make_testcube, tmp_path, crop=False): """Test for extr a reg with shapefile w/negative lon.""" (slat, slon) = (2, -2) - polyg = Polygon([ - (1.0, 1.0 + slat), - (1.0, 1.0), - (1.0 + slon, 1.0), - (1.0 + slon, 1.0 + slat), - ]) - write_shapefile(polyg, - tmp_path / 'test_shape_negative_lon.shp', - negative_bounds=True) + polyg = Polygon( + [ + (1.0, 1.0 + slat), + (1.0, 1.0), + (1.0 + slon, 1.0), + (1.0 + slon, 1.0 + slat), + ] + ) + write_shapefile( + polyg, tmp_path / "test_shape_negative_lon.shp", negative_bounds=True + ) expected_data = np.ones((5, 5)) expected_mask = np.ones((5, 5)) expected_mask[1, 0] = False expected_mask[2, 0] = False expected = np.ma.array(expected_data, mask=expected_mask) - negative_bounds_shapefile = tmp_path / 'test_shape_negative_lon.shp' + negative_bounds_shapefile = tmp_path / "test_shape_negative_lon.shp" result = extract_shape(make_testcube, negative_bounds_shapefile, crop=crop) np.testing.assert_array_equal(result.data.data, expected.data) np.testing.assert_array_equal(result.data.mask, expected.mask) -@pytest.mark.parametrize('crop', [True, False]) -@pytest.mark.parametrize('decomposed', [True, False]) -def test_extract_composite_shape(make_testcube, square_composite_shape, - tmp_path, crop, decomposed): +@pytest.mark.parametrize("crop", [True, False]) +@pytest.mark.parametrize("decomposed", [True, False]) +def test_extract_composite_shape( + make_testcube, square_composite_shape, tmp_path, crop, decomposed +): """Test for extracting a region with shapefile.""" expected = square_composite_shape if not crop: # If cropping is not used, embed expected in the original test array original = np.ma.ones((expected.shape[0], 5, 5)) original.mask = np.ones_like(original, dtype=bool) - original[:, :expected.shape[1], :expected.shape[2]] = expected + original[:, : expected.shape[1], : expected.shape[2]] = expected expected = original if not decomposed or expected.shape[0] == 1: @@ -939,37 +977,43 @@ def test_extract_composite_shape(make_testcube, square_composite_shape, mask = expected.max(axis=0).mask expected = np.ma.masked_array(data=data, mask=mask) - result = extract_shape(make_testcube, - tmp_path / 'test_shape.shp', - crop=crop, - decomposed=decomposed) + result = extract_shape( + make_testcube, + tmp_path / "test_shape.shp", + crop=crop, + decomposed=decomposed, + ) np.testing.assert_array_equal(result.data.data, expected.data) np.testing.assert_array_equal(result.data.mask, expected.mask) -@pytest.mark.parametrize('ids', [[0], [1], [2], [1, 2]]) +@pytest.mark.parametrize("ids", [[0], [1], [2], [1, 2]]) def test_extract_specific_shape(make_testcube, tmp_path, ids): """Test for extracting a region with shapefile.""" - slat = 2. - slon = 2. 
+ slat = 2.0 + slon = 2.0 nshape = 3 polyg = [] for n in range(nshape): polyg.append( - Polygon([ - (1.0 + n, 1.0 + slat), - (1.0 + n, 1.0), - (1.0 + n + slon, 1.0), - (1.0 + n + slon, 1.0 + slat), - ]) + Polygon( + [ + (1.0 + n, 1.0 + slat), + (1.0 + n, 1.0), + (1.0 + n + slon, 1.0), + (1.0 + n + slon, 1.0 + slat), + ] + ) ) - write_shapefile(polyg, tmp_path / 'test_shape.shp') + write_shapefile(polyg, tmp_path / "test_shape.shp") - result = extract_shape(make_testcube, - tmp_path / 'test_shape.shp', - crop=True, - decomposed=False, - ids=ids) + result = extract_shape( + make_testcube, + tmp_path / "test_shape.shp", + crop=True, + decomposed=False, + ids=ids, + ) expected_bounds = np.vstack([polyg[i].bounds for i in ids]) @@ -979,8 +1023,8 @@ def test_extract_specific_shape(make_testcube, tmp_path, ids): lat_max = expected_bounds[:, 3] # results from `extract_shape` are padded with masked values - lats = result.coord('latitude')[1:-1] - lons = result.coord('longitude')[1:-1] + lats = result.coord("latitude")[1:-1] + lons = result.coord("longitude")[1:-1] assert np.all((lats.points >= lat_min) & (lats.points <= lat_max)) assert np.all((lons.points >= lon_min) & (lons.points <= lon_max)) @@ -988,36 +1032,45 @@ def test_extract_specific_shape(make_testcube, tmp_path, ids): def test_extract_specific_shape_raises_if_not_present(make_testcube, tmp_path): """Test for extracting a region with shapefile.""" - slat = 2. - slon = 2. + slat = 2.0 + slon = 2.0 nshape = 3 polyg = [] for n in range(nshape): polyg.append( - Polygon([(1.0 + n, 1.0 + slat), (1.0 + n, 1.0), - (1.0 + n + slon, 1.0), (1.0 + n + slon, 1.0 + slat)])) - write_shapefile(polyg, tmp_path / 'test_shape.shp') + Polygon( + [ + (1.0 + n, 1.0 + slat), + (1.0 + n, 1.0), + (1.0 + n + slon, 1.0), + (1.0 + n + slon, 1.0 + slat), + ] + ) + ) + write_shapefile(polyg, tmp_path / "test_shape.shp") with assert_raises(ValueError): - extract_shape(make_testcube, - tmp_path / 'test_shape.shp', - crop=True, - decomposed=False, - ids=[1, 2, 3]) - - -@pytest.mark.parametrize('crop', [True, False]) -@pytest.mark.parametrize('decomposed', [True, False]) -def test_extract_composite_shape_negative_bounds(make_testcube, - square_composite_shape, - tmp_path, crop, decomposed): + extract_shape( + make_testcube, + tmp_path / "test_shape.shp", + crop=True, + decomposed=False, + ids=[1, 2, 3], + ) + + +@pytest.mark.parametrize("crop", [True, False]) +@pytest.mark.parametrize("decomposed", [True, False]) +def test_extract_composite_shape_negative_bounds( + make_testcube, square_composite_shape, tmp_path, crop, decomposed +): """Test for extr a reg with shapefile w/neg bounds ie (-180, 180).""" expected = square_composite_shape if not crop: # If cropping is not used, embed expected in the original test array original = np.ma.ones((expected.shape[0], 5, 5)) original.mask = np.ones_like(original, dtype=bool) - original[:, :expected.shape[1], :expected.shape[2]] = expected + original[:, : expected.shape[1], : expected.shape[2]] = expected expected = original if not decomposed or expected.shape[0] == 1: @@ -1026,11 +1079,13 @@ def test_extract_composite_shape_negative_bounds(make_testcube, mask = expected.max(axis=0).mask expected = np.ma.masked_array(data=data, mask=mask) - negative_bounds_shapefile = tmp_path / 'test_shape_negative_bounds.shp' - result = extract_shape(make_testcube, - negative_bounds_shapefile, - crop=crop, - decomposed=decomposed) + negative_bounds_shapefile = tmp_path / "test_shape_negative_bounds.shp" + result = extract_shape( + make_testcube, + 
negative_bounds_shapefile, + crop=crop, + decomposed=decomposed, + ) np.testing.assert_array_equal(result.data.data, expected.data) np.testing.assert_array_equal(result.data.mask, expected.mask) @@ -1059,18 +1114,20 @@ def irreg_extract_shape_cube(): return cube -@pytest.mark.parametrize('method', ['contains', 'representative']) +@pytest.mark.parametrize("method", ["contains", "representative"]) def test_extract_shape_irregular(irreg_extract_shape_cube, tmp_path, method): """Test `extract_shape` with a cube on an irregular grid.""" # Points are (lon, lat) - shape = Polygon([ - (0.5, 0.5), - (0.5, 3.0), - (1.5, 3.0), - (1.5, 0.5), - ]) - - shapefile = tmp_path / 'shapefile.shp' + shape = Polygon( + [ + (0.5, 0.5), + (0.5, 3.0), + (1.5, 3.0), + (1.5, 0.5), + ] + ) + + shapefile = tmp_path / "shapefile.shp" write_shapefile(shape, shapefile) cube = extract_shape(irreg_extract_shape_cube, shapefile, method) @@ -1084,7 +1141,7 @@ def test_extract_shape_irregular(irreg_extract_shape_cube, tmp_path, method): ], dtype=bool, ) - if method == 'representative': + if method == "representative": mask[1, 1] = True np.testing.assert_array_equal(cube.data, data) for i in range(2): @@ -1094,18 +1151,18 @@ def test_extract_shape_irregular(irreg_extract_shape_cube, tmp_path, method): def test_extract_shape_wrong_method_raises(make_testcube, ne_ocean_shapefile): msg = "Invalid value for `method`" with pytest.raises(ValueError, match=msg): - extract_shape(make_testcube, ne_ocean_shapefile, method='wrong') + extract_shape(make_testcube, ne_ocean_shapefile, method="wrong") -@pytest.mark.parametrize('ids', [None, []]) -@pytest.mark.parametrize('crop', [True, False]) -@pytest.mark.parametrize('decomposed', [True, False]) +@pytest.mark.parametrize("ids", [None, []]) +@pytest.mark.parametrize("crop", [True, False]) +@pytest.mark.parametrize("decomposed", [True, False]) def test_extract_shape_ar6_all_region(make_testcube, ids, crop, decomposed): """Test for extracting all AR6 regions with shapefile.""" cube = extract_shape( make_testcube, - 'AR6', - method='contains', + "AR6", + method="contains", crop=crop, decomposed=decomposed, ids=ids, @@ -1113,98 +1170,110 @@ def test_extract_shape_ar6_all_region(make_testcube, ids, crop, decomposed): if decomposed: assert cube.shape == (58, 5, 5) - assert cube.coords('shape_id') - assert cube.coord_dims('shape_id') == (0, ) + assert cube.coords("shape_id") + assert cube.coord_dims("shape_id") == (0,) assert np.ma.is_masked(cube.data) else: assert cube.shape == (5, 5) - assert not cube.coords('shape_id') + assert not cube.coords("shape_id") assert not np.ma.is_masked(cube.data) - assert cube.coord('latitude') == make_testcube.coord('latitude') - assert cube.coord('longitude') == make_testcube.coord('longitude') + assert cube.coord("latitude") == make_testcube.coord("latitude") + assert cube.coord("longitude") == make_testcube.coord("longitude") -EAO_MASK = np.array([ - [0, 0, 0, 0, 0], - [0, 0, 1, 1, 1], - [1, 1, 1, 1, 1], - [1, 1, 1, 1, 1], - [1, 1, 1, 1, 1], -], dtype=bool) +EAO_MASK = np.array( + [ + [0, 0, 0, 0, 0], + [0, 0, 1, 1, 1], + [1, 1, 1, 1, 1], + [1, 1, 1, 1, 1], + [1, 1, 1, 1, 1], + ], + dtype=bool, +) -WAF_MASK = np.array([ - [1, 1, 1, 1, 1], - [1, 1, 0, 0, 0], - [0, 0, 0, 0, 0], - [0, 0, 0, 0, 0], - [0, 0, 0, 0, 0], -], dtype=bool) +WAF_MASK = np.array( + [ + [1, 1, 1, 1, 1], + [1, 1, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + ], + dtype=bool, +) @pytest.mark.parametrize( - 'ids', + "ids", [ - {'Acronym': ['EAO']}, - 
['Equatorial.Atlantic-Ocean'], + {"Acronym": ["EAO"]}, + ["Equatorial.Atlantic-Ocean"], ], ) -@pytest.mark.parametrize('crop', [True, False]) -@pytest.mark.parametrize('decomposed', [True, False]) +@pytest.mark.parametrize("crop", [True, False]) +@pytest.mark.parametrize("decomposed", [True, False]) def test_extract_shape_ar6_one_region(make_testcube, ids, crop, decomposed): """Test for extracting 1 AR6 regions with shapefile.""" # Adapt lat slightly to test cropping - lat = make_testcube.coord('latitude') - lat.points = [-45., -40., 2.5, 3.5, 4.5] - lat.bounds = [[-50., -41.], [-41., 2.], [2., 3.], [3., 4.], [4., 5.]] + lat = make_testcube.coord("latitude") + lat.points = [-45.0, -40.0, 2.5, 3.5, 4.5] + lat.bounds = [ + [-50.0, -41.0], + [-41.0, 2.0], + [2.0, 3.0], + [3.0, 4.0], + [4.0, 5.0], + ] cube = extract_shape( make_testcube, - 'ar6', - method='contains', + "ar6", + method="contains", crop=crop, decomposed=decomposed, ids=ids, ) - lat = cube.coord('latitude') - lon = cube.coord('longitude') + lat = cube.coord("latitude") + lon = cube.coord("longitude") if decomposed: if crop: assert cube.shape == (3, 5) np.testing.assert_allclose(lat.points, [2.5, 3.5, 4.5]) else: assert cube.shape == (5, 5) - assert lat == make_testcube.coord('latitude') - assert lon == make_testcube.coord('longitude') - assert cube.coords('shape_id') - assert cube.coord_dims('shape_id') == () + assert lat == make_testcube.coord("latitude") + assert lon == make_testcube.coord("longitude") + assert cube.coords("shape_id") + assert cube.coord_dims("shape_id") == () else: # not decomposed if crop: assert cube.shape == (3, 5) np.testing.assert_allclose(lat.points, [2.5, 3.5, 4.5]) else: assert cube.shape == (5, 5) - assert lat == make_testcube.coord('latitude') - assert lon == make_testcube.coord('longitude') - assert not cube.coords('shape_id') + assert lat == make_testcube.coord("latitude") + assert lon == make_testcube.coord("longitude") + assert not cube.coords("shape_id") assert np.ma.is_masked(cube.data) @pytest.mark.parametrize( - 'ids', + "ids", [ - {'Acronym': ['EAO', 'WAF']}, - ['Equatorial.Atlantic-Ocean', 'Western-Africa'], + {"Acronym": ["EAO", "WAF"]}, + ["Equatorial.Atlantic-Ocean", "Western-Africa"], ], ) -@pytest.mark.parametrize('crop', [True, False]) -@pytest.mark.parametrize('decomposed', [True, False]) +@pytest.mark.parametrize("crop", [True, False]) +@pytest.mark.parametrize("decomposed", [True, False]) def test_extract_shape_ar6_two_regions(make_testcube, ids, crop, decomposed): """Test for extracting 2 AR6 regions with shapefile.""" cube = extract_shape( make_testcube, - 'AR6', - method='contains', + "AR6", + method="contains", crop=crop, decomposed=decomposed, ids=ids, @@ -1215,30 +1284,31 @@ def test_extract_shape_ar6_two_regions(make_testcube, ids, crop, decomposed): mask = np.ma.getmaskarray(cube.data) np.testing.assert_array_equal(mask[0], EAO_MASK) np.testing.assert_array_equal(mask[1], WAF_MASK) - assert cube.coords('shape_id') - assert cube.coord_dims('shape_id') == (0, ) + assert cube.coords("shape_id") + assert cube.coord_dims("shape_id") == (0,) else: assert cube.shape == (5, 5) assert not np.ma.is_masked(cube.data) - assert not cube.coords('shape_id') - assert cube.coord('latitude') == make_testcube.coord('latitude') - assert cube.coord('longitude') == make_testcube.coord('longitude') + assert not cube.coords("shape_id") + assert cube.coord("latitude") == make_testcube.coord("latitude") + assert cube.coord("longitude") == make_testcube.coord("longitude") 
-@pytest.mark.parametrize('ids', [{}, {'a': [1, 2], 'b': [1, 2]}]) +@pytest.mark.parametrize("ids", [{}, {"a": [1, 2], "b": [1, 2]}]) def test_extract_shape_invalid_dict(make_testcube, ids): """Test for extract_shape with invalid ids.""" msg = "If `ids` is given as dict, it needs exactly one entry" with pytest.raises(ValueError, match=msg): - extract_shape(make_testcube, 'ar6', ids=ids) + extract_shape(make_testcube, "ar6", ids=ids) @pytest.fixture def ar6_shapefile(): """Path to AR6 shapefile.""" shapefile = ( - Path(esmvalcore.preprocessor.__file__).parent / 'shapefiles' / - 'ar6.shp' + Path(esmvalcore.preprocessor.__file__).parent + / "shapefiles" + / "ar6.shp" ) return shapefile @@ -1249,16 +1319,16 @@ def test_get_requested_geometries_invalid_ids(ar6_shapefile): with fiona.open(ar6_shapefile) as geometries: with pytest.raises(ValueError, match=msg): _get_requested_geometries( - geometries, {'wrong_attr': [1, 2]}, Path('shape.shp') + geometries, {"wrong_attr": [1, 2]}, Path("shape.shp") ) -@pytest.mark.parametrize('session', [{}, None]) +@pytest.mark.parametrize("session", [{}, None]) def test_update_shapefile_path_abs(session, tmp_path): - """ Test ``update_shapefile_path``.""" + """Test ``update_shapefile_path``.""" if session is not None: - session['auxiliary_data_dir'] = tmp_path - shapefile = tmp_path / 'my_custom_shapefile.shp' + session["auxiliary_data_dir"] = tmp_path + shapefile = tmp_path / "my_custom_shapefile.shp" shapefile.write_text("") # create empty file # Test with Path and str object @@ -1269,17 +1339,17 @@ def test_update_shapefile_path_abs(session, tmp_path): @pytest.mark.parametrize( - 'shapefile', ['aux_dir/ar6.shp', 'ar6.shp', 'ar6', 'AR6', 'aR6'] + "shapefile", ["aux_dir/ar6.shp", "ar6.shp", "ar6", "AR6", "aR6"] ) -@pytest.mark.parametrize('session', [{}, None]) +@pytest.mark.parametrize("session", [{}, None]) def test_update_shapefile_path_rel( shapefile, session, ar6_shapefile, tmp_path ): - """ Test ``update_shapefile_path``.""" + """Test ``update_shapefile_path``.""" if session is not None: - session['auxiliary_data_dir'] = tmp_path - (tmp_path / 'aux_dir').mkdir(parents=True, exist_ok=True) - aux_dir_shapefile = tmp_path / 'aux_dir' / 'ar6.shp' + session["auxiliary_data_dir"] = tmp_path + (tmp_path / "aux_dir").mkdir(parents=True, exist_ok=True) + aux_dir_shapefile = tmp_path / "aux_dir" / "ar6.shp" aux_dir_shapefile.write_text("") # create empty file # Test with Path and str object @@ -1287,9 +1357,9 @@ def test_update_shapefile_path_rel( shapefile_out = _update_shapefile_path(shapefile, session=session) assert isinstance(shapefile_out, Path) - if 'aux_dir' in str(shapefile_in) and session is None: + if "aux_dir" in str(shapefile_in) and session is None: assert shapefile_out == Path(shapefile) - elif 'aux_dir' in str(shapefile): + elif "aux_dir" in str(shapefile): assert shapefile_out == tmp_path / shapefile else: assert shapefile_out == ar6_shapefile @@ -1297,10 +1367,10 @@ def test_update_shapefile_path_rel( def test_zonal_statistics(make_testcube): """Test ``zonal_statistics``.""" - res = zonal_statistics(make_testcube, 'sum') - assert res.coord('latitude') == make_testcube.coord('latitude') - np.testing.assert_allclose(res.coord('longitude').points, [2.5]) - np.testing.assert_allclose(res.coord('longitude').bounds, [[0.0, 5.0]]) + res = zonal_statistics(make_testcube, "sum") + assert res.coord("latitude") == make_testcube.coord("latitude") + np.testing.assert_allclose(res.coord("longitude").points, [2.5]) + 
np.testing.assert_allclose(res.coord("longitude").bounds, [[0.0, 5.0]])
     np.testing.assert_allclose(res.data, [5.0, 5.0, 5.0, 5.0, 5.0])
     assert res.dtype == np.float32
@@ -1311,10 +1381,10 @@ def test_zonal_statistics_divide_by_min(make_testcube):
     make_testcube.data[0, 0] = 0.0
     make_testcube.data[1, 0] = -1.0
     make_testcube.data[2, 0] = -0.5
-    make_testcube.units = 'K'
+    make_testcube.units = "K"
     input_data = make_testcube.copy()

-    res = zonal_statistics(input_data, 'min', normalize='divide')
+    res = zonal_statistics(input_data, "min", normalize="divide")

     assert input_data == make_testcube
     assert res.shape == input_data.shape
@@ -1335,21 +1405,21 @@ def test_zonal_statistics_divide_by_min(make_testcube):
     assert res.long_name == input_data.long_name
     assert res.cell_methods == input_data.cell_methods
     assert res.attributes == input_data.attributes
-    assert res.units == '1'
+    assert res.units == "1"


 def test_zonal_statistics_2d_lon_fail(irreg_extract_shape_cube):
     """Test ``zonal_statistics``."""
     with pytest.raises(ValueError):
-        zonal_statistics(irreg_extract_shape_cube, 'sum')
+        zonal_statistics(irreg_extract_shape_cube, "sum")


 def test_meridional_statistics(make_testcube):
     """Test ``meridional_statistics``."""
-    res = meridional_statistics(make_testcube, 'sum')
-    assert res.coord('longitude') == make_testcube.coord('longitude')
-    np.testing.assert_allclose(res.coord('latitude').points, [2.5])
-    np.testing.assert_allclose(res.coord('latitude').bounds, [[0.0, 5.0]])
+    res = meridional_statistics(make_testcube, "sum")
+    assert res.coord("longitude") == make_testcube.coord("longitude")
+    np.testing.assert_allclose(res.coord("latitude").points, [2.5])
+    np.testing.assert_allclose(res.coord("latitude").bounds, [[0.0, 5.0]])
     np.testing.assert_allclose(res.data, [5.0, 5.0, 5.0, 5.0, 5.0])
     assert res.dtype == np.float32
@@ -1359,10 +1429,10 @@ def test_meridional_statistics_divide_by_max(make_testcube):
     make_testcube.data = np.ones(make_testcube.shape, dtype=np.float32)
     make_testcube.data[0, 0] = 0.25
     make_testcube.data[0, 1] = 2.0
-    make_testcube.units = 'K'
+    make_testcube.units = "K"
     input_data = make_testcube.copy()

-    res = meridional_statistics(input_data, 'max', normalize='divide')
+    res = meridional_statistics(input_data, "max", normalize="divide")

     assert input_data == make_testcube
     assert res.shape == input_data.shape
@@ -1383,67 +1453,71 @@ def test_meridional_statistics_divide_by_max(make_testcube):
     assert res.long_name == input_data.long_name
     assert res.cell_methods == input_data.cell_methods
     assert res.attributes == input_data.attributes
-    assert res.units == '1'
+    assert res.units == "1"


 def test_meridional_statistics_2d_lon_fail(irreg_extract_shape_cube):
     """Test ``meridional_statistics``."""
     with pytest.raises(ValueError):
-        meridional_statistics(irreg_extract_shape_cube, 'sum')
+        meridional_statistics(irreg_extract_shape_cube, "sum")


 def test_meridional_statistics_invalid_norm_fail(make_testcube):
     """Test ``meridional_statistics``."""
     msg = "Expected 'subtract' or 'divide' for `normalize`"
     with pytest.raises(ValueError, match=msg):
-        meridional_statistics(make_testcube, 'sum', normalize='x')
+        meridional_statistics(make_testcube, "sum", normalize="x")


 def test_time_dependent_volcello():
     coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
     data = np.ma.ones((2, 3, 2, 2))
-    time = iris.coords.DimCoord([15, 45],
-                                standard_name='time',
-                                bounds=[[1., 30.], [30., 60.]],
-                                units=Unit('days since 1950-01-01',
-                                           calendar='gregorian'))
-
-    zcoord = iris.coords.DimCoord([0.5, 5.,
50.], - long_name='zcoord', - bounds=[[0., 2.5], [2.5, 25.], - [25., 250.]], - units='m', - attributes={'positive': 'down'}) - lons = iris.coords.DimCoord([1.5, 2.5], - standard_name='longitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_east', - coord_system=coord_sys) - lats = iris.coords.DimCoord([1.5, 2.5], - standard_name='latitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_north', - coord_system=coord_sys) + time = iris.coords.DimCoord( + [15, 45], + standard_name="time", + bounds=[[1.0, 30.0], [30.0, 60.0]], + units=Unit("days since 1950-01-01", calendar="gregorian"), + ) + + zcoord = iris.coords.DimCoord( + [0.5, 5.0, 50.0], + long_name="zcoord", + bounds=[[0.0, 2.5], [2.5, 25.0], [25.0, 250.0]], + units="m", + attributes={"positive": "down"}, + ) + lons = iris.coords.DimCoord( + [1.5, 2.5], + standard_name="longitude", + bounds=[[1.0, 2.0], [2.0, 3.0]], + units="degrees_east", + coord_system=coord_sys, + ) + lats = iris.coords.DimCoord( + [1.5, 2.5], + standard_name="latitude", + bounds=[[1.0, 2.0], [2.0, 3.0]], + units="degrees_north", + coord_system=coord_sys, + ) coords_spec4 = [(time, 0), (zcoord, 1), (lats, 2), (lons, 3)] cube = iris.cube.Cube(data, dim_coords_and_dims=coords_spec4) volcello = iris.coords.CellMeasure( - data, - standard_name='ocean_volume', - units='m3', - measure='volume') + data, standard_name="ocean_volume", units="m3", measure="volume" + ) cube.add_cell_measure(volcello, range(0, volcello.ndim)) cube = extract_shape( cube, - 'AR6', - method='contains', + "AR6", + method="contains", crop=False, decomposed=True, - ids={'Acronym': ['EAO', 'WAF']}, + ids={"Acronym": ["EAO", "WAF"]}, ) - assert cube.shape == cube.cell_measure('ocean_volume').shape + assert cube.shape == cube.cell_measure("ocean_volume").shape -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/unit/preprocessor/_compare_with_refs/test_compare_with_refs.py b/tests/unit/preprocessor/_compare_with_refs/test_compare_with_refs.py index 7def9afe1b..631eac916d 100644 --- a/tests/unit/preprocessor/_compare_with_refs/test_compare_with_refs.py +++ b/tests/unit/preprocessor/_compare_with_refs/test_compare_with_refs.py @@ -36,21 +36,33 @@ def products_set_to_dict(products): def get_3d_cube(data, **cube_kwargs): """Create 3D cube.""" - time_units = Unit('days since 1850-01-01 00:00:00') - times = iris.coords.DimCoord([3.0, 7.0], - bounds=[[0.0, 6.0], [6.0, 8.0]], - standard_name='time', - var_name='time', long_name='time', - units=time_units) - lats = iris.coords.DimCoord([0.0, 10.0], standard_name='latitude', - var_name='lat', long_name='latitude', - units='degrees_north') - lons = iris.coords.DimCoord([20.0, 30.0], standard_name='longitude', - var_name='lon', long_name='longitude', - units='degrees_east') + time_units = Unit("days since 1850-01-01 00:00:00") + times = iris.coords.DimCoord( + [3.0, 7.0], + bounds=[[0.0, 6.0], [6.0, 8.0]], + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + lats = iris.coords.DimCoord( + [0.0, 10.0], + standard_name="latitude", + var_name="lat", + long_name="latitude", + units="degrees_north", + ) + lons = iris.coords.DimCoord( + [20.0, 30.0], + standard_name="longitude", + var_name="lon", + long_name="longitude", + units="degrees_east", + ) coord_specs = [(times, 0), (lats, 1), (lons, 2)] - cube = Cube(data.astype('float32'), - dim_coords_and_dims=coord_specs, **cube_kwargs) + cube = Cube( + data.astype("float32"), dim_coords_and_dims=coord_specs, **cube_kwargs + ) return cube @@ 
-59,7 +71,7 @@ def regular_cubes(): """Regular cubes.""" cube_data = np.arange(8.0).reshape(2, 2, 2) cube = get_3d_cube( - cube_data, standard_name='air_temperature', var_name='tas', units='K' + cube_data, standard_name="air_temperature", var_name="tas", units="K" ) return CubeList([cube]) @@ -70,25 +82,26 @@ def ref_cubes(): cube_data = np.full((2, 2, 2), 2.0) cube_data[1, 1, 1] = 4.0 cube = get_3d_cube( - cube_data, standard_name='air_temperature', var_name='tas', units='K' + cube_data, standard_name="air_temperature", var_name="tas", units="K" ) return CubeList([cube]) TEST_BIAS = [ - ('absolute', [[[-2.0, -1.0], [0.0, 1.0]], [[2.0, 3.0], [4.0, 3.0]]], 'K'), - ('relative', [[[-1.0, -0.5], [0.0, 0.5]], [[1.0, 1.5], [2.0, 0.75]]], '1'), + ("absolute", [[[-2.0, -1.0], [0.0, 1.0]], [[2.0, 3.0], [4.0, 3.0]]], "K"), + ("relative", [[[-1.0, -0.5], [0.0, 0.5]], [[1.0, 1.5], [2.0, 0.75]]], "1"), ] -@pytest.mark.parametrize('bias_type,data,units', TEST_BIAS) +@pytest.mark.parametrize("bias_type,data,units", TEST_BIAS) def test_bias_products(regular_cubes, ref_cubes, bias_type, data, units): """Test calculation of bias with products.""" - ref_product = PreprocessorFile(ref_cubes, 'REF', - {'reference_for_bias': True}) + ref_product = PreprocessorFile( + ref_cubes, "REF", {"reference_for_bias": True} + ) products = { - PreprocessorFile(regular_cubes, 'A', {'dataset': 'a'}), - PreprocessorFile(regular_cubes, 'B', {'dataset': 'b'}), + PreprocessorFile(regular_cubes, "A", {"dataset": "a"}), + PreprocessorFile(regular_cubes, "B", {"dataset": "b"}), ref_product, } out_products = bias(products, bias_type=bias_type) @@ -97,30 +110,30 @@ def test_bias_products(regular_cubes, ref_cubes, bias_type, data, units): out_dict = products_set_to_dict(out_products) assert len(out_dict) == 2 - product_a = out_dict['A'] - assert product_a.filename == 'A' - assert product_a.attributes == {'units': units, 'dataset': 'a'} + product_a = out_dict["A"] + assert product_a.filename == "A" + assert product_a.attributes == {"units": units, "dataset": "a"} assert len(product_a.cubes) == 1 out_cube = product_a.cubes[0] assert out_cube.dtype == np.float32 assert_allclose(out_cube.data, data) - assert out_cube.var_name == 'tas' - assert out_cube.standard_name == 'air_temperature' + assert out_cube.var_name == "tas" + assert out_cube.standard_name == "air_temperature" assert out_cube.units == units assert out_cube.dim_coords == regular_cubes[0].dim_coords assert out_cube.aux_coords == regular_cubes[0].aux_coords product_a.wasderivedfrom.assert_called_once() assert product_a.mock_ancestors == {ref_product} - product_b = out_dict['B'] - assert product_b.filename == 'B' - assert product_b.attributes == {'units': units, 'dataset': 'b'} + product_b = out_dict["B"] + assert product_b.filename == "B" + assert product_b.attributes == {"units": units, "dataset": "b"} assert len(product_b.cubes) == 1 out_cube = product_b.cubes[0] assert out_cube.dtype == np.float32 assert_allclose(out_cube.data, data) - assert out_cube.var_name == 'tas' - assert out_cube.standard_name == 'air_temperature' + assert out_cube.var_name == "tas" + assert out_cube.standard_name == "air_temperature" assert out_cube.units == units assert out_cube.dim_coords == regular_cubes[0].dim_coords assert out_cube.aux_coords == regular_cubes[0].aux_coords @@ -128,7 +141,7 @@ def test_bias_products(regular_cubes, ref_cubes, bias_type, data, units): assert product_b.mock_ancestors == {ref_product} -@pytest.mark.parametrize('bias_type,data,units', TEST_BIAS) 
+@pytest.mark.parametrize("bias_type,data,units", TEST_BIAS) def test_bias_cubes(regular_cubes, ref_cubes, bias_type, data, units): """Test calculation of bias with cubes.""" ref_cube = ref_cubes[0] @@ -140,20 +153,20 @@ def test_bias_cubes(regular_cubes, ref_cubes, bias_type, data, units): assert out_cube.dtype == np.float32 assert_allclose(out_cube.data, data) - assert out_cube.var_name == 'tas' - assert out_cube.standard_name == 'air_temperature' + assert out_cube.var_name == "tas" + assert out_cube.standard_name == "air_temperature" assert out_cube.units == units assert out_cube.dim_coords == regular_cubes[0].dim_coords assert out_cube.aux_coords == regular_cubes[0].aux_coords TEST_BIAS_BROADCASTABLE = [ - ('absolute', [[[-2.0, -1.0], [0.0, 1.0]], [[2.0, 3.0], [4.0, 5.0]]], 'K'), - ('relative', [[[-1.0, -0.5], [0.0, 0.5]], [[1.0, 1.5], [2.0, 2.5]]], '1'), + ("absolute", [[[-2.0, -1.0], [0.0, 1.0]], [[2.0, 3.0], [4.0, 5.0]]], "K"), + ("relative", [[[-1.0, -0.5], [0.0, 0.5]], [[1.0, 1.5], [2.0, 2.5]]], "1"), ] -@pytest.mark.parametrize('bias_type,data,units', TEST_BIAS_BROADCASTABLE) +@pytest.mark.parametrize("bias_type,data,units", TEST_BIAS_BROADCASTABLE) def test_bias_cubes_broadcastable( regular_cubes, ref_cubes, bias_type, data, units ): @@ -167,8 +180,8 @@ def test_bias_cubes_broadcastable( assert out_cube.dtype == np.float32 assert_allclose(out_cube.data, data) - assert out_cube.var_name == 'tas' - assert out_cube.standard_name == 'air_temperature' + assert out_cube.var_name == "tas" + assert out_cube.standard_name == "air_temperature" assert out_cube.units == units assert out_cube.dim_coords == regular_cubes[0].dim_coords assert out_cube.aux_coords == regular_cubes[0].aux_coords @@ -176,34 +189,34 @@ def test_bias_cubes_broadcastable( def test_denominator_mask_threshold_products(regular_cubes, ref_cubes): """Test denominator_mask_threshold argument with products.""" - ref_product = PreprocessorFile(ref_cubes, 'REF', - {'reference_for_bias': True}) + ref_product = PreprocessorFile( + ref_cubes, "REF", {"reference_for_bias": True} + ) products = { - PreprocessorFile(regular_cubes, 'A', {'dataset': 'a'}), + PreprocessorFile(regular_cubes, "A", {"dataset": "a"}), ref_product, } out_products = bias( - products, bias_type='relative', denominator_mask_threshold=3.0 + products, bias_type="relative", denominator_mask_threshold=3.0 ) assert isinstance(out_products, set) out_dict = products_set_to_dict(out_products) assert len(out_dict) == 1 - product_a = out_dict['A'] - assert product_a.filename == 'A' - assert product_a.attributes == {'units': '1', 'dataset': 'a'} + product_a = out_dict["A"] + assert product_a.filename == "A" + assert product_a.attributes == {"units": "1", "dataset": "a"} assert len(product_a.cubes) == 1 out_cube = product_a.cubes[0] assert out_cube.dtype == np.float32 - expected_data = np.ma.masked_equal([[[42.0, 42.0], - [42.0, 42.0]], - [[42.0, 42.0], - [42.0, 0.75]]], 42.0) + expected_data = np.ma.masked_equal( + [[[42.0, 42.0], [42.0, 42.0]], [[42.0, 42.0], [42.0, 0.75]]], 42.0 + ) assert_allclose(out_cube.data, expected_data) - assert out_cube.var_name == 'tas' - assert out_cube.standard_name == 'air_temperature' - assert out_cube.units == '1' + assert out_cube.var_name == "tas" + assert out_cube.standard_name == "air_temperature" + assert out_cube.units == "1" assert out_cube.dim_coords == regular_cubes[0].dim_coords assert out_cube.aux_coords == regular_cubes[0].aux_coords product_a.wasderivedfrom.assert_called_once() @@ -216,7 +229,7 @@ def 
test_denominator_mask_threshold_cubes(regular_cubes, ref_cubes): out_cubes = bias( regular_cubes, ref_cube, - bias_type='relative', + bias_type="relative", denominator_mask_threshold=3.0, ) @@ -224,24 +237,23 @@ def test_denominator_mask_threshold_cubes(regular_cubes, ref_cubes): assert len(out_cubes) == 1 out_cube = out_cubes[0] assert out_cube.dtype == np.float32 - expected_data = np.ma.masked_equal([[[42.0, 42.0], - [42.0, 42.0]], - [[42.0, 42.0], - [42.0, 0.75]]], 42.0) + expected_data = np.ma.masked_equal( + [[[42.0, 42.0], [42.0, 42.0]], [[42.0, 42.0], [42.0, 0.75]]], 42.0 + ) assert_allclose(out_cube.data, expected_data) - assert out_cube.var_name == 'tas' - assert out_cube.standard_name == 'air_temperature' - assert out_cube.units == '1' + assert out_cube.var_name == "tas" + assert out_cube.standard_name == "air_temperature" + assert out_cube.units == "1" assert out_cube.dim_coords == regular_cubes[0].dim_coords assert out_cube.aux_coords == regular_cubes[0].aux_coords -@pytest.mark.parametrize('bias_type', ['absolute', 'relative']) +@pytest.mark.parametrize("bias_type", ["absolute", "relative"]) def test_keep_reference_dataset(regular_cubes, ref_cubes, bias_type): """Test keep_reference_dataset argument.""" products = { - PreprocessorFile(regular_cubes, 'A', {'dataset': 'a'}), - PreprocessorFile(ref_cubes, 'REF', {'reference_for_bias': True}) + PreprocessorFile(regular_cubes, "A", {"dataset": "a"}), + PreprocessorFile(ref_cubes, "REF", {"reference_for_bias": True}), } out_products = bias( products, bias_type=bias_type, keep_reference_dataset=True @@ -251,29 +263,29 @@ def test_keep_reference_dataset(regular_cubes, ref_cubes, bias_type): out_dict = products_set_to_dict(out_products) assert len(out_dict) == 2 - product_ref = out_dict['REF'] - assert product_ref.filename == 'REF' - assert product_ref.attributes == {'reference_for_bias': True} + product_ref = out_dict["REF"] + assert product_ref.filename == "REF" + assert product_ref.attributes == {"reference_for_bias": True} assert len(product_ref.cubes) == 1 out_cube = product_ref.cubes[0] assert out_cube.dtype == np.float32 expected_data = [[[2.0, 2.0], [2.0, 2.0]], [[2.0, 2.0], [2.0, 4.0]]] assert_allclose(out_cube.data, expected_data) - assert out_cube.var_name == 'tas' - assert out_cube.standard_name == 'air_temperature' - assert out_cube.units == 'K' + assert out_cube.var_name == "tas" + assert out_cube.standard_name == "air_temperature" + assert out_cube.units == "K" assert out_cube.dim_coords == ref_cubes[0].dim_coords assert out_cube.aux_coords == ref_cubes[0].aux_coords -@pytest.mark.parametrize('bias_type,data,units', TEST_BIAS) -@pytest.mark.parametrize('keep_ref', [True, False]) +@pytest.mark.parametrize("bias_type,data,units", TEST_BIAS) +@pytest.mark.parametrize("keep_ref", [True, False]) def test_bias_products_and_ref_cube( regular_cubes, ref_cubes, keep_ref, bias_type, data, units ): """Test calculation of bias with products and ref_cube given.""" ref_cube = ref_cubes[0] - products = set([PreprocessorFile(regular_cubes, 'A', {'dataset': 'a'})]) + products = set([PreprocessorFile(regular_cubes, "A", {"dataset": "a"})]) out_products = bias( products, @@ -286,15 +298,15 @@ def test_bias_products_and_ref_cube( out_dict = products_set_to_dict(out_products) assert len(out_dict) == 1 - product_a = out_dict['A'] - assert product_a.filename == 'A' - assert product_a.attributes == {'units': units, 'dataset': 'a'} + product_a = out_dict["A"] + assert product_a.filename == "A" + assert product_a.attributes == {"units": 
units, "dataset": "a"} assert len(product_a.cubes) == 1 out_cube = product_a.cubes[0] assert out_cube.dtype == np.float32 assert_allclose(out_cube.data, data) - assert out_cube.var_name == 'tas' - assert out_cube.standard_name == 'air_temperature' + assert out_cube.var_name == "tas" + assert out_cube.standard_name == "air_temperature" assert out_cube.units == units assert out_cube.dim_coords == regular_cubes[0].dim_coords assert out_cube.aux_coords == regular_cubes[0].aux_coords @@ -305,9 +317,9 @@ def test_bias_products_and_ref_cube( def test_no_reference_for_bias(regular_cubes, ref_cubes): """Test fail when no reference_for_bias is given.""" products = { - PreprocessorFile(regular_cubes, 'A', {}), - PreprocessorFile(regular_cubes, 'B', {}), - PreprocessorFile(ref_cubes, 'REF', {}), + PreprocessorFile(regular_cubes, "A", {}), + PreprocessorFile(regular_cubes, "B", {}), + PreprocessorFile(ref_cubes, "REF", {}), } msg = "Expected exactly 1 dataset with 'reference_for_bias: true', found 0" with pytest.raises(ValueError, match=msg): @@ -317,9 +329,9 @@ def test_no_reference_for_bias(regular_cubes, ref_cubes): def test_two_references_for_bias(regular_cubes, ref_cubes): """Test fail when two reference_for_bias products are given.""" products = { - PreprocessorFile(regular_cubes, 'A', {'reference_for_bias': False}), - PreprocessorFile(ref_cubes, 'REF1', {'reference_for_bias': True}), - PreprocessorFile(ref_cubes, 'REF2', {'reference_for_bias': True}), + PreprocessorFile(regular_cubes, "A", {"reference_for_bias": False}), + PreprocessorFile(ref_cubes, "REF1", {"reference_for_bias": True}), + PreprocessorFile(ref_cubes, "REF2", {"reference_for_bias": True}), } msg = "Expected exactly 1 dataset with 'reference_for_bias: true', found 2" with pytest.raises(ValueError, match=msg): @@ -329,14 +341,16 @@ def test_two_references_for_bias(regular_cubes, ref_cubes): def test_invalid_bias_type(regular_cubes, ref_cubes): """Test fail when invalid bias_type is given.""" products = { - PreprocessorFile(regular_cubes, 'A', {}), - PreprocessorFile(regular_cubes, 'B', {}), - PreprocessorFile(ref_cubes, 'REF', {'reference_for_bias': True}), + PreprocessorFile(regular_cubes, "A", {}), + PreprocessorFile(regular_cubes, "B", {}), + PreprocessorFile(ref_cubes, "REF", {"reference_for_bias": True}), } - msg = (r"Expected one of \['absolute', 'relative'\] for bias_type, got " - r"'invalid_bias_type'") + msg = ( + r"Expected one of \['absolute', 'relative'\] for bias_type, got " + r"'invalid_bias_type'" + ) with pytest.raises(ValueError, match=msg): - bias(products, bias_type='invalid_bias_type') + bias(products, bias_type="invalid_bias_type") def test_reference_none_cubes(regular_cubes): @@ -350,22 +364,23 @@ def test_reference_none_cubes(regular_cubes): TEST_DISTANCE_METRICS = [ - ('rmse', 2.34520788, 0.0, 'RMSE', 'rmse_tas', 'K'), - ('weighted_rmse', 2.0, 0.0, 'RMSE', 'rmse_tas', 'K'), - ('pearsonr', 0.57735026, 1.0, "Pearson's r", 'pearsonr_tas', '1'), - ('weighted_pearsonr', np.nan, 1.0, "Pearson's r", 'pearsonr_tas', '1'), - ('emd', 1.98625, 0.0, 'EMD', 'emd_tas', 'K'), - ('weighted_emd', 0.9975, 0.0, 'EMD', 'emd_tas', 'K'), + ("rmse", 2.34520788, 0.0, "RMSE", "rmse_tas", "K"), + ("weighted_rmse", 2.0, 0.0, "RMSE", "rmse_tas", "K"), + ("pearsonr", 0.57735026, 1.0, "Pearson's r", "pearsonr_tas", "1"), + ("weighted_pearsonr", np.nan, 1.0, "Pearson's r", "pearsonr_tas", "1"), + ("emd", 1.98625, 0.0, "EMD", "emd_tas", "K"), + ("weighted_emd", 0.9975, 0.0, "EMD", "emd_tas", "K"), ] AREA_WEIGHTS = CellMeasure( 
np.array([0.0, 0.0, 2.0, 0.0]).reshape(2, 2), - standard_name='cell_area', - units='m2', + standard_name="cell_area", + units="m2", ) +@pytest.mark.parametrize("lazy_weights", [True, False]) @pytest.mark.parametrize( - 'metric,data,ref_data,long_name,var_name,units', TEST_DISTANCE_METRICS + "metric,data,ref_data,long_name,var_name,units", TEST_DISTANCE_METRICS ) def test_distance_metric( regular_cubes, @@ -376,105 +391,118 @@ def test_distance_metric( long_name, var_name, units, + lazy_weights, ): """Test `distance_metric`.""" - regular_cubes[0].add_cell_measure(AREA_WEIGHTS, (1, 2)) + regular_cubes[0].add_cell_measure(AREA_WEIGHTS.copy(), (1, 2)) + if lazy_weights: + regular_cubes[0].cell_measure("cell_area").data = ( + regular_cubes[0].cell_measure("cell_area").lazy_data() + ) ref_product = PreprocessorFile( - ref_cubes, 'REF', {'reference_for_metric': True} + ref_cubes, "REF", {"reference_for_metric": True} ) products = { - PreprocessorFile(regular_cubes, 'A', {'dataset': 'a'}), - PreprocessorFile(regular_cubes, 'B', {'dataset': 'b'}), + PreprocessorFile(regular_cubes, "A", {"dataset": "a"}), + PreprocessorFile(regular_cubes, "B", {"dataset": "b"}), ref_product, } out_products = distance_metric(products, metric) + assert ( + regular_cubes[0].cell_measure("cell_area").has_lazy_data() + is lazy_weights + ) assert isinstance(out_products, set) out_dict = products_set_to_dict(out_products) assert len(out_dict) == 3 expected_attrs = { - 'standard_name': None, - 'long_name': long_name, - 'short_name': var_name, - 'units': units, + "standard_name": None, + "long_name": long_name, + "short_name": var_name, + "units": units, } - product_a = out_dict['A'] - assert product_a.filename == 'A' - assert product_a.attributes == {'dataset': 'a', **expected_attrs} + product_a = out_dict["A"] + assert product_a.filename == "A" + assert product_a.attributes == {"dataset": "a", **expected_attrs} assert len(product_a.cubes) == 1 out_cube = product_a.cubes[0] assert out_cube.shape == () assert out_cube.dtype == np.float32 + assert not out_cube.has_lazy_data() assert_allclose(out_cube.data, np.array(data, dtype=np.float32)) assert out_cube.var_name == var_name assert out_cube.long_name == long_name assert out_cube.standard_name is None assert out_cube.units == units assert out_cube.cell_methods == ( - CellMethod(metric, ['time', 'latitude', 'longitude']), + CellMethod(metric, ["time", "latitude", "longitude"]), ) product_a.wasderivedfrom.assert_called_once() assert product_a.mock_ancestors == {ref_product} - product_b = out_dict['B'] - assert product_b.filename == 'B' - assert product_b.attributes == {'dataset': 'b', **expected_attrs} + product_b = out_dict["B"] + assert product_b.filename == "B" + assert product_b.attributes == {"dataset": "b", **expected_attrs} assert len(product_b.cubes) == 1 out_cube = product_b.cubes[0] assert out_cube.shape == () assert out_cube.dtype == np.float32 + assert not out_cube.has_lazy_data() assert_allclose(out_cube.data, np.array(data, dtype=np.float32)) assert out_cube.var_name == var_name assert out_cube.long_name == long_name assert out_cube.standard_name is None assert out_cube.units == units assert out_cube.cell_methods == ( - CellMethod(metric, ['time', 'latitude', 'longitude']), + CellMethod(metric, ["time", "latitude", "longitude"]), ) product_b.wasderivedfrom.assert_called_once() assert product_b.mock_ancestors == {ref_product} - product_ref = out_dict['REF'] - assert product_ref.filename == 'REF' + product_ref = out_dict["REF"] + assert product_ref.filename == "REF" 
assert product_ref.attributes == { - 'reference_for_metric': True, **expected_attrs + "reference_for_metric": True, + **expected_attrs, } assert len(product_ref.cubes) == 1 out_cube = product_ref.cubes[0] assert out_cube.shape == () assert out_cube.dtype == np.float32 + assert not out_cube.has_lazy_data() assert_allclose(out_cube.data, ref_data) assert out_cube.var_name == var_name assert out_cube.long_name == long_name assert out_cube.standard_name is None assert out_cube.units == units assert out_cube.cell_methods == ( - CellMethod(metric, ['time', 'latitude', 'longitude']), + CellMethod(metric, ["time", "latitude", "longitude"]), ) product_ref.wasderivedfrom.assert_not_called() assert product_ref.mock_ancestors == set() TEST_DISTANCE_METRICS_LAZY = [ - ('rmse', [1.224744871, 3.082207001], 'RMSE', 'rmse_tas', 'K'), - ('weighted_rmse', [1.2278657, 3.0784798], 'RMSE', 'rmse_tas', 'K'), - ('pearsonr', [np.nan, 0.77459663], "Pearson's r", 'pearsonr_tas', '1'), + ("rmse", [1.224744871, 3.082207001], "RMSE", "rmse_tas", "K"), + ("weighted_rmse", [1.2278657, 3.0784798], "RMSE", "rmse_tas", "K"), + ("pearsonr", [np.nan, 0.77459663], "Pearson's r", "pearsonr_tas", "1"), ( - 'weighted_pearsonr', + "weighted_pearsonr", [np.nan, 0.7745946], "Pearson's r", - 'pearsonr_tas', - '1', + "pearsonr_tas", + "1", ), - ('emd', [0.98, 2.9925], 'EMD', 'emd_tas', 'K'), - ('weighted_emd', [0.9837506, 2.9888833], 'EMD', 'emd_tas', 'K'), + ("emd", [0.98, 2.9925], "EMD", "emd_tas", "K"), + ("weighted_emd", [0.9837506, 2.9888833], "EMD", "emd_tas", "K"), ] @pytest.mark.parametrize( - 'metric,data,long_name,var_name,units', TEST_DISTANCE_METRICS_LAZY + "metric,data,long_name,var_name,units", TEST_DISTANCE_METRICS_LAZY ) def test_distance_metric_lazy( regular_cubes, ref_cubes, metric, data, long_name, var_name, units @@ -483,17 +511,17 @@ def test_distance_metric_lazy( regular_cubes[0].data = da.array(regular_cubes[0].data) ref_cubes[0].data = da.array(ref_cubes[0].data) ref_product = PreprocessorFile( - ref_cubes, 'REF', {'reference_for_metric': True} + ref_cubes, "REF", {"reference_for_metric": True} ) products = { - PreprocessorFile(regular_cubes, 'A', {'dataset': 'a'}), + PreprocessorFile(regular_cubes, "A", {"dataset": "a"}), ref_product, } out_products = distance_metric( products, metric, - coords=['latitude', 'longitude'], + coords=["latitude", "longitude"], keep_reference_dataset=False, ) @@ -501,14 +529,14 @@ def test_distance_metric_lazy( out_dict = products_set_to_dict(out_products) assert len(out_dict) == 1 - product_a = out_dict['A'] - assert product_a.filename == 'A' + product_a = out_dict["A"] + assert product_a.filename == "A" assert product_a.attributes == { - 'dataset': 'a', - 'standard_name': None, - 'long_name': long_name, - 'short_name': var_name, - 'units': units, + "dataset": "a", + "standard_name": None, + "long_name": long_name, + "short_name": var_name, + "units": units, } assert len(product_a.cubes) == 1 out_cube = product_a.cubes[0] @@ -519,80 +547,115 @@ def test_distance_metric_lazy( out_cube.data, np.ma.masked_invalid(np.array(data, dtype=np.float32)), ) - assert out_cube.coord('time') == regular_cubes[0].coord('time') + assert out_cube.coord("time") == regular_cubes[0].coord("time") assert out_cube.var_name == var_name assert out_cube.long_name == long_name assert out_cube.standard_name is None assert out_cube.units == units assert out_cube.cell_methods == ( - CellMethod(metric, ['latitude', 'longitude']), + CellMethod(metric, ["latitude", "longitude"]), ) 
product_a.wasderivedfrom.assert_called_once() assert product_a.mock_ancestors == {ref_product} +@pytest.mark.parametrize("lazy_weights", [True, False]) @pytest.mark.parametrize( - 'metric,data,_,long_name,var_name,units', TEST_DISTANCE_METRICS + "metric,data,_,long_name,var_name,units", TEST_DISTANCE_METRICS ) def test_distance_metric_cubes( - regular_cubes, ref_cubes, metric, data, _, long_name, var_name, units + regular_cubes, + ref_cubes, + metric, + data, + _, + long_name, + var_name, + units, + lazy_weights, ): """Test `distance_metric` with cubes.""" - regular_cubes[0].add_cell_measure(AREA_WEIGHTS, (1, 2)) + regular_cubes[0].add_cell_measure(AREA_WEIGHTS.copy(), (1, 2)) + if lazy_weights: + regular_cubes[0].cell_measure("cell_area").data = ( + regular_cubes[0].cell_measure("cell_area").lazy_data() + ) out_cubes = distance_metric(regular_cubes, metric, reference=ref_cubes[0]) + assert ( + regular_cubes[0].cell_measure("cell_area").has_lazy_data() + is lazy_weights + ) assert isinstance(out_cubes, CubeList) assert len(out_cubes) == 1 out_cube = out_cubes[0] assert out_cube.shape == () assert out_cube.dtype == np.float32 + assert not out_cube.has_lazy_data() assert_allclose(out_cube.data, np.array(data, dtype=np.float32)) assert out_cube.var_name == var_name assert out_cube.long_name == long_name assert out_cube.standard_name is None assert out_cube.units == units assert out_cube.cell_methods == ( - CellMethod(metric, ['time', 'latitude', 'longitude']), + CellMethod(metric, ["time", "latitude", "longitude"]), ) -@pytest.mark.parametrize('lazy', [True, False]) +@pytest.mark.parametrize("lazy_weights", [True, False]) +@pytest.mark.parametrize("lazy", [True, False]) @pytest.mark.parametrize( - 'metric,data,_,long_name,var_name,units', TEST_DISTANCE_METRICS + "metric,data,_,long_name,var_name,units", TEST_DISTANCE_METRICS ) def test_distance_metric_masked_data( - regular_cubes, ref_cubes, metric, data, _, long_name, var_name, units, lazy + regular_cubes, + ref_cubes, + metric, + data, + _, + long_name, + var_name, + units, + lazy, + lazy_weights, ): """Test `distance_metric` with masked data.""" # Test cube - time_units = Unit('days since 1850-01-01 00:00:00') - times = iris.coords.DimCoord([3.0, 7.0, 9.0], - bounds=[[0.0, 6.0], [6.0, 8.0], [8.0, 10.0]], - standard_name='time', - var_name='time', long_name='time', - units=time_units) - lats = regular_cubes[0].coord('latitude') - lons = regular_cubes[0].coord('longitude') + time_units = Unit("days since 1850-01-01 00:00:00") + times = iris.coords.DimCoord( + [3.0, 7.0, 9.0], + bounds=[[0.0, 6.0], [6.0, 8.0], [8.0, 10.0]], + standard_name="time", + var_name="time", + long_name="time", + units=time_units, + ) + lats = regular_cubes[0].coord("latitude") + lons = regular_cubes[0].coord("longitude") coord_specs = [(times, 0), (lats, 1), (lons, 2)] cube_data = np.pad( regular_cubes[0].data, ((0, 1), (0, 0), (0, 0)), - 'constant', + "constant", constant_values=np.nan, ) cube = Cube( np.ma.masked_invalid(cube_data), dim_coords_and_dims=coord_specs ) cube.metadata = regular_cubes[0].metadata - cube.add_cell_measure(AREA_WEIGHTS, (1, 2)) + cube.add_cell_measure(AREA_WEIGHTS.copy(), (1, 2)) + if lazy_weights: + cube.cell_measure("cell_area").data = cube.cell_measure( + "cell_area" + ).lazy_data() # Ref cube ref_cube = cube.copy() ref_data = np.pad( ref_cubes[0].data, ((0, 1), (0, 0), (0, 0)), - 'constant', + "constant", constant_values=np.nan, ) ref_cube.data = np.ma.masked_invalid(ref_data) @@ -604,6 +667,7 @@ def test_distance_metric_masked_data( 
out_cubes = distance_metric([cube], metric, reference=ref_cube) + assert cube.cell_measure("cell_area").has_lazy_data() is lazy_weights assert isinstance(out_cubes, CubeList) assert len(out_cubes) == 1 out_cube = out_cubes[0] @@ -626,21 +690,35 @@ def test_distance_metric_masked_data( assert out_cube.standard_name is None assert out_cube.units == units assert out_cube.cell_methods == ( - CellMethod(metric, ['time', 'latitude', 'longitude']), + CellMethod(metric, ["time", "latitude", "longitude"]), ) -@pytest.mark.parametrize('lazy', [True, False]) +@pytest.mark.parametrize("lazy_weights", [True, False]) +@pytest.mark.parametrize("lazy", [True, False]) @pytest.mark.parametrize( - 'metric,_,__,long_name,var_name,units', TEST_DISTANCE_METRICS + "metric,_,__,long_name,var_name,units", TEST_DISTANCE_METRICS ) def test_distance_metric_fully_masked_data( - regular_cubes, ref_cubes, metric, _, __, long_name, var_name, units, lazy + regular_cubes, + ref_cubes, + metric, + _, + __, + long_name, + var_name, + units, + lazy, + lazy_weights, ): """Test `distance_metric` with fully_masked data.""" cube = regular_cubes[0] cube.data = np.ma.masked_invalid(np.full(cube.shape, np.nan)) - cube.add_cell_measure(AREA_WEIGHTS, (1, 2)) + cube.add_cell_measure(AREA_WEIGHTS.copy(), (1, 2)) + if lazy_weights: + cube.cell_measure("cell_area").data = cube.cell_measure( + "cell_area" + ).lazy_data() ref_cube = ref_cubes[0] if lazy: @@ -649,6 +727,7 @@ def test_distance_metric_fully_masked_data( out_cubes = distance_metric([cube], metric, reference=ref_cube) + assert cube.cell_measure("cell_area").has_lazy_data() is lazy_weights assert isinstance(out_cubes, CubeList) assert len(out_cubes) == 1 out_cube = out_cubes[0] @@ -667,27 +746,27 @@ def test_distance_metric_fully_masked_data( assert out_cube.standard_name is None assert out_cube.units == units assert out_cube.cell_methods == ( - CellMethod(metric, ['time', 'latitude', 'longitude']), + CellMethod(metric, ["time", "latitude", "longitude"]), ) TEST_METRICS = [ - 'rmse', - 'weighted_rmse', - 'pearsonr', - 'weighted_pearsonr', - 'emd', - 'weighted_emd', + "rmse", + "weighted_rmse", + "pearsonr", + "weighted_pearsonr", + "emd", + "weighted_emd", ] -@pytest.mark.parametrize('metric', TEST_METRICS) +@pytest.mark.parametrize("metric", TEST_METRICS) def test_no_reference_for_metric(regular_cubes, ref_cubes, metric): """Test fail when no reference_for_metric is given.""" products = { - PreprocessorFile(regular_cubes, 'A', {}), - PreprocessorFile(regular_cubes, 'B', {}), - PreprocessorFile(ref_cubes, 'REF', {}), + PreprocessorFile(regular_cubes, "A", {}), + PreprocessorFile(regular_cubes, "B", {}), + PreprocessorFile(ref_cubes, "REF", {}), } msg = ( "Expected exactly 1 dataset with 'reference_for_metric: true', found 0" @@ -696,27 +775,27 @@ def test_no_reference_for_metric(regular_cubes, ref_cubes, metric): distance_metric(products, metric) -@pytest.mark.parametrize('metric', TEST_METRICS) +@pytest.mark.parametrize("metric", TEST_METRICS) def test_two_references_for_metric(regular_cubes, ref_cubes, metric): """Test fail when two reference_for_metric products are given.""" products = { - PreprocessorFile(regular_cubes, 'A', {'reference_for_metric': False}), - PreprocessorFile(ref_cubes, 'REF1', {'reference_for_metric': True}), - PreprocessorFile(ref_cubes, 'REF2', {'reference_for_metric': True}), + PreprocessorFile(regular_cubes, "A", {"reference_for_metric": False}), + PreprocessorFile(ref_cubes, "REF1", {"reference_for_metric": True}), + PreprocessorFile(ref_cubes, "REF2", 
{"reference_for_metric": True}), } msg = ( "Expected exactly 1 dataset with 'reference_for_metric: true', found 2" ) with pytest.raises(ValueError, match=msg): - distance_metric(products, 'rmse') + distance_metric(products, "rmse") def test_invalid_metric(regular_cubes, ref_cubes): """Test fail when invalid metric is given.""" products = { - PreprocessorFile(regular_cubes, 'A', {}), - PreprocessorFile(regular_cubes, 'B', {}), - PreprocessorFile(ref_cubes, 'REF', {'reference_for_metric': True}), + PreprocessorFile(regular_cubes, "A", {}), + PreprocessorFile(regular_cubes, "B", {}), + PreprocessorFile(ref_cubes, "REF", {"reference_for_metric": True}), } msg = ( r"Expected one of \['rmse', 'weighted_rmse', 'pearsonr', " @@ -724,10 +803,10 @@ def test_invalid_metric(regular_cubes, ref_cubes): r"'invalid'" ) with pytest.raises(ValueError, match=msg): - distance_metric(products, 'invalid') + distance_metric(products, "invalid") -@pytest.mark.parametrize('metric', TEST_METRICS) +@pytest.mark.parametrize("metric", TEST_METRICS) def test_distance_metric_reference_none_cubes(regular_cubes, metric): """Test distance metric with reference=None with with cubes.""" msg = ( @@ -738,7 +817,7 @@ def test_distance_metric_reference_none_cubes(regular_cubes, metric): distance_metric(regular_cubes, metric) -@pytest.mark.parametrize('metric', TEST_METRICS) +@pytest.mark.parametrize("metric", TEST_METRICS) def test_distance_metric_no_named_dimensions(metric): """Test distance metric with reference=None with with cubes.""" ref_cube = Cube([0, 1]) @@ -751,7 +830,7 @@ def test_distance_metric_no_named_dimensions(metric): distance_metric(cubes, metric, reference=ref_cube) -@pytest.mark.parametrize('metric', TEST_METRICS) +@pytest.mark.parametrize("metric", TEST_METRICS) def test_distance_metric_non_matching_shapes(regular_cubes, metric): """Test distance metric with cubes of different shapes.""" ref_cube = Cube(0) @@ -763,12 +842,12 @@ def test_distance_metric_non_matching_shapes(regular_cubes, metric): distance_metric(regular_cubes, metric, reference=ref_cube) -@pytest.mark.parametrize('metric', TEST_METRICS) +@pytest.mark.parametrize("metric", TEST_METRICS) def test_distance_metric_non_matching_dims(regular_cubes, metric): """Test distance metric with cubes with difference dimensions.""" ref_cube = regular_cubes[0].copy() - ref_cube.remove_coord('time') - new_coord = iris.coords.DimCoord([0.0, 1.0], var_name='not_time') + ref_cube.remove_coord("time") + new_coord = iris.coords.DimCoord([0.0, 1.0], var_name="not_time") ref_cube.add_dim_coord(new_coord, 0) msg = "Cannot calculate distance metric between cube and reference cube" with pytest.raises(ValueError, match=msg): @@ -776,19 +855,19 @@ def test_distance_metric_non_matching_dims(regular_cubes, metric): @pytest.mark.parametrize( - 'metric,error', + "metric,error", [ - ('rmse', False), - ('weighted_rmse', True), - ('pearsonr', False), - ('weighted_pearsonr', True), - ('emd', False), - ('weighted_emd', True), - ] + ("rmse", False), + ("weighted_rmse", True), + ("pearsonr", False), + ("weighted_pearsonr", True), + ("emd", False), + ("weighted_emd", True), + ], ) def test_distance_metric_no_lon_for_area_weights(regular_cubes, metric, error): """Test distance metric with cubes that have no longitude.""" - regular_cubes[0].remove_coord('longitude') + regular_cubes[0].remove_coord("longitude") ref_cube = regular_cubes[0].copy() msg = ( r"Cube .* needs a `longitude` coordinate to calculate cell area " @@ -805,5 +884,5 @@ def 
test_distance_metric_no_lon_for_area_weights(regular_cubes, metric, error): regular_cubes, metric, reference=ref_cube, - coords=['time', 'latitude'] + coords=["time", "latitude"], ) diff --git a/tests/unit/preprocessor/_cycles/test_cycles.py b/tests/unit/preprocessor/_cycles/test_cycles.py index f1cf8dba41..f05a43d4de 100644 --- a/tests/unit/preprocessor/_cycles/test_cycles.py +++ b/tests/unit/preprocessor/_cycles/test_cycles.py @@ -1,4 +1,5 @@ """Unit tests for :mod:`esmvalcore.preprocessor._cycles`.""" + import iris import iris.coord_categorisation import numpy as np @@ -11,30 +12,45 @@ @pytest.fixture def annual_cycle_cube(): """Cube including annual cycle.""" - time_units = Unit('days since 1850-01-01 00:00:00', calendar='noleap') + time_units = Unit("days since 1850-01-01 00:00:00", calendar="noleap") n_times = 3 * 365 n_lat = 4 time_coord = iris.coords.DimCoord( - np.arange(n_times, dtype=np.float64), var_name='time', - standard_name='time', long_name='time', units=time_units) + np.arange(n_times, dtype=np.float64), + var_name="time", + standard_name="time", + long_name="time", + units=time_units, + ) time_coord.guess_bounds() lat_coord = iris.coords.DimCoord( - np.arange(n_lat, dtype=np.float64) * 10, var_name='lat', - standard_name='latitude', long_name='latitude', units='degrees') + np.arange(n_lat, dtype=np.float64) * 10, + var_name="lat", + standard_name="latitude", + long_name="latitude", + units="degrees", + ) lat_coord.guess_bounds() - new_data = (np.sin(np.arange(n_times) * 2.0 * np.pi / 365.0) * - (np.arange(n_times) + 1.0) * 0.005 + 0.005 * - np.arange(n_times)).reshape(n_times, 1) * np.arange(n_lat) + new_data = ( + np.sin(np.arange(n_times) * 2.0 * np.pi / 365.0) + * (np.arange(n_times) + 1.0) + * 0.005 + + 0.005 * np.arange(n_times) + ).reshape(n_times, 1) * np.arange(n_lat) annual_cycle_cube = iris.cube.Cube( - new_data, var_name='tas', standard_name='air_temperature', - units='K', dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1)]) + new_data, + var_name="tas", + standard_name="air_temperature", + units="K", + dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1)], + ) return annual_cycle_cube def test_amplitude_fail_wrong_coord(annual_cycle_cube): """Test amplitude calculation when wrong coordinate is given.""" with pytest.raises(iris.exceptions.CoordinateNotFoundError): - amplitude(annual_cycle_cube, ['year', 'invalid_coord']) + amplitude(annual_cycle_cube, ["year", "invalid_coord"]) ANNUAL_CYCLE_AMPLITUDE = [ @@ -46,21 +62,21 @@ def test_amplitude_fail_wrong_coord(annual_cycle_cube): def test_amplitude_annual_cycle_add_year(annual_cycle_cube): """Test amplitude of annual cycle when year is not given in cube.""" - assert not annual_cycle_cube.coords('year') - amplitude_cube = amplitude(annual_cycle_cube, 'year') + assert not annual_cycle_cube.coords("year") + amplitude_cube = amplitude(annual_cycle_cube, "year") assert amplitude_cube.shape == (3, 4) - assert amplitude_cube.coords('year') + assert amplitude_cube.coords("year") np.testing.assert_allclose(amplitude_cube.data, ANNUAL_CYCLE_AMPLITUDE) assert amplitude_cube.metadata == annual_cycle_cube.metadata def test_amplitude_annual_cycle_do_not_add_year(annual_cycle_cube): """Test amplitude of annual cycle when year is given in cube.""" - assert not annual_cycle_cube.coords('year') - iris.coord_categorisation.add_year(annual_cycle_cube, 'time') - amplitude_cube = amplitude(annual_cycle_cube, 'year') + assert not annual_cycle_cube.coords("year") + iris.coord_categorisation.add_year(annual_cycle_cube, "time") + 
amplitude_cube = amplitude(annual_cycle_cube, "year") assert amplitude_cube.shape == (3, 4) - assert amplitude_cube.coords('year') + assert amplitude_cube.coords("year") np.testing.assert_allclose(amplitude_cube.data, ANNUAL_CYCLE_AMPLITUDE) assert amplitude_cube.metadata == annual_cycle_cube.metadata @@ -68,20 +84,31 @@ def test_amplitude_annual_cycle_do_not_add_year(annual_cycle_cube): @pytest.fixture def diurnal_cycle_cube(): """Cube including diurnal cycle.""" - time_units = Unit('hours since 1850-01-01 00:00:00', calendar='noleap') + time_units = Unit("hours since 1850-01-01 00:00:00", calendar="noleap") n_days = 2 * 365 n_times = n_days * 4 time_coord = iris.coords.DimCoord( - np.arange(n_times, dtype=np.float64) * 6.0, var_name='time', - standard_name='time', long_name='time', units=time_units) + np.arange(n_times, dtype=np.float64) * 6.0, + var_name="time", + standard_name="time", + long_name="time", + units=time_units, + ) time_coord.guess_bounds() - new_data = np.concatenate(( - [-2.0, -3.0, 0.0, 1.0] * int(n_days / 2), - [-5.0, -1.0, 5.0, 0.0] * int(n_days / 2), - ), axis=None) + new_data = np.concatenate( + ( + [-2.0, -3.0, 0.0, 1.0] * int(n_days / 2), + [-5.0, -1.0, 5.0, 0.0] * int(n_days / 2), + ), + axis=None, + ) diurnal_cycle_cube = iris.cube.Cube( - new_data, var_name='tas', standard_name='air_temperature', - units='K', dim_coords_and_dims=[(time_coord, 0)]) + new_data, + var_name="tas", + standard_name="air_temperature", + units="K", + dim_coords_and_dims=[(time_coord, 0)], + ) return diurnal_cycle_cube @@ -90,25 +117,25 @@ def diurnal_cycle_cube(): def test_amplitude_diurnal_cycle_add_coords(diurnal_cycle_cube): """Test amplitude of diurnal cycle when coords are not given in cube.""" - assert not diurnal_cycle_cube.coords('day_of_year') - assert not diurnal_cycle_cube.coords('year') - amplitude_cube = amplitude(diurnal_cycle_cube, ['day_of_year', 'year']) + assert not diurnal_cycle_cube.coords("day_of_year") + assert not diurnal_cycle_cube.coords("year") + amplitude_cube = amplitude(diurnal_cycle_cube, ["day_of_year", "year"]) assert amplitude_cube.shape == (730,) - assert amplitude_cube.coords('day_of_year') - assert amplitude_cube.coords('year') + assert amplitude_cube.coords("day_of_year") + assert amplitude_cube.coords("year") np.testing.assert_allclose(amplitude_cube.data, DIURNAL_CYCLE_AMPLITUDE) assert amplitude_cube.metadata == diurnal_cycle_cube.metadata def test_amplitude_diurnal_cycle_do_not_add_coords(diurnal_cycle_cube): """Test amplitude of diurnal cycle when coords are given in cube.""" - assert not diurnal_cycle_cube.coords('day_of_year') - assert not diurnal_cycle_cube.coords('year') - iris.coord_categorisation.add_day_of_year(diurnal_cycle_cube, 'time') - iris.coord_categorisation.add_year(diurnal_cycle_cube, 'time') - amplitude_cube = amplitude(diurnal_cycle_cube, ['day_of_year', 'year']) + assert not diurnal_cycle_cube.coords("day_of_year") + assert not diurnal_cycle_cube.coords("year") + iris.coord_categorisation.add_day_of_year(diurnal_cycle_cube, "time") + iris.coord_categorisation.add_year(diurnal_cycle_cube, "time") + amplitude_cube = amplitude(diurnal_cycle_cube, ["day_of_year", "year"]) assert amplitude_cube.shape == (730,) - assert amplitude_cube.coords('day_of_year') - assert amplitude_cube.coords('year') + assert amplitude_cube.coords("day_of_year") + assert amplitude_cube.coords("year") np.testing.assert_allclose(amplitude_cube.data, DIURNAL_CYCLE_AMPLITUDE) assert amplitude_cube.metadata == diurnal_cycle_cube.metadata diff --git 
a/tests/unit/preprocessor/_derive/test_amoc.py b/tests/unit/preprocessor/_derive/test_amoc.py index 4b323a783b..2565b8a4aa 100644 --- a/tests/unit/preprocessor/_derive/test_amoc.py +++ b/tests/unit/preprocessor/_derive/test_amoc.py @@ -1,4 +1,5 @@ """Test derivation of `amoc`.""" + import iris import iris.fileformats import numpy as np @@ -12,21 +13,27 @@ @pytest.fixture def cubes(): # standard names - msftmyz_name = 'ocean_meridional_overturning_mass_streamfunction' - msftyz_name = 'ocean_y_overturning_mass_streamfunction' - - msftmyz_cube = get_cube([[[[100.]], [[100.]], [[100.]]]], - air_pressure_coord=False, - depth_coord=True, - standard_name=msftmyz_name) - msftyz_cube = get_cube([[[[100.]], [[100.]], [[100.]]]], - air_pressure_coord=False, - depth_coord=True, - standard_name=msftyz_name) - rando_cube = get_cube([[[[100.]], [[100.]], [[100.]]]], - air_pressure_coord=False, - depth_coord=True, - standard_name="air_temperature") + msftmyz_name = "ocean_meridional_overturning_mass_streamfunction" + msftyz_name = "ocean_y_overturning_mass_streamfunction" + + msftmyz_cube = get_cube( + [[[[100.0]], [[100.0]], [[100.0]]]], + air_pressure_coord=False, + depth_coord=True, + standard_name=msftmyz_name, + ) + msftyz_cube = get_cube( + [[[[100.0]], [[100.0]], [[100.0]]]], + air_pressure_coord=False, + depth_coord=True, + standard_name=msftyz_name, + ) + rando_cube = get_cube( + [[[[100.0]], [[100.0]], [[100.0]]]], + air_pressure_coord=False, + depth_coord=True, + standard_name="air_temperature", + ) msftmyz_cube.coord("latitude").points = np.array([26.0]) msftyz_cube.coord("latitude").points = np.array([26.0]) msftyz_cube.coord("latitude").standard_name = "grid_latitude" @@ -71,48 +78,53 @@ def test_amoc_preamble(cubes): def build_ocean_cube(std_name): """Build an ocean cube for testing the amoc calculation.""" # assemble a decent cube this time - coord_sys = iris.coord_systems.GeogCS( - iris.fileformats.pp.EARTH_RADIUS) + coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) data = np.ones((5, 180, 360, 3), dtype=np.float32) lons = iris.coords.DimCoord( range(0, 360), - standard_name='longitude', + standard_name="longitude", bounds=None, - units='degrees_east', - coord_system=coord_sys) + units="degrees_east", + coord_system=coord_sys, + ) lats = iris.coords.DimCoord( range(-90, 90), - standard_name='latitude', + standard_name="latitude", bounds=None, - units='degrees_north', + units="degrees_north", coord_system=coord_sys, ) depth = iris.coords.DimCoord( - [i * 100. 
for i in range(2, 7)], - standard_name='depth', - long_name='depth', + [i * 100.0 for i in range(2, 7)], + standard_name="depth", + long_name="depth", bounds=None, ) basin = iris.coords.AuxCoord( - ['atlantic_arctic_ocean', 'indian_pacific_ocean', 'global_ocean'], - standard_name='region', - long_name='atlantic_arctic_ocean', + ["atlantic_arctic_ocean", "indian_pacific_ocean", "global_ocean"], + standard_name="region", + long_name="atlantic_arctic_ocean", bounds=None, ) coords_spec = [(depth, 0), (lats, 1), (lons, 2)] - cube = iris.cube.Cube(data, - dim_coords_and_dims=coords_spec, - standard_name=std_name) - cube.add_aux_coord(basin, data_dims=[3, ]) + cube = iris.cube.Cube( + data, dim_coords_and_dims=coords_spec, standard_name=std_name + ) + cube.add_aux_coord( + basin, + data_dims=[ + 3, + ], + ) return cube def test_amoc_derivation(): """Test the actual computation for amoc.""" - msftmyz_name = 'ocean_meridional_overturning_mass_streamfunction' - msftyz_name = 'ocean_y_overturning_mass_streamfunction' + msftmyz_name = "ocean_meridional_overturning_mass_streamfunction" + msftyz_name = "ocean_y_overturning_mass_streamfunction" derived_var = amoc.DerivedVariable() diff --git a/tests/unit/preprocessor/_derive/test_asr.py b/tests/unit/preprocessor/_derive/test_asr.py index 1400a01a5b..eac47a45bb 100644 --- a/tests/unit/preprocessor/_derive/test_asr.py +++ b/tests/unit/preprocessor/_derive/test_asr.py @@ -1,4 +1,5 @@ """Test derivation of `asr`.""" + import iris import numpy as np import pytest @@ -8,19 +9,22 @@ @pytest.fixture def cubes(): - rsdt_name = 'toa_incoming_shortwave_flux' - rsut_name = 'toa_outgoing_shortwave_flux' - rsdt_cube = iris.cube.Cube([[1.0, 2.0], [0.0, -2.0]], - standard_name=rsdt_name) - rsut_cube = iris.cube.Cube([[7.0, 0.0], [-1.0, 5.0]], - standard_name=rsut_name) - ta_cube = iris.cube.Cube([1.0], standard_name='air_temperature') + rsdt_name = "toa_incoming_shortwave_flux" + rsut_name = "toa_outgoing_shortwave_flux" + rsdt_cube = iris.cube.Cube( + [[1.0, 2.0], [0.0, -2.0]], standard_name=rsdt_name + ) + rsut_cube = iris.cube.Cube( + [[7.0, 0.0], [-1.0, 5.0]], standard_name=rsut_name + ) + ta_cube = iris.cube.Cube([1.0], standard_name="air_temperature") return iris.cube.CubeList([rsdt_cube, rsut_cube, ta_cube]) def test_asr_calculation(cubes): derived_var = asr.DerivedVariable() out_cube = derived_var.calculate(cubes) - np.testing.assert_allclose(out_cube.data, - np.array([[-6.0, 2.0], [1.0, -7.0]])) - assert out_cube.attributes['positive'] == 'down' + np.testing.assert_allclose( + out_cube.data, np.array([[-6.0, 2.0], [1.0, -7.0]]) + ) + assert out_cube.attributes["positive"] == "down" diff --git a/tests/unit/preprocessor/_derive/test_co2s.py b/tests/unit/preprocessor/_derive/test_co2s.py index 3fd364edd7..1a4985c8ae 100644 --- a/tests/unit/preprocessor/_derive/test_co2s.py +++ b/tests/unit/preprocessor/_derive/test_co2s.py @@ -1,4 +1,5 @@ """Test derivation of ``co2s``.""" + import dask.array as da import iris import numpy as np @@ -9,19 +10,33 @@ def get_coord_spec(include_plev=True): """Coordinate specs for cubes.""" - time_coord = iris.coords.DimCoord([0], var_name='time', - standard_name='time', - units='days since 0000-01-01 00:00:00') - lat_coord = iris.coords.DimCoord([0.0, 1.0], var_name='latitude', - standard_name='latitude', units='degrees') - lon_coord = iris.coords.DimCoord([0.0, 1.0], var_name='longitude', - standard_name='longitude', - units='degrees') + time_coord = iris.coords.DimCoord( + [0], + var_name="time", + standard_name="time", + 
units="days since 0001-01-01 00:00:00", + ) + lat_coord = iris.coords.DimCoord( + [0.0, 1.0], + var_name="latitude", + standard_name="latitude", + units="degrees", + ) + lon_coord = iris.coords.DimCoord( + [0.0, 1.0], + var_name="longitude", + standard_name="longitude", + units="degrees", + ) + lat_coord.guess_bounds() + lon_coord.guess_bounds() if include_plev: - plev_coord = iris.coords.DimCoord([100000.0, 90000.0, 50000.0], - var_name='plev', - standard_name='air_pressure', - units='Pa') + plev_coord = iris.coords.DimCoord( + [100000.0, 90000.0, 50000.0], + var_name="plev", + standard_name="air_pressure", + units="Pa", + ) coord_spec = [ (time_coord, 0), (plev_coord, 1), @@ -39,14 +54,13 @@ def get_coord_spec(include_plev=True): def get_ps_cube(): """Surface air pressure cube.""" - ps_data = [[[105000.0, 50000.0], - [95000.0, 60000.0]]] + ps_data = [[[105000.0, 50000.0], [95000.0, 60000.0]]] coord_spec = get_coord_spec(include_plev=False) cube = iris.cube.Cube( ps_data, - var_name='ps', - standard_name='surface_air_pressure', - units='Pa', + var_name="ps", + standard_name="surface_air_pressure", + units="Pa", dim_coords_and_dims=coord_spec, ) return cube @@ -56,17 +70,21 @@ def get_ps_cube(): def masked_cubes(): """Masked CO2 cube.""" coord_spec = get_coord_spec() - co2_data = da.ma.masked_less([[[[170.0, -1.0], - [-1.0, -1.0]], - [[150.0, 100.0], - [80.0, -1.0]], - [[100.0, 50.0], - [30.0, 10.0]]]], 0.0) + co2_data = da.ma.masked_less( + [ + [ + [[170.0, -1.0], [-1.0, -1.0]], + [[150.0, 100.0], [80.0, -1.0]], + [[100.0, 50.0], [30.0, 10.0]], + ] + ], + 0.0, + ) co2_cube = iris.cube.Cube( co2_data, - var_name='co2', - standard_name='mole_fraction_of_carbon_dioxide_in_air', - units='1e-6', + var_name="co2", + standard_name="mole_fraction_of_carbon_dioxide_in_air", + units="1e-6", dim_coords_and_dims=coord_spec, ) ps_cube = get_ps_cube() @@ -77,17 +95,20 @@ def masked_cubes(): def unmasked_cubes(): """Unmasked CO2 cube.""" coord_spec = get_coord_spec() - co2_data = da.array([[[[200.0, 100.0], - [80.0, 9.0]], - [[150.0, 80.0], - [70.0, 5.0]], - [[100.0, 50.0], - [30.0, 1.0]]]]) + co2_data = da.array( + [ + [ + [[200.0, 100.0], [80.0, 9.0]], + [[150.0, 80.0], [70.0, 5.0]], + [[100.0, 50.0], [30.0, 1.0]], + ] + ] + ) co2_cube = iris.cube.Cube( co2_data, - var_name='co2', - standard_name='mole_fraction_of_carbon_dioxide_in_air', - units='1e-8', + var_name="co2", + standard_name="mole_fraction_of_carbon_dioxide_in_air", + units="1e-8", dim_coords_and_dims=coord_spec, ) ps_cube = get_ps_cube() @@ -99,17 +120,16 @@ def test_co2_calculate_masked_cubes(masked_cubes): derived_var = co2s.DerivedVariable() out_cube = derived_var.calculate(masked_cubes) assert not np.ma.is_masked(out_cube.data) - np.testing.assert_allclose(out_cube.data, - [[[180.0, 50.0], - [80.0, 10.0]]]) - assert out_cube.units == '1e-6' - plev_coord = out_cube.coord('air_pressure') - assert plev_coord.var_name == 'plev' - assert plev_coord.standard_name == 'air_pressure' - assert plev_coord.long_name == 'pressure' - assert plev_coord.units == 'Pa' - np.testing.assert_allclose(plev_coord.points, - [[[105000.0, 50000.0], [95000.0, 60000.0]]]) + np.testing.assert_allclose(out_cube.data, [[[180.0, 50.0], [80.0, 10.0]]]) + assert out_cube.units == "1e-6" + plev_coord = out_cube.coord("air_pressure") + assert plev_coord.var_name == "plev" + assert plev_coord.standard_name == "air_pressure" + assert plev_coord.long_name == "pressure" + assert plev_coord.units == "Pa" + np.testing.assert_allclose( + plev_coord.points, [[[105000.0, 
50000.0], [95000.0, 60000.0]]] + ) def test_co2_calculate_unmasked_cubes(unmasked_cubes): @@ -117,14 +137,13 @@ def test_co2_calculate_unmasked_cubes(unmasked_cubes): derived_var = co2s.DerivedVariable() out_cube = derived_var.calculate(unmasked_cubes) assert not np.ma.is_masked(out_cube.data) - np.testing.assert_allclose(out_cube.data, - [[[2.25, 0.50], - [0.75, 0.02]]]) - assert out_cube.units == '1e-6' - plev_coord = out_cube.coord('air_pressure') - assert plev_coord.var_name == 'plev' - assert plev_coord.standard_name == 'air_pressure' - assert plev_coord.long_name == 'pressure' - assert plev_coord.units == 'Pa' - np.testing.assert_allclose(plev_coord.points, - [[[105000.0, 50000.0], [95000.0, 60000.0]]]) + np.testing.assert_allclose(out_cube.data, [[[2.25, 0.50], [0.75, 0.02]]]) + assert out_cube.units == "1e-6" + plev_coord = out_cube.coord("air_pressure") + assert plev_coord.var_name == "plev" + assert plev_coord.standard_name == "air_pressure" + assert plev_coord.long_name == "pressure" + assert plev_coord.units == "Pa" + np.testing.assert_allclose( + plev_coord.points, [[[105000.0, 50000.0], [95000.0, 60000.0]]] + ) diff --git a/tests/unit/preprocessor/_derive/test_ctotal.py b/tests/unit/preprocessor/_derive/test_ctotal.py index 8b57a983d9..703bee0f18 100644 --- a/tests/unit/preprocessor/_derive/test_ctotal.py +++ b/tests/unit/preprocessor/_derive/test_ctotal.py @@ -1,4 +1,5 @@ """Test derivation of `ctotal`.""" + import iris import numpy as np import pytest @@ -9,23 +10,30 @@ @pytest.fixture def cubes(project): - if project == 'CMIP5': - c_soil_cube = iris.cube.Cube([[1.0, 2.0], [0.0, 20.0]], - units='kg m-2', - standard_name='soil_carbon_content') - elif project == 'CMIP6': + if project == "CMIP5": + c_soil_cube = iris.cube.Cube( + [[1.0, 2.0], [0.0, 20.0]], + units="kg m-2", + standard_name="soil_carbon_content", + ) + elif project == "CMIP6": + c_soil_cube = iris.cube.Cube( + [[1.0, 2.0], [0.0, 20.0]], + units="kg m-2", + standard_name="soil_mass_content_of_carbon", + ) + elif project == "CMIPX": c_soil_cube = iris.cube.Cube( [[1.0, 2.0], [0.0, 20.0]], - units='kg m-2', - standard_name='soil_mass_content_of_carbon') - elif project == 'CMIPX': - c_soil_cube = iris.cube.Cube([[1.0, 2.0], [0.0, 20.0]], - units='kg m-2', - standard_name='air_temperature') - - c_veg_cube = iris.cube.Cube([[10.0, 20.0], [50.0, 100.0]], - units='kg m-2', - standard_name='vegetation_carbon_content') + units="kg m-2", + standard_name="air_temperature", + ) + + c_veg_cube = iris.cube.Cube( + [[10.0, 20.0], [50.0, 100.0]], + units="kg m-2", + standard_name="vegetation_carbon_content", + ) return iris.cube.CubeList([c_soil_cube, c_veg_cube]) @@ -33,14 +41,15 @@ def _run_test(cubes): """Run the actual test on cubes list.""" derived_var = ctotal.DerivedVariable() out_cube = derived_var.calculate(cubes) - np.testing.assert_allclose(out_cube.data, - np.array([[11.0, 22.0], [50.0, 120.0]])) - assert out_cube.units == Unit('kg m-2') + np.testing.assert_allclose( + out_cube.data, np.array([[11.0, 22.0], [50.0, 120.0]]) + ) + assert out_cube.units == Unit("kg m-2") -@pytest.mark.parametrize('project', ['CMIP5', 'CMIP6', 'CMIPX']) +@pytest.mark.parametrize("project", ["CMIP5", "CMIP6", "CMIPX"]) def test_ctotal_calculation_cmip5(cubes, project): - if project in ['CMIP5', 'CMIP6']: + if project in ["CMIP5", "CMIP6"]: _run_test(cubes) else: msg = "soil_carbon_content or CMIP6: soil_mass_content_of_carbon" diff --git a/tests/unit/preprocessor/_derive/test_et.py b/tests/unit/preprocessor/_derive/test_et.py index 
d8c40d6188..fc93914cc7 100644 --- a/tests/unit/preprocessor/_derive/test_et.py +++ b/tests/unit/preprocessor/_derive/test_et.py @@ -1,4 +1,5 @@ """Test derivation of `et`.""" + import iris import numpy as np import pytest @@ -9,10 +10,12 @@ @pytest.fixture def cubes(): - hfls_cube = iris.cube.Cube([[1.0, 2.0], [0.0, -2.0]], - standard_name='surface_upward_latent_heat_flux', - attributes={'positive': 'up', 'test': 1}) - ta_cube = iris.cube.Cube([1.0], standard_name='air_temperature') + hfls_cube = iris.cube.Cube( + [[1.0, 2.0], [0.0, -2.0]], + standard_name="surface_upward_latent_heat_flux", + attributes={"positive": "up", "test": 1}, + ) + ta_cube = iris.cube.Cube([1.0], standard_name="air_temperature") return iris.cube.CubeList([hfls_cube, ta_cube]) @@ -20,15 +23,15 @@ def test_et_calculation(cubes): derived_var = et.DerivedVariable() out_cube = derived_var.calculate(cubes) np.testing.assert_allclose( - out_cube.data, np.array([[0.03505071, 0.07010142], - [0.0, -0.07010142]])) - assert out_cube.units == Unit('mm day-1') - assert 'positive' not in out_cube.attributes + out_cube.data, np.array([[0.03505071, 0.07010142], [0.0, -0.07010142]]) + ) + assert out_cube.units == Unit("mm day-1") + assert "positive" not in out_cube.attributes def test_et_calculation_no_positive_attr(cubes): - cubes[0].attributes.pop('positive') - assert cubes[0].attributes == {'test': 1} + cubes[0].attributes.pop("positive") + assert cubes[0].attributes == {"test": 1} derived_var = et.DerivedVariable() out_cube = derived_var.calculate(cubes) - assert 'positive' not in out_cube.attributes + assert "positive" not in out_cube.attributes diff --git a/tests/unit/preprocessor/_derive/test_hfns.py b/tests/unit/preprocessor/_derive/test_hfns.py index 4eeb5889f4..b96e0ff168 100644 --- a/tests/unit/preprocessor/_derive/test_hfns.py +++ b/tests/unit/preprocessor/_derive/test_hfns.py @@ -1,4 +1,5 @@ """Test derivation of ``hfns``.""" + import numpy as np import pytest from iris.cube import CubeList @@ -11,12 +12,20 @@ @pytest.fixture def cubes(): """Input cubes for derivation of ``hfns``.""" - hfls_cube = get_cube([[[1.0]]], air_pressure_coord=False, - standard_name='surface_upward_latent_heat_flux', - var_name='hfls', units='W m-2') - hfss_cube = get_cube([[[1.0]]], air_pressure_coord=False, - standard_name='surface_upward_sensible_heat_flux', - var_name='hfss', units='W m-2') + hfls_cube = get_cube( + [[[1.0]]], + air_pressure_coord=False, + standard_name="surface_upward_latent_heat_flux", + var_name="hfls", + units="W m-2", + ) + hfss_cube = get_cube( + [[[1.0]]], + air_pressure_coord=False, + standard_name="surface_upward_sensible_heat_flux", + var_name="hfss", + units="W m-2", + ) return CubeList([hfls_cube, hfss_cube]) @@ -25,14 +34,14 @@ def test_hfns_calculate(cubes): derived_var = hfns.DerivedVariable() out_cube = derived_var.calculate(cubes) assert out_cube.shape == (1, 1, 1) - assert out_cube.units == 'W m-2' - assert out_cube.coords('time') - assert out_cube.coords('latitude') - assert out_cube.coords('longitude') + assert out_cube.units == "W m-2" + assert out_cube.coords("time") + assert out_cube.coords("latitude") + assert out_cube.coords("longitude") np.testing.assert_allclose(out_cube.data, [[[2.0]]]) - np.testing.assert_allclose(out_cube.coord('time').points, [0.0]) - np.testing.assert_allclose(out_cube.coord('latitude').points, [45.0]) - np.testing.assert_allclose(out_cube.coord('longitude').points, [10.0]) + np.testing.assert_allclose(out_cube.coord("time").points, [0.0]) + 
np.testing.assert_allclose(out_cube.coord("latitude").points, [45.0]) + np.testing.assert_allclose(out_cube.coord("longitude").points, [10.0]) def test_hfns_required(): @@ -40,6 +49,6 @@ def test_hfns_required(): derived_var = hfns.DerivedVariable() output = derived_var.required(None) assert output == [ - {'short_name': 'hfls'}, - {'short_name': 'hfss'}, + {"short_name": "hfls"}, + {"short_name": "hfss"}, ] diff --git a/tests/unit/preprocessor/_derive/test_lwcre.py b/tests/unit/preprocessor/_derive/test_lwcre.py index c70807a149..2cee12b633 100644 --- a/tests/unit/preprocessor/_derive/test_lwcre.py +++ b/tests/unit/preprocessor/_derive/test_lwcre.py @@ -1,4 +1,5 @@ """Test derivation of `lwcre`.""" + import numpy as np import pytest from iris.cube import Cube, CubeList @@ -9,12 +10,12 @@ @pytest.fixture def cubes(): rlut_cube = Cube( - 3, standard_name='toa_outgoing_longwave_flux', units='W m-2' + 3, standard_name="toa_outgoing_longwave_flux", units="W m-2" ) rlutcs_cube = Cube( 1, - standard_name='toa_outgoing_longwave_flux_assuming_clear_sky', - units='W m-2', + standard_name="toa_outgoing_longwave_flux_assuming_clear_sky", + units="W m-2", ) return CubeList([rlut_cube, rlutcs_cube]) @@ -24,5 +25,5 @@ def test_lwcre_calculation(cubes): derived_var = lwcre.DerivedVariable() out_cube = derived_var.calculate(cubes) np.testing.assert_equal(out_cube.data, -2) - assert out_cube.units == 'W m-2' - assert out_cube.attributes['positive'] == 'down' + assert out_cube.units == "W m-2" + assert out_cube.attributes["positive"] == "down" diff --git a/tests/unit/preprocessor/_derive/test_netcre.py b/tests/unit/preprocessor/_derive/test_netcre.py index f520b1bc13..87d25b5c67 100644 --- a/tests/unit/preprocessor/_derive/test_netcre.py +++ b/tests/unit/preprocessor/_derive/test_netcre.py @@ -1,4 +1,5 @@ """Test derivation of `netcre`.""" + import numpy as np import pytest from iris.cube import Cube, CubeList @@ -9,20 +10,20 @@ @pytest.fixture def cubes(): rlut_cube = Cube( - 3, standard_name='toa_outgoing_longwave_flux', units='W m-2' + 3, standard_name="toa_outgoing_longwave_flux", units="W m-2" ) rlutcs_cube = Cube( 1, - standard_name='toa_outgoing_longwave_flux_assuming_clear_sky', - units='W m-2', + standard_name="toa_outgoing_longwave_flux_assuming_clear_sky", + units="W m-2", ) rsut_cube = Cube( - 3, standard_name='toa_outgoing_shortwave_flux', units='W m-2' + 3, standard_name="toa_outgoing_shortwave_flux", units="W m-2" ) rsutcs_cube = Cube( 1, - standard_name='toa_outgoing_shortwave_flux_assuming_clear_sky', - units='W m-2', + standard_name="toa_outgoing_shortwave_flux_assuming_clear_sky", + units="W m-2", ) return CubeList([rlut_cube, rlutcs_cube, rsut_cube, rsutcs_cube]) @@ -32,5 +33,5 @@ def test_netcre_calculation(cubes): derived_var = netcre.DerivedVariable() out_cube = derived_var.calculate(cubes) np.testing.assert_equal(out_cube.data, -4) - assert out_cube.units == 'W m-2' - assert out_cube.attributes['positive'] == 'down' + assert out_cube.units == "W m-2" + assert out_cube.attributes["positive"] == "down" diff --git a/tests/unit/preprocessor/_derive/test_ohc.py b/tests/unit/preprocessor/_derive/test_ohc.py index d882a645f4..1df8508964 100644 --- a/tests/unit/preprocessor/_derive/test_ohc.py +++ b/tests/unit/preprocessor/_derive/test_ohc.py @@ -1,4 +1,5 @@ """Test derivation of `ohc`.""" + import cf_units import iris import numpy as np @@ -9,30 +10,34 @@ @pytest.fixture def cubes(): - volcello_name = 'ocean_volume' - thetao_name = 'sea_water_potential_temperature' - volcello_cube = 
iris.cube.Cube([[1.0, 1.2], [0.8, 0.2]], - units='m3', - standard_name=volcello_name, - var_name='volcello') - time_coord = iris.coords.DimCoord([0., 1., 2.], - standard_name='time') - thetao_cube = iris.cube.Cube([[[1.0, 1.0], [1.0, 1.0]], - [[1.0, 1.0], [1.0, 1.0]], - [[1.0, 1.0], [1.0, 1.0]]], - units='K', - standard_name=thetao_name, - var_name='thetao', - dim_coords_and_dims=[(time_coord, 0)]) + volcello_name = "ocean_volume" + thetao_name = "sea_water_potential_temperature" + volcello_cube = iris.cube.Cube( + [[1.0, 1.2], [0.8, 0.2]], + units="m3", + standard_name=volcello_name, + var_name="volcello", + ) + time_coord = iris.coords.DimCoord([0.0, 1.0, 2.0], standard_name="time") + thetao_cube = iris.cube.Cube( + [ + [[1.0, 1.0], [1.0, 1.0]], + [[1.0, 1.0], [1.0, 1.0]], + [[1.0, 1.0], [1.0, 1.0]], + ], + units="K", + standard_name=thetao_name, + var_name="thetao", + dim_coords_and_dims=[(time_coord, 0)], + ) return iris.cube.CubeList([volcello_cube, thetao_cube]) def test_ohc_calculation(cubes): derived_var = ohc.DerivedVariable() out_cube = derived_var.calculate(cubes) - assert out_cube.units == cf_units.Unit('J') + assert out_cube.units == cf_units.Unit("J") out_data = out_cube.data val = ohc.RHO_CP.points[0] - volcello_data = np.broadcast_to(cubes[0].data*val, - out_data.shape) + volcello_data = np.broadcast_to(cubes[0].data * val, out_data.shape) np.testing.assert_allclose(out_data, volcello_data) diff --git a/tests/unit/preprocessor/_derive/test_rlntcs.py b/tests/unit/preprocessor/_derive/test_rlntcs.py index 070810b279..4c032f1f93 100644 --- a/tests/unit/preprocessor/_derive/test_rlntcs.py +++ b/tests/unit/preprocessor/_derive/test_rlntcs.py @@ -1,4 +1,5 @@ """Test derivation of `rlntcs`.""" + import iris import numpy as np import pytest @@ -8,16 +9,18 @@ @pytest.fixture def cubes(): - std_name = 'toa_outgoing_longwave_flux_assuming_clear_sky' - rlutcs_cube = iris.cube.Cube([[1.0, 2.0], [0.0, -2.0]], - standard_name=std_name) - ta_cube = iris.cube.Cube([1.0], standard_name='air_temperature') + std_name = "toa_outgoing_longwave_flux_assuming_clear_sky" + rlutcs_cube = iris.cube.Cube( + [[1.0, 2.0], [0.0, -2.0]], standard_name=std_name + ) + ta_cube = iris.cube.Cube([1.0], standard_name="air_temperature") return iris.cube.CubeList([rlutcs_cube, ta_cube]) def test_rlntcs_calculation(cubes): derived_var = rlntcs.DerivedVariable() out_cube = derived_var.calculate(cubes) - np.testing.assert_allclose(out_cube.data, - np.array([[-1.0, -2.0], [0.0, 2.0]])) - assert out_cube.attributes['positive'] == 'down' + np.testing.assert_allclose( + out_cube.data, np.array([[-1.0, -2.0], [0.0, 2.0]]) + ) + assert out_cube.attributes["positive"] == "down" diff --git a/tests/unit/preprocessor/_derive/test_rlus.py b/tests/unit/preprocessor/_derive/test_rlus.py index 7ac7191f05..94bcf9b52d 100644 --- a/tests/unit/preprocessor/_derive/test_rlus.py +++ b/tests/unit/preprocessor/_derive/test_rlus.py @@ -1,4 +1,5 @@ """Test derivation of `rlus`.""" + import iris import numpy as np import pytest @@ -10,15 +11,15 @@ @pytest.fixture def cubes(): - rlds_name = 'surface_downwelling_longwave_flux_in_air' - rlns_name = 'surface_net_downward_longwave_flux' - rlds_cube = get_cube([[[100.]]], - air_pressure_coord=False, - standard_name=rlds_name) + rlds_name = "surface_downwelling_longwave_flux_in_air" + rlns_name = "surface_net_downward_longwave_flux" + rlds_cube = get_cube( + [[[100.0]]], air_pressure_coord=False, standard_name=rlds_name + ) rlds_cube.attributes["positive"] = "down" - rlns_cube = 
get_cube([[[50.0]]], - air_pressure_coord=False, - standard_name=rlns_name) + rlns_cube = get_cube( + [[[50.0]]], air_pressure_coord=False, standard_name=rlns_name + ) rlns_cube.attributes["positive"] = "down" rlns_cube.coord("longitude").var_name = "lon" @@ -30,6 +31,5 @@ def cubes(): def test_rlntcs_calculation(cubes): derived_var = rlus.DerivedVariable() out_cube = derived_var.calculate(cubes) - np.testing.assert_allclose(out_cube.data, - np.array([[[50.0]]])) - assert out_cube.attributes['positive'] == 'up' + np.testing.assert_allclose(out_cube.data, np.array([[[50.0]]])) + assert out_cube.attributes["positive"] == "up" diff --git a/tests/unit/preprocessor/_derive/test_rsnt.py b/tests/unit/preprocessor/_derive/test_rsnt.py index 4cc16f1709..5cff4ef690 100644 --- a/tests/unit/preprocessor/_derive/test_rsnt.py +++ b/tests/unit/preprocessor/_derive/test_rsnt.py @@ -1,4 +1,5 @@ """Test derivation of `rsnt`.""" + import numpy as np import pytest from iris.cube import Cube, CubeList @@ -9,10 +10,10 @@ @pytest.fixture def cubes(): rsdt_cube = Cube( - 3, standard_name='toa_incoming_shortwave_flux', units='W m-2' + 3, standard_name="toa_incoming_shortwave_flux", units="W m-2" ) rsut_cube = Cube( - 1, standard_name='toa_outgoing_shortwave_flux', units='W m-2' + 1, standard_name="toa_outgoing_shortwave_flux", units="W m-2" ) return CubeList([rsdt_cube, rsut_cube]) @@ -22,5 +23,5 @@ def test_rsnt_calculation(cubes): derived_var = rsnt.DerivedVariable() out_cube = derived_var.calculate(cubes) np.testing.assert_equal(out_cube.data, 2) - assert out_cube.units == 'W m-2' - assert out_cube.attributes['positive'] == 'down' + assert out_cube.units == "W m-2" + assert out_cube.attributes["positive"] == "down" diff --git a/tests/unit/preprocessor/_derive/test_rsntcs.py b/tests/unit/preprocessor/_derive/test_rsntcs.py index 290f123096..4ab30bb204 100644 --- a/tests/unit/preprocessor/_derive/test_rsntcs.py +++ b/tests/unit/preprocessor/_derive/test_rsntcs.py @@ -1,4 +1,5 @@ """Test derivation of `rsntcs`.""" + import iris import numpy as np import pytest @@ -8,19 +9,22 @@ @pytest.fixture def cubes(): - rsdt_name = 'toa_incoming_shortwave_flux' - rsutcs_name = 'toa_outgoing_shortwave_flux_assuming_clear_sky' - rsdt_cube = iris.cube.Cube([[1.0, 2.0], [0.0, -2.0]], - standard_name=rsdt_name) - rsutcs_cube = iris.cube.Cube([[5.0, -1.2], [0.8, -3.0]], - standard_name=rsutcs_name) - ta_cube = iris.cube.Cube([1.0], standard_name='air_temperature') + rsdt_name = "toa_incoming_shortwave_flux" + rsutcs_name = "toa_outgoing_shortwave_flux_assuming_clear_sky" + rsdt_cube = iris.cube.Cube( + [[1.0, 2.0], [0.0, -2.0]], standard_name=rsdt_name + ) + rsutcs_cube = iris.cube.Cube( + [[5.0, -1.2], [0.8, -3.0]], standard_name=rsutcs_name + ) + ta_cube = iris.cube.Cube([1.0], standard_name="air_temperature") return iris.cube.CubeList([rsdt_cube, rsutcs_cube, ta_cube]) def test_rsntcs_calculation(cubes): derived_var = rsntcs.DerivedVariable() out_cube = derived_var.calculate(cubes) - np.testing.assert_allclose(out_cube.data, - np.array([[-4.0, 3.2], [-0.8, 1.0]])) - assert out_cube.attributes['positive'] == 'down' + np.testing.assert_allclose( + out_cube.data, np.array([[-4.0, 3.2], [-0.8, 1.0]]) + ) + assert out_cube.attributes["positive"] == "down" diff --git a/tests/unit/preprocessor/_derive/test_rsntcsnorm.py b/tests/unit/preprocessor/_derive/test_rsntcsnorm.py index 848636cb36..cbca3962cf 100644 --- a/tests/unit/preprocessor/_derive/test_rsntcsnorm.py +++ b/tests/unit/preprocessor/_derive/test_rsntcsnorm.py @@ -1,4 +1,5 @@ 
"""Test derivation of `rsntcs`.""" + import iris import numpy as np import pytest @@ -9,28 +10,38 @@ @pytest.fixture def cubes(): # names - rsdscs_name = \ - 'surface_downwelling_shortwave_flux_in_air_assuming_clear_sky' - rsdt_name = 'toa_incoming_shortwave_flux' - rsuscs_name = 'surface_upwelling_shortwave_flux_in_air_assuming_clear_sky' - rsutcs_name = 'toa_outgoing_shortwave_flux_assuming_clear_sky' - attributes = {'positive': 'down', 'test': 1} + rsdscs_name = ( + "surface_downwelling_shortwave_flux_in_air_assuming_clear_sky" + ) + rsdt_name = "toa_incoming_shortwave_flux" + rsuscs_name = "surface_upwelling_shortwave_flux_in_air_assuming_clear_sky" + rsutcs_name = "toa_outgoing_shortwave_flux_assuming_clear_sky" + attributes = {"positive": "down", "test": 1} # cubes - rsdscs_cube = iris.cube.Cube([[1.0, 2.0], [0.0, -2.0]], - standard_name=rsdscs_name, - attributes=attributes) - rsdt_cube = iris.cube.Cube([[1.0, 2.0], [2.0, -2.0]], - standard_name=rsdt_name, - attributes=attributes) - rsuscs_cube = iris.cube.Cube([[1.0, 2.0], [0.0, -2.0]], - standard_name=rsuscs_name, - attributes=attributes) - rsutcs_cube = iris.cube.Cube([[5.0, -1.2], [0.8, -3.0]], - standard_name=rsutcs_name, - attributes=attributes) - return iris.cube.CubeList([rsdscs_cube, rsdt_cube, - rsuscs_cube, rsutcs_cube]) + rsdscs_cube = iris.cube.Cube( + [[1.0, 2.0], [0.0, -2.0]], + standard_name=rsdscs_name, + attributes=attributes, + ) + rsdt_cube = iris.cube.Cube( + [[1.0, 2.0], [2.0, -2.0]], + standard_name=rsdt_name, + attributes=attributes, + ) + rsuscs_cube = iris.cube.Cube( + [[1.0, 2.0], [0.0, -2.0]], + standard_name=rsuscs_name, + attributes=attributes, + ) + rsutcs_cube = iris.cube.Cube( + [[5.0, -1.2], [0.8, -3.0]], + standard_name=rsutcs_name, + attributes=attributes, + ) + return iris.cube.CubeList( + [rsdscs_cube, rsdt_cube, rsuscs_cube, rsutcs_cube] + ) def test_rsntcs_calculation(cubes): @@ -39,15 +50,16 @@ def test_rsntcs_calculation(cubes): # (rsdscs_cube - rsuscs_cube)) / rsdt_cube) * 100.0 derived_var = rsnstcsnorm.DerivedVariable() out_cube = derived_var.calculate(cubes) - np.testing.assert_allclose(out_cube.data, - np.array([[-400., 160.], [60., -50.0]])) - assert out_cube.units == '%' + np.testing.assert_allclose( + out_cube.data, np.array([[-400.0, 160.0], [60.0, -50.0]]) + ) + assert out_cube.units == "%" def test_rsntcs_calculation_no_positive_attr(cubes): for cube in cubes: - cube.attributes.pop('positive') - assert cube.attributes == {'test': 1} + cube.attributes.pop("positive") + assert cube.attributes == {"test": 1} derived_var = rsnstcsnorm.DerivedVariable() out_cube = derived_var.calculate(cubes) - assert 'positive' not in out_cube.attributes + assert "positive" not in out_cube.attributes diff --git a/tests/unit/preprocessor/_derive/test_rsus.py b/tests/unit/preprocessor/_derive/test_rsus.py index 7636913561..2dd7ef888f 100644 --- a/tests/unit/preprocessor/_derive/test_rsus.py +++ b/tests/unit/preprocessor/_derive/test_rsus.py @@ -1,4 +1,5 @@ """Test derivation of `rsus`.""" + import iris import numpy as np import pytest @@ -10,15 +11,15 @@ @pytest.fixture def cubes(): - rsds_name = 'surface_downwelling_shortwave_flux_in_air' - rsns_name = 'surface_net_downward_shortwave_flux' - rsds_cube = get_cube([[[100.]]], - air_pressure_coord=False, - standard_name=rsds_name) + rsds_name = "surface_downwelling_shortwave_flux_in_air" + rsns_name = "surface_net_downward_shortwave_flux" + rsds_cube = get_cube( + [[[100.0]]], air_pressure_coord=False, standard_name=rsds_name + ) 
rsds_cube.attributes["positive"] = "down" - rsns_cube = get_cube([[[50.0]]], - air_pressure_coord=False, - standard_name=rsns_name) + rsns_cube = get_cube( + [[[50.0]]], air_pressure_coord=False, standard_name=rsns_name + ) rsns_cube.attributes["positive"] = "down" rsns_cube.coord("longitude").var_name = "lon" @@ -30,6 +31,5 @@ def cubes(): def test_rsntcs_calculation(cubes): derived_var = rsus.DerivedVariable() out_cube = derived_var.calculate(cubes) - np.testing.assert_allclose(out_cube.data, - np.array([[[50.0]]])) - assert out_cube.attributes['positive'] == 'up' + np.testing.assert_allclose(out_cube.data, np.array([[[50.0]]])) + assert out_cube.attributes["positive"] == "up" diff --git a/tests/unit/preprocessor/_derive/test_sfcwind.py b/tests/unit/preprocessor/_derive/test_sfcwind.py index a249c4fe9c..1ec7c76d33 100644 --- a/tests/unit/preprocessor/_derive/test_sfcwind.py +++ b/tests/unit/preprocessor/_derive/test_sfcwind.py @@ -1,4 +1,5 @@ """Test derivation of ``sfcwind``.""" + import numpy as np import pytest from iris.cube import CubeList @@ -11,16 +12,20 @@ @pytest.fixture def cubes(): """Input cubes for derivation of ``sfcwind``.""" - uas_cube = get_cube([[[3.0]]], - air_pressure_coord=False, - standard_name='eastward_wind', - var_name='uas', - units='m s-1') - vas_cube = get_cube([[[4.0]]], - air_pressure_coord=False, - standard_name='northward_wind', - var_name='vas', - units='m s-1') + uas_cube = get_cube( + [[[3.0]]], + air_pressure_coord=False, + standard_name="eastward_wind", + var_name="uas", + units="m s-1", + ) + vas_cube = get_cube( + [[[4.0]]], + air_pressure_coord=False, + standard_name="northward_wind", + var_name="vas", + units="m s-1", + ) return CubeList([uas_cube, vas_cube]) @@ -29,21 +34,17 @@ def test_sfcwind_calculate(cubes): derived_var = sfcwind.DerivedVariable() required_vars = derived_var.required("CMIP5") expected_required_vars = [ - { - 'short_name': 'uas' - }, - { - 'short_name': 'vas' - }, + {"short_name": "uas"}, + {"short_name": "vas"}, ] assert required_vars == expected_required_vars out_cube = derived_var.calculate(cubes) assert out_cube.shape == (1, 1, 1) - assert out_cube.units == 'm s-1' - assert out_cube.coords('time') - assert out_cube.coords('latitude') - assert out_cube.coords('longitude') + assert out_cube.units == "m s-1" + assert out_cube.coords("time") + assert out_cube.coords("latitude") + assert out_cube.coords("longitude") np.testing.assert_allclose(out_cube.data, [[[5.0]]]) - np.testing.assert_allclose(out_cube.coord('time').points, [0.0]) - np.testing.assert_allclose(out_cube.coord('latitude').points, [45.0]) - np.testing.assert_allclose(out_cube.coord('longitude').points, [10.0]) + np.testing.assert_allclose(out_cube.coord("time").points, [0.0]) + np.testing.assert_allclose(out_cube.coord("latitude").points, [45.0]) + np.testing.assert_allclose(out_cube.coord("longitude").points, [10.0]) diff --git a/tests/unit/preprocessor/_derive/test_shared.py b/tests/unit/preprocessor/_derive/test_shared.py index 23b4ee9047..aa0de3b234 100644 --- a/tests/unit/preprocessor/_derive/test_shared.py +++ b/tests/unit/preprocessor/_derive/test_shared.py @@ -12,25 +12,44 @@ def get_cube(data, air_pressure_coord=True, depth_coord=False, **kwargs): """Get sample cube.""" - time_coord = iris.coords.DimCoord([0.0], standard_name='time', - var_name='time', - units='days since 1950-01-01 00:00:00') - plev_coord = iris.coords.DimCoord([90000.0, 80000.0], - standard_name='air_pressure', - var_name='plev', units='Pa') - dpth_coord = iris.coords.DimCoord([100.0, 
600.0, 7000.0], - standard_name='depth', - var_name='lev', units='m') - lat_coord = iris.coords.DimCoord([45.0], standard_name='latitude', - var_name='lat', units='degrees') - lon_coord = iris.coords.DimCoord([10.0], standard_name='longitude', - var_name='lon', units='degrees') + time_coord = iris.coords.DimCoord( + [0.0], + standard_name="time", + var_name="time", + units="days since 1950-01-01 00:00:00", + ) + plev_coord = iris.coords.DimCoord( + [90000.0, 80000.0], + standard_name="air_pressure", + var_name="plev", + units="Pa", + ) + dpth_coord = iris.coords.DimCoord( + [100.0, 600.0, 7000.0], + standard_name="depth", + var_name="lev", + units="m", + ) + lat_coord = iris.coords.DimCoord( + [45.0], standard_name="latitude", var_name="lat", units="degrees" + ) + lon_coord = iris.coords.DimCoord( + [10.0], standard_name="longitude", var_name="lon", units="degrees" + ) if air_pressure_coord: - coord_specs = [(time_coord, 0), (plev_coord, 1), (lat_coord, 2), - (lon_coord, 3)] + coord_specs = [ + (time_coord, 0), + (plev_coord, 1), + (lat_coord, 2), + (lon_coord, 3), + ] elif depth_coord: - coord_specs = [(time_coord, 0), (dpth_coord, 1), (lat_coord, 2), - (lon_coord, 3)] + coord_specs = [ + (time_coord, 0), + (dpth_coord, 1), + (lat_coord, 2), + (lon_coord, 3), + ] else: coord_specs = [(time_coord, 0), (lat_coord, 1), (lon_coord, 2)] cube = iris.cube.Cube(data, dim_coords_and_dims=coord_specs, **kwargs) @@ -39,28 +58,39 @@ def get_cube(data, air_pressure_coord=True, depth_coord=False, **kwargs): def test_column_average(): """Test calculation of column-average.""" - cube = get_cube([[[[1.0]], [[2.0]]]], air_pressure_coord=True, - var_name='ch4', units='1') - hus_cube = get_cube([[[[0.2]], [[0.2]]]], air_pressure_coord=True, - var_name='hus', units='1') - zg_cube = get_cube([[[100.0]]], air_pressure_coord=False, var_name='zg', - units='m') - ps_cube = get_cube([[[100000.0]]], air_pressure_coord=False, var_name='ps', - units='Pa') + cube = get_cube( + [[[[1.0]], [[2.0]]]], + air_pressure_coord=True, + var_name="ch4", + units="1", + ) + hus_cube = get_cube( + [[[[0.2]], [[0.2]]]], + air_pressure_coord=True, + var_name="hus", + units="1", + ) + zg_cube = get_cube( + [[[100.0]]], air_pressure_coord=False, var_name="zg", units="m" + ) + ps_cube = get_cube( + [[[100000.0]]], air_pressure_coord=False, var_name="ps", units="Pa" + ) x_cube = column_average(cube, hus_cube, zg_cube, ps_cube) assert x_cube.shape == (1, 1, 1) - assert x_cube.units == '1' - assert x_cube.coords('time') - assert x_cube.coords('air_pressure') - assert x_cube.coords('latitude') - assert x_cube.coords('longitude') + assert x_cube.units == "1" + assert x_cube.coords("time") + assert x_cube.coords("air_pressure") + assert x_cube.coords("latitude") + assert x_cube.coords("longitude") np.testing.assert_allclose(x_cube.data, [[[1.85]]]) - np.testing.assert_allclose(x_cube.coord('time').points, [0.0]) - np.testing.assert_allclose(x_cube.coord('air_pressure').points, 85000.0) - np.testing.assert_allclose(x_cube.coord('air_pressure').bounds, - [[80000.0, 90000.0]]) - np.testing.assert_allclose(x_cube.coord('latitude').points, [45.0]) - np.testing.assert_allclose(x_cube.coord('longitude').points, [10.0]) + np.testing.assert_allclose(x_cube.coord("time").points, [0.0]) + np.testing.assert_allclose(x_cube.coord("air_pressure").points, 85000.0) + np.testing.assert_allclose( + x_cube.coord("air_pressure").bounds, [[80000.0, 90000.0]] + ) + np.testing.assert_allclose(x_cube.coord("latitude").points, [45.0]) + 
np.testing.assert_allclose(x_cube.coord("longitude").points, [10.0]) def test_col_is_not_monotonic(): @@ -85,14 +115,18 @@ def test_keeping_column_length(): col = np.array([1000, 900, 800]) col = np.insert(col, 0, plev) col = np.append(col, top_limit) - assert (len(_get_pressure_level_widths(col, air_pressure_axis=0)) == - len(col) - 2) + assert ( + len(_get_pressure_level_widths(col, air_pressure_axis=0)) + == len(col) - 2 + ) col = np.atleast_2d(col) - assert (_get_pressure_level_widths(col, air_pressure_axis=1).shape == - (1, 3)) + assert _get_pressure_level_widths(col, air_pressure_axis=1).shape == (1, 3) col = np.atleast_3d(col) - assert (_get_pressure_level_widths(col, air_pressure_axis=1).shape == - (1, 3, 1)) + assert _get_pressure_level_widths(col, air_pressure_axis=1).shape == ( + 1, + 3, + 1, + ) def test_low_lev_surf_press(): @@ -103,14 +137,19 @@ def test_low_lev_surf_press(): col = np.insert(col, 0, plev) col = np.append(col, top_limit) result = np.array([50, 100, 845]) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=0), - result) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=0), result + ) col = np.atleast_2d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_2d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_2d(result), + ) col = np.atleast_3d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_3d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_3d(result), + ) def test_low_lev_above_surf_press(): @@ -121,14 +160,19 @@ def test_low_lev_above_surf_press(): col = np.insert(col, 0, plev) col = np.append(col, top_limit) result = np.array([70, 100, 845]) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=0), - result) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=0), result + ) col = np.atleast_2d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_2d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_2d(result), + ) col = np.atleast_3d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_3d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_3d(result), + ) def test_low_lev_below_surf_press(): @@ -139,27 +183,37 @@ def test_low_lev_below_surf_press(): col = np.insert(col, 0, plev) col = np.append(col, top_limit) result = np.array([0, 120, 845]) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=0), - result) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=0), result + ) col = np.atleast_2d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_2d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_2d(result), + ) col = np.atleast_3d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_3d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_3d(result), + ) col = np.array([np.NaN, np.NaN, 900, 800]) col = np.insert(col, 0, plev) col = np.append(col, top_limit) result = np.array([0, 0, 120, 845]) - assert np.array_equal(_get_pressure_level_widths(col, 
air_pressure_axis=0), - result) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=0), result + ) col = np.atleast_2d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_2d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_2d(result), + ) col = np.atleast_3d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_3d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_3d(result), + ) def test_high_level_top_limit(): @@ -170,14 +224,19 @@ def test_high_level_top_limit(): col = np.insert(col, 0, plev) col = np.append(col, top_limit) result = np.array([70, 50 + 895 / 2, 895 / 2]) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=0), - result) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=0), result + ) col = np.atleast_2d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_2d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_2d(result), + ) col = np.atleast_3d(col) - assert np.array_equal(_get_pressure_level_widths(col, air_pressure_axis=1), - np.atleast_3d(result)) + assert np.array_equal( + _get_pressure_level_widths(col, air_pressure_axis=1), + np.atleast_3d(result), + ) def test_high_level_above_top_limit(): diff --git a/tests/unit/preprocessor/_derive/test_siextent.py b/tests/unit/preprocessor/_derive/test_siextent.py index 4b362d6731..576a678daa 100644 --- a/tests/unit/preprocessor/_derive/test_siextent.py +++ b/tests/unit/preprocessor/_derive/test_siextent.py @@ -1,4 +1,5 @@ """Test derivation of `ohc`.""" + import cf_units import iris import numpy as np @@ -10,53 +11,50 @@ @pytest.fixture def cubes_sic(): - sic_name = 'sea_ice_area_fraction' - time_coord = iris.coords.DimCoord([0., 1., 2.], - standard_name='time') - sic_cube = iris.cube.Cube([[[20, 10], [10, 10]], - [[10, 10], [10, 10]], - [[10, 10], [10, 10]]], - units='%', - standard_name=sic_name, - var_name='sic', - dim_coords_and_dims=[(time_coord, 0)]) + sic_name = "sea_ice_area_fraction" + time_coord = iris.coords.DimCoord([0.0, 1.0, 2.0], standard_name="time") + sic_cube = iris.cube.Cube( + [[[20, 10], [10, 10]], [[10, 10], [10, 10]], [[10, 10], [10, 10]]], + units="%", + standard_name=sic_name, + var_name="sic", + dim_coords_and_dims=[(time_coord, 0)], + ) return iris.cube.CubeList([sic_cube]) @pytest.fixture def cubes_siconca(): - sic_name = 'sea_ice_area_fraction' - time_coord = iris.coords.DimCoord([0., 1., 2.], - standard_name='time') - sic_cube = iris.cube.Cube([[[20, 10], [10, 10]], - [[10, 10], [10, 10]], - [[10, 10], [10, 10]]], - units='%', - standard_name=sic_name, - var_name='siconca', - dim_coords_and_dims=[(time_coord, 0)]) + sic_name = "sea_ice_area_fraction" + time_coord = iris.coords.DimCoord([0.0, 1.0, 2.0], standard_name="time") + sic_cube = iris.cube.Cube( + [[[20, 10], [10, 10]], [[10, 10], [10, 10]], [[10, 10], [10, 10]]], + units="%", + standard_name=sic_name, + var_name="siconca", + dim_coords_and_dims=[(time_coord, 0)], + ) return iris.cube.CubeList([sic_cube]) @pytest.fixture def cubes(): - sic_name = 'sea_ice_area_fraction' - time_coord = iris.coords.DimCoord([0., 1., 2.], - standard_name='time') - sic_cube = iris.cube.Cube([[[20, 10], [10, 10]], - [[10, 10], [10, 10]], - [[10, 10], [10, 10]]], - units='%', - standard_name=sic_name, - 
var_name='sic', - dim_coords_and_dims=[(time_coord, 0)]) - siconca_cube = iris.cube.Cube([[[20, 10], [10, 10]], - [[10, 10], [10, 10]], - [[10, 10], [10, 10]]], - units='%', - standard_name=sic_name, - var_name='siconca', - dim_coords_and_dims=[(time_coord, 0)]) + sic_name = "sea_ice_area_fraction" + time_coord = iris.coords.DimCoord([0.0, 1.0, 2.0], standard_name="time") + sic_cube = iris.cube.Cube( + [[[20, 10], [10, 10]], [[10, 10], [10, 10]], [[10, 10], [10, 10]]], + units="%", + standard_name=sic_name, + var_name="sic", + dim_coords_and_dims=[(time_coord, 0)], + ) + siconca_cube = iris.cube.Cube( + [[[20, 10], [10, 10]], [[10, 10], [10, 10]], [[10, 10], [10, 10]]], + units="%", + standard_name=sic_name, + var_name="siconca", + dim_coords_and_dims=[(time_coord, 0)], + ) return iris.cube.CubeList([sic_cube, siconca_cube]) @@ -64,11 +62,11 @@ def test_siextent_calculation_sic(cubes_sic): """Test function ``calculate`` when sic is available.""" derived_var = siextent.DerivedVariable() out_cube = derived_var.calculate(cubes_sic) - assert out_cube.units == cf_units.Unit('m2') + assert out_cube.units == cf_units.Unit("m2") out_data = out_cube.data expected = np.ma.ones_like(cubes_sic[0].data) expected.mask = True - expected[0][0][0] = 1. + expected[0][0][0] = 1.0 np.testing.assert_array_equal(out_data.mask, expected.mask) np.testing.assert_array_equal(out_data[0][0][0], expected[0][0][0]) @@ -77,11 +75,11 @@ def test_siextent_calculation_siconca(cubes_siconca): """Test function ``calculate`` when siconca is available.""" derived_var = siextent.DerivedVariable() out_cube = derived_var.calculate(cubes_siconca) - assert out_cube.units == cf_units.Unit('m2') + assert out_cube.units == cf_units.Unit("m2") out_data = out_cube.data expected = np.ma.ones_like(cubes_siconca[0].data) expected.mask = True - expected[0][0][0] = 1. + expected[0][0][0] = 1.0 np.testing.assert_array_equal(out_data.mask, expected.mask) np.testing.assert_array_equal(out_data[0][0][0], expected[0][0][0]) @@ -90,20 +88,22 @@ def test_siextent_calculation(cubes): """Test function ``calculate`` when sic and siconca are available.""" derived_var = siextent.DerivedVariable() out_cube = derived_var.calculate(cubes) - assert out_cube.units == cf_units.Unit('m2') + assert out_cube.units == cf_units.Unit("m2") out_data = out_cube.data expected = np.ma.ones_like(cubes[0].data) expected.mask = True - expected[0][0][0] = 1. + expected[0][0][0] = 1.0 np.testing.assert_array_equal(out_data.mask, expected.mask) np.testing.assert_array_equal(out_data[0][0][0], expected[0][0][0]) def test_siextent_no_data(cubes_sic): derived_var = siextent.DerivedVariable() - cubes_sic[0].var_name = 'wrong' - msg = ('Derivation of siextent failed due to missing variables ' - 'sic and siconca.') + cubes_sic[0].var_name = "wrong" + msg = ( + "Derivation of siextent failed due to missing variables " + "sic and siconca." 
+ ) with pytest.raises(RecipeError, match=msg): derived_var.calculate(cubes_sic) @@ -113,6 +113,6 @@ def test_siextent_required(): derived_var = siextent.DerivedVariable() output = derived_var.required(None) assert output == [ - {'short_name': 'sic', 'optional': 'true'}, - {'short_name': 'siconca', 'optional': 'true'} + {"short_name": "sic", "optional": "true"}, + {"short_name": "siconca", "optional": "true"}, ] diff --git a/tests/unit/preprocessor/_derive/test_sm.py b/tests/unit/preprocessor/_derive/test_sm.py index 7417b8f190..bdc9852aa6 100644 --- a/tests/unit/preprocessor/_derive/test_sm.py +++ b/tests/unit/preprocessor/_derive/test_sm.py @@ -8,18 +8,18 @@ def test_sm(): points = da.arange(0, 4, 2).astype(np.float32) - bounds = da.asarray([[-1., 1.], [1., 3]]) + bounds = da.asarray([[-1.0, 1.0], [1.0, 3]]) depth = iris.coords.AuxCoord( points, bounds=bounds, - standard_name='depth', + standard_name="depth", ) cube = iris.cube.Cube( da.asarray([0, 998.2]), - var_name='mrsos', + var_name="mrsos", aux_coords_and_dims=[ (depth, 0), ], ) @@ -27,6 +27,6 @@ result = DerivedVariable.calculate(iris.cube.CubeList([cube])) assert result.has_lazy_data() - assert result.coord('depth').has_lazy_points() - assert result.coord('depth').has_lazy_bounds() + assert result.coord("depth").has_lazy_points() + assert result.coord("depth").has_lazy_bounds() assert_array_equal(result.data, np.array([0, 0.5])) diff --git a/tests/unit/preprocessor/_derive/test_soz.py b/tests/unit/preprocessor/_derive/test_soz.py new file mode 100644 index 0000000000..ac977f1e0b --- /dev/null +++ b/tests/unit/preprocessor/_derive/test_soz.py @@ -0,0 +1,127 @@ +"""Test derivation of ``soz``.""" + +import dask.array as da +import iris +import numpy as np +import pytest + +import esmvalcore.preprocessor._derive.soz as soz + +from .test_toz import get_masked_o3_cube, get_masked_o3_hybrid_plevs_cube + + +def get_o3_cube(): + """Get ``o3`` input cube.""" + o3_cube = get_masked_o3_cube() + o3_cube.data = da.ma.masked_greater( + [ + [ + [[500.0, 700.0], [800.0, 900.0]], + [[1251.0, 1249.0], [1260.0, 1200.0]], + [[1000.0, 2000.0], [3000.0, 12000.0]], + ] + ], + 10000.0, + ) + o3_cube.units = "1e-10" + return o3_cube + + +@pytest.fixture +def cubes(): + """Input cubes for derivation of ``soz``.""" + o3_cube = get_o3_cube() + return iris.cube.CubeList([o3_cube]) + + +@pytest.fixture +def cubes_no_lon(): + """Zonal mean input cubes for derivation of ``soz``.""" + o3_cube = get_o3_cube() + o3_cube = o3_cube.collapsed("longitude", iris.analysis.MEAN) + o3_cube.remove_coord("longitude") + return iris.cube.CubeList([o3_cube]) + + +@pytest.fixture +def cubes_hybrid_plevs(): + """Input cubes with hybrid pressure levels for derivation of ``soz``.""" + o3_cube = get_masked_o3_hybrid_plevs_cube() + o3_cube.data = da.ma.masked_greater( + [ + [ + [[500.0, 700.0], [800.0, 900.0]], + [[1251.0, 1249.0], [1260.0, 1200.0]], + [[1000.0, 2000.0], [3000.0, 12000.0]], + ] + ], + 10000.0, + ) + o3_cube.units = "1e-10" + return iris.cube.CubeList([o3_cube]) + + +def test_soz_calculate(cubes): + """Test function ``calculate``.""" + derived_var = soz.DerivedVariable() + + out_cube = derived_var.calculate(cubes) + + assert out_cube.units == "m" + assert out_cube.shape == (1, 2, 2) + expected_data = np.ma.masked_invalid( + [ + [ + [29.543266581831194e-5, 110.2066965482645e-5], + [195.06585289042815e-5, np.nan], + ] + ], + ) + expected_mask = [[[False, False], [False, True]]] + np.testing.assert_allclose(out_cube.data, expected_data) +
np.testing.assert_allclose(out_cube.data.mask, expected_mask) + + +def test_soz_calculate_no_lon(cubes_no_lon): + """Test function ``calculate`` for zonal mean cubes.""" + derived_var = soz.DerivedVariable() + + out_cube = derived_var.calculate(cubes_no_lon) + + assert out_cube.units == "m" + assert out_cube.shape == (1, 2, 1) + assert not np.ma.is_masked(out_cube.data) + np.testing.assert_allclose( + out_cube.data, [[[82.65502241119836e-5], [165.31004482239672e-5]]] + ) + + +def test_soz_calculate_hybrid_plevs(cubes_hybrid_plevs): + """Test function ``calculate`` for cubes with hybrid pressure levels.""" + derived_var = soz.DerivedVariable() + + out_cube = derived_var.calculate(cubes_hybrid_plevs) + + assert out_cube.units == "m" + assert out_cube.shape == (1, 2, 2) + expected_data = np.ma.masked_invalid( + [[[np.nan, 32.40347475318536e-5], [44.53039332403313e-5, np.nan]]] + ) + expected_mask = [[[True, False], [False, True]]] + np.testing.assert_allclose(out_cube.data, expected_data) + np.testing.assert_allclose(out_cube.data.mask, expected_mask) + + +@pytest.mark.parametrize( + "project,out", + [ + ("CMIP5", [{"short_name": "tro3"}]), + ("TEST", [{"short_name": "tro3"}]), + ("CMIP6", [{"short_name": "o3"}]), + ], +) +def test_soz_required(project, out): + """Test function ``required``.""" + derived_var = soz.DerivedVariable() + output = derived_var.required(project) + assert output == out diff --git a/tests/unit/preprocessor/_derive/test_swcre.py b/tests/unit/preprocessor/_derive/test_swcre.py index 165477061a..3656343a04 100644 --- a/tests/unit/preprocessor/_derive/test_swcre.py +++ b/tests/unit/preprocessor/_derive/test_swcre.py @@ -1,4 +1,5 @@ """Test derivation of `swcre`.""" + import numpy as np import pytest from iris.cube import Cube, CubeList @@ -9,12 +10,12 @@ @pytest.fixture def cubes(): rsut_cube = Cube( - 3, standard_name='toa_outgoing_shortwave_flux', units='W m-2' + 3, standard_name="toa_outgoing_shortwave_flux", units="W m-2" ) rsutcs_cube = Cube( 1, - standard_name='toa_outgoing_shortwave_flux_assuming_clear_sky', - units='W m-2', + standard_name="toa_outgoing_shortwave_flux_assuming_clear_sky", + units="W m-2", ) return CubeList([rsut_cube, rsutcs_cube]) @@ -24,5 +25,5 @@ def test_swcre_calculation(cubes): derived_var = swcre.DerivedVariable() out_cube = derived_var.calculate(cubes) np.testing.assert_equal(out_cube.data, -2) - assert out_cube.units == 'W m-2' - assert out_cube.attributes['positive'] == 'down' + assert out_cube.units == "W m-2" + assert out_cube.attributes["positive"] == "down" diff --git a/tests/unit/preprocessor/_derive/test_toz.py b/tests/unit/preprocessor/_derive/test_toz.py index 262deab1e0..82e17ae811 100644 --- a/tests/unit/preprocessor/_derive/test_toz.py +++ b/tests/unit/preprocessor/_derive/test_toz.py @@ -1,49 +1,117 @@ """Test derivation of ``toz``.""" + import dask.array as da import iris import numpy as np import pytest import esmvalcore.preprocessor._derive.toz as toz + from .test_co2s import get_coord_spec, get_ps_cube -@pytest.fixture -def masked_cubes(): - """Masked O3 cube.""" +def get_masked_o3_cube(): + """Get masked ``o3`` cube.""" coord_spec = get_coord_spec() - o3_data = da.ma.masked_less([[[[0.0, -1.0], - [-1.0, -1.0]], - [[1.0, 2.0], - [3.0, -1.0]], - [[2.0, 2.0], - [2.0, 2.0]]]], 0.0) + o3_data = da.ma.masked_less( + [ + [ + [[0.0, -1.0], [-1.0, -1.0]], + [[1.0, 2.0], [3.0, -1.0]], + [[2.0, 2.0], [2.0, 2.0]], + ] + ], + 0.0, + ) o3_cube = iris.cube.Cube( o3_data, - var_name='o3', - standard_name='mole_fraction_of_ozone_in_air', 
- units='1e-9', + var_name="o3", + standard_name="mole_fraction_of_ozone_in_air", + units="1e-9", dim_coords_and_dims=coord_spec, ) + return o3_cube + + +def get_masked_o3_hybrid_plevs_cube(): + """Get masked ``o3`` cube with hybrid pressure levels.""" + o3_cube = get_masked_o3_cube() + o3_cube.remove_coord("air_pressure") + + ap_coord = iris.coords.AuxCoord( + [0.0, 10000.0, 0.0], var_name="ap", units="Pa" + ) + b_coord = iris.coords.AuxCoord([0.95, 0.8, 0.7], var_name="b", units="1") + ps_coord = iris.coords.AuxCoord( + [[[100000.0, 100000.0], [100000.0, 100000.0]]], + var_name="ps", + units="Pa", + ) + z_coord = iris.coords.DimCoord( + [0.95, 0.9, 0.7], + var_name="lev", + units="1", + attributes={"positive": "down"}, + ) + o3_cube.add_aux_coord(ap_coord, 1) + o3_cube.add_aux_coord(b_coord, 1) + o3_cube.add_aux_coord(ps_coord, (0, 2, 3)) + o3_cube.add_dim_coord(z_coord, 1) + + aux_factory = iris.aux_factory.HybridPressureFactory( + delta=ap_coord, sigma=b_coord, surface_air_pressure=ps_coord + ) + o3_cube.add_aux_factory(aux_factory) + + return o3_cube + + +@pytest.fixture +def masked_cubes(): + """Masked O3 cube.""" + o3_cube = get_masked_o3_cube() ps_cube = get_ps_cube() return iris.cube.CubeList([o3_cube, ps_cube]) +@pytest.fixture +def masked_cubes_no_lon(): + """Masked zonal mean O3 cube.""" + o3_cube = get_masked_o3_cube() + o3_cube = o3_cube.collapsed("longitude", iris.analysis.MEAN) + o3_cube.remove_coord("longitude") + ps_cube = get_ps_cube() + ps_cube.data = [[[101300.0, 101300.0], [101300.0, 101300.0]]] + return iris.cube.CubeList([o3_cube, ps_cube]) + + +@pytest.fixture +def masked_cubes_hybrid_plevs(): + """Masked O3 cube on hybrid pressure levels.""" + o3_cube = get_masked_o3_hybrid_plevs_cube() + ps_cube = get_ps_cube() + ps_cube.data = [[[101300.0, 101300.0], [101300.0, 101300.0]]] + return iris.cube.CubeList([o3_cube, ps_cube]) + + @pytest.fixture def unmasked_cubes(): """Unmasked O3 cube.""" coord_spec = get_coord_spec() - o3_data = da.array([[[[2.0, 1.0], - [0.8, 1.0]], - [[1.5, 0.8], - [2.0, 3.0]], - [[4.0, 1.0], - [3.0, 2.0]]]]) + o3_data = da.array( + [ + [ + [[2.0, 1.0], [0.8, 1.0]], + [[1.5, 0.8], [2.0, 3.0]], + [[4.0, 1.0], [3.0, 2.0]], + ] + ], + ) o3_cube = iris.cube.Cube( o3_data, - var_name='o3', - standard_name='mole_fraction_of_ozone_in_air', - units='1e-9', + var_name="o3", + standard_name="mole_fraction_of_ozone_in_air", + units="1e-9", dim_coords_and_dims=coord_spec, ) ps_cube = get_ps_cube() @@ -53,30 +121,77 @@ def test_toz_calculate_masked_cubes(masked_cubes): """Test function ``calculate`` with masked cube.""" derived_var = toz.DerivedVariable() + out_cube = derived_var.calculate(masked_cubes) + + assert out_cube.units == "m" assert not np.ma.is_masked(out_cube.data) - np.testing.assert_allclose(out_cube.data, - [[[1.2988646378902597, 0.7871906896304607], - [1.6924599827054907, 0.9446288275565529]]]) - assert out_cube.units == 'DU' + np.testing.assert_allclose( + out_cube.data, + [ + [ + [1.2988646378902597e-5, 0.7871906896304607e-5], + [1.6924599827054907e-5, 0.9446288275565529e-5], + ] + ], + ) + + +def test_toz_calculate_masked_cubes_no_lon(masked_cubes_no_lon): + """Test function ``calculate`` with zonal mean masked cube.""" + derived_var = toz.DerivedVariable() + + out_cube = derived_var.calculate(masked_cubes_no_lon) + + assert out_cube.units == "m" + assert not np.ma.is_masked(out_cube.data) + np.testing.assert_allclose( + out_cube.data, + [[[1.3972634740940675e-5], [1.6924599827054907e-5]]], + ) + + +def 
test_toz_calculate_masked_cubes_hybrid_plevs(masked_cubes_hybrid_plevs): + """Test function ``calculate`` with cube on hybrid pressure levels.""" + derived_var = toz.DerivedVariable() + + out_cube = derived_var.calculate(masked_cubes_hybrid_plevs) + + assert out_cube.units == "m" + assert not np.ma.is_masked(out_cube.data) + np.testing.assert_allclose( + out_cube.data, + [ + [ + [0.33701601399804104e-5, 0.3739155775744688e-5], + [0.440334792012039e-5, 0.19679767240761517e-5], + ] + ], + ) def test_toz_calculate_unmasked_cubes(unmasked_cubes): """Test function ``calculate`` with unmasked cube.""" derived_var = toz.DerivedVariable() + out_cube = derived_var.calculate(unmasked_cubes) + + assert out_cube.units == "m" assert not np.ma.is_masked(out_cube.data) - np.testing.assert_allclose(out_cube.data, - [[[2.65676858, 0.39359534], - [2.04669579, 0.94462883]]]) - assert out_cube.units == 'DU' + np.testing.assert_allclose( + out_cube.data, + [[[2.65676858e-5, 0.39359534e-5], [2.04669579e-5, 0.94462883e-5]]], + ) -@pytest.mark.parametrize('project,out', [ - ('CMIP5', [{'short_name': 'tro3'}, {'short_name': 'ps'}]), - ('TEST', [{'short_name': 'tro3'}, {'short_name': 'ps'}]), - ('CMIP6', [{'short_name': 'o3'}, {'short_name': 'ps'}]), -]) +@pytest.mark.parametrize( + "project,out", + [ + ("CMIP5", [{"short_name": "tro3"}, {"short_name": "ps"}]), + ("TEST", [{"short_name": "tro3"}, {"short_name": "ps"}]), + ("CMIP6", [{"short_name": "o3"}, {"short_name": "ps", "mip": "Amon"}]), + ], +) def test_toz_required(project, out): """Test function ``required``.""" derived_var = toz.DerivedVariable() diff --git a/tests/unit/preprocessor/_derive/test_troz.py b/tests/unit/preprocessor/_derive/test_troz.py new file mode 100644 index 0000000000..7789e1e500 --- /dev/null +++ b/tests/unit/preprocessor/_derive/test_troz.py @@ -0,0 +1,136 @@ +"""Test derivation of ``troz``.""" + +import iris +import numpy as np +import pytest +from iris.coords import AuxCoord, DimCoord +from iris.util import broadcast_to_shape + +import esmvalcore.preprocessor._derive.troz as troz + +from .test_toz import get_masked_o3_cube, get_ps_cube + + +def get_o3_cube(): + """Get ``o3`` input cube.""" + o3_cube = get_masked_o3_cube() + o3_cube.data = [ + [ + [[50.0, 70.0], [80.0, 90.0]], + [[70.0, 90.0], [100.0, 110.0]], + [[130, 140.0], [150.0, 160.0]], + ] + ] + o3_cube.units = "1e-9" + return o3_cube + + +@pytest.fixture +def cubes(): + """Input cubes for derivation of ``troz``.""" + o3_cube = get_o3_cube() + ps_cube = get_ps_cube() + ps_cube.data = [[[101300.0, 101300.0], [101300.0, 101300.0]]] + return iris.cube.CubeList([o3_cube, ps_cube]) + + +@pytest.fixture +def cubes_no_lon(): + """Zonal mean input cubes for derivation of ``troz``.""" + o3_cube = get_o3_cube() + o3_cube = o3_cube.collapsed("longitude", iris.analysis.MEAN) + o3_cube.remove_coord("longitude") + ps_cube = get_ps_cube() + ps_cube.data = [[[101300.0, 101300.0], [101300.0, 101300.0]]] + return iris.cube.CubeList([o3_cube, ps_cube]) + + +@pytest.fixture +def cubes_hybrid_plevs(): + """Input cubes with hybrid pressure levels for derivation of ``troz``.""" + o3_cube = get_o3_cube() + plev_coord = o3_cube.coord("air_pressure") + hybrid_plev_coord = AuxCoord( + broadcast_to_shape( + plev_coord.points, o3_cube.shape, o3_cube.coord_dims(plev_coord) + ), + ) + hybrid_plev_coord.metadata = plev_coord.metadata + alt_coord = DimCoord( + [0.0, 1000.0, 3000.0], + standard_name="altitude", + attributes={"positive": "up"}, + ) + o3_cube.remove_coord(plev_coord) + 
o3_cube.add_aux_coord(hybrid_plev_coord, (0, 1, 2, 3)) + o3_cube.add_dim_coord(alt_coord, 1) + + ps_cube = get_ps_cube() + ps_cube.data = [[[101300.0, 101300.0], [101300.0, 101300.0]]] + + return iris.cube.CubeList([o3_cube, ps_cube]) + + +def test_troz_calculate(cubes): + """Test function ``calculate``.""" + derived_var = troz.DerivedVariable() + + out_cube = derived_var.calculate(cubes) + + assert out_cube.units == "m" + assert out_cube.shape == (1, 2, 2) + assert not np.ma.is_masked(out_cube.data) + expected_data = [ + [ + [16.255487740869038e-5, 21.1833014579557e-5], + [23.647208316499057e-5, 26.111115175042404e-5], + ] + ] + np.testing.assert_allclose(out_cube.data, expected_data) + + +def test_troz_calculate_no_lon(cubes_no_lon): + """Test function ``calculate`` for zonal mean cubes.""" + derived_var = troz.DerivedVariable() + + out_cube = derived_var.calculate(cubes_no_lon) + + assert out_cube.units == "m" + assert out_cube.shape == (1, 2, 1) + assert not np.ma.is_masked(out_cube.data) + np.testing.assert_allclose( + out_cube.data, [[[18.71939459941235e-5], [24.87916174577070e-5]]] + ) + + +def test_troz_calculate_hybrid_plevs(cubes_hybrid_plevs): + """Test function ``calculate`` for cubes with hybrid pressure levels.""" + derived_var = troz.DerivedVariable() + + out_cube = derived_var.calculate(cubes_hybrid_plevs) + + assert out_cube.units == "m" + assert out_cube.shape == (1, 2, 2) + assert not np.ma.is_masked(out_cube.data) + expected_data = [ + [ + [31.581106479612044e-5, 27.640725083830575e-5], + [18.198372761713192e-5, 20.071886603033692e-5], + ] + ] + np.testing.assert_allclose(out_cube.data, expected_data) + + +@pytest.mark.parametrize( + "project,out", + [ + ("CMIP5", [{"short_name": "tro3"}, {"short_name": "ps"}]), + ("TEST", [{"short_name": "tro3"}, {"short_name": "ps"}]), + ("CMIP6", [{"short_name": "o3"}, {"short_name": "ps", "mip": "Amon"}]), + ], +) +def test_troz_required(project, out): + """Test function ``required``.""" + derived_var = troz.DerivedVariable() + output = derived_var.required(project) + assert output == out diff --git a/tests/unit/preprocessor/_derive/test_uajet.py b/tests/unit/preprocessor/_derive/test_uajet.py index 64aba89619..a381ce7e45 100644 --- a/tests/unit/preprocessor/_derive/test_uajet.py +++ b/tests/unit/preprocessor/_derive/test_uajet.py @@ -1,55 +1,67 @@ """Test derivation of `uajet`.""" + import iris import numpy as np import pytest from esmvalcore.preprocessor._derive import uajet -TIME_COORD = iris.coords.DimCoord([1.0, 2.0, 3.0], standard_name='time') -LEV_COORD = iris.coords.DimCoord([80000.0, 83000.0, 87000.0], - standard_name='air_pressure') -LON_COORD = iris.coords.DimCoord([0.0, 90.0, 180.0, 270.0], - bounds=[ - [-45., 45.], - [45., 135.], - [135., 225.], - [225., 315.], - ], - standard_name='longitude') +TIME_COORD = iris.coords.DimCoord([1.0, 2.0, 3.0], standard_name="time") +LEV_COORD = iris.coords.DimCoord( + [80000.0, 83000.0, 87000.0], standard_name="air_pressure" +) +LON_COORD = iris.coords.DimCoord( + [0.0, 90.0, 180.0, 270.0], + bounds=[ + [-45.0, 45.0], + [45.0, 135.0], + [135.0, 225.0], + [225.0, 315.0], + ], + standard_name="longitude", +) def broadcast(lat_array): - target_shape = (len(LEV_COORD.points), len(lat_array), - len(LON_COORD.points)) + target_shape = ( + len(LEV_COORD.points), + len(lat_array), + len(LON_COORD.points), + ) lat_array = np.expand_dims(lat_array, -1) lat_array = np.broadcast_to(lat_array, target_shape) return lat_array def gaussian(lat_array, shift): - return 
np.exp(-(lat_array - shift)**2 / (2 * 10**2)) + return np.exp(-((lat_array - shift) ** 2) / (2 * 10**2)) @pytest.fixture def cubes(): lat_array = np.array( - [-90.0, -80.0, -70.0, -60.0, -50.0, -40.0, -30.0, -20.0, -10.0, 0.0]) - lat_coord = iris.coords.DimCoord(lat_array, standard_name='latitude') + [-90.0, -80.0, -70.0, -60.0, -50.0, -40.0, -30.0, -20.0, -10.0, 0.0] + ) + lat_coord = iris.coords.DimCoord(lat_array, standard_name="latitude") # Produce data using Gaussian y_40 = broadcast(gaussian(lat_array, -40.0)) y_50 = broadcast(gaussian(lat_array, -50.0)) y_60 = broadcast(gaussian(lat_array, -60.0)) y_data = np.array([y_40, y_50, y_60]) - ua_cube = iris.cube.Cube(y_data, - standard_name='eastward_wind', - dim_coords_and_dims=[(TIME_COORD, 0), - (LEV_COORD, 1), - (lat_coord, 2), - (LON_COORD, 3)]) + ua_cube = iris.cube.Cube( + y_data, + standard_name="eastward_wind", + dim_coords_and_dims=[ + (TIME_COORD, 0), + (LEV_COORD, 1), + (lat_coord, 2), + (LON_COORD, 3), + ], + ) # Dummy cube - ta_cube = iris.cube.Cube([1.0], standard_name='air_temperature') + ta_cube = iris.cube.Cube([1.0], standard_name="air_temperature") return iris.cube.CubeList([ua_cube, ta_cube]) @@ -58,12 +70,12 @@ def test_uajet_calculation(cubes): out_cube = derived_var.calculate(cubes) real_cube = iris.cube.Cube( [-40.0, -50.0, -60.0], - units='degrees_north', + units="degrees_north", dim_coords_and_dims=[(TIME_COORD, 0)], attributes={ - 'plev': 85000, - 'lat_range_0': -80.0, - 'lat_range_1': -30.0, + "plev": 85000, + "lat_range_0": -80.0, + "lat_range_1": -30.0, }, ) assert out_cube == real_cube diff --git a/tests/unit/preprocessor/_derive/test_xch4.py b/tests/unit/preprocessor/_derive/test_xch4.py index e9980fbcac..4f1452e6dd 100644 --- a/tests/unit/preprocessor/_derive/test_xch4.py +++ b/tests/unit/preprocessor/_derive/test_xch4.py @@ -1,4 +1,5 @@ """Test derivation of ``xch4``.""" + import iris import numpy as np import pytest @@ -11,19 +12,34 @@ @pytest.fixture def cubes(): """Input cubes for derivation of ``xch4``.""" - xch4_cube = get_cube([[[[1.0]], [[2.0]]]], air_pressure_coord=True, - standard_name='mole_fraction_of_methane_in_air', - var_name='ch4', units='1e-3') - hus_cube = get_cube([[[[0.2]], [[0.2]]]], air_pressure_coord=True, - standard_name='specific_humidity', var_name='hus', - units='%') - zg_cube = get_cube([[[100.0]]], air_pressure_coord=False, - standard_name='geopotential_height', var_name='zg', - - units='m') - ps_cube = get_cube([[[100000.0]]], air_pressure_coord=False, - standard_name='surface_air_pressure', var_name='ps', - units='Pa') + xch4_cube = get_cube( + [[[[1.0]], [[2.0]]]], + air_pressure_coord=True, + standard_name="mole_fraction_of_methane_in_air", + var_name="ch4", + units="1e-3", + ) + hus_cube = get_cube( + [[[[0.2]], [[0.2]]]], + air_pressure_coord=True, + standard_name="specific_humidity", + var_name="hus", + units="%", + ) + zg_cube = get_cube( + [[[100.0]]], + air_pressure_coord=False, + standard_name="geopotential_height", + var_name="zg", + units="m", + ) + ps_cube = get_cube( + [[[100000.0]]], + air_pressure_coord=False, + standard_name="surface_air_pressure", + var_name="ps", + units="Pa", + ) return iris.cube.CubeList([xch4_cube, hus_cube, zg_cube, ps_cube]) @@ -32,18 +48,19 @@ def test_xch4_calculate(cubes): derived_var = xch4.DerivedVariable() out_cube = derived_var.calculate(cubes) assert out_cube.shape == (1, 1, 1) - assert out_cube.units == '1' - assert out_cube.coords('time') - assert out_cube.coords('air_pressure') - assert out_cube.coords('latitude') - assert 
out_cube.coords('longitude') + assert out_cube.units == "1" + assert out_cube.coords("time") + assert out_cube.coords("air_pressure") + assert out_cube.coords("latitude") + assert out_cube.coords("longitude") np.testing.assert_allclose(out_cube.data, [[[1.85e-3]]]) - np.testing.assert_allclose(out_cube.coord('time').points, [0.0]) - np.testing.assert_allclose(out_cube.coord('air_pressure').points, 85000.0) - np.testing.assert_allclose(out_cube.coord('air_pressure').bounds, - [[80000.0, 90000.0]]) - np.testing.assert_allclose(out_cube.coord('latitude').points, [45.0]) - np.testing.assert_allclose(out_cube.coord('longitude').points, [10.0]) + np.testing.assert_allclose(out_cube.coord("time").points, [0.0]) + np.testing.assert_allclose(out_cube.coord("air_pressure").points, 85000.0) + np.testing.assert_allclose( + out_cube.coord("air_pressure").bounds, [[80000.0, 90000.0]] + ) + np.testing.assert_allclose(out_cube.coord("latitude").points, [45.0]) + np.testing.assert_allclose(out_cube.coord("longitude").points, [10.0]) def test_xch4_required(): @@ -51,8 +68,8 @@ def test_xch4_required(): derived_var = xch4.DerivedVariable() output = derived_var.required(None) assert output == [ - {'short_name': 'ch4'}, - {'short_name': 'hus'}, - {'short_name': 'zg'}, - {'short_name': 'ps'}, + {"short_name": "ch4"}, + {"short_name": "hus"}, + {"short_name": "zg"}, + {"short_name": "ps"}, ] diff --git a/tests/unit/preprocessor/_derive/test_xco2.py b/tests/unit/preprocessor/_derive/test_xco2.py index 7fa956e376..c7cc7af072 100644 --- a/tests/unit/preprocessor/_derive/test_xco2.py +++ b/tests/unit/preprocessor/_derive/test_xco2.py @@ -1,4 +1,5 @@ """Test derivation of ``xco2``.""" + import iris import numpy as np import pytest @@ -11,19 +12,34 @@ @pytest.fixture def cubes(): """Input cubes for derivation of ``xco2``.""" - co2_cube = get_cube([[[[1.0]], [[2.0]]]], air_pressure_coord=True, - standard_name='mole_fraction_of_carbon_dioxide_in_air', - var_name='co2', units='1e-6') - hus_cube = get_cube([[[[0.2]], [[0.2]]]], air_pressure_coord=True, - standard_name='specific_humidity', var_name='hus', - units='%') - zg_cube = get_cube([[[100.0]]], air_pressure_coord=False, - standard_name='geopotential_height', var_name='zg', - - units='m') - ps_cube = get_cube([[[100000.0]]], air_pressure_coord=False, - standard_name='surface_air_pressure', var_name='ps', - units='Pa') + co2_cube = get_cube( + [[[[1.0]], [[2.0]]]], + air_pressure_coord=True, + standard_name="mole_fraction_of_carbon_dioxide_in_air", + var_name="co2", + units="1e-6", + ) + hus_cube = get_cube( + [[[[0.2]], [[0.2]]]], + air_pressure_coord=True, + standard_name="specific_humidity", + var_name="hus", + units="%", + ) + zg_cube = get_cube( + [[[100.0]]], + air_pressure_coord=False, + standard_name="geopotential_height", + var_name="zg", + units="m", + ) + ps_cube = get_cube( + [[[100000.0]]], + air_pressure_coord=False, + standard_name="surface_air_pressure", + var_name="ps", + units="Pa", + ) return iris.cube.CubeList([co2_cube, hus_cube, zg_cube, ps_cube]) @@ -32,18 +48,19 @@ def test_xco2_calculate(cubes): derived_var = xco2.DerivedVariable() out_cube = derived_var.calculate(cubes) assert out_cube.shape == (1, 1, 1) - assert out_cube.units == '1' - assert out_cube.coords('time') - assert out_cube.coords('air_pressure') - assert out_cube.coords('latitude') - assert out_cube.coords('longitude') + assert out_cube.units == "1" + assert out_cube.coords("time") + assert out_cube.coords("air_pressure") + assert out_cube.coords("latitude") + assert 
out_cube.coords("longitude") np.testing.assert_allclose(out_cube.data, [[[1.85e-6]]]) - np.testing.assert_allclose(out_cube.coord('time').points, [0.0]) - np.testing.assert_allclose(out_cube.coord('air_pressure').points, 85000.0) - np.testing.assert_allclose(out_cube.coord('air_pressure').bounds, - [[80000.0, 90000.0]]) - np.testing.assert_allclose(out_cube.coord('latitude').points, [45.0]) - np.testing.assert_allclose(out_cube.coord('longitude').points, [10.0]) + np.testing.assert_allclose(out_cube.coord("time").points, [0.0]) + np.testing.assert_allclose(out_cube.coord("air_pressure").points, 85000.0) + np.testing.assert_allclose( + out_cube.coord("air_pressure").bounds, [[80000.0, 90000.0]] + ) + np.testing.assert_allclose(out_cube.coord("latitude").points, [45.0]) + np.testing.assert_allclose(out_cube.coord("longitude").points, [10.0]) def test_xco2_required(): @@ -51,8 +68,8 @@ def test_xco2_required(): derived_var = xco2.DerivedVariable() output = derived_var.required(None) assert output == [ - {'short_name': 'co2'}, - {'short_name': 'hus'}, - {'short_name': 'zg'}, - {'short_name': 'ps'}, + {"short_name": "co2"}, + {"short_name": "hus"}, + {"short_name": "zg"}, + {"short_name": "ps"}, ] diff --git a/tests/unit/preprocessor/_detrend/test_detrend.py b/tests/unit/preprocessor/_detrend/test_detrend.py index 9b53f65d51..b3cdf30b17 100644 --- a/tests/unit/preprocessor/_detrend/test_detrend.py +++ b/tests/unit/preprocessor/_detrend/test_detrend.py @@ -4,11 +4,10 @@ import iris import iris.coords -from iris.cube import Cube import numpy as np import pytest from cf_units import Unit - +from iris.cube import Cube from numpy.testing import assert_array_almost_equal from esmvalcore.preprocessor._detrend import detrend @@ -17,34 +16,34 @@ def _create_sample_cube(): cube = Cube( np.array((np.arange(1, 25), np.arange(25, 49))), - var_name='co2', - units='J' + var_name="co2", + units="J", ) cube.add_dim_coord( iris.coords.DimCoord( - np.arange(15., 720., 30.), - standard_name='time', - units=Unit('days since 1950-01-01 00:00:00', calendar='gregorian'), + np.arange(15.0, 720.0, 30.0), + standard_name="time", + units=Unit("days since 1950-01-01 00:00:00", calendar="gregorian"), ), 1, ) cube.add_dim_coord( iris.coords.DimCoord( np.arange(1, 3), - standard_name='latitude', + standard_name="latitude", ), 0, ) return cube -@pytest.mark.parametrize('method', ['linear', 'constant']) +@pytest.mark.parametrize("method", ["linear", "constant"]) def test_decadal_average(method): """Test for decadal average.""" cube = _create_sample_cube() - result = detrend(cube, 'time', method) - if method == 'linear': + result = detrend(cube, "time", method) + if method == "linear": expected = np.zeros([2, 24]) else: expected = np.array( @@ -53,5 +52,5 @@ def test_decadal_average(method): assert_array_almost_equal(result.data, expected) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/unit/preprocessor/_mapping/test_mapping.py b/tests/unit/preprocessor/_mapping/test_mapping.py index dc838b3a54..40654be01b 100644 --- a/tests/unit/preprocessor/_mapping/test_mapping.py +++ b/tests/unit/preprocessor/_mapping/test_mapping.py @@ -1,4 +1,5 @@ """Unit tests for the esmvalcore.preprocessor._mapping module.""" + from unittest import mock import cf_units @@ -6,8 +7,11 @@ import numpy as np import tests -from esmvalcore.preprocessor._mapping import (get_empty_data, map_slices, - ref_to_dims_index) +from esmvalcore.preprocessor._mapping import ( + get_empty_data, + map_slices, + ref_to_dims_index, 
+) class TestHelpers(tests.Test): @@ -17,18 +21,18 @@ def setUp(self): """Set up basic fixtures.""" self.coord_system = mock.Mock(return_value=None) self.scalar_coord = mock.sentinel.scalar_coord - self.scalar_coord.name = lambda: 'scalar_coord' + self.scalar_coord.name = lambda: "scalar_coord" self.coord = mock.sentinel.coord self.coords = mock.Mock(return_value=[self.scalar_coord, self.coord]) def coord(name_or_coord): """Return coord for mock cube.""" - if name_or_coord == 'coord': + if name_or_coord == "coord": return self.coord - elif name_or_coord == 'scalar_coord': + elif name_or_coord == "scalar_coord": return self.scalar_coord else: - raise iris.exceptions.CoordinateNotFoundError('') + raise iris.exceptions.CoordinateNotFoundError("") def coord_dims(coord): """Return associated dims for coord in mock cube.""" @@ -37,7 +41,7 @@ def coord_dims(coord): elif coord == self.scalar_coord: return [] else: - raise iris.exceptions.CoordinateNotFoundError('') + raise iris.exceptions.CoordinateNotFoundError("") self.cube = mock.Mock( spec=iris.cube.Cube, @@ -68,23 +72,29 @@ def test_ref_to_dims_index__invalid_int(self): def test_ref_to_dims_index__scalar_coord(self): """Test ref_to_dims_index with scalar coordinate.""" - self.assertRaises(ValueError, ref_to_dims_index, self.cube, - 'scalar_coord') + self.assertRaises( + ValueError, ref_to_dims_index, self.cube, "scalar_coord" + ) def test_ref_to_dims_index__valid_coordinate_name(self): """Test ref_to_dims_index with valid coordinate name.""" - dims = ref_to_dims_index(self.cube, 'coord') + dims = ref_to_dims_index(self.cube, "coord") self.assertEqual([0], dims) def test_ref_to_dims_index__invalid_coordinate_name(self): """Test ref_to_dims_index with invalid coordinate name.""" - self.assertRaises(iris.exceptions.CoordinateNotFoundError, - ref_to_dims_index, self.cube, 'test') + self.assertRaises( + iris.exceptions.CoordinateNotFoundError, + ref_to_dims_index, + self.cube, + "test", + ) def test_ref_to_dims_index__invalid_type(self): """Test ref_to_dims_index with invalid argument.""" - self.assertRaises(ValueError, ref_to_dims_index, self.cube, - mock.sentinel.something) + self.assertRaises( + ValueError, ref_to_dims_index, self.cube, mock.sentinel.something + ) class Test(tests.Test): @@ -96,42 +106,42 @@ def setup_coordinates(self): """Set up coordinates for mock cube.""" self.time = mock.Mock( spec=iris.coords.DimCoord, - standard_name='time', - long_name='time', - shape=(3, ), + standard_name="time", + long_name="time", + shape=(3,), ) self.z = mock.Mock( spec=iris.coords.DimCoord, - standard_name='height', - long_name='height', - shape=(4, ), + standard_name="height", + long_name="height", + shape=(4,), ) self.src_latitude = mock.Mock( spec=iris.coords.DimCoord, - standard_name='latitude', - long_name='latitude', - shape=(5, ), + standard_name="latitude", + long_name="latitude", + shape=(5,), points=np.array([1.1, 2.2, 3.3, 4.4, 5.5]), ) self.src_longitude = mock.Mock( spec=iris.coords.DimCoord, - standard_name='longitude', - long_name='longitude', - shape=(6, ), + standard_name="longitude", + long_name="longitude", + shape=(6,), points=np.array([1.1, 2.2, 3.3, 4.4, 5.5, 6.6]), ) self.dst_latitude = mock.Mock( spec=iris.coords.DimCoord, - standard_name='latitude', - long_name='latitude', - shape=(2, ), + standard_name="latitude", + long_name="latitude", + shape=(2,), points=np.array([1.1, 2.2]), ) self.dst_longitude = mock.Mock( spec=iris.coords.DimCoord, - standard_name='longitude', - long_name='longitude', - shape=(2, ), + 
standard_name="longitude", + long_name="longitude", + shape=(2,), points=np.array([1.1, 2.2]), ) @@ -139,19 +149,19 @@ def setUp(self): """Set up fixtures for mapping test.""" self.coord_system = mock.Mock(return_value=None) self.scalar_coord = mock.sentinel.scalar_coord - self.scalar_coord.name = lambda: 'scalar_coord' + self.scalar_coord.name = lambda: "scalar_coord" self.setup_coordinates() def src_coord(name_or_coord): """Return coord for mock source cube.""" - if name_or_coord in ['latitude', self.src_latitude]: + if name_or_coord in ["latitude", self.src_latitude]: return self.src_latitude - elif name_or_coord in ['longitude', self.src_longitude]: + elif name_or_coord in ["longitude", self.src_longitude]: return self.src_longitude - elif name_or_coord == 'scalar_coord': + elif name_or_coord == "scalar_coord": return self.scalar_coord else: - raise iris.exceptions.CoordinateNotFoundError('') + raise iris.exceptions.CoordinateNotFoundError("") def coord_dims(coord): """Return coord dim for mock cubes.""" @@ -166,17 +176,20 @@ def coord_dims(coord): elif coord == self.scalar_coord: return [] else: - raise iris.exceptions.CoordinateNotFoundError('') + raise iris.exceptions.CoordinateNotFoundError("") def src_coords(*args, **kwargs): """Return selected coords for source cube.""" # pylint: disable=unused-argument # Here, args is ignored. dim_coords_list = [ - self.time, self.z, self.src_latitude, self.src_longitude + self.time, + self.z, + self.src_latitude, + self.src_longitude, ] - contains_dimension = kwargs.get('contains_dimension', None) - dim_coords = kwargs.get('dim_coords', None) + contains_dimension = kwargs.get("contains_dimension", None) + dim_coords = kwargs.get("dim_coords", None) if contains_dimension is not None: if dim_coords: return [dim_coords_list[contains_dimension]] @@ -191,9 +204,9 @@ def src_repr_coords(*args, **kwargs): # pylint: disable=unused-argument # Here, args is ignored. dim_coords = [self.src_latitude, self.src_longitude] - if kwargs.get('dim_coords', False): + if kwargs.get("dim_coords", False): return dim_coords - if 'contains_dimension' in kwargs: + if "contains_dimension" in kwargs: return dim_coords return [self.scalar_coord] + dim_coords @@ -202,7 +215,7 @@ def dst_repr_coords(*args, **kwargs): # pylint: disable=unused-argument # Here, args is ignored. 
dim_coords = [self.dst_latitude, self.dst_longitude] - if kwargs.get('dim_coords', False): + if kwargs.get("dim_coords", False): return dim_coords return [self.scalar_coord] + dim_coords @@ -215,10 +228,10 @@ def dst_repr_coords(*args, **kwargs): coord_dims=coord_dims, ndim=4, shape=(3, 4, 5, 6), - standard_name='sea_surface_temperature', - long_name='Sea surface temperature', - var_name='tos', - units=cf_units.Unit('K'), + standard_name="sea_surface_temperature", + long_name="Sea surface temperature", + var_name="tos", + units=cf_units.Unit("K"), attributes={}, cell_methods={}, aux_coords=[], @@ -239,17 +252,22 @@ def dst_repr_coords(*args, **kwargs): aux_coords=[], ) - @mock.patch('esmvalcore.preprocessor._mapping.get_empty_data') - @mock.patch('iris.cube.Cube') + @mock.patch("esmvalcore.preprocessor._mapping.get_empty_data") + @mock.patch("iris.cube.Cube") def test_map_slices(self, mock_cube, mock_get_empty_data): """Test map_slices.""" mock_get_empty_data.return_value = mock.sentinel.empty_data mock_cube.aux_coords = [] - dst = map_slices(self.src_cube, lambda s: np.ones((2, 2)), - self.src_repr, self.dst_repr) + dst = map_slices( + self.src_cube, + lambda s: np.ones((2, 2)), + self.src_repr, + self.dst_repr, + ) self.assertEqual(dst, mock_cube.return_value) - dim_coords = self.src_cube.coords(dim_coords=True)[:2] \ - + self.dst_repr.coords(dim_coords=True) + dim_coords = self.src_cube.coords(dim_coords=True)[ + :2 + ] + self.dst_repr.coords(dim_coords=True) dim_coords_and_dims = [(c, i) for i, c in enumerate(dim_coords)] mock_cube.assert_called_once_with( data=mock.sentinel.empty_data, diff --git a/tests/unit/preprocessor/_mask/test_mask.py b/tests/unit/preprocessor/_mask/test_mask.py index 44cb0246f9..9eca669d38 100644 --- a/tests/unit/preprocessor/_mask/test_mask.py +++ b/tests/unit/preprocessor/_mask/test_mask.py @@ -2,107 +2,124 @@ import unittest -import numpy as np - import iris import iris.fileformats -import tests +import numpy as np from cf_units import Unit -from esmvalcore.preprocessor._mask import (_apply_fx_mask, - count_spells, _get_fx_mask, - mask_above_threshold, - mask_below_threshold, - mask_glaciated, mask_inside_range, - mask_outside_range) + +import tests +from esmvalcore.preprocessor._mask import ( + _apply_mask, + _get_fx_mask, + count_spells, + mask_above_threshold, + mask_below_threshold, + mask_glaciated, + mask_inside_range, + mask_outside_range, +) class Test(tests.Test): """Test class for _mask.""" + def setUp(self): """Prepare tests.""" coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - self.data2 = np.array([[0., 1.], [2., 3.]]) + self.data2 = np.array([[0.0, 1.0], [2.0, 3.0]]) # Two points near the south pole and two points in the southern ocean - lons2 = iris.coords.DimCoord([1.5, 2.5], - standard_name='longitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_east', - coord_system=coord_sys) - lats2 = iris.coords.DimCoord([-89.5, -70], - standard_name='latitude', - bounds=[[-90., -89.], [-70.5, -69.5]], - units='degrees_north', - coord_system=coord_sys) + lons2 = iris.coords.DimCoord( + [1.5, 2.5], + standard_name="longitude", + bounds=[[1.0, 2.0], [2.0, 3.0]], + units="degrees_east", + coord_system=coord_sys, + ) + lats2 = iris.coords.DimCoord( + [-89.5, -70], + standard_name="latitude", + bounds=[[-90.0, -89.0], [-70.5, -69.5]], + units="degrees_north", + coord_system=coord_sys, + ) coords_spec3 = [(lats2, 0), (lons2, 1)] self.arr = iris.cube.Cube(self.data2, dim_coords_and_dims=coords_spec3) - self.time_cube = 
iris.cube.Cube(np.arange(1, 25),
-                                        var_name='co2',
-                                        units='J')
+        self.time_cube = iris.cube.Cube(
+            np.arange(1, 25), var_name="co2", units="J"
+        )
         self.time_cube.add_dim_coord(
-            iris.coords.DimCoord(np.arange(15., 720., 30.),
-                                 standard_name='time',
-                                 units=Unit('days since 1950-01-01 00:00:00',
-                                            calendar='gregorian')), 0)
-        self.fx_data = np.array([20., 60., 50.])
+            iris.coords.DimCoord(
+                np.arange(15.0, 720.0, 30.0),
+                standard_name="time",
+                units=Unit(
+                    "days since 1950-01-01 00:00:00", calendar="gregorian"
+                ),
+            ),
+            0,
+        )
+        self.fx_data = np.array([20.0, 60.0, 50.0])

     def test_apply_fx_mask_on_nonmasked_data(self):
-        """Test _apply_fx_mask func."""
+        """Test _apply_mask func."""
         dummy_fx_mask = np.ma.array((True, False, True))
-        app_mask = _apply_fx_mask(dummy_fx_mask,
-                                  self.time_cube.data[0:3].astype('float64'))
-        app_mask = app_mask.compute()
-        fixed_mask = np.ma.array(self.time_cube.data[0:3].astype('float64'),
-                                 mask=dummy_fx_mask)
+        app_mask = _apply_mask(
+            dummy_fx_mask, self.time_cube.data[0:3].astype("float64")
+        )
+        fixed_mask = np.ma.array(
+            self.time_cube.data[0:3].astype("float64"), mask=dummy_fx_mask
+        )
         self.assert_array_equal(fixed_mask, app_mask)

     def test_apply_fx_mask_on_masked_data(self):
-        """Test _apply_fx_mask func."""
+        """Test _apply_mask func."""
         dummy_fx_mask = np.ma.array((True, True, True))
-        masked_data = np.ma.array(self.time_cube.data[0:3].astype('float64'),
-                                  mask=np.ma.array((False, True, False)))
-        app_mask = _apply_fx_mask(dummy_fx_mask, masked_data)
-        app_mask = app_mask.compute()
-        fixed_mask = np.ma.array(self.time_cube.data[0:3].astype('float64'),
-                                 mask=dummy_fx_mask)
+        masked_data = np.ma.array(
+            self.time_cube.data[0:3].astype("float64"),
+            mask=np.ma.array((False, True, False)),
+        )
+        app_mask = _apply_mask(dummy_fx_mask, masked_data)
+        fixed_mask = np.ma.array(
+            self.time_cube.data[0:3].astype("float64"), mask=dummy_fx_mask
+        )
         self.assert_array_equal(fixed_mask, app_mask)

     def test_count_spells(self):
         """Test count_spells func."""
-        ref_spells = count_spells(self.time_cube.data, -1000., 0, 1)
+        ref_spells = count_spells(self.time_cube.data, -1000.0, 0, 1)
         np.testing.assert_equal(24, ref_spells)
-        ref_spells = count_spells(self.time_cube.data, -1000., 0, 2)
+        ref_spells = count_spells(self.time_cube.data, -1000.0, 0, 2)
         np.testing.assert_equal(12, ref_spells)

     def test_get_fx_mask(self):
         """Test _get_fx_mask func."""
         # sftlf: land,
sea - computed = _get_fx_mask(self.fx_data, 'land', 'sftlf') + computed = _get_fx_mask(self.fx_data, "land", "sftlf") expected = np.array([False, True, False]) self.assert_array_equal(expected, computed) - computed = _get_fx_mask(self.fx_data, 'sea', 'sftlf') + computed = _get_fx_mask(self.fx_data, "sea", "sftlf") expected = np.array([True, False, True]) self.assert_array_equal(expected, computed) # sftof: land, sea - computed = _get_fx_mask(self.fx_data, 'land', 'sftof') + computed = _get_fx_mask(self.fx_data, "land", "sftof") expected = np.array([True, False, False]) self.assert_array_equal(expected, computed) - computed = _get_fx_mask(self.fx_data, 'sea', 'sftof') + computed = _get_fx_mask(self.fx_data, "sea", "sftof") expected = np.array([False, True, True]) self.assert_array_equal(expected, computed) # sftgif: ice, landsea - computed = _get_fx_mask(self.fx_data, 'ice', 'sftgif') + computed = _get_fx_mask(self.fx_data, "ice", "sftgif") expected = np.array([False, True, False]) self.assert_array_equal(expected, computed) - computed = _get_fx_mask(self.fx_data, 'landsea', 'sftgif') + computed = _get_fx_mask(self.fx_data, "landsea", "sftgif") expected = np.array([True, False, True]) self.assert_array_equal(expected, computed) def test_mask_glaciated(self): """Test to mask glaciated (NE mask)""" - result = mask_glaciated(self.arr, mask_out='glaciated') - expected = np.ma.masked_array(self.data2, - mask=np.array([[True, True], - [False, False]])) + result = mask_glaciated(self.arr, mask_out="glaciated") + expected = np.ma.masked_array( + self.data2, mask=np.array([[True, True], [False, False]]) + ) self.assert_array_equal(result.data, expected) def test_mask_above_threshold(self): @@ -130,5 +147,5 @@ def test_mask_outside_range(self): self.assert_array_equal(result.data, expected) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/unit/preprocessor/_mask/test_mask_multimodel.py b/tests/unit/preprocessor/_mask/test_mask_multimodel.py index 1e9416fe39..b04a9aab71 100644 --- a/tests/unit/preprocessor/_mask/test_mask_multimodel.py +++ b/tests/unit/preprocessor/_mask/test_mask_multimodel.py @@ -31,8 +31,9 @@ def __init__(self, cubes, filename, **kwargs): def assert_array_equal(array_1, array_2): """Assert that (masked) array 1 equals (masked) array 2.""" if np.ma.is_masked(array_1) or np.ma.is_masked(array_2): - np.testing.assert_array_equal(np.ma.getmaskarray(array_1), - np.ma.getmaskarray(array_2)) + np.testing.assert_array_equal( + np.ma.getmaskarray(array_1), np.ma.getmaskarray(array_2) + ) mask = np.ma.getmaskarray(array_1) np.testing.assert_array_equal(array_1[~mask], array_2[~mask]) else: @@ -41,19 +42,24 @@ def assert_array_equal(array_1, array_2): def _get_cube(ndim): """Create stock cube.""" - time_coord = iris.coords.DimCoord([1], var_name='time', - standard_name='time', - units='days since 1850-01-01') - lev_coord = iris.coords.DimCoord([10, 5], var_name='plev', - standard_name='air_pressure', units='hPa') - lat_coord = iris.coords.DimCoord([1], var_name='lat', - standard_name='latitude', units='degrees') - lon_coord = iris.coords.DimCoord([0, 1], var_name='lon', - standard_name='longitude', - units='degrees') - x_coord = iris.coords.DimCoord([-1], var_name='x', - long_name='Arbitrary coordinate', - units='no unit') + time_coord = iris.coords.DimCoord( + [1], + var_name="time", + standard_name="time", + units="days since 1850-01-01", + ) + lev_coord = iris.coords.DimCoord( + [10, 5], var_name="plev", standard_name="air_pressure", units="hPa" + 
) + lat_coord = iris.coords.DimCoord( + [1], var_name="lat", standard_name="latitude", units="degrees" + ) + lon_coord = iris.coords.DimCoord( + [0, 1], var_name="lon", standard_name="longitude", units="degrees" + ) + x_coord = iris.coords.DimCoord( + [-1], var_name="x", long_name="Arbitrary coordinate", units="no unit" + ) if ndim == 0: cube_data = 42 @@ -69,17 +75,27 @@ def _get_cube(ndim): coord_spec = [(time_coord, 0), (lat_coord, 1), (lon_coord, 2)] elif ndim == 4: cube_data = np.arange(1 * 2 * 1 * 2).reshape(1, 2, 1, 2) - coord_spec = [(time_coord, 0), (lev_coord, 1), (lat_coord, 2), - (lon_coord, 3)] + coord_spec = [ + (time_coord, 0), + (lev_coord, 1), + (lat_coord, 2), + (lon_coord, 3), + ] elif ndim == 5: cube_data = np.arange(1 * 2 * 1 * 2 * 1).reshape(1, 2, 1, 2, 1) - coord_spec = [(time_coord, 0), (lev_coord, 1), (lat_coord, 2), - (lon_coord, 3), (x_coord, 4)] + coord_spec = [ + (time_coord, 0), + (lev_coord, 1), + (lat_coord, 2), + (lon_coord, 3), + (x_coord, 4), + ] else: assert False, f"Invalid ndim: {ndim}" - cube = iris.cube.Cube(cube_data, var_name='y', long_name='Y', - dim_coords_and_dims=coord_spec) + cube = iris.cube.Cube( + cube_data, var_name="y", long_name="Y", dim_coords_and_dims=coord_spec + ) return cube @@ -134,7 +150,7 @@ def test_get_shape(cube_0d, cube_5d): assert _get_shape(cubes) == (1, 2, 1, 2, 1) cubes = iris.cube.CubeList([cube_0d, cube_5d]) - msg = 'Expected cubes with identical shapes, got shapes' + msg = "Expected cubes with identical shapes, got shapes" with pytest.raises(ValueError, match=msg): _get_shape(cubes) @@ -176,14 +192,14 @@ def test_multimodel_mask_cubes_3d(cube_3d): def test_multimodel_mask_products_1d(cube_1d): """Test ``_multimodel_mask_products`` with 1D cubes.""" products = [ - PreprocessorFile(iris.cube.CubeList([cube_1d]), 'A'), - PreprocessorFile(iris.cube.CubeList([cube_1d, cube_1d]), 'B'), + PreprocessorFile(iris.cube.CubeList([cube_1d]), "A"), + PreprocessorFile(iris.cube.CubeList([cube_1d, cube_1d]), "B"), ] out_products = _multimodel_mask_products(products, (1,)) assert out_products == products - assert out_products[0].filename == 'A' + assert out_products[0].filename == "A" assert out_products[0].cubes == iris.cube.CubeList([cube_1d]) - assert out_products[1].filename == 'B' + assert out_products[1].filename == "B" assert out_products[1].cubes == iris.cube.CubeList([cube_1d, cube_1d]) for product in out_products: product.copy_provenance.assert_not_called() @@ -192,14 +208,14 @@ def test_multimodel_mask_products_1d(cube_1d): m_array = np.ma.masked_equal([33], 33) cube_masked = cube_1d.copy(m_array) - prod_a = PreprocessorFile(iris.cube.CubeList([cube_1d]), 'A') - prod_b = PreprocessorFile(iris.cube.CubeList([cube_masked]), 'B') + prod_a = PreprocessorFile(iris.cube.CubeList([cube_1d]), "A") + prod_b = PreprocessorFile(iris.cube.CubeList([cube_masked]), "B") products = [prod_a, prod_b] out_products = _multimodel_mask_products(products, (1,)) assert out_products == products - assert out_products[0].filename == 'A' + assert out_products[0].filename == "A" assert_array_equal(out_products[0].cubes[0].data, m_array) - assert out_products[1].filename == 'B' + assert out_products[1].filename == "B" assert out_products[1].cubes == iris.cube.CubeList([cube_masked]) out_products[0].copy_provenance.assert_not_called() out_products[0].wasderivedfrom.assert_called_once_with(prod_b) @@ -212,14 +228,14 @@ def test_multimodel_mask_products_1d(cube_1d): def test_multimodel_mask_products_5d(cube_5d): """Test ``_multimodel_mask_products`` with 
5D cubes.""" products = [ - PreprocessorFile(iris.cube.CubeList([cube_5d]), 'A'), - PreprocessorFile(iris.cube.CubeList([cube_5d, cube_5d]), 'B'), + PreprocessorFile(iris.cube.CubeList([cube_5d]), "A"), + PreprocessorFile(iris.cube.CubeList([cube_5d, cube_5d]), "B"), ] out_products = _multimodel_mask_products(products, (1, 2, 1, 2, 1)) assert out_products == products - assert out_products[0].filename == 'A' + assert out_products[0].filename == "A" assert out_products[0].cubes == iris.cube.CubeList([cube_5d]) - assert out_products[1].filename == 'B' + assert out_products[1].filename == "B" assert out_products[1].cubes == iris.cube.CubeList([cube_5d, cube_5d]) for product in out_products: product.copy_provenance.assert_not_called() @@ -230,16 +246,16 @@ def test_multimodel_mask_products_5d(cube_5d): m_array_2 = np.ma.masked_equal([[[[[1], [1]]], [[[3], [33]]]]], 33) cube_masked_1 = cube_5d.copy(m_array_1) cube_masked_2 = cube_5d.copy(m_array_2) - prod_a = PreprocessorFile(iris.cube.CubeList([cube_5d]), 'A') - prod_b = PreprocessorFile(iris.cube.CubeList([cube_masked_1]), 'B') - prod_c = PreprocessorFile(iris.cube.CubeList([cube_masked_2]), 'C') + prod_a = PreprocessorFile(iris.cube.CubeList([cube_5d]), "A") + prod_b = PreprocessorFile(iris.cube.CubeList([cube_masked_1]), "B") + prod_c = PreprocessorFile(iris.cube.CubeList([cube_masked_2]), "C") products = [prod_a, prod_b, prod_c] out_products = _multimodel_mask_products(products, (1, 2, 1, 2, 1)) expected_data = np.ma.masked_equal([[[[[33], [1]]], [[[33], [33]]]]], 33) assert out_products == products - assert out_products[0].filename == 'A' - assert out_products[1].filename == 'B' - assert out_products[2].filename == 'C' + assert out_products[0].filename == "A" + assert out_products[1].filename == "B" + assert out_products[2].filename == "C" for product in out_products: assert len(product.cubes) == 1 assert_array_equal(product.cubes[0].data, expected_data) @@ -257,15 +273,15 @@ def test_multimodel_mask_products_5d(cube_5d): def test_mask_multimodel_fail(cube_1d, cube_2d): """Test ``mask_multimodel`` expected fail.""" cubes = iris.cube.CubeList([cube_1d, cube_2d]) - msg = 'Expected cubes with identical shapes, got shapes' + msg = "Expected cubes with identical shapes, got shapes" with pytest.raises(ValueError, match=msg): mask_multimodel(cubes) products = [ cube_1d, - PreprocessorFile(iris.cube.CubeList([cube_1d]), 'A'), + PreprocessorFile(iris.cube.CubeList([cube_1d]), "A"), ] - msg = 'Input type for mask_multimodel not understood.' + msg = "Input type for mask_multimodel not understood." 
with pytest.raises(TypeError, match=msg): mask_multimodel(products) with pytest.raises(TypeError, match=msg): @@ -281,8 +297,8 @@ def test_mask_multimodel_empty(): assert out_cubes == iris.cube.CubeList([]) products = [ - PreprocessorFile(iris.cube.CubeList([]), 'A'), - PreprocessorFile(iris.cube.CubeList([]), 'B'), + PreprocessorFile(iris.cube.CubeList([]), "A"), + PreprocessorFile(iris.cube.CubeList([]), "B"), ] out_products = mask_multimodel(products) assert out_products is products @@ -302,16 +318,16 @@ def test_mask_multimodel(cube_2d, cube_4d): m_array_2 = np.ma.masked_equal([[[[1, 33]], [[3, 3]]]], 33) cube_masked_1 = cube_4d.copy(m_array_1) cube_masked_2 = cube_4d.copy(m_array_2) - prod_a = PreprocessorFile(iris.cube.CubeList([cube_4d]), 'A') - prod_b = PreprocessorFile(iris.cube.CubeList([cube_masked_1]), 'B') - prod_c = PreprocessorFile(iris.cube.CubeList([cube_masked_2]), 'C') + prod_a = PreprocessorFile(iris.cube.CubeList([cube_4d]), "A") + prod_b = PreprocessorFile(iris.cube.CubeList([cube_masked_1]), "B") + prod_c = PreprocessorFile(iris.cube.CubeList([cube_masked_2]), "C") products = [prod_a, prod_b, prod_c] out_products = mask_multimodel(products) expected_data = np.ma.masked_equal([[[[33, 33]], [[33, 3]]]], 33) assert out_products == products - assert out_products[0].filename == 'A' - assert out_products[1].filename == 'B' - assert out_products[2].filename == 'C' + assert out_products[0].filename == "A" + assert out_products[1].filename == "B" + assert out_products[2].filename == "C" for product in out_products: assert len(product.cubes) == 1 assert_array_equal(product.cubes[0].data, expected_data) diff --git a/tests/unit/preprocessor/_multimodel/test_multimodel.py b/tests/unit/preprocessor/_multimodel/test_multimodel.py index 656af00ac9..5bc5513451 100644 --- a/tests/unit/preprocessor/_multimodel/test_multimodel.py +++ b/tests/unit/preprocessor/_multimodel/test_multimodel.py @@ -18,21 +18,26 @@ from esmvalcore.preprocessor import multi_model_statistics from esmvalcore.preprocessor._supplementary_vars import add_ancillary_variable -SPAN_OPTIONS = ('overlap', 'full') +SPAN_OPTIONS = ("overlap", "full") -FREQUENCY_OPTIONS = ('daily', 'monthly', 'yearly') # hourly +FREQUENCY_OPTIONS = ("daily", "monthly", "yearly") # hourly -CALENDAR_OPTIONS = ('360_day', '365_day', 'standard', 'proleptic_gregorian', - 'julian') +CALENDAR_OPTIONS = ( + "360_day", + "365_day", + "standard", + "proleptic_gregorian", + "julian", +) EQUAL_NAMES = [ - ['var_name'], - ['standard_name'], - ['long_name'], - ['var_name', 'standard_name'], - ['var_name', 'long_name'], - ['standard_name', 'long_name'], - ['var_name', 'standard_name', 'long_name'], + ["var_name"], + ["standard_name"], + ["long_name"], + ["var_name", "standard_name"], + ["var_name", "long_name"], + ["standard_name", "long_name"], + ["var_name", "standard_name", "long_name"], ] @@ -47,20 +52,22 @@ def assert_array_allclose(this, other): @pytest.fixture def cubes_with_arbitrary_dimensions(): """Create cubes with non-standard dimensions.""" - a_coord = DimCoord([1, 2, 3], var_name='a') - b_coord = DimCoord([1], var_name='b') - s_coord = AuxCoord(0, var_name='s') + a_coord = DimCoord([1, 2, 3], var_name="a") + b_coord = DimCoord([1], var_name="b") + s_coord = AuxCoord(0, var_name="s") cube_kwargs = { - 'var_name': 'x', - 'dim_coords_and_dims': [(a_coord, 0), (b_coord, 1)], - 'aux_coords_and_dims': [(s_coord, ())], + "var_name": "x", + "dim_coords_and_dims": [(a_coord, 0), (b_coord, 1)], + "aux_coords_and_dims": [(s_coord, ())], } - cubes = 
CubeList([ - Cube([[0.0], [0.0], [0.0]], **cube_kwargs), - Cube([[0.0], [2.0], [1.0]], **cube_kwargs), - Cube([[0.0], [4.0], [2.0]], **cube_kwargs), - ]) + cubes = CubeList( + [ + Cube([[0.0], [0.0], [0.0]], **cube_kwargs), + Cube([[0.0], [2.0], [1.0]], **cube_kwargs), + Cube([[0.0], [4.0], [2.0]], **cube_kwargs), + ] + ) return cubes @@ -68,11 +75,11 @@ def cubes_with_arbitrary_dimensions(): @pytest.fixture def cubes_5d(): """Create 5d cubes.""" - a_coord = DimCoord([1], var_name='a') - b_coord = DimCoord([1], var_name='b') - c_coord = DimCoord([1], var_name='c') - d_coord = DimCoord([1], var_name='d') - e_coord = DimCoord([1], var_name='e') + a_coord = DimCoord([1], var_name="a") + b_coord = DimCoord([1], var_name="b") + c_coord = DimCoord([1], var_name="c") + d_coord = DimCoord([1], var_name="d") + e_coord = DimCoord([1], var_name="e") coord_spec = [ (a_coord, 0), (b_coord, 1), @@ -81,39 +88,44 @@ def cubes_5d(): (e_coord, 4), ] - cubes = CubeList([ - Cube(np.full((1, 1, 1, 1, 1), 1.0), dim_coords_and_dims=coord_spec), - Cube(np.full((1, 1, 1, 1, 1), 2.0), dim_coords_and_dims=coord_spec), - ]) + cubes = CubeList( + [ + Cube( + np.full((1, 1, 1, 1, 1), 1.0), dim_coords_and_dims=coord_spec + ), + Cube( + np.full((1, 1, 1, 1, 1), 2.0), dim_coords_and_dims=coord_spec + ), + ] + ) return cubes -def timecoord(frequency, - calendar='standard', - offset='days since 1850-01-01', - num=3): +def timecoord( + frequency, calendar="standard", offset="days since 1850-01-01", num=3 +): """Return a time coordinate with the given time points and calendar.""" time_points = range(1, num + 1) - if frequency == 'hourly': + if frequency == "hourly": dates = [datetime(1850, 1, 1, i, 0, 0) for i in time_points] - if frequency == 'daily': + if frequency == "daily": dates = [datetime(1850, 1, i, 0, 0, 0) for i in time_points] - elif frequency == 'monthly': + elif frequency == "monthly": dates = [datetime(1850, i, 15, 0, 0, 0) for i in time_points] - elif frequency == 'yearly': + elif frequency == "yearly": dates = [datetime(1850 + i - 1, 7, 1, 0, 0, 0) for i in time_points] unit = Unit(offset, calendar=calendar) points = date2num(dates, unit) - return DimCoord(points, standard_name='time', units=unit) + return DimCoord(points, standard_name="time", units=unit) def generate_cube_from_dates( dates, - calendar='standard', - offset='days since 1850-01-01', + calendar="standard", + offset="days since 1850-01-01", fill_val=1, len_data=3, var_name=None, @@ -144,11 +156,11 @@ def generate_cube_from_dates( else: len_data = len(dates) unit = Unit(offset, calendar=calendar) - time = DimCoord(date2num(dates, unit), - standard_name='time', - units=unit) + time = DimCoord( + date2num(dates, unit), standard_name="time", units=unit + ) - data = np.array((fill_val, ) * len_data, dtype=np.float32) + data = np.array((fill_val,) * len_data, dtype=np.float32) if lazy: data = da.from_array(data) @@ -169,12 +181,14 @@ def get_cubes_for_validation_test(frequency, lazy=False): cube2.data = data2 # Cube with deviating time coord - cube3 = generate_cube_from_dates(frequency, - calendar='360_day', - offset='days since 1950-01-01', - len_data=2, - fill_val=9, - lazy=lazy) + cube3 = generate_cube_from_dates( + frequency, + calendar="360_day", + offset="days since 1950-01-01", + len_data=2, + fill_val=9, + lazy=lazy, + ) return [cube1, cube2, cube3] @@ -184,76 +198,93 @@ def get_cube_for_equal_coords_test(num_cubes): cubes = [] for num in range(num_cubes): - cube = generate_cube_from_dates('monthly') + cube = 
generate_cube_from_dates("monthly") cubes.append(cube) # Create cubes that have one exactly equal coordinate ('year'), one # coordinate with matching names ('m') and one coordinate with non-matching # names - year_coord = AuxCoord([1, 2, 3], var_name='year', long_name='year', - units='1', attributes={'test': 1}) - m_coord = AuxCoord([1, 2, 3], var_name='m', long_name='m', units='s', - attributes={'test': 0}) - x_coord = AuxCoord([1, 2, 3], var_name='x', long_name='x', units='s', - attributes={'test': 2}) - for (idx, cube) in enumerate(cubes): + year_coord = AuxCoord( + [1, 2, 3], + var_name="year", + long_name="year", + units="1", + attributes={"test": 1}, + ) + m_coord = AuxCoord( + [1, 2, 3], + var_name="m", + long_name="m", + units="s", + attributes={"test": 0}, + ) + x_coord = AuxCoord( + [1, 2, 3], + var_name="x", + long_name="x", + units="s", + attributes={"test": 2}, + ) + for idx, cube in enumerate(cubes): new_m_coord = m_coord.copy() - new_m_coord.var_name = f'm_{idx}' + new_m_coord.var_name = f"m_{idx}" new_x_coord = x_coord.copy() - new_x_coord.long_name = f'x_{idx}' + new_x_coord.long_name = f"x_{idx}" cube.add_aux_coord(year_coord.copy(), 0) cube.add_aux_coord(new_m_coord, 0) cube.add_aux_coord(new_x_coord, 0) - assert cube.coord('year').metadata is not year_coord.metadata - assert cube.coord('year').metadata == year_coord.metadata - assert cube.coord('m').metadata is not m_coord.metadata - assert cube.coord('m').metadata != m_coord.metadata - assert cube.coord(f'x_{idx}').metadata is not x_coord.metadata - assert cube.coord(f'x_{idx}').metadata != x_coord.metadata + assert cube.coord("year").metadata is not year_coord.metadata + assert cube.coord("year").metadata == year_coord.metadata + assert cube.coord("m").metadata is not m_coord.metadata + assert cube.coord("m").metadata != m_coord.metadata + assert cube.coord(f"x_{idx}").metadata is not x_coord.metadata + assert cube.coord(f"x_{idx}").metadata != x_coord.metadata return cubes VALIDATION_DATA_SUCCESS = ( - ('full', 'mean', (5, 5, 3)), - ('full', {'operator': 'mean'}, (5, 5, 3)), - ('full', 'std_dev', (5.656854249492381, 4, 2.8284271247461903)), - ('full', 'std', (5.656854249492381, 4, 2.8284271247461903)), - ('full', 'min', (1, 1, 1)), - ('full', 'max', (9, 9, 5)), - ('full', 'median', (5, 5, 3)), - ('full', {'operator': 'percentile', 'percent': 50.0}, (5, 5, 3)), - ('full', 'p50', (5, 5, 3)), - ('full', 'p99.5', (8.96, 8.96, 4.98)), - ('full', 'peak', (9, 9, 5)), - ('overlap', 'mean', (5, 5)), - ('overlap', 'std_dev', (5.656854249492381, 4)), - ('overlap', 'std', (5.656854249492381, 4)), - ('overlap', 'min', (1, 1)), - ('overlap', 'max', (9, 9)), - ('overlap', 'median', (5, 5)), - ('overlap', {'operator': 'percentile', 'percent': 50.0}, (5, 5)), - ('overlap', 'p50', (5, 5)), - ('overlap', 'p99.5', (8.96, 8.96)), - ('overlap', 'peak', (9, 9)), + ("full", "mean", (5, 5, 3)), + ("full", {"operator": "mean"}, (5, 5, 3)), + ("full", "std_dev", (5.656854249492381, 4, 2.8284271247461903)), + ("full", "std", (5.656854249492381, 4, 2.8284271247461903)), + ("full", "min", (1, 1, 1)), + ("full", "max", (9, 9, 5)), + ("full", "median", (5, 5, 3)), + ("full", {"operator": "percentile", "percent": 50.0}, (5, 5, 3)), + ("full", "p50", (5, 5, 3)), + ("full", "p99.5", (8.96, 8.96, 4.98)), + ("full", "peak", (9, 9, 5)), + ("overlap", "mean", (5, 5)), + ("overlap", "std_dev", (5.656854249492381, 4)), + ("overlap", "std", (5.656854249492381, 4)), + ("overlap", "min", (1, 1)), + ("overlap", "max", (9, 9)), + ("overlap", "median", (5, 5)), + 
("overlap", {"operator": "percentile", "percent": 50.0}, (5, 5)), + ("overlap", "p50", (5, 5)), + ("overlap", "p99.5", (8.96, 8.96)), + ("overlap", "peak", (9, 9)), # test multiple statistics - ('overlap', ('min', 'max'), ((1, 1), (9, 9))), - ('overlap', ('min', {'operator': 'max'}), ((1, 1), (9, 9))), - ('full', ('min', 'max'), ((1, 1, 1), (9, 9, 5))), - ('full', ( - {'operator': 'percentile', 'percent': 50.0}, - {'operator': 'percentile', 'percent': 99.5} - ), ((5, 5, 3), (8.96, 8.96, 4.98))), + ("overlap", ("min", "max"), ((1, 1), (9, 9))), + ("overlap", ("min", {"operator": "max"}), ((1, 1), (9, 9))), + ("full", ("min", "max"), ((1, 1, 1), (9, 9, 5))), + ( + "full", + ( + {"operator": "percentile", "percent": 50.0}, + {"operator": "percentile", "percent": 99.5}, + ), + ((5, 5, 3), (8.96, 8.96, 4.98)), + ), ) @pytest.mark.parametrize( - 'length,slices', + "length,slices", [ (1, [slice(0, 1)]), - (25000, [slice(0, 8334), - slice(8334, 16668), - slice(16668, 25000)]), + (25000, [slice(0, 8334), slice(8334, 16668), slice(16668, 25000)]), ], ) def test_compute_slices(length, slices): @@ -322,15 +353,15 @@ def test_compute_slices_equals_end_index(): assert slices == expected_slices -@pytest.mark.parametrize('frequency', FREQUENCY_OPTIONS) -@pytest.mark.parametrize('span, statistics, expected', VALIDATION_DATA_SUCCESS) +@pytest.mark.parametrize("frequency", FREQUENCY_OPTIONS) +@pytest.mark.parametrize("span, statistics, expected", VALIDATION_DATA_SUCCESS) def test_multimodel_statistics(frequency, span, statistics, expected): """High level test for multicube statistics function.""" cubes = get_cubes_for_validation_test(frequency) if isinstance(statistics, (str, dict)): - statistics = (statistics, ) - expected = (expected, ) + statistics = (statistics,) + expected = (expected,) result = multi_model_statistics(cubes, span, statistics) @@ -342,27 +373,27 @@ def test_multimodel_statistics(frequency, span, statistics, expected): result_cube = result[stat_id] # make sure that temporary coord has been removed with pytest.raises(iris.exceptions.CoordinateNotFoundError): - result_cube.coord('multi-model') + result_cube.coord("multi-model") # test that real data in => real data out assert result_cube.has_lazy_data() is False expected_data = np.ma.array(expected[i], mask=False) assert_array_allclose(result_cube.data, expected_data) -@pytest.mark.parametrize('span', SPAN_OPTIONS) +@pytest.mark.parametrize("span", SPAN_OPTIONS) def test_lazy_data_consistent_times(span): """Test laziness of multimodel statistics with consistent time axis.""" cubes = ( - generate_cube_from_dates('monthly', fill_val=1, lazy=True), - generate_cube_from_dates('monthly', fill_val=3, lazy=True), - generate_cube_from_dates('monthly', fill_val=6, lazy=True), + generate_cube_from_dates("monthly", fill_val=1, lazy=True), + generate_cube_from_dates("monthly", fill_val=3, lazy=True), + generate_cube_from_dates("monthly", fill_val=6, lazy=True), ) for cube in cubes: assert cube.has_lazy_data() - statistic = 'sum' - statistics = (statistic, ) + statistic = "sum" + statistics = (statistic,) result = mm._multicube_statistics(cubes, span=span, statistics=statistics) @@ -370,7 +401,7 @@ def test_lazy_data_consistent_times(span): assert result_cube.has_lazy_data() -@pytest.mark.parametrize('span', SPAN_OPTIONS) +@pytest.mark.parametrize("span", SPAN_OPTIONS) def test_lazy_data_inconsistent_times(span): """Test laziness of multimodel statistics with inconsistent time axis. 
@@ -379,18 +410,21 @@ def test_lazy_data_inconsistent_times(span): """ cubes = ( generate_cube_from_dates( - [datetime(1850, i, 15, 0, 0, 0) for i in range(1, 10)], lazy=True), + [datetime(1850, i, 15, 0, 0, 0) for i in range(1, 10)], lazy=True + ), generate_cube_from_dates( - [datetime(1850, i, 15, 0, 0, 0) for i in range(3, 8)], lazy=True), + [datetime(1850, i, 15, 0, 0, 0) for i in range(3, 8)], lazy=True + ), generate_cube_from_dates( - [datetime(1850, i, 15, 0, 0, 0) for i in range(2, 9)], lazy=True), + [datetime(1850, i, 15, 0, 0, 0) for i in range(2, 9)], lazy=True + ), ) for cube in cubes: assert cube.has_lazy_data() - statistic = 'sum' - statistics = (statistic, ) + statistic = "sum" + statistics = (statistic,) result = mm._multicube_statistics(cubes, span=span, statistics=statistics) @@ -398,58 +432,61 @@ def test_lazy_data_inconsistent_times(span): assert result_cube.has_lazy_data() -@pytest.mark.parametrize('span', SPAN_OPTIONS) +@pytest.mark.parametrize("span", SPAN_OPTIONS) def test_multicube_stats_dict_keys(span): """Test output dict keys of ``_multicube_statistics``.""" cubes = ( - generate_cube_from_dates('monthly', fill_val=1), - generate_cube_from_dates('monthly', fill_val=3), - generate_cube_from_dates('monthly', fill_val=6), + generate_cube_from_dates("monthly", fill_val=1), + generate_cube_from_dates("monthly", fill_val=3), + generate_cube_from_dates("monthly", fill_val=6), ) statistics = [ - 'mean', - {'operator': 'sum'}, - {'operator': 'percentile', 'percent': 50}, - {'operator': 'percentile', 'percent': 95.0}, + "mean", + {"operator": "sum"}, + {"operator": "percentile", "percent": 50}, + {"operator": "percentile", "percent": 95.0}, ] result = mm._multicube_statistics(cubes, span=span, statistics=statistics) assert isinstance(result, dict) assert len(result) == 4 - assert 'mean' in result - assert 'sum' in result - assert 'percentile50' in result - assert 'percentile95.0' in result + assert "mean" in result + assert "sum" in result + assert "percentile50" in result + assert "percentile95.0" in result VALIDATION_DATA_FAIL = ( - ('percentile', ValueError), - ('wpercentile', ValueError), - ('count', ValueError), - ('proportion', ValueError), + ("percentile", ValueError), + ("wpercentile", ValueError), + ("count", ValueError), + ("proportion", ValueError), ) -@pytest.mark.parametrize('statistic, error', VALIDATION_DATA_FAIL) +@pytest.mark.parametrize("statistic, error", VALIDATION_DATA_FAIL) def test_unsupported_statistics_fail(statistic, error): """Check that unsupported statistics raise an exception.""" - cubes = get_cubes_for_validation_test('monthly') - span = 'overlap' - statistics = (statistic, ) + cubes = get_cubes_for_validation_test("monthly") + span = "overlap" + statistics = (statistic,) with pytest.raises(error): _ = multi_model_statistics(cubes, span, statistics) -@pytest.mark.parametrize('calendar1, calendar2, expected', ( - ('360_day', '360_day', ('360_day',)), - ('365_day', '365_day', ('365_day',)), - ('365_day', '360_day', ('standard', 'gregorian')), - ('360_day', '365_day', ('standard', 'gregorian')), - ('standard', '365_day', ('standard', 'gregorian')), - ('proleptic_gregorian', 'julian', ('standard', 'gregorian')), - ('julian', '365_day', ('standard', 'gregorian')), -)) +@pytest.mark.parametrize( + "calendar1, calendar2, expected", + ( + ("360_day", "360_day", ("360_day",)), + ("365_day", "365_day", ("365_day",)), + ("365_day", "360_day", ("standard", "gregorian")), + ("360_day", "365_day", ("standard", "gregorian")), + ("standard", "365_day", 
("standard", "gregorian")), + ("proleptic_gregorian", "julian", ("standard", "gregorian")), + ("julian", "365_day", ("standard", "gregorian")), + ), +) def test_get_consistent_time_unit(calendar1, calendar2, expected): """Test same calendar returned or default if calendars differ. @@ -457,15 +494,15 @@ def test_get_consistent_time_unit(calendar1, calendar2, expected): If the calendars are not the same, return 'standard'. """ cubes = ( - generate_cube_from_dates('monthly', calendar=calendar1), - generate_cube_from_dates('monthly', calendar=calendar2), + generate_cube_from_dates("monthly", calendar=calendar1), + generate_cube_from_dates("monthly", calendar=calendar2), ) result = mm._get_consistent_time_unit(cubes) assert result.calendar in expected -@pytest.mark.parametrize('span', SPAN_OPTIONS) +@pytest.mark.parametrize("span", SPAN_OPTIONS) def test_align(span): """Test _align function.""" # TODO --> check that if a cube is extended, @@ -475,25 +512,25 @@ def test_align(span): cubes = [] for calendar in CALENDAR_OPTIONS: - cube = generate_cube_from_dates('monthly', - calendar=calendar, - len_data=3) + cube = generate_cube_from_dates( + "monthly", calendar=calendar, len_data=3 + ) cubes.append(cube) result_cubes = mm._align_time_coord(cubes, span) - calendars = set(cube.coord('time').units.calendar for cube in result_cubes) + calendars = set(cube.coord("time").units.calendar for cube in result_cubes) assert len(calendars) == 1 - assert list(calendars)[0] in ('standard', 'gregorian') + assert list(calendars)[0] in ("standard", "gregorian") shapes = set(cube.shape for cube in result_cubes) assert len(shapes) == 1 - assert tuple(shapes)[0] == (len_data, ) + assert tuple(shapes)[0] == (len_data,) -@pytest.mark.parametrize('span', SPAN_OPTIONS) +@pytest.mark.parametrize("span", SPAN_OPTIONS) def test_combine_same_shape(span): """Test _combine with same shape of cubes.""" len_data = 3 @@ -501,10 +538,9 @@ def test_combine_same_shape(span): cubes = [] for i in range(num_cubes): - cube = generate_cube_from_dates('monthly', - '360_day', - fill_val=i, - len_data=len_data) + cube = generate_cube_from_dates( + "monthly", "360_day", fill_val=i, len_data=len_data + ) cubes.append(cube) result_cube = mm._combine(cubes) @@ -513,10 +549,9 @@ def test_combine_same_shape(span): assert dim_coord.var_name == mm.CONCAT_DIM assert result_cube.shape == (num_cubes, len_data) - desired = np.linspace((0, ) * len_data, - num_cubes - 1, - num=num_cubes, - dtype=int) + desired = np.linspace( + (0,) * len_data, num_cubes - 1, num=num_cubes, dtype=int + ) np.testing.assert_equal(result_cube.data, desired) @@ -526,7 +561,7 @@ def test_combine_different_shape_fail(): cubes = [] for num in range(1, num_cubes + 1): - cube = generate_cube_from_dates('monthly', '360_day', len_data=num) + cube = generate_cube_from_dates("monthly", "360_day", len_data=num) cubes.append(cube) msg = ( @@ -543,9 +578,9 @@ def test_combine_inconsistent_var_names_fail(): cubes = [] for num in range(num_cubes): - cube = generate_cube_from_dates('monthly', - '360_day', - var_name=f'test_var_{num}') + cube = generate_cube_from_dates( + "monthly", "360_day", var_name=f"test_var_{num}" + ) cubes.append(cube) msg = ( @@ -558,8 +593,8 @@ def test_combine_inconsistent_var_names_fail(): def test_combine_differing_scalar_coords_fail(): """Test _combine with differing scalar coordinates.""" - cubes = CubeList(generate_cube_from_dates('monthly') for _ in range(2)) - scalar_coord_0 = AuxCoord(0.0, standard_name='height', units='m') + cubes = 
CubeList(generate_cube_from_dates("monthly") for _ in range(2)) + scalar_coord_0 = AuxCoord(0.0, standard_name="height", units="m") cubes[0].add_aux_coord(scalar_coord_0, ()) msg = ( @@ -570,10 +605,10 @@ def test_combine_differing_scalar_coords_fail(): mm._combine(cubes) -@pytest.mark.parametrize('scalar_coord', ['p0', 'ptop']) +@pytest.mark.parametrize("scalar_coord", ["p0", "ptop"]) def test_combine_with_special_scalar_coords_to_remove(scalar_coord): """Test _combine with scalar coordinates that should be removed.""" - cubes = CubeList(generate_cube_from_dates('monthly') for _ in range(5)) + cubes = CubeList(generate_cube_from_dates("monthly") for _ in range(5)) scalar_coord_0 = AuxCoord(0.0, var_name=scalar_coord) scalar_coord_1 = AuxCoord(1.0, var_name=scalar_coord) cubes[0].add_aux_coord(scalar_coord_0, ()) @@ -589,10 +624,10 @@ def test_combine_equal_coordinates(): merged_cube = mm._combine(cubes) # The equal coordinate ('year') was not changed - assert merged_cube.coord('year').var_name == 'year' - assert merged_cube.coord('year').standard_name is None - assert merged_cube.coord('year').long_name == 'year' - assert merged_cube.coord('year').attributes == {'test': 1} + assert merged_cube.coord("year").var_name == "year" + assert merged_cube.coord("year").standard_name is None + assert merged_cube.coord("year").long_name == "year" + assert merged_cube.coord("year").attributes == {"test": 1} def test_combine_non_equal_coordinates(): @@ -603,14 +638,14 @@ def test_combine_non_equal_coordinates(): # The var_name of the matching name coordinate ('m') has been removed, and # the non-equal one ('x') does not have a long_name anymore # Both coordinates lost their attributes - assert merged_cube.coord('m').var_name is None - assert merged_cube.coord('m').standard_name is None - assert merged_cube.coord('m').long_name == 'm' - assert merged_cube.coord('m').attributes == {} - assert merged_cube.coord('x').var_name == 'x' - assert merged_cube.coord('x').standard_name is None - assert merged_cube.coord('x').long_name is None - assert merged_cube.coord('x').attributes == {} + assert merged_cube.coord("m").var_name is None + assert merged_cube.coord("m").standard_name is None + assert merged_cube.coord("m").long_name == "m" + assert merged_cube.coord("m").attributes == {} + assert merged_cube.coord("x").var_name == "x" + assert merged_cube.coord("x").standard_name is None + assert merged_cube.coord("x").long_name is None + assert merged_cube.coord("x").attributes == {} def test_equalise_coordinate_metadata_no_cubes(): @@ -620,37 +655,37 @@ def test_equalise_coordinate_metadata_no_cubes(): def test_equalise_coordinate_metadata_one_cube(): """Test _equalise_coordinate_metadata doesn't fail with a single cubes.""" - cube = generate_cube_from_dates('monthly') + cube = generate_cube_from_dates("monthly") new_cube = cube.copy() mm._equalise_coordinate_metadata([new_cube]) assert new_cube is not cube assert new_cube == cube -@pytest.mark.parametrize('span', SPAN_OPTIONS) +@pytest.mark.parametrize("span", SPAN_OPTIONS) def test_edge_case_different_time_offsets(span): cubes = ( - generate_cube_from_dates('monthly', - '360_day', - offset='days since 1888-01-01'), - generate_cube_from_dates('monthly', - '360_day', - offset='days since 1899-01-01'), + generate_cube_from_dates( + "monthly", "360_day", offset="days since 1888-01-01" + ), + generate_cube_from_dates( + "monthly", "360_day", offset="days since 1899-01-01" + ), ) - statistic = 'min' - statistics = (statistic, ) + statistic = "min" + statistics = 
(statistic,) result = multi_model_statistics(cubes, span, statistics) result_cube = result[statistic] - time_coord = result_cube.coord('time') + time_coord = result_cube.coord("time") - assert time_coord.units.calendar in ('standard', 'gregorian') - assert time_coord.units.origin == 'days since 1850-01-01' + assert time_coord.units.calendar in ("standard", "gregorian") + assert time_coord.units.origin == "days since 1850-01-01" - desired = np.array((14., 45., 73.)) + desired = np.array((14.0, 45.0, 73.0)) np.testing.assert_array_equal(time_coord.points, desired) @@ -666,7 +701,7 @@ def generate_cubes_with_non_overlapping_timecoords(): ) -@pytest.mark.xfail(reason='Multimodel statistics returns the original cubes.') +@pytest.mark.xfail(reason="Multimodel statistics returns the original cubes.") def test_edge_case_time_no_overlap_fail(): """Test case when time coords do not overlap using span='overlap'. @@ -675,11 +710,11 @@ def test_edge_case_time_no_overlap_fail(): """ cubes = generate_cubes_with_non_overlapping_timecoords() - statistic = 'min' - statistics = (statistic, ) + statistic = "min" + statistics = (statistic,) with pytest.raises(ValueError): - _ = multi_model_statistics(cubes, 'overlap', statistics) + _ = multi_model_statistics(cubes, "overlap", statistics) def test_edge_case_time_no_overlap_success(): @@ -690,16 +725,16 @@ def test_edge_case_time_no_overlap_success(): """ cubes = generate_cubes_with_non_overlapping_timecoords() - statistic = 'min' - statistics = (statistic, ) + statistic = "min" + statistics = (statistic,) - result = multi_model_statistics(cubes, 'full', statistics) + result = multi_model_statistics(cubes, "full", statistics) result_cube = result[statistic] - assert result_cube.coord('time').shape == (6, ) + assert result_cube.coord("time").shape == (6,) -@pytest.mark.parametrize('span', SPAN_OPTIONS) +@pytest.mark.parametrize("span", SPAN_OPTIONS) def test_edge_case_time_not_in_middle_of_months(span): """Test case when time coords are not on 15th for monthly data. 
@@ -715,26 +750,26 @@ def test_edge_case_time_not_in_middle_of_months(span): generate_cube_from_dates(dates2), ) - statistic = 'min' - statistics = (statistic, ) + statistic = "min" + statistics = (statistic,) result = multi_model_statistics(cubes, span, statistics) result_cube = result[statistic] - time_coord = result_cube.coord('time') + time_coord = result_cube.coord("time") - desired = np.array((14., 45., 73.)) + desired = np.array((14.0, 45.0, 73.0)) np.testing.assert_array_equal(time_coord.points, desired) -@pytest.mark.parametrize('span', SPAN_OPTIONS) +@pytest.mark.parametrize("span", SPAN_OPTIONS) def test_edge_case_sub_daily_data_fail(span): """Test case when cubes with sub-daily time coords are passed.""" - cube = generate_cube_from_dates('hourly') + cube = generate_cube_from_dates("hourly") cubes = (cube, cube) - statistic = 'min' - statistics = (statistic, ) + statistic = "min" + statistics = (statistic,) with pytest.raises(ValueError): _ = multi_model_statistics(cubes, span, statistics) @@ -742,16 +777,16 @@ def test_edge_case_sub_daily_data_fail(span): def test_unify_time_coordinates(): """Test set common calendar.""" - cube1 = generate_cube_from_dates('monthly', - calendar='360_day', - offset='days since 1850-01-01') - cube2 = generate_cube_from_dates('monthly', - calendar='standard', - offset='days since 1943-05-16') + cube1 = generate_cube_from_dates( + "monthly", calendar="360_day", offset="days since 1850-01-01" + ) + cube2 = generate_cube_from_dates( + "monthly", calendar="standard", offset="days since 1943-05-16" + ) mm._unify_time_coordinates([cube1, cube2]) - assert cube1.coord('time') == cube2.coord('time') + assert cube1.coord("time") == cube2.coord("time") class PreprocessorFile: @@ -773,7 +808,7 @@ def group(self, keys: list) -> str: values from .attributes """ if not keys: - return '' + return "" if isinstance(keys, str): keys = [keys] @@ -783,16 +818,16 @@ def group(self, keys: list) -> str: attribute = self.attributes.get(key) if attribute: if isinstance(attribute, (list, tuple)): - attribute = '-'.join(attribute) + attribute = "-".join(attribute) identifier.append(attribute) - return '_'.join(identifier) + return "_".join(identifier) def test_return_products(): """Check that the right product set is returned.""" - cube1 = generate_cube_from_dates('monthly', fill_val=1) - cube2 = generate_cube_from_dates('monthly', fill_val=9) + cube1 = generate_cube_from_dates("monthly", fill_val=1) + cube2 = generate_cube_from_dates("monthly", fill_val=9) input1 = PreprocessorFile(cube1) input2 = PreprocessorFile(cube2) @@ -800,57 +835,69 @@ def test_return_products(): products = set([input1, input2]) output = PreprocessorFile() - output_products = {'': {'mean': output}} + output_products = {"": {"mean": output}} kwargs = { - 'statistics': ['mean'], - 'span': 'full', - 'output_products': output_products[''] + "statistics": ["mean"], + "span": "full", + "output_products": output_products[""], } - result1 = mm._multiproduct_statistics(products, - keep_input_datasets=True, - **kwargs) + result1 = mm._multiproduct_statistics( + products, keep_input_datasets=True, **kwargs + ) - result2 = mm._multiproduct_statistics(products, - keep_input_datasets=False, - **kwargs) + result2 = mm._multiproduct_statistics( + products, keep_input_datasets=False, **kwargs + ) assert result1 == set([input1, input2, output]) assert result2 == set([output]) - kwargs['output_products'] = output_products + kwargs["output_products"] = output_products result3 = mm.multi_model_statistics(products, 
**kwargs) - result4 = mm.multi_model_statistics(products, - keep_input_datasets=False, - **kwargs) + result4 = mm.multi_model_statistics( + products, keep_input_datasets=False, **kwargs + ) assert result3 == result1 assert result4 == result2 def test_ensemble_products(): - cube1 = generate_cube_from_dates('monthly', fill_val=1) - cube2 = generate_cube_from_dates('monthly', fill_val=9) + cube1 = generate_cube_from_dates("monthly", fill_val=1) + cube2 = generate_cube_from_dates("monthly", fill_val=9) attributes1 = { - 'project': 'project', 'dataset': 'dataset', - 'exp': 'exp', 'ensemble': '1'} + "project": "project", + "dataset": "dataset", + "exp": "exp", + "ensemble": "1", + } input1 = PreprocessorFile(cube1, attributes=attributes1) attributes2 = { - 'project': 'project', 'dataset': 'dataset', - 'exp': 'exp', 'ensemble': '2'} + "project": "project", + "dataset": "dataset", + "exp": "exp", + "ensemble": "2", + } input2 = PreprocessorFile(cube2, attributes=attributes2) attributes3 = { - 'project': 'project', 'dataset': 'dataset2', - 'exp': 'exp', 'ensemble': '1'} + "project": "project", + "dataset": "dataset2", + "exp": "exp", + "ensemble": "1", + } input3 = PreprocessorFile(cube1, attributes=attributes3) attributes4 = { - 'project': 'project', 'dataset': 'dataset2', - 'exp': 'exp', 'ensemble': '2'} + "project": "project", + "dataset": "dataset2", + "exp": "exp", + "ensemble": "2", + } input4 = PreprocessorFile(cube1, attributes=attributes4) products = set([input1, input2, input3, input4]) @@ -858,16 +905,16 @@ def test_ensemble_products(): output1 = PreprocessorFile() output2 = PreprocessorFile() output_products = { - 'project_dataset_exp': {'mean': output1}, - 'project_dataset2_exp': {'mean': output2}} + "project_dataset_exp": {"mean": output1}, + "project_dataset2_exp": {"mean": output2}, + } kwargs = { - 'statistics': ['mean'], - 'output_products': output_products, + "statistics": ["mean"], + "output_products": output_products, } - result = mm.ensemble_statistics( - products, **kwargs) + result = mm.ensemble_statistics(products, **kwargs) assert len(result) == 2 @@ -876,44 +923,44 @@ def test_ignore_tas_scalar_height_coord(): tas_2m = generate_cube_from_dates("monthly") tas_1p5m = generate_cube_from_dates("monthly") - for cube, height in zip([tas_2m, tas_1p5m], [2., 1.5]): + for cube, height in zip([tas_2m, tas_1p5m], [2.0, 1.5]): cube.rename("air_temperature") cube.attributes["short_name"] = "tas" cube.add_aux_coord( - iris.coords.AuxCoord([height], var_name="height", units="m")) + iris.coords.AuxCoord([height], var_name="height", units="m") + ) result = mm.multi_model_statistics( - [tas_2m, tas_2m.copy(), tas_1p5m], statistics=['mean'], span='full') + [tas_2m, tas_2m.copy(), tas_1p5m], statistics=["mean"], span="full" + ) # iris automatically averages the value of the scalar coordinate. 
- assert len(result['mean'].coords("height")) == 1 + assert len(result["mean"].coords("height")) == 1 assert result["mean"].coord("height").points == 1.75 PRODUCTS = [ - CubeList(generate_cube_from_dates('monthly') for _ in range(3)), - [ - PreprocessorFile(generate_cube_from_dates('monthly')) for _ in range(3) - ], + CubeList(generate_cube_from_dates("monthly") for _ in range(3)), + [PreprocessorFile(generate_cube_from_dates("monthly")) for _ in range(3)], ] -SCALAR_COORD = AuxCoord(2.0, standard_name='height', units='m') +SCALAR_COORD = AuxCoord(2.0, standard_name="height", units="m") PRODUCTS[0][0].add_aux_coord(SCALAR_COORD, ()) PRODUCTS[1][0].cubes[0].add_aux_coord(SCALAR_COORD, ()) PRODUCTS[1] = set(PRODUCTS[1]) -@pytest.mark.parametrize('products', PRODUCTS) +@pytest.mark.parametrize("products", PRODUCTS) def test_ignore_different_scalar_coords(products): """Ignore different scalar coords if desired.""" - stat = 'mean' + stat = "mean" output = PreprocessorFile() - output_products = {'': {stat: output}} + output_products = {"": {stat: output}} kwargs = { - 'statistics': [stat], - 'span': 'full', - 'output_products': output_products, - 'keep_input_datasets': False, - 'ignore_scalar_coords': True, + "statistics": [stat], + "span": "full", + "output_products": output_products, + "keep_input_datasets": False, + "ignore_scalar_coords": True, } results = mm.multi_model_statistics(products, **kwargs) @@ -928,17 +975,17 @@ def test_ignore_different_scalar_coords(products): assert not cube.coords(dimensions=()) -@pytest.mark.parametrize('products', PRODUCTS) +@pytest.mark.parametrize("products", PRODUCTS) def test_do_not_ignore_different_scalar_coords(products): """Do not ignore different scalar coords if desired.""" - stat = 'mean' + stat = "mean" output = PreprocessorFile() - output_products = {'': {stat: output}} + output_products = {"": {stat: output}} kwargs = { - 'statistics': [stat], - 'span': 'full', - 'output_products': output_products, - 'keep_input_datasets': False, + "statistics": [stat], + "span": "full", + "output_products": output_products, + "keep_input_datasets": False, } msg = ( @@ -961,50 +1008,52 @@ def test_daily_inconsistent_calendars(): start = date2num(datetime(1852, 1, 1), ref_standard) # 1852 is a leap year, and include 1 extra day at the end - leapdates = cftime.num2date(start + np.arange(367), - ref_standard.name, ref_standard.calendar) + leapdates = cftime.num2date( + start + np.arange(367), ref_standard.name, ref_standard.calendar + ) - noleapdates = cftime.num2date(start + np.arange(365), - ref_noleap.name, ref_noleap.calendar) + noleapdates = cftime.num2date( + start + np.arange(365), ref_noleap.name, ref_noleap.calendar + ) leapcube = generate_cube_from_dates( leapdates, - calendar='standard', - offset='days since 1850-01-01', + calendar="standard", + offset="days since 1850-01-01", fill_val=1, ) noleapcube = generate_cube_from_dates( noleapdates, - calendar='noleap', - offset='days since 1850-01-01', + calendar="noleap", + offset="days since 1850-01-01", fill_val=3, ) cubes = [leapcube, noleapcube] # span=full - aligned_cubes = mm._align_time_coord(cubes, span='full') + aligned_cubes = mm._align_time_coord(cubes, span="full") for cube in aligned_cubes: - assert cube.coord('time').units.calendar in ("standard", "gregorian") - assert cube.shape == (367, ) - assert cube[59].coord('time').points == 789 # 29 Feb 1852 + assert cube.coord("time").units.calendar in ("standard", "gregorian") + assert cube.shape == (367,) + assert cube[59].coord("time").points == 789 # 
     np.ma.is_masked(aligned_cubes[1][366].data)  # outside original range

-    result = multi_model_statistics(cubes, span="full", statistics=['mean'])
-    result_cube = result['mean']
+    result = multi_model_statistics(cubes, span="full", statistics=["mean"])
+    result_cube = result["mean"]
     assert result_cube[59].data == 2  # looked up nearest neighbour
     assert result_cube[366].data == 1  # outside original range

     # span=overlap
-    aligned_cubes = mm._align_time_coord(cubes, span='overlap')
+    aligned_cubes = mm._align_time_coord(cubes, span="overlap")
     for cube in aligned_cubes:
-        assert cube.coord('time').units.calendar in ("standard", "gregorian")
-        assert cube.shape == (365, )
-        assert cube[59].coord('time').points == 790  # 1 March 1852
+        assert cube.coord("time").units.calendar in ("standard", "gregorian")
+        assert cube.shape == (365,)
+        assert cube[59].coord("time").points == 790  # 1 March 1852

-    result = multi_model_statistics(cubes, span="overlap", statistics=['mean'])
-    result_cube = result['mean']
+    result = multi_model_statistics(cubes, span="overlap", statistics=["mean"])
+    result_cube = result["mean"]
     assert result_cube[59].data == 2

@@ -1016,65 +1065,73 @@ def test_remove_fx_variables():
     add_ancillary_variable(cube1, fx_cube)
     cube2 = generate_cube_from_dates("monthly", fill_val=9)

-    result = mm.multi_model_statistics([cube1, cube2],
-                                       statistics=['mean'],
-                                       span='full')
-    assert result['mean'].ancillary_variables() == []
+    result = mm.multi_model_statistics(
+        [cube1, cube2], statistics=["mean"], span="full"
+    )
+    assert result["mean"].ancillary_variables() == []


 def test_no_warn_model_dim_non_contiguous(recwarn):
     """Test that no warning is raised that model dim is non-contiguous."""
     coord = DimCoord(
         [0.5, 1.5],
-        bounds=[[0, 1.], [1., 2.]],
-        standard_name='time',
-        units='days since 1850-01-01',
+        bounds=[[0, 1.0], [1.0, 2.0]],
+        standard_name="time",
+        units="days since 1850-01-01",
     )
     cube1 = iris.cube.Cube([1, 1], dim_coords_and_dims=[(coord, 0)])
     cube2 = iris.cube.Cube([2, 2], dim_coords_and_dims=[(coord, 0)])
     cubes = [cube1, cube2]

-    multi_model_statistics(cubes, span="overlap", statistics=['mean'])
-    msg = ("Collapsing a non-contiguous coordinate. "
-           "Metadata may not be fully descriptive for 'multi-model'.")
+    multi_model_statistics(cubes, span="overlap", statistics=["mean"])
+    msg = (
+        "Collapsing a non-contiguous coordinate. "
+        "Metadata may not be fully descriptive for 'multi-model'."
+    )
     for warning in recwarn:
         assert str(warning.message) != msg


 def test_map_to_new_time_int_coords():
     """Test ``_map_to_new_time`` with integer time coords."""
-    cube = generate_cube_from_dates('yearly')
-    iris.coord_categorisation.add_year(cube, 'time')
-    decade_coord = AuxCoord([1850, 1850, 1850], bounds=[[1845, 1855]] * 3,
-                            long_name='decade')
+    cube = generate_cube_from_dates("yearly")
+    iris.coord_categorisation.add_year(cube, "time")
+    decade_coord = AuxCoord(
+        [1850, 1850, 1850], bounds=[[1845, 1855]] * 3, long_name="decade"
+    )
     cube.add_aux_coord(decade_coord, 0)
     target_points = [200.0, 500.0, 1000.0]

     out_cube = mm._map_to_new_time(cube, target_points)

-    assert_array_allclose(out_cube.data,
-                          np.ma.masked_invalid([1.0, 1.0, np.nan]))
-    assert_array_allclose(out_cube.coord('time').points, target_points)
-    assert_array_allclose(out_cube.coord('year').points,
-                          np.ma.masked_invalid([1850, 1851, np.nan]))
-    assert_array_allclose(out_cube.coord('decade').points,
-                          np.ma.masked_invalid([1850, 1850, np.nan]))
-    assert out_cube.coord('year').bounds is None
-    assert out_cube.coord('decade').bounds is None
-    assert np.issubdtype(out_cube.coord('year').dtype, np.integer)
-    assert np.issubdtype(out_cube.coord('decade').dtype, np.integer)
+    assert_array_allclose(
+        out_cube.data, np.ma.masked_invalid([1.0, 1.0, np.nan])
+    )
+    assert_array_allclose(out_cube.coord("time").points, target_points)
+    assert_array_allclose(
+        out_cube.coord("year").points,
+        np.ma.masked_invalid([1850, 1851, np.nan]),
+    )
+    assert_array_allclose(
+        out_cube.coord("decade").points,
+        np.ma.masked_invalid([1850, 1850, np.nan]),
+    )
+    assert out_cube.coord("year").bounds is None
+    assert out_cube.coord("decade").bounds is None
+    assert np.issubdtype(out_cube.coord("year").dtype, np.integer)
+    assert np.issubdtype(out_cube.coord("decade").dtype, np.integer)


 def test_arbitrary_dims_5d(cubes_5d):
     """Test ``multi_model_statistics`` with 5D cubes."""
     stat_cubes = multi_model_statistics(
         cubes_5d,
-        span='overlap',
-        statistics=['sum'],
+        span="overlap",
+        statistics=["sum"],
     )

     assert len(stat_cubes) == 1
-    assert 'sum' in stat_cubes
-    stat_cube = stat_cubes['sum']
+    assert "sum" in stat_cubes
+    stat_cube = stat_cubes["sum"]
     assert stat_cube.shape == (1, 1, 1, 1, 1)
     assert_array_allclose(
         stat_cube.data,
@@ -1086,12 +1143,12 @@ def test_arbitrary_dims_2d(cubes_with_arbitrary_dimensions):
     """Test ``multi_model_statistics`` with arbitrary dimensions."""
     stat_cubes = multi_model_statistics(
         cubes_with_arbitrary_dimensions,
-        span='overlap',
-        statistics=['sum'],
+        span="overlap",
+        statistics=["sum"],
     )

     assert len(stat_cubes) == 1
-    assert 'sum' in stat_cubes
-    stat_cube = stat_cubes['sum']
+    assert "sum" in stat_cubes
+    stat_cube = stat_cubes["sum"]
     assert stat_cube.shape == (3, 1)
     assert_array_allclose(stat_cube.data, np.ma.array([[0.0], [6.0], [3.0]]))

@@ -1101,12 +1158,12 @@ def test_arbitrary_dims_1d_1(cubes_with_arbitrary_dimensions):
     """Test ``multi_model_statistics`` with arbitrary dimensions."""
     cubes = [cube[0] for cube in cubes_with_arbitrary_dimensions]
     stat_cubes = multi_model_statistics(
         cubes,
-        span='overlap',
-        statistics=['sum'],
+        span="overlap",
+        statistics=["sum"],
     )

     assert len(stat_cubes) == 1
-    assert 'sum' in stat_cubes
-    stat_cube = stat_cubes['sum']
+    assert "sum" in stat_cubes
+    stat_cube = stat_cubes["sum"]
     assert stat_cube.shape == (1,)
     assert_array_allclose(stat_cube.data, np.ma.array([0.0]))

@@ -1116,12 +1173,12 @@ def test_arbitrary_dims_1d_3(cubes_with_arbitrary_dimensions):
     """Test ``multi_model_statistics`` with arbitrary dimensions."""
     cubes = [cube[:, 0] for cube in cubes_with_arbitrary_dimensions]
     stat_cubes = multi_model_statistics(
         cubes,
-        span='overlap',
-        statistics=['sum'],
+        span="overlap",
+        statistics=["sum"],
     )

     assert len(stat_cubes) == 1
-    assert 'sum' in stat_cubes
-    stat_cube = stat_cubes['sum']
+    assert "sum" in stat_cubes
+    stat_cube = stat_cubes["sum"]
     assert stat_cube.shape == (3,)
     assert_array_allclose(stat_cube.data, np.ma.array([0.0, 6.0, 3.0]))

@@ -1131,53 +1188,54 @@ def test_arbitrary_dims_0d(cubes_with_arbitrary_dimensions):
     """Test ``multi_model_statistics`` with arbitrary dimensions."""
     cubes = [cube[0, 0] for cube in cubes_with_arbitrary_dimensions]
     stat_cubes = multi_model_statistics(
         cubes,
-        span='overlap',
-        statistics=['sum'],
+        span="overlap",
+        statistics=["sum"],
     )

     assert len(stat_cubes) == 1
-    assert 'sum' in stat_cubes
-    stat_cube = stat_cubes['sum']
+    assert "sum" in stat_cubes
+    stat_cube = stat_cubes["sum"]
     assert stat_cube.shape == ()
     assert_array_allclose(stat_cube.data, np.ma.array(0.0))


-@pytest.mark.parametrize('equal_names', EQUAL_NAMES)
+@pytest.mark.parametrize("equal_names", EQUAL_NAMES)
 def test_preserve_equal_name_cubes(equal_names):
     """Test ``multi_model_statistics`` with equal-name cubes."""
-    all_names = ['var_name', 'standard_name', 'long_name']
-    cubes = CubeList(generate_cube_from_dates('monthly') for _ in range(5))
+    all_names = ["var_name", "standard_name", "long_name"]
+    cubes = CubeList(generate_cube_from_dates("monthly") for _ in range(5))

     # Prepare names of input cubes accordingly
-    for (idx, cube) in enumerate(cubes):
+    for idx, cube in enumerate(cubes):
         for name in all_names:
             if name in equal_names or idx != 0:
-                setattr(cube, name, 'air_pressure')
+                setattr(cube, name, "air_pressure")
             else:  # Different value for first cube if non-equal name
                 setattr(cube, name, None)

-    stat_cubes = multi_model_statistics(cubes, span='overlap',
-                                        statistics=['sum'])
+    stat_cubes = multi_model_statistics(
+        cubes, span="overlap", statistics=["sum"]
+    )

     assert len(stat_cubes) == 1
-    stat_cube = stat_cubes['sum']
+    stat_cube = stat_cubes["sum"]
     assert_array_allclose(stat_cube.data, np.ma.array([5.0, 5.0, 5.0]))
     for name in all_names:
-        assert getattr(stat_cube, name) == 'air_pressure'
+        assert getattr(stat_cube, name) == "air_pressure"


-@pytest.mark.parametrize('equal_names', EQUAL_NAMES)
+@pytest.mark.parametrize("equal_names", EQUAL_NAMES)
 def test_equal_name_different_units_cubes(equal_names):
     """Test ``multi_model_statistics`` with equal-name non-equal unit cubes."""
-    all_names = ['var_name', 'standard_name', 'long_name']
-    cubes = CubeList(generate_cube_from_dates('monthly') for _ in range(5))
+    all_names = ["var_name", "standard_name", "long_name"]
+    cubes = CubeList(generate_cube_from_dates("monthly") for _ in range(5))

     # Prepare names of input cubes accordingly
-    cubes[0].units = 'kg'
-    for (idx, cube) in enumerate(cubes):
+    cubes[0].units = "kg"
+    for idx, cube in enumerate(cubes):
         for name in all_names:
             if name in equal_names or idx != 0:
-                setattr(cube, name, 'air_pressure')
+                setattr(cube, name, "air_pressure")
             else:  # Different value for first cube if non-equal name
                 setattr(cube, name, None)

@@ -1186,68 +1244,69 @@ def test_equal_name_different_units_cubes(equal_names):
         "array"
     )
     with pytest.raises(ValueError, match=msg):
-        multi_model_statistics(cubes, span='overlap', statistics=['sum'])
+        multi_model_statistics(cubes, span="overlap", statistics=["sum"])


 def test_equalise_var_metadata():
     """Test ``_equalise_var_metadata``."""
     cubes = CubeList(
-        generate_cube_from_dates('monthly', var_name='x') for _ in range(5)
+        generate_cube_from_dates("monthly", var_name="x") for _ in range(5)
     )

     # Prepare names of input cubes accordingly
-    cubes[0].units = 'kg'
-    cubes[0].standard_name = 'air_pressure'
-    cubes[0].long_name = 'b'
-    cubes[1].units = 'kg'
-    cubes[1].standard_name = 'air_pressure'
-    cubes[1].long_name = 'a'
-    cubes[1].var_name = 'y'
-    cubes[2].units = 'kg'
-    cubes[3].units = 'm'
-    cubes[3].long_name = 'X'
-    cubes[4].units = 'm'
-    cubes[4].long_name = 'X'
+    cubes[0].units = "kg"
+    cubes[0].standard_name = "air_pressure"
+    cubes[0].long_name = "b"
+    cubes[1].units = "kg"
+    cubes[1].standard_name = "air_pressure"
+    cubes[1].long_name = "a"
+    cubes[1].var_name = "y"
+    cubes[2].units = "kg"
+    cubes[3].units = "m"
+    cubes[3].long_name = "X"
+    cubes[4].units = "m"
+    cubes[4].long_name = "X"

     mm._equalise_var_metadata(cubes)

-    assert cubes[0].standard_name == 'air_pressure'
-    assert cubes[0].long_name == 'a'
-    assert cubes[0].var_name == 'x'
-    assert cubes[0].units == 'kg'
-    assert cubes[1].standard_name == 'air_pressure'
-    assert cubes[1].long_name == 'a'
-    assert cubes[1].var_name == 'x'
-    assert cubes[1].units == 'kg'
+    assert cubes[0].standard_name == "air_pressure"
+    assert cubes[0].long_name == "a"
+    assert cubes[0].var_name == "x"
+    assert cubes[0].units == "kg"
+    assert cubes[1].standard_name == "air_pressure"
+    assert cubes[1].long_name == "a"
+    assert cubes[1].var_name == "x"
+    assert cubes[1].units == "kg"
     assert cubes[2].standard_name is None
     assert cubes[2].long_name is None
-    assert cubes[2].var_name == 'x'
-    assert cubes[2].units == 'kg'
+    assert cubes[2].var_name == "x"
+    assert cubes[2].units == "kg"
     assert cubes[3].standard_name is None
-    assert cubes[3].long_name == 'X'
-    assert cubes[3].var_name == 'x'
-    assert cubes[3].units == 'm'
+    assert cubes[3].long_name == "X"
+    assert cubes[3].var_name == "x"
+    assert cubes[3].units == "m"
     assert cubes[4].standard_name is None
-    assert cubes[4].long_name == 'X'
-    assert cubes[4].var_name == 'x'
-    assert cubes[4].units == 'm'
+    assert cubes[4].long_name == "X"
+    assert cubes[4].var_name == "x"
+    assert cubes[4].units == "m"


 def test_preserve_equal_coordinates():
     """Test ``multi_model_statistics`` with equal input coordinates."""
     cubes = get_cube_for_equal_coords_test(5)
-    stat_cubes = multi_model_statistics(cubes, span='overlap',
-                                        statistics=['sum'])
+    stat_cubes = multi_model_statistics(
+        cubes, span="overlap", statistics=["sum"]
+    )

     assert len(stat_cubes) == 1
-    stat_cube = stat_cubes['sum']
+    stat_cube = stat_cubes["sum"]
     assert_array_allclose(stat_cube.data, np.ma.array([5.0, 5.0, 5.0]))

     # The equal coordinate 'year' was not changed
-    assert stat_cube.coord('year').var_name == 'year'
-    assert stat_cube.coord('year').standard_name is None
-    assert stat_cube.coord('year').long_name == 'year'
-    assert stat_cube.coord('year').attributes == {'test': 1}
+    assert stat_cube.coord("year").var_name == "year"
+    assert stat_cube.coord("year").standard_name is None
+    assert stat_cube.coord("year").long_name == "year"
+    assert stat_cube.coord("year").attributes == {"test": 1}


 def test_preserve_non_equal_coordinates():
@@ -1256,52 +1315,54 @@ def test_preserve_non_equal_coordinates():

     # Use "circular" attribute for one cube to check that it is set to "False"
     # for each cube
-    cubes[2].coord('time').circular = False
+    cubes[2].coord("time").circular = False

-    stat_cubes = multi_model_statistics(cubes, span='overlap',
-                                        statistics=['sum'])
+    stat_cubes = multi_model_statistics(
+        cubes, span="overlap", statistics=["sum"]
+    )

     assert len(stat_cubes) == 1
-    stat_cube = stat_cubes['sum']
+    stat_cube = stat_cubes["sum"]
     assert_array_allclose(stat_cube.data, np.ma.array([5.0, 5.0, 5.0]))

     # The attributes and circular property of the non-equal coordinate 'time'
     # (due to differing circular) have been removed
-    assert stat_cube.coord('time').attributes == {}
-    assert stat_cube.coord('time').circular is False
+    assert stat_cube.coord("time").attributes == {}
+    assert stat_cube.coord("time").circular is False

     # The long_name and attributes of the non-equal coordinate 'x' have been
     # removed
-    assert stat_cube.coord('x').var_name == 'x'
-    assert stat_cube.coord('x').standard_name is None
-    assert stat_cube.coord('x').long_name is None
-    assert stat_cube.coord('x').attributes == {}
+    assert stat_cube.coord("x").var_name == "x"
+    assert stat_cube.coord("x").standard_name is None
+    assert stat_cube.coord("x").long_name is None
+    assert stat_cube.coord("x").attributes == {}


-@pytest.mark.parametrize('equal_names', EQUAL_NAMES)
+@pytest.mark.parametrize("equal_names", EQUAL_NAMES)
 def test_preserve_equal_name_coordinates(equal_names):
     """Test ``multi_model_statistics`` with equal-name coordinates."""
-    all_names = ['var_name', 'standard_name', 'long_name']
-    cubes = CubeList(generate_cube_from_dates('monthly') for _ in range(5))
+    all_names = ["var_name", "standard_name", "long_name"]
+    cubes = CubeList(generate_cube_from_dates("monthly") for _ in range(5))

     # Prepare names of coordinates of input cubes accordingly
-    for (idx, cube) in enumerate(cubes):
-        time_coord = cube.coord('time')
+    for idx, cube in enumerate(cubes):
+        time_coord = cube.coord("time")
         for name in all_names:
             if name in equal_names or idx != 0:
-                setattr(time_coord, name, 'time')
+                setattr(time_coord, name, "time")
             else:  # Different value for first cube if non-equal name
                 setattr(time_coord, name, None)

         # Use different coordinate attributes for each cube so the different
         # coordinates are not exactly identical
-        time_coord.attributes = {'test': idx}
+        time_coord.attributes = {"test": idx}

-    stat_cubes = multi_model_statistics(cubes, span='overlap',
-                                        statistics=['sum'])
+    stat_cubes = multi_model_statistics(
+        cubes, span="overlap", statistics=["sum"]
+    )

     assert len(stat_cubes) == 1
-    stat_cube = stat_cubes['sum']
+    stat_cube = stat_cubes["sum"]
     assert_array_allclose(stat_cube.data, np.ma.array([5.0, 5.0, 5.0]))

     assert len(stat_cube.coords()) == 1
@@ -1309,19 +1370,19 @@ def test_preserve_equal_name_coordinates(equal_names):

     for name in all_names:
         if name in equal_names:
-            assert getattr(time_coord, name) == 'time'
+            assert getattr(time_coord, name) == "time"
         else:
             assert getattr(time_coord, name) is None
-    assert time_coord.name() == 'time'
-    assert time_coord.units == 'days since 1850-01-01'
+    assert time_coord.name() == "time"
+    assert time_coord.units == "days since 1850-01-01"
     assert time_coord.attributes == {}


 def test_ignore_equal_coordinates():
     """Test ``_get_equal_coord_names_metadata``."""
-    cubes = CubeList(generate_cube_from_dates('monthly') for _ in range(5))
+    cubes = CubeList(generate_cube_from_dates("monthly") for _ in range(5))

-    equal_coords_metadata = [cubes[0].coord('time').metadata]
+    equal_coords_metadata = [cubes[0].coord("time").metadata]
     equal_names_metadata = mm._get_equal_coord_names_metadata(
         cubes,
         equal_coords_metadata,
@@ -1332,24 +1393,20 @@ def test_ignore_equal_coordinates():
     assert not equal_names_metadata


-@pytest.mark.parametrize('cube_idx', [0, 1, 2, 3, 4])
+@pytest.mark.parametrize("cube_idx", [0, 1, 2, 3, 4])
 def test_ignore_duplicate_equal_name_coordinates(cube_idx):
     """Test ``_get_equal_coord_names_metadata``."""
-    cubes = CubeList(generate_cube_from_dates('monthly') for _ in range(5))
+    cubes = CubeList(generate_cube_from_dates("monthly") for _ in range(5))

     # Add duplicate scalar coordinate
     d_coord_0 = AuxCoord(
-        0.0,
-        var_name='d',
-        long_name='d',
-        units='m',
-        attributes={'test': 1}
+        0.0, var_name="d", long_name="d", units="m", attributes={"test": 1}
     )
     d_coord_1 = AuxCoord(
         1.0,
-        var_name='d',
-        long_name='d',
-        units='m',
+        var_name="d",
+        long_name="d",
+        units="m",
     )
     for cube in cubes:
         cube.add_aux_coord(d_coord_0, ())
@@ -1360,30 +1417,30 @@

     # The equal_names_metadata dict should only contain the equal 'time'
     # dimension, not the duplicate dimension
     assert len(equal_names_metadata) == 1
-    assert 'time' in equal_names_metadata
+    assert "time" in equal_names_metadata


 def test_ignore_non_existing_coordinates():
     """Test ``_get_equal_coord_names_metadata``."""
-    cubes = CubeList(generate_cube_from_dates('monthly') for _ in range(5))
+    cubes = CubeList(generate_cube_from_dates("monthly") for _ in range(5))

     # Add coordinate only for first cube
-    cubes[0].add_aux_coord(AuxCoord(0.0, long_name='x'), ())
+    cubes[0].add_aux_coord(AuxCoord(0.0, long_name="x"), ())

     equal_names_metadata = mm._get_equal_coord_names_metadata(cubes, [])

     # The equal_names_metadata dict should only contain the equal 'time'
     # dimension, not the coordinate that only exists for the first cube
     assert len(equal_names_metadata) == 1
-    assert 'time' in equal_names_metadata
+    assert "time" in equal_names_metadata


 def test_ignore_coordinates_different_units():
     """Test ``_get_equal_coord_names_metadata``."""
-    cubes = CubeList(generate_cube_from_dates('monthly') for _ in range(5))
+    cubes = CubeList(generate_cube_from_dates("monthly") for _ in range(5))

     # Adapt time units of one cube
-    cubes[3].coord('time').units = 'days since 1900-01-01'
+    cubes[3].coord("time").units = "days since 1900-01-01"

     equal_names_metadata = mm._get_equal_coord_names_metadata(cubes, [])

@@ -1396,7 +1453,7 @@ def test_empty_input_multi_model_statistics():
     """Check that ``multi_model_statistics`` fails with empty input."""
     msg = "Cannot perform multicube statistics for an empty list of cubes"
     with pytest.raises(ValueError, match=msg):
-        mm.multi_model_statistics([], span='full', statistics=['mean'])
+        mm.multi_model_statistics([], span="full", statistics=["mean"])


 def test_empty_input_ensemble_statistics():
@@ -1404,38 +1461,38 @@ def test_empty_input_ensemble_statistics():
     msg = "Cannot perform multicube statistics for an empty list of cubes"
     with pytest.raises(ValueError, match=msg):
         mm.ensemble_statistics(
-            [], span='full', statistics=['mean'], output_products=[]
+            [], span="full", statistics=["mean"], output_products=[]
         )


 STATS = [
-    'mean',
-    {'operator': 'median'},
-    'min',
-    'max',
-    'p42.314',
-    {'operator': 'percentile', 'percent': 42.314},
-    'std_dev',
+    "mean",
+    {"operator": "median"},
+    "min",
+    "max",
+    "p42.314",
+    {"operator": "percentile", "percent": 42.314},
+    "std_dev",
 ]


-@pytest.mark.parametrize('stat', STATS)
+@pytest.mark.parametrize("stat", STATS)
 @pytest.mark.parametrize(
-    'products',
+    "products",
     [
-        CubeList([generate_cube_from_dates('monthly')]),
-        set([PreprocessorFile(generate_cube_from_dates('monthly'))]),
+        CubeList([generate_cube_from_dates("monthly")]),
+        set([PreprocessorFile(generate_cube_from_dates("monthly"))]),
     ],
 )
 def test_single_input_multi_model_statistics(products, stat):
     """Check that ``multi_model_statistics`` works with a single cube."""
     output = PreprocessorFile()
-    output_products = {'': {mm._get_stat_identifier(stat): output}}
+    output_products = {"": {mm._get_stat_identifier(stat): output}}
     kwargs = {
-        'statistics': [stat],
-        'span': 'full',
-        'output_products': output_products,
-        'keep_input_datasets': False,
+        "statistics": [stat],
+        "span": "full",
+        "output_products": output_products,
+        "keep_input_datasets": False,
     }

     results = mm.multi_model_statistics(products, **kwargs)

@@ -1450,7 +1507,7 @@ def test_single_input_multi_model_statistics(products, stat):
     assert len(result.cubes) == 1

     cube = result.cubes[0]
-    if stat == 'std_dev':
+    if stat == "std_dev":
         assert_array_allclose(
             cube.data, np.ma.masked_invalid([np.nan, np.nan, np.nan])
         )
@@ -1458,31 +1515,31 @@ def test_single_input_multi_model_statistics(products, stat):
         assert_array_allclose(cube.data, np.ma.array([1.0, 1.0, 1.0]))


-@pytest.mark.parametrize('stat', STATS)
+@pytest.mark.parametrize("stat", STATS)
 @pytest.mark.parametrize(
-    'products',
+    "products",
     [
-        CubeList([generate_cube_from_dates('monthly')]),
-        {PreprocessorFile(generate_cube_from_dates('monthly'))},
+        CubeList([generate_cube_from_dates("monthly")]),
+        {PreprocessorFile(generate_cube_from_dates("monthly"))},
     ],
 )
 def test_single_input_ensemble_statistics(products, stat):
     """Check that ``ensemble_statistics`` works with a single cube."""
     stat_id = mm._get_stat_identifier(stat)

-    cube = generate_cube_from_dates('monthly')
+    cube = generate_cube_from_dates("monthly")
     attributes = {
-        'project': 'project',
-        'dataset': 'dataset',
-        'exp': 'exp',
-        'ensemble': '1',
+        "project": "project",
+        "dataset": "dataset",
+        "exp": "exp",
+        "ensemble": "1",
     }
     products = {PreprocessorFile(cube, attributes=attributes)}
     output = PreprocessorFile()
-    output_products = {'project_dataset_exp': {stat_id: output}}
+    output_products = {"project_dataset_exp": {stat_id: output}}
     kwargs = {
-        'statistics': [stat],
-        'output_products': output_products,
+        "statistics": [stat],
+        "output_products": output_products,
     }

     results = mm.ensemble_statistics(products, **kwargs)

@@ -1492,7 +1549,7 @@ def test_single_input_ensemble_statistics(products, stat):
     assert len(result.cubes) == 1

     cube = result.cubes[0]
-    if stat == 'std_dev':
+    if stat == "std_dev":
         assert_array_allclose(
             cube.data, np.ma.masked_invalid([np.nan, np.nan, np.nan])
         )
@@ -1502,36 +1559,42 @@ def test_operator_missing_in_stat():
     """Test no operator in stat dict."""
-    cubes = CubeList([generate_cube_from_dates('monthly')])
+    cubes = CubeList([generate_cube_from_dates("monthly")])
     msg = (
         "`statistic` given as dictionary, but missing required key `operator`"
     )
     with pytest.raises(ValueError) as exc:
-        mm.multi_model_statistics(cubes, 'overlap', [{'no': 'operator'}])
+        mm.multi_model_statistics(cubes, "overlap", [{"no": "operator"}])
     assert msg in str(exc)


 @pytest.mark.parametrize(
-    'statistic,output',
+    "statistic,output",
     [
-        ('mean', ('mean', {})),
-        ({'operator': 'mean'}, ('mean', {})),
-        ({'operator': 'mean', 'weights': False}, ('mean', {'weights': False})),
-        ('percentile', ('percentile', {})),
-        ({'operator': 'percentile', 'percent': 50},
-         ('percentile', {'percent': 50})),
-        ({'operator': 'wpercentile', 'weights': False},
-         ('wpercentile', {'weights': False})),
-        ({'operator': 'wpercentile', 'weights': False, 'percent': 5.0},
-         ('wpercentile', {'weights': False, 'percent': 5.0})),
-    ]
+        ("mean", ("mean", {})),
+        ({"operator": "mean"}, ("mean", {})),
+        ({"operator": "mean", "weights": False}, ("mean", {"weights": False})),
"mean", "weights": False}, ("mean", {"weights": False})), + ("percentile", ("percentile", {})), + ( + {"operator": "percentile", "percent": 50}, + ("percentile", {"percent": 50}), + ), + ( + {"operator": "wpercentile", "weights": False}, + ("wpercentile", {"weights": False}), + ), + ( + {"operator": "wpercentile", "weights": False, "percent": 5.0}, + ("wpercentile", {"weights": False, "percent": 5.0}), + ), + ], ) def test_get_operator_and_kwargs(statistic, output): """Test ``_get_operator_and_kwargs``.""" assert mm._get_operator_and_kwargs(statistic) == output -@pytest.mark.parametrize('statistic', [{}, {'no': 'op'}]) +@pytest.mark.parametrize("statistic", [{}, {"no": "op"}]) def test_get_operator_and_kwargs_operator_missing(statistic): """Test ``_get_operator_and_kwargs``.""" msg = ( @@ -1542,17 +1605,19 @@ def test_get_operator_and_kwargs_operator_missing(statistic): @pytest.mark.parametrize( - 'statistic,output', + "statistic,output", [ - ('mean', 'mean'), - ({'operator': 'mean'}, 'mean'), - ({'operator': 'mean', 'weights': False}, 'mean'), - ('percentile', 'percentile'), - ({'operator': 'percentile', 'percent': 50}, 'percentile50'), - ({'operator': 'wpercentile', 'weights': False}, 'wpercentile'), - ({'operator': 'wpercentile', 'weights': False, 'percent': 5.0}, - 'wpercentile5.0'), - ] + ("mean", "mean"), + ({"operator": "mean"}, "mean"), + ({"operator": "mean", "weights": False}, "mean"), + ("percentile", "percentile"), + ({"operator": "percentile", "percent": 50}, "percentile50"), + ({"operator": "wpercentile", "weights": False}, "wpercentile"), + ( + {"operator": "wpercentile", "weights": False, "percent": 5.0}, + "wpercentile5.0", + ), + ], ) def test_get_stat_identifier(statistic, output): """Test ``_get_stat_identifier``.""" diff --git a/tests/unit/preprocessor/_other/test_other.py b/tests/unit/preprocessor/_other/test_other.py index e5d6a871e1..a2237bfb6a 100644 --- a/tests/unit/preprocessor/_other/test_other.py +++ b/tests/unit/preprocessor/_other/test_other.py @@ -34,20 +34,22 @@ def test_clip(self): cube.add_dim_coord( iris.coords.DimCoord( np.arange(3), - standard_name='time', - units=Unit('days since 1950-01-01 00:00:00', - calendar='gregorian'), + standard_name="time", + units=Unit( + "days since 1950-01-01 00:00:00", calendar="gregorian" + ), ), 0, ) # Cube needs to be copied, since it is modified in-place and test cube # should not change. 
-        assert_array_equal(clip(cube.copy(), 0, None).data,
-                           np.array([0, 0, 10]))
-        assert_array_equal(clip(cube.copy(), None, 0).data,
-                           np.array([-10, 0, 0]))
-        assert_array_equal(clip(cube.copy(), -1, 2).data,
-                           np.array([-1, 0, 2]))
+        assert_array_equal(
+            clip(cube.copy(), 0, None).data, np.array([0, 0, 10])
+        )
+        assert_array_equal(
+            clip(cube.copy(), None, 0).data, np.array([-10, 0, 0])
+        )
+        assert_array_equal(clip(cube.copy(), -1, 2).data, np.array([-1, 0, 2]))
         # Masked cube TODO
         # No parameters specified
         with self.assertRaises(ValueError):
@@ -65,7 +67,7 @@ def cube():
     )
     cube_data = np.swapaxes(cube_data, 0, -1)
     cube = get_3d_cube(
-        cube_data, standard_name='air_temperature', var_name='tas', units='K'
+        cube_data, standard_name="air_temperature", var_name="tas", units="K"
    )
     return cube

@@ -73,30 +75,30 @@ def cube():

 def assert_metadata(cube, normalization=None):
     """Assert correct metadata."""
     assert cube.standard_name is None
-    if normalization == 'sum':
-        assert cube.long_name == 'Relative Frequency'
-        assert cube.var_name == 'relative_frequency_tas'
-        assert cube.units == '1'
-    elif normalization == 'integral':
-        assert cube.long_name == 'Density'
-        assert cube.var_name == 'density_tas'
-        assert cube.units == 'K-1'
+    if normalization == "sum":
+        assert cube.long_name == "Relative Frequency"
+        assert cube.var_name == "relative_frequency_tas"
+        assert cube.units == "1"
+    elif normalization == "integral":
+        assert cube.long_name == "Density"
+        assert cube.var_name == "density_tas"
+        assert cube.units == "K-1"
     else:
-        assert cube.long_name == 'Frequency'
-        assert cube.var_name == 'frequency_tas'
-        assert cube.units == '1'
+        assert cube.long_name == "Frequency"
+        assert cube.var_name == "frequency_tas"
+        assert cube.units == "1"
     assert cube.attributes == {}
     assert cube.cell_methods == ()
-    assert cube.coords('air_temperature')
-    bin_coord = cube.coord('air_temperature')
-    assert bin_coord.standard_name == 'air_temperature'
-    assert bin_coord.var_name == 'tas'
+    assert cube.coords("air_temperature")
+    bin_coord = cube.coord("air_temperature")
+    assert bin_coord.standard_name == "air_temperature"
+    assert bin_coord.var_name == "tas"
     assert bin_coord.long_name is None
-    assert bin_coord.units == 'K'
+    assert bin_coord.units == "K"
     assert bin_coord.attributes == {}


-@pytest.mark.parametrize('lazy', [False, True])
+@pytest.mark.parametrize("lazy", [False, True])
 def test_histogram_defaults(cube, lazy):
     """Test `histogram`."""
     if lazy:
@@ -117,7 +119,7 @@ def test_histogram_defaults(cube, lazy):
         result.data, [1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0]
     )
     np.testing.assert_allclose(result.data.mask, [False] * 10)
-    bin_coord = result.coord('air_temperature')
+    bin_coord = result.coord("air_temperature")
     bin_coord.shape == (10,)
     bin_coord.dtype == np.float64
     bin_coord.bounds_dtype == np.float64
@@ -142,9 +144,9 @@ def test_histogram_defaults(cube, lazy):
     )


-@pytest.mark.parametrize('normalization', [None, 'sum', 'integral'])
-@pytest.mark.parametrize('weights', [False, None])
-@pytest.mark.parametrize('lazy', [False, True])
+@pytest.mark.parametrize("normalization", [None, "sum", "integral"])
+@pytest.mark.parametrize("weights", [False, None])
+@pytest.mark.parametrize("lazy", [False, True])
 def test_histogram_over_time(cube, lazy, weights, normalization):
     """Test `histogram`."""
     if lazy:
@@ -153,7 +155,7 @@ def test_histogram_over_time(cube, lazy, weights, normalization):

     result = histogram(
         input_cube,
-        coords=['time'],
+        coords=["time"],
         bins=[4.5, 6.5, 8.5, 10.5],
         bin_range=(4.5, 10.5),
         weights=weights,
@@ -162,43 +164,50 @@ def test_histogram_over_time(cube, lazy, weights, normalization):

     assert input_cube == cube
     assert_metadata(result, normalization=normalization)
-    assert result.coord('latitude') == input_cube.coord('latitude')
-    assert result.coord('longitude') == input_cube.coord('longitude')
+    assert result.coord("latitude") == input_cube.coord("latitude")
+    assert result.coord("longitude") == input_cube.coord("longitude")
     assert result.shape == (2, 2, 3)
     if lazy:
         assert result.has_lazy_data()
     else:
         assert not result.has_lazy_data()
     assert result.dtype == np.float32
-    if normalization == 'integral':
-        expected_data = np.ma.masked_invalid([
-            [[np.nan, np.nan, np.nan], [0.5, 0.0, 0.0]],
-            [[np.nan, np.nan, np.nan], [0.25, 0.25, 0.0]],
-        ])
-    elif normalization == 'sum':
-        expected_data = np.ma.masked_invalid([
-            [[np.nan, np.nan, np.nan], [1.0, 0.0, 0.0]],
-            [[np.nan, np.nan, np.nan], [0.5, 0.5, 0.0]],
-        ])
+    if normalization == "integral":
+        expected_data = np.ma.masked_invalid(
+            [
+                [[np.nan, np.nan, np.nan], [0.5, 0.0, 0.0]],
+                [[np.nan, np.nan, np.nan], [0.25, 0.25, 0.0]],
+            ]
+        )
+    elif normalization == "sum":
+        expected_data = np.ma.masked_invalid(
+            [
+                [[np.nan, np.nan, np.nan], [1.0, 0.0, 0.0]],
+                [[np.nan, np.nan, np.nan], [0.5, 0.5, 0.0]],
+            ]
+        )
     else:
-        expected_data = np.ma.masked_invalid([
-            [[np.nan, np.nan, np.nan], [1.0, 0.0, 0.0]],
-            [[np.nan, np.nan, np.nan], [1.0, 1.0, 0.0]],
-        ])
+        expected_data = np.ma.masked_invalid(
+            [
+                [[np.nan, np.nan, np.nan], [1.0, 0.0, 0.0]],
+                [[np.nan, np.nan, np.nan], [1.0, 1.0, 0.0]],
+            ]
+        )
     np.testing.assert_allclose(result.data, expected_data)
     np.testing.assert_allclose(result.data.mask, expected_data.mask)
-    bin_coord = result.coord('air_temperature')
+    bin_coord = result.coord("air_temperature")
     bin_coord.shape == (10,)
     bin_coord.dtype == np.float64
     bin_coord.bounds_dtype == np.float64
     np.testing.assert_allclose(bin_coord.points, [5.5, 7.5, 9.5])
     np.testing.assert_allclose(
-        bin_coord.bounds, [[4.5, 6.5], [6.5, 8.5], [8.5, 10.5]],
+        bin_coord.bounds,
+        [[4.5, 6.5], [6.5, 8.5], [8.5, 10.5]],
     )


-@pytest.mark.parametrize('normalization', [None, 'sum', 'integral'])
-@pytest.mark.parametrize('lazy', [False, True])
+@pytest.mark.parametrize("normalization", [None, "sum", "integral"])
+@pytest.mark.parametrize("lazy", [False, True])
 def test_histogram_fully_masked(cube, lazy, normalization):
     """Test `histogram`."""
     cube.data = np.ma.masked_all((2, 2, 2), dtype=np.float32)
@@ -214,9 +223,14 @@ def test_histogram_fully_masked(cube, lazy, normalization):
     else:
         assert not result.has_lazy_data()
     assert result.dtype == np.float32
-    np.testing.assert_allclose(result.data, np.ma.masked_all(10,))
+    np.testing.assert_allclose(
+        result.data,
+        np.ma.masked_all(
+            10,
+        ),
+    )
     np.testing.assert_equal(result.data.mask, [True] * 10)
-    bin_coord = result.coord('air_temperature')
+    bin_coord = result.coord("air_temperature")
     bin_coord.shape == (10,)
     bin_coord.dtype == np.float64
     bin_coord.bounds_dtype == np.float64
@@ -241,16 +255,16 @@ def test_histogram_fully_masked(cube, lazy, normalization):
     )


-@pytest.mark.parametrize('normalization', [None, 'sum', 'integral'])
+@pytest.mark.parametrize("normalization", [None, "sum", "integral"])
 @pytest.mark.parametrize(
-    'weights',
+    "weights",
     [
         True,
         np.array([[[6, 6], [6, 6]], [[2, 2], [2, 2]]]),
         da.array([[[6, 6], [6, 6]], [[2, 2], [2, 2]]]),
-    ]
+    ],
 )
-@pytest.mark.parametrize('lazy', [False, True])
+@pytest.mark.parametrize("lazy", [False, True])
 def test_histogram_weights(cube, lazy, weights, normalization):
     """Test `histogram`."""
     if lazy:
@@ -259,7 +273,7 @@ def test_histogram_weights(cube, lazy, weights, normalization):

     result = histogram(
         input_cube,
-        coords=['time', 'longitude'],
+        coords=["time", "longitude"],
         bins=[0.0, 2.0, 4.0, 8.0],
         weights=weights,
         normalization=normalization,
@@ -267,18 +281,18 @@ def test_histogram_weights(cube, lazy, weights, normalization):

     assert input_cube == cube
     assert_metadata(result, normalization=normalization)
-    assert result.coord('latitude') == input_cube.coord('latitude')
+    assert result.coord("latitude") == input_cube.coord("latitude")
     assert result.shape == (2, 3)
     if lazy:
         assert result.has_lazy_data()
     else:
         assert not result.has_lazy_data()
     assert result.dtype == np.float32
-    if normalization == 'integral':
+    if normalization == "integral":
         expected_data = np.ma.masked_invalid(
             [[0.25, 0.0, 0.125], [0.0, 0.0, 0.25]]
         )
-    elif normalization == 'sum':
+    elif normalization == "sum":
         expected_data = np.ma.masked_invalid(
             [[0.5, 0.0, 0.5], [0.0, 0.0, 1.0]]
         )
@@ -288,36 +302,37 @@ def test_histogram_weights(cube, lazy, weights, normalization):
     )
     np.testing.assert_allclose(result.data, expected_data)
     np.testing.assert_allclose(result.data.mask, expected_data.mask)
-    bin_coord = result.coord('air_temperature')
+    bin_coord = result.coord("air_temperature")
     bin_coord.shape == (10,)
     bin_coord.dtype == np.float64
     bin_coord.bounds_dtype == np.float64
     np.testing.assert_allclose(bin_coord.points, [1.0, 3.0, 6.0])
     np.testing.assert_allclose(
-        bin_coord.bounds, [[0.0, 2.0], [2.0, 4.0], [4.0, 8.0]],
+        bin_coord.bounds,
+        [[0.0, 2.0], [2.0, 4.0], [4.0, 8.0]],
     )


 @pytest.fixture
 def cube_with_rich_metadata():
     """Cube with rich metadata."""
-    time = DimCoord([0], bounds=[[-1, 1]], var_name='time', units='s')
-    sigma = DimCoord([0], var_name='sigma', units='1')
-    lat = DimCoord([0], var_name='lat', units='degrees')
-    lon = DimCoord([0], var_name='lon', units='degrees')
-    ptop = AuxCoord(0, var_name='ptop', units='Pa')
-    psur = AuxCoord([[0]], var_name='ps', units='Pa')
+    time = DimCoord([0], bounds=[[-1, 1]], var_name="time", units="s")
+    sigma = DimCoord([0], var_name="sigma", units="1")
+    lat = DimCoord([0], var_name="lat", units="degrees")
+    lon = DimCoord([0], var_name="lon", units="degrees")
+    ptop = AuxCoord(0, var_name="ptop", units="Pa")
+    psur = AuxCoord([[0]], var_name="ps", units="Pa")
     sigma_factory = AtmosphereSigmaFactory(ptop, sigma, psur)
-    cell_area = CellMeasure([[1]], var_name='area', units='m2', measure='area')
-    anc = AncillaryVariable([0], var_name='anc')
+    cell_area = CellMeasure([[1]], var_name="area", units="m2", measure="area")
+    anc = AncillaryVariable([0], var_name="anc")
     cube = Cube(
         np.ones((1, 1, 1, 1), dtype=np.float32),
         standard_name=None,
-        long_name='Air Temperature',
+        long_name="Air Temperature",
         var_name=None,
-        units='K',
-        attributes={'test': '1'},
-        cell_methods=(CellMethod('point', 'sigma'),),
+        units="K",
+        attributes={"test": "1"},
+        cell_methods=(CellMethod("point", "sigma"),),
         dim_coords_and_dims=[(time, 0), (sigma, 1), (lat, 2), (lon, 3)],
         aux_coords_and_dims=[(ptop, ()), (psur, (2, 3))],
         aux_factories=[sigma_factory],
@@ -327,9 +342,9 @@ def cube_with_rich_metadata():
     return cube


-@pytest.mark.parametrize('normalization', [None, 'sum', 'integral'])
-@pytest.mark.parametrize('weights', [True, False, None])
-@pytest.mark.parametrize('lazy', [False, True])
+@pytest.mark.parametrize("normalization", [None, "sum", "integral"])
+@pytest.mark.parametrize("weights", [True, False, None])
+@pytest.mark.parametrize("lazy", [False, True])
 def test_histogram_metadata(
     cube_with_rich_metadata, lazy, weights, normalization
 ):
@@ -340,7 +355,7 @@ def test_histogram_metadata(

     result = histogram(
         input_cube,
-        coords=['time'],
+        coords=["time"],
         bins=[0.0, 1.0, 2.0],
         bin_range=(0.0, 2.0),
         weights=weights,
@@ -351,51 +366,49 @@ def test_histogram_metadata(
     assert result.shape == (1, 1, 1, 2)

     assert result.standard_name is None
-    if normalization == 'sum':
-        assert result.long_name == 'Relative Frequency of Air Temperature'
-        assert result.var_name == 'relative_frequency'
-        assert result.units == '1'
-    elif normalization == 'integral':
-        assert result.long_name == 'Density of Air Temperature'
-        assert result.var_name == 'density'
-        assert result.units == 'K-1'
+    if normalization == "sum":
+        assert result.long_name == "Relative Frequency of Air Temperature"
+        assert result.var_name == "relative_frequency"
+        assert result.units == "1"
+    elif normalization == "integral":
+        assert result.long_name == "Density of Air Temperature"
+        assert result.var_name == "density"
+        assert result.units == "K-1"
     else:
-        assert result.long_name == 'Frequency of Air Temperature'
-        assert result.var_name == 'frequency'
-        assert result.units == '1'
-    assert result.attributes == {'test': '1'}
-    assert result.cell_methods == (CellMethod('point', 'sigma'),)
-
-    assert not result.coords('time', dim_coords=True)
-    for dim_coord in ('sigma', 'lat', 'lon'):
-        assert (
-            result.coord(dim_coord, dim_coords=True) ==
-            input_cube.coord(dim_coord, dim_coords=True)
+        assert result.long_name == "Frequency of Air Temperature"
+        assert result.var_name == "frequency"
+        assert result.units == "1"
+    assert result.attributes == {"test": "1"}
+    assert result.cell_methods == (CellMethod("point", "sigma"),)
+
+    assert not result.coords("time", dim_coords=True)
+    for dim_coord in ("sigma", "lat", "lon"):
+        assert result.coord(dim_coord, dim_coords=True) == input_cube.coord(
+            dim_coord, dim_coords=True
        )
-        assert (
-            result.coord_dims(dim_coord) ==
-            (input_cube.coord_dims(dim_coord)[0] - 1,)
+        assert result.coord_dims(dim_coord) == (
+            input_cube.coord_dims(dim_coord)[0] - 1,
         )
-    assert result.coords('Air Temperature', dim_coords=True)
-    bin_coord = result.coord('Air Temperature')
+    assert result.coords("Air Temperature", dim_coords=True)
+    bin_coord = result.coord("Air Temperature")
     assert result.coord_dims(bin_coord) == (3,)
     assert bin_coord.standard_name is None
-    assert bin_coord.long_name == 'Air Temperature'
+    assert bin_coord.long_name == "Air Temperature"
     assert bin_coord.var_name is None
-    assert bin_coord.units == 'K'
+    assert bin_coord.units == "K"
     assert bin_coord.attributes == {}

-    assert result.coords('time', dim_coords=False)
-    assert result.coord_dims('time') == ()
-    assert result.coord('ptop') == input_cube.coord('ptop')
-    assert result.coord('ps') == input_cube.coord('ps')
+    assert result.coords("time", dim_coords=False)
+    assert result.coord_dims("time") == ()
+    assert result.coord("ptop") == input_cube.coord("ptop")
+    assert result.coord("ps") == input_cube.coord("ps")
     assert len(result.aux_factories) == 1
     assert isinstance(result.aux_factories[0], AtmosphereSigmaFactory)

     assert result.ancillary_variables() == input_cube.ancillary_variables()
     assert result.cell_measures() == input_cube.cell_measures()


-@pytest.mark.parametrize('lazy', [False, True])
+@pytest.mark.parametrize("lazy", [False, True])
 def test_histogram_fully_masked_no_bin_range(cube, lazy):
     """Test `histogram`."""
     cube.data = np.ma.masked_all((2, 2, 2), dtype=np.float32)
@@ -416,7 +429,7 @@ def test_histogram_invalid_bins(cube):
         r"bins cannot be a str \(got 'auto'\), must be int or Sequence of int"
     )
     with pytest.raises(TypeError, match=msg):
-        histogram(cube, bins='auto')
+        histogram(cube, bins="auto")


 def test_histogram_invalid_normalization(cube):
@@ -426,8 +439,8 @@ def test_histogram_invalid_normalization(cube):
         r"'invalid'"
     )
     with pytest.raises(ValueError, match=msg):
-        histogram(cube, normalization='invalid')
+        histogram(cube, normalization="invalid")


-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/tests/unit/preprocessor/_regrid/__init__.py b/tests/unit/preprocessor/_regrid/__init__.py
index db2f7c48a6..a6869d33cf 100644
--- a/tests/unit/preprocessor/_regrid/__init__.py
+++ b/tests/unit/preprocessor/_regrid/__init__.py
@@ -3,6 +3,8 @@
 """

+from typing import Literal
+
 import iris
 import iris.fileformats
 import numpy as np
@@ -15,7 +17,7 @@ def _make_vcoord(data, dtype=None):

     """
     if dtype is None:
-        dtype = np.dtype('int8')
+        dtype = np.dtype("int8")

     if isinstance(data, int):
         data = np.arange(data, dtype=dtype)
@@ -24,12 +26,13 @@

     # Create a pressure vertical coordinate.
     kwargs = dict(
-        standard_name='air_pressure',
-        long_name='Pressure',
-        var_name='plev',
-        units='hPa',
-        attributes=dict(positive='down'),
-        coord_system=None)
+        standard_name="air_pressure",
+        long_name="Pressure",
+        var_name="plev",
+        units="hPa",
+        attributes=dict(positive="down"),
+        coord_system=None,
+    )

     try:
         zcoord = DimCoord(data, **kwargs)
@@ -39,50 +42,58 @@
     return zcoord


-def _make_cube(data,
-               aux_coord=True,
-               dim_coord=True,
-               dtype=None,
-               rotated=False):
+def _make_cube(
+    data,
+    aux_coord=True,
+    dim_coord=True,
+    dtype=None,
+    grid: Literal["regular", "rotated", "mesh"] = "regular",
+):
     """
     Create a 3d synthetic test cube.

     """
     if dtype is None:
-        dtype = np.dtype('int8')
+        dtype = np.dtype("int8")

     if not isinstance(data, np.ndarray):
         data = np.empty(data, dtype=dtype)

     z, y, x = data.shape
+    if grid == "mesh":
+        # Meshes have a single lat/lon dimension.
+        data = data.reshape(z, -1)

     # Create the cube.
     cm = CellMethod(
-        method='mean', coords='time', intervals='20 minutes', comments=None)
+        method="mean", coords="time", intervals="20 minutes", comments=None
+    )
     kwargs = dict(
-        standard_name='air_temperature',
-        long_name='Air Temperature',
-        var_name='ta',
-        units='K',
-        attributes=dict(cube='attribute'),
-        cell_methods=(cm, ))
+        standard_name="air_temperature",
+        long_name="Air Temperature",
+        var_name="ta",
+        units="K",
+        attributes=dict(cube="attribute"),
+        cell_methods=(cm,),
+    )
     cube = iris.cube.Cube(data, **kwargs)

     # Create a synthetic test vertical coordinate.
     if dim_coord:
         cube.add_dim_coord(_make_vcoord(z, dtype=dtype), 0)

-    # Create a synthetic test latitude coordinate.
-    if rotated:
+    if grid == "rotated":
+        # Create a synthetic test latitude coordinate.
         data = np.arange(y, dtype=dtype) + 1
         cs = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
         kwargs = dict(
-            standard_name='grid_latitude',
-            long_name='latitude in rotated pole grid',
-            var_name='rlat',
-            units='degrees',
-            attributes=dict(latitude='attribute'),
-            coord_system=cs)
+            standard_name="grid_latitude",
+            long_name="latitude in rotated pole grid",
+            var_name="rlat",
+            units="degrees",
+            attributes=dict(latitude="attribute"),
+            coord_system=cs,
+        )
         ycoord = DimCoord(data, **kwargs)
         if data.size > 1:
             ycoord.guess_bounds()
@@ -91,26 +102,91 @@ def _make_cube(data,
         # Create a synthetic test longitude coordinate.
         data = np.arange(x, dtype=dtype) + 1
         kwargs = dict(
-            standard_name='grid_longitude',
-            long_name='longitude in rotated pole grid',
-            var_name='rlon',
-            units='degrees',
-            attributes=dict(longitude='attribute'),
-            coord_system=cs)
+            standard_name="grid_longitude",
+            long_name="longitude in rotated pole grid",
+            var_name="rlon",
+            units="degrees",
+            attributes=dict(longitude="attribute"),
+            coord_system=cs,
+        )
         xcoord = DimCoord(data, **kwargs)
         if data.size > 1:
             xcoord.guess_bounds()
         cube.add_dim_coord(xcoord, 2)
-    else:
+    elif grid == "mesh":
+        # This constructs a trivial rectangular mesh with square faces:
+        # 0.  1.  2.
+        # 0. +---+---+-
+        #    | x | x |
+        # 1. +---+---+-
+        #    | x | x |
+        # 2. +---+---+-
+        # where
+        # + is a node location
+        # x is a face location
+        # the lines between the nodes are the boundaries of the faces
+        # and the numbers are degrees latitude/longitude.
+        #
+        node_data_x = np.arange(x + 1) + 0.5
+        node_data_y = np.arange(y + 1) + 0.5
+        node_x, node_y = [
+            AuxCoord(a.ravel(), name)
+            for a, name in zip(
+                np.meshgrid(node_data_x, node_data_y),
+                ["longitude", "latitude"],
+            )
+        ]
+        face_data_x = np.arange(x) + 1
+        face_data_y = np.arange(y) + 1
+        face_x, face_y = [
+            AuxCoord(a.ravel(), name)
+            for a, name in zip(
+                np.meshgrid(face_data_x, face_data_y),
+                ["longitude", "latitude"],
+            )
+        ]
+        # Build the face connectivity indices by creating an array of squares
+        # and incrementing the offset by 1 for each successive square, then
+        # dropping:
+        # * the last column of connectivities - those would connect the last
+        #   nodes in a row to the first nodes of the next row
+        # * the last row of connectivities - those refer to nodes outside the
+        #   grid
+        n_nodes_x = len(node_data_x)
+        n_nodes_y = len(node_data_y)
+        square = np.array([0, n_nodes_x, n_nodes_x + 1, 1])
+        connectivities = (
+            (
+                np.tile(square, (n_nodes_y * n_nodes_x, 1))
+                + np.arange(n_nodes_y * n_nodes_x).reshape(-1, 1)
+            )
+            .reshape(n_nodes_y, n_nodes_x, 4)[:-1, :-1]
+            .reshape(-1, 4)
+        )
+        face_connectivity = iris.mesh.Connectivity(
+            indices=connectivities,
+            cf_role="face_node_connectivity",
+        )
+        mesh = iris.mesh.MeshXY(
+            topology_dimension=2,
+            node_coords_and_axes=[(node_x, "X"), (node_y, "Y")],
+            face_coords_and_axes=[(face_x, "X"), (face_y, "Y")],
+            connectivities=[face_connectivity],
+        )
+        lon, lat = mesh.to_MeshCoords("face")
+        cube.add_aux_coord(lon, 1)
+        cube.add_aux_coord(lat, 1)
+    elif grid == "regular":
+        # Create a synthetic test latitude coordinate.
         data = np.arange(y, dtype=dtype) + 1
         cs = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
         kwargs = dict(
-            standard_name='latitude',
-            long_name='Latitude',
-            var_name='lat',
-            units='degrees_north',
-            attributes=dict(latitude='attribute'),
-            coord_system=cs)
+            standard_name="latitude",
+            long_name="Latitude",
+            var_name="lat",
+            units="degrees_north",
+            attributes=dict(latitude="attribute"),
+            coord_system=cs,
+        )
         ycoord = DimCoord(data, **kwargs)
         if data.size > 1:
             ycoord.guess_bounds()
@@ -119,12 +195,13 @@ def _make_cube(data,
         # Create a synthetic test longitude coordinate.
         data = np.arange(x, dtype=dtype) + 1
         kwargs = dict(
-            standard_name='longitude',
-            long_name='Longitude',
-            var_name='lon',
-            units='degrees_east',
-            attributes=dict(longitude='attribute'),
-            coord_system=cs)
+            standard_name="longitude",
+            long_name="Longitude",
+            var_name="lon",
+            units="degrees_east",
+            attributes=dict(longitude="attribute"),
+            coord_system=cs,
+        )
         xcoord = DimCoord(data, **kwargs)
         if data.size > 1:
             xcoord.guess_bounds()
@@ -133,14 +210,14 @@ def _make_cube(data,
     # Create a synthetic test 2d auxiliary coordinate
     # that spans the vertical dimension.
     if aux_coord:
-        data = np.arange(np.prod((z, y)), dtype=dtype).reshape(z, y)
+        hsize = y * x if grid == "mesh" else y
+        data = np.arange(np.prod((z, hsize)), dtype=dtype).reshape(z, hsize)
         kwargs = dict(
-            standard_name=None,
-            long_name='Pressure Slice',
-            var_name='aplev',
-            units='hPa',
-            attributes=dict(positive='down'),
-            coord_system=None)
+            long_name="Pressure Slice",
+            var_name="aplev",
+            units="hPa",
+            attributes=dict(positive="down"),
+        )
         zycoord = AuxCoord(data, **kwargs)
         cube.add_aux_coord(zycoord, (0, 1))
diff --git a/tests/unit/preprocessor/_regrid/test__create_cube.py b/tests/unit/preprocessor/_regrid/test__create_cube.py
index e0f65eab78..0308df61ae 100644
--- a/tests/unit/preprocessor/_regrid/test__create_cube.py
+++ b/tests/unit/preprocessor/_regrid/test__create_cube.py
@@ -16,16 +16,18 @@ class Test(tests.Test):
     def setUp(self):
         shape = (3, 2, 1)
-        self.dtype = np.dtype('int8')
+        self.dtype = np.dtype("int8")
         self.cube = _make_cube(shape, dtype=self.dtype)

     def test_invalid_shape__data_mismatch_with_levels(self):
         levels = np.array([0, 1])
-        emsg = 'Mismatch between data and levels'
+        emsg = "Mismatch between data and levels"
         with self.assertRaisesRegex(ValueError, emsg):
             create_cube(
-                self.cube, self.cube.data,
-                self.cube.coord(axis='z', dim_coords=True), levels
+                self.cube,
+                self.cube.data,
+                self.cube.coord(axis="z", dim_coords=True),
+                levels,
             )

     def test(self):
@@ -33,8 +35,7 @@ def test(self):
         data = np.empty(shape)
         levels = np.array([10, 20])
         result = create_cube(
-            self.cube, data,
-            self.cube.coord(axis='z', dim_coords=True), levels
+            self.cube, data, self.cube.coord(axis="z", dim_coords=True), levels
         )
         expected = _make_cube(data, aux_coord=False, dim_coord=False)
         vcoord = _make_vcoord(levels)
@@ -46,8 +47,7 @@ def test_non_monotonic(self):
         data = np.empty(shape)
         levels = np.array([10, 10])
         result = create_cube(
-            self.cube, data,
-            self.cube.coord(axis='z', dim_coords=True), levels
+            self.cube, data, self.cube.coord(axis="z", dim_coords=True), levels
         )
         expected = _make_cube(data, aux_coord=False, dim_coord=False)
         vcoord = _make_vcoord(levels)
@@ -59,13 +59,13 @@ def test_collapse(self):
         data = np.empty(shape)
         levels = np.array([123])
         result = create_cube(
-            self.cube, data, self.cube.coord(axis='z', dim_coords=True),
-            levels)
+            self.cube, data, self.cube.coord(axis="z", dim_coords=True), levels
+        )
         expected = _make_cube(data, aux_coord=False, dim_coord=False)[0]
         vcoord = _make_vcoord(levels)
         expected.add_aux_coord(vcoord)
         self.assertEqual(result, expected)


-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/tests/unit/preprocessor/_regrid/test__stock_cube.py b/tests/unit/preprocessor/_regrid/test__stock_cube.py
index 8142b66ed8..220c7195e8 100644
--- a/tests/unit/preprocessor/_regrid/test__stock_cube.py
+++ b/tests/unit/preprocessor/_regrid/test__stock_cube.py
@@ -29,14 +29,18 @@ def _check(self, dx, dy, lat_off=True, lon_off=True):
         mid_dx, mid_dy = dx / 2, dy / 2
         if lat_off and lon_off:
             expected_lat_points = np.linspace(
-                _LAT_MIN + mid_dy, _LAT_MAX - mid_dy, int(_LAT_RANGE / dy))
+                _LAT_MIN + mid_dy, _LAT_MAX - mid_dy, int(_LAT_RANGE / dy)
+            )
             expected_lon_points = np.linspace(
-                _LON_MIN + mid_dx, _LON_MAX - mid_dx, int(_LON_RANGE / dx))
+                _LON_MIN + mid_dx, _LON_MAX - mid_dx, int(_LON_RANGE / dx)
+            )
         else:
-            expected_lat_points = np.linspace(_LAT_MIN, _LAT_MAX,
-                                              int(_LAT_RANGE / dy) + 1)
-            expected_lon_points = np.linspace(_LON_MIN, _LON_MAX - dx,
-                                              int(_LON_RANGE / dx))
+            expected_lat_points = np.linspace(
+                _LAT_MIN, _LAT_MAX, int(_LAT_RANGE / dy) + 1
+            )
+            expected_lon_points = np.linspace(
+                _LON_MIN, _LON_MAX - dx, int(_LON_RANGE / dx)
+            )

         # Check the stock cube coordinates.
         self.assertEqual(self.mock_DimCoord.call_count, 2)
@@ -45,19 +49,23 @@ def _check(self, dx, dy, lat_off=True, lon_off=True):
         # Check the latitude coordinate creation.
         [args], kwargs = call_lats
         self.assert_array_equal(args, expected_lat_points)
-        expected_lat_kwargs = dict(standard_name='latitude',
-                                   units='degrees_north',
-                                   var_name='lat',
-                                   circular=False)
+        expected_lat_kwargs = dict(
+            standard_name="latitude",
+            units="degrees_north",
+            var_name="lat",
+            circular=False,
+        )
         self.assertEqual(kwargs, expected_lat_kwargs)

         # Check the longitude coordinate creation.
         [args], kwargs = call_lons
         self.assert_array_equal(args, expected_lon_points)
-        expected_lon_kwargs = dict(standard_name='longitude',
-                                   units='degrees_east',
-                                   var_name='lon',
-                                   circular=False)
+        expected_lon_kwargs = dict(
+            standard_name="longitude",
+            units="degrees_east",
+            var_name="lon",
+            circular=False,
+        )
         self.assertEqual(kwargs, expected_lon_kwargs)

         # Check that the coordinate guess_bounds method has been called.
@@ -78,11 +86,12 @@ def _check(self, dx, dy, lat_off=True, lon_off=True):
     def setUp(self):
         self.Cube = mock.sentinel.Cube
         self.mock_Cube = self.patch(
-            'esmvalcore.preprocessor._regrid.Cube', return_value=self.Cube
+            "esmvalcore.preprocessor._regrid.Cube", return_value=self.Cube
         )
         self.mock_coord = mock.Mock(spec=iris.coords.DimCoord)
         self.mock_DimCoord = self.patch(
-            'iris.coords.DimCoord', return_value=self.mock_coord)
+            "iris.coords.DimCoord", return_value=self.mock_coord
+        )
         self.mocks = [self.mock_Cube, self.mock_coord, self.mock_DimCoord]

     def tearDown(self) -> None:
@@ -90,44 +99,45 @@ def tearDown(self) -> None:
         return super().tearDown()

     def test_invalid_cell_spec__alpha(self):
-        emsg = 'Invalid MxN cell specification'
+        emsg = "Invalid MxN cell specification"
         with self.assertRaisesRegex(ValueError, emsg):
-            _global_stock_cube('Ax1')
+            _global_stock_cube("Ax1")

     def test_invalid_cell_spec__separator(self):
-        emsg = 'Invalid MxN cell specification'
+        emsg = "Invalid MxN cell specification"
         with self.assertRaisesRegex(ValueError, emsg):
-            _global_stock_cube('1y1')
+            _global_stock_cube("1y1")

     def test_invalid_cell_spec__longitude(self):
-        emsg = 'Invalid longitude delta in MxN cell specification'
+        emsg = "Invalid longitude delta in MxN cell specification"
         with self.assertRaisesRegex(ValueError, emsg):
-            _global_stock_cube('1.3x1')
+            _global_stock_cube("1.3x1")

     def test_invalid_cell_spec__latitude(self):
-        emsg = 'Invalid latitude delta in MxN cell specification'
+        emsg = "Invalid latitude delta in MxN cell specification"
         with self.assertRaisesRegex(ValueError, emsg):
-            _global_stock_cube('1x2.3')
+            _global_stock_cube("1x2.3")

     def test_specs(self):
-        specs = ['0.5x0.5', '1x1', '2.5x2.5', '5x5', '10x10']
+        specs = ["0.5x0.5", "1x1", "2.5x2.5", "5x5", "10x10"]
         for spec in specs:
             result = _global_stock_cube(spec)
             self.assertEqual(result, self.Cube)
-            self._check(*list(map(float, spec.split('x'))))
+            self._check(*list(map(float, spec.split("x"))))

     def test_specs_no_offset(self):
-        specs = ['0.5x0.5', '1x1', '2.5x2.5', '5x5', '10x10']
+        specs = ["0.5x0.5", "1x1", "2.5x2.5", "5x5", "10x10"]
         for spec in specs:
-            result = _global_stock_cube(spec,
-                                        lat_offset=False,
-                                        lon_offset=False)
+            result = _global_stock_cube(
+                spec, lat_offset=False, lon_offset=False
+            )
             self.assertEqual(result, self.Cube)
             self._check(
-                *list(map(float, spec.split('x'))),
+                *list(map(float, spec.split("x"))),
                 lat_off=False,
-                lon_off=False)
+                lon_off=False,
+            )


-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/tests/unit/preprocessor/_regrid/test_broadcast_to_shape.py b/tests/unit/preprocessor/_regrid/test_broadcast_to_shape.py
deleted file mode 100644
index a9b8f586bf..0000000000
--- a/tests/unit/preprocessor/_regrid/test_broadcast_to_shape.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright Iris contributors
-#
-# This file is part of Iris and is released under the BSD license.
-# See LICENSE in the root of the repository for full licensing details.
-"""Test function :func:`iris.util.broadcast_to_shape`.""" - -from unittest import mock - -import dask -import dask.array as da -import numpy as np -import numpy.ma as ma - -from esmvalcore.preprocessor._regrid import broadcast_to_shape -from tests import assert_array_equal - - -def test_same_shape(): - # broadcast to current shape should result in no change - a = np.random.random([2, 3]) - b = broadcast_to_shape(a, a.shape, (0, 1)) - assert_array_equal(b, a) - - -def test_added_dimensions(): - # adding two dimensions, on at the front and one in the middle of - # the existing dimensions - a = np.random.random([2, 3]) - b = broadcast_to_shape(a, (5, 2, 4, 3), (1, 3)) - for i in range(5): - for j in range(4): - assert_array_equal(b[i, :, j, :], a) - - -def test_added_dimensions_transpose(): - # adding dimensions and having the dimensions of the input - # transposed - a = np.random.random([2, 3]) - b = broadcast_to_shape(a, (5, 3, 4, 2), (3, 1)) - for i in range(5): - for j in range(4): - assert_array_equal(b[i, :, j, :].T, a) - - -@mock.patch.object(dask.base, "compute", wraps=dask.base.compute) -def test_lazy_added_dimensions_transpose(mocked_compute): - # adding dimensions and having the dimensions of the input - # transposed - a = da.random.random([2, 3]) - b = broadcast_to_shape(a, (5, 3, 4, 2), (3, 1)) - mocked_compute.assert_not_called() - for i in range(5): - for j in range(4): - assert_array_equal(b[i, :, j, :].T.compute(), a.compute()) - - -def test_masked(): - # masked arrays are also accepted - a = np.random.random([2, 3]) - m = ma.array(a, mask=[[0, 1, 0], [0, 1, 1]]) - b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1)) - for i in range(5): - for j in range(4): - assert_array_equal(b[i, :, j, :].T, m) - - -@mock.patch.object(dask.base, "compute", wraps=dask.base.compute) -def test_lazy_masked(mocked_compute): - # masked arrays are also accepted - a = np.random.random([2, 3]) - m = da.ma.masked_array(a, mask=[[0, 1, 0], [0, 1, 1]]) - b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1)) - mocked_compute.assert_not_called() - for i in range(5): - for j in range(4): - assert_array_equal(b[i, :, j, :].compute().T, m.compute()) - - -@mock.patch.object(dask.base, "compute", wraps=dask.base.compute) -def test_lazy_chunks(mocked_compute): - # chunks can be specified along with the target shape and are only used - # along new dimensions or on dimensions that have size 1 in the source - # array. 
- m = da.ma.masked_array( - data=[[1, 2, 3, 4, 5]], - mask=[[0, 1, 0, 0, 0]], - ).rechunk((1, 2)) - b = broadcast_to_shape( - m, - dim_map=(1, 2), - shape=(3, 4, 5), - chunks=( - 1, # used because target is new dim - 2, # used because input size 1 - 3, # not used because broadcast does not rechunk - ), - ) - mocked_compute.assert_not_called() - for i in range(3): - for j in range(4): - assert_array_equal(b[i, j, :].compute(), m[0].compute()) - assert b.chunks == ((1, 1, 1), (2, 2), (2, 2, 1)) - - -def test_masked_degenerate(): - # masked arrays can have degenerate masks too - a = np.random.random([2, 3]) - m = ma.array(a) - b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1)) - for i in range(5): - for j in range(4): - assert_array_equal(b[i, :, j, :].T, m) diff --git a/tests/unit/preprocessor/_regrid/test_extract_levels.py b/tests/unit/preprocessor/_regrid/test_extract_levels.py index dac91112f6..1103afab87 100644 --- a/tests/unit/preprocessor/_regrid/test_extract_levels.py +++ b/tests/unit/preprocessor/_regrid/test_extract_levels.py @@ -1,4 +1,5 @@ """Unit tests for :func:`esmvalcore.preprocessor.regrid.extract_levels`.""" + from unittest import mock import dask.array as da @@ -20,27 +21,31 @@ class Test(tests.Test): - def setUp(self): self.shape = (3, 2, 1) self.z = self.shape[0] - self.dtype = np.dtype('int8') - data = np.arange(np.prod(self.shape), - dtype=self.dtype).reshape(self.shape) + self.dtype = np.dtype("int8") + data = np.arange(np.prod(self.shape), dtype=self.dtype).reshape( + self.shape + ) self.cube = _make_cube(data, dtype=self.dtype) - self.created_cube = mock.Mock(var_name='created_cube') + self.created_cube = mock.Mock(var_name="created_cube") self.created_cube.astype.return_value = mock.sentinel.astype_result self.mock_create_cube = self.patch( - 'esmvalcore.preprocessor._regrid._create_cube', - return_value=self.created_cube) + "esmvalcore.preprocessor._regrid._create_cube", + return_value=self.created_cube, + ) self.schemes = [ - 'linear', 'nearest', 'linear_extrapolate', 'nearest_extrapolate', + "linear", + "nearest", + "linear_extrapolate", + "nearest_extrapolate", ] def test_invalid_scheme__unknown(self): levels = mock.sentinel.levels scheme = mock.sentinel.scheme - emsg = 'Unknown vertical interpolation scheme' + emsg = "Unknown vertical interpolation scheme" with self.assertRaisesRegex(ValueError, emsg): extract_levels(self.cube, levels, scheme) @@ -49,10 +54,10 @@ def test_vertical_schemes(self): def test_parse_vertical_schemes(self): reference = { - 'linear': ('linear', 'nan'), - 'nearest': ('nearest', 'nan'), - 'linear_extrapolate': ('linear', 'nearest'), - 'nearest_extrapolate': ('nearest', 'nearest'), + "linear": ("linear", "nan"), + "nearest": ("nearest", "nan"), + "linear_extrapolate": ("linear", "nearest"), + "nearest_extrapolate": ("nearest", "nearest"), } for scheme in self.schemes: interpolation, extrapolation = parse_vertical_scheme(scheme) @@ -60,23 +65,22 @@ def test_parse_vertical_schemes(self): def test_nop__levels_match(self): vcoord = _make_vcoord(self.z, dtype=self.dtype) - self.assertEqual(self.cube.coord(axis='z', dim_coords=True), vcoord) + self.assertEqual(self.cube.coord(axis="z", dim_coords=True), vcoord) levels = vcoord.points - result = extract_levels(self.cube, levels, 'linear') + result = extract_levels(self.cube, levels, "linear") self.assertEqual(id(result), id(self.cube)) self.assertEqual(result, self.cube) def test_extraction(self): levels = [0, 2] - result = extract_levels(self.cube, levels, 'linear') + result = 
extract_levels(self.cube, levels, "linear") data = np.array([0, 1, 4, 5], dtype=self.dtype).reshape(2, 2, 1) - expected = _make_cube(data, - aux_coord=False, - dim_coord=False, - dtype=self.dtype) - coord = self.cube.coord('Pressure Slice').copy() + expected = _make_cube( + data, aux_coord=False, dim_coord=False, dtype=self.dtype + ) + coord = self.cube.coord("Pressure Slice").copy() expected.add_aux_coord(coord[levels], (0, 1)) - coord = self.cube.coord('air_pressure').copy() + coord = self.cube.coord("air_pressure").copy() expected.add_dim_coord(coord[levels], 0) self.assertEqual(result, expected) @@ -84,41 +88,46 @@ def test_fx_extraction(self): levels = [0, 2] area_data = np.ones((2, 1)) volume_data = np.ones(self.shape) - area_measure = iris.coords.CellMeasure(area_data, - standard_name='cell_area', - var_name='areacella', - units='m2', - measure='area') - volume_measure = iris.coords.CellMeasure(volume_data, - standard_name='ocean_volume', - var_name='volcello', - units='m3', - measure='volume') + area_measure = iris.coords.CellMeasure( + area_data, + standard_name="cell_area", + var_name="areacella", + units="m2", + measure="area", + ) + volume_measure = iris.coords.CellMeasure( + volume_data, + standard_name="ocean_volume", + var_name="volcello", + units="m3", + measure="volume", + ) ancillary_2d = iris.coords.AncillaryVariable( area_data, - standard_name='land_area_fraction', - var_name='sftlf', - units='%') + standard_name="land_area_fraction", + var_name="sftlf", + units="%", + ) ancillary_3d = iris.coords.AncillaryVariable( volume_data, - standard_name='height_above_reference_ellipsoid', - var_name='zfull', - units='m') + standard_name="height_above_reference_ellipsoid", + var_name="zfull", + units="m", + ) self.cube.add_cell_measure(area_measure, (1, 2)) self.cube.add_cell_measure(volume_measure, (0, 1, 2)) self.cube.add_ancillary_variable(ancillary_2d, (1, 2)) self.cube.add_ancillary_variable(ancillary_3d, (0, 1, 2)) - result = extract_levels(self.cube, levels, 'linear') + result = extract_levels(self.cube, levels, "linear") data = np.array([0, 1, 4, 5], dtype=self.dtype).reshape(2, 2, 1) - expected = _make_cube(data, - aux_coord=False, - dim_coord=False, - dtype=self.dtype) - coord = self.cube.coord('Pressure Slice').copy() + expected = _make_cube( + data, aux_coord=False, dim_coord=False, dtype=self.dtype + ) + coord = self.cube.coord("Pressure Slice").copy() expected.add_aux_coord(coord[levels], (0, 1)) - coord = self.cube.coord('air_pressure').copy() + coord = self.cube.coord("air_pressure").copy() expected.add_dim_coord(coord[levels], 0) expected.add_cell_measure(area_measure, (1, 2)) expected.add_ancillary_variable(ancillary_2d, (1, 2)) @@ -129,132 +138,150 @@ def test_fx_extraction(self): def test_extraction__failure(self): levels = [0, 2] - with mock.patch('iris.cube.Cube.extract', return_value=None): - emsg = 'Failed to extract levels' + with mock.patch("iris.cube.Cube.extract", return_value=None): + emsg = "Failed to extract levels" with self.assertRaisesRegex(ValueError, emsg): - extract_levels(self.cube, levels, 'linear') + extract_levels(self.cube, levels, "linear") def test_interpolation(self): new_data = np.array(True) levels = np.array([0.5, 1.5]) - scheme = 'linear' - with mock.patch('stratify.interpolate', - return_value=new_data) as mocker: + scheme = "linear" + with mock.patch( + "stratify.interpolate", return_value=new_data + ) as mocker: result = extract_levels(self.cube, levels, scheme) self.assertEqual(result, self.created_cube) args, kwargs = 
mocker.call_args # Check the stratify.interpolate args ... self.assertEqual(len(args), 3) self.assert_array_equal(args[0], levels) - pts = self.cube.coord(axis='z', dim_coords=True).points - src_levels_broadcast = np.broadcast_to(pts.reshape(self.z, 1, 1), - self.cube.shape) + pts = self.cube.coord(axis="z", dim_coords=True).points + src_levels_broadcast = np.broadcast_to( + pts.reshape(self.z, 1, 1), self.cube.shape + ) self.assert_array_equal(args[1], src_levels_broadcast) self.assert_array_equal(args[2], self.cube.data) # Check the stratify.interpolate kwargs ... self.assertEqual( - kwargs, dict(axis=0, interpolation=scheme, - extrapolation='nan')) + kwargs, dict(axis=0, interpolation=scheme, extrapolation="nan") + ) args, kwargs = self.mock_create_cube.call_args # Check the _create_cube args ... self.assertEqual(len(args), 4) self.assertEqual(args[0], self.cube) self.assert_array_equal(args[1], np.ma.array(new_data)) - self.assert_array_equal(args[2], - self.cube.coord(axis='z', dim_coords=True)) + self.assert_array_equal( + args[2], self.cube.coord(axis="z", dim_coords=True) + ) self.assert_array_equal(args[3], levels) # Check the _create_cube kwargs ... self.assertEqual(kwargs, dict()) def test_preserve_2d_fx_interpolation(self): area_data = np.ones((2, 1)) - area_measure = iris.coords.CellMeasure(area_data, - standard_name='cell_area', - var_name='areacella', - units='m2', - measure='area') + area_measure = iris.coords.CellMeasure( + area_data, + standard_name="cell_area", + var_name="areacella", + units="m2", + measure="area", + ) ancillary_2d = iris.coords.AncillaryVariable( area_data, - standard_name='land_area_fraction', - var_name='sftlf', - units='%') + standard_name="land_area_fraction", + var_name="sftlf", + units="%", + ) self.cube.add_cell_measure(area_measure, (1, 2)) self.cube.add_ancillary_variable(ancillary_2d, (1, 2)) result_data = np.array([0, 1, 4, 5], dtype=self.dtype).reshape(2, 2, 1) result = _make_cube(result_data) _preserve_fx_vars(self.cube, result) self.assertEqual(self.cube.cell_measures(), result.cell_measures()) - self.assertEqual(self.cube.ancillary_variables(), - result.ancillary_variables()) + self.assertEqual( + self.cube.ancillary_variables(), result.ancillary_variables() + ) def test_preserve_2d_fx_interpolation_single_level(self): result = self.cube[0, :, :] area_data = np.ones((2, 1)) - area_measure = iris.coords.CellMeasure(area_data, - standard_name='cell_area', - var_name='areacella', - units='m2', - measure='area') + area_measure = iris.coords.CellMeasure( + area_data, + standard_name="cell_area", + var_name="areacella", + units="m2", + measure="area", + ) ancillary_2d = iris.coords.AncillaryVariable( area_data, - standard_name='land_area_fraction', - var_name='sftlf', - units='%') + standard_name="land_area_fraction", + var_name="sftlf", + units="%", + ) self.cube.add_cell_measure(area_measure, (1, 2)) self.cube.add_ancillary_variable(ancillary_2d, (1, 2)) _preserve_fx_vars(self.cube, result) self.assertEqual(self.cube.cell_measures(), result.cell_measures()) - self.assertEqual(self.cube.ancillary_variables(), - result.ancillary_variables()) + self.assertEqual( + self.cube.ancillary_variables(), result.ancillary_variables() + ) def test_do_not_preserve_3d_fx_interpolation(self): volume_data = np.ones(self.shape) - volume_measure = iris.coords.CellMeasure(volume_data, - standard_name='ocean_volume', - var_name='volcello', - units='m3', - measure='volume') + volume_measure = iris.coords.CellMeasure( + volume_data, + standard_name="ocean_volume", 
+ var_name="volcello", + units="m3", + measure="volume", + ) ancillary_3d = iris.coords.AncillaryVariable( volume_data, - standard_name='height_above_reference_ellipsoid', - var_name='zfull', - units='m') + standard_name="height_above_reference_ellipsoid", + var_name="zfull", + units="m", + ) self.cube.add_cell_measure(volume_measure, (0, 1, 2)) self.cube.add_ancillary_variable(ancillary_3d, (0, 1, 2)) result_data = np.array([0, 1, 4, 5], dtype=self.dtype).reshape(2, 2, 1) result = _make_cube(result_data) - with self.assertLogs(level='WARNING') as cm: + with self.assertLogs(level="WARNING") as cm: _preserve_fx_vars(self.cube, result) self.assertEqual( cm.records[0].getMessage(), - 'Discarding use of z-axis dependent cell measure ' - 'volcello in variable ta, as z-axis has been interpolated') + "Discarding use of z-axis dependent cell measure " + "volcello in variable ta, as z-axis has been interpolated", + ) self.assertEqual( cm.records[1].getMessage(), - 'Discarding use of z-axis dependent ancillary variable ' - 'zfull in variable ta, as z-axis has been interpolated') + "Discarding use of z-axis dependent ancillary variable " + "zfull in variable ta, as z-axis has been interpolated", + ) def test_interpolation__extrapolated_nan_filling(self): new_data = np.array([0, np.nan]) levels = [0.5, 1.5] - scheme = 'nearest' - with mock.patch('stratify.interpolate', - return_value=new_data) as mocker: + scheme = "nearest" + with mock.patch( + "stratify.interpolate", return_value=new_data + ) as mocker: result = extract_levels(self.cube, levels, scheme) self.assertEqual(result, self.created_cube) args, kwargs = mocker.call_args # Check the stratify.interpolate args ... self.assertEqual(len(args), 3) self.assert_array_equal(args[0], levels) - pts = self.cube.coord(axis='z', dim_coords=True).points - src_levels_broadcast = np.broadcast_to(pts.reshape(self.z, 1, 1), - self.cube.shape) + pts = self.cube.coord(axis="z", dim_coords=True).points + src_levels_broadcast = np.broadcast_to( + pts.reshape(self.z, 1, 1), self.cube.shape + ) self.assert_array_equal(args[1], src_levels_broadcast) self.assert_array_equal(args[2], self.cube.data) # Check the stratify.interpolate kwargs ... self.assertEqual( - kwargs, dict(axis=0, interpolation=scheme, - extrapolation='nan')) + kwargs, dict(axis=0, interpolation=scheme, extrapolation="nan") + ) args, kwargs = self.mock_create_cube.call_args # Check the _create_cube args ... self.assertEqual(len(args), 4) @@ -264,8 +291,9 @@ def test_interpolation__extrapolated_nan_filling(self): new_data_mask[new_data == _MDI] = True new_data = np.ma.array(new_data, mask=new_data_mask) self.assert_array_equal(args[1], new_data) - self.assert_array_equal(args[2], - self.cube.coord(axis='z', dim_coords=True)) + self.assert_array_equal( + args[2], self.cube.coord(axis="z", dim_coords=True) + ) self.assert_array_equal(args[3], levels) # Check the _create_cube kwargs ... 
self.assertEqual(kwargs, dict()) @@ -275,28 +303,30 @@ def test_interpolation__masked(self): new_data = np.empty([len(levels)] + list(self.shape[1:]), dtype=float) new_data[:, 0, :] = np.nan new_data_mask = np.isnan(new_data) - scheme = 'linear' + scheme = "linear" mask = [[[False], [True]], [[True], [False]], [[False], [False]]] masked = ma.empty(self.shape) masked.mask = mask cube = _make_cube(masked, dtype=self.dtype) - with mock.patch('stratify.interpolate', - return_value=new_data) as mocker: + with mock.patch( + "stratify.interpolate", return_value=new_data + ) as mocker: result = extract_levels(cube, levels, scheme) self.assertEqual(result, mock.sentinel.astype_result) args, kwargs = mocker.call_args # Check the stratify.interpolate args ... self.assertEqual(len(args), 3) self.assert_array_equal(args[0], levels) - pts = cube.coord(axis='z', dim_coords=True).points - src_levels_broadcast = np.broadcast_to(pts.reshape(self.z, 1, 1), - cube.shape) + pts = cube.coord(axis="z", dim_coords=True).points + src_levels_broadcast = np.broadcast_to( + pts.reshape(self.z, 1, 1), cube.shape + ) self.assert_array_equal(args[1], src_levels_broadcast) self.assert_array_equal(args[2], np.ma.filled(masked, np.nan)) # Check the stratify.interpolate kwargs ... self.assertEqual( - kwargs, dict(axis=0, interpolation=scheme, - extrapolation='nan')) + kwargs, dict(axis=0, interpolation=scheme, extrapolation="nan") + ) args, kwargs = self.mock_create_cube.call_args input_cube = args[0] # in-place for new extract_levels with nan's @@ -313,19 +343,20 @@ def test_interpolation__masked(self): self.assert_array_equal(args[1], new_data) self.assertTrue(ma.isMaskedArray(args[1])) self.assert_array_equal(args[1].mask, new_data_mask) - self.assert_array_equal(args[2], - self.cube.coord(axis='z', dim_coords=True)) + self.assert_array_equal( + args[2], self.cube.coord(axis="z", dim_coords=True) + ) self.assert_array_equal(args[3], levels) # Check the _create_cube kwargs ... 
self.assertEqual(kwargs, dict()) def test_rechunk_aux_factory_dependencies(): - delta = iris.coords.AuxCoord( points=np.array([0.0, 1.0, 2.0], dtype=np.float64), - bounds=np.array([[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]], - dtype=np.float64), + bounds=np.array( + [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]], dtype=np.float64 + ), long_name="level_pressure", units="Pa", ) @@ -349,24 +380,25 @@ def test_rechunk_aux_factory_dependencies(): da.asarray( np.arange(3 * 2 * 2).astype(np.float32).reshape(3, 2, 2), chunks=(1, 2, 2), - ), ) + ), + ) cube.add_aux_coord(delta, 0) cube.add_aux_coord(sigma, 0) cube.add_aux_coord(surface_air_pressure, [1, 2]) cube.add_aux_factory(factory) - result = _rechunk_aux_factory_dependencies(cube, 'air_pressure') + result = _rechunk_aux_factory_dependencies(cube, "air_pressure") # Check that the 'air_pressure' coordinate of the resulting cube has been # rechunked: assert ( (1, 1, 1), - (2, ), - (2, ), - ) == result.coord('air_pressure').core_points().chunks + (2,), + (2,), + ) == result.coord("air_pressure").core_points().chunks # Check that the original cube has not been modified: assert ( - (3, ), - (2, ), - (2, ), - ) == cube.coord('air_pressure').core_points().chunks + (3,), + (2,), + (2,), + ) == cube.coord("air_pressure").core_points().chunks diff --git a/tests/unit/preprocessor/_regrid/test_extract_point.py b/tests/unit/preprocessor/_regrid/test_extract_point.py index 304e071b36..f131463f3e 100644 --- a/tests/unit/preprocessor/_regrid/test_extract_point.py +++ b/tests/unit/preprocessor/_regrid/test_extract_point.py @@ -15,7 +15,6 @@ class Test(tests.Test): - def setUp(self): # Use an Iris test cube with coordinates that have a coordinate # system, see the following issue for more details: @@ -27,22 +26,23 @@ def test_invalid_scheme__unknown(self): dummy = mock.sentinel.dummy emsg = "Unknown interpolation scheme, got 'non-existent'" with self.assertRaisesRegex(ValueError, emsg): - extract_point(dummy, dummy, dummy, 'non-existent') + extract_point(dummy, dummy, dummy, "non-existent") def test_interpolation_schemes(self): - self.assertEqual(set(POINT_INTERPOLATION_SCHEMES.keys()), - set(self.schemes)) + self.assertEqual( + set(POINT_INTERPOLATION_SCHEMES.keys()), set(self.schemes) + ) def test_extract_point_interpolation_schemes(self): - latitude = -90. - longitude = 0. + latitude = -90.0 + longitude = 0.0 for scheme in self.schemes: result = extract_point(self.src_cube, latitude, longitude, scheme) self._assert_coords(result, latitude, longitude) def test_extract_point(self): - latitude = 90. - longitude = -180. 
+ latitude = 90.0 + longitude = -180.0 for scheme in self.schemes: result = extract_point(self.src_cube, latitude, longitude, scheme) self._assert_coords(result, latitude, longitude) @@ -56,5 +56,5 @@ def _assert_coords(self, cube, ref_lat, ref_lon): self.assertEqual(lon_points[0], ref_lon) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/unit/preprocessor/_regrid/test_extract_regional_grid.py b/tests/unit/preprocessor/_regrid/test_extract_regional_grid.py index c3f4dfe79e..15055a7658 100644 --- a/tests/unit/preprocessor/_regrid/test_extract_regional_grid.py +++ b/tests/unit/preprocessor/_regrid/test_extract_regional_grid.py @@ -23,10 +23,17 @@ def clear_lru_cache(): _global_stock_cube.cache_clear() -SPEC_KEYS = ('start_longitude', 'end_longitude', 'step_longitude', - 'start_latitude', 'end_latitude', 'step_latitude') +SPEC_KEYS = ( + "start_longitude", + "end_longitude", + "step_longitude", + "start_latitude", + "end_latitude", + "step_latitude", +) PASSING_SPECS = tuple( - dict(zip(SPEC_KEYS, spec)) for spec in ( + dict(zip(SPEC_KEYS, spec)) + for spec in ( (0, 360, 5, -90, 90, 5), (0, 360, 20, -90, 90, 20), (0, 21, 5, -90, 90, 1), @@ -41,10 +48,12 @@ def clear_lru_cache(): (0, 360, 5, 0, 0, 5), (0, 9, 0.1, 45, 54, 0.1), (3.75, 11.75, 0.5, 46.25, 52.25, 0.5), - )) + ) +) FAILING_SPECS = tuple( - dict(zip(SPEC_KEYS, spec)) for spec in ( + dict(zip(SPEC_KEYS, spec)) + for spec in ( # (0, 360, 5, -90, 90, 5), (0, 360, 5, -90, 180, 5), (0, 360, 5, -180, 90, 5), @@ -53,21 +62,22 @@ def clear_lru_cache(): (0, -360, 5, -90, 90, 5), (0, 360, 0, -90, 90, 5), (0, 360, 5, -90, 90, 0), - )) + ) +) -@pytest.mark.parametrize('spec', PASSING_SPECS) +@pytest.mark.parametrize("spec", PASSING_SPECS) def test_extract_regional_grid_passing(spec): """Test regridding with regional target spec.""" - global_cube = _global_stock_cube('10x10') - scheme = 'linear' + global_cube = _global_stock_cube("10x10") + scheme = "linear" result_cube = regrid(global_cube, target_grid=spec, scheme=scheme) expected_latvals, expected_lonvals = _spec_to_latlonvals(**spec) - lat_coord = result_cube.coord('latitude') - lon_coord = result_cube.coord('longitude') + lat_coord = result_cube.coord("latitude") + lon_coord = result_cube.coord("longitude") np.testing.assert_array_equal(lat_coord.points, expected_latvals) np.testing.assert_array_equal(lon_coord.points, expected_lonvals) @@ -76,31 +86,31 @@ def test_extract_regional_grid_passing(spec): assert lon_coord.has_bounds() -@pytest.mark.parametrize('spec', FAILING_SPECS) +@pytest.mark.parametrize("spec", FAILING_SPECS) def test_extract_regional_grid_failing(spec): """Test failing input for spec.""" - global_cube = _global_stock_cube('10x10') - scheme = 'linear' + global_cube = _global_stock_cube("10x10") + scheme = "linear" with pytest.raises(ValueError): _ = regrid(global_cube, target_grid=spec, scheme=scheme) -@pytest.mark.parametrize('spec', PASSING_SPECS) +@pytest.mark.parametrize("spec", PASSING_SPECS) def test_spec_to_latlonvals(spec): """Test lat/lon val specification.""" latvals, lonvals = _spec_to_latlonvals(**spec) - lat_step = spec['step_latitude'] - assert latvals[0] == spec['start_latitude'] + lat_step = spec["step_latitude"] + assert latvals[0] == spec["start_latitude"] lat_diff = latvals[-1] - latvals[0] assert Decimal(lat_diff) % Decimal(str(lat_step)) == 0 np.testing.assert_allclose(np.diff(latvals), lat_step) - assert spec['end_latitude'] >= latvals[-1] + assert spec["end_latitude"] >= latvals[-1] - lon_step = 
spec['step_longitude'] - assert lonvals[0] == spec['start_longitude'] + lon_step = spec["step_longitude"] + assert lonvals[0] == spec["start_longitude"] lon_diff = lonvals[-1] - lonvals[0] assert Decimal(lon_diff) % Decimal(str(lon_step)) == 0 np.testing.assert_allclose(np.diff(lonvals), lon_step) - assert spec['end_longitude'] >= lonvals[-1] + assert spec["end_longitude"] >= lonvals[-1] diff --git a/tests/unit/preprocessor/_regrid/test_regrid.py b/tests/unit/preprocessor/_regrid/test_regrid.py index f7ffff1228..69589a7a8c 100644 --- a/tests/unit/preprocessor/_regrid/test_regrid.py +++ b/tests/unit/preprocessor/_regrid/test_regrid.py @@ -1,5 +1,6 @@ """Unit tests for the :func:`esmvalcore.preprocessor.regrid.regrid` function.""" + import dask import dask.array as da import iris @@ -19,7 +20,7 @@ def clear_regridder_cache(monkeypatch): """Clear regridder cache before test runs.""" monkeypatch.setattr( - esmvalcore.preprocessor._regrid, '_CACHED_REGRIDDERS', {} + esmvalcore.preprocessor._regrid, "_CACHED_REGRIDDERS", {} ) @@ -28,7 +29,7 @@ def _make_coord(start: float, stop: float, step: int, *, name: str): coord = iris.coords.DimCoord( np.linspace(start, stop, step), standard_name=name, - units='degrees', + units="degrees", ) coord.guess_bounds() return coord @@ -36,8 +37,8 @@ def _make_coord(start: float, stop: float, step: int, *, name: str): def _make_cube(*, lat: tuple, lon: tuple): """Helper function for creating a cube.""" - lat_coord = _make_coord(*lat, name='latitude') - lon_coord = _make_coord(*lon, name='longitude') + lat_coord = _make_coord(*lat, name="latitude") + lon_coord = _make_coord(*lon, name="longitude") return iris.cube.Cube( np.zeros( @@ -76,11 +77,11 @@ def cube_30x30(): return _make_cube(lat=LAT_SPEC4, lon=LON_SPEC4) -SCHEMES = ['area_weighted', 'linear', 'nearest'] +SCHEMES = ["area_weighted", "linear", "nearest"] -@pytest.mark.parametrize('cache_weights', [True, False]) -@pytest.mark.parametrize('scheme', SCHEMES) +@pytest.mark.parametrize("cache_weights", [True, False]) +@pytest.mark.parametrize("scheme", SCHEMES) def test_builtin_regridding(scheme, cache_weights, cube_10x10, cube_30x30): """Test `regrid.`""" _cached_regridders = esmvalcore.preprocessor._regrid._CACHED_REGRIDDERS @@ -88,8 +89,8 @@ def test_builtin_regridding(scheme, cache_weights, cube_10x10, cube_30x30): res = regrid(cube_10x10, cube_30x30, scheme, cache_weights=cache_weights) - assert res.coord('latitude') == cube_30x30.coord('latitude') - assert res.coord('longitude') == cube_30x30.coord('longitude') + assert res.coord("latitude") == cube_30x30.coord("latitude") + assert res.coord("longitude") == cube_30x30.coord("longitude") assert res.dtype == np.float32 assert np.allclose(res.data, 0.0) @@ -101,7 +102,7 @@ def test_builtin_regridding(scheme, cache_weights, cube_10x10, cube_30x30): assert not _cached_regridders -@pytest.mark.parametrize('scheme', SCHEMES) +@pytest.mark.parametrize("scheme", SCHEMES) def test_invalid_target_grid(scheme, cube_10x10, mocker): """Test `regrid.`""" target_grid = mocker.sentinel.target_grid @@ -112,9 +113,12 @@ def test_invalid_target_grid(scheme, cube_10x10, mocker): def test_invalid_scheme(cube_10x10, cube_30x30): """Test `regrid.`""" - msg = "Got invalid regridding scheme string 'wibble'" + msg = ( + "Regridding scheme 'wibble' not available for regular data, " + "expected one of: area_weighted, linear, nearest" + ) with pytest.raises(ValueError, match=msg): - regrid(cube_10x10, cube_30x30, 'wibble') + regrid(cube_10x10, cube_30x30, "wibble") def 
test_regrid_generic_missing_reference(cube_10x10, cube_30x30): @@ -128,10 +132,10 @@ def test_regrid_generic_invalid_reference(cube_10x10, cube_30x30): """Test `regrid.`""" msg = "Could not import specified generic regridding module." with pytest.raises(ValueError, match=msg): - regrid(cube_10x10, cube_30x30, {'reference': 'this.does:not.exist'}) + regrid(cube_10x10, cube_30x30, {"reference": "this.does:not.exist"}) -@pytest.mark.parametrize('cache_weights', [True, False]) +@pytest.mark.parametrize("cache_weights", [True, False]) def test_regrid_generic_regridding(cache_weights, cube_10x10, cube_30x30): """Test `regrid.`""" _cached_regridders = esmvalcore.preprocessor._regrid._CACHED_REGRIDDERS @@ -141,13 +145,13 @@ def test_regrid_generic_regridding(cache_weights, cube_10x10, cube_30x30): cube_10x10, cube_30x30, { - 'reference': 'iris.analysis:Linear', - 'extrapolation_mode': 'mask', + "reference": "iris.analysis:Linear", + "extrapolation_mode": "mask", }, cache_weights=cache_weights, ) cube_lin = regrid( - cube_10x10, cube_30x30, 'linear', cache_weights=cache_weights + cube_10x10, cube_30x30, "linear", cache_weights=cache_weights ) assert cube_gen.dtype == np.float32 assert cube_lin.dtype == np.float32 @@ -163,7 +167,7 @@ def test_regrid_generic_regridding(cache_weights, cube_10x10, cube_30x30): (30,), (30,), ) - key_2 = ('linear', (18,), (36,), (30,), (30,)) + key_2 = ("linear", (18,), (36,), (30,), (30,)) assert key_1 in _cached_regridders assert key_2 in _cached_regridders else: @@ -171,45 +175,45 @@ def test_regrid_generic_regridding(cache_weights, cube_10x10, cube_30x30): @pytest.mark.parametrize( - 'cube2_spec, expected', + "cube2_spec, expected", ( # equal lat/lon ( { - 'lat': LAT_SPEC1, - 'lon': LON_SPEC1, + "lat": LAT_SPEC1, + "lon": LON_SPEC1, }, True, ), # different lon shape ( { - 'lat': LAT_SPEC1, - 'lon': LON_SPEC2, + "lat": LAT_SPEC1, + "lon": LON_SPEC2, }, False, ), # different lat shape ( { - 'lat': LAT_SPEC2, - 'lon': LON_SPEC1, + "lat": LAT_SPEC2, + "lon": LON_SPEC1, }, False, ), # different lon values ( { - 'lat': LAT_SPEC1, - 'lon': LON_SPEC3, + "lat": LAT_SPEC1, + "lon": LON_SPEC3, }, False, ), # different lat values ( { - 'lat': LAT_SPEC3, - 'lon': LON_SPEC1, + "lat": LAT_SPEC3, + "lon": LON_SPEC1, }, False, ), @@ -226,89 +230,165 @@ def test_horizontal_grid_is_close(cube2_spec: dict, expected: bool): def test_regrid_is_skipped_if_grids_are_the_same(): """Test that regridding is skipped if the grids are the same.""" cube = _make_cube(lat=LAT_SPEC1, lon=LON_SPEC1) - scheme = 'linear' + scheme = "linear" # regridding to the same spec returns the same cube - expected_same_cube = regrid(cube, target_grid='10x10', scheme=scheme) + expected_same_cube = regrid(cube, target_grid="10x10", scheme=scheme) assert expected_same_cube is cube # regridding to a different spec returns a different cube - expected_different_cube = regrid(cube, target_grid='5x5', scheme=scheme) + expected_different_cube = regrid(cube, target_grid="5x5", scheme=scheme) assert expected_different_cube is not cube -def make_test_cube(shape): - data = da.empty(shape, dtype=np.float32) +def make_test_cube_rectilinear(shape): + chunks = ["auto"] * len(shape) + chunks[-2] = chunks[-1] = None + data = da.empty(shape, chunks=chunks, dtype=np.float32) cube = iris.cube.Cube(data) if len(shape) > 2: cube.add_dim_coord( iris.coords.DimCoord( np.arange(shape[0]), - standard_name='time', + standard_name="time", ), 0, ) cube.add_dim_coord( iris.coords.DimCoord( - np.linspace(-90., 90., shape[-2], endpoint=True), - 
standard_name='latitude', + np.linspace(-90.0, 90.0, shape[-2], endpoint=True), + standard_name="latitude", ), len(shape) - 2, ) cube.add_dim_coord( iris.coords.DimCoord( - np.linspace(0., 360., shape[-1]), - standard_name='longitude', + np.linspace(0.0, 360.0, shape[-1]), + standard_name="longitude", ), len(shape) - 1, ) return cube -def test_rechunk_on_increased_grid(): - """Test that an increase in grid size rechunks.""" - with dask.config.set({'array.chunk-size': '128 M'}): +def make_test_cube_irregular(shape): + data = da.empty(shape, dtype=np.float32) + cube = iris.cube.Cube(data) + if len(shape) > 2: + cube.add_dim_coord( + iris.coords.DimCoord( + np.arange(shape[0]), + standard_name="time", + ), + 0, + ) + lat_points = np.linspace(-90.0, 90.0, shape[-2], endpoint=True) + lon_points = np.linspace(0.0, 360.0, shape[-1]) + + cube.add_aux_coord( + iris.coords.AuxCoord( + np.broadcast_to(lat_points.reshape(-1, 1), shape[-2:]), + standard_name="latitude", + ), + (-2, -1), + ) + cube.add_aux_coord( + iris.coords.AuxCoord( + np.broadcast_to(lon_points.reshape(1, -1), shape[-2:]), + standard_name="longitude", + ), + (-2, -1), + ) + return cube + + +def make_test_cube_unstructured(shape): + data = da.empty(shape, dtype=np.float32) + cube = iris.cube.Cube(data) + if len(shape) > 1: + cube.add_dim_coord( + iris.coords.DimCoord( + np.arange(shape[0]), + standard_name="time", + ), + 0, + ) + lat_points = np.linspace(-90.0, 90.0, shape[-1], endpoint=True) + lon_points = np.linspace(0.0, 360.0, shape[-1]) + + cube.add_aux_coord( + iris.coords.AuxCoord( + lat_points, + standard_name="latitude", + ), + (-1,), + ) + cube.add_aux_coord( + iris.coords.AuxCoord( + lon_points, + standard_name="longitude", + ), + (-1,), + ) + return cube - time_dim = 246 - src_grid_dims = (91, 180) - data = da.empty((time_dim, ) + src_grid_dims, dtype=np.float32) +@pytest.mark.parametrize( + "grids", + [ + ("rectilinear", "rectilinear"), + ("rectilinear", "irregular"), + ("irregular", "rectilinear"), + ("irregular", "irregular"), + ("unstructured", "rectilinear"), + ], +) +def test_rechunk_on_increased_grid(grids): + """Test that an increase in grid size rechunks.""" + with dask.config.set({"array.chunk-size": "128 M"}): + src_grid, tgt_grid = grids + src_dims = (246, 91, 180) + if src_grid == "unstructured": + src_dims = src_dims[:-2] + (np.prod(src_dims[-2:]),) tgt_grid_dims = (2, 361, 720) - tgt_grid = make_test_cube(tgt_grid_dims) - result = _rechunk(iris.cube.Cube(data), tgt_grid) + src_cube = globals()[f"make_test_cube_{src_grid}"](src_dims) + tgt_grid = globals()[f"make_test_cube_{tgt_grid}"](tgt_grid_dims) + result = _rechunk(src_cube, tgt_grid) - assert result.core_data().chunks == ((123, 123), (91, ), (180, )) + expected = ((123, 123), (91,), (180,)) + if src_grid == "unstructured": + expected = expected[:-2] + (np.prod(expected[-2:]),) + assert result.core_data().chunks == expected def test_no_rechunk_on_decreased_grid(): """Test that a decrease in grid size does not rechunk.""" - with dask.config.set({'array.chunk-size': '128 M'}): - - time_dim = 200 - src_grid_dims = (361, 720) - data = da.empty((time_dim, ) + src_grid_dims, dtype=np.float32) + with dask.config.set({"array.chunk-size": "128 M"}): + src_dims = (200, 361, 720) + src_cube = make_test_cube_rectilinear(src_dims) tgt_grid_dims = (91, 180) - tgt_grid = make_test_cube(tgt_grid_dims) + tgt_grid_cube = make_test_cube_rectilinear(tgt_grid_dims) - result = _rechunk(iris.cube.Cube(data), tgt_grid) + expected = src_cube.core_data().chunks + result = 
_rechunk(src_cube, tgt_grid_cube) - assert result.core_data().chunks == data.chunks + assert result.core_data().chunks == expected -def test_no_rechunk_2d(): - """Test that a 2D cube is not rechunked.""" - with dask.config.set({'array.chunk-size': '64 MiB'}): - +def test_no_rechunk_horizontal_only(): + """Test that a horizontal only cube is not rechunked.""" + with dask.config.set({"array.chunk-size": "64 MiB"}): src_grid_dims = (361, 720) - data = da.empty(src_grid_dims, dtype=np.float32) + src_cube = make_test_cube_rectilinear(src_grid_dims) tgt_grid_dims = (3601, 7200) - tgt_grid = da.empty(tgt_grid_dims, dtype=np.float32) + tgt_grid_cube = make_test_cube_rectilinear(tgt_grid_dims) - result = _rechunk(iris.cube.Cube(data), iris.cube.Cube(tgt_grid)) + expected = src_cube.core_data().chunks + result = _rechunk(src_cube, tgt_grid_cube) - assert result.core_data().chunks == data.chunks + assert result.core_data().chunks == expected def test_no_rechunk_non_lazy(): @@ -319,57 +399,23 @@ def test_no_rechunk_non_lazy(): assert result.data is cube.data -def test_no_rechunk_unsupported_grid(): - """Test that 2D target coordinates are ignored. - - Because they are not supported at the moment. This could be - implemented at a later stage if needed. - """ - cube = iris.cube.Cube(da.arange(2 * 4).reshape([1, 2, 4])) - tgt_grid_dims = (5, 10) - tgt_data = da.empty(tgt_grid_dims, dtype=np.float32) - tgt_grid = iris.cube.Cube(tgt_data) - lat_points = np.linspace(-90., 90., tgt_grid_dims[0], endpoint=True) - lon_points = np.linspace(0., 360., tgt_grid_dims[1]) - - tgt_grid.add_aux_coord( - iris.coords.AuxCoord( - np.broadcast_to(lat_points.reshape(-1, 1), tgt_grid_dims), - standard_name='latitude', - ), - (0, 1), - ) - tgt_grid.add_aux_coord( - iris.coords.AuxCoord( - np.broadcast_to(lon_points.reshape(1, -1), tgt_grid_dims), - standard_name='longitude', - ), - (0, 1), - ) - - expected_chunks = cube.core_data().chunks - result = _rechunk(cube, tgt_grid) - assert result is cube - assert result.core_data().chunks == expected_chunks - - -@pytest.mark.parametrize('scheme', SCHEMES) +@pytest.mark.parametrize("scheme", SCHEMES) def test_regridding_weights_use_cache(scheme, cube_10x10, cube_30x30, mocker): """Test `regrid.`""" _cached_regridders = esmvalcore.preprocessor._regrid._CACHED_REGRIDDERS assert _cached_regridders == {} - src_lat = cube_10x10.coord('latitude') - src_lon = cube_10x10.coord('longitude') - tgt_lat = cube_30x30.coord('latitude') - tgt_lon = cube_30x30.coord('longitude') + src_lat = cube_10x10.coord("latitude") + src_lon = cube_10x10.coord("longitude") + tgt_lat = cube_30x30.coord("latitude") + tgt_lon = cube_30x30.coord("longitude") key = (scheme, (18,), (36,), (30,), (30,)) _cached_regridders[key] = {} _cached_regridders[key][(src_lat, src_lon, tgt_lat, tgt_lon)] = ( mocker.sentinel.regridder ) mock_load_scheme = mocker.patch.object( - esmvalcore.preprocessor._regrid, '_load_scheme', autospec=True + esmvalcore.preprocessor._regrid, "_load_scheme", autospec=True ) reg = _get_regridder(cube_10x10, cube_30x30, scheme, cache_weights=True) @@ -385,7 +431,7 @@ def test_regridding_weights_use_cache(scheme, cube_10x10, cube_30x30, mocker): def test_clear_regridding_weights_cache(): """Test `regrid.cache_clear().`""" _cached_regridders = esmvalcore.preprocessor._regrid._CACHED_REGRIDDERS - _cached_regridders['test'] = 'test' + _cached_regridders["test"] = "test" regrid.cache_clear() diff --git a/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py 
b/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py index 371ab49684..0a2f1d4279 100644 --- a/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py +++ b/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py @@ -1,5 +1,5 @@ """Unit tests for the esmvalcore.preprocessor._regrid_esmpy module.""" -import sys + from unittest import mock import cf_units @@ -76,9 +76,9 @@ class MockUnmappedAction(mock.Mock): ESMF_REGRID_METHODS = { - 'linear': MockRegridMethod.BILINEAR, - 'area_weighted': MockRegridMethod.CONSERVE, - 'nearest': MockRegridMethod.NEAREST_STOD, + "linear": MockRegridMethod.BILINEAR, + "area_weighted": MockRegridMethod.CONSERVE, + "nearest": MockRegridMethod.NEAREST_STOD, } MASK_REGRIDDING_MASK_VALUE = { @@ -88,19 +88,29 @@ class MockUnmappedAction(mock.Mock): } -@mock.patch('esmvalcore.preprocessor._regrid_esmpy.MASK_REGRIDDING_MASK_VALUE', - MASK_REGRIDDING_MASK_VALUE) -@mock.patch('esmvalcore.preprocessor._regrid_esmpy.ESMF_REGRID_METHODS', - ESMF_REGRID_METHODS) -@mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.Manager', mock.Mock) -@mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.GridItem', - MockGridItem) -@mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.RegridMethod', - MockRegridMethod) -@mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.StaggerLoc', - MockStaggerLoc) -@mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.UnmappedAction', - MockUnmappedAction) +@mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.MASK_REGRIDDING_MASK_VALUE", + MASK_REGRIDDING_MASK_VALUE, +) +@mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.ESMF_REGRID_METHODS", + ESMF_REGRID_METHODS, +) +@mock.patch("esmvalcore.preprocessor._regrid_esmpy.esmpy.Manager", mock.Mock) +@mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.esmpy.GridItem", MockGridItem +) +@mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.esmpy.RegridMethod", + MockRegridMethod, +) +@mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.esmpy.StaggerLoc", MockStaggerLoc +) +@mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.esmpy.UnmappedAction", + MockUnmappedAction, +) class TestHelpers(tests.Test): """Unit tests for helper functions.""" @@ -110,145 +120,191 @@ def setUp(self): # pylint: disable=too-many-locals lat_1d_pre_bounds = np.linspace(-90, 90, 5) lat_1d_bounds = np.stack( - [lat_1d_pre_bounds[:-1], lat_1d_pre_bounds[1:]], axis=1) + [lat_1d_pre_bounds[:-1], lat_1d_pre_bounds[1:]], axis=1 + ) lat_1d_points = lat_1d_bounds.mean(axis=1) lon_1d_pre_bounds = np.linspace(0, 360, 5) lon_1d_bounds = np.stack( - [lon_1d_pre_bounds[:-1], lon_1d_pre_bounds[1:]], axis=1) + [lon_1d_pre_bounds[:-1], lon_1d_pre_bounds[1:]], axis=1 + ) lon_1d_points = lon_1d_bounds.mean(axis=1) - lon_2d_points, lat_2d_points = np.meshgrid(lon_1d_points, - lat_1d_points) - (lon_2d_pre_bounds, - lat_2d_pre_bounds) = np.meshgrid(lon_1d_pre_bounds, lat_1d_pre_bounds) - lat_2d_bounds = np.stack([ - lat_2d_pre_bounds[:-1, :-1], lat_2d_pre_bounds[:-1, 1:], - lat_2d_pre_bounds[1:, 1:], lat_2d_pre_bounds[1:, :-1] - ], - axis=2) - lon_2d_bounds = np.stack([ - lon_2d_pre_bounds[:-1, :-1], lon_2d_pre_bounds[:-1, 1:], - lon_2d_pre_bounds[1:, 1:], lon_2d_pre_bounds[1:, :-1] - ], - axis=2) - self.lat_1d = mock.Mock(iris.coords.DimCoord, - standard_name='latitude', - long_name='latitude', - ndim=1, - points=lat_1d_points, - bounds=lat_1d_bounds, - has_bounds=mock.Mock(return_value=True)) + lon_2d_points, lat_2d_points = np.meshgrid( + lon_1d_points, lat_1d_points + ) + (lon_2d_pre_bounds, 
lat_2d_pre_bounds) = np.meshgrid( + lon_1d_pre_bounds, lat_1d_pre_bounds + ) + lat_2d_bounds = np.stack( + [ + lat_2d_pre_bounds[:-1, :-1], + lat_2d_pre_bounds[:-1, 1:], + lat_2d_pre_bounds[1:, 1:], + lat_2d_pre_bounds[1:, :-1], + ], + axis=2, + ) + lon_2d_bounds = np.stack( + [ + lon_2d_pre_bounds[:-1, :-1], + lon_2d_pre_bounds[:-1, 1:], + lon_2d_pre_bounds[1:, 1:], + lon_2d_pre_bounds[1:, :-1], + ], + axis=2, + ) + self.lat_1d = mock.Mock( + iris.coords.DimCoord, + standard_name="latitude", + long_name="latitude", + ndim=1, + points=lat_1d_points, + bounds=lat_1d_bounds, + has_bounds=mock.Mock(return_value=True), + ) self.lat_1d_no_bounds = mock.Mock( iris.coords.DimCoord, - standard_name='latitude', + standard_name="latitude", ndim=1, points=lat_1d_points, has_bounds=mock.Mock(return_value=False), bounds=lat_1d_bounds, - guess_bounds=mock.Mock()) - self.lon_1d = mock.Mock(iris.coords.DimCoord, - standard_name='longitude', - long_name='longitude', - ndim=1, - points=lon_1d_points, - bounds=lon_1d_bounds, - has_bounds=mock.Mock(return_value=True), - circular=True) - self.lon_1d_aux = mock.Mock(iris.coords.AuxCoord, - standard_name='longitude', - long_name='longitude', - ndim=1, - shape=lon_1d_points.shape, - points=lon_1d_points, - bounds=lon_1d_bounds, - has_bounds=mock.Mock(return_value=True)) - self.lat_2d = mock.Mock(iris.coords.AuxCoord, - standard_name='latitude', - long_name='latitude', - ndim=2, - points=lat_2d_points, - bounds=lat_2d_bounds, - has_bounds=mock.Mock(return_value=True)) - self.lon_2d = mock.Mock(iris.coords.AuxCoord, - standard_name='longitude', - long_name='longitude', - ndim=2, - points=lon_2d_points, - bounds=lon_2d_bounds, - has_bounds=mock.Mock(return_value=True)) + guess_bounds=mock.Mock(), + ) + self.lon_1d = mock.Mock( + iris.coords.DimCoord, + standard_name="longitude", + long_name="longitude", + ndim=1, + points=lon_1d_points, + bounds=lon_1d_bounds, + has_bounds=mock.Mock(return_value=True), + circular=True, + ) + self.lon_1d_aux = mock.Mock( + iris.coords.AuxCoord, + standard_name="longitude", + long_name="longitude", + ndim=1, + shape=lon_1d_points.shape, + points=lon_1d_points, + bounds=lon_1d_bounds, + has_bounds=mock.Mock(return_value=True), + ) + self.lat_2d = mock.Mock( + iris.coords.AuxCoord, + standard_name="latitude", + long_name="latitude", + ndim=2, + points=lat_2d_points, + bounds=lat_2d_bounds, + has_bounds=mock.Mock(return_value=True), + ) + self.lon_2d = mock.Mock( + iris.coords.AuxCoord, + standard_name="longitude", + long_name="longitude", + ndim=2, + points=lon_2d_points, + bounds=lon_2d_bounds, + has_bounds=mock.Mock(return_value=True), + ) self.lon_2d_non_circular = mock.Mock( iris.coords.AuxCoord, - standard_name='longitude', + standard_name="longitude", ndim=2, points=lon_2d_points[:, 1:-1], bounds=lon_2d_bounds[:, 1:-1], - has_bounds=mock.Mock(return_value=True)) - self.lat_3d = mock.Mock(iris.coords.AuxCoord, - standard_name='latitude', - long_name='latitude', - ndim=3) - self.lon_3d = mock.Mock(iris.coords.AuxCoord, - standard_name='longitude', - long_name='longitude', - ndim=3) + has_bounds=mock.Mock(return_value=True), + ) + self.lat_3d = mock.Mock( + iris.coords.AuxCoord, + standard_name="latitude", + long_name="latitude", + ndim=3, + ) + self.lon_3d = mock.Mock( + iris.coords.AuxCoord, + standard_name="longitude", + long_name="longitude", + ndim=3, + ) depth_pre_bounds = np.linspace(0, 5000, 5) - depth_bounds = np.stack([depth_pre_bounds[:-1], depth_pre_bounds[1:]], - axis=1) + depth_bounds = np.stack( + 
[depth_pre_bounds[:-1], depth_pre_bounds[1:]], axis=1 + ) depth_points = depth_bounds.mean(axis=1) - self.depth = mock.Mock(iris.coords.DimCoord, - standard_name='depth', - long_name='depth', - ndim=1, - shape=depth_points.shape, - points=depth_points, - bounds=depth_bounds, - has_bounds=mock.Mock(return_value=True)) - self.scalar_coord = mock.Mock(iris.coords.AuxCoord, - long_name='scalar_coord', - ndim=1, - shape=(1, )) + self.depth = mock.Mock( + iris.coords.DimCoord, + standard_name="depth", + long_name="depth", + ndim=1, + shape=depth_points.shape, + points=depth_points, + bounds=depth_bounds, + has_bounds=mock.Mock(return_value=True), + ) + self.scalar_coord = mock.Mock( + iris.coords.AuxCoord, long_name="scalar_coord", ndim=1, shape=(1,) + ) data_shape = lon_2d_points.shape raw_data = np.arange(np.prod(data_shape)).reshape(data_shape) mask = np.zeros(data_shape) - mask[:data_shape[0] // 2] = True + mask[: data_shape[0] // 2] = True self.data = np.ma.masked_array(raw_data, mask) - self.data_3d = np.repeat(self.data[..., np.newaxis], - depth_points.shape[0], - axis=-1) - self.expected_esmpy_lat = np.array([[-67.5, -22.5, 22.5, 67.5], - [-67.5, -22.5, 22.5, 67.5], - [-67.5, -22.5, 22.5, 67.5], - [-67.5, -22.5, 22.5, 67.5]]) - self.expected_esmpy_lon = np.array([[45., 45., 45., 45.], - [135., 135., 135., 135.], - [225., 225., 225., 225.], - [315., 315., 315., 315.]]) - self.expected_esmpy_lat_corners = np.array([[-90., -45., 0., 45., 90.], - [-90., -45., 0., 45., 90.], - [-90., -45., 0., 45., 90.], - [-90., -45., 0., 45., 90.], - [-90., -45., 0., 45., - 90.]]) + self.data_3d = np.repeat( + self.data[..., np.newaxis], depth_points.shape[0], axis=-1 + ) + self.expected_esmpy_lat = np.array( + [ + [-67.5, -22.5, 22.5, 67.5], + [-67.5, -22.5, 22.5, 67.5], + [-67.5, -22.5, 22.5, 67.5], + [-67.5, -22.5, 22.5, 67.5], + ] + ) + self.expected_esmpy_lon = np.array( + [ + [45.0, 45.0, 45.0, 45.0], + [135.0, 135.0, 135.0, 135.0], + [225.0, 225.0, 225.0, 225.0], + [315.0, 315.0, 315.0, 315.0], + ] + ) + self.expected_esmpy_lat_corners = np.array( + [ + [-90.0, -45.0, 0.0, 45.0, 90.0], + [-90.0, -45.0, 0.0, 45.0, 90.0], + [-90.0, -45.0, 0.0, 45.0, 90.0], + [-90.0, -45.0, 0.0, 45.0, 90.0], + [-90.0, -45.0, 0.0, 45.0, 90.0], + ] + ) self.expected_esmpy_lon_corners = np.array( - [[0., 0., 0., 0., 0.], [90., 90., 90., 90., 90.], - [180., 180., 180., 180., 180.], [270., 270., 270., 270., 270.], - [360., 360., 360., 360., 360.]]) + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [90.0, 90.0, 90.0, 90.0, 90.0], + [180.0, 180.0, 180.0, 180.0, 180.0], + [270.0, 270.0, 270.0, 270.0, 270.0], + [360.0, 360.0, 360.0, 360.0, 360.0], + ] + ) self.coords = { - 'latitude': self.lat_2d, - 'longitude': self.lon_2d, - 'depth': self.depth, - 'scalar_coord': self.scalar_coord, + "latitude": self.lat_2d, + "longitude": self.lon_2d, + "depth": self.depth, + "scalar_coord": self.scalar_coord, } self.coord_dims = { - 'latitude': (0, 1), - 'longitude': (0, 1), + "latitude": (0, 1), + "longitude": (0, 1), self.lat_2d: (0, 1), self.lon_2d: (0, 1), - 'scalar_coord': (), + "scalar_coord": (), } def coord(name=None, axis=None): """Return selected coordinate for mock cube.""" - if axis == 'Z': + if axis == "Z": raise CoordinateNotFoundError() return self.coords[name] @@ -263,7 +319,7 @@ def coords(dim_coords=None, dimensions=None): self.cube = mock.Mock( spec=iris.cube.Cube, dtype=np.float32, - long_name='longname', + long_name="longname", ndim=2, shape=self.data.shape, data=self.data, @@ -276,22 +332,22 @@ def coords(dim_coords=None, 
dimensions=None): self.unmasked_cube = mock.Mock( spec=iris.cube.Cube, dtype=np.float32, - long_name='longname', + long_name="longname", ) self.coord_dims_3d = { - 'latitude': (1, 2), - 'longitude': (1, 2), + "latitude": (1, 2), + "longitude": (1, 2), self.lat_2d: (1, 2), self.lon_2d: (1, 2), - 'depth': (0, ), - self.depth: (0, ), + "depth": (0,), + self.depth: (0,), } def coord_3d(name=None, dimensions=None, dim_coords=None, axis=None): """Return coord for 3d mock cube.""" # pylint: disable=unused-argument - if axis == 'Z' or dimensions == [0]: - return self.coords['depth'] + if axis == "Z" or dimensions == [0]: + return self.coords["depth"] return self.coords[name] def coords_3d(dimensions=None): @@ -304,9 +360,9 @@ def coords_3d(dimensions=None): spec=iris.cube.Cube, dtype=np.float32, standard_name=None, - long_name='longname', - var_name='ln', - units=cf_units.Unit('1'), + long_name="longname", + var_name="ln", + units=cf_units.Unit("1"), attributes={}, cell_methods=[], ndim=3, @@ -320,13 +376,19 @@ def coords_3d(dimensions=None): def test_coords_iris_to_esmpy_mismatched_dimensions(self): """Test coord conversion with mismatched dimensions.""" - self.assertRaises(ValueError, coords_iris_to_esmpy, self.lat_1d, - self.lon_2d, True) + self.assertRaises( + ValueError, coords_iris_to_esmpy, self.lat_1d, self.lon_2d, True + ) def test_coords_iris_to_esmpy_invalid_dimensions(self): """Test coord conversion with invalid dimensions.""" - self.assertRaises(NotImplementedError, coords_iris_to_esmpy, - self.lat_3d, self.lon_3d, True) + self.assertRaises( + NotImplementedError, + coords_iris_to_esmpy, + self.lat_3d, + self.lon_3d, + True, + ) def test_coords_iris_to_esmpy_call_guess_bounds(self): """Test coord conversion with missing bounds.""" @@ -335,55 +397,60 @@ def test_coords_iris_to_esmpy_call_guess_bounds(self): def test_coords_iris_to_esmpy_1d_circular(self): """Test coord conversion with 1d coords and circular longitudes.""" - (esmpy_lat, esmpy_lon, esmpy_lat_corners, - esmpy_lon_corners) = coords_iris_to_esmpy(self.lat_1d, self.lon_1d, - True) + (esmpy_lat, esmpy_lon, esmpy_lat_corners, esmpy_lon_corners) = ( + coords_iris_to_esmpy(self.lat_1d, self.lon_1d, True) + ) self.assert_array_equal(esmpy_lat, self.expected_esmpy_lat) self.assert_array_equal(esmpy_lon, self.expected_esmpy_lon) - self.assert_array_equal(esmpy_lat_corners, - self.expected_esmpy_lat_corners[:-1]) - self.assert_array_equal(esmpy_lon_corners, - self.expected_esmpy_lon_corners[:-1]) + self.assert_array_equal( + esmpy_lat_corners, self.expected_esmpy_lat_corners[:-1] + ) + self.assert_array_equal( + esmpy_lon_corners, self.expected_esmpy_lon_corners[:-1] + ) def test_coords_iris_to_esmpy_1d_non_circular(self): """Test coord conversion with 1d coords and non circular longitudes.""" - (esmpy_lat, esmpy_lon, esmpy_lat_corners, - esmpy_lon_corners) = coords_iris_to_esmpy(self.lat_1d, self.lon_1d, - False) + (esmpy_lat, esmpy_lon, esmpy_lat_corners, esmpy_lon_corners) = ( + coords_iris_to_esmpy(self.lat_1d, self.lon_1d, False) + ) self.assert_array_equal(esmpy_lat, self.expected_esmpy_lat) self.assert_array_equal(esmpy_lon, self.expected_esmpy_lon) - self.assert_array_equal(esmpy_lat_corners, - self.expected_esmpy_lat_corners) - self.assert_array_equal(esmpy_lon_corners, - self.expected_esmpy_lon_corners) + self.assert_array_equal( + esmpy_lat_corners, self.expected_esmpy_lat_corners + ) + self.assert_array_equal( + esmpy_lon_corners, self.expected_esmpy_lon_corners + ) def test_coords_iris_to_esmpy_2d_circular(self): 
"""Test coord conversion with 2d coords and circular longitudes.""" - (esmpy_lat, esmpy_lon, esmpy_lat_corners, - esmpy_lon_corners) = coords_iris_to_esmpy(self.lat_2d, self.lon_2d, - True) + (esmpy_lat, esmpy_lon, esmpy_lat_corners, esmpy_lon_corners) = ( + coords_iris_to_esmpy(self.lat_2d, self.lon_2d, True) + ) self.assert_array_equal(esmpy_lat, self.expected_esmpy_lat) self.assert_array_equal(esmpy_lon, self.expected_esmpy_lon) - self.assert_array_equal(esmpy_lat_corners, - self.expected_esmpy_lat_corners[:-1]) - self.assert_array_equal(esmpy_lon_corners, - self.expected_esmpy_lon_corners[:-1]) + self.assert_array_equal( + esmpy_lat_corners, self.expected_esmpy_lat_corners[:-1] + ) + self.assert_array_equal( + esmpy_lon_corners, self.expected_esmpy_lon_corners[:-1] + ) def test_coords_iris_to_esmpy_2d_non_circular(self): """Test coord conversion with 2d coords and non circular longitudes.""" - (esmpy_lat, esmpy_lon, esmpy_lat_corners, - esmpy_lon_corners) = coords_iris_to_esmpy(self.lat_2d, self.lon_2d, - False) + (esmpy_lat, esmpy_lon, esmpy_lat_corners, esmpy_lon_corners) = ( + coords_iris_to_esmpy(self.lat_2d, self.lon_2d, False) + ) self.assert_array_equal(esmpy_lat, self.expected_esmpy_lat) self.assert_array_equal(esmpy_lon, self.expected_esmpy_lon) - self.assert_array_equal(esmpy_lat_corners, - self.expected_esmpy_lat_corners) - self.assert_array_equal(esmpy_lon_corners, - self.expected_esmpy_lon_corners) - - @pytest.mark.skipif(sys.version_info.major == 3 - and sys.version_info.minor == 9, - reason="bug in mock.py for Python 3.9.0 and 3.9.1") + self.assert_array_equal( + esmpy_lat_corners, self.expected_esmpy_lat_corners + ) + self.assert_array_equal( + esmpy_lon_corners, self.expected_esmpy_lon_corners + ) + def test_get_grid_circular(self): """Test building of ESMF grid from iris cube circular longitude.""" expected_get_coords_calls = [ @@ -392,22 +459,25 @@ def test_get_grid_circular(self): mock.call(0, staggerloc=mock.sentinel.sl_corner), mock.call(1, staggerloc=mock.sentinel.sl_corner), ] - with mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.Grid', - MockGrid) as mg: + with mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.esmpy.Grid", MockGrid + ) as mg: mg.get_coords.reset_mock() mg.add_coords.reset_mock() mg.add_item.reset_mock() - get_grid(self.expected_esmpy_lat, self.expected_esmpy_lon, - self.expected_esmpy_lat_corners[:-1], - self.expected_esmpy_lon_corners[:-1], True) + get_grid( + self.expected_esmpy_lat, + self.expected_esmpy_lon, + self.expected_esmpy_lat_corners[:-1], + self.expected_esmpy_lon_corners[:-1], + True, + ) mg.get_coords.assert_has_calls(expected_get_coords_calls) mg.add_coords.assert_called_once_with([mock.sentinel.sl_corner]) - mg.add_item.assert_called_once_with(mock.sentinel.gi_mask, - mock.sentinel.sl_center) + mg.add_item.assert_called_once_with( + mock.sentinel.gi_mask, mock.sentinel.sl_center + ) - @pytest.mark.skipif(sys.version_info.major == 3 - and sys.version_info.minor == 9, - reason="bug in mock.py for Python 3.9.0 and 3.9.1") def test_get_grid_non_circular(self): """Test building of ESMF grid from iris cube non circular longitude.""" expected_get_coords_calls = [ @@ -416,18 +486,24 @@ def test_get_grid_non_circular(self): mock.call(0, staggerloc=mock.sentinel.sl_corner), mock.call(1, staggerloc=mock.sentinel.sl_corner), ] - with mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.Grid', - MockGrid) as mg: + with mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.esmpy.Grid", MockGrid + ) as mg: 
mg.get_coords.reset_mock() mg.add_coords.reset_mock() mg.add_item.reset_mock() - get_grid(self.expected_esmpy_lat, self.expected_esmpy_lon, - self.expected_esmpy_lat_corners, - self.expected_esmpy_lon_corners, False) + get_grid( + self.expected_esmpy_lat, + self.expected_esmpy_lon, + self.expected_esmpy_lat_corners, + self.expected_esmpy_lon_corners, + False, + ) mg.get_coords.assert_has_calls(expected_get_coords_calls) mg.add_coords.assert_called_once_with([mock.sentinel.sl_corner]) - mg.add_item.assert_called_once_with(mock.sentinel.gi_mask, - mock.sentinel.sl_center) + mg.add_item.assert_called_once_with( + mock.sentinel.gi_mask, mock.sentinel.sl_center + ) def test_is_lon_circular_dim_coords_true(self): """Test detection of circular longitudes 1d dim coords.""" @@ -463,60 +539,65 @@ def test_is_lon_circular_2d_aux_coords_non_circ(self): is_circ = is_lon_circular(self.lon_2d_non_circular) self.assertFalse(is_circ) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.Grid', MockGrid) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.Field') - @pytest.mark.skipif(sys.version_info.major == 3 - and sys.version_info.minor == 9, - reason="bug in mock.py for Python 3.9.0 and 3.9.1") + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.esmpy.Grid", MockGrid) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.esmpy.Field") def test_cube_to_empty_field(self, mock_field): """Test building of empty field from iris cube.""" field = cube_to_empty_field(self.cube) self.assertEqual(mock_field.return_value, field) mock_field.assert_called_once() ckwargs = mock_field.call_args[1] - self.assertEqual('longname', ckwargs['name']) - self.assertEqual(mock.sentinel.sl_center, ckwargs['staggerloc']) + self.assertEqual("longname", ckwargs["name"]) + self.assertEqual(mock.sentinel.sl_center, ckwargs["staggerloc"]) def test_get_representant(self): """Test extraction of horizontal representant from iris cube.""" - horizontal_slice = ['latitude', 'longitude'] + horizontal_slice = ["latitude", "longitude"] get_representant(self.cube, horizontal_slice) self.cube.__getitem__.assert_called_once_with( - (slice(None, None, None), slice(None, None, None))) + (slice(None, None, None), slice(None, None, None)) + ) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.cube_to_empty_field', - mock_cube_to_empty_field) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.Regrid') + @mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.cube_to_empty_field", + mock_cube_to_empty_field, + ) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.esmpy.Regrid") def test_build_regridder_2d_masked_data(self, mock_regrid): """Test building of 2d regridder for masked data.""" - mock_regrid.return_value = mock.Mock(return_value=mock.Mock( - data=self.data.T)) + mock_regrid.return_value = mock.Mock( + return_value=mock.Mock(data=self.data.T) + ) regrid_method = mock.sentinel.rm_bilinear src_rep = mock.MagicMock(data=self.data) dst_rep = mock.MagicMock() src_rep.field = mock.MagicMock(data=self.data.copy()) dst_rep.field = mock.MagicMock() - build_regridder_2d(src_rep, dst_rep, regrid_method, .99) + build_regridder_2d(src_rep, dst_rep, regrid_method, 0.99) expected_calls = [ - mock.call(src_mask_values=np.array([]), - dst_mask_values=np.array([]), - srcfield=src_rep.field, - dstfield=dst_rep.field, - unmapped_action=mock.sentinel.ua_ignore, - ignore_degenerate=True, - regrid_method=regrid_method), - mock.call(src_mask_values=np.array([1]), - dst_mask_values=np.array([1]), - regrid_method=regrid_method, - 
srcfield=src_rep.field, - dstfield=dst_rep.field, - unmapped_action=mock.sentinel.ua_ignore, - ignore_degenerate=True), + mock.call( + src_mask_values=np.array([]), + dst_mask_values=np.array([]), + srcfield=src_rep.field, + dstfield=dst_rep.field, + unmapped_action=mock.sentinel.ua_ignore, + ignore_degenerate=True, + regrid_method=regrid_method, + ), + mock.call( + src_mask_values=np.array([1]), + dst_mask_values=np.array([1]), + regrid_method=regrid_method, + srcfield=src_rep.field, + dstfield=dst_rep.field, + unmapped_action=mock.sentinel.ua_ignore, + ignore_degenerate=True, + ), ] kwargs = mock_regrid.call_args_list[0][-1] expected_kwargs = expected_calls[0][-1] self.assertEqual(expected_kwargs.keys(), kwargs.keys()) - array_keys = set(['src_mask_values', 'dst_mask_values']) + array_keys = set(["src_mask_values", "dst_mask_values"]) for key in kwargs.keys(): if key in array_keys: self.assertTrue((expected_kwargs[key] == kwargs[key]).all()) @@ -524,9 +605,11 @@ def test_build_regridder_2d_masked_data(self, mock_regrid): self.assertEqual(expected_kwargs[key], kwargs[key]) self.assertTrue(mock_regrid.call_args_list[1] == expected_calls[1]) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.cube_to_empty_field', - mock_cube_to_empty_field) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.Regrid') + @mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.cube_to_empty_field", + mock_cube_to_empty_field, + ) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.esmpy.Regrid") def test_regridder_2d_unmasked_data(self, mock_regrid): """Test regridder for unmasked 2d data.""" field_regridder = mock.Mock(return_value=mock.Mock(data=self.data.T)) @@ -534,14 +617,16 @@ def test_regridder_2d_unmasked_data(self, mock_regrid): regrid_method = mock.sentinel.rm_bilinear src_rep = mock.MagicMock(data=self.data, dtype=np.float32) dst_rep = mock.MagicMock(shape=(4, 4)) - regridder = build_regridder_2d(src_rep, dst_rep, regrid_method, .99) + regridder = build_regridder_2d(src_rep, dst_rep, regrid_method, 0.99) field_regridder.reset_mock() regridder(src_rep) field_regridder.assert_called_once_with(src_rep.field, dst_rep.field) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.cube_to_empty_field', - mock_cube_to_empty_field) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.esmpy.Regrid') + @mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.cube_to_empty_field", + mock_cube_to_empty_field, + ) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.esmpy.Regrid") def test_regridder_2d_masked_data(self, mock_regrid): """Test regridder for masked 2d data.""" field_regridder = mock.Mock(return_value=mock.Mock(data=self.data.T)) @@ -549,77 +634,86 @@ def test_regridder_2d_masked_data(self, mock_regrid): regrid_method = mock.sentinel.rm_bilinear src_rep = mock.MagicMock(data=self.data) dst_rep = mock.MagicMock(shape=(4, 4)) - regridder = build_regridder_2d(src_rep, dst_rep, regrid_method, .99) + regridder = build_regridder_2d(src_rep, dst_rep, regrid_method, 0.99) field_regridder.reset_mock() regridder(self.cube) field_regridder.assert_called_once_with(src_rep.field, dst_rep.field) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.build_regridder_3d') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.build_regridder_2d') + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.build_regridder_3d") + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.build_regridder_2d") def test_build_regridder_2(self, mock_regridder_2d, mock_regridder_3d): """Test build regridder for 2d data.""" # pylint: 
disable=no-self-use src_rep = mock.Mock(ndim=2) dst_rep = mock.Mock(ndim=2) - build_regridder(src_rep, dst_rep, 'nearest') + build_regridder(src_rep, dst_rep, "nearest") mock_regridder_2d.assert_called_once_with( - src_rep, dst_rep, mock.sentinel.rm_nearest_stod, .99) + src_rep, dst_rep, mock.sentinel.rm_nearest_stod, 0.99 + ) mock_regridder_3d.assert_not_called() - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.build_regridder_3d') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.build_regridder_2d') + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.build_regridder_3d") + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.build_regridder_2d") def test_build_regridder_3(self, mock_regridder_2d, mock_regridder_3d): """Test build regridder for 3d data.""" # pylint: disable=no-self-use src_rep = mock.Mock(ndim=3) dst_rep = mock.Mock(ndim=3) - build_regridder(src_rep, dst_rep, 'nearest') + build_regridder(src_rep, dst_rep, "nearest") mock_regridder_3d.assert_called_once_with( - src_rep, dst_rep, mock.sentinel.rm_nearest_stod, .99) + src_rep, dst_rep, mock.sentinel.rm_nearest_stod, 0.99 + ) mock_regridder_2d.assert_not_called() - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_representant') + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.get_representant") def test_get_grid_representant_2d(self, mock_get_representant): """Test extraction of 2d grid representant from 2 spatial d cube.""" mock_get_representant.return_value = mock.sentinel.ret ret = get_grid_representant(self.cube) self.assertEqual(mock.sentinel.ret, ret) mock_get_representant.assert_called_once_with( - self.cube, ['latitude', 'longitude']) + self.cube, ["latitude", "longitude"] + ) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_representant') + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.get_representant") def test_get_grid_representant_2d_horiz_only(self, mock_get_representant): """Test extraction of forced 2d grid representant from 2d cube.""" mock_get_representant.return_value = mock.sentinel.ret ret = get_grid_representant(self.cube, True) self.assertEqual(mock.sentinel.ret, ret) mock_get_representant.assert_called_once_with( - self.cube, ['latitude', 'longitude']) + self.cube, ["latitude", "longitude"] + ) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_representant') + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.get_representant") def test_get_grid_representant_3d(self, mock_get_representant): """Test extraction of 3d grid representant from 3 spatial d cube.""" mock_get_representant.return_value = mock.sentinel.ret ret = get_grid_representant(self.cube_3d) self.assertEqual(mock.sentinel.ret, ret) mock_get_representant.assert_called_once_with( - self.cube_3d, [self.depth, 'latitude', 'longitude']) + self.cube_3d, [self.depth, "latitude", "longitude"] + ) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_representant') + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.get_representant") def test_get_grid_representant_3d_horiz_only(self, mock_get_representant): """Test extraction of 2d grid representant from 3 spatial d cube.""" mock_get_representant.return_value = mock.sentinel.ret ret = get_grid_representant(self.cube_3d, True) self.assertEqual(mock.sentinel.ret, ret) mock_get_representant.assert_called_once_with( - self.cube_3d, ['latitude', 'longitude']) - - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_grid_representant', - mock.Mock(side_effect=identity)) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_empty_data') - 
@mock.patch('iris.cube.Cube') - def test_get_grid_representants_3d_src(self, mock_cube, - mock_get_empty_data): + self.cube_3d, ["latitude", "longitude"] + ) + + @mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.get_grid_representant", + mock.Mock(side_effect=identity), + ) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.get_empty_data") + @mock.patch("iris.cube.Cube") + def test_get_grid_representants_3d_src( + self, mock_cube, mock_get_empty_data + ): """Test extraction of grid representants from 3 spatial d cube.""" src = self.cube_3d mock_get_empty_data.return_value = mock.sentinel.empty_data @@ -638,12 +732,15 @@ def test_get_grid_representants_3d_src(self, mock_cube, aux_coords_and_dims=[], ) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_grid_representant', - mock.Mock(side_effect=identity)) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_empty_data') - @mock.patch('iris.cube.Cube') - def test_get_grid_representants_2d_src(self, mock_cube, - mock_get_empty_data): + @mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.get_grid_representant", + mock.Mock(side_effect=identity), + ) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.get_empty_data") + @mock.patch("iris.cube.Cube") + def test_get_grid_representants_2d_src( + self, mock_cube, mock_get_empty_data + ): """Test extraction of grid representants from 2 spatial d cube.""" src = self.cube mock_cube.aux_coords = [] @@ -662,10 +759,12 @@ def test_get_grid_representants_2d_src(self, mock_cube, aux_coords_and_dims=[(self.scalar_coord, ())], ) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.map_slices') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.build_regridder') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_grid_representants', - mock.Mock(side_effect=identity)) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.map_slices") + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.build_regridder") + @mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.get_grid_representants", + mock.Mock(side_effect=identity), + ) def test_regrid_nearest(self, mock_build_regridder, mock_map_slices): """Test full regrid method.""" mock_build_regridder.return_value = mock.sentinel.regridder @@ -673,16 +772,18 @@ def test_regrid_nearest(self, mock_build_regridder, mock_map_slices): regridder = ESMPyNearest().regridder(self.cube_3d, self.cube) regridder(self.cube_3d) mock_build_regridder.assert_called_once_with( - self.cube_3d, self.cube, 'nearest', mask_threshold=0.99 + self.cube_3d, self.cube, "nearest", mask_threshold=0.99 + ) + mock_map_slices.assert_called_once_with( + self.cube_3d, mock.sentinel.regridder, self.cube_3d, self.cube ) - mock_map_slices.assert_called_once_with(self.cube_3d, - mock.sentinel.regridder, - self.cube_3d, self.cube) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.map_slices') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.build_regridder') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_grid_representants', - mock.Mock(side_effect=identity)) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.map_slices") + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.build_regridder") + @mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.get_grid_representants", + mock.Mock(side_effect=identity), + ) def test_regrid_linear(self, mock_build_regridder, mock_map_slices): """Test full regrid method.""" mock_build_regridder.return_value = mock.sentinel.regridder @@ -690,16 +791,18 @@ def test_regrid_linear(self, mock_build_regridder, mock_map_slices): 
regridder = ESMPyLinear().regridder(self.cube_3d, self.cube) regridder(self.cube_3d) mock_build_regridder.assert_called_once_with( - self.cube_3d, self.cube, 'linear', mask_threshold=0.99 + self.cube_3d, self.cube, "linear", mask_threshold=0.99 + ) + mock_map_slices.assert_called_once_with( + self.cube_3d, mock.sentinel.regridder, self.cube_3d, self.cube ) - mock_map_slices.assert_called_once_with(self.cube_3d, - mock.sentinel.regridder, - self.cube_3d, self.cube) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.map_slices') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.build_regridder') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_grid_representants', - mock.Mock(side_effect=identity)) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.map_slices") + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.build_regridder") + @mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.get_grid_representants", + mock.Mock(side_effect=identity), + ) def test_regrid_area_weighted(self, mock_build_regridder, mock_map_slices): """Test full regrid method.""" mock_build_regridder.return_value = mock.sentinel.regridder @@ -707,16 +810,18 @@ def test_regrid_area_weighted(self, mock_build_regridder, mock_map_slices): regridder = ESMPyAreaWeighted().regridder(self.cube_3d, self.cube) regridder(self.cube_3d) mock_build_regridder.assert_called_once_with( - self.cube_3d, self.cube, 'area_weighted', mask_threshold=0.99 + self.cube_3d, self.cube, "area_weighted", mask_threshold=0.99 + ) + mock_map_slices.assert_called_once_with( + self.cube_3d, mock.sentinel.regridder, self.cube_3d, self.cube ) - mock_map_slices.assert_called_once_with(self.cube_3d, - mock.sentinel.regridder, - self.cube_3d, self.cube) - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.map_slices') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.build_regridder') - @mock.patch('esmvalcore.preprocessor._regrid_esmpy.get_grid_representants', - mock.Mock(side_effect=identity)) + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.map_slices") + @mock.patch("esmvalcore.preprocessor._regrid_esmpy.build_regridder") + @mock.patch( + "esmvalcore.preprocessor._regrid_esmpy.get_grid_representants", + mock.Mock(side_effect=identity), + ) def test_data_realized_once(self, mock_build_regridder, mock_map_slices): """Test that the regridder realizes the data only once.""" src_cube = mock.MagicMock() @@ -740,12 +845,12 @@ def test_data_realized_once(self, mock_build_regridder, mock_map_slices): @pytest.mark.parametrize( - 'scheme,output', + "scheme,output", [ - (ESMPyAreaWeighted(), 'ESMPyAreaWeighted(mask_threshold=0.99)'), - (ESMPyLinear(), 'ESMPyLinear(mask_threshold=0.99)'), - (ESMPyNearest(), 'ESMPyNearest(mask_threshold=0.99)'), - ] + (ESMPyAreaWeighted(), "ESMPyAreaWeighted(mask_threshold=0.99)"), + (ESMPyLinear(), "ESMPyLinear(mask_threshold=0.99)"), + (ESMPyNearest(), "ESMPyNearest(mask_threshold=0.99)"), + ], ) def test_scheme_repr(scheme, output): """Test ``_ESMPyScheme.__repr__``.""" diff --git a/tests/unit/preprocessor/_regrid_iris_esmf_regrid/__init__.py b/tests/unit/preprocessor/_regrid_iris_esmf_regrid/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/preprocessor/_regrid_iris_esmf_regrid/test_regrid_iris_esmf_regrid.py b/tests/unit/preprocessor/_regrid_iris_esmf_regrid/test_regrid_iris_esmf_regrid.py new file mode 100644 index 0000000000..fce32401a6 --- /dev/null +++ b/tests/unit/preprocessor/_regrid_iris_esmf_regrid/test_regrid_iris_esmf_regrid.py @@ -0,0 +1,177 @@ +"""Tests for 
`esmvalcore.preprocessor._regrid_iris_esmf_regrid`.""" + +import esmf_regrid +import iris.cube +import numpy as np +import pytest + +from esmvalcore.preprocessor.regrid_schemes import IrisESMFRegrid + + +class TestIrisESMFRegrid: + def test_repr(self): + scheme = IrisESMFRegrid(method="bilinear") + expected = ( + "IrisESMFRegrid(method='bilinear', use_src_mask=True, " + "use_tgt_mask=True, collapse_src_mask_along=('Z',), " + "collapse_tgt_mask_along=('Z',), tgt_location=None, " + "mdtol=None)" + ) + assert repr(scheme) == expected + + def test_invalid_method_raises(self): + msg = ( + "`method` should be one of 'bilinear', 'conservative', or " + "'nearest'" + ) + with pytest.raises(ValueError, match=msg): + IrisESMFRegrid(method="x") + + def test_unused_mdtol_raises(self): + msg = ( + "`mdol` can only be specified when `method='bilinear'` " + "or `method='conservative'`" + ) + with pytest.raises(TypeError, match=msg): + IrisESMFRegrid(method="nearest", mdtol=1) + + def test_unused_src_resolution_raises(self): + msg = ( + "`src_resolution` can only be specified when " + "`method='conservative'`" + ) + with pytest.raises(TypeError, match=msg): + IrisESMFRegrid(method="nearest", src_resolution=100) + + def test_unused_tgt_resolution_raises(self): + msg = ( + "`tgt_resolution` can only be specified when " + "`method='conservative'`" + ) + with pytest.raises(TypeError, match=msg): + IrisESMFRegrid(method="nearest", tgt_resolution=100) + + def test_get_mask_2d(self): + cube = iris.cube.Cube( + np.ma.masked_array(np.arange(4), mask=[1, 0, 1, 0]).reshape( + (2, 2) + ), + dim_coords_and_dims=( + [ + iris.coords.DimCoord( + np.arange(2), + standard_name="latitude", + ), + 0, + ], + [ + iris.coords.DimCoord( + np.arange(2), + standard_name="longitude", + ), + 1, + ], + ), + ) + mask = IrisESMFRegrid._get_mask(cube, ("Z",)) + np.testing.assert_array_equal(mask, cube.data.mask) + + def test_get_mask_3d(self): + cube = iris.cube.Cube( + np.ma.masked_array(np.arange(4), mask=[1, 0, 1, 1]).reshape( + (2, 1, 2) + ), + dim_coords_and_dims=( + [ + iris.coords.DimCoord( + np.arange(2), + standard_name="air_pressure", + ), + 0, + ], + [ + iris.coords.DimCoord( + np.arange(1), + standard_name="latitude", + ), + 1, + ], + [ + iris.coords.DimCoord( + np.arange(2), + standard_name="longitude", + ), + 2, + ], + ), + ) + mask = IrisESMFRegrid._get_mask(cube, ("Z",)) + np.testing.assert_array_equal(mask, np.array([[1, 0]], dtype=bool)) + + def test_get_mask_3d_odd_dim_order(self): + cube = iris.cube.Cube( + np.ma.masked_array(np.arange(4), mask=[1, 0, 1, 1]).reshape( + (1, 2, 2) + ), + dim_coords_and_dims=( + [ + iris.coords.DimCoord( + np.arange(1), + standard_name="latitude", + ), + 0, + ], + [ + iris.coords.DimCoord( + np.arange(2), + standard_name="air_pressure", + ), + 1, + ], + [ + iris.coords.DimCoord( + np.arange(2), + standard_name="longitude", + ), + 2, + ], + ), + ) + mask = IrisESMFRegrid._get_mask(cube, ["air_pressure"]) + np.testing.assert_array_equal(mask, np.array([[1, 0]], dtype=bool)) + + @pytest.mark.parametrize( + "scheme", + [ + ("bilinear", esmf_regrid.ESMFBilinearRegridder), + ("conservative", esmf_regrid.ESMFAreaWeightedRegridder), + ("nearest", esmf_regrid.ESMFNearestRegridder), + ], + ) + def test_regrid(self, scheme): + method, scheme_cls = scheme + cube = iris.cube.Cube( + np.ma.arange(4).reshape((2, 2)), + dim_coords_and_dims=( + [ + iris.coords.DimCoord( + np.arange(2), + standard_name="latitude", + units="degrees", + ), + 0, + ], + [ + iris.coords.DimCoord( + np.arange(2), + 
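For orientation on what this new test module exercises: `IrisESMFRegrid` wraps the regridders of the iris-esmf-regrid package behind ESMValCore's regridding-scheme interface, so `scheme.regridder(src, tgt)` returns a reusable regridder object (an `esmf_regrid` regridder instance, as the parametrized `test_regrid` case below asserts). A hedged usage sketch that mirrors the 2x2 fixtures in these tests; regridding a cube onto its own grid is meant only to show the call shape:

import iris.coords
import iris.cube
import numpy as np

from esmvalcore.preprocessor.regrid_schemes import IrisESMFRegrid

lat = iris.coords.DimCoord(
    np.arange(2), standard_name="latitude", units="degrees"
)
lon = iris.coords.DimCoord(
    np.arange(2), standard_name="longitude", units="degrees"
)
cube = iris.cube.Cube(
    np.arange(4.0).reshape((2, 2)),
    dim_coords_and_dims=[(lat, 0), (lon, 1)],
)

scheme = IrisESMFRegrid(method="bilinear")
# The regridder is built once and can then be applied to further cubes
# defined on the same source grid.
regridder = scheme.regridder(cube, cube)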
standard_name="longitude", + units="degrees", + ), + 1, + ], + ), + ) + + scheme = IrisESMFRegrid(method=method) + regridder = scheme.regridder(cube, cube) + assert isinstance(regridder, scheme_cls) diff --git a/tests/unit/preprocessor/_rolling_window/test_rolling_window.py b/tests/unit/preprocessor/_rolling_window/test_rolling_window.py index 8a70e9c112..ec6263c8ba 100644 --- a/tests/unit/preprocessor/_rolling_window/test_rolling_window.py +++ b/tests/unit/preprocessor/_rolling_window/test_rolling_window.py @@ -1,4 +1,5 @@ """Unit tests for the `esmvalcore.preprocessor._rolling_window` function.""" + import unittest import iris.coords @@ -12,22 +13,25 @@ def _create_2d_cube(): - - cube = Cube(np.broadcast_to(np.arange(1, 16), (11, 15)), - var_name='tas', - units='K') + cube = Cube( + np.broadcast_to(np.arange(1, 16), (11, 15)), var_name="tas", units="K" + ) cube.add_dim_coord( iris.coords.DimCoord( np.arange(-5, 6), - standard_name='latitude', - units=Unit('degrees'), - ), 0) + standard_name="latitude", + units=Unit("degrees"), + ), + 0, + ) cube.add_dim_coord( iris.coords.DimCoord( np.arange(1, 16), - standard_name='time', - units=Unit('days since 1950-01-01 00:00:00', calendar='gregorian'), - ), 1) + standard_name="time", + units=Unit("days since 1950-01-01 00:00:00", calendar="gregorian"), + ), + 1, + ) return cube @@ -41,39 +45,41 @@ def setUp(self): def test_rolling_window_time(self): """Test rolling_window_statistics over time coordinate.""" - cube_time_sum = rolling_window_statistics(self.cube, - coordinate='time', - operator='sum', - window_length=2) + cube_time_sum = rolling_window_statistics( + self.cube, coordinate="time", operator="sum", window_length=2 + ) expected_data = np.broadcast_to(np.arange(3, 30, 2), (11, 14)) assert_equal(cube_time_sum.data, expected_data) assert cube_time_sum.shape == (11, 14) def test_rolling_window_latitude(self): """Test rolling_window_statistics over latitude coordinate.""" - cube_lat_mean = rolling_window_statistics(self.cube, - coordinate='latitude', - operator='mean', - window_length=3) + cube_lat_mean = rolling_window_statistics( + self.cube, coordinate="latitude", operator="mean", window_length=3 + ) expected_data = np.broadcast_to(np.arange(1, 16), (9, 15)) assert_equal(cube_lat_mean.data, expected_data) assert cube_lat_mean.shape == (9, 15) def test_rolling_window_coord(self): - self.cube.remove_coord('latitude') + self.cube.remove_coord("latitude") with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - rolling_window_statistics(self.cube, - coordinate='latitude', - operator='mean', - window_length=3) + rolling_window_statistics( + self.cube, + coordinate="latitude", + operator="mean", + window_length=3, + ) def test_rolling_window_operator(self): with self.assertRaises(ValueError): - rolling_window_statistics(self.cube, - coordinate='time', - operator='percentile', - window_length=2) + rolling_window_statistics( + self.cube, + coordinate="time", + operator="percentile", + window_length=2, + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/unit/preprocessor/_time/test_time.py b/tests/unit/preprocessor/_time/test_time.py index b478a97143..e8ddb5aae0 100644 --- a/tests/unit/preprocessor/_time/test_time.py +++ b/tests/unit/preprocessor/_time/test_time.py @@ -46,14 +46,14 @@ ) -def _create_sample_cube(calendar='gregorian'): +def _create_sample_cube(calendar="gregorian"): """Create sample cube.""" - cube = Cube(np.arange(1, 25), var_name='co2', units='J') + cube = Cube(np.arange(1, 25), 
var_name="co2", units="J") cube.add_dim_coord( iris.coords.DimCoord( - np.arange(15., 720., 30.), - standard_name='time', - units=Unit('days since 1950-01-01 00:00:00', calendar=calendar), + np.arange(15.0, 720.0, 30.0), + standard_name="time", + units=Unit("days since 1950-01-01 00:00:00", calendar=calendar), ), 0, ) @@ -63,12 +63,13 @@ def _create_sample_cube(calendar='gregorian'): def add_auxiliary_coordinate(cubelist): """Add AuxCoords to cubes in cubelist.""" for cube in cubelist: - iris.coord_categorisation.add_day_of_month(cube, cube.coord('time')) - iris.coord_categorisation.add_day_of_year(cube, cube.coord('time')) + iris.coord_categorisation.add_day_of_month(cube, cube.coord("time")) + iris.coord_categorisation.add_day_of_year(cube, cube.coord("time")) class TestExtractMonth(tests.Test): """Tests for extract_month.""" + def setUp(self): """Prepare tests.""" self.cube = _create_sample_cube() @@ -76,8 +77,9 @@ def setUp(self): def test_get_january(self): """Test january extraction.""" sliced = extract_month(self.cube, 1) - assert_array_equal(np.array([1, 1]), - sliced.coord('month_number').points) + assert_array_equal( + np.array([1, 1]), sliced.coord("month_number").points + ) def test_raises_if_extracted_cube_is_none(self): """Test function for winter.""" @@ -87,10 +89,11 @@ def test_raises_if_extracted_cube_is_none(self): def test_get_january_with_existing_coord(self): """Test january extraction.""" - iris.coord_categorisation.add_month_number(self.cube, 'time') + iris.coord_categorisation.add_month_number(self.cube, "time") sliced = extract_month(self.cube, 1) - assert_array_equal(np.array([1, 1]), - sliced.coord('month_number').points) + assert_array_equal( + np.array([1, 1]), sliced.coord("month_number").points + ) def test_bad_month_raises(self): """Test january extraction.""" @@ -102,6 +105,7 @@ def test_bad_month_raises(self): class TestTimeSlice(tests.Test): """Tests for extract_time.""" + def setUp(self): """Prepare tests.""" self.cube = _create_sample_cube() @@ -114,56 +118,61 @@ def test_raises_if_extracted_cube_is_none(self): def test_extract_time(self): """Test extract_time.""" sliced = extract_time(self.cube, 1950, 1, 1, 1950, 12, 31) - iris.coord_categorisation.add_month_number(sliced, 'time') - assert_array_equal(np.arange(1, 13, 1), - sliced.coord('month_number').points) + iris.coord_categorisation.add_month_number(sliced, "time") + assert_array_equal( + np.arange(1, 13, 1), sliced.coord("month_number").points + ) def test_extract_time_limit(self): """Test extract time when limits are included.""" - cube = Cube(np.arange(0, 720), var_name='co2', units='J') + cube = Cube(np.arange(0, 720), var_name="co2", units="J") cube.add_dim_coord( iris.coords.DimCoord( - np.arange(0., 720., 1.), - standard_name='time', - units=Unit('days since 1950-01-01 00:00:00', - calendar='360_day'), + np.arange(0.0, 720.0, 1.0), + standard_name="time", + units=Unit( + "days since 1950-01-01 00:00:00", calendar="360_day" + ), ), 0, ) sliced = extract_time(cube, 1950, 1, 1, 1951, 1, 1) - assert_array_equal(np.arange(0, 360), sliced.coord('time').points) + assert_array_equal(np.arange(0, 360), sliced.coord("time").points) def test_extract_time_non_gregorian_day(self): """Test extract time when the day is not in the Gregorian calendar.""" - cube = Cube(np.arange(0, 720), var_name='co2', units='J') + cube = Cube(np.arange(0, 720), var_name="co2", units="J") cube.add_dim_coord( iris.coords.DimCoord( - np.arange(0., 720., 1.), - standard_name='time', - units=Unit('days since 1950-01-01 
00:00:00', - calendar='360_day'), + np.arange(0.0, 720.0, 1.0), + standard_name="time", + units=Unit( + "days since 1950-01-01 00:00:00", calendar="360_day" + ), ), 0, ) sliced = extract_time(cube, 1950, 2, 30, 1950, 3, 1) - assert_array_equal(np.array([59]), sliced.coord('time').points) + assert_array_equal(np.array([59]), sliced.coord("time").points) def test_extract_time_no_slice(self): """Test fail of extract_time.""" - self.cube.coord('time').guess_bounds() + self.cube.coord("time").guess_bounds() with self.assertRaises(ValueError) as ctx: extract_time(self.cube, 2200, 1, 1, 2200, 12, 31) - msg = ("Time slice 2200-01-01 to 2200-12-31 is outside" - " cube time bounds 1950-01-16 00:00:00 to 1951-12-07 00:00:00.") - assert ctx.exception.args == (msg, ) + msg = ( + "Time slice 2200-01-01 to 2200-12-31 is outside" + " cube time bounds 1950-01-16 00:00:00 to 1951-12-07 00:00:00." + ) + assert ctx.exception.args == (msg,) def test_extract_time_one_time(self): """Test extract_time with one time step.""" cube = _create_sample_cube() - cube.coord('time').guess_bounds() - cube = cube.collapsed('time', iris.analysis.MEAN) + cube.coord("time").guess_bounds() + cube = cube.collapsed("time", iris.analysis.MEAN) sliced = extract_time(cube, 1950, 1, 1, 1952, 12, 31) - assert_array_equal(np.array([360.]), sliced.coord('time').points) + assert_array_equal(np.array([360.0]), sliced.coord("time").points) def test_extract_time_no_time(self): """Test extract_time with no time step.""" @@ -174,61 +183,66 @@ def test_extract_time_no_time(self): class TestClipTimerange(tests.Test): """Tests for clip_timerange.""" + def setUp(self): """Prepare tests.""" self.cube = _create_sample_cube() @staticmethod - def _create_cube(data, times, bounds, calendar='gregorian'): - time = iris.coords.DimCoord(times, - bounds=bounds, - standard_name='time', - units=Unit('days since 1950-01-01', - calendar=calendar)) + def _create_cube(data, times, bounds, calendar="gregorian"): + time = iris.coords.DimCoord( + times, + bounds=bounds, + standard_name="time", + units=Unit("days since 1950-01-01", calendar=calendar), + ) cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)]) return cube def test_clip_timerange_1_year(self): """Test clip_timerange with 1 year.""" - sliced = clip_timerange(self.cube, '1950/1950') - iris.coord_categorisation.add_month_number(sliced, 'time') - iris.coord_categorisation.add_year(sliced, 'time') - assert_array_equal(np.arange(1, 13, 1), - sliced.coord('month_number').points) - assert_array_equal(np.full(12, 1950), sliced.coord('year').points) + sliced = clip_timerange(self.cube, "1950/1950") + iris.coord_categorisation.add_month_number(sliced, "time") + iris.coord_categorisation.add_year(sliced, "time") + assert_array_equal( + np.arange(1, 13, 1), sliced.coord("month_number").points + ) + assert_array_equal(np.full(12, 1950), sliced.coord("year").points) def test_clip_timerange_3_years(self): """Test clip_timerange with 3 years.""" - sliced = clip_timerange(self.cube, '1949/1951') + sliced = clip_timerange(self.cube, "1949/1951") assert sliced == self.cube def test_clip_timerange_no_slice(self): """Test fail of clip_timerange.""" - self.cube.coord('time').guess_bounds() - msg = ("Time slice 2200-01-01 01:00:00 to 2201-01-01 is outside" - " cube time bounds 1950-01-16 00:00:00 to 1951-12-07 00:00:00.") + self.cube.coord("time").guess_bounds() + msg = ( + "Time slice 2200-01-01 01:00:00 to 2201-01-01 is outside" + " cube time bounds 1950-01-16 00:00:00 to 1951-12-07 00:00:00." 
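The cases in this test class exercise several timerange spellings accepted by `clip_timerange`: plain years, year-month values, full dates, ISO 8601-style datetimes such as `22000101T010000`, and ISO 8601 durations such as `P4Y6M` on either side of the `/` separator. A small runnable sketch, assuming the same `esmvalcore.preprocessor._time` import this test module uses; the cube construction mirrors `_create_sample_cube` above:

import iris.coords
import iris.cube
import numpy as np
from cf_units import Unit

from esmvalcore.preprocessor._time import clip_timerange

# Two years of monthly points starting in January 1950.
time = iris.coords.DimCoord(
    np.arange(15.0, 720.0, 30.0),
    standard_name="time",
    units=Unit("days since 1950-01-01 00:00:00", calendar="standard"),
)
cube = iris.cube.Cube(np.arange(24.0), dim_coords_and_dims=[(time, 0)])

first_year = clip_timerange(cube, "1950/1950")       # whole years
first_half = clip_timerange(cube, "195001/195006")   # year and month
january = clip_timerange(cube, "19500101/19500131")  # full dates
spanned = clip_timerange(cube, "195001/P1Y6M")       # start plus duration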
+ ) with self.assertRaises(ValueError) as ctx: - clip_timerange(self.cube, '22000101T010000/2200') - assert ctx.exception.args == (msg, ) + clip_timerange(self.cube, "22000101T010000/2200") + assert ctx.exception.args == (msg,) def test_clip_timerange_one_time(self): """Test clip_timerange with one time step.""" cube = _create_sample_cube() - cube = cube.collapsed('time', iris.analysis.MEAN) - sliced = clip_timerange(cube, '1950/1952') - assert_array_equal(np.array([360.]), sliced.coord('time').points) + cube = cube.collapsed("time", iris.analysis.MEAN) + sliced = clip_timerange(cube, "1950/1952") + assert_array_equal(np.array([360.0]), sliced.coord("time").points) def test_clip_timerange_no_time(self): """Test clip_timerange with no time step.""" cube = _create_sample_cube()[0] - sliced_timerange = clip_timerange(cube, '1950/1950') + sliced_timerange = clip_timerange(cube, "1950/1950") assert cube == sliced_timerange def test_clip_timerange_date(self): """Test timerange with dates.""" - sliced_year = clip_timerange(self.cube, '1950/1952') - sliced_month = clip_timerange(self.cube, '195001/195212') - sliced_day = clip_timerange(self.cube, '19500101/19521231') + sliced_year = clip_timerange(self.cube, "1950/1952") + sliced_month = clip_timerange(self.cube, "195001/195212") + sliced_day = clip_timerange(self.cube, "19500101/19521231") assert self.cube == sliced_year assert self.cube == sliced_month assert self.cube == sliced_day @@ -237,62 +251,73 @@ def test_clip_timerange_datetime(self): """Test timerange with datetime periods.""" data = np.arange(8) times = np.arange(0, 48, 6) - time = iris.coords.DimCoord(times, - standard_name='time', - units=Unit('hours since 1950-01-01', - calendar='360_day')) + time = iris.coords.DimCoord( + times, + standard_name="time", + units=Unit("hours since 1950-01-01", calendar="360_day"), + ) time.guess_bounds() cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)]) - sliced_cube = clip_timerange(cube, '19500101T000000/19500101T120000') + sliced_cube = clip_timerange(cube, "19500101T000000/19500101T120000") expected_time = np.arange(0, 18, 6) assert_array_equal(sliced_cube.coord(time).points, expected_time) def test_clip_timerange_monthly(self): """Test timerange with monthly data.""" - time = np.arange(15., 2175., 30) + time = np.arange(15.0, 2175.0, 30) data = np.ones_like(time) calendars = [ - '360_day', '365_day', '366_day', - 'gregorian', 'julian', 'proleptic_gregorian'] + "360_day", + "365_day", + "366_day", + "gregorian", + "julian", + "proleptic_gregorian", + ] for calendar in calendars: cube = self._create_cube(data, time, None, calendar) - sliced_forward = clip_timerange(cube, '195001/P4Y6M') - sliced_backward = clip_timerange(cube, 'P4Y6M/195406') - assert sliced_forward.coord('time').cell(0).point.year == 1950 - assert sliced_forward.coord('time').cell(-1).point.year == 1954 - assert sliced_forward.coord('time').cell(0).point.month == 1 - assert sliced_forward.coord('time').cell(-1).point.month == 6 - - assert sliced_backward.coord('time').cell(-1).point.year == 1954 - assert sliced_backward.coord('time').cell(0).point.year == 1950 - assert sliced_backward.coord('time').cell(-1).point.month == 6 - assert sliced_backward.coord('time').cell(0).point.month == 1 + sliced_forward = clip_timerange(cube, "195001/P4Y6M") + sliced_backward = clip_timerange(cube, "P4Y6M/195406") + assert sliced_forward.coord("time").cell(0).point.year == 1950 + assert sliced_forward.coord("time").cell(-1).point.year == 1954 + assert 
sliced_forward.coord("time").cell(0).point.month == 1 + assert sliced_forward.coord("time").cell(-1).point.month == 6 + + assert sliced_backward.coord("time").cell(-1).point.year == 1954 + assert sliced_backward.coord("time").cell(0).point.year == 1950 + assert sliced_backward.coord("time").cell(-1).point.month == 6 + assert sliced_backward.coord("time").cell(0).point.month == 1 def test_clip_timerange_daily(self): """Test timerange with daily data.""" - time = np.arange(0., 3000.) + time = np.arange(0.0, 3000.0) data = np.ones_like(time) calendars = [ - '360_day', '365_day', '366_day', - 'gregorian', 'julian', 'proleptic_gregorian'] + "360_day", + "365_day", + "366_day", + "gregorian", + "julian", + "proleptic_gregorian", + ] for calendar in calendars: cube = self._create_cube(data, time, None, calendar) - sliced_forward = clip_timerange(cube, '19500101/P4Y6M2D') - sliced_backward = clip_timerange(cube, 'P4Y6M3D/19540703') - assert sliced_forward.coord('time').cell(0).point.year == 1950 - assert sliced_forward.coord('time').cell(-1).point.year == 1954 - assert sliced_forward.coord('time').cell(0).point.month == 1 - assert sliced_forward.coord('time').cell(-1).point.month == 7 - assert sliced_forward.coord('time').cell(0).point.day == 1 - assert sliced_forward.coord('time').cell(-1).point.day == 2 - - assert sliced_backward.coord('time').cell(-1).point.year == 1954 - assert sliced_backward.coord('time').cell(0).point.year == 1950 - assert sliced_backward.coord('time').cell(-1).point.month == 7 - assert sliced_backward.coord('time').cell(0).point.month == 1 - assert sliced_backward.coord('time').cell(-1).point.day == 3 - assert sliced_backward.coord('time').cell(0).point.day == 1 + sliced_forward = clip_timerange(cube, "19500101/P4Y6M2D") + sliced_backward = clip_timerange(cube, "P4Y6M3D/19540703") + assert sliced_forward.coord("time").cell(0).point.year == 1950 + assert sliced_forward.coord("time").cell(-1).point.year == 1954 + assert sliced_forward.coord("time").cell(0).point.month == 1 + assert sliced_forward.coord("time").cell(-1).point.month == 7 + assert sliced_forward.coord("time").cell(0).point.day == 1 + assert sliced_forward.coord("time").cell(-1).point.day == 2 + + assert sliced_backward.coord("time").cell(-1).point.year == 1954 + assert sliced_backward.coord("time").cell(0).point.year == 1950 + assert sliced_backward.coord("time").cell(-1).point.month == 7 + assert sliced_backward.coord("time").cell(0).point.month == 1 + assert sliced_backward.coord("time").cell(-1).point.day == 3 + assert sliced_backward.coord("time").cell(0).point.day == 1 def test_clip_timerange_duration_seconds(self): """Test timerange with duration periods with resolution up to @@ -300,91 +325,106 @@ def test_clip_timerange_duration_seconds(self): data = np.arange(8) times = np.arange(0, 48, 6) calendars = [ - '360_day', '365_day', '366_day', - 'gregorian', 'julian', 'proleptic_gregorian'] + "360_day", + "365_day", + "366_day", + "gregorian", + "julian", + "proleptic_gregorian", + ] for calendar in calendars: - time = iris.coords.DimCoord(times, - standard_name='time', - units=Unit('hours since 1950-01-01', - calendar=calendar)) + time = iris.coords.DimCoord( + times, + standard_name="time", + units=Unit("hours since 1950-01-01", calendar=calendar), + ) time.guess_bounds() cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)]) - sliced_cube_start = clip_timerange(cube, 'PT12H/19500101T120000') - sliced_cube_end = clip_timerange(cube, '19500101T000000/PT12H') + sliced_cube_start = clip_timerange(cube, 
"PT12H/19500101T120000") + sliced_cube_end = clip_timerange(cube, "19500101T000000/PT12H") expected_time = np.arange(0, 18, 6) assert_array_equal( - sliced_cube_start.coord('time').points, expected_time) + sliced_cube_start.coord("time").points, expected_time + ) assert_array_equal( - sliced_cube_end.coord('time').points, expected_time) + sliced_cube_end.coord("time").points, expected_time + ) def test_clip_timerange_30_day(self): """Test day 31 is converted to day 30 in 360_day calendars.""" - time = np.arange(0., 3000.) + time = np.arange(0.0, 3000.0) data = np.ones_like(time) - cube = self._create_cube(data, time, None, '360_day') - sliced_cube = clip_timerange(cube, '19500131/19500331') + cube = self._create_cube(data, time, None, "360_day") + sliced_cube = clip_timerange(cube, "19500131/19500331") expected_time = np.arange(29, 90, 1) - assert_array_equal( - sliced_cube.coord('time').points, expected_time) + assert_array_equal(sliced_cube.coord("time").points, expected_time) def test_clip_timerange_single_year_1d(self): """Test that single year stays dimensional coordinate.""" - cube = self._create_cube([0.0], [150.0], [[0.0, 365.0]], 'standard') - sliced_cube = clip_timerange(cube, '1950/1950') + cube = self._create_cube([0.0], [150.0], [[0.0, 365.0]], "standard") + sliced_cube = clip_timerange(cube, "1950/1950") - assert sliced_cube.coord('time').units == Unit( - 'days since 1950-01-01', calendar='standard') - assert_array_equal(sliced_cube.coord('time').points, [150.0]) - assert_array_equal(sliced_cube.coord('time').bounds, [[0.0, 365.0]]) + assert sliced_cube.coord("time").units == Unit( + "days since 1950-01-01", calendar="standard" + ) + assert_array_equal(sliced_cube.coord("time").points, [150.0]) + assert_array_equal(sliced_cube.coord("time").bounds, [[0.0, 365.0]]) assert cube.shape == sliced_cube.shape - assert sliced_cube.coord('time', dim_coords=True) + assert sliced_cube.coord("time", dim_coords=True) # Repeat test without bounds - cube.coord('time').bounds = None - sliced_cube = clip_timerange(cube, '1950/1950') + cube.coord("time").bounds = None + sliced_cube = clip_timerange(cube, "1950/1950") - assert sliced_cube.coord('time').units == Unit( - 'days since 1950-01-01', calendar='standard') - assert_array_equal(sliced_cube.coord('time').points, [150.0]) - assert sliced_cube.coord('time').bounds is None + assert sliced_cube.coord("time").units == Unit( + "days since 1950-01-01", calendar="standard" + ) + assert_array_equal(sliced_cube.coord("time").points, [150.0]) + assert sliced_cube.coord("time").bounds is None assert cube.shape == sliced_cube.shape - assert sliced_cube.coord('time', dim_coords=True) + assert sliced_cube.coord("time", dim_coords=True) def test_clip_timerange_single_year_2d(self): """Test that single year stays dimensional coordinate.""" - cube = self._create_cube([[0.0, 1.0]], [150.0], [[0.0, 365.0]], - 'standard') - lat_coord = iris.coords.DimCoord([10.0, 20.0], - standard_name='latitude') + cube = self._create_cube( + [[0.0, 1.0]], [150.0], [[0.0, 365.0]], "standard" + ) + lat_coord = iris.coords.DimCoord( + [10.0, 20.0], standard_name="latitude" + ) cube.add_dim_coord(lat_coord, 1) - sliced_cube = clip_timerange(cube, '1950/1950') + sliced_cube = clip_timerange(cube, "1950/1950") - assert sliced_cube.coord('time').units == Unit( - 'days since 1950-01-01', calendar='standard') - assert_array_equal(sliced_cube.coord('time').points, [150.0]) - assert_array_equal(sliced_cube.coord('time').bounds, [[0.0, 365.0]]) + assert sliced_cube.coord("time").units 
== Unit( + "days since 1950-01-01", calendar="standard" + ) + assert_array_equal(sliced_cube.coord("time").points, [150.0]) + assert_array_equal(sliced_cube.coord("time").bounds, [[0.0, 365.0]]) assert cube.shape == sliced_cube.shape - assert sliced_cube.coord('time', dim_coords=True) + assert sliced_cube.coord("time", dim_coords=True) # Repeat test without bounds - cube.coord('time').bounds = None - sliced_cube = clip_timerange(cube, '1950/1950') + cube.coord("time").bounds = None + sliced_cube = clip_timerange(cube, "1950/1950") - assert sliced_cube.coord('time').units == Unit( - 'days since 1950-01-01', calendar='standard') - assert_array_equal(sliced_cube.coord('time').points, [150.0]) - assert sliced_cube.coord('time').bounds is None + assert sliced_cube.coord("time").units == Unit( + "days since 1950-01-01", calendar="standard" + ) + assert_array_equal(sliced_cube.coord("time").points, [150.0]) + assert sliced_cube.coord("time").bounds is None assert cube.shape == sliced_cube.shape - assert sliced_cube.coord('time', dim_coords=True) + assert sliced_cube.coord("time", dim_coords=True) def test_clip_timerange_single_year_4d(self): """Test time is not scalar even when time is not first coordinate.""" - cube = self._create_cube([[[[0.0, 1.0]]]], [150.0], [[0.0, 365.0]], - 'standard') - plev_coord = iris.coords.DimCoord([1013.0], - standard_name='air_pressure') - lat_coord = iris.coords.DimCoord([10.0], standard_name='latitude') - lon_coord = iris.coords.DimCoord([0.0, 1.0], standard_name='longitude') + cube = self._create_cube( + [[[[0.0, 1.0]]]], [150.0], [[0.0, 365.0]], "standard" + ) + plev_coord = iris.coords.DimCoord( + [1013.0], standard_name="air_pressure" + ) + lat_coord = iris.coords.DimCoord([10.0], standard_name="latitude") + lon_coord = iris.coords.DimCoord([0.0, 1.0], standard_name="longitude") cube.add_dim_coord(plev_coord, 1) cube.add_dim_coord(lat_coord, 2) cube.add_dim_coord(lon_coord, 3) @@ -393,416 +433,461 @@ def test_clip_timerange_single_year_4d(self): cube_1 = cube.copy() cube_1.transpose([1, 0, 2, 3]) assert cube_1.shape == (1, 1, 1, 2) - sliced_cube = clip_timerange(cube_1, '1950/1950') + sliced_cube = clip_timerange(cube_1, "1950/1950") assert sliced_cube is not cube_1 - assert sliced_cube.coord('time').units == Unit( - 'days since 1950-01-01', calendar='standard') - assert_array_equal(sliced_cube.coord('time').points, [150.0]) - assert_array_equal(sliced_cube.coord('time').bounds, [[0.0, 365.0]]) + assert sliced_cube.coord("time").units == Unit( + "days since 1950-01-01", calendar="standard" + ) + assert_array_equal(sliced_cube.coord("time").points, [150.0]) + assert_array_equal(sliced_cube.coord("time").bounds, [[0.0, 365.0]]) assert cube_1.shape == sliced_cube.shape - assert sliced_cube.coord('time', dim_coords=True) + assert sliced_cube.coord("time", dim_coords=True) for coord_name in [c.name() for c in cube_1.coords()]: - assert (sliced_cube.coord_dims(coord_name) == - cube_1.coord_dims(coord_name)) + assert sliced_cube.coord_dims(coord_name) == cube_1.coord_dims( + coord_name + ) # Order: lat, lon, time, plev cube_2 = cube.copy() cube_2.transpose([2, 3, 0, 1]) assert cube_2.shape == (1, 2, 1, 1) - sliced_cube = clip_timerange(cube_2, '1950/1950') + sliced_cube = clip_timerange(cube_2, "1950/1950") assert sliced_cube is not cube_2 - assert sliced_cube.coord('time').units == Unit( - 'days since 1950-01-01', calendar='standard') - assert_array_equal(sliced_cube.coord('time').points, [150.0]) - assert_array_equal(sliced_cube.coord('time').bounds, [[0.0, 
365.0]]) + assert sliced_cube.coord("time").units == Unit( + "days since 1950-01-01", calendar="standard" + ) + assert_array_equal(sliced_cube.coord("time").points, [150.0]) + assert_array_equal(sliced_cube.coord("time").bounds, [[0.0, 365.0]]) assert cube_2.shape == sliced_cube.shape - assert sliced_cube.coord('time', dim_coords=True) + assert sliced_cube.coord("time", dim_coords=True) for coord_name in [c.name() for c in cube_2.coords()]: - assert (sliced_cube.coord_dims(coord_name) == - cube_2.coord_dims(coord_name)) + assert sliced_cube.coord_dims(coord_name) == cube_2.coord_dims( + coord_name + ) # Order: lon, lat, plev, time cube_3 = cube.copy() cube_3.transpose([3, 2, 1, 0]) assert cube_3.shape == (2, 1, 1, 1) - sliced_cube = clip_timerange(cube_3, '1950/1950') + sliced_cube = clip_timerange(cube_3, "1950/1950") assert sliced_cube is not cube_3 - assert sliced_cube.coord('time').units == Unit( - 'days since 1950-01-01', calendar='standard') - assert_array_equal(sliced_cube.coord('time').points, [150.0]) - assert_array_equal(sliced_cube.coord('time').bounds, [[0.0, 365.0]]) + assert sliced_cube.coord("time").units == Unit( + "days since 1950-01-01", calendar="standard" + ) + assert_array_equal(sliced_cube.coord("time").points, [150.0]) + assert_array_equal(sliced_cube.coord("time").bounds, [[0.0, 365.0]]) assert cube_3.shape == sliced_cube.shape - assert sliced_cube.coord('time', dim_coords=True) + assert sliced_cube.coord("time", dim_coords=True) for coord_name in [c.name() for c in cube_3.coords()]: - assert (sliced_cube.coord_dims(coord_name) == - cube_3.coord_dims(coord_name)) + assert sliced_cube.coord_dims(coord_name) == cube_3.coord_dims( + coord_name + ) class TestExtractSeason(tests.Test): """Tests for extract_season.""" + def setUp(self): """Prepare tests.""" self.cube = _create_sample_cube() def test_get_djf(self): """Test function for winter.""" - sliced = extract_season(self.cube, 'DJF') - iris.coord_categorisation.add_month_number(sliced, 'time') - assert_array_equal(np.array([1, 2, 12, 1, 2, 12]), - sliced.coord('month_number').points) + sliced = extract_season(self.cube, "DJF") + iris.coord_categorisation.add_month_number(sliced, "time") + assert_array_equal( + np.array([1, 2, 12, 1, 2, 12]), sliced.coord("month_number").points + ) with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('clim_season') + self.cube.coord("clim_season") with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('season_year') + self.cube.coord("season_year") def test_raises_if_extracted_cube_is_none(self): """Test function for winter.""" - sliced = extract_season(self.cube, 'DJF') + sliced = extract_season(self.cube, "DJF") with assert_raises(ValueError): - extract_season(sliced, 'MAM') + extract_season(sliced, "MAM") def test_get_djf_caps(self): """Test function works when season specified in caps.""" - sliced = extract_season(self.cube, 'DJF') - iris.coord_categorisation.add_month_number(sliced, 'time') - assert_array_equal(np.array([1, 2, 12, 1, 2, 12]), - sliced.coord('month_number').points) + sliced = extract_season(self.cube, "DJF") + iris.coord_categorisation.add_month_number(sliced, "time") + assert_array_equal( + np.array([1, 2, 12, 1, 2, 12]), sliced.coord("month_number").points + ) with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('clim_season') + self.cube.coord("clim_season") with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('season_year') + self.cube.coord("season_year") def 
test_get_mam(self): """Test function for spring.""" - sliced = extract_season(self.cube, 'MAM') - iris.coord_categorisation.add_month_number(sliced, 'time') - assert_array_equal(np.array([3, 4, 5, 3, 4, 5]), - sliced.coord('month_number').points) + sliced = extract_season(self.cube, "MAM") + iris.coord_categorisation.add_month_number(sliced, "time") + assert_array_equal( + np.array([3, 4, 5, 3, 4, 5]), sliced.coord("month_number").points + ) with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('clim_season') + self.cube.coord("clim_season") with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('season_year') + self.cube.coord("season_year") def test_get_jja(self): """Test function for summer.""" - sliced = extract_season(self.cube, 'JJA') - iris.coord_categorisation.add_month_number(sliced, 'time') - assert_array_equal(np.array([6, 7, 8, 6, 7, 8]), - sliced.coord('month_number').points) + sliced = extract_season(self.cube, "JJA") + iris.coord_categorisation.add_month_number(sliced, "time") + assert_array_equal( + np.array([6, 7, 8, 6, 7, 8]), sliced.coord("month_number").points + ) def test_get_multiple_seasons(self): """Test function for two seasons.""" sliced = [extract_season(self.cube, seas) for seas in ["JJA", "SON"]] clim_coords = [sin_sli.coord("clim_season") for sin_sli in sliced] - assert_array_equal(clim_coords[0].points, - ['JJA', 'JJA', 'JJA', 'JJA', 'JJA', 'JJA']) - assert_array_equal(clim_coords[1].points, - ['SON', 'SON', 'SON', 'SON', 'SON', 'SON']) + assert_array_equal( + clim_coords[0].points, ["JJA", "JJA", "JJA", "JJA", "JJA", "JJA"] + ) + assert_array_equal( + clim_coords[1].points, ["SON", "SON", "SON", "SON", "SON", "SON"] + ) def test_get_son(self): """Test function for autumn.""" - sliced = extract_season(self.cube, 'SON') - iris.coord_categorisation.add_month_number(sliced, 'time') - assert_array_equal(np.array([9, 10, 11, 9, 10, 11]), - sliced.coord('month_number').points) + sliced = extract_season(self.cube, "SON") + iris.coord_categorisation.add_month_number(sliced, "time") + assert_array_equal( + np.array([9, 10, 11, 9, 10, 11]), + sliced.coord("month_number").points, + ) with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('clim_season') + self.cube.coord("clim_season") with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('season_year') + self.cube.coord("season_year") def test_get_jf(self): """Test function for custom seasons.""" - sliced = extract_season(self.cube, 'JF') - iris.coord_categorisation.add_month_number(sliced, 'time') - assert_array_equal(np.array([1, 2, 1, 2]), - sliced.coord('month_number').points) + sliced = extract_season(self.cube, "JF") + iris.coord_categorisation.add_month_number(sliced, "time") + assert_array_equal( + np.array([1, 2, 1, 2]), sliced.coord("month_number").points + ) with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('clim_season') + self.cube.coord("clim_season") with assert_raises(iris.exceptions.CoordinateNotFoundError): - self.cube.coord('season_year') + self.cube.coord("season_year") class TestClimatology(tests.Test): """Test class for :func:`esmvalcore.preprocessor._time.climatology`.""" + @staticmethod def _create_cube(data, times, bounds): - time = iris.coords.DimCoord(times, - bounds=bounds, - standard_name='time', - units=Unit('days since 1950-01-01', - calendar='gregorian')) + time = iris.coords.DimCoord( + times, + bounds=bounds, + standard_name="time", + units=Unit("days since 
1950-01-01", calendar="gregorian"), + ) cube = iris.cube.Cube( - data, - dim_coords_and_dims=[(time, 0)], - units='kg m-2 s-1' + data, dim_coords_and_dims=[(time, 0)], units="kg m-2 s-1" ) return cube def test_time_mean(self): """Test for time average of a 1D field.""" - data = np.array([1., 1., 1.], dtype=np.float32) - times = np.array([15., 45., 75.]) - bounds = np.array([[0., 30.], [30., 60.], [60., 90.]]) + data = np.array([1.0, 1.0, 1.0], dtype=np.float32) + times = np.array([15.0, 45.0, 75.0]) + bounds = np.array([[0.0, 30.0], [30.0, 60.0], [60.0, 90.0]]) cube = self._create_cube(data, times, bounds) - result = climate_statistics(cube, operator='mean') - expected = np.array([1.], dtype=np.float32) + result = climate_statistics(cube, operator="mean") + expected = np.array([1.0], dtype=np.float32) assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') - self.assertFalse(cube.coords('_time_weights_')) - self.assertFalse(result.coords('_time_weights_')) + self.assertEqual(result.units, "kg m-2 s-1") + self.assertFalse(cube.coords("_time_weights_")) + self.assertFalse(result.coords("_time_weights_")) def test_time_mean_uneven(self): """Test for time average of a 1D field with uneven time boundaries.""" - data = np.array([1., 5.], dtype=np.float32) - times = np.array([5., 25.]) - bounds = np.array([[0., 1.], [1., 4.]]) + data = np.array([1.0, 5.0], dtype=np.float32) + times = np.array([5.0, 25.0]) + bounds = np.array([[0.0, 1.0], [1.0, 4.0]]) cube = self._create_cube(data, times, bounds) - result = climate_statistics(cube, operator='mean') - expected = np.array([4.], dtype=np.float32) + result = climate_statistics(cube, operator="mean") + expected = np.array([4.0], dtype=np.float32) assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') + self.assertEqual(result.units, "kg m-2 s-1") def test_time_mean_365_day(self): """Test for time avg of a realistic time axis and 365 day calendar.""" - data = np.array([1., 1., 1., 1., 1., 1.], dtype=np.float32) + data = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float32) times = np.array([15, 45, 74, 105, 135, 166]) - bounds = np.array([[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], - [151, 181]]) + bounds = np.array( + [[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], [151, 181]] + ) cube = self._create_cube(data, times, bounds) - result = climate_statistics(cube, operator='mean') - expected = np.array([1.], dtype=np.float32) + result = climate_statistics(cube, operator="mean") + expected = np.array([1.0], dtype=np.float32) assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') + self.assertEqual(result.units, "kg m-2 s-1") def test_time_sum(self): """Test for time sum of a 1D field.""" - data = np.array([1., 2., 1.], dtype=np.float32) - times = np.array([15., 45., 75.]) - bounds = np.array([[0., 30.], [30., 60.], [60., 90.]]) + data = np.array([1.0, 2.0, 1.0], dtype=np.float32) + times = np.array([15.0, 45.0, 75.0]) + bounds = np.array([[0.0, 30.0], [30.0, 60.0], [60.0, 90.0]]) cube = self._create_cube(data, times, bounds) - result = climate_statistics(cube, operator='sum') - expected = np.array([120.], dtype=np.float32) + result = climate_statistics(cube, operator="sum") + expected = np.array([120.0], dtype=np.float32) assert_array_equal(result.data, expected) - self.assertEqual(result.units, '86400 kg m-2') + self.assertEqual(result.units, "86400 kg m-2") def test_time_sum_weighted(self): """Test for time sum of a 1D field.""" - data = 
np.array([1., 2., 1.], dtype=np.float32) - times = np.array([15., 45., 75.]) - bounds = np.array([[10., 20.], [30., 60.], [73., 77.]]) + data = np.array([1.0, 2.0, 1.0], dtype=np.float32) + times = np.array([15.0, 45.0, 75.0]) + bounds = np.array([[10.0, 20.0], [30.0, 60.0], [73.0, 77.0]]) cube = self._create_cube(data, times, bounds) - result = climate_statistics(cube, operator='sum') - expected = np.array([74.], dtype=np.float32) + result = climate_statistics(cube, operator="sum") + expected = np.array([74.0], dtype=np.float32) assert_array_equal(result.data, expected) - self.assertEqual(result.units, '86400 kg m-2') + self.assertEqual(result.units, "86400 kg m-2") def test_time_sum_uneven(self): """Test for time sum of a 1D field with uneven time boundaries.""" - data = np.array([1., 5.], dtype=np.float32) - times = np.array([5., 25.]) - bounds = np.array([[0., 1.], [1., 4.]]) + data = np.array([1.0, 5.0], dtype=np.float32) + times = np.array([5.0, 25.0]) + bounds = np.array([[0.0, 1.0], [1.0, 4.0]]) cube = self._create_cube(data, times, bounds) - result = climate_statistics(cube, operator='sum') + result = climate_statistics(cube, operator="sum") expected = np.array([16.0], dtype=np.float32) assert_array_equal(result.data, expected) - self.assertEqual(result.units, '86400 kg m-2') + self.assertEqual(result.units, "86400 kg m-2") def test_time_sum_365_day(self): """Test for time sum of a realistic time axis and 365 day calendar.""" - data = np.ones((6, )) + data = np.ones((6,)) data[3] = 2.0 times = np.array([15, 45, 74, 105, 135, 166]) - bounds = np.array([[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], - [151, 181]]) + bounds = np.array( + [[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], [151, 181]] + ) cube = self._create_cube(data, times, bounds) - result = climate_statistics(cube, operator='sum') - expected = np.array([211.], dtype=np.float32) + result = climate_statistics(cube, operator="sum") + expected = np.array([211.0], dtype=np.float32) assert_array_equal(result.data, expected) - self.assertEqual(result.units, '86400 kg m-2') + self.assertEqual(result.units, "86400 kg m-2") def test_season_climatology(self): """Test for time avg of a realistic time axis and 365 day calendar.""" - data = np.array([1., 1., 1., 1., 1., 1.], dtype=np.float32) + data = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float32) times = np.array([15, 45, 74, 105, 135, 166]) - bounds = np.array([[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], - [151, 181]]) + bounds = np.array( + [[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], [151, 181]] + ) cube = self._create_cube(data, times, bounds) - for period in ('season', 'seasonal'): - result = climate_statistics(cube, operator='mean', period=period) - expected = np.array([1., 1., 1.], dtype=np.float32) + for period in ("season", "seasonal"): + result = climate_statistics(cube, operator="mean", period=period) + expected = np.array([1.0, 1.0, 1.0], dtype=np.float32) assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2 s-1') + self.assertEqual(result.units, "kg m-2 s-1") def test_custom_season_climatology(self): """Test for time avg of a realistic time axis and 365 day calendar.""" - data = np.array([1., 1., 1., 1., 1., 1., 1., 1.], dtype=np.float32) + data = np.array( + [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float32 + ) times = np.array([15, 45, 74, 105, 135, 166, 195, 225]) - bounds = np.array([[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], - [151, 181], [181, 212], [212, 243]]) + bounds =
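A worked check of `test_time_sum_weighted` above: with `operator='sum'`, `climate_statistics` weights each value by the width of its time bounds (in days, given the coordinate units), which is what the expected value implies: 1*10 + 2*30 + 1*4 = 74. The day-valued weights are also why the asserted units gain a factor of 86400 (seconds per day) relative to `kg m-2 s-1`:

import numpy as np

data = np.array([1.0, 2.0, 1.0])
bounds = np.array([[10.0, 20.0], [30.0, 60.0], [73.0, 77.0]])
widths = bounds[:, 1] - bounds[:, 0]  # [10., 30., 4.] days
assert (data * widths).sum() == 74.0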
np.array(
+            [
+                [0, 31],
+                [31, 59],
+                [59, 90],
+                [90, 120],
+                [120, 151],
+                [151, 181],
+                [181, 212],
+                [212, 243],
+            ]
+        )
         cube = self._create_cube(data, times, bounds)
-        for period in ('season', 'seasonal'):
-            result = climate_statistics(cube,
-                                         operator='mean',
-                                         period=period,
-                                         seasons=('jfmamj', 'jasond'))
-            expected = np.array([1., 1.], dtype=np.float32)
+        for period in ("season", "seasonal"):
+            result = climate_statistics(
+                cube,
+                operator="mean",
+                period=period,
+                seasons=("jfmamj", "jasond"),
+            )
+            expected = np.array([1.0, 1.0], dtype=np.float32)
             assert_array_equal(result.data, expected)
-            self.assertEqual(result.units, 'kg m-2 s-1')
+            self.assertEqual(result.units, "kg m-2 s-1")
 
     def test_monthly(self):
         """Test for time avg of a realistic time axis and 365 day calendar."""
-        data = np.array([1., 1., 1., 1., 1., 1.], dtype=np.float32)
+        data = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float32)
         times = np.array([15, 45, 74, 105, 135, 166])
-        bounds = np.array([[0, 31], [31, 59], [59, 90], [90, 120], [120, 151],
-                           [151, 181]])
+        bounds = np.array(
+            [[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], [151, 181]]
+        )
         cube = self._create_cube(data, times, bounds)
-        for period in ('monthly', 'month', 'mon'):
-            result = climate_statistics(cube, operator='mean', period=period)
-            expected = np.ones((6, ), dtype=np.float32)
+        for period in ("monthly", "month", "mon"):
+            result = climate_statistics(cube, operator="mean", period=period)
+            expected = np.ones((6,), dtype=np.float32)
             assert_array_equal(result.data, expected)
-            self.assertEqual(result.units, 'kg m-2 s-1')
+            self.assertEqual(result.units, "kg m-2 s-1")
 
     def test_day(self):
         """Test for time avg of a realistic time axis and 365 day calendar."""
-        data = np.array([1., 1., 1., 1., 1., 1.], dtype=np.float32)
+        data = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float32)
         times = np.array([0.5, 1.5, 2.5, 365.5, 366.5, 367.5])
-        bounds = np.array([[0, 1], [1, 2], [2, 3], [365, 366], [366, 367],
-                           [367, 368]])
+        bounds = np.array(
+            [[0, 1], [1, 2], [2, 3], [365, 366], [366, 367], [367, 368]]
+        )
         cube = self._create_cube(data, times, bounds)
-        for period in ('daily', 'day'):
-            result = climate_statistics(cube, operator='mean', period=period)
+        for period in ("daily", "day"):
+            result = climate_statistics(cube, operator="mean", period=period)
             expected = np.array([1, 1, 1], dtype=np.float32)
             assert_array_equal(result.data, expected)
-            self.assertEqual(result.units, 'kg m-2 s-1')
+            self.assertEqual(result.units, "kg m-2 s-1")
 
     def test_hour(self):
         """Test for time avg of a realistic time axis and 365 day calendar."""
-        data = np.array([2., 2., 10., 4., 4., 6.], dtype=np.float32)
+        data = np.array([2.0, 2.0, 10.0, 4.0, 4.0, 6.0], dtype=np.float32)
         times = np.array([0.5, 1.5, 2.5, 24.5, 25.5, 48.5])
         bounds = np.array([[0, 1], [1, 2], [2, 3], [3, 4], [4, 5], [5, 6]])
         cube = self._create_cube(data, times, bounds)
-        cube.coord('time').units = 'hours since 2000-01-01 00:00:00'
+        cube.coord("time").units = "hours since 2000-01-01 00:00:00"
 
-        for period in ('hourly', 'hour', 'hr'):
-            result = climate_statistics(cube, operator='mean', period=period)
-            expected = np.array([4., 3., 10.], dtype=np.float32)
+        for period in ("hourly", "hour", "hr"):
+            result = climate_statistics(cube, operator="mean", period=period)
+            expected = np.array([4.0, 3.0, 10.0], dtype=np.float32)
             assert_array_equal(result.data, expected)
             expected_hours = [0, 1, 2]
-            assert_array_equal(result.coord('hour').points, expected_hours)
-            self.assertEqual(result.units, 'kg m-2 s-1')
+            assert_array_equal(result.coord("hour").points, expected_hours)
+            self.assertEqual(result.units, "kg m-2 s-1")
 
     def test_period_not_supported(self):
         """Test for time avg of a realistic time axis and 365 day calendar."""
-        data = np.array([1., 1., 1., 1., 1., 1.], dtype=np.float32)
+        data = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float32)
         times = np.array([15, 45, 74, 105, 135, 166])
-        bounds = np.array([[0, 31], [31, 59], [59, 90], [90, 120], [120, 151],
-                           [151, 181]])
+        bounds = np.array(
+            [[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], [151, 181]]
+        )
         cube = self._create_cube(data, times, bounds)
         with self.assertRaises(ValueError):
-            climate_statistics(cube, operator='mean', period='bad')
+            climate_statistics(cube, operator="mean", period="bad")
 
     def test_time_max(self):
         """Test for time max of a 1D field."""
-        data = np.array([0., 1., 2.], dtype=np.float32)
-        times = np.array([15., 45., 75.])
-        bounds = np.array([[0., 30.], [30., 60.], [60., 90.]])
+        data = np.array([0.0, 1.0, 2.0], dtype=np.float32)
+        times = np.array([15.0, 45.0, 75.0])
+        bounds = np.array([[0.0, 30.0], [30.0, 60.0], [60.0, 90.0]])
         cube = self._create_cube(data, times, bounds)
-        result = climate_statistics(cube, operator='max')
-        expected = np.array([2.], dtype=np.float32)
+        result = climate_statistics(cube, operator="max")
+        expected = np.array([2.0], dtype=np.float32)
         assert_array_equal(result.data, expected)
-        self.assertEqual(result.units, 'kg m-2 s-1')
+        self.assertEqual(result.units, "kg m-2 s-1")
 
     def test_time_min(self):
         """Test for time min of a 1D field."""
-        data = np.array([0., 1., 2.], dtype=np.float32)
-        times = np.array([15., 45., 75.])
-        bounds = np.array([[0., 30.], [30., 60.], [60., 90.]])
+        data = np.array([0.0, 1.0, 2.0], dtype=np.float32)
+        times = np.array([15.0, 45.0, 75.0])
+        bounds = np.array([[0.0, 30.0], [30.0, 60.0], [60.0, 90.0]])
         cube = self._create_cube(data, times, bounds)
-        result = climate_statistics(cube, operator='min')
-        expected = np.array([0.], dtype=np.float32)
+        result = climate_statistics(cube, operator="min")
+        expected = np.array([0.0], dtype=np.float32)
         assert_array_equal(result.data, expected)
-        self.assertEqual(result.units, 'kg m-2 s-1')
+        self.assertEqual(result.units, "kg m-2 s-1")
 
     def test_time_median(self):
         """Test for time median of a 1D field."""
-        data = np.array([0., 1., 2.], dtype=np.float32)
-        times = np.array([15., 45., 75.])
-        bounds = np.array([[0., 30.], [30., 60.], [60., 90.]])
+        data = np.array([0.0, 1.0, 2.0], dtype=np.float32)
+        times = np.array([15.0, 45.0, 75.0])
+        bounds = np.array([[0.0, 30.0], [30.0, 60.0], [60.0, 90.0]])
         cube = self._create_cube(data, times, bounds)
-        result = climate_statistics(cube, operator='median')
-        expected = np.array([1.], dtype=np.float32)
+        result = climate_statistics(cube, operator="median")
+        expected = np.array([1.0], dtype=np.float32)
         assert_array_equal(result.data, expected)
-        self.assertEqual(result.units, 'kg m-2 s-1')
+        self.assertEqual(result.units, "kg m-2 s-1")
 
     def test_time_rms(self):
         """Test for time rms of a 1D field."""
-        data = np.array([0., 1., 2.], dtype=np.float32)
-        times = np.array([15., 45., 75.])
-        bounds = np.array([[0., 30.], [30., 60.], [60., 90.]])
+        data = np.array([0.0, 1.0, 2.0], dtype=np.float32)
+        times = np.array([15.0, 45.0, 75.0])
+        bounds = np.array([[0.0, 30.0], [30.0, 60.0], [60.0, 90.0]])
         cube = self._create_cube(data, times, bounds)
-        result = climate_statistics(cube, operator='rms')
-        expected = np.array([(5 / 3)**0.5], dtype=np.float32)
+        result = climate_statistics(cube, operator="rms")
+        expected = np.array([(5 / 3) ** 0.5], dtype=np.float32)
         assert_array_equal(result.data, expected)
-        self.assertEqual(result.units, 'kg m-2 s-1')
+        self.assertEqual(result.units, "kg m-2 s-1")
 
     def test_time_dependent_fx(self):
         """Test average time dimension in time-dependent fx vars."""
         data = np.ones((3, 3, 3))
-        times = np.array([15., 45., 75.])
-        bounds = np.array([[0., 30.], [30., 60.], [60., 90.]])
+        times = np.array([15.0, 45.0, 75.0])
+        bounds = np.array([[0.0, 30.0], [30.0, 60.0], [60.0, 90.0]])
         cube = self._create_cube(data, times, bounds)
-        measure = iris.coords.CellMeasure(data,
-                                          standard_name='ocean_volume',
-                                          var_name='volcello',
-                                          units='m3',
-                                          measure='volume')
+        measure = iris.coords.CellMeasure(
+            data,
+            standard_name="ocean_volume",
+            var_name="volcello",
+            units="m3",
+            measure="volume",
+        )
         ancillary_var = iris.coords.AncillaryVariable(
             data,
-            standard_name='land_ice_area_fraction',
-            var_name='sftgif',
-            units='%')
+            standard_name="land_ice_area_fraction",
+            var_name="sftgif",
+            units="%",
+        )
         cube.add_cell_measure(measure, (0, 1, 2))
         cube.add_ancillary_variable(ancillary_var, (0, 1, 2))
-        with self.assertLogs(level='DEBUG') as cm:
-            result = climate_statistics(cube, operator='mean', period='mon')
-        self.assertEqual(cm.records[0].getMessage(),
-                         'Averaging time dimension in measure volcello.')
+        with self.assertLogs(level="DEBUG") as cm:
+            result = climate_statistics(cube, operator="mean", period="mon")
+        self.assertEqual(
+            cm.records[0].getMessage(),
+            "Averaging time dimension in measure volcello.",
+        )
         self.assertEqual(
             cm.records[1].getMessage(),
-            'Averaging time dimension in ancillary variable sftgif.')
-        self.assertEqual(result.cell_measure('ocean_volume').ndim, 2)
+            "Averaging time dimension in ancillary variable sftgif.",
+        )
+        self.assertEqual(result.cell_measure("ocean_volume").ndim, 2)
         self.assertEqual(
-            result.ancillary_variable('land_ice_area_fraction').ndim, 2)
-        self.assertEqual(result.units, 'kg m-2 s-1')
+            result.ancillary_variable("land_ice_area_fraction").ndim, 2
+        )
+        self.assertEqual(result.units, "kg m-2 s-1")
 
 
 class TestSeasonalStatistics(tests.Test):
     """Test :func:`esmvalcore.preprocessor._time.seasonal_statistics`."""
+
     @staticmethod
     def _create_cube(data, times):
-        time = iris.coords.DimCoord(times,
-                                    standard_name='time',
-                                    units=Unit('days since 1950-01-01',
-                                               calendar='360_day'))
+        time = iris.coords.DimCoord(
+            times,
+            standard_name="time",
+            units=Unit("days since 1950-01-01", calendar="360_day"),
+        )
         time.guess_bounds()
         cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
         return cube
@@ -813,8 +898,8 @@ def test_season_mean(self):
         times = np.arange(15, 360, 30)
         cube = self._create_cube(data, times)
 
-        result = seasonal_statistics(cube, 'mean')
-        expected = np.array([3., 6., 9.])
+        result = seasonal_statistics(cube, "mean")
+        expected = np.array([3.0, 6.0, 9.0])
         assert_array_equal(result.data, expected)
 
     def test_season_median(self):
@@ -823,8 +908,8 @@ def test_season_median(self):
         times = np.arange(15, 360, 30)
         cube = self._create_cube(data, times)
 
-        result = seasonal_statistics(cube, 'median')
-        expected = np.array([3., 6., 9.])
+        result = seasonal_statistics(cube, "median")
+        expected = np.array([3.0, 6.0, 9.0])
         assert_array_equal(result.data, expected)
 
     def test_season_min(self):
@@ -833,8 +918,8 @@ def test_season_min(self):
         times = np.arange(15, 360, 30)
         cube = self._create_cube(data, times)
 
-        result = seasonal_statistics(cube, 'min')
-        expected = np.array([2., 5., 8.])
+        result = seasonal_statistics(cube, "min")
+        expected = np.array([2.0, 5.0, 8.0])
         assert_array_equal(result.data, expected)
 
     def test_season_max(self):
@@ -843,8 +928,8 @@ def test_season_max(self):
         times = np.arange(15, 360, 30)
         cube = self._create_cube(data, times)
 
-        result = seasonal_statistics(cube, 'max')
-        expected = np.array([4., 7., 10.])
+        result = seasonal_statistics(cube, "max")
+        expected = np.array([4.0, 7.0, 10.0])
         assert_array_equal(result.data, expected)
 
     def test_season_sum(self):
@@ -853,8 +938,8 @@ def test_season_sum(self):
         times = np.arange(15, 360, 30)
         cube = self._create_cube(data, times)
 
-        result = seasonal_statistics(cube, 'sum')
-        expected = np.array([9., 18., 27.])
+        result = seasonal_statistics(cube, "sum")
+        expected = np.array([9.0, 18.0, 27.0])
         assert_array_equal(result.data, expected)
 
     def test_season_custom_mean(self):
@@ -863,9 +948,9 @@ def test_season_custom_mean(self):
         times = np.arange(15, 360, 30)
         cube = self._create_cube(data, times)
 
-        result = seasonal_statistics(cube,
-                                     'mean',
-                                     seasons=('jfmamj', 'jasond'))
+        result = seasonal_statistics(
+            cube, "mean", seasons=("jfmamj", "jasond")
+        )
         expected = np.array([2.5, 8.5])
         assert_array_equal(result.data, expected)
 
@@ -875,9 +960,9 @@ def test_season_custom_spans_full_season(self):
         times = np.arange(15, 360, 30)
         cube = self._create_cube(data, times)
 
-        result = seasonal_statistics(cube,
-                                     'mean',
-                                     seasons=('JJAS', 'ondjfmam'))
+        result = seasonal_statistics(
+            cube, "mean", seasons=("JJAS", "ondjfmam")
+        )
         expected = np.array([1])
         assert_array_equal(result.data, expected)
 
@@ -886,28 +971,35 @@ def test_time_dependent_fx(self):
         data = np.ones((12, 3, 3))
         times = np.arange(15, 360, 30)
         cube = self._create_cube(data, times)
-        measure = iris.coords.CellMeasure(data,
-                                          standard_name='ocean_volume',
-                                          var_name='volcello',
-                                          units='m3',
-                                          measure='volume')
+        measure = iris.coords.CellMeasure(
+            data,
+            standard_name="ocean_volume",
+            var_name="volcello",
+            units="m3",
+            measure="volume",
+        )
         ancillary_var = iris.coords.AncillaryVariable(
             data,
-            standard_name='land_ice_area_fraction',
-            var_name='sftgif',
-            units='%')
+            standard_name="land_ice_area_fraction",
+            var_name="sftgif",
+            units="%",
+        )
         cube.add_cell_measure(measure, (0, 1, 2))
         cube.add_ancillary_variable(ancillary_var, (0, 1, 2))
-        with self.assertLogs(level='DEBUG') as cm:
-            result = seasonal_statistics(cube, operator='mean')
-        self.assertEqual(cm.records[0].getMessage(),
-                         'Averaging time dimension in measure volcello.')
+        with self.assertLogs(level="DEBUG") as cm:
+            result = seasonal_statistics(cube, operator="mean")
+        self.assertEqual(
+            cm.records[0].getMessage(),
+            "Averaging time dimension in measure volcello.",
+        )
         self.assertEqual(
             cm.records[1].getMessage(),
-            'Averaging time dimension in ancillary variable sftgif.')
-        self.assertEqual(result.cell_measure('ocean_volume').ndim, 2)
+            "Averaging time dimension in ancillary variable sftgif.",
+        )
+        self.assertEqual(result.cell_measure("ocean_volume").ndim, 2)
         self.assertEqual(
-            result.ancillary_variable('land_ice_area_fraction').ndim, 2)
+            result.ancillary_variable("land_ice_area_fraction").ndim, 2
+        )
 
     def test_season_not_available(self):
         """Test that an exception is raised if a season is not available."""
@@ -916,26 +1008,28 @@ def test_season_not_available(self):
         cube = self._create_cube(data, times)
         iris.coord_categorisation.add_season(
             cube,
-            'time',
-            name='clim_season',
-            seasons=['JFMAMJ', 'JASOND'],
+            "time",
+            name="clim_season",
+            seasons=["JFMAMJ", "JASOND"],
         )
         msg = (
             "Seasons ('DJF', 'MAM', 'JJA', 'SON') do not match prior season "
             "extraction ['JASOND', 'JFMAMJ']."
         )
         with pytest.raises(ValueError, match=re.escape(msg)):
-            seasonal_statistics(cube, 'mean')
+            seasonal_statistics(cube, "mean")
 
 
 class TestMonthlyStatistics(tests.Test):
     """Test :func:`esmvalcore.preprocessor._time.monthly_statistics`."""
+
     @staticmethod
     def _create_cube(data, times):
-        time = iris.coords.DimCoord(times,
-                                    standard_name='time',
-                                    units=Unit('days since 1950-01-01',
-                                               calendar='360_day'))
+        time = iris.coords.DimCoord(
+            times,
+            standard_name="time",
+            units=Unit("days since 1950-01-01", calendar="360_day"),
+        )
         time.guess_bounds()
         cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
         return cube
@@ -946,10 +1040,10 @@ def test_mean(self):
         times = np.arange(7, 360, 15)
         cube = self._create_cube(data, times)
 
-        result = monthly_statistics(cube, 'mean')
-        expected = np.array([
-            0.5, 2.5, 4.5, 6.5, 8.5, 10.5, 12.5, 14.5, 16.5, 18.5, 20.5, 22.5
-        ])
+        result = monthly_statistics(cube, "mean")
+        expected = np.array(
+            [0.5, 2.5, 4.5, 6.5, 8.5, 10.5, 12.5, 14.5, 16.5, 18.5, 20.5, 22.5]
+        )
         assert_array_equal(result.data, expected)
 
     def test_median(self):
@@ -958,10 +1052,10 @@ def test_median(self):
         times = np.arange(7, 360, 15)
         cube = self._create_cube(data, times)
 
-        result = monthly_statistics(cube, 'median')
-        expected = np.array([
-            0.5, 2.5, 4.5, 6.5, 8.5, 10.5, 12.5, 14.5, 16.5, 18.5, 20.5, 22.5
-        ])
+        result = monthly_statistics(cube, "median")
+        expected = np.array(
+            [0.5, 2.5, 4.5, 6.5, 8.5, 10.5, 12.5, 14.5, 16.5, 18.5, 20.5, 22.5]
+        )
         assert_array_equal(result.data, expected)
 
     def test_min(self):
@@ -970,7 +1064,7 @@ def test_min(self):
         times = np.arange(7, 360, 15)
         cube = self._create_cube(data, times)
 
-        result = monthly_statistics(cube, 'min')
+        result = monthly_statistics(cube, "min")
         expected = np.array([0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22])
         assert_array_equal(result.data, expected)
 
     def test_max(self):
@@ -980,7 +1074,7 @@ def test_max(self):
         times = np.arange(7, 360, 15)
         cube = self._create_cube(data, times)
 
-        result = monthly_statistics(cube, 'max')
+        result = monthly_statistics(cube, "max")
         expected = np.array([1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23])
         assert_array_equal(result.data, expected)
 
     def test_sum(self):
@@ -990,47 +1084,56 @@ def test_sum(self):
         times = np.arange(7, 360, 15)
         cube = self._create_cube(data, times)
 
-        result = monthly_statistics(cube, 'sum')
+        result = monthly_statistics(cube, "sum")
         expected = np.array([1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 41, 45])
         assert_array_equal(result.data, expected)
 
     def test_time_dependent_fx(self):
         """Test average time dimension in time-dependent fx vars."""
         data = np.ones((3, 3, 3))
-        times = np.array([15., 45., 75.])
+        times = np.array([15.0, 45.0, 75.0])
         cube = self._create_cube(data, times)
-        measure = iris.coords.CellMeasure(data,
-                                          standard_name='ocean_volume',
-                                          var_name='volcello',
-                                          units='m3',
-                                          measure='volume')
+        measure = iris.coords.CellMeasure(
+            data,
+            standard_name="ocean_volume",
+            var_name="volcello",
+            units="m3",
+            measure="volume",
+        )
         ancillary_var = iris.coords.AncillaryVariable(
             data,
-            standard_name='land_ice_area_fraction',
-            var_name='sftgif',
-            units='%')
+            standard_name="land_ice_area_fraction",
+            var_name="sftgif",
+            units="%",
+        )
         cube.add_cell_measure(measure, (0, 1, 2))
         cube.add_ancillary_variable(ancillary_var, (0, 1, 2))
-        with self.assertLogs(level='DEBUG') as cm:
-            result = monthly_statistics(cube, operator='mean')
-        self.assertEqual(cm.records[0].getMessage(),
-                         'Averaging time dimension in measure volcello.')
+        with self.assertLogs(level="DEBUG") as cm:
+            result = monthly_statistics(cube, operator="mean")
+        self.assertEqual(
+            cm.records[0].getMessage(),
+            "Averaging time dimension in measure volcello.",
+        )
         self.assertEqual(
             cm.records[1].getMessage(),
-            'Averaging time dimension in ancillary variable sftgif.')
-        self.assertEqual(result.cell_measure('ocean_volume').ndim, 2)
+            "Averaging time dimension in ancillary variable sftgif.",
+        )
+        self.assertEqual(result.cell_measure("ocean_volume").ndim, 2)
         self.assertEqual(
-            result.ancillary_variable('land_ice_area_fraction').ndim, 2)
+            result.ancillary_variable("land_ice_area_fraction").ndim, 2
+        )
 
 
 class TestHourlyStatistics(tests.Test):
     """Test :func:`esmvalcore.preprocessor._time.hourly_statistics`."""
+
     @staticmethod
     def _create_cube(data, times):
-        time = iris.coords.DimCoord(times,
-                                    standard_name='time',
-                                    units=Unit('hours since 1950-01-01',
-                                               calendar='360_day'))
+        time = iris.coords.DimCoord(
+            times,
+            standard_name="time",
+            units=Unit("hours since 1950-01-01", calendar="360_day"),
+        )
         time.guess_bounds()
         cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
         return cube
@@ -1041,7 +1144,7 @@ def test_mean(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = hourly_statistics(cube, 12, 'mean')
+        result = hourly_statistics(cube, 12, "mean")
         expected = np.array([0.5, 2.5, 4.5, 6.5])
         assert_array_equal(result.data, expected)
 
     def test_median(self):
@@ -1051,7 +1154,7 @@ def test_median(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = hourly_statistics(cube, 12, 'median')
+        result = hourly_statistics(cube, 12, "median")
         expected = np.array([0.5, 2.5, 4.5, 6.5])
         assert_array_equal(result.data, expected)
 
     def test_min(self):
@@ -1061,8 +1164,8 @@ def test_min(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = hourly_statistics(cube, 12, 'min')
-        expected = np.array([0., 2., 4., 6.])
+        result = hourly_statistics(cube, 12, "min")
+        expected = np.array([0.0, 2.0, 4.0, 6.0])
         assert_array_equal(result.data, expected)
 
     def test_max(self):
@@ -1071,8 +1174,8 @@ def test_max(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = hourly_statistics(cube, 12, 'max')
-        expected = np.array([1., 3., 5., 7.])
+        result = hourly_statistics(cube, 12, "max")
+        expected = np.array([1.0, 3.0, 5.0, 7.0])
         assert_array_equal(result.data, expected)
 
     def test_sum(self):
@@ -1081,19 +1184,21 @@ def test_sum(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = hourly_statistics(cube, 12, 'sum')
-        expected = np.array([1., 5., 9., 13.])
+        result = hourly_statistics(cube, 12, "sum")
+        expected = np.array([1.0, 5.0, 9.0, 13.0])
         assert_array_equal(result.data, expected)
 
 
 class TestDailyStatistics(tests.Test):
     """Test :func:`esmvalcore.preprocessor._time.daily_statistics`."""
+
     @staticmethod
     def _create_cube(data, times):
-        time = iris.coords.DimCoord(times,
-                                    standard_name='time',
-                                    units=Unit('hours since 1950-01-01',
-                                               calendar='360_day'))
+        time = iris.coords.DimCoord(
+            times,
+            standard_name="time",
+            units=Unit("hours since 1950-01-01", calendar="360_day"),
+        )
         time.guess_bounds()
         cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
         return cube
@@ -1104,7 +1209,7 @@ def test_mean(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = daily_statistics(cube, 'mean')
+        result = daily_statistics(cube, "mean")
         expected = np.array([1.5, 5.5])
         assert_array_equal(result.data, expected)
 
     def test_median(self):
@@ -1114,7 +1219,7 @@ def test_median(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = daily_statistics(cube, 'median')
+        result = daily_statistics(cube, "median")
         expected = np.array([1.5, 5.5])
         assert_array_equal(result.data, expected)
 
     def test_min(self):
@@ -1124,8 +1229,8 @@ def test_min(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = daily_statistics(cube, 'min')
-        expected = np.array([0., 4.])
+        result = daily_statistics(cube, "min")
+        expected = np.array([0.0, 4.0])
         assert_array_equal(result.data, expected)
 
     def test_max(self):
@@ -1134,8 +1239,8 @@ def test_max(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = daily_statistics(cube, 'max')
-        expected = np.array([3., 7.])
+        result = daily_statistics(cube, "max")
+        expected = np.array([3.0, 7.0])
         assert_array_equal(result.data, expected)
 
     def test_sum(self):
@@ -1144,126 +1249,126 @@ def test_sum(self):
         times = np.arange(0, 48, 6)
         cube = self._create_cube(data, times)
 
-        result = daily_statistics(cube, 'sum')
-        expected = np.array([6., 22.])
+        result = daily_statistics(cube, "sum")
+        expected = np.array([6.0, 22.0])
         assert_array_equal(result.data, expected)
 
 
 @pytest.fixture
 def cube_1d_time():
     """Simple 1D cube with time coordinate of length one."""
-    units = Unit('days since 2000-01-01', calendar='standard')
+    units = Unit("days since 2000-01-01", calendar="standard")
     time_coord = iris.coords.DimCoord(
         units.date2num(datetime(2024, 1, 26, 14, 57, 28)),
         bounds=[
             units.date2num(datetime(2024, 1, 26, 13, 57, 28)),
             units.date2num(datetime(2024, 1, 26, 15, 57, 28)),
         ],
-        standard_name='time',
-        attributes={'test': 1},
+        standard_name="time",
+        attributes={"test": 1},
         units=units,
     )
-    cube = Cube([1], var_name='tas', dim_coords_and_dims=[(time_coord, 0)])
+    cube = Cube([1], var_name="tas", dim_coords_and_dims=[(time_coord, 0)])
     return cube
 
 
 @pytest.mark.parametrize(
-    'frequency,calendar,new_date,new_bounds',
+    "frequency,calendar,new_date,new_bounds",
     [
-        ('dec', None, (2024, 1, 1), [(2019, 1, 1), (2029, 1, 1)]),
-        ('dec', '365_day', (2024, 1, 1), [(2019, 1, 1), (2029, 1, 1)]),
-        ('yr', None, (2024, 7, 1), [(2024, 1, 1), (2025, 1, 1)]),
-        ('yr', '365_day', (2024, 7, 1), [(2024, 1, 1), (2025, 1, 1)]),
-        ('yrPt', None, (2024, 7, 1), [(2024, 1, 1), (2025, 1, 1)]),
-        ('yrPt', '365_day', (2024, 7, 1), [(2024, 1, 1), (2025, 1, 1)]),
-        ('mon', None, (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
-        ('mon', '365_day', (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
-        ('monC', None, (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
-        ('monC', '365_day', (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
-        ('monPt', None, (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
-        ('monPt', '365_day', (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
-        ('day', None, (2024, 1, 26, 12), [(2024, 1, 26), (2024, 1, 27)]),
-        ('24hr', None, (2024, 1, 26, 12), [(2024, 1, 26), (2024, 1, 27)]),
-        ('12hr', None, (2024, 1, 26, 18), [(2024, 1, 26, 12), (2024, 1, 27)]),
+        ("dec", None, (2024, 1, 1), [(2019, 1, 1), (2029, 1, 1)]),
+        ("dec", "365_day", (2024, 1, 1), [(2019, 1, 1), (2029, 1, 1)]),
+        ("yr", None, (2024, 7, 1), [(2024, 1, 1), (2025, 1, 1)]),
+        ("yr", "365_day", (2024, 7, 1), [(2024, 1, 1), (2025, 1, 1)]),
+        ("yrPt", None, (2024, 7, 1), [(2024, 1, 1), (2025, 1, 1)]),
+        ("yrPt", "365_day", (2024, 7, 1), [(2024, 1, 1), (2025, 1, 1)]),
+        ("mon", None, (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
+        ("mon", "365_day", (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
+        ("monC", None, (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
+        ("monC", "365_day", (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
+        ("monPt", None, (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
+        ("monPt", "365_day", (2024, 1, 15), [(2024, 1, 1), (2024, 2, 1)]),
+        ("day", None, (2024, 1, 26, 12), [(2024, 1, 26), (2024, 1, 27)]),
+        ("24hr", None, (2024, 1, 26, 12), [(2024, 1, 26), (2024, 1, 27)]),
+        ("12hr", None, (2024, 1, 26, 18), [(2024, 1, 26, 12), (2024, 1, 27)]),
         (
-            '8hr',
+            "8hr",
             None,
             (2024, 1, 26, 12),
             [(2024, 1, 26, 8), (2024, 1, 26, 16)],
         ),
         (
-            '6hr',
+            "6hr",
             None,
             (2024, 1, 26, 15),
             [(2024, 1, 26, 12), (2024, 1, 26, 18)],
         ),
         (
-            '6hrPt',
+            "6hrPt",
            None,
             (2024, 1, 26, 15),
             [(2024, 1, 26, 12), (2024, 1, 26, 18)],
         ),
         (
-            '6hrCM',
+            "6hrCM",
             None,
             (2024, 1, 26, 15),
             [(2024, 1, 26, 12), (2024, 1, 26, 18)],
         ),
         (
-            '4hr',
+            "4hr",
             None,
             (2024, 1, 26, 14),
             [(2024, 1, 26, 12), (2024, 1, 26, 16)],
         ),
         (
-            '3hr',
+            "3hr",
             None,
             (2024, 1, 26, 13, 30),
             [(2024, 1, 26, 12), (2024, 1, 26, 15)],
         ),
         (
-            '3hrPt',
+            "3hrPt",
             None,
             (2024, 1, 26, 13, 30),
             [(2024, 1, 26, 12), (2024, 1, 26, 15)],
         ),
         (
-            '3hrCM',
+            "3hrCM",
             None,
             (2024, 1, 26, 13, 30),
             [(2024, 1, 26, 12), (2024, 1, 26, 15)],
         ),
         (
-            '2hr',
+            "2hr",
             None,
             (2024, 1, 26, 15),
             [(2024, 1, 26, 14), (2024, 1, 26, 16)],
         ),
         (
-            '1hr',
+            "1hr",
             None,
             (2024, 1, 26, 14, 30),
             [(2024, 1, 26, 14), (2024, 1, 26, 15)],
         ),
         (
-            '1hrPt',
+            "1hrPt",
             None,
             (2024, 1, 26, 14, 30),
             [(2024, 1, 26, 14), (2024, 1, 26, 15)],
         ),
         (
-            '1hrCM',
+            "1hrCM",
             None,
             (2024, 1, 26, 14, 30),
             [(2024, 1, 26, 14), (2024, 1, 26, 15)],
         ),
         (
-            'hr',
+            "hr",
             None,
             (2024, 1, 26, 14, 30),
             [(2024, 1, 26, 14), (2024, 1, 26, 15)],
         ),
-    ]
+    ],
 )
 def test_regrid_time(cube_1d_time, frequency, calendar, new_date, new_bounds):
     """Test ``regrid_time``."""
@@ -1275,15 +1380,15 @@ def test_regrid_time(cube_1d_time, frequency, calendar, new_date, new_bounds):
 
     assert new_cube.data == cube.data
     assert new_cube.metadata == cube.metadata
-    time = new_cube.coord('time')
+    time = new_cube.coord("time")
     if calendar is None:
-        assert time.metadata == cube.coord('time').metadata
+        assert time.metadata == cube.coord("time").metadata
     else:
         assert time.metadata == DimCoordMetadata(
-            'time',
-            'time',
-            'time',
-            Unit('days since 1850-01-01 00:00:00', calendar=calendar),
+            "time",
+            "time",
+            "time",
+            Unit("days since 1850-01-01 00:00:00", calendar=calendar),
             {},
             None,
             False,
@@ -1309,62 +1414,62 @@ def test_regrid_time(cube_1d_time, frequency, calendar, new_date, new_bounds):
 
 def test_regrid_time_aux_coords(cube_1d_time):
     """Test ``regrid_time``."""
-    iris.coord_categorisation.add_day_of_month(cube_1d_time, 'time')
-    iris.coord_categorisation.add_day_of_year(cube_1d_time, 'time')
-    iris.coord_categorisation.add_hour(cube_1d_time, 'time')
-    iris.coord_categorisation.add_month(cube_1d_time, 'time')
-    iris.coord_categorisation.add_month_fullname(cube_1d_time, 'time')
-    iris.coord_categorisation.add_month_number(cube_1d_time, 'time')
-    iris.coord_categorisation.add_season(cube_1d_time, 'time')
-    iris.coord_categorisation.add_season_number(cube_1d_time, 'time')
-    iris.coord_categorisation.add_season_year(cube_1d_time, 'time')
-    iris.coord_categorisation.add_weekday(cube_1d_time, 'time')
-    iris.coord_categorisation.add_weekday_fullname(cube_1d_time, 'time')
-    iris.coord_categorisation.add_weekday_number(cube_1d_time, 'time')
-    iris.coord_categorisation.add_year(cube_1d_time, 'time')
+    iris.coord_categorisation.add_day_of_month(cube_1d_time, "time")
+    iris.coord_categorisation.add_day_of_year(cube_1d_time, "time")
+    iris.coord_categorisation.add_hour(cube_1d_time, "time")
+    iris.coord_categorisation.add_month(cube_1d_time, "time")
+    iris.coord_categorisation.add_month_fullname(cube_1d_time, "time")
+    iris.coord_categorisation.add_month_number(cube_1d_time, "time")
+    iris.coord_categorisation.add_season(cube_1d_time, "time")
+    iris.coord_categorisation.add_season_number(cube_1d_time, "time")
+    iris.coord_categorisation.add_season_year(cube_1d_time, "time")
+    iris.coord_categorisation.add_weekday(cube_1d_time, "time")
+    iris.coord_categorisation.add_weekday_fullname(cube_1d_time, "time")
+    iris.coord_categorisation.add_weekday_number(cube_1d_time, "time")
+    iris.coord_categorisation.add_year(cube_1d_time, "time")
     cube = cube_1d_time.copy()
 
-    new_cube = regrid_time(cube, 'yr')
+    new_cube = regrid_time(cube, "yr")
 
     assert cube == cube_1d_time
     assert new_cube.data == cube.data
     assert new_cube.metadata == cube.metadata
-    np.testing.assert_array_equal(new_cube.coord('day_of_month').points, [1])
-    np.testing.assert_array_equal(new_cube.coord('day_of_year').points, [183])
-    np.testing.assert_array_equal(new_cube.coord('hour').points, [0])
-    np.testing.assert_array_equal(new_cube.coord('month').points, ['Jul'])
+    np.testing.assert_array_equal(new_cube.coord("day_of_month").points, [1])
+    np.testing.assert_array_equal(new_cube.coord("day_of_year").points, [183])
+    np.testing.assert_array_equal(new_cube.coord("hour").points, [0])
+    np.testing.assert_array_equal(new_cube.coord("month").points, ["Jul"])
     np.testing.assert_array_equal(
-        new_cube.coord('month_fullname').points, ['July']
+        new_cube.coord("month_fullname").points, ["July"]
     )
-    np.testing.assert_array_equal(new_cube.coord('month_number').points, [7])
-    np.testing.assert_array_equal(new_cube.coord('season').points, ['jja'])
-    np.testing.assert_array_equal(new_cube.coord('season_number').points, [2])
-    np.testing.assert_array_equal(new_cube.coord('season_year').points, [2024])
-    np.testing.assert_array_equal(new_cube.coord('weekday').points, ['Mon'])
+    np.testing.assert_array_equal(new_cube.coord("month_number").points, [7])
+    np.testing.assert_array_equal(new_cube.coord("season").points, ["jja"])
+    np.testing.assert_array_equal(new_cube.coord("season_number").points, [2])
+    np.testing.assert_array_equal(new_cube.coord("season_year").points, [2024])
+    np.testing.assert_array_equal(new_cube.coord("weekday").points, ["Mon"])
     np.testing.assert_array_equal(
-        new_cube.coord('weekday_fullname').points, ['Monday']
+        new_cube.coord("weekday_fullname").points, ["Monday"]
     )
-    np.testing.assert_array_equal(new_cube.coord('weekday_number').points, [0])
-    np.testing.assert_array_equal(new_cube.coord('year').points, [2024])
+    np.testing.assert_array_equal(new_cube.coord("weekday_number").points, [0])
+    np.testing.assert_array_equal(new_cube.coord("year").points, [2024])
 
 
 def test_regrid_time_invalid_freq(cube_1d_time):
     """Test ``regrid_time``."""
     msg = "Frequency 'invalid' is not supported"
     with pytest.raises(NotImplementedError, match=msg):
-        regrid_time(cube_1d_time, 'invalid')
+        regrid_time(cube_1d_time, "invalid")
 
 
-@pytest.mark.parametrize('freq', ['day', '6hr', '3hrPt', '1hrCM', 'hr'])
+@pytest.mark.parametrize("freq", ["day", "6hr", "3hrPt", "1hrCM", "hr"])
 def test_regrid_time_invalid_freq_for_calendar(cube_1d_time, freq):
     """Test ``regrid_time``."""
     msg = f"Setting a fixed calendar is not supported for frequency '{freq}'"
     with pytest.raises(NotImplementedError, match=msg):
-        regrid_time(cube_1d_time, freq, calendar='365_day')
+        regrid_time(cube_1d_time, freq, calendar="365_day")
 
 
-@pytest.mark.parametrize('freq', ['5hr', '7hrPt', '9hrCM', '10hr', '21hrPt'])
+@pytest.mark.parametrize("freq", ["5hr", "7hrPt", "9hrCM", "10hr", "21hrPt"])
 def test_regrid_time_hour_no_divisor_of_24(cube_1d_time, freq):
     """Test ``regrid_time``."""
     msg = f"For `n`-hourly data, `n` must be a divisor of 24, got '{freq}'"
@@ -1374,92 +1479,105 @@ def test_regrid_time_hour_no_divisor_of_24(cube_1d_time, freq):
 
 class TestTimeseriesFilter(tests.Test):
     """Tests for timeseries filter."""
+
     def setUp(self):
         """Prepare tests."""
         self.cube = _create_sample_cube()
 
     def test_timeseries_filter_simple(self):
         """Test timeseries_filter func."""
-        filtered_cube = timeseries_filter(self.cube,
-                                          7,
-                                          14,
-                                          filter_type='lowpass',
-                                          filter_stats='sum')
-        expected_data = np.array([
-            2.44824568, 3.0603071, 3.67236852, 4.28442994, 4.89649137,
-            5.50855279, 6.12061421, 6.73267563, 7.34473705, 7.95679847,
-            8.56885989, 9.18092131, 9.79298273, 10.40504415, 11.01710557,
-            11.62916699, 12.24122841, 12.85328983
-        ])
+        filtered_cube = timeseries_filter(
+            self.cube, 7, 14, filter_type="lowpass", filter_stats="sum"
+        )
+        expected_data = np.array(
+            [
+                2.44824568,
+                3.0603071,
+                3.67236852,
+                4.28442994,
+                4.89649137,
+                5.50855279,
+                6.12061421,
+                6.73267563,
+                7.34473705,
+                7.95679847,
+                8.56885989,
+                9.18092131,
+                9.79298273,
+                10.40504415,
+                11.01710557,
+                11.62916699,
+                12.24122841,
+                12.85328983,
+            ]
+        )
         assert_array_almost_equal(filtered_cube.data, expected_data)
-        assert len(filtered_cube.coord('time').points) == 18
+        assert len(filtered_cube.coord("time").points) == 18
 
     def test_timeseries_filter_timecoord(self):
         """Test missing time axis."""
         new_cube = self.cube.copy()
-        new_cube.remove_coord(new_cube.coord('time'))
+        new_cube.remove_coord(new_cube.coord("time"))
         with self.assertRaises(iris.exceptions.CoordinateNotFoundError):
-            timeseries_filter(new_cube,
-                              7,
-                              14,
-                              filter_type='lowpass',
-                              filter_stats='sum')
+            timeseries_filter(
+                new_cube, 7, 14, filter_type="lowpass", filter_stats="sum"
+            )
 
     def test_timeseries_filter_implemented(self):
         """Test a not implemented filter."""
         with self.assertRaises(NotImplementedError):
-            timeseries_filter(self.cube,
-                              7,
-                              14,
-                              filter_type='bypass',
-                              filter_stats='sum')
+            timeseries_filter(
+                self.cube, 7, 14, filter_type="bypass", filter_stats="sum"
+            )
 
 
 def make_time_series(number_years=2):
     """Make a cube with time only dimension."""
     times = np.array([i * 30 + 15 for i in range(0, 12 * number_years, 1)])
     bounds = np.array([i * 30 for i in range(0, 12 * number_years + 1, 1)])
-    bounds = np.array([[bnd, bounds[index + 1]]
-                       for index, bnd in enumerate(bounds[:-1])])
+    bounds = np.array(
+        [[bnd, bounds[index + 1]] for index, bnd in enumerate(bounds[:-1])]
+    )
     data = np.ones_like(times)
-    time = iris.coords.DimCoord(times,
-                                bounds=bounds,
-                                standard_name='time',
-                                units=Unit('days since 1950-01-01',
-                                           calendar='360_day'))
+    time = iris.coords.DimCoord(
+        times,
+        bounds=bounds,
+        standard_name="time",
+        units=Unit("days since 1950-01-01", calendar="360_day"),
+    )
     cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
     return cube
 
 
-@pytest.mark.parametrize('existing_coord', [True, False])
+@pytest.mark.parametrize("existing_coord", [True, False])
 def test_annual_average(existing_coord):
     """Test for annual average."""
     cube = make_time_series(number_years=2)
     if existing_coord:
-        iris.coord_categorisation.add_year(cube, 'time')
+        iris.coord_categorisation.add_year(cube, "time")
 
     result = annual_statistics(cube)
-    expected = np.array([1., 1.])
+    expected = np.array([1.0, 1.0])
     assert_array_equal(result.data, expected)
-    expected_time = np.array([180., 540.])
-    assert_array_equal(result.coord('time').points, expected_time)
+    expected_time = np.array([180.0, 540.0])
+    assert_array_equal(result.coord("time").points, expected_time)
 
 
-@pytest.mark.parametrize('existing_coord', [True, False])
+@pytest.mark.parametrize("existing_coord", [True, False])
 def test_annual_sum(existing_coord):
     """Test for annual sum."""
     cube = make_time_series(number_years=2)
     if existing_coord:
-        iris.coord_categorisation.add_year(cube, 'time')
+        iris.coord_categorisation.add_year(cube, "time")
 
-    result = annual_statistics(cube, 'sum')
-    expected = np.array([12., 12.])
+    result = annual_statistics(cube, "sum")
+    expected = np.array([12.0, 12.0])
     assert_array_equal(result.data, expected)
-    expected_time = np.array([180., 540.])
-    assert_array_equal(result.coord('time').points, expected_time)
+    expected_time = np.array([180.0, 540.0])
+    assert_array_equal(result.coord("time").points, expected_time)
 
 
-@pytest.mark.parametrize('existing_coord', [True, False])
+@pytest.mark.parametrize("existing_coord", [True, False])
 def test_decadal_average(existing_coord):
     """Test for decadal average."""
     cube = make_time_series(number_years=20)
@@ -1471,46 +1589,53 @@ def get_decade(coord, value):
             return date.year - date.year % 10
 
         iris.coord_categorisation.add_categorised_coord(
-            cube, 'decade', 'time', get_decade)
+            cube, "decade", "time", get_decade
+        )
 
     result = decadal_statistics(cube)
-    expected = np.array([1., 1.])
+    expected = np.array([1.0, 1.0])
     assert_array_equal(result.data, expected)
-    expected_time = np.array([1800., 5400.])
-    assert_array_equal(result.coord('time').points, expected_time)
+    expected_time = np.array([1800.0, 5400.0])
+    assert_array_equal(result.coord("time").points, expected_time)
 
 
-@pytest.mark.parametrize('existing_coord', [True, False])
+@pytest.mark.parametrize("existing_coord", [True, False])
 def test_decadal_average_time_dependent_fx(existing_coord):
     """Test for decadal average."""
     cube = make_time_series(number_years=20)
-    measure = iris.coords.CellMeasure(cube.data,
-                                      standard_name='ocean_volume',
-                                      var_name='volcello',
-                                      units='m3',
-                                      measure='volume')
+    measure = iris.coords.CellMeasure(
+        cube.data,
+        standard_name="ocean_volume",
+        var_name="volcello",
+        units="m3",
+        measure="volume",
+    )
     ancillary_var = iris.coords.AncillaryVariable(
         cube.data,
-        standard_name='land_ice_area_fraction',
-        var_name='sftgif',
-        units='%')
+        standard_name="land_ice_area_fraction",
+        var_name="sftgif",
+        units="%",
+    )
     cube.add_cell_measure(measure, 0)
     cube.add_ancillary_variable(ancillary_var, 0)
     if existing_coord:
+
         def get_decade(coord, value):
             """Get decades from cube."""
             date = coord.units.num2date(value)
             return date.year - date.year % 10
 
         iris.coord_categorisation.add_categorised_coord(
-            cube, 'decade', 'time', get_decade)
+            cube, "decade", "time", get_decade
+        )
 
     result = decadal_statistics(cube)
-    assert result.cell_measure('ocean_volume').data.shape == (1,)
-    assert result.ancillary_variable(
-        'land_ice_area_fraction').data.shape == (1,)
+    assert result.cell_measure("ocean_volume").data.shape == (1,)
+    assert result.ancillary_variable("land_ice_area_fraction").data.shape == (
+        1,
+    )
 
 
-@pytest.mark.parametrize('existing_coord', [True, False])
+@pytest.mark.parametrize("existing_coord", [True, False])
 def test_decadal_sum(existing_coord):
     """Test for decadal sum."""
     cube = make_time_series(number_years=20)
@@ -1522,31 +1647,33 @@ def get_decade(coord, value):
             return date.year - date.year % 10
 
         iris.coord_categorisation.add_categorised_coord(
-            cube, 'decade', 'time', get_decade)
+            cube, "decade", "time", get_decade
+        )
 
-    result = decadal_statistics(cube, 'sum')
-    expected = np.array([120., 120.])
+    result = decadal_statistics(cube, "sum")
+    expected = np.array([120.0, 120.0])
     assert_array_equal(result.data, expected)
-    expected_time = np.array([1800., 5400.])
-    assert_array_equal(result.coord('time').points, expected_time)
+    expected_time = np.array([1800.0, 5400.0])
+    assert_array_equal(result.coord("time").points, expected_time)
 
 
 def make_map_data(number_years=2):
     """Make a cube with time, lat and lon dimensions."""
     times = np.arange(0.5, number_years * 360)
     bounds = np.stack(((times - 0.5), (times + 0.5)), 1)
-    time = iris.coords.DimCoord(times,
-                                bounds=bounds,
-                                standard_name='time',
-                                units=Unit('days since 1950-01-01',
-                                           calendar='360_day'))
+    time = iris.coords.DimCoord(
+        times,
+        bounds=bounds,
+        standard_name="time",
+        units=Unit("days since 1950-01-01", calendar="360_day"),
+    )
     lat = iris.coords.DimCoord(
         range(2),
-        standard_name='latitude',
+        standard_name="latitude",
     )
     lon = iris.coords.DimCoord(
         range(2),
-        standard_name='longitude',
+        standard_name="longitude",
     )
     data = np.array([[0, 1], [1, 0]]) * times[:, None, None]
     chunks = (int(data.shape[0] / 2), 1, 2)
@@ -1558,50 +1685,62 @@ def make_map_data(number_years=2):
 
 PARAMETERS: List[Tuple] = []
-for period in ('full', 'day', 'month', 'season'):
+for period in ("full", "day", "month", "season"):
     PARAMETERS.append((period, None))
-    if period == 'season':
-        PARAMETERS.append((period, {
-            "start_year": 1950,
-            'start_month': 3,
-            'start_day': 1,
-            "end_year": 1951,
-            'end_month': 3,
-            'end_day': 1,
-        }))
+    if period == "season":
+        PARAMETERS.append(
+            (
+                period,
+                {
+                    "start_year": 1950,
+                    "start_month": 3,
+                    "start_day": 1,
+                    "end_year": 1951,
+                    "end_month": 3,
+                    "end_day": 1,
+                },
+            )
+        )
     else:
-        PARAMETERS.append((period, {
-            "start_year": 1950,
-            'start_month': 1,
-            'start_day': 1,
-            "end_year": 1951,
-            'end_month': 1,
-            'end_day': 1,
-        }))
+        PARAMETERS.append(
+            (
+                period,
+                {
+                    "start_year": 1950,
+                    "start_month": 1,
+                    "start_day": 1,
+                    "end_year": 1951,
+                    "end_month": 1,
+                    "end_day": 1,
+                },
+            )
+        )
 
 
-@pytest.mark.parametrize('period', ['full'])
+@pytest.mark.parametrize("period", ["full"])
 def test_standardized_anomalies(period, standardize=True):
     """Test standardized ``anomalies``."""
     cube = make_map_data(number_years=2)
     result = anomalies(cube, period, standardize=standardize)
-    if period == 'full':
-        expected_anomalies = (cube.data -
-                              np.mean(cube.data, axis=0, keepdims=True))
+    if period == "full":
+        expected_anomalies = cube.data - np.mean(
+            cube.data, axis=0, keepdims=True
+        )
         if standardize:
             # NB: default behaviour for np.std is ddof=0, whereas
             # default behaviour for iris.analysis.STD_DEV is ddof=1
             expected_stdanomalies = expected_anomalies / np.std(
-                expected_anomalies, axis=0, keepdims=True, ddof=1)
+                expected_anomalies, axis=0, keepdims=True, ddof=1
+            )
             expected = np.ma.masked_invalid(expected_stdanomalies)
             assert_array_equal(result.data, expected)
-            assert result.units == '1'
+            assert result.units == "1"
         else:
             expected = np.ma.masked_invalid(expected_anomalies)
             assert_array_equal(result.data, expected)
 
 
-@pytest.mark.parametrize('period, reference', PARAMETERS)
+@pytest.mark.parametrize("period, reference", PARAMETERS)
 def test_anomalies_preserve_metadata(period, reference, standardize=False):
     """Test that ``anomalies`` preserves metadata."""
     cube = make_map_data(number_years=2)
@@ -1616,124 +1755,139 @@ def test_anomalies_preserve_metadata(period, reference, standardize=False):
         assert coord_cube == coord_res
 
 
-@pytest.mark.parametrize('period, reference', PARAMETERS)
+@pytest.mark.parametrize("period, reference", PARAMETERS)
 def test_anomalies(period, reference, standardize=False):
     """Test ``anomalies``."""
     cube = make_map_data(number_years=2)
     result = anomalies(cube, period, reference, standardize=standardize)
     if reference is None:
-        if period == 'full':
+        if period == "full":
             anom = np.arange(-359.5, 360)
-        elif period == 'day':
+        elif period == "day":
             anom = np.concatenate((np.ones(360) * -180, np.ones(360) * 180))
-        elif period == 'month':
+        elif period == "month":
             anom1 = np.concatenate(
-                [np.arange(-194.5, -165) for x in range(12)])
+                [np.arange(-194.5, -165) for x in range(12)]
+            )
             anom2 = np.concatenate([np.arange(165.5, 195) for x in range(12)])
             anom = np.concatenate((anom1, anom2))
-        elif period == 'season':
-            anom = np.concatenate((
-                np.arange(-314.5, -255),
-                np.arange(-224.5, -135),
-                np.arange(-224.5, -135),
-                np.arange(-224.5, -135),
-                np.arange(15.5, 105),
-                np.arange(135.5, 225),
-                np.arange(135.5, 225),
-                np.arange(135.5, 225),
-                np.arange(375.5, 405),
-            ))
+        elif period == "season":
+            anom = np.concatenate(
+                (
+                    np.arange(-314.5, -255),
+                    np.arange(-224.5, -135),
+                    np.arange(-224.5, -135),
+                    np.arange(-224.5, -135),
+                    np.arange(15.5, 105),
+                    np.arange(135.5, 225),
+                    np.arange(135.5, 225),
+                    np.arange(135.5, 225),
+                    np.arange(375.5, 405),
+                )
+            )
     else:
-        if period == 'full':
+        if period == "full":
             anom = np.arange(-179.5, 540)
-        elif period == 'day':
+        elif period == "day":
             anom = np.concatenate((np.zeros(360), np.ones(360) * 360))
-        elif period == 'month':
+        elif period == "month":
             anom1 = np.concatenate([np.arange(-14.5, 15) for x in range(12)])
             anom2 = np.concatenate([np.arange(345.5, 375) for x in range(12)])
             anom = np.concatenate((anom1, anom2))
-        elif period == 'season':
-            anom = np.concatenate((
-                np.arange(-374.5, -315),
-                np.arange(-44.5, 45),
-                np.arange(-44.5, 45),
-                np.arange(-44.5, 45),
-                np.arange(-44.5, 45),
-                np.arange(315.5, 405),
-                np.arange(315.5, 405),
-                np.arange(315.5, 405),
-                np.arange(315.5, 345),
-            ))
+        elif period == "season":
+            anom = np.concatenate(
+                (
+                    np.arange(-374.5, -315),
+                    np.arange(-44.5, 45),
+                    np.arange(-44.5, 45),
+                    np.arange(-44.5, 45),
+                    np.arange(-44.5, 45),
+                    np.arange(315.5, 405),
+                    np.arange(315.5, 405),
+                    np.arange(315.5, 405),
+                    np.arange(315.5, 345),
+                )
+            )
     expected = anom[:, None, None] * [[0, 1], [1, 0]]
     assert_array_equal(result.data, expected)
-    assert_array_equal(result.coord('time').points, cube.coord('time').points)
+    assert_array_equal(result.coord("time").points, cube.coord("time").points)
 
 
 def test_anomalies_custom_season():
     """Test ``anomalies`` with custom season."""
     cube = make_map_data(number_years=2)
-    result = anomalies(cube, 'season', seasons=('jfmamj', 'jasond'))
-    anom = np.concatenate((
-        np.arange(-269.5, -90),
-        np.arange(-269.5, -90),
-        np.arange(90.5, 270),
-        np.arange(90.5, 270),
-    ))
+    result = anomalies(cube, "season", seasons=("jfmamj", "jasond"))
+    anom = np.concatenate(
+        (
+            np.arange(-269.5, -90),
+            np.arange(-269.5, -90),
+            np.arange(90.5, 270),
+            np.arange(90.5, 270),
+        )
+    )
     expected = anom[:, None, None] * [[0, 1], [1, 0]]
     assert_array_equal(result.data, expected)
-    assert_array_equal(result.coord('time').points, cube.coord('time').points)
+    assert_array_equal(result.coord("time").points, cube.coord("time").points)
 
 
-@pytest.mark.parametrize('period', ['hourly', 'hour', 'hr'])
+@pytest.mark.parametrize("period", ["hourly", "hour", "hr"])
 def test_anomalies_hourly(period):
     """Test ``anomalies`` with hourly data."""
     cube = make_map_data(number_years=1)[:48, ...]
-    cube.coord('time').units = 'hours since 2000-01-01 00:00:00'
+    cube.coord("time").units = "hours since 2000-01-01 00:00:00"
     result = anomalies(cube, period)
-    expected = np.concatenate((
-        np.broadcast_to(np.array([[0, -12], [-12, 0]]), (24, 2, 2)),
-        np.broadcast_to(np.array([[0, 12], [12, 0]]), (24, 2, 2)),
-    ))
+    expected = np.concatenate(
+        (
+            np.broadcast_to(np.array([[0, -12], [-12, 0]]), (24, 2, 2)),
+            np.broadcast_to(np.array([[0, 12], [12, 0]]), (24, 2, 2)),
+        )
+    )
    assert_array_equal(result.data, expected)
-    assert result.coord('time') == cube.coord('time')
+    assert result.coord("time") == cube.coord("time")
 
 
 def get_0d_time():
     """Get 0D time coordinate."""
-    time = iris.coords.AuxCoord(15.0,
-                                bounds=[0.0, 30.0],
-                                standard_name='time',
-                                units='days since 1850-01-01 00:00:00')
+    time = iris.coords.AuxCoord(
+        15.0,
+        bounds=[0.0, 30.0],
+        standard_name="time",
+        units="days since 1850-01-01 00:00:00",
+    )
     return time
 
 
 def get_1d_time():
     """Get 1D time coordinate."""
-    time = iris.coords.DimCoord([20., 45.],
-                                standard_name='time',
-                                bounds=[[15., 30.], [30., 60.]],
-                                units=Unit('days since 1950-01-01',
-                                           calendar='gregorian'))
+    time = iris.coords.DimCoord(
+        [20.0, 45.0],
+        standard_name="time",
+        bounds=[[15.0, 30.0], [30.0, 60.0]],
+        units=Unit("days since 1950-01-01", calendar="gregorian"),
+    )
     return time
 
 
 def get_2d_time():
     """Get 2D time coordinate."""
-    time = iris.coords.AuxCoord([[20., 45.]],
-                                standard_name='time',
-                                bounds=[[[15., 30.], [30., 60.]]],
-                                units=Unit('days since 1950-01-01',
-                                           calendar='gregorian'))
+    time = iris.coords.AuxCoord(
+        [[20.0, 45.0]],
+        standard_name="time",
+        bounds=[[[15.0, 30.0], [30.0, 60.0]]],
+        units=Unit("days since 1950-01-01", calendar="gregorian"),
+    )
    return time
 
 
 def get_lon_coord():
     """Get longitude coordinate."""
-    lons = iris.coords.DimCoord([1.5, 2.5, 3.5],
-                                standard_name='longitude',
-                                long_name='longitude',
-                                bounds=[[1., 2.], [2., 3.], [3., 4.]],
-                                units='degrees_east')
+    lons = iris.coords.DimCoord(
+        [1.5, 2.5, 3.5],
+        standard_name="longitude",
+        long_name="longitude",
+        bounds=[[1.0, 2.0], [2.0, 3.0], [3.0, 4.0]],
+        units="degrees_east",
+    )
     return lons
 
 
@@ -1743,24 +1897,28 @@ def _make_cube():
     data2 = np.ma.ones((2, 1, 1, 3))
     time = get_1d_time()
-    zcoord = iris.coords.DimCoord([0.5],
-                                  standard_name='air_pressure',
-                                  long_name='air_pressure',
-                                  bounds=[[0., 2.5]],
-                                  units='Pa',
-                                  attributes={'positive': 'down'})
-    lats = iris.coords.DimCoord([1.5],
-                                standard_name='latitude',
-                                long_name='latitude',
-                                bounds=[[1., 2.]],
-                                units='degrees_north',
-                                coord_system=coord_sys)
+    zcoord = iris.coords.DimCoord(
+        [0.5],
+        standard_name="air_pressure",
+        long_name="air_pressure",
+        bounds=[[0.0, 2.5]],
+        units="Pa",
+        attributes={"positive": "down"},
+    )
+    lats = iris.coords.DimCoord(
+        [1.5],
+        standard_name="latitude",
+        long_name="latitude",
+        bounds=[[1.0, 2.0]],
+        units="degrees_north",
+        coord_system=coord_sys,
+    )
     lons = get_lon_coord()
     coords_spec4 = [(time, 0), (zcoord, 1), (lats, 2), (lons, 3)]
     cube1 = iris.cube.Cube(
         data2,
         dim_coords_and_dims=coords_spec4,
-        units='kg m-2 s-1',
+        units="kg m-2 s-1",
     )
     return cube1
 
@@ -1770,7 +1928,7 @@ def test_get_time_weights():
     cube = _make_cube()
     weights = get_time_weights(cube)
     assert isinstance(weights, np.ndarray)
-    assert weights.shape == (2, )
+    assert weights.shape == (2,)
     np.testing.assert_allclose(weights, [15.0, 30.0])
 
 
@@ -1780,20 +1938,19 @@ def test_get_time_weights_lazy():
     cube.data = cube.lazy_data().rechunk((1, 1, 1, 3))
     weights = get_time_weights(cube)
     assert isinstance(weights, da.Array)
-    assert weights.shape == (2, )
-    assert weights.chunks == ((1, 1), )
+    assert weights.shape == (2,)
+    assert weights.chunks == ((1, 1),)
     np.testing.assert_allclose(weights, [15.0, 30.0])
 
 
 def test_get_time_weights_0d_time():
     """Test ``get_time_weights`` for 0D time coordinate."""
     time = get_0d_time()
-    cube = iris.cube.Cube(0.0,
-                          var_name='x',
-                          units='K',
-                          aux_coords_and_dims=[(time, ())])
+    cube = iris.cube.Cube(
+        0.0, var_name="x", units="K", aux_coords_and_dims=[(time, ())]
+    )
     weights = get_time_weights(cube)
-    assert weights.shape == (1, )
+    assert weights.shape == (1,)
     np.testing.assert_allclose(weights, [30.0])
 
 
@@ -1801,25 +1958,26 @@ def test_get_time_weights_0d_time_1d_lon():
     """Test ``get_time_weights`` for 0D time and 1D longitude coordinate."""
     time = get_0d_time()
     lons = get_lon_coord()
-    cube = iris.cube.Cube([0.0, 0.0, 0.0],
-                          var_name='x',
-                          units='K',
-                          aux_coords_and_dims=[(time, ())],
-                          dim_coords_and_dims=[(lons, 0)])
+    cube = iris.cube.Cube(
+        [0.0, 0.0, 0.0],
+        var_name="x",
+        units="K",
+        aux_coords_and_dims=[(time, ())],
+        dim_coords_and_dims=[(lons, 0)],
+    )
     weights = get_time_weights(cube)
-    assert weights.shape == (1, )
+    assert weights.shape == (1,)
     np.testing.assert_allclose(weights, [30.0])
 
 
 def test_get_time_weights_1d_time():
     """Test ``get_time_weights`` for 1D time coordinate."""
     time = get_1d_time()
-    cube = iris.cube.Cube([0.0, 1.0],
-                          var_name='x',
-                          units='K',
-                          dim_coords_and_dims=[(time, 0)])
+    cube = iris.cube.Cube(
+        [0.0, 1.0], var_name="x", units="K", dim_coords_and_dims=[(time, 0)]
+    )
     weights = get_time_weights(cube)
-    assert weights.shape == (2, )
+    assert weights.shape == (2,)
     np.testing.assert_allclose(weights, [15.0, 30.0])
 
 
@@ -1827,73 +1985,83 @@ def test_get_time_weights_1d_time_1d_lon():
     """Test ``get_time_weights`` for 1D time and 1D longitude coordinate."""
     time = get_1d_time()
     lons = get_lon_coord()
-    cube = iris.cube.Cube([[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]],
-                          var_name='x',
-                          units='K',
-                          dim_coords_and_dims=[(time, 0), (lons, 1)])
+    cube = iris.cube.Cube(
+        [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]],
+        var_name="x",
+        units="K",
+        dim_coords_and_dims=[(time, 0), (lons, 1)],
+    )
     weights = get_time_weights(cube)
-    assert weights.shape == (2, )
+    assert weights.shape == (2,)
     np.testing.assert_allclose(weights, [15.0, 30.0])
 
 
 def test_get_time_weights_2d_time():
     """Test ``get_time_weights`` for 1D time coordinate."""
     time = get_2d_time()
-    cube = iris.cube.Cube([[0.0, 1.0]],
-                          var_name='x',
-                          units='K',
-                          aux_coords_and_dims=[(time, (0, 1))])
+    cube = iris.cube.Cube(
+        [[0.0, 1.0]],
+        var_name="x",
+        units="K",
+        aux_coords_and_dims=[(time, (0, 1))],
+    )
     with pytest.raises(ValueError):
         get_time_weights(cube)
 
 
 def test_climate_statistics_0d_time_1d_lon():
     """Test climate statistics."""
-    time = iris.coords.DimCoord([1.0],
-                                bounds=[[0.0, 2.0]],
-                                var_name='time',
-                                standard_name='time',
-                                units='days since 1850-01-01 00:00:00')
+    time = iris.coords.DimCoord(
+        [1.0],
+        bounds=[[0.0, 2.0]],
+        var_name="time",
+        standard_name="time",
+        units="days since 1850-01-01 00:00:00",
+    )
     lons = get_lon_coord()
-    cube = iris.cube.Cube([[1.0, -1.0, 42.0]],
-                          var_name='x',
-                          units='K day-1',
-                          dim_coords_and_dims=[(time, 0), (lons, 1)])
-    new_cube = climate_statistics(cube, operator='sum', period='full')
+    cube = iris.cube.Cube(
+        [[1.0, -1.0, 42.0]],
+        var_name="x",
+        units="K day-1",
+        dim_coords_and_dims=[(time, 0), (lons, 1)],
+    )
+    new_cube = climate_statistics(cube, operator="sum", period="full")
     assert cube.shape == (1, 3)
-    assert new_cube.shape == (3, )
+    assert new_cube.shape == (3,)
     np.testing.assert_allclose(new_cube.data, [2.0, -2.0, 84.0])
-    assert new_cube.units == 'K'
+    assert new_cube.units == "K"
 
 
 def test_climate_statistics_complex_cube_sum():
     """Test climate statistics."""
     cube = _make_cube()
-    new_cube = climate_statistics(cube, operator='sum', period='full')
+    new_cube = climate_statistics(cube, operator="sum", period="full")
     assert cube.shape == (2, 1, 1, 3)
     assert new_cube.shape == (1, 1, 3)
     np.testing.assert_allclose(new_cube.data, [[[45.0, 45.0, 45.0]]])
-    assert new_cube.units == '86400 kg m-2'
+    assert new_cube.units == "86400 kg m-2"
 
 
 def test_climate_statistics_complex_cube_mean():
     """Test climate statistics."""
     cube = _make_cube()
-    new_cube = climate_statistics(cube, operator='mean', period='full')
+    new_cube = climate_statistics(cube, operator="mean", period="full")
     assert cube.shape == (2, 1, 1, 3)
     assert new_cube.shape == (1, 1, 3)
     np.testing.assert_allclose(new_cube.data, [[[1.0, 1.0, 1.0]]])
-    assert new_cube.units == 'kg m-2 s-1'
+    assert new_cube.units == "kg m-2 s-1"
 
 
 class TestResampleHours(tests.Test):
     """Test :func:`esmvalcore.preprocessor._time.resample_hours`."""
+
     @staticmethod
     def _create_cube(data, times):
-        time = iris.coords.DimCoord(times,
-                                    standard_name='time',
-                                    units=Unit('hours since 1950-01-01',
-                                               calendar='360_day'))
+        time = iris.coords.DimCoord(
+            times,
+            standard_name="time",
+            units=Unit("hours since 1950-01-01", calendar="360_day"),
+        )
         time.guess_bounds()
         cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
         return cube
@@ -1990,9 +2158,9 @@ def test_resample_interpolate_linear(self):
         times = np.array([6, 18])
         cube = self._create_cube(data, times)
 
-        result = resample_hours(cube, interval=12, interpolate='linear')
+        result = resample_hours(cube, interval=12, interpolate="linear")
         assert_array_equal(result.data, [0.5, 1.5])
-        assert_array_equal(result.coord('time').points, [0, 12])
+        assert_array_equal(result.coord("time").points, [0, 12])
 
     def test_resample_interpolate_nearest(self):
         """Test ``resample_hours``."""
@@ -2001,10 +2169,10 @@ def test_resample_interpolate_nearest(self):
         cube = self._create_cube(data, times)
 
         result = resample_hours(
-            cube, interval=12, offset=1, interpolate='nearest'
+            cube, interval=12, offset=1, interpolate="nearest"
         )
         assert_array_equal(result.data, [1, 2])
-        assert_array_equal(result.coord('time').points, [1, 13])
+        assert_array_equal(result.coord("time").points, [1, 13])
 
     def test_resample_invalid_interpolation(self):
         """Test ``resample_hours``."""
@@ -2013,17 +2181,19 @@ def test_resample_invalid_interpolation(self):
         cube = self._create_cube(data, times)
 
         with self.assertRaises(ValueError):
-            resample_hours(cube, interval=1, interpolate='invalid')
+            resample_hours(cube, interval=1, interpolate="invalid")
 
 
 class TestResampleTime(tests.Test):
     """Test :func:`esmvalcore.preprocessor._time.resample_time`."""
+
     @staticmethod
     def _create_cube(data, times):
-        time = iris.coords.DimCoord(times,
-                                    standard_name='time',
-                                    units=Unit('hours since 1950-01-01',
-                                               calendar='360_day'))
+        time = iris.coords.DimCoord(
+            times,
+            standard_name="time",
+            units=Unit("hours since 1950-01-01", calendar="360_day"),
+        )
         time.guess_bounds()
         cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
         return cube
@@ -2066,10 +2236,12 @@ def test_resample_daily_to_monthly(self):
         cube = self._create_cube(data, times)
 
         result = resample_time(cube, day=15)
-        expected = np.array([
-            14 * 24,
-            44 * 24,
-        ])
+        expected = np.array(
+            [
+                14 * 24,
+                44 * 24,
+            ]
+        )
         assert_array_equal(result.data, expected)
 
     def test_resample_fails(self):
@@ -2092,5 +2264,5 @@ def test_resample_fails_scalar(self):
             resample_time(cube, day=16)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/tests/unit/preprocessor/_trend/test_trend.py b/tests/unit/preprocessor/_trend/test_trend.py
index 87d7db1635..4171b5da15 100644
--- a/tests/unit/preprocessor/_trend/test_trend.py
+++ b/tests/unit/preprocessor/_trend/test_trend.py
@@ -1,4 +1,5 @@
 """Unit tests for :mod:`esmvalcore.preprocessor._trend`."""
+
 import dask.array as da
 import iris
 import iris.coord_categorisation
@@ -23,26 +24,35 @@ def assert_masked_array_equal(arr_1, arr_2):
 
 def get_cube(times=None, time_units=None):
     """Create cube."""
-    lats = iris.coords.DimCoord([0.0, 20.0], standard_name='latitude',
-                                units='m')
-    lons = iris.coords.DimCoord([500.0, 600.0], standard_name='longitude',
-                                units='m')
-    aux_coord = iris.coords.AuxCoord([0.0, 0.0], var_name='aux')
+    lats = iris.coords.DimCoord(
+        [0.0, 20.0], standard_name="latitude", units="m"
+    )
+    lons = iris.coords.DimCoord(
+        [500.0, 600.0], standard_name="longitude", units="m"
+    )
+    aux_coord = iris.coords.AuxCoord([0.0, 0.0], var_name="aux")
     if times is None:
-        cube = iris.cube.Cube([[1.0, 2.0], [3.0, 4.0]], var_name='x',
-                              long_name='X', units='kg',
-                              dim_coords_and_dims=[(lats, 0), (lons, 1)],
-                              aux_coords_and_dims=[(aux_coord, 0)])
+        cube = iris.cube.Cube(
+            [[1.0, 2.0], [3.0, 4.0]],
+            var_name="x",
+            long_name="X",
+            units="kg",
+            dim_coords_and_dims=[(lats, 0), (lons, 1)],
+            aux_coords_and_dims=[(aux_coord, 0)],
+        )
         return cube
     if time_units is None:
-        time_units = Unit('days since 1850-01-01 00:00:00')
-    times = iris.coords.DimCoord(times, standard_name='time', units=time_units)
+        time_units = Unit("days since 1850-01-01 00:00:00")
+    times = iris.coords.DimCoord(times, standard_name="time", units=time_units)
     cube_data = np.arange(4 * times.shape[0]).reshape(times.shape[0], 2, 2)
-    cube = iris.cube.Cube(cube_data.astype('float32'), var_name='x',
-                          long_name='X', units='kg',
-                          dim_coords_and_dims=[(times, 0), (lats, 1),
-                                               (lons, 2)],
-                          aux_coords_and_dims=[(aux_coord, 1)])
+    cube = iris.cube.Cube(
+        cube_data.astype("float32"),
+        var_name="x",
+        long_name="X",
+        units="kg",
+        dim_coords_and_dims=[(times, 0), (lats, 1), (lons, 2)],
+        aux_coords_and_dims=[(aux_coord, 1)],
+    )
     return cube
 
 
@@ -68,35 +78,37 @@ def cube_3_time():
 @pytest.fixture
 def cube_3_time_years():
     """Cube with three years."""
-    return get_cube(times=[0.0, 1.0, 2.0], time_units='year')
+    return get_cube(times=[0.0, 1.0, 2.0], time_units="year")
 
 
 def test_linear_trend_coord_not_found(cube_no_time):
     """Test calculation of linear trend when dimension is not available."""
     with pytest.raises(iris.exceptions.CoordinateNotFoundError) as err:
         linear_trend(cube_no_time)
-    assert 'time' in str(err.value)
+    assert "time" in str(err.value)
     with pytest.raises(iris.exceptions.CoordinateNotFoundError) as err:
-        linear_trend(cube_no_time, coordinate='time')
-    assert 'time' in str(err.value)
+        linear_trend(cube_no_time, coordinate="time")
+    assert "time" in str(err.value)
     with pytest.raises(iris.exceptions.CoordinateNotFoundError) as err:
-        linear_trend(cube_no_time, coordinate='aux')
-    assert 'aux' in str(err.value)
+        linear_trend(cube_no_time, coordinate="aux")
+    assert "aux" in str(err.value)
 
 
 def test_linear_trend_1_time(cube_1_time):
     """Test calculation of linear trend with single time point."""
     cube_trend = linear_trend(cube_1_time)
     assert cube_trend.shape == (2, 2)
-    assert_masked_array_equal(cube_trend.data,
-                              np.ma.masked_equal([[0.0, 0.0], [0.0, 0.0]],
-                                                 0.0))
-    assert not cube_trend.coords('time', dim_coords=True)
-    assert cube_trend.coords('latitude', dim_coords=True)
-    assert cube_trend.coords('longitude', dim_coords=True)
-    assert cube_trend.units == 'kg day-1'
-    assert (iris.coords.CellMethod('trend', coords=('time',)) in
-            cube_trend.cell_methods)
+    assert_masked_array_equal(
+        cube_trend.data, np.ma.masked_equal([[0.0, 0.0], [0.0, 0.0]], 0.0)
+    )
+    assert not cube_trend.coords("time", dim_coords=True)
+    assert cube_trend.coords("latitude", dim_coords=True)
+    assert cube_trend.coords("longitude", dim_coords=True)
+    assert cube_trend.units == "kg day-1"
+    assert (
+        iris.coords.CellMethod("trend", coords=("time",))
+        in cube_trend.cell_methods
+    )
 
 
 def test_linear_trend_3_time(cube_3_time):
@@ -105,12 +117,14 @@ def test_linear_trend_3_time(cube_3_time):
     cube_trend = linear_trend(cube_3_time)
     assert cube_trend.shape == (2, 2)
     assert_masked_array_equal(cube_trend.data, [[3.5, 4.0], [4.0, 4.0]])
-    assert not cube_trend.coords('time', dim_coords=True)
-    assert cube_trend.coords('latitude', dim_coords=True)
-    assert cube_trend.coords('longitude', dim_coords=True)
-    assert cube_trend.units == 'kg day-1'
-    assert (iris.coords.CellMethod('trend', coords=('time',)) in
-            cube_trend.cell_methods)
+    assert not cube_trend.coords("time", dim_coords=True)
+    assert cube_trend.coords("latitude", dim_coords=True)
+    assert cube_trend.coords("longitude", dim_coords=True)
+    assert cube_trend.units == "kg day-1"
+    assert (
+        iris.coords.CellMethod("trend", coords=("time",))
+        in cube_trend.cell_methods
+    )
 
 
 def test_linear_trend_3_time_lazy(cube_3_time):
@@ -120,12 +134,14 @@ def test_linear_trend_3_time_lazy(cube_3_time):
     cube_trend = linear_trend(cube_3_time)
     assert cube_trend.shape == (2, 2)
     assert_masked_array_equal(cube_trend.data, [[-8.0, -8.0], [-8.0, -8.0]])
-    assert not cube_trend.coords('time', dim_coords=True)
-    assert cube_trend.coords('latitude', dim_coords=True)
-    assert cube_trend.coords('longitude', dim_coords=True)
-    assert cube_trend.units == 'kg day-1'
-    assert (iris.coords.CellMethod('trend', coords=('time',)) in
-            cube_trend.cell_methods)
+    assert not cube_trend.coords("time", dim_coords=True)
+    assert cube_trend.coords("latitude", dim_coords=True)
+    assert cube_trend.coords("longitude", dim_coords=True)
+    assert cube_trend.units == "kg day-1"
+    assert (
+        iris.coords.CellMethod("trend", coords=("time",))
+        in cube_trend.cell_methods
+    )
 
 
 def test_linear_trend_3_time_no_metadata(cube_3_time):
@@ -134,46 +150,56 @@ def test_linear_trend_3_time_no_metadata(cube_3_time):
     cube_trend = linear_trend(cube_3_time)
     assert cube_trend.shape == (2, 2)
     assert_masked_array_equal(cube_trend.data, [[4.0, 4.0], [4.0, 4.0]])
-    assert cube_trend.units == Unit('unknown')
-    assert (iris.coords.CellMethod('trend', coords=('time',)) in
-            cube_trend.cell_methods)
+    assert cube_trend.units == Unit("unknown")
+    assert (
+        iris.coords.CellMethod("trend", coords=("time",))
+        in cube_trend.cell_methods
+    )
 
     # Cube with unknown units
-    cube_3_time.units = Unit('unknown')
+    cube_3_time.units = Unit("unknown")
     cube_trend = linear_trend(cube_3_time)
     assert cube_trend.shape == (2, 2)
     assert_masked_array_equal(cube_trend.data, [[4.0, 4.0], [4.0, 4.0]])
-    assert cube_trend.units == Unit('unknown')
-    assert (iris.coords.CellMethod('trend', coords=('time',)) in
-            cube_trend.cell_methods)
+    assert cube_trend.units == Unit("unknown")
+    assert (
+        iris.coords.CellMethod("trend", coords=("time",))
+        in cube_trend.cell_methods
+    )
 
     # Cube with no units
-    cube_3_time.units = Unit('no unit')
+    cube_3_time.units = Unit("no unit")
     cube_trend = linear_trend(cube_3_time)
     assert cube_trend.shape == (2, 2)
     assert_masked_array_equal(cube_trend.data, [[4.0, 4.0], [4.0, 4.0]])
-    assert cube_trend.units == Unit('no unit')
-    assert (iris.coords.CellMethod('trend', coords=('time',)) in
-            cube_trend.cell_methods)
+    assert cube_trend.units == Unit("no unit")
+    assert (
+        iris.coords.CellMethod("trend", coords=("time",))
+        in cube_trend.cell_methods
+    )
 
     # Time with unknown units
-    cube_3_time.units = 'kg'
-    cube_3_time.coord('time').units = Unit('unknown')
+    cube_3_time.units = "kg"
+    cube_3_time.coord("time").units = Unit("unknown")
     cube_trend = linear_trend(cube_3_time)
     assert cube_trend.shape == (2, 2)
     assert_masked_array_equal(cube_trend.data, [[4.0, 4.0], [4.0, 4.0]])
-    assert cube_trend.units == Unit('unknown')
-    assert (iris.coords.CellMethod('trend', coords=('time',)) in
-            cube_trend.cell_methods)
+    assert cube_trend.units == Unit("unknown")
+    assert (
+        iris.coords.CellMethod("trend", coords=("time",))
+        in cube_trend.cell_methods
+    )
 
     # Time with no units
-    cube_3_time.coord('time').units = Unit('no unit')
+    cube_3_time.coord("time").units = Unit("no unit")
     cube_trend = linear_trend(cube_3_time)
     assert cube_trend.shape == (2, 2)
     assert_masked_array_equal(cube_trend.data, [[4.0, 4.0], [4.0, 4.0]])
-    assert cube_trend.units == Unit('kg')
-    assert (iris.coords.CellMethod('trend', coords=('time',)) in
-            cube_trend.cell_methods)
+    assert cube_trend.units == Unit("kg")
+    assert (
+        iris.coords.CellMethod("trend", coords=("time",))
+        in cube_trend.cell_methods
+    )
 
 
 def test_linear_trend_3_time_years(cube_3_time_years):
@@ -181,69 +207,81 @@ def test_linear_trend_3_time_years(cube_3_time_years):
     cube_trend = linear_trend(cube_3_time_years)
     assert cube_trend.shape == (2, 2)
     assert_masked_array_equal(cube_trend.data, [[4.0, 4.0], [4.0, 4.0]])
-    assert cube_trend.units == 'kg yr-1'
-    assert (iris.coords.CellMethod('trend', coords=('time',)) in
-            cube_trend.cell_methods)
+    assert cube_trend.units == "kg yr-1"
+    assert (
+        iris.coords.CellMethod("trend", coords=("time",))
+        in cube_trend.cell_methods
+    )
 
 
 def test_linear_trend_latitude(cube_3_time):
     """Test calculation of linear trend along latitude coordinate."""
     cube_3_time.data[0, 0, 0] = np.nan
     cube_3_time.data = np.ma.masked_invalid(cube_3_time.data)
-    cube_trend = linear_trend(cube_3_time, coordinate='latitude')
+    cube_trend = linear_trend(cube_3_time, coordinate="latitude")
     assert cube_trend.shape == (3, 2)
-    assert_masked_array_equal(cube_trend.data, np.ma.masked_invalid(
-        [[np.nan, 0.1], [0.1, 0.1], [0.1, 0.1]]))
-    assert cube_trend.coords('time', dim_coords=True)
-    assert not cube_trend.coords('latitude', dim_coords=True)
-    assert cube_trend.coords('longitude', dim_coords=True)
-    assert cube_trend.units == 'kg m-1'
-    assert (iris.coords.CellMethod('trend', coords=('latitude',)) in
-            cube_trend.cell_methods)
+    assert_masked_array_equal(
+        cube_trend.data,
+        np.ma.masked_invalid([[np.nan, 0.1], [0.1, 0.1], [0.1, 0.1]]),
+    )
+    assert cube_trend.coords("time", dim_coords=True)
+    assert not cube_trend.coords("latitude", dim_coords=True)
+    assert cube_trend.coords("longitude", dim_coords=True)
+    assert cube_trend.units == "kg m-1"
+    assert (
+        iris.coords.CellMethod("trend", coords=("latitude",))
+        in cube_trend.cell_methods
+    )
 
 
 def test_linear_trend_longitude(cube_3_time):
     """Test calculation of linear trend along longitude coordinate."""
     cube_3_time.data[1, 0, 0] = np.nan
     cube_3_time.data = np.ma.masked_invalid(cube_3_time.data)
-    cube_trend = linear_trend(cube_3_time, coordinate='longitude')
+    cube_trend = linear_trend(cube_3_time, coordinate="longitude")
     assert cube_trend.shape == (3, 2)
-    assert_masked_array_equal(cube_trend.data, np.ma.masked_invalid(
-        [[0.01, 0.01], [np.nan, 0.01], [0.01, 0.01]]))
-    assert cube_trend.coords('time', dim_coords=True)
-    assert cube_trend.coords('latitude', dim_coords=True)
-    assert not cube_trend.coords('longitude', dim_coords=True)
-    assert cube_trend.units == 'kg m-1'
-    assert (iris.coords.CellMethod('trend', coords=('longitude',)) in
-            cube_trend.cell_methods)
+    assert_masked_array_equal(
+        cube_trend.data,
+        np.ma.masked_invalid([[0.01, 0.01], [np.nan, 0.01], [0.01, 0.01]]),
+    )
+    assert cube_trend.coords("time", dim_coords=True)
+    assert cube_trend.coords("latitude", dim_coords=True)
+    assert not cube_trend.coords("longitude", dim_coords=True)
+    assert cube_trend.units == "kg m-1"
+    assert (
+        iris.coords.CellMethod("trend", coords=("longitude",))
+        in cube_trend.cell_methods
+    )
 
 
 def test_linear_trend_stderr_coord_not_found(cube_no_time):
     """Test calculation of trend stderr when dimension is not available."""
     with pytest.raises(iris.exceptions.CoordinateNotFoundError) as err:
         linear_trend_stderr(cube_no_time)
-    assert 'time' in str(err.value)
+    assert "time" in str(err.value)
     with pytest.raises(iris.exceptions.CoordinateNotFoundError) as err:
-        linear_trend_stderr(cube_no_time, coordinate='time')
-    assert 'time' in str(err.value)
+        linear_trend_stderr(cube_no_time, coordinate="time")
+    assert "time" in str(err.value)
     with pytest.raises(iris.exceptions.CoordinateNotFoundError) as err:
-        linear_trend_stderr(cube_no_time, coordinate='aux')
-    assert 'aux' in str(err.value)
+        linear_trend_stderr(cube_no_time, coordinate="aux")
+    assert "aux" in str(err.value)
 
 
 def test_linear_trend_stderr_1_time(cube_1_time):
     """Test calculation of trend stderr with single time point."""
     cube_stderr = linear_trend_stderr(cube_1_time)
     assert cube_stderr.shape == (2, 2)
-    assert_masked_array_equal(cube_stderr.data,
-                              np.ma.masked_equal([[0.0, 0.0], [0.0, 0.0]],
-                                                 0.0))
-    assert not cube_stderr.coords('time', dim_coords=True)
-    assert cube_stderr.coords('latitude', dim_coords=True)
-    assert cube_stderr.coords('longitude', dim_coords=True)
-    assert cube_stderr.units == 'kg day-1'
-    assert (iris.coords.CellMethod('trend_stderr', coords=('time',)) in
-            cube_stderr.cell_methods)
+    assert_masked_array_equal(
+        cube_stderr.data, np.ma.masked_equal([[0.0, 0.0], [0.0, 0.0]], 0.0)
+    )
+    assert not cube_stderr.coords("time", dim_coords=True)
+    assert cube_stderr.coords("latitude", dim_coords=True)
+    assert cube_stderr.coords("longitude", dim_coords=True)
+    assert cube_stderr.units == "kg day-1"
+    assert (
+        iris.coords.CellMethod("trend_stderr", coords=("time",))
+        in cube_stderr.cell_methods
+    )
 
 
 def test_linear_trend_stderr_3_time(cube_3_time):
@@ -251,32 +289,42 @@ def test_linear_trend_stderr_3_time(cube_3_time):
     cube_3_time.data[0, 0, 0] = 1.0
     cube_stderr = linear_trend_stderr(cube_3_time)
     assert cube_stderr.shape == (2, 2)
-    assert_masked_array_equal(cube_stderr.data,
-                              [[0.28867513459482086, 0.0], [0.0, 0.0]])
-    assert not cube_stderr.coords('time', dim_coords=True)
-
assert cube_stderr.coords('latitude', dim_coords=True) - assert cube_stderr.coords('longitude', dim_coords=True) - assert cube_stderr.units == 'kg day-1' - assert (iris.coords.CellMethod('trend_stderr', coords=('time',)) in - cube_stderr.cell_methods) + assert_masked_array_equal( + cube_stderr.data, [[0.28867513459482086, 0.0], [0.0, 0.0]] + ) + assert not cube_stderr.coords("time", dim_coords=True) + assert cube_stderr.coords("latitude", dim_coords=True) + assert cube_stderr.coords("longitude", dim_coords=True) + assert cube_stderr.units == "kg day-1" + assert ( + iris.coords.CellMethod("trend_stderr", coords=("time",)) + in cube_stderr.cell_methods + ) def test_linear_trend_stderr_3_time_lazy(cube_3_time): """Test lazy calculation of trend stderr with three time points.""" - cube_3_time.data = da.array([[[1.0, 1.0], [2.0, 3.0]], - [[4.0, 5.0], [6.0, 7.0]], - [[8.0, 9.0], [10.0, 11.0]]]) + cube_3_time.data = da.array( + [ + [[1.0, 1.0], [2.0, 3.0]], + [[4.0, 5.0], [6.0, 7.0]], + [[8.0, 9.0], [10.0, 11.0]], + ] + ) assert cube_3_time.has_lazy_data() cube_stderr = linear_trend_stderr(cube_3_time) assert cube_stderr.shape == (2, 2) - assert_masked_array_equal(cube_stderr.data, - [[0.28867513459482086, 0.0], [0.0, 0.0]]) - assert not cube_stderr.coords('time', dim_coords=True) - assert cube_stderr.coords('latitude', dim_coords=True) - assert cube_stderr.coords('longitude', dim_coords=True) - assert cube_stderr.units == 'kg day-1' - assert (iris.coords.CellMethod('trend_stderr', coords=('time',)) in - cube_stderr.cell_methods) + assert_masked_array_equal( + cube_stderr.data, [[0.28867513459482086, 0.0], [0.0, 0.0]] + ) + assert not cube_stderr.coords("time", dim_coords=True) + assert cube_stderr.coords("latitude", dim_coords=True) + assert cube_stderr.coords("longitude", dim_coords=True) + assert cube_stderr.units == "kg day-1" + assert ( + iris.coords.CellMethod("trend_stderr", coords=("time",)) + in cube_stderr.cell_methods + ) def test_linear_trend_stderr_3_time_no_metadata(cube_3_time): @@ -285,46 +333,56 @@ def test_linear_trend_stderr_3_time_no_metadata(cube_3_time): cube_stderr = linear_trend_stderr(cube_3_time) assert cube_stderr.shape == (2, 2) assert_masked_array_equal(cube_stderr.data, [[0.0, 0.0], [0.0, 0.0]]) - assert cube_stderr.units == Unit('unknown') - assert (iris.coords.CellMethod('trend_stderr', coords=('time',)) in - cube_stderr.cell_methods) + assert cube_stderr.units == Unit("unknown") + assert ( + iris.coords.CellMethod("trend_stderr", coords=("time",)) + in cube_stderr.cell_methods + ) # Cube with unknown units - cube_3_time.units = Unit('unknown') + cube_3_time.units = Unit("unknown") cube_stderr = linear_trend_stderr(cube_3_time) assert cube_stderr.shape == (2, 2) assert_masked_array_equal(cube_stderr.data, [[0.0, 0.0], [0.0, 0.0]]) - assert cube_stderr.units == Unit('unknown') - assert (iris.coords.CellMethod('trend_stderr', coords=('time',)) in - cube_stderr.cell_methods) + assert cube_stderr.units == Unit("unknown") + assert ( + iris.coords.CellMethod("trend_stderr", coords=("time",)) + in cube_stderr.cell_methods + ) # Cube with no units - cube_3_time.units = Unit('no unit') + cube_3_time.units = Unit("no unit") cube_stderr = linear_trend_stderr(cube_3_time) assert cube_stderr.shape == (2, 2) assert_masked_array_equal(cube_stderr.data, [[0.0, 0.0], [0.0, 0.0]]) - assert cube_stderr.units == Unit('no unit') - assert (iris.coords.CellMethod('trend_stderr', coords=('time',)) in - cube_stderr.cell_methods) + assert cube_stderr.units == Unit("no unit") + assert ( + 
iris.coords.CellMethod("trend_stderr", coords=("time",)) + in cube_stderr.cell_methods + ) # Time with unknown units - cube_3_time.units = 'kg' - cube_3_time.coord('time').units = Unit('unknown') + cube_3_time.units = "kg" + cube_3_time.coord("time").units = Unit("unknown") cube_stderr = linear_trend_stderr(cube_3_time) assert cube_stderr.shape == (2, 2) assert_masked_array_equal(cube_stderr.data, [[0.0, 0.0], [0.0, 0.0]]) - assert cube_stderr.units == Unit('unknown') - assert (iris.coords.CellMethod('trend_stderr', coords=('time',)) in - cube_stderr.cell_methods) + assert cube_stderr.units == Unit("unknown") + assert ( + iris.coords.CellMethod("trend_stderr", coords=("time",)) + in cube_stderr.cell_methods + ) # Time with no units - cube_3_time.coord('time').units = Unit('no unit') + cube_3_time.coord("time").units = Unit("no unit") cube_stderr = linear_trend_stderr(cube_3_time) assert cube_stderr.shape == (2, 2) assert_masked_array_equal(cube_stderr.data, [[0.0, 0.0], [0.0, 0.0]]) - assert cube_stderr.units == Unit('kg') - assert (iris.coords.CellMethod('trend_stderr', coords=('time',)) in - cube_stderr.cell_methods) + assert cube_stderr.units == Unit("kg") + assert ( + iris.coords.CellMethod("trend_stderr", coords=("time",)) + in cube_stderr.cell_methods + ) def test_linear_trend_stderr_3_time_years(cube_3_time_years): @@ -332,40 +390,51 @@ def test_linear_trend_stderr_3_time_years(cube_3_time_years): cube_3_time_years.data[1, 1, 1] = 1.0 cube_stderr = linear_trend_stderr(cube_3_time_years) assert cube_stderr.shape == (2, 2) - assert_masked_array_equal(cube_stderr.data, - [[0.0, 0.0], [0.0, 3.464101615137754]]) - assert cube_stderr.units == 'kg yr-1' - assert (iris.coords.CellMethod('trend_stderr', coords=('time',)) in - cube_stderr.cell_methods) + assert_masked_array_equal( + cube_stderr.data, [[0.0, 0.0], [0.0, 3.464101615137754]] + ) + assert cube_stderr.units == "kg yr-1" + assert ( + iris.coords.CellMethod("trend_stderr", coords=("time",)) + in cube_stderr.cell_methods + ) def test_linear_trend_stderr_latitude(cube_3_time): """Test calculation of trend stderr along latitude coordinate.""" cube_3_time.data[0, 0, 0] = np.nan cube_3_time.data = np.ma.masked_invalid(cube_3_time.data) - cube_stderr = linear_trend_stderr(cube_3_time, coordinate='latitude') + cube_stderr = linear_trend_stderr(cube_3_time, coordinate="latitude") assert cube_stderr.shape == (3, 2) - assert_masked_array_equal(cube_stderr.data, np.ma.masked_invalid( - [[np.nan, 0.0], [0.0, 0.0], [0.0, 0.0]])) - assert cube_stderr.coords('time', dim_coords=True) - assert not cube_stderr.coords('latitude', dim_coords=True) - assert cube_stderr.coords('longitude', dim_coords=True) - assert cube_stderr.units == 'kg m-1' - assert (iris.coords.CellMethod('trend_stderr', coords=('latitude',)) in - cube_stderr.cell_methods) + assert_masked_array_equal( + cube_stderr.data, + np.ma.masked_invalid([[np.nan, 0.0], [0.0, 0.0], [0.0, 0.0]]), + ) + assert cube_stderr.coords("time", dim_coords=True) + assert not cube_stderr.coords("latitude", dim_coords=True) + assert cube_stderr.coords("longitude", dim_coords=True) + assert cube_stderr.units == "kg m-1" + assert ( + iris.coords.CellMethod("trend_stderr", coords=("latitude",)) + in cube_stderr.cell_methods + ) def test_linear_trend_stderr_longitude(cube_3_time): """Test calculation of trend stderr along longitude coordinate.""" cube_3_time.data[1, 0, 0] = np.nan cube_3_time.data = np.ma.masked_invalid(cube_3_time.data) - cube_stderr = linear_trend_stderr(cube_3_time, coordinate='longitude') 
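# An aside on what these assertions encode: ``linear_trend`` (and its
# stderr counterpart) boil down to a per-grid-point least-squares fit
# against the chosen coordinate. A plain-NumPy sketch of that slope, with
# made-up values chosen to reproduce the 4.0 kg day-1 trend asserted
# earlier (an illustration only, not ESMValCore's implementation):
import numpy as np

time = np.array([1.0, 2.0, 3.0])    # days
series = np.array([1.0, 5.0, 9.0])  # kg, a single grid point
slope = np.polyfit(time, series, deg=1)[0]
print(slope)  # -> 4.0, i.e. a linear trend of 4.0 kg day-1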
+ cube_stderr = linear_trend_stderr(cube_3_time, coordinate="longitude") assert cube_stderr.shape == (3, 2) - assert_masked_array_equal(cube_stderr.data, np.ma.masked_invalid( - [[0.0, 0.0], [np.nan, 0.0], [0.0, 0.0]])) - assert cube_stderr.coords('time', dim_coords=True) - assert cube_stderr.coords('latitude', dim_coords=True) - assert not cube_stderr.coords('longitude', dim_coords=True) - assert cube_stderr.units == 'kg m-1' - assert (iris.coords.CellMethod('trend_stderr', coords=('longitude',)) in - cube_stderr.cell_methods) + assert_masked_array_equal( + cube_stderr.data, + np.ma.masked_invalid([[0.0, 0.0], [np.nan, 0.0], [0.0, 0.0]]), + ) + assert cube_stderr.coords("time", dim_coords=True) + assert cube_stderr.coords("latitude", dim_coords=True) + assert not cube_stderr.coords("longitude", dim_coords=True) + assert cube_stderr.units == "kg m-1" + assert ( + iris.coords.CellMethod("trend_stderr", coords=("longitude",)) + in cube_stderr.cell_methods + ) diff --git a/tests/unit/preprocessor/_units/test_convert_units.py b/tests/unit/preprocessor/_units/test_convert_units.py index c1a2a2453b..8fea071943 100644 --- a/tests/unit/preprocessor/_units/test_convert_units.py +++ b/tests/unit/preprocessor/_units/test_convert_units.py @@ -13,49 +13,54 @@ class TestConvertUnits(tests.Test): """Test class for _units.""" + def setUp(self): """Prepare tests.""" coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - self.data2 = np.array([[0., 1.], [2., 3.]]) - lons2 = iris.coords.DimCoord([1.5, 2.5], - standard_name='longitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_east', - coord_system=coord_sys) - lats2 = iris.coords.DimCoord([1.5, 2.5], - standard_name='latitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_north', - coord_system=coord_sys) + self.data2 = np.array([[0.0, 1.0], [2.0, 3.0]]) + lons2 = iris.coords.DimCoord( + [1.5, 2.5], + standard_name="longitude", + bounds=[[1.0, 2.0], [2.0, 3.0]], + units="degrees_east", + coord_system=coord_sys, + ) + lats2 = iris.coords.DimCoord( + [1.5, 2.5], + standard_name="latitude", + bounds=[[1.0, 2.0], [2.0, 3.0]], + units="degrees_north", + coord_system=coord_sys, + ) coords_spec3 = [(lats2, 0), (lons2, 1)] - self.arr = iris.cube.Cube(self.data2, - units='K', - dim_coords_and_dims=coords_spec3) + self.arr = iris.cube.Cube( + self.data2, units="K", dim_coords_and_dims=coords_spec3 + ) def test_convert_incompatible_units(self): """Test conversion to incompatible units.""" - self.assertRaises(ValueError, convert_units, self.arr, 'm') + self.assertRaises(ValueError, convert_units, self.arr, "m") def test_convert_compatible_units(self): """Test conversion to compatible units.""" - result = convert_units(self.arr, 'degC') + result = convert_units(self.arr, "degC") expected_data = np.array([[-273.15, -272.15], [-271.15, -270.15]]) - expected_units = cf_units.Unit('degC') + expected_units = cf_units.Unit("degC") self.assertEqual(result.units, expected_units) self.assert_array_equal(result.data, expected_data) def test_convert_ozone_content_m_to_du(self): """Test special conversion of ozone_content.""" self.arr.standard_name = ( - 'equivalent_thickness_at_stp_of_atmosphere_ozone_content' + "equivalent_thickness_at_stp_of_atmosphere_ozone_content" ) - self.arr.units = 'm' - result = convert_units(self.arr, 'DU') + self.arr.units = "m" + result = convert_units(self.arr, "DU") self.assertEqual( result.standard_name, - 'equivalent_thickness_at_stp_of_atmosphere_ozone_content', + 
"equivalent_thickness_at_stp_of_atmosphere_ozone_content", ) - self.assertEqual(result.units, 'DU') + self.assertEqual(result.units, "DU") np.testing.assert_allclose( result.data, [[0.0, 1e5], [2e5, 3e5]], @@ -64,15 +69,15 @@ def test_convert_ozone_content_m_to_du(self): def test_convert_ozone_content_du_to_m(self): """Test special conversion of ozone_content.""" self.arr.standard_name = ( - 'equivalent_thickness_at_stp_of_atmosphere_ozone_content' + "equivalent_thickness_at_stp_of_atmosphere_ozone_content" ) - self.arr.units = 'DU' - result = convert_units(self.arr, 'mm') + self.arr.units = "DU" + result = convert_units(self.arr, "mm") self.assertEqual( result.standard_name, - 'equivalent_thickness_at_stp_of_atmosphere_ozone_content', + "equivalent_thickness_at_stp_of_atmosphere_ozone_content", ) - self.assertEqual(result.units, 'mm') + self.assertEqual(result.units, "mm") np.testing.assert_allclose( result.data, [[0.0, 1e-2], [2e-2, 3e-2]], @@ -80,11 +85,11 @@ def test_convert_ozone_content_du_to_m(self): def test_convert_precipitation_flux(self): """Test special conversion of precipitation_flux.""" - self.arr.standard_name = 'precipitation_flux' - self.arr.units = 'kg m-2 s-1' - result = convert_units(self.arr, 'mm day-1') - self.assertEqual(result.standard_name, 'lwe_precipitation_rate') - self.assertEqual(result.units, 'mm day-1') + self.arr.standard_name = "precipitation_flux" + self.arr.units = "kg m-2 s-1" + result = convert_units(self.arr, "mm day-1") + self.assertEqual(result.standard_name, "lwe_precipitation_rate") + self.assertEqual(result.units, "mm day-1") np.testing.assert_allclose( result.data, [[0.0, 86400.0], [172800.0, 259200.0]], @@ -92,11 +97,11 @@ def test_convert_precipitation_flux(self): def test_convert_precipitation_flux_convertible(self): """Test special conversion of precipitation_flux.""" - self.arr.standard_name = 'precipitation_flux' - self.arr.units = 'g m-2 yr-1' - result = convert_units(self.arr, 'm yr-1') - self.assertEqual(result.standard_name, 'lwe_precipitation_rate') - self.assertEqual(result.units, 'm yr-1') + self.arr.standard_name = "precipitation_flux" + self.arr.units = "g m-2 yr-1" + result = convert_units(self.arr, "m yr-1") + self.assertEqual(result.standard_name, "lwe_precipitation_rate") + self.assertEqual(result.units, "m yr-1") np.testing.assert_allclose( result.data, [[0.0, 1.0e-6], [2.0e-6, 3.0e-6]], @@ -104,27 +109,27 @@ def test_convert_precipitation_flux_convertible(self): def test_convert_precipitation_flux_fail_invalid_name(self): """Test special conversion of precipitation_flux.""" - self.arr.units = 'kg m-2 s-1' - self.assertRaises(ValueError, convert_units, self.arr, 'mm day-1') + self.arr.units = "kg m-2 s-1" + self.assertRaises(ValueError, convert_units, self.arr, "mm day-1") def test_convert_precipitation_flux_fail_invalid_source_units(self): """Test special conversion of precipitation_flux.""" - self.arr.standard_name = 'precipitation_flux' - self.assertRaises(ValueError, convert_units, self.arr, 'mm day-1') + self.arr.standard_name = "precipitation_flux" + self.assertRaises(ValueError, convert_units, self.arr, "mm day-1") def test_convert_precipitation_flux_fail_invalid_target_units(self): """Test special conversion of precipitation_flux.""" - self.arr.standard_name = 'precipitation_flux' - self.arr.units = 'kg m-2 s-1' - self.assertRaises(ValueError, convert_units, self.arr, 'K') + self.arr.standard_name = "precipitation_flux" + self.arr.units = "kg m-2 s-1" + self.assertRaises(ValueError, convert_units, self.arr, "K") def 
test_convert_lwe_precipitation_rate(self): """Test special conversion of lwe_precipitation_rate.""" - self.arr.standard_name = 'lwe_precipitation_rate' - self.arr.units = 'mm s-1' - result = convert_units(self.arr, 'kg m-2 s-1') - self.assertEqual(result.standard_name, 'precipitation_flux') - self.assertEqual(result.units, 'kg m-2 s-1') + self.arr.standard_name = "lwe_precipitation_rate" + self.arr.units = "mm s-1" + result = convert_units(self.arr, "kg m-2 s-1") + self.assertEqual(result.standard_name, "precipitation_flux") + self.assertEqual(result.units, "kg m-2 s-1") np.testing.assert_allclose( result.data, [[0.0, 1.0], [2.0, 3.0]], @@ -132,11 +137,11 @@ def test_convert_lwe_precipitation_rate(self): def test_convert_lwe_precipitation_rate_convertible(self): """Test special conversion of lwe_precipitation_rate.""" - self.arr.standard_name = 'lwe_precipitation_rate' - self.arr.units = 'm yr-1' - result = convert_units(self.arr, 'g m-2 yr-1') - self.assertEqual(result.standard_name, 'precipitation_flux') - self.assertEqual(result.units, 'g m-2 yr-1') + self.arr.standard_name = "lwe_precipitation_rate" + self.arr.units = "m yr-1" + result = convert_units(self.arr, "g m-2 yr-1") + self.assertEqual(result.standard_name, "precipitation_flux") + self.assertEqual(result.units, "g m-2 yr-1") np.testing.assert_allclose( result.data, [[0.0, 1.0e6], [2.0e6, 3.0e6]], @@ -144,111 +149,113 @@ def test_convert_lwe_precipitation_rate_convertible(self): def test_convert_lwe_precipitation_rate_fail_invalid_name(self): """Test special conversion of lwe_precipitation_rate.""" - self.arr.units = 'mm s-1' - self.assertRaises(ValueError, convert_units, self.arr, 'kg m-2 s-1') + self.arr.units = "mm s-1" + self.assertRaises(ValueError, convert_units, self.arr, "kg m-2 s-1") def test_convert_lwe_precipitation_rate_fail_invalid_source_units(self): """Test special conversion of lwe_precipitation_rate.""" - self.arr.standard_name = 'lwe_precipitation_rate' - self.assertRaises(ValueError, convert_units, self.arr, 'kg m-2 s-1') + self.arr.standard_name = "lwe_precipitation_rate" + self.assertRaises(ValueError, convert_units, self.arr, "kg m-2 s-1") def test_convert_lwe_precipitation_rate_fail_invalid_target_units(self): """Test special conversion of lwe_precipitation_rate.""" - self.arr.standard_name = 'lwe_precipitation_rate' - self.arr.units = 'mm s-1' - self.assertRaises(ValueError, convert_units, self.arr, 'K') + self.arr.standard_name = "lwe_precipitation_rate" + self.arr.units = "mm s-1" + self.assertRaises(ValueError, convert_units, self.arr, "K") class TestFluxToTotal(tests.Test): """Test class for _units.""" + def setUp(self): """Prepare tests.""" data = np.arange(4) time = iris.coords.DimCoord( np.arange(1, 8, 2), - var_name='time', - standard_name='time', - bounds=np.array([np.arange(0, 8, 2), - np.arange(2, 9, 2)]).T, - units=cf_units.Unit('days since 1950-01-01', - calendar='gregorian')) + var_name="time", + standard_name="time", + bounds=np.array([np.arange(0, 8, 2), np.arange(2, 9, 2)]).T, + units=cf_units.Unit("days since 1950-01-01", calendar="gregorian"), + ) coords_spec = [ (time, 0), ] - self.cube = iris.cube.Cube(data, - units='kg day-1', - dim_coords_and_dims=coords_spec) + self.cube = iris.cube.Cube( + data, units="kg day-1", dim_coords_and_dims=coords_spec + ) def test_missing_coordinate(self): """Test error is raised if missing coordinate.""" self.assertRaises( - ValueError, - accumulate_coordinate, - self.cube, 'longitude') + ValueError, accumulate_coordinate, self.cube, "longitude" + ) def 
test_multidim_coordinate(self): """Test error is raised if coordinate is multidimensional.""" i_coord = iris.coords.DimCoord( [0, 1], - long_name='cell index along first dimension', - units='1',) + long_name="cell index along first dimension", + units="1", + ) j_coord = iris.coords.DimCoord( [0, 1], - long_name='cell index along second dimension', - units='1',) + long_name="cell index along second dimension", + units="1", + ) lat_coord = iris.coords.AuxCoord( [[-40.0, -20.0], [-20.0, 0.0]], - var_name='lat', - standard_name='latitude', - units='degrees_north',) + var_name="lat", + standard_name="latitude", + units="degrees_north", + ) lon_coord = iris.coords.AuxCoord( [[100.0, 140.0], [80.0, 100.0]], - var_name='lon', - standard_name='longitude', - units='degrees_east', - ) + var_name="lon", + standard_name="longitude", + units="degrees_east", + ) cube = iris.cube.Cube( np.ones((2, 2)), - var_name='tos', - long_name='sea_surface_temperature', - units='K', + var_name="tos", + long_name="sea_surface_temperature", + units="K", dim_coords_and_dims=[(j_coord, 0), (i_coord, 1)], aux_coords_and_dims=[(lat_coord, (0, 1)), (lon_coord, (0, 1))], ) self.assertRaises( - NotImplementedError, - accumulate_coordinate, cube, 'longitude') + NotImplementedError, accumulate_coordinate, cube, "longitude" + ) def test_flux_by_second(self): """Test conversion to compatible units.""" - self.cube.units = 'kg s-1' - result = accumulate_coordinate(self.cube, 'time') + self.cube.units = "kg s-1" + result = accumulate_coordinate(self.cube, "time") expected_data = np.array([0, 2, 4, 6]) * 24 * 3600 - expected_units = cf_units.Unit('kg') + expected_units = cf_units.Unit("kg") self.assertEqual(result.units, expected_units) self.assert_array_equal(result.data, expected_data) def test_flux_by_day(self): """Test conversion to compatible units.""" - result = accumulate_coordinate(self.cube, 'time') + result = accumulate_coordinate(self.cube, "time") expected_data = np.array([0, 2, 4, 6]) - expected_units = cf_units.Unit('kg') + expected_units = cf_units.Unit("kg") self.assertEqual(result.units, expected_units) self.assert_array_equal(result.data, expected_data) def test_flux_by_hour(self): """Test conversion to compatible units.""" - self.cube.units = 'kg hr-1' - result = accumulate_coordinate(self.cube, 'time') + self.cube.units = "kg hr-1" + result = accumulate_coordinate(self.cube, "time") expected_data = np.array([0, 2, 4, 6]) * 24 - expected_units = cf_units.Unit('kg') + expected_units = cf_units.Unit("kg") self.assertEqual(result.units, expected_units) self.assert_array_equal(result.data, expected_data) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/unit/preprocessor/_volume/test_volume.py b/tests/unit/preprocessor/_volume/test_volume.py index f055d2f7b2..682d42fd45 100644 --- a/tests/unit/preprocessor/_volume/test_volume.py +++ b/tests/unit/preprocessor/_volume/test_volume.py @@ -35,79 +35,95 @@ def setUp(self): mask3[0, 0, 0, 0] = True data3 = np.ma.array(data3, mask=mask3) - time = iris.coords.DimCoord([15, 45], - standard_name='time', - bounds=[[1., 30.], [30., 60.]], - units=Unit('days since 1950-01-01', - calendar='gregorian')) - time2 = iris.coords.DimCoord([1., 2., 3., 4.], - standard_name='time', - bounds=[ - [0.5, 1.5], - [1.5, 2.5], - [2.5, 3.5], - [3.5, 4.5], - ], - units=Unit('days since 1950-01-01', - calendar='gregorian')) - - zcoord = iris.coords.DimCoord([0.5, 5., 50.], - long_name='zcoord', - bounds=[[0., 2.5], [2.5, 25.], - [25., 250.]], - units='m', - 
attributes={'positive': 'down'}) - scoord = iris.coords.DimCoord([36., 36.5, 37.], - long_name='ocean_sigma_coordinate', - bounds=[[35.5, 36.25], [36.25, 36.75], - [36.75, 37.5]], - units='kg m-3', - attributes={'positive': 'down'}) - zcoord_nobounds = iris.coords.DimCoord([0.5, 5., 50.], - long_name='zcoord', - units='m', - attributes={'positive': 'down'}) + time = iris.coords.DimCoord( + [15, 45], + standard_name="time", + bounds=[[1.0, 30.0], [30.0, 60.0]], + units=Unit("days since 1950-01-01", calendar="gregorian"), + ) + time2 = iris.coords.DimCoord( + [1.0, 2.0, 3.0, 4.0], + standard_name="time", + bounds=[ + [0.5, 1.5], + [1.5, 2.5], + [2.5, 3.5], + [3.5, 4.5], + ], + units=Unit("days since 1950-01-01", calendar="gregorian"), + ) + + zcoord = iris.coords.DimCoord( + [0.5, 5.0, 50.0], + long_name="zcoord", + bounds=[[0.0, 2.5], [2.5, 25.0], [25.0, 250.0]], + units="m", + attributes={"positive": "down"}, + ) + scoord = iris.coords.DimCoord( + [36.0, 36.5, 37.0], + long_name="ocean_sigma_coordinate", + bounds=[[35.5, 36.25], [36.25, 36.75], [36.75, 37.5]], + units="kg m-3", + attributes={"positive": "down"}, + ) + zcoord_nobounds = iris.coords.DimCoord( + [0.5, 5.0, 50.0], + long_name="zcoord", + units="m", + attributes={"positive": "down"}, + ) zcoord_4d = iris.coords.AuxCoord( - np.broadcast_to([[[[0.5]], [[5.]], [[50.]]]], (2, 3, 2, 2)), - long_name='zcoord', + np.broadcast_to([[[[0.5]], [[5.0]], [[50.0]]]], (2, 3, 2, 2)), + long_name="zcoord", bounds=np.broadcast_to( - [[[[[0., 2.5]]], [[[2.5, 25.]]], [[[25., 250.]]]]], + [[[[[0.0, 2.5]]], [[[2.5, 25.0]]], [[[25.0, 250.0]]]]], (2, 3, 2, 2, 2), ), - units='m', - attributes={'positive': 'down'}, + units="m", + attributes={"positive": "down"}, ) zcoord_3d_invalid_bounds = iris.coords.AuxCoord( - np.broadcast_to([[[0.5]], [[5.]], [[50.]]], (3, 2, 2)), - long_name='zcoord', + np.broadcast_to([[[0.5]], [[5.0]], [[50.0]]], (3, 2, 2)), + long_name="zcoord", bounds=np.broadcast_to( - [[[[0., 2.5, 2.5, 3.]]], - [[[2.5, 25., 25., 30.]]], - [[[25., 250., 250., 300.]]]], + [ + [[[0.0, 2.5, 2.5, 3.0]]], + [[[2.5, 25.0, 25.0, 30.0]]], + [[[25.0, 250.0, 250.0, 300.0]]], + ], (3, 2, 2, 4), ), - units='m', - attributes={'positive': 'down'}, - ) - lons2 = iris.coords.DimCoord([1.5, 2.5], - standard_name='longitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_east', - coord_system=coord_sys) - lats2 = iris.coords.DimCoord([1.5, 2.5], - standard_name='latitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_north', - coord_system=coord_sys) - - lons2d = iris.coords.AuxCoord([[1.5, 2.5], [1.2, 2.7]], - standard_name='longitude', - units='degrees_east', - coord_system=coord_sys) - lats2d = iris.coords.AuxCoord([[1.5, 2.5], [1.2, 2.7]], - standard_name='latitude', - units='degrees_north', - coord_system=coord_sys) + units="m", + attributes={"positive": "down"}, + ) + lons2 = iris.coords.DimCoord( + [1.5, 2.5], + standard_name="longitude", + bounds=[[1.0, 2.0], [2.0, 3.0]], + units="degrees_east", + coord_system=coord_sys, + ) + lats2 = iris.coords.DimCoord( + [1.5, 2.5], + standard_name="latitude", + bounds=[[1.0, 2.0], [2.0, 3.0]], + units="degrees_north", + coord_system=coord_sys, + ) + + lons2d = iris.coords.AuxCoord( + [[1.5, 2.5], [1.2, 2.7]], + standard_name="longitude", + units="degrees_east", + coord_system=coord_sys, + ) + lats2d = iris.coords.AuxCoord( + [[1.5, 2.5], [1.2, 2.7]], + standard_name="latitude", + units="degrees_north", + coord_system=coord_sys, + ) coords_spec3 = [(zcoord, 0), (lats2, 1), (lons2, 2)] self.grid_3d = 
iris.cube.Cube(data1, dim_coords_and_dims=coords_spec3) @@ -116,336 +132,365 @@ def setUp(self): self.grid_4d = iris.cube.Cube( data2, dim_coords_and_dims=coords_spec4, - units='kg m-3', + units="kg m-3", ) self.grid_4d_lazy = self.grid_4d.copy() self.grid_4d_lazy.data = self.grid_4d_lazy.lazy_data().rechunk( - (1, 2, None, None)) + (1, 2, None, None) + ) coords_spec4_sigma = [(time, 0), (scoord, 1), (lats2, 2), (lons2, 3)] self.grid_4d_sigma_space = iris.cube.Cube( data2, dim_coords_and_dims=coords_spec4_sigma, - units='kg m-3', + units="kg m-3", ) coords_spec5 = [(time2, 0), (zcoord, 1), (lats2, 2), (lons2, 3)] self.grid_4d_2 = iris.cube.Cube( data3, dim_coords_and_dims=coords_spec5, - units='kg m-3', + units="kg m-3", ) self.grid_4d_z = iris.cube.Cube( data2, dim_coords_and_dims=[(time, 0), (lats2, 2), (lons2, 3)], aux_coords_and_dims=[(zcoord_4d, (0, 1, 2, 3))], - units='kg m-3', + units="kg m-3", ) self.grid_4d_znobounds = iris.cube.Cube( data2, dim_coords_and_dims=[ - (time, 0), (zcoord_nobounds, 1), (lats2, 2), (lons2, 3) + (time, 0), + (zcoord_nobounds, 1), + (lats2, 2), + (lons2, 3), ], - units='kg m-3', + units="kg m-3", ) self.grid_4d_irregular = iris.cube.Cube( data2, dim_coords_and_dims=[(time, 0), (zcoord, 1)], aux_coords_and_dims=[(lats2d, (2, 3)), (lons2d, (2, 3))], - units='kg m-3', + units="kg m-3", ) self.grid_invalid_z_bounds = iris.cube.Cube( data2, dim_coords_and_dims=[(time, 0), (lats2, 2), (lons2, 3)], aux_coords_and_dims=[(zcoord_3d_invalid_bounds, (1, 2, 3))], - units='kg m-3', + units="kg m-3", ) # allow iris to figure out the axis='z' coordinate - iris.util.guess_coord_axis(self.grid_3d.coord('zcoord')) - iris.util.guess_coord_axis(self.grid_4d.coord('zcoord')) - iris.util.guess_coord_axis(self.grid_4d_2.coord('zcoord')) - iris.util.guess_coord_axis(self.grid_4d_z.coord('zcoord')) + iris.util.guess_coord_axis(self.grid_3d.coord("zcoord")) + iris.util.guess_coord_axis(self.grid_4d.coord("zcoord")) + iris.util.guess_coord_axis(self.grid_4d_2.coord("zcoord")) + iris.util.guess_coord_axis(self.grid_4d_z.coord("zcoord")) def test_add_axis_stats_weights_coord(self): """Test _add_axis_stats_weights_coord.""" - assert not self.grid_4d.coords('_axis_statistics_weights_') - coord = self.grid_4d.coord('zcoord') - coord_dims = self.grid_4d.coord_dims('zcoord') + assert not self.grid_4d.coords("_axis_statistics_weights_") + coord = self.grid_4d.coord("zcoord") + coord_dims = self.grid_4d.coord_dims("zcoord") _add_axis_stats_weights_coord(self.grid_4d, coord, coord_dims) - weights_coord = self.grid_4d.coord('_axis_statistics_weights_') + weights_coord = self.grid_4d.coord("_axis_statistics_weights_") assert not weights_coord.has_lazy_points() - assert weights_coord.units == 'm' + assert weights_coord.units == "m" np.testing.assert_allclose(weights_coord.points, [2.5, 22.5, 225.0]) def test_add_axis_stats_weights_coord_lazy(self): """Test _add_axis_stats_weights_coord.""" self.grid_4d.data = self.grid_4d.lazy_data() - assert not self.grid_4d.coords('_axis_statistics_weights_') - coord = self.grid_4d.coord('zcoord') - coord_dims = self.grid_4d.coord_dims('zcoord') + assert not self.grid_4d.coords("_axis_statistics_weights_") + coord = self.grid_4d.coord("zcoord") + coord_dims = self.grid_4d.coord_dims("zcoord") _add_axis_stats_weights_coord(self.grid_4d, coord, coord_dims) - weights_coord = self.grid_4d.coord('_axis_statistics_weights_') + weights_coord = self.grid_4d.coord("_axis_statistics_weights_") assert weights_coord.has_lazy_points() - assert weights_coord.units == 'm' + 
assert weights_coord.units == "m" np.testing.assert_allclose(weights_coord.points, [2.5, 22.5, 225.0]) def test_axis_statistics_mean(self): """Test axis statistics with operator mean.""" data = np.ma.arange(1, 25).reshape(2, 3, 2, 2) self.grid_4d.data = data - result = axis_statistics(self.grid_4d, 'z', 'mean') - bounds = self.grid_4d.coord(axis='z').bounds - weights = (bounds[:, 1] - bounds[:, 0]) + result = axis_statistics(self.grid_4d, "z", "mean") + bounds = self.grid_4d.coord(axis="z").bounds + weights = bounds[:, 1] - bounds[:, 0] expected = np.average(data, axis=1, weights=weights) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') - self.assertFalse(self.grid_4d.coords('_axis_statistics_weights_')) - self.assertFalse(result.coords('_axis_statistics_weights_')) + self.assertEqual(result.units, "kg m-3") + self.assertFalse(self.grid_4d.coords("_axis_statistics_weights_")) + self.assertFalse(result.coords("_axis_statistics_weights_")) def test_axis_statistics_median(self): """Test axis statistics with operator median.""" data = np.ma.arange(1, 25).reshape(2, 3, 2, 2) self.grid_4d.data = data - result = axis_statistics(self.grid_4d, 'z', 'median') + result = axis_statistics(self.grid_4d, "z", "median") expected = np.median(data, axis=1) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_axis_statistics_min(self): """Test axis statistics with operator min.""" data = np.ma.arange(1, 25).reshape(2, 3, 2, 2) self.grid_4d.data = data - result = axis_statistics(self.grid_4d, 'z', 'min') + result = axis_statistics(self.grid_4d, "z", "min") expected = np.min(data, axis=1) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_axis_statistics_max(self): """Test axis statistics with operator max.""" data = np.ma.arange(1, 25).reshape(2, 3, 2, 2) self.grid_4d.data = data - result = axis_statistics(self.grid_4d, 'z', 'max') + result = axis_statistics(self.grid_4d, "z", "max") expected = np.max(data, axis=1) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_axis_statistics_rms(self): """Test axis statistics with operator rms.""" - result = axis_statistics(self.grid_4d, 'z', 'rms') + result = axis_statistics(self.grid_4d, "z", "rms") expected = np.ma.ones((2, 2, 2)) self.assert_array_equal(result.data, expected) def test_axis_statistics_std(self): """Test axis statistics with operator std_dev.""" - result = axis_statistics(self.grid_4d, 'z', 'std_dev') + result = axis_statistics(self.grid_4d, "z", "std_dev") expected = np.ma.zeros((2, 2, 2)) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_axis_statistics_variance(self): """Test axis statistics with operator variance.""" - result = axis_statistics(self.grid_4d, 'z', 'variance') + result = axis_statistics(self.grid_4d, "z", "variance") expected = np.ma.zeros((2, 2, 2)) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg2 m-6') + self.assertEqual(result.units, "kg2 m-6") def test_axis_statistics_sum(self): """Test axis statistics with operator sum.""" - result = axis_statistics(self.grid_4d, 'z', 'sum') + result = axis_statistics(self.grid_4d, "z", "sum") expected = np.ma.ones((2, 2, 2)) * 250
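# The weights asserted in this class ([2.5, 22.5, 225.0]) are nothing more
# than the bound widths of ``zcoord``, and the weighted mean the tests
# recompute by hand follows directly. A standalone restatement of that
# arithmetic:
import numpy as np

bounds = np.array([[0.0, 2.5], [2.5, 25.0], [25.0, 250.0]])  # m
weights = bounds[:, 1] - bounds[:, 0]  # -> [2.5, 22.5, 225.0]
data = np.ma.arange(1, 25).reshape(2, 3, 2, 2)
expected = np.average(data, axis=1, weights=weights)  # z is axis 1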
self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-2') + self.assertEqual(result.units, "kg m-2") def test_axis_statistics_subtract_mean(self): """Test axis statistics with operator mean and normalize='subtract'.""" data = np.ma.arange(1, 25).reshape(2, 3, 2, 2) self.grid_4d.data = data result = axis_statistics( - self.grid_4d, 'z', 'mean', normalize='subtract' + self.grid_4d, "z", "mean", normalize="subtract" ) - bounds = self.grid_4d.coord(axis='z').bounds - weights = (bounds[:, 1] - bounds[:, 0]) - expected = ( - data - np.average(data, axis=1, weights=weights, keepdims=True) + bounds = self.grid_4d.coord(axis="z").bounds + weights = bounds[:, 1] - bounds[:, 0] + expected = data - np.average( + data, axis=1, weights=weights, keepdims=True ) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') - self.assertFalse(self.grid_4d.coords('_axis_statistics_weights_')) - self.assertFalse(result.coords('_axis_statistics_weights_')) + self.assertEqual(result.units, "kg m-3") + self.assertFalse(self.grid_4d.coords("_axis_statistics_weights_")) + self.assertFalse(result.coords("_axis_statistics_weights_")) def test_axis_statistics_divide_min(self): """Test axis statistics with operator min and normalize='divide'.""" data = np.ma.arange(1, 25).reshape(2, 3, 2, 2) self.grid_4d.data = data - result = axis_statistics(self.grid_4d, 'z', 'min', normalize='divide') + result = axis_statistics(self.grid_4d, "z", "min", normalize="divide") expected = data / np.min(data, axis=1, keepdims=True) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, '1') - self.assertFalse(self.grid_4d.coords('_axis_statistics_weights_')) - self.assertFalse(result.coords('_axis_statistics_weights_')) + self.assertEqual(result.units, "1") + self.assertFalse(self.grid_4d.coords("_axis_statistics_weights_")) + self.assertFalse(result.coords("_axis_statistics_weights_")) def test_wrong_axis_statistics_fail(self): """Test raises error when axis is not found in cube.""" with self.assertRaises(ValueError) as err: - axis_statistics(self.grid_3d, 't', 'mean') + axis_statistics(self.grid_3d, "t", "mean") self.assertEqual( - f'Axis t not found in cube {self.grid_3d.summary(shorten=True)}', - str(err.exception)) + f"Axis t not found in cube {self.grid_3d.summary(shorten=True)}", + str(err.exception), + ) def test_multidimensional_axis_statistics_fail(self): i_coord = iris.coords.DimCoord( [0, 1], - long_name='cell index along first dimension', - units='1', + long_name="cell index along first dimension", + units="1", ) j_coord = iris.coords.DimCoord( [0, 1], - long_name='cell index along second dimension', - units='1', + long_name="cell index along second dimension", + units="1", ) lat_coord = iris.coords.AuxCoord( [[-40.0, -20.0], [-20.0, 0.0]], - var_name='lat', - standard_name='latitude', - units='degrees_north', + var_name="lat", + standard_name="latitude", + units="degrees_north", ) lon_coord = iris.coords.AuxCoord( [[100.0, 140.0], [80.0, 100.0]], - var_name='lon', - standard_name='longitude', - units='degrees_east', + var_name="lon", + standard_name="longitude", + units="degrees_east", ) cube = iris.cube.Cube( np.ones((2, 2)), - var_name='tos', - long_name='sea_surface_temperature', - units='K', + var_name="tos", + long_name="sea_surface_temperature", + units="K", dim_coords_and_dims=[(j_coord, 0), (i_coord, 1)], aux_coords_and_dims=[(lat_coord, (0, 1)), (lon_coord, (0, 1))], ) with self.assertRaises(NotImplementedError) as err: - 
self.assertEqual(('axis_statistics not implemented for ' - 'multidimensional coordinates.'), str(err.exception)) + axis_statistics(cube, "x", "mean") + self.assertEqual( + ( + "axis_statistics not implemented for " + "multidimensional coordinates." + ), + str(err.exception), + ) def test_extract_volume(self): """Test to extract the top two layers of a 3 layer depth column.""" - result = extract_volume(self.grid_3d, 0., 10.) + result = extract_volume(self.grid_3d, 0.0, 10.0) expected = np.ones((2, 2, 2)) print(result.data, expected.data) self.assert_array_equal(result.data, expected) def test_extract_volume_intervals(self): """Test to extract open and closed intervals.""" - open_interval = extract_volume(self.grid_3d, 0., 5.) - expected_levels_open = np.array([ - 0.5, - ]) + open_interval = extract_volume(self.grid_3d, 0.0, 5.0) + expected_levels_open = np.array( + [ + 0.5, + ] + ) - closed_interval = extract_volume(self.grid_3d, 0., 5., 'closed') - expected_levels_closed = np.array([0.5, 5.]) + closed_interval = extract_volume(self.grid_3d, 0.0, 5.0, "closed") + expected_levels_closed = np.array([0.5, 5.0]) self.assert_array_equal( - open_interval.coord(axis='Z').points, expected_levels_open) + open_interval.coord(axis="Z").points, expected_levels_open + ) self.assert_array_equal( - closed_interval.coord(axis='Z').points, expected_levels_closed) + closed_interval.coord(axis="Z").points, expected_levels_closed + ) def test_extract_volume_mixed_intervals(self): - left_closed = extract_volume(self.grid_3d, 0.5, 5., 'left_closed') - expected_levels_left = np.array([ - 0.5, - ]) + left_closed = extract_volume(self.grid_3d, 0.5, 5.0, "left_closed") + expected_levels_left = np.array( + [ + 0.5, + ] + ) - right_closed = extract_volume(self.grid_3d, 0.5, 5., 'right_closed') - expected_levels_right = np.array([ - 5., - ]) + right_closed = extract_volume(self.grid_3d, 0.5, 5.0, "right_closed") + expected_levels_right = np.array( + [ + 5.0, + ] + ) self.assert_array_equal( - left_closed.coord(axis='Z').points, expected_levels_left) + left_closed.coord(axis="Z").points, expected_levels_left + ) self.assert_array_equal( - right_closed.coord(axis='Z').points, expected_levels_right) + right_closed.coord(axis="Z").points, expected_levels_right + ) def test_extract_volume_nearest_values(self): """Test to extract nearest values.""" - default = extract_volume(self.grid_3d, 0, 48, 'closed', False) - expected_levels_default = np.array([0.5, 5.]) + default = extract_volume(self.grid_3d, 0, 48, "closed", False) + expected_levels_default = np.array([0.5, 5.0]) - nearest = extract_volume(self.grid_3d, 0, 48, 'closed', True) - expected_levels_nearest = np.array([0.5, 5., 50.]) + nearest = extract_volume(self.grid_3d, 0, 48, "closed", True) + expected_levels_nearest = np.array([0.5, 5.0, 50.0]) self.assert_array_equal( - default.coord(axis='Z').points, expected_levels_default) + default.coord(axis="Z").points, expected_levels_default + ) self.assert_array_equal( - nearest.coord(axis='Z').points, expected_levels_nearest) + nearest.coord(axis="Z").points, expected_levels_nearest + ) def test_extract_volume_error(self): with self.assertRaises(ValueError) as err: - extract_volume(self.grid_3d, 0., 5., 'wrong') + extract_volume(self.grid_3d, 0.0, 5.0, "wrong") self.assertEqual( - ('Depth extraction bounds can be set to "open", "closed", ' - '"left_closed", or "right_closed". ' - 'Got "wrong".'), str(err.exception)) + ( + 'Depth extraction bounds can be set to "open", "closed", ' + '"left_closed", or "right_closed". 
' + 'Got "wrong".' + ), + str(err.exception), + ) def test_extract_volume_mean(self): """Test to extract the top two layers and compute the weighted average of a cube.""" grid_volume = calculate_volume(self.grid_4d) - measure = iris.coords.CellMeasure(grid_volume, - standard_name='ocean_volume', - units='m3', - measure='volume') + assert isinstance(grid_volume, np.ndarray) + measure = iris.coords.CellMeasure( + grid_volume, + standard_name="ocean_volume", + units="m3", + measure="volume", + ) self.grid_4d.add_cell_measure(measure, range(0, measure.ndim)) - result = extract_volume(self.grid_4d, 0., 10.) + result = extract_volume(self.grid_4d, 0.0, 10.0) expected = np.ma.ones((2, 2, 2, 2)) self.assert_array_equal(result.data, expected) - result_mean = volume_statistics(result, 'mean') + result_mean = volume_statistics(result, "mean") - expected_mean = np.ma.array([1., 1.], mask=False) + expected_mean = np.ma.array([1.0, 1.0], mask=False) self.assert_array_equal(result_mean.data, expected_mean) - self.assertEqual(result_mean.units, 'kg m-3') - self.assertTrue(self.grid_4d.cell_measures('ocean_volume')) - self.assertFalse(result_mean.cell_measures('ocean_volume')) + self.assertEqual(result_mean.units, "kg m-3") + self.assertTrue(self.grid_4d.cell_measures("ocean_volume")) + self.assertFalse(result_mean.cell_measures("ocean_volume")) def test_volume_statistics(self): """Test to take the volume weighted average of a (2,3,2,2) cube.""" - self.assertFalse(self.grid_4d.cell_measures('ocean_volume')) + self.assertFalse(self.grid_4d.cell_measures("ocean_volume")) - result = volume_statistics(self.grid_4d, 'mean') + result = volume_statistics(self.grid_4d, "mean") - expected = np.ma.array([1., 1.], mask=False) + expected = np.ma.array([1.0, 1.0], mask=False) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') - self.assertFalse(self.grid_4d.cell_measures('ocean_volume')) - self.assertFalse(result.cell_measures('ocean_volume')) + self.assertEqual(result.units, "kg m-3") + self.assertFalse(self.grid_4d.cell_measures("ocean_volume")) + self.assertFalse(result.cell_measures("ocean_volume")) def test_volume_nolevbounds(self): """Test to take the volume weighted average of a cube with no bounds in the z axis. """ - self.assertFalse(self.grid_4d_znobounds.coord(axis='z').has_bounds()) - result = volume_statistics(self.grid_4d_znobounds, 'mean') + self.assertFalse(self.grid_4d_znobounds.coord(axis="z").has_bounds()) + result = volume_statistics(self.grid_4d_znobounds, "mean") - expected = np.ma.array([1., 1.], mask=False) + expected = np.ma.array([1.0, 1.0], mask=False) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') - self.assertFalse(self.grid_4d.cell_measures('ocean_volume')) - self.assertFalse(result.cell_measures('ocean_volume')) + self.assertEqual(result.units, "kg m-3") + self.assertFalse(self.grid_4d.cell_measures("ocean_volume")) + self.assertFalse(result.cell_measures("ocean_volume")) def test_calculate_volume_lazy(self): """Test that calculate_volume returns a lazy volume @@ -465,15 +510,17 @@ def test_volume_statistics_cell_measure(self): The volume measure is pre-loaded in the cube. 
""" grid_volume = calculate_volume(self.grid_4d) - measure = iris.coords.CellMeasure(grid_volume, - standard_name='ocean_volume', - units='m3', - measure='volume') + measure = iris.coords.CellMeasure( + grid_volume, + standard_name="ocean_volume", + units="m3", + measure="volume", + ) self.grid_4d.add_cell_measure(measure, range(0, measure.ndim)) - result = volume_statistics(self.grid_4d, 'mean') - expected = np.ma.array([1., 1.], mask=False) + result = volume_statistics(self.grid_4d, "mean") + expected = np.ma.array([1.0, 1.0], mask=False) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_volume_statistics_cell_measure_lazy(self): """Test to take the volume weighted average of a lazy (2,3,2,2) cube. @@ -481,16 +528,18 @@ def test_volume_statistics_cell_measure_lazy(self): The volume measure is pre-loaded in the cube. """ grid_volume = calculate_volume(self.grid_4d_lazy) - measure = iris.coords.CellMeasure(grid_volume, - standard_name='ocean_volume', - units='m3', - measure='volume') + measure = iris.coords.CellMeasure( + grid_volume, + standard_name="ocean_volume", + units="m3", + measure="volume", + ) self.grid_4d_lazy.add_cell_measure(measure, range(0, measure.ndim)) - result = volume_statistics(self.grid_4d_lazy, 'mean') + result = volume_statistics(self.grid_4d_lazy, "mean") assert result.has_lazy_data() - expected = np.ma.array([1., 1.], mask=False) + expected = np.ma.array([1.0, 1.0], mask=False) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_volume_statistics_long(self): """Test to take the volume weighted average of a (4,3,2,2) cube. @@ -498,39 +547,39 @@ def test_volume_statistics_long(self): This extra time is needed, as the volume average calculation uses different methods for small and large cubes. 
""" - result = volume_statistics(self.grid_4d_2, 'mean') - expected = np.ma.array([1., 1., 1., 1.], mask=False) + result = volume_statistics(self.grid_4d_2, "mean") + expected = np.ma.array([1.0, 1.0, 1.0, 1.0], mask=False) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_volume_statistics_masked_level(self): """Test to take the volume weighted average of a (2,3,2,2) cube where the last depth level is fully masked.""" self.grid_4d.data[:, -1, :, :] = np.ma.masked_all((2, 2, 2)) - result = volume_statistics(self.grid_4d, 'mean') - expected = np.ma.array([1., 1.], mask=False) + result = volume_statistics(self.grid_4d, "mean") + expected = np.ma.array([1.0, 1.0], mask=False) self.assert_array_equal(result.data, expected) def test_volume_statistics_masked_timestep(self): """Test to take the volume weighted average of a (2,3,2,2) cube where the first timestep is fully masked.""" self.grid_4d.data[0, :, :, :] = np.ma.masked_all((3, 2, 2)) - result = volume_statistics(self.grid_4d, 'mean') - expected = np.ma.array([1., 1], mask=[True, False]) + result = volume_statistics(self.grid_4d, "mean") + expected = np.ma.array([1.0, 1], mask=[True, False]) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_volume_statistics_subtract_mean(self): """Test to take the volume weighted average of a (2,3,2,2) cube.""" - self.assertFalse(self.grid_4d.cell_measures('ocean_volume')) + self.assertFalse(self.grid_4d.cell_measures("ocean_volume")) - result = volume_statistics(self.grid_4d, 'mean', normalize='subtract') + result = volume_statistics(self.grid_4d, "mean", normalize="subtract") expected = np.ma.zeros((2, 3, 2, 2)) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') - self.assertFalse(self.grid_4d.cell_measures('ocean_volume')) - self.assertFalse(result.cell_measures('ocean_volume')) + self.assertEqual(result.units, "kg m-3") + self.assertFalse(self.grid_4d.cell_measures("ocean_volume")) + self.assertFalse(result.cell_measures("ocean_volume")) def test_volume_statistics_cell_measure_divide_mean(self): """Test to take the volume weighted average of a (2,3,2,2) cube. @@ -538,17 +587,19 @@ def test_volume_statistics_cell_measure_divide_mean(self): The volume measure is pre-loaded in the cube. """ grid_volume = calculate_volume(self.grid_4d) - measure = iris.coords.CellMeasure(grid_volume, - standard_name='ocean_volume', - units='m3', - measure='volume') + measure = iris.coords.CellMeasure( + grid_volume, + standard_name="ocean_volume", + units="m3", + measure="volume", + ) self.grid_4d.add_cell_measure(measure, range(0, measure.ndim)) - result = volume_statistics(self.grid_4d, 'mean', normalize='divide') + result = volume_statistics(self.grid_4d, "mean", normalize="divide") expected = np.ma.ones((2, 3, 2, 2)) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, '1') + self.assertEqual(result.units, "1") def test_volume_statistics_weights(self): """Test to take the volume weighted average of a (2,3,2,2) cube. 
@@ -557,128 +608,126 @@ def test_volume_statistics_weights(self): """ data = np.ma.arange(1, 25).reshape(2, 3, 2, 2) self.grid_4d.data = data - measure = iris.coords.CellMeasure(data, - standard_name='ocean_volume', - units='m3', - measure='volume') + measure = iris.coords.CellMeasure( + data, standard_name="ocean_volume", units="m3", measure="volume" + ) self.grid_4d.add_cell_measure(measure, range(0, measure.ndim)) - result = volume_statistics(self.grid_4d, 'mean') - expected = np.ma.array([8.333333333333334, 19.144144144144143], - mask=[False, False]) + result = volume_statistics(self.grid_4d, "mean") + expected = np.ma.array( + [8.333333333333334, 19.144144144144143], mask=[False, False] + ) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_volume_statistics_wrong_operator_fail(self): with self.assertRaises(ValueError) as err: - volume_statistics(self.grid_4d, 'wrong') - self.assertEqual('Volume operator wrong not recognised.', - str(err.exception)) + volume_statistics(self.grid_4d, "wrong") + self.assertEqual( + "Volume operator wrong not recognised.", str(err.exception) + ) def test_volume_statistics_2d_lat_fail(self): with self.assertRaises(CoordinateMultiDimError): - volume_statistics(self.grid_4d_irregular, 'mean') + volume_statistics(self.grid_4d_irregular, "mean") def test_volume_statistics_2d_lat_cellarea(self): - measure = iris.coords.CellMeasure(np.arange(1, 5).reshape(2, 2), - standard_name='cell_area', - units='m2', - measure='area') + measure = iris.coords.CellMeasure( + np.arange(1, 5).reshape(2, 2), + standard_name="cell_area", + units="m2", + measure="area", + ) self.grid_4d_irregular.add_cell_measure(measure, (2, 3)) - result = volume_statistics(self.grid_4d_irregular, 'mean') - expected = np.ma.array([1., 1.], mask=False) + result = volume_statistics(self.grid_4d_irregular, "mean") + expected = np.ma.array([1.0, 1.0], mask=False) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") data = np.ma.arange(1, 25).reshape(2, 3, 2, 2) self.grid_4d_irregular.data = data - result = volume_statistics(self.grid_4d_irregular, 'mean') + result = volume_statistics(self.grid_4d_irregular, "mean") expected = np.ma.array([10.56, 22.56], mask=False) self.assert_array_equal(result.data, expected) - self.assertEqual(result.units, 'kg m-3') + self.assertEqual(result.units, "kg m-3") def test_volume_statistics_invalid_bounds(self): """Test z-axis bounds is not 2 in last dimension""" with self.assertRaises(ValueError) as err: - volume_statistics(self.grid_invalid_z_bounds, 'mean') - self.assertIn( + volume_statistics(self.grid_invalid_z_bounds, "mean") + assert ( "Z axis bounds shape found (3, 2, 2, 4). Bounds should be " - "2 in the last dimension to compute the thickness.", - str(err.exception) - ) + "2 in the last dimension to compute the thickness." + ) in str(err.exception) def test_volume_statistics_invalid_units(self): """Test z-axis units cannot be converted to m""" with self.assertRaises(ValueError) as err: - volume_statistics(self.grid_4d_sigma_space, 'mean') - self.assertIn( + volume_statistics(self.grid_4d_sigma_space, "mean") + assert ( "Cannot compute volume using the Z-axis. " - "Unable to convert from 'Unit('kg m-3')' to 'Unit('m')'.", - str(err.exception) - ) + "Unable to convert from 'Unit('kg m-3')' to 'Unit('m')'." 
+        ) in str(err.exception)

     def test_volume_statistics_z_axis_time_error(self):
         # Fails because the depth z-axis coord depends on the time
         # dimension, which would also aggregate along that dimension
         with self.assertRaises(ValueError) as err:
-            volume_statistics(self.grid_4d_z, 'mean')
-        self.assertIn(
+            volume_statistics(self.grid_4d_z, "mean")
+        assert (
             "X and Y axis coordinates depend on (2, 3) dimensions, "
             "while X, Y, and Z axis depends on (0, 1, 2, 3) dimensions. "
             "This may indicate Z axis depending on other dimension than "
-            "space that could provoke invalid aggregation...",
-            str(err.exception)
-        )
+            "space that could provoke invalid aggregation..."
+        ) in str(err.exception)

         grid_3d_no_x = self.grid_4d_z[..., 0]
         with self.assertRaises(ValueError) as err:
-            volume_statistics(grid_3d_no_x, 'mean')
-        self.assertIn(
+            volume_statistics(grid_3d_no_x, "mean")
+        assert (
             "X and Y axis coordinates depend on (2,) dimensions, "
             "while X, Y, and Z axis depends on (0, 1, 2) dimensions. "
             "This may indicate Z axis depending on other dimension than "
-            "space that could provoke invalid aggregation...",
-            str(err.exception)
-        )
+            "space that could provoke invalid aggregation..."
+        ) in str(err.exception)

     def test_volume_statistics_missing_axis(self):
         # x axis is missing
         grid_no_x = self.grid_4d[..., 0]
-        volume_statistics(grid_no_x, 'mean')
+        volume_statistics(grid_no_x, "mean")

         # y axis is missing
         grid_no_y = self.grid_4d[..., 0, :]
-        volume_statistics(grid_no_y, 'mean')
+        volume_statistics(grid_no_y, "mean")

         # z axis is missing
         grid_no_z = self.grid_4d[:, 0]
         with self.assertRaises(ValueError) as err:
-            volume_statistics(grid_no_z, 'mean')
-        self.assertIn("Cannot compute volume with scalar Z-axis",
-                      str(err.exception))
+            volume_statistics(grid_no_z, "mean")
+        assert "Cannot compute volume with scalar Z-axis" in str(err.exception)

     def test_volume_statistics_2d_depth(self):
         # Create new 2D depth coord
-        new_z_coord = self.grid_4d_z.coord('zcoord')[0, :, :, 0]
-        self.grid_4d_z.remove_coord('zcoord')
+        new_z_coord = self.grid_4d_z.coord("zcoord")[0, :, :, 0]
+        self.grid_4d_z.remove_coord("zcoord")
         self.grid_4d_z.add_aux_coord(new_z_coord, (1, 2))
-        result = volume_statistics(self.grid_4d, 'mean')
-        expected = np.ma.array([1., 1.], mask=False)
+        result = volume_statistics(self.grid_4d, "mean")
+        expected = np.ma.array([1.0, 1.0], mask=False)
         self.assert_array_equal(result.data, expected)

     def test_depth_integration_1d(self):
         """Test to take the depth integration of a 3-layer cube."""
         result = depth_integration(self.grid_3d[:, 0, 0])
-        expected = np.ones((1, 1)) * 250.
+        expected = np.ones((1, 1)) * 250.0
         self.assert_array_equal(result.data, expected)

     def test_depth_integration_3d(self):
         """Test to take the depth integration of a 3-layer cube."""
         result = depth_integration(self.grid_3d)
-        expected = np.ones((2, 2)) * 250.
+ expected = np.ones((2, 2)) * 250.0 self.assert_array_equal(result.data, expected) def test_extract_transect_latitude(self): @@ -695,10 +744,10 @@ def test_extract_transect_longitude(self): def test_extract_trajectory(self): """Test to extract a trajectory from a (3, 2, 2) cube.""" - result = extract_trajectory(self.grid_3d, [1.5, 2.5], [2., 2.], 2) + result = extract_trajectory(self.grid_3d, [1.5, 2.5], [2.0, 2.0], 2) expected = np.ones((3, 2)) self.assert_array_equal(result.data, expected) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/unit/preprocessor/_weighting/test_weighting_landsea_fraction.py b/tests/unit/preprocessor/_weighting/test_weighting_landsea_fraction.py index 3e90fee118..bdfef951d2 100644 --- a/tests/unit/preprocessor/_weighting/test_weighting_landsea_fraction.py +++ b/tests/unit/preprocessor/_weighting/test_weighting_landsea_fraction.py @@ -9,41 +9,52 @@ import esmvalcore.preprocessor._weighting as weighting crd_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) -LON_3 = iris.coords.DimCoord([0, 1.5, 3], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_east', - coord_system=crd_sys) -LON_4 = iris.coords.DimCoord([0, 1.5, 2.5, 3.5], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3], - [3, 4]], - units='degrees_east', - coord_system=crd_sys) +LON_3 = iris.coords.DimCoord( + [0, 1.5, 3], + standard_name="longitude", + bounds=[[0, 1], [1, 2], [2, 3]], + units="degrees_east", + coord_system=crd_sys, +) +LON_4 = iris.coords.DimCoord( + [0, 1.5, 2.5, 3.5], + standard_name="longitude", + bounds=[[0, 1], [1, 2], [2, 3], [3, 4]], + units="degrees_east", + coord_system=crd_sys, +) CUBE_SFTLF = iris.cube.Cube( [10.0, 0.0, 100.0], - var_name='sftlf', - standard_name='land_area_fraction', - units=Unit('%'), - dim_coords_and_dims=[(LON_3, 0), ] + var_name="sftlf", + standard_name="land_area_fraction", + units=Unit("%"), + dim_coords_and_dims=[ + (LON_3, 0), + ], ) CUBE_SFTOF = iris.cube.Cube( [100.0, 0.0, 50.0, 70.0], - var_name='sftof', - standard_name='sea_area_fraction', - units=Unit('%'), - dim_coords_and_dims=[(LON_4, 0), ] + var_name="sftof", + standard_name="sea_area_fraction", + units=Unit("%"), + dim_coords_and_dims=[ + (LON_4, 0), + ], ) CUBE_3 = iris.cube.Cube( [10.0, 20.0, 0.0], - var_name='dim3', - dim_coords_and_dims=[(LON_3, 0), ] + var_name="dim3", + dim_coords_and_dims=[ + (LON_3, 0), + ], ) CUBE_4 = iris.cube.Cube( [1.0, 2.0, -1.0, 2.0], - var_name='dim4', - dim_coords_and_dims=[(LON_4, 0), ] + var_name="dim4", + dim_coords_and_dims=[ + (LON_4, 0), + ], ) CUBE_ANCILLARY_3 = CUBE_3.copy() @@ -56,18 +67,28 @@ FRAC_SFTOF = np.array([0.0, 1.0, 0.5, 0.3]) LAND_FRACTION = [ - (CUBE_3, None, [ - 'Ancillary variables land/sea area fraction not found in cube. ' - 'Check ancillary data availability.']), - (CUBE_4, None, [ - 'Ancillary variables land/sea area fraction not found in cube. ' - 'Check ancillary data availability.']), + ( + CUBE_3, + None, + [ + "Ancillary variables land/sea area fraction not found in cube. " + "Check ancillary data availability." + ], + ), + ( + CUBE_4, + None, + [ + "Ancillary variables land/sea area fraction not found in cube. " + "Check ancillary data availability." 
+        ],
+    ),
     (CUBE_ANCILLARY_3, FRAC_SFTLF, []),
-    (CUBE_ANCILLARY_4, FRAC_SFTOF, [])
+    (CUBE_ANCILLARY_4, FRAC_SFTOF, []),
 ]


-@pytest.mark.parametrize('cube,out,err', LAND_FRACTION)
+@pytest.mark.parametrize("cube,out,err", LAND_FRACTION)
 def test_get_land_fraction(cube, out, err):
     """Test calculation of land fraction."""
     (land_fraction, errors) = weighting._get_land_fraction(cube)
@@ -76,7 +97,7 @@ def test_get_land_fraction(cube, out, err):
     else:
         assert np.allclose(land_fraction, out)
     assert len(errors) == len(err)
-    for (idx, error) in enumerate(errors):
+    for idx, error in enumerate(errors):
         assert err[idx] in error


@@ -86,28 +107,26 @@ def test_get_land_fraction(cube, out, err):
 CUBE_4_O = CUBE_4.copy([1.0, 0.0, -0.5, 1.4])

 WEIGHTING_LANDSEA_FRACTION = [
-    (CUBE_3, 'land', ValueError),
-    (CUBE_3, 'sea', ValueError),
-    (CUBE_ANCILLARY_3, 'land', CUBE_3_L),
-    (CUBE_ANCILLARY_3, 'sea', CUBE_3_O),
-    (CUBE_4, 'land', ValueError),
-    (CUBE_4, 'sea', ValueError),
-    (CUBE_ANCILLARY_4, 'land', CUBE_4_L),
-    (CUBE_ANCILLARY_4, 'sea', CUBE_4_O),
+    (CUBE_3, "land", ValueError),
+    (CUBE_3, "sea", ValueError),
+    (CUBE_ANCILLARY_3, "land", CUBE_3_L),
+    (CUBE_ANCILLARY_3, "sea", CUBE_3_O),
+    (CUBE_4, "land", ValueError),
+    (CUBE_4, "sea", ValueError),
+    (CUBE_ANCILLARY_4, "land", CUBE_4_L),
+    (CUBE_ANCILLARY_4, "sea", CUBE_4_O),
 ]


-@pytest.mark.parametrize('cube,area_type,out',
-                         WEIGHTING_LANDSEA_FRACTION)
-def test_weighting_landsea_fraction(cube,
-                                    area_type,
-                                    out):
+@pytest.mark.parametrize("cube,area_type,out", WEIGHTING_LANDSEA_FRACTION)
+def test_weighting_landsea_fraction(cube, area_type, out):
     """Test landsea fraction weighting preprocessor."""
     # Exceptions
     if isinstance(out, type):
         with pytest.raises(out):
             weighted_cube = weighting.weighting_landsea_fraction(
-                cube, area_type)
+                cube, area_type
+            )
         return

     # Regular cases
diff --git a/tests/unit/preprocessor/test_configuration.py b/tests/unit/preprocessor/test_configuration.py
index 39adae64a6..e5461598c9 100644
--- a/tests/unit/preprocessor/test_configuration.py
+++ b/tests/unit/preprocessor/test_configuration.py
@@ -1,4 +1,5 @@
 """Tests for the basic configuration of the preprocessor module."""
+
 from esmvalcore.preprocessor import (
     DEFAULT_ORDER,
     FINAL_STEPS,
@@ -19,9 +20,14 @@ def test_non_repeated_keys():


 def test_time_preprocessors_default_order_added():
     assert all(
-        (time_preproc in DEFAULT_ORDER for time_preproc in TIME_PREPROCESSORS))
+        (time_preproc in DEFAULT_ORDER for time_preproc in TIME_PREPROCESSORS)
+    )


 def test_multimodel_functions_in_default_order():
-    assert all((time_preproc in DEFAULT_ORDER
-                for time_preproc in MULTI_MODEL_FUNCTIONS))
+    assert all(
+        (
+            time_preproc in DEFAULT_ORDER
+            for time_preproc in MULTI_MODEL_FUNCTIONS
+        )
+    )
diff --git a/tests/unit/preprocessor/test_error_logging.py b/tests/unit/preprocessor/test_error_logging.py
index adbedc7cd2..49899203b3 100644
--- a/tests/unit/preprocessor/test_error_logging.py
+++ b/tests/unit/preprocessor/test_error_logging.py
@@ -23,7 +23,8 @@ def assert_debug_call_ok(mock_logger, items):
     debug_call_args = mock_logger.debug.call_args[0]
     assert debug_call_args[0] == (
         "Running preprocessor function '%s' on the data\n%s%s\nwith function "
-        "argument(s)\n%s")
+        "argument(s)\n%s"
+    )
     assert debug_call_args[1] == "failing_function"
     if isinstance(items, (PreprocessorFile, Cube, str)):
         assert debug_call_args[2] == repr(items)
@@ -40,49 +41,50 @@ def assert_error_call_ok(mock_logger):
     error_call_args = mock_logger.error.call_args[0]
     assert error_call_args[0] == (
         "Failed to 
run preprocessor function '%s' on the data\n%s%s\nwith " - "function argument(s)\n%s") + "function argument(s)\n%s" + ) assert error_call_args[1] == "failing_function" assert error_call_args[4] == "test = 42,\nlist = ['a', 'b']" -KWARGS = {'test': 42, 'list': ['a', 'b']} -PREPROC_FILE = PreprocessorFile(Path('a')) +KWARGS = {"test": 42, "list": ["a", "b"]} +PREPROC_FILE = PreprocessorFile(Path("a")) TEST_ITEMS_SHORT = [ # Scalars PREPROC_FILE, Cube(0), - 'a', + "a", # 1-element lists [PREPROC_FILE], [Cube(0)], - ['a'], + ["a"], # 1-element sets set([PREPROC_FILE]), set([Cube(0)]), - set(['a']), + set(["a"]), # 1-element CubeList CubeList([Cube(0)]), # 4-element lists [PREPROC_FILE] * 4, [Cube(0)] * 4, - ['a'] * 4, + ["a"] * 4, # 4-element sets - set(['a', 'b', 'c', 'd']), + set(["a", "b", "c", "d"]), # 4-element CubeList CubeList([Cube(0), Cube(1), Cube(2), Cube(3)]), ] TEST_ITEMS_LONG = [ # 6-element list - ['a', 'b', 'c', 'd', 'e', 'f'], + ["a", "b", "c", "d", "e", "f"], # 6-element set - set(['a', 'b', 'c', 'd', 'e', 'f']), + set(["a", "b", "c", "d", "e", "f"]), ] -SHORT_INPUT_FILES = ['x', 'y', 'z', 'w'] -LONG_INPUT_FILES = ['x', 'y', 'z', 'w', 'v', 'u'] +SHORT_INPUT_FILES = ["x", "y", "z", "w"] +LONG_INPUT_FILES = ["x", "y", "z", "w", "v", "u"] -@pytest.mark.parametrize('items', TEST_ITEMS_SHORT) -@mock.patch('esmvalcore.preprocessor.logger', autospec=True) +@pytest.mark.parametrize("items", TEST_ITEMS_SHORT) +@mock.patch("esmvalcore.preprocessor.logger", autospec=True) def test_short_items_no_input_files(mock_logger, items): """Test short list of items and no input files.""" with pytest.raises(ValueError, match=VALUE_ERROR_MSG): @@ -105,19 +107,21 @@ def test_short_items_no_input_files(mock_logger, items): assert error_call_args[3] == "" -@pytest.mark.parametrize('items', TEST_ITEMS_SHORT) -@mock.patch('esmvalcore.preprocessor.logger', autospec=True) +@pytest.mark.parametrize("items", TEST_ITEMS_SHORT) +@mock.patch("esmvalcore.preprocessor.logger", autospec=True) def test_short_items_short_input_files(mock_logger, items): """Test short list of items and short list of input files.""" with pytest.raises(ValueError, match=VALUE_ERROR_MSG): - _run_preproc_function(failing_function, items, KWARGS, - input_files=SHORT_INPUT_FILES) + _run_preproc_function( + failing_function, items, KWARGS, input_files=SHORT_INPUT_FILES + ) assert len(mock_logger.mock_calls) == 2 # Debug call assert_debug_call_ok(mock_logger, items) assert mock_logger.debug.call_args[0][3] == ( - "\nloaded from original input file(s)\n['x', 'y', 'z', 'w']") + "\nloaded from original input file(s)\n['x', 'y', 'z', 'w']" + ) # Error call assert_error_call_ok(mock_logger) @@ -129,22 +133,25 @@ def test_short_items_short_input_files(mock_logger, items): assert repr(item) in error_call_args[2] assert "further argument(s) not shown here;" not in error_call_args[2] assert error_call_args[3] == ( - "\nloaded from original input file(s)\n['x', 'y', 'z', 'w']") + "\nloaded from original input file(s)\n['x', 'y', 'z', 'w']" + ) -@pytest.mark.parametrize('items', TEST_ITEMS_SHORT) -@mock.patch('esmvalcore.preprocessor.logger', autospec=True) +@pytest.mark.parametrize("items", TEST_ITEMS_SHORT) +@mock.patch("esmvalcore.preprocessor.logger", autospec=True) def test_short_items_long_input_files(mock_logger, items): """Test short list of items and long list of input files.""" with pytest.raises(ValueError, match=VALUE_ERROR_MSG): - _run_preproc_function(failing_function, items, KWARGS, - input_files=LONG_INPUT_FILES) + 
_run_preproc_function( + failing_function, items, KWARGS, input_files=LONG_INPUT_FILES + ) assert len(mock_logger.mock_calls) == 2 # Debug call assert_debug_call_ok(mock_logger, items) assert mock_logger.debug.call_args[0][3] == ( - "\nloaded from original input file(s)\n['x', 'y', 'z', 'w', 'v', 'u']") + "\nloaded from original input file(s)\n['x', 'y', 'z', 'w', 'v', 'u']" + ) # Error call assert_error_call_ok(mock_logger) @@ -158,11 +165,12 @@ def test_short_items_long_input_files(mock_logger, items): assert error_call_args[3] == ( "\nloaded from original input file(s)\n['x', 'y', 'z', 'w']\n(and 2 " "further file(s) not shown here; refer to the debug log for a full " - "list)") + "list)" + ) -@pytest.mark.parametrize('items', TEST_ITEMS_LONG) -@mock.patch('esmvalcore.preprocessor.logger', autospec=True) +@pytest.mark.parametrize("items", TEST_ITEMS_LONG) +@mock.patch("esmvalcore.preprocessor.logger", autospec=True) def test_long_items_no_input_files(mock_logger, items): """Test long list of items and no input files.""" with pytest.raises(ValueError, match=VALUE_ERROR_MSG): @@ -185,19 +193,21 @@ def test_long_items_no_input_files(mock_logger, items): assert error_call_args[3] == "" -@pytest.mark.parametrize('items', TEST_ITEMS_LONG) -@mock.patch('esmvalcore.preprocessor.logger', autospec=True) +@pytest.mark.parametrize("items", TEST_ITEMS_LONG) +@mock.patch("esmvalcore.preprocessor.logger", autospec=True) def test_long_items_short_input_files(mock_logger, items): """Test long list of items and short list of input files.""" with pytest.raises(ValueError, match=VALUE_ERROR_MSG): - _run_preproc_function(failing_function, items, KWARGS, - input_files=SHORT_INPUT_FILES) + _run_preproc_function( + failing_function, items, KWARGS, input_files=SHORT_INPUT_FILES + ) assert len(mock_logger.mock_calls) == 2 # Debug call assert_debug_call_ok(mock_logger, items) assert mock_logger.debug.call_args[0][3] == ( - "\nloaded from original input file(s)\n['x', 'y', 'z', 'w']") + "\nloaded from original input file(s)\n['x', 'y', 'z', 'w']" + ) # Error call assert_error_call_ok(mock_logger) @@ -209,22 +219,25 @@ def test_long_items_short_input_files(mock_logger, items): assert repr(item) not in error_call_args[2] assert "\n(and 2 further argument(s) not shown here;" in error_call_args[2] assert error_call_args[3] == ( - "\nloaded from original input file(s)\n['x', 'y', 'z', 'w']") + "\nloaded from original input file(s)\n['x', 'y', 'z', 'w']" + ) -@pytest.mark.parametrize('items', TEST_ITEMS_LONG) -@mock.patch('esmvalcore.preprocessor.logger', autospec=True) +@pytest.mark.parametrize("items", TEST_ITEMS_LONG) +@mock.patch("esmvalcore.preprocessor.logger", autospec=True) def test_long_items_long_input_files(mock_logger, items): """Test long list of items and long list of input files.""" with pytest.raises(ValueError, match=VALUE_ERROR_MSG): - _run_preproc_function(failing_function, items, KWARGS, - input_files=LONG_INPUT_FILES) + _run_preproc_function( + failing_function, items, KWARGS, input_files=LONG_INPUT_FILES + ) assert len(mock_logger.mock_calls) == 2 # Debug call assert_debug_call_ok(mock_logger, items) assert mock_logger.debug.call_args[0][3] == ( - "\nloaded from original input file(s)\n['x', 'y', 'z', 'w', 'v', 'u']") + "\nloaded from original input file(s)\n['x', 'y', 'z', 'w', 'v', 'u']" + ) # Error call assert_error_call_ok(mock_logger) @@ -238,10 +251,11 @@ def test_long_items_long_input_files(mock_logger, items): assert error_call_args[3] == ( "\nloaded from original input file(s)\n['x', 'y', 
'z', 'w']\n(and 2 " "further file(s) not shown here; refer to the debug log for a full " - "list)") + "list)" + ) -class MockAncestor(): +class MockAncestor: """Mock class for ancestors.""" def __init__(self, filename): diff --git a/tests/unit/preprocessor/test_preprocessor_file.py b/tests/unit/preprocessor/test_preprocessor_file.py index 30e4c943cd..d386dbc1e6 100644 --- a/tests/unit/preprocessor/test_preprocessor_file.py +++ b/tests/unit/preprocessor/test_preprocessor_file.py @@ -9,12 +9,12 @@ from esmvalcore.preprocessor import PreprocessorFile ATTRIBUTES = { - 'filename': Path('file.nc'), - 'standard_name': 'precipitation', - 'long_name': 'Precipitation', - 'short_name': 'pr', - 'units': 'kg m-2 s-1', - 'frequency': 'mon', + "filename": Path("file.nc"), + "standard_name": "precipitation", + "long_name": "Precipitation", + "short_name": "pr", + "units": "kg m-2 s-1", + "frequency": "mon", } @@ -23,15 +23,15 @@ def product(): """PreprocessorFile object used for testing.""" cube = Cube( 0, - var_name='tas', - standard_name='air_temperature', - long_name='Near-Surface Air Temperature', - units='K', - attributes={'frequency': 'day'}, + var_name="tas", + standard_name="air_temperature", + long_name="Near-Surface Air Temperature", + units="K", + attributes={"frequency": "day"}, ) product = PreprocessorFile( - filename=Path('file.nc'), - attributes={k: v for k, v in ATTRIBUTES.items() if k != 'filename'}, + filename=Path("file.nc"), + attributes={k: v for k, v in ATTRIBUTES.items() if k != "filename"}, settings={}, ) product._cubes = CubeList([cube, cube, cube]) @@ -52,73 +52,74 @@ def test_update_attributes(product): product._update_attributes() assert product.attributes == { - 'filename': Path('file.nc'), - 'standard_name': 'air_temperature', - 'long_name': 'Near-Surface Air Temperature', - 'short_name': 'tas', - 'units': 'K', - 'frequency': 'day', + "filename": Path("file.nc"), + "standard_name": "air_temperature", + "long_name": "Near-Surface Air Temperature", + "short_name": "tas", + "units": "K", + "frequency": "day", } - assert isinstance(product.attributes['units'], str) + assert isinstance(product.attributes["units"], str) @pytest.mark.parametrize( - 'name,cube_property,expected_name', + "name,cube_property,expected_name", [ - ('standard_name', 'standard_name', ''), - ('long_name', 'long_name', ''), - ('short_name', 'var_name', ''), + ("standard_name", "standard_name", ""), + ("long_name", "long_name", ""), + ("short_name", "var_name", ""), ], ) -def test_update_attributes_empty_names(product, name, cube_property, - expected_name): +def test_update_attributes_empty_names( + product, name, cube_property, expected_name +): """Test ``_update_attributes``.""" setattr(product._cubes[0], cube_property, None) product._update_attributes() expected_attributes = { - 'filename': Path('file.nc'), - 'standard_name': 'air_temperature', - 'long_name': 'Near-Surface Air Temperature', - 'short_name': 'tas', - 'units': 'K', - 'frequency': 'day', + "filename": Path("file.nc"), + "standard_name": "air_temperature", + "long_name": "Near-Surface Air Temperature", + "short_name": "tas", + "units": "K", + "frequency": "day", } expected_attributes[name] = expected_name assert product.attributes == expected_attributes - assert isinstance(product.attributes['units'], str) + assert isinstance(product.attributes["units"], str) def test_update_attributes_empty_frequency(product): """Test ``_update_attributes``.""" - product._cubes[0].attributes.pop('frequency') + product._cubes[0].attributes.pop("frequency") 
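    # Editorial note (not part of the upstream patch): with the cube-level
    # "frequency" attribute popped above, ``_update_attributes`` has no cube
    # value to propagate, so the product keeps its own pre-existing
    # "frequency" ("mon" from ATTRIBUTES), as the expected dict below shows.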
product._update_attributes() assert product.attributes == { - 'filename': Path('file.nc'), - 'standard_name': 'air_temperature', - 'long_name': 'Near-Surface Air Temperature', - 'short_name': 'tas', - 'units': 'K', - 'frequency': 'mon', + "filename": Path("file.nc"), + "standard_name": "air_temperature", + "long_name": "Near-Surface Air Temperature", + "short_name": "tas", + "units": "K", + "frequency": "mon", } - assert isinstance(product.attributes['units'], str) + assert isinstance(product.attributes["units"], str) def test_update_attributes_no_frequency(product): """Test ``_update_attributes``.""" - product._cubes[0].attributes.pop('frequency') - product.attributes.pop('frequency') + product._cubes[0].attributes.pop("frequency") + product.attributes.pop("frequency") product._update_attributes() assert product.attributes == { - 'filename': Path('file.nc'), - 'standard_name': 'air_temperature', - 'long_name': 'Near-Surface Air Temperature', - 'short_name': 'tas', - 'units': 'K', + "filename": Path("file.nc"), + "standard_name": "air_temperature", + "long_name": "Near-Surface Air Temperature", + "short_name": "tas", + "units": "K", } - assert isinstance(product.attributes['units'], str) + assert isinstance(product.attributes["units"], str) def test_close_no_cubes(): @@ -147,11 +148,11 @@ def test_close(): assert product._cubes is None -@mock.patch('esmvalcore.preprocessor.preprocess', autospec=True) +@mock.patch("esmvalcore.preprocessor.preprocess", autospec=True) def test_save(mock_preprocess): """Test ``save``.""" product = mock.create_autospec(PreprocessorFile, instance=True) - product.settings = {'save': {}} + product.settings = {"save": {}} product._cubes = mock.sentinel.cubes product._input_files = mock.sentinel.input_files @@ -159,6 +160,6 @@ def test_save(mock_preprocess): assert mock_preprocess.mock_calls == [ mock.call( - mock.sentinel.cubes, 'save', input_files=mock.sentinel.input_files + mock.sentinel.cubes, "save", input_files=mock.sentinel.input_files ), ] diff --git a/tests/unit/preprocessor/test_runner.py b/tests/unit/preprocessor/test_runner.py index e250bac099..2c9640e866 100644 --- a/tests/unit/preprocessor/test_runner.py +++ b/tests/unit/preprocessor/test_runner.py @@ -8,30 +8,38 @@ def test_first_argument_name(): """Check that the input type of all preprocessor functions is valid.""" - valid_itypes = ('file', 'files', 'cube', 'cubes', 'products', - 'input_products') + valid_itypes = ( + "file", + "files", + "cube", + "cubes", + "products", + "input_products", + ) for step in esmvalcore.preprocessor.DEFAULT_ORDER: itype = esmvalcore.preprocessor._get_itype(step) assert itype in valid_itypes, ( "Invalid preprocessor function definition {}, first argument " - "should be one of {} but is {}".format(step, valid_itypes, itype)) + "should be one of {} but is {}".format(step, valid_itypes, itype) + ) def test_multi_model_exist(): assert esmvalcore.preprocessor.MULTI_MODEL_FUNCTIONS.issubset( - set(esmvalcore.preprocessor.DEFAULT_ORDER)) + set(esmvalcore.preprocessor.DEFAULT_ORDER) + ) -@pytest.mark.parametrize('debug', [False, True]) +@pytest.mark.parametrize("debug", [False, True]) def test_preprocess_debug(mocker, debug): - in_cube = iris.cube.Cube([1], var_name='tas') - out_cube = iris.cube.Cube([2], var_name='tas') + in_cube = iris.cube.Cube([1], var_name="tas") + out_cube = iris.cube.Cube([2], var_name="tas") items = [in_cube] result = [out_cube] - step = 'annual_statistics' - input_files = [Path('/path/to/input.nc')] - output_file = Path('/path/to/output.nc') + step = 
"annual_statistics" + input_files = [Path("/path/to/input.nc")] + output_file = Path("/path/to/output.nc") mock_annual_statistics = mocker.create_autospec( esmvalcore.preprocessor.annual_statistics, @@ -39,10 +47,9 @@ def test_preprocess_debug(mocker, debug): ) mock_save = mocker.create_autospec(esmvalcore.preprocessor.save) mocker.patch( - 'esmvalcore.preprocessor.annual_statistics', - new=mock_annual_statistics + "esmvalcore.preprocessor.annual_statistics", new=mock_annual_statistics ) - mocker.patch('esmvalcore.preprocessor.save', new=mock_save) + mocker.patch("esmvalcore.preprocessor.save", new=mock_save) esmvalcore.preprocessor.preprocess( items, @@ -50,12 +57,14 @@ def test_preprocess_debug(mocker, debug): input_files=input_files, output_file=output_file, debug=debug, - operator='mean', + operator="mean", ) esmvalcore.preprocessor.annual_statistics.assert_called_with( - in_cube, operator='mean') + in_cube, operator="mean" + ) if debug: esmvalcore.preprocessor.save.assert_called_with( - result, '/path/to/output/00_annual_statistics.nc') + result, "/path/to/output/00_annual_statistics.nc" + ) else: esmvalcore.preprocessor.save.assert_not_called() diff --git a/tests/unit/preprocessor/test_shared.py b/tests/unit/preprocessor/test_shared.py index 4f70b2b419..02a48991ba 100644 --- a/tests/unit/preprocessor/test_shared.py +++ b/tests/unit/preprocessor/test_shared.py @@ -1,4 +1,5 @@ """Unit tests for `esmvalcore.preprocessor._shared`.""" + import inspect import warnings @@ -13,7 +14,7 @@ from esmvalcore.exceptions import ESMValCoreDeprecationWarning from esmvalcore.preprocessor import PreprocessorFile from esmvalcore.preprocessor._shared import ( - _get_area_weights, + _compute_area_weights, _group_products, aggregator_accept_weights, get_array_module, @@ -23,7 +24,7 @@ ) -@pytest.mark.parametrize('operator', ['gmean', 'GmEaN', 'GMEAN']) +@pytest.mark.parametrize("operator", ["gmean", "GmEaN", "GMEAN"]) def test_get_iris_aggregator_gmean(operator): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator) @@ -31,7 +32,7 @@ def test_get_iris_aggregator_gmean(operator): assert agg_kwargs == {} -@pytest.mark.parametrize('operator', ['hmean', 'hMeAn', 'HMEAN']) +@pytest.mark.parametrize("operator", ["hmean", "hMeAn", "HMEAN"]) def test_get_iris_aggregator_hmean(operator): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator) @@ -39,7 +40,7 @@ def test_get_iris_aggregator_hmean(operator): assert agg_kwargs == {} -@pytest.mark.parametrize('operator', ['max', 'mAx', 'MAX']) +@pytest.mark.parametrize("operator", ["max", "mAx", "MAX"]) def test_get_iris_aggregator_max(operator): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator) @@ -47,8 +48,8 @@ def test_get_iris_aggregator_max(operator): assert agg_kwargs == {} -@pytest.mark.parametrize('kwargs', [{}, {'weights': True}, {'weights': False}]) -@pytest.mark.parametrize('operator', ['mean', 'mEaN', 'MEAN']) +@pytest.mark.parametrize("kwargs", [{}, {"weights": True}, {"weights": False}]) +@pytest.mark.parametrize("operator", ["mean", "mEaN", "MEAN"]) def test_get_iris_aggregator_mean(operator, kwargs): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) @@ -56,8 +57,8 @@ def test_get_iris_aggregator_mean(operator, kwargs): assert agg_kwargs == kwargs -@pytest.mark.parametrize('kwargs', [{}, {'weights': True}]) -@pytest.mark.parametrize('operator', ['median', 'mEdIaN', 'MEDIAN']) +@pytest.mark.parametrize("kwargs", 
[{}, {"weights": True}]) +@pytest.mark.parametrize("operator", ["median", "mEdIaN", "MEDIAN"]) def test_get_iris_aggregator_median(operator, kwargs): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) @@ -65,7 +66,7 @@ def test_get_iris_aggregator_median(operator, kwargs): assert agg_kwargs == kwargs -@pytest.mark.parametrize('operator', ['min', 'MiN', 'MIN']) +@pytest.mark.parametrize("operator", ["min", "MiN", "MIN"]) def test_get_iris_aggregator_min(operator): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator) @@ -73,7 +74,7 @@ def test_get_iris_aggregator_min(operator): assert agg_kwargs == {} -@pytest.mark.parametrize('operator', ['peak', 'pEaK', 'PEAK']) +@pytest.mark.parametrize("operator", ["peak", "pEaK", "PEAK"]) def test_get_iris_aggregator_peak(operator): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator) @@ -81,8 +82,8 @@ def test_get_iris_aggregator_peak(operator): assert agg_kwargs == {} -@pytest.mark.parametrize('kwargs', [{'percent': 80.0, 'alphap': 0.5}]) -@pytest.mark.parametrize('operator', ['percentile', 'PERCENTILE']) +@pytest.mark.parametrize("kwargs", [{"percent": 80.0, "alphap": 0.5}]) +@pytest.mark.parametrize("operator", ["percentile", "PERCENTILE"]) def test_get_iris_aggregator_percentile(operator, kwargs): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) @@ -90,18 +91,18 @@ def test_get_iris_aggregator_percentile(operator, kwargs): assert agg_kwargs == kwargs -@pytest.mark.parametrize('kwargs', [{}, {'alphap': 0.5}]) -@pytest.mark.parametrize('operator', ['p10', 'P10.5']) +@pytest.mark.parametrize("kwargs", [{}, {"alphap": 0.5}]) +@pytest.mark.parametrize("operator", ["p10", "P10.5"]) def test_get_iris_aggregator_pxxyy(operator, kwargs): """Test ``get_iris_aggregator``.""" with pytest.warns(ESMValCoreDeprecationWarning): (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) assert agg == iris.analysis.PERCENTILE - assert agg_kwargs == {'percent': float(operator[1:]), **kwargs} + assert agg_kwargs == {"percent": float(operator[1:]), **kwargs} -@pytest.mark.parametrize('kwargs', [{}, {'weights': True}]) -@pytest.mark.parametrize('operator', ['rms', 'rMs', 'RMS']) +@pytest.mark.parametrize("kwargs", [{}, {"weights": True}]) +@pytest.mark.parametrize("operator", ["rms", "rMs", "RMS"]) def test_get_iris_aggregator_rms(operator, kwargs): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) @@ -109,25 +110,25 @@ def test_get_iris_aggregator_rms(operator, kwargs): assert agg_kwargs == kwargs -@pytest.mark.parametrize('kwargs', [{}, {'ddof': 1}]) -@pytest.mark.parametrize('operator', ['std', 'STD', 'std_dev', 'STD_DEV']) +@pytest.mark.parametrize("kwargs", [{}, {"ddof": 1}]) +@pytest.mark.parametrize("operator", ["std", "STD", "std_dev", "STD_DEV"]) def test_get_iris_aggregator_std(operator, kwargs): """Test ``get_iris_aggregator``.""" - if operator.lower() == 'std': + if operator.lower() == "std": with pytest.warns(ESMValCoreDeprecationWarning): (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) else: with warnings.catch_warnings(): warnings.simplefilter( - 'error', category=ESMValCoreDeprecationWarning + "error", category=ESMValCoreDeprecationWarning ) (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) assert agg == iris.analysis.STD_DEV assert agg_kwargs == kwargs -@pytest.mark.parametrize('kwargs', [{}, {'weights': True}]) 
-@pytest.mark.parametrize('operator', ['sum', 'SuM', 'SUM']) +@pytest.mark.parametrize("kwargs", [{}, {"weights": True}]) +@pytest.mark.parametrize("operator", ["sum", "SuM", "SUM"]) def test_get_iris_aggregator_sum(operator, kwargs): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) @@ -135,7 +136,7 @@ def test_get_iris_aggregator_sum(operator, kwargs): assert agg_kwargs == kwargs -@pytest.mark.parametrize('operator', ['variance', 'vArIaNcE', 'VARIANCE']) +@pytest.mark.parametrize("operator", ["variance", "vArIaNcE", "VARIANCE"]) def test_get_iris_aggregator_variance(operator): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator) @@ -143,8 +144,8 @@ def test_get_iris_aggregator_variance(operator): assert agg_kwargs == {} -@pytest.mark.parametrize('kwargs', [{'percent': 10, 'weights': True}]) -@pytest.mark.parametrize('operator', ['wpercentile', 'WPERCENTILE']) +@pytest.mark.parametrize("kwargs", [{"percent": 10, "weights": True}]) +@pytest.mark.parametrize("operator", ["wpercentile", "WPERCENTILE"]) def test_get_iris_aggregator_wpercentile(operator, kwargs): """Test ``get_iris_aggregator``.""" (agg, agg_kwargs) = get_iris_aggregator(operator, **kwargs) @@ -152,17 +153,17 @@ def test_get_iris_aggregator_wpercentile(operator, kwargs): assert agg_kwargs == kwargs -@pytest.mark.parametrize('operator', ['invalid', 'iNvAliD', 'INVALID']) +@pytest.mark.parametrize("operator", ["invalid", "iNvAliD", "INVALID"]) def test_get_iris_aggregator_invalid_operator_fail(operator): """Test ``get_iris_aggregator``.""" with pytest.raises(ValueError): get_iris_aggregator(operator) -@pytest.mark.parametrize('operator', ['mean', 'mEaN', 'MEAN']) +@pytest.mark.parametrize("operator", ["mean", "mEaN", "MEAN"]) def test_get_iris_aggregator_no_aggregator_fail(operator, monkeypatch): """Test ``get_iris_aggregator``.""" - monkeypatch.setattr(iris.analysis, 'MEAN', 1) + monkeypatch.setattr(iris.analysis, "MEAN", 1) with pytest.raises(ValueError): get_iris_aggregator(operator) @@ -170,17 +171,17 @@ def test_get_iris_aggregator_no_aggregator_fail(operator, monkeypatch): def test_get_iris_aggregator_invalid_kwarg(): """Test ``get_iris_aggregator``.""" with pytest.raises(ValueError): - get_iris_aggregator('max', invalid_kwarg=1) + get_iris_aggregator("max", invalid_kwarg=1) def test_get_iris_aggregator_missing_kwarg(): """Test ``get_iris_aggregator``.""" with pytest.raises(ValueError): - get_iris_aggregator('percentile') + get_iris_aggregator("percentile") @pytest.mark.parametrize( - 'aggregator,result', + "aggregator,result", [ (iris.analysis.MEAN, True), (iris.analysis.SUM, True), @@ -208,7 +209,7 @@ def _dummy_func(obj, arg, kwarg=2.0): @pytest.mark.parametrize( - 'data,dtype', + "data,dtype", [ (np.array([1.0], dtype=np.float64), np.float64), (np.array([1.0], dtype=np.float32), np.float32), @@ -240,9 +241,9 @@ def test_preserve_float_dtype(data, dtype): if isinstance(data, Cube): assert result.has_lazy_data() == data.has_lazy_data() - assert _dummy_func.__name__ == '_dummy_func' + assert _dummy_func.__name__ == "_dummy_func" signature = inspect.signature(_dummy_func) - assert list(signature.parameters) == ['obj', 'arg', 'kwarg'] + assert list(signature.parameters) == ["obj", "arg", "kwarg"] def test_get_array_module_da(): @@ -261,28 +262,28 @@ def test_get_array_module_mixed(): def _create_sample_full_cube(): - cube = Cube(np.zeros((4, 180, 360)), var_name='co2', units='J') + cube = Cube(np.zeros((4, 180, 360)), var_name="co2", units="J") 
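    # Editorial note (not part of the upstream patch): this helper builds a
    # global (time, latitude, longitude) cube on a regular 1x1 degree grid
    # (4 time steps, 180 latitudes, 360 longitudes); the area-weight and
    # cell-area tests further below use it for both the lazy (dask) and the
    # eager (numpy) code paths.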
cube.add_dim_coord( iris.coords.DimCoord( - np.array([10., 40., 70., 110.]), - standard_name='time', - units=Unit('days since 1950-01-01 00:00:00', calendar='gregorian'), + np.array([10.0, 40.0, 70.0, 110.0]), + standard_name="time", + units=Unit("days since 1950-01-01 00:00:00", calendar="gregorian"), ), 0, ) cube.add_dim_coord( iris.coords.DimCoord( - np.arange(-90., 90., 1.), - standard_name='latitude', - units='degrees', + np.arange(-90.0, 90.0, 1.0), + standard_name="latitude", + units="degrees", ), 1, ) cube.add_dim_coord( iris.coords.DimCoord( - np.arange(0., 360., 1.), - standard_name='longitude', - units='degrees', + np.arange(0.0, 360.0, 1.0), + standard_name="longitude", + units="degrees", ), 2, ) @@ -294,13 +295,13 @@ def _create_sample_full_cube(): return cube -@pytest.mark.parametrize('lazy', [True, False]) -def test_get_area_weights(lazy): - """Test _get_area_weights.""" +@pytest.mark.parametrize("lazy", [True, False]) +def test_compute_area_weights(lazy): + """Test _compute_area_weights.""" cube = _create_sample_full_cube() if lazy: - cube.data = cube.lazy_data() - weights = _get_area_weights(cube) + cube.data = cube.lazy_data().rechunk((2, 180, 360)) + weights = _compute_area_weights(cube) if lazy: assert isinstance(weights, da.Array) assert weights.chunks == cube.lazy_data().chunks @@ -314,22 +315,22 @@ def test_get_area_weights(lazy): def test_group_products_string_list(): products = [ PreprocessorFile( - filename='A_B.nc', + filename="A_B.nc", attributes={ - 'project': 'A', - 'dataset': 'B', + "project": "A", + "dataset": "B", }, ), PreprocessorFile( - filename='A_C.nc', + filename="A_C.nc", attributes={ - 'project': 'A', - 'dataset': 'C', - } + "project": "A", + "dataset": "C", + }, ), ] - grouped_by_string = _group_products(products, 'project') - grouped_by_list = _group_products(products, ['project']) + grouped_by_string = _group_products(products, "project") + grouped_by_list = _group_products(products, ["project"]) assert grouped_by_list == grouped_by_string @@ -337,13 +338,13 @@ def test_group_products_string_list(): def test_try_adding_calculated_cell_area(): """Test ``try_adding_calculated_cell_area``.""" cube = _create_sample_full_cube() - cube.coord('latitude').rename('grid_latitude') - cube.coord('longitude').rename('grid_longitude') - lat = AuxCoord(np.zeros((180, 360)), standard_name='latitude') - lon = AuxCoord(np.zeros((180, 360)), standard_name='longitude') + cube.coord("latitude").rename("grid_latitude") + cube.coord("longitude").rename("grid_longitude") + lat = AuxCoord(np.zeros((180, 360)), standard_name="latitude") + lon = AuxCoord(np.zeros((180, 360)), standard_name="longitude") cube.add_aux_coord(lat, (1, 2)) cube.add_aux_coord(lon, (1, 2)) try_adding_calculated_cell_area(cube) - assert cube.cell_measures('cell_area') + assert cube.cell_measures("cell_area") diff --git a/tests/unit/provenance/test_trackedfile.py b/tests/unit/provenance/test_trackedfile.py index 4e41f46cd2..5af28361d7 100644 --- a/tests/unit/provenance/test_trackedfile.py +++ b/tests/unit/provenance/test_trackedfile.py @@ -7,37 +7,37 @@ @pytest.fixture def tracked_file(): file = TrackedFile( - filename='/path/to/file.nc', - attributes={'a': 'A'}, - prov_filename='/original/path/to/file.nc', + filename="/path/to/file.nc", + attributes={"a": "A"}, + prov_filename="/original/path/to/file.nc", ) return file def test_init(tracked_file): """Test `esmvalcore._provenance.TrackedFile.__init__`.""" - assert tracked_file.filename == '/path/to/file.nc' - assert tracked_file.attributes == {'a': 
'A'}
-    assert tracked_file.prov_filename == '/original/path/to/file.nc'
+    assert tracked_file.filename == "/path/to/file.nc"
+    assert tracked_file.attributes == {"a": "A"}
+    assert tracked_file.prov_filename == "/original/path/to/file.nc"


 def test_initialize_provenance(tracked_file):
     """Test `esmvalcore._provenance.TrackedFile.initialize_provenance`."""
     provenance = ProvDocument()
-    provenance.add_namespace('task', uri=ESMVALTOOL_URI_PREFIX + 'task')
-    activity = provenance.activity('task:test-task-name')
+    provenance.add_namespace("task", uri=ESMVALTOOL_URI_PREFIX + "task")
+    activity = provenance.activity("task:test-task-name")

     tracked_file.initialize_provenance(activity)

     assert isinstance(tracked_file.provenance, ProvDocument)
     assert tracked_file.activity == activity
-    assert str(tracked_file.entity.identifier) == 'file:/path/to/file.nc'
+    assert str(tracked_file.entity.identifier) == "file:/path/to/file.nc"


 def test_copy_provenance(tracked_file):
     """Test `esmvalcore._provenance.TrackedFile.copy_provenance`."""
     provenance = ProvDocument()
-    provenance.add_namespace('task', uri=ESMVALTOOL_URI_PREFIX + 'task')
-    activity = provenance.activity('task:test-task-name')
+    provenance.add_namespace("task", uri=ESMVALTOOL_URI_PREFIX + "task")
+    activity = provenance.activity("task:test-task-name")

     tracked_file.initialize_provenance(activity)
diff --git a/tests/unit/recipe/test_from_datasets.py b/tests/unit/recipe/test_from_datasets.py
index aacca5117d..599119195a 100644
--- a/tests/unit/recipe/test_from_datasets.py
+++ b/tests/unit/recipe/test_from_datasets.py
@@ -17,11 +17,11 @@
 def test_to_frozen():
     data = {
-        'abc': 'x',
-        'a': {
-            'b': [
-                'd',
-                'c',
+        "abc": "x",
+        "a": {
+            "b": [
+                "d",
+                "c",
             ],
         },
     }
@@ -29,16 +29,18 @@
     result = _to_frozen(data)
     expected = (
         (
-            'a',
-            ((
-                'b',
+            "a",
+            (
                 (
-                    'c',
-                    'd',
+                    "b",
+                    (
+                        "c",
+                        "d",
+                    ),
                 ),
-            ), ),
+            ),
         ),
-        ('abc', 'x'),
+        ("abc", "x"),
     )

     assert result == expected


 def test_datasets_to_recipe():
     dataset = Dataset(
-        short_name='tas',
-        dataset='dataset1',
-        diagnostic='diagnostic1',
+        short_name="tas",
+        dataset="dataset1",
+        diagnostic="diagnostic1",
     )
     datasets = [dataset]
-    datasets.append(dataset.copy(short_name='pr'))
-    datasets.append(dataset.copy(dataset='dataset2'))
-    datasets.append(dataset.copy(diagnostic='diagnostic2'))
-    datasets.append(dataset.copy(diagnostic='diagnostic2', dataset='dataset3'))
+    datasets.append(dataset.copy(short_name="pr"))
+    datasets.append(dataset.copy(dataset="dataset2"))
+    datasets.append(dataset.copy(diagnostic="diagnostic2"))
+    datasets.append(dataset.copy(diagnostic="diagnostic2", dataset="dataset3"))

     recipe_txt = textwrap.dedent("""
         datasets:
@@ -92,9 +94,9 @@ def test_update_datasets_in_recipe():
     existing_recipe = yaml.safe_load(existing_recipe_txt)

     dataset = Dataset(
-        short_name='ta',
-        dataset='dataset2',
-        diagnostic='diagnostic1',
+        short_name="ta",
+        dataset="dataset2",
+        diagnostic="diagnostic1",
     )

     recipe_txt = textwrap.dedent("""
@@ -114,12 +116,12 @@ def test_update_datasets_in_recipe():

 def test_supplementary_datasets_to_recipe():
     dataset = Dataset(
-        short_name='ta',
-        dataset='dataset1',
+        short_name="ta",
+        dataset="dataset1",
     )
-    dataset['diagnostic'] = 'diagnostic1'
-    dataset['variable_group'] = 'group1'
-    dataset.add_supplementary(short_name='areacella')
+    dataset["diagnostic"] = "diagnostic1"
+    dataset["variable_group"] = "group1"
+    dataset.add_supplementary(short_name="areacella")

     recipe_txt = textwrap.dedent("""
datasets: @@ -139,22 +141,22 @@ def test_supplementary_datasets_to_recipe(): def test_datasets_to_recipe_group_ensembles(): datasets = [ Dataset( - short_name='ta', - ensemble='r1i1p1f1', - dataset='dataset1', + short_name="ta", + ensemble="r1i1p1f1", + dataset="dataset1", ), Dataset( - short_name='ta', - ensemble='r2i1p1f1', - dataset='dataset1', + short_name="ta", + ensemble="r2i1p1f1", + dataset="dataset1", ), Dataset( - short_name='ta', - dataset='dataset2', + short_name="ta", + dataset="dataset2", ), ] for dataset in datasets: - dataset.facets['diagnostic'] = 'diagnostic1' + dataset.facets["diagnostic"] = "diagnostic1" recipe_txt = textwrap.dedent(""" datasets: - {dataset: 'dataset1', ensemble: 'r(1:2)i1p1f1'} @@ -170,7 +172,7 @@ def test_datasets_to_recipe_group_ensembles(): def test_datasets_to_recipe_no_diagnostic(): - dataset = Dataset(short_name='tas') + dataset = Dataset(short_name="tas") msg = "'diagnostic' facet missing from .*" with pytest.raises(RecipeError, match=msg): datasets_to_recipe([dataset]) @@ -178,16 +180,15 @@ def test_datasets_to_recipe_no_diagnostic(): def test_group_identical_facets(): variable = { - 'short_name': - 'tas', - 'additional_datasets': [ + "short_name": "tas", + "additional_datasets": [ { - 'dataset': 'dataset1', - 'ensemble': 'r1i1p1f1', + "dataset": "dataset1", + "ensemble": "r1i1p1f1", }, { - 'dataset': 'dataset2', - 'ensemble': 'r1i1p1f1', + "dataset": "dataset2", + "ensemble": "r1i1p1f1", }, ], } @@ -195,16 +196,14 @@ def test_group_identical_facets(): result = _group_identical_facets(variable) expected = { - 'short_name': - 'tas', - 'ensemble': - 'r1i1p1f1', - 'additional_datasets': [ + "short_name": "tas", + "ensemble": "r1i1p1f1", + "additional_datasets": [ { - 'dataset': 'dataset1', + "dataset": "dataset1", }, { - 'dataset': 'dataset2', + "dataset": "dataset2", }, ], } @@ -215,33 +214,33 @@ def test_group_identical_facets(): def test_group_ensemble_members(): datasets = [ Dataset( - dataset='dataset1', - ensemble='r1i1p1f1', - grid='gn', + dataset="dataset1", + ensemble="r1i1p1f1", + grid="gn", ), Dataset( - dataset='dataset1', - ensemble='r1i1p1f1', - grid='gr1', + dataset="dataset1", + ensemble="r1i1p1f1", + grid="gr1", ), Dataset( - dataset='dataset1', - ensemble='r2i1p1f1', - grid='gn', + dataset="dataset1", + ensemble="r2i1p1f1", + grid="gn", ), ] result = _group_ensemble_members(ds.facets for ds in datasets) print(result) assert result == [ { - 'dataset': 'dataset1', - 'ensemble': 'r(1:2)i1p1f1', - 'grid': 'gn', + "dataset": "dataset1", + "ensemble": "r(1:2)i1p1f1", + "grid": "gn", }, { - 'dataset': 'dataset1', - 'ensemble': 'r1i1p1f1', - 'grid': 'gr1', + "dataset": "dataset1", + "ensemble": "r1i1p1f1", + "grid": "gr1", }, ] @@ -249,38 +248,38 @@ def test_group_ensemble_members(): def test_group_ensemble_members_mix_of_versions(): datasets = [ Dataset( - dataset='dataset1', - ensemble='r1i1p1f1', - exp=['historical', 'ssp585'], - version='v1', + dataset="dataset1", + ensemble="r1i1p1f1", + exp=["historical", "ssp585"], + version="v1", ), Dataset( - dataset='dataset1', - ensemble='r2i1p1f1', - exp=['historical', 'ssp585'], - version='v1', + dataset="dataset1", + ensemble="r2i1p1f1", + exp=["historical", "ssp585"], + version="v1", ), Dataset( - dataset='dataset1', - ensemble='r3i1p1f1', - exp=['historical', 'ssp585'], - version=['v1', 'v2'], + dataset="dataset1", + ensemble="r3i1p1f1", + exp=["historical", "ssp585"], + version=["v1", "v2"], ), ] result = _group_ensemble_members(ds.facets for ds in datasets) print(result) assert result 
== [ { - 'dataset': 'dataset1', - 'ensemble': 'r3i1p1f1', - 'exp': ['historical', 'ssp585'], - 'version': ['v1', 'v2'], + "dataset": "dataset1", + "ensemble": "r3i1p1f1", + "exp": ["historical", "ssp585"], + "version": ["v1", "v2"], }, { - 'dataset': 'dataset1', - 'ensemble': 'r(1:2)i1p1f1', - 'exp': ['historical', 'ssp585'], - 'version': 'v1', + "dataset": "dataset1", + "ensemble": "r(1:2)i1p1f1", + "exp": ["historical", "ssp585"], + "version": "v1", }, ] @@ -294,7 +293,7 @@ def test_group_ensembles_cmip5(): "r1i2p1", ] groups = _group_ensemble_names(ensembles) - expected = ['r1i2p1', 'r(1:4)i1p1'] + expected = ["r1i2p1", "r(1:4)i1p1"] print(groups) print(expected) assert groups == expected @@ -309,7 +308,7 @@ def test_group_ensembles_cmip6(): "r3i1p1f1", ] groups = _group_ensemble_names(ensembles) - expected = ['r1i1p1f1', 'r(3:4)i1p(1:2)f1'] + expected = ["r1i1p1f1", "r(3:4)i1p(1:2)f1"] print(groups) print(expected) assert groups == expected @@ -317,47 +316,36 @@ def test_group_ensembles_cmip6(): def test_move_one_level_up_diagnostic(): diagnostic = { - 'variables': { - 'tas': { - 'mip': - 'Amon', - 'additional_datasets': [ - { - 'dataset': 'dataset1' - }, - { - 'dataset': 'dataset2' - }, + "variables": { + "tas": { + "mip": "Amon", + "additional_datasets": [ + {"dataset": "dataset1"}, + {"dataset": "dataset2"}, ], }, - 'pr': { - 'additional_datasets': [ - { - 'dataset': 'dataset1' - }, + "pr": { + "additional_datasets": [ + {"dataset": "dataset1"}, ], }, }, } - _move_one_level_up(diagnostic, 'variables', 'additional_datasets') + _move_one_level_up(diagnostic, "variables", "additional_datasets") expected = { - 'variables': { - 'tas': { - 'mip': 'Amon', - 'additional_datasets': [ - { - 'dataset': 'dataset2' - }, + "variables": { + "tas": { + "mip": "Amon", + "additional_datasets": [ + {"dataset": "dataset2"}, ], }, - 'pr': {}, + "pr": {}, }, - 'additional_datasets': [ - { - 'dataset': 'dataset1' - }, + "additional_datasets": [ + {"dataset": "dataset1"}, ], } @@ -366,41 +354,33 @@ def test_move_one_level_up_diagnostic(): def test_move_one_level_up_recipe(): recipe = { - 'diagnostics': { - 'diagnostic1': { - 'variables': { - 'tas': { - 'mip': 'Amon', + "diagnostics": { + "diagnostic1": { + "variables": { + "tas": { + "mip": "Amon", }, }, - 'additional_datasets': [ - { - 'dataset': 'dataset1' - }, - { - 'dataset': 'dataset2' - }, + "additional_datasets": [ + {"dataset": "dataset1"}, + {"dataset": "dataset2"}, ], }, }, } - _move_one_level_up(recipe, 'diagnostics', 'datasets') + _move_one_level_up(recipe, "diagnostics", "datasets") expected = { - 'datasets': [ - { - 'dataset': 'dataset1' - }, - { - 'dataset': 'dataset2' - }, + "datasets": [ + {"dataset": "dataset1"}, + {"dataset": "dataset2"}, ], - 'diagnostics': { - 'diagnostic1': { - 'variables': { - 'tas': { - 'mip': 'Amon', + "diagnostics": { + "diagnostic1": { + "variables": { + "tas": { + "mip": "Amon", }, }, }, diff --git a/tests/unit/recipe/test_io.py b/tests/unit/recipe/test_io.py index 50fae1a396..85d8a0a57c 100644 --- a/tests/unit/recipe/test_io.py +++ b/tests/unit/recipe/test_io.py @@ -2,16 +2,16 @@ def test_copy_dict(): - a = {'a': 1} - b = {'b': a, 'c': a} + a = {"a": 1} + b = {"b": a, "c": a} result = _io._copy(b) - assert result['b'] == result['c'] - assert result['b'] is not result['c'] + assert result["b"] == result["c"] + assert result["b"] is not result["c"] def test_copy_list(): - a = ['a'] - b = {'b': a, 'c': a} + a = ["a"] + b = {"b": a, "c": a} result = _io._copy(b) - assert result['b'] == result['c'] - assert 
result['b'] is not result['c'] + assert result["b"] == result["c"] + assert result["b"] is not result["c"] diff --git a/tests/unit/recipe/test_recipe.py b/tests/unit/recipe/test_recipe.py index 5e73b2fb92..4dd142bc43 100644 --- a/tests/unit/recipe/test_recipe.py +++ b/tests/unit/recipe/test_recipe.py @@ -25,33 +25,21 @@ def __init__(self, cfg, diagnostics): self.diagnostics = diagnostics -VAR_A = {'dataset': 'A'} -VAR_A_REF_A = {'dataset': 'A', 'reference_dataset': 'A'} -VAR_A_REF_B = {'dataset': 'A', 'reference_dataset': 'B'} +VAR_A = {"dataset": "A"} +VAR_A_REF_A = {"dataset": "A", "reference_dataset": "A"} +VAR_A_REF_B = {"dataset": "A", "reference_dataset": "B"} TEST_ALLOW_SKIPPING = [ - (VAR_A, { - 'skip_nonexistent': False - }, False), - (VAR_A, { - 'skip_nonexistent': True - }, True), - (VAR_A_REF_A, { - 'skip_nonexistent': False - }, False), - (VAR_A_REF_A, { - 'skip_nonexistent': True - }, False), - (VAR_A_REF_B, { - 'skip_nonexistent': False - }, False), - (VAR_A_REF_B, { - 'skip_nonexistent': True - }, True), + (VAR_A, {"skip_nonexistent": False}, False), + (VAR_A, {"skip_nonexistent": True}, True), + (VAR_A_REF_A, {"skip_nonexistent": False}, False), + (VAR_A_REF_A, {"skip_nonexistent": True}, False), + (VAR_A_REF_B, {"skip_nonexistent": False}, False), + (VAR_A_REF_B, {"skip_nonexistent": True}, True), ] -@pytest.mark.parametrize('var,cfg,out', TEST_ALLOW_SKIPPING) +@pytest.mark.parametrize("var,cfg,out", TEST_ALLOW_SKIPPING) def test_allow_skipping(var, cfg, out): """Test ``_allow_skipping``.""" dataset = Dataset(**var) @@ -63,14 +51,14 @@ def test_allow_skipping(var, cfg, out): def test_resume_preprocessor_tasks(mocker, tmp_path): """Test that `Recipe._create_preprocessor_tasks` creates a ResumeTask.""" # Create a mock ResumeTask class that returns a mock instance - resume_task_cls = mocker.patch.object(_recipe, 'ResumeTask', autospec=True) + resume_task_cls = mocker.patch.object(_recipe, "ResumeTask", autospec=True) resume_task = mocker.Mock() resume_task_cls.return_value = resume_task # Create a mock output directory of a previous run - diagnostic_name = 'diagnostic_name' - prev_output = tmp_path / 'recipe_test_20200101_000000' - prev_preproc_dir = prev_output / 'preproc' / diagnostic_name / 'tas' + diagnostic_name = "diagnostic_name" + prev_output = tmp_path / "recipe_test_20200101_000000" + prev_preproc_dir = prev_output / "preproc" / diagnostic_name / "tas" prev_preproc_dir.mkdir(parents=True) # Create a mock recipe @@ -80,17 +68,18 @@ class Session(dict): pass session = Session(resume_from=[prev_output]) - session.preproc_dir = Path('/path/to/recipe_test_20210101_000000/preproc') + session.preproc_dir = Path("/path/to/recipe_test_20210101_000000/preproc") recipe.session = session # Create a very simplified list of datasets diagnostic = { - 'datasets': [Dataset(short_name='tas', variable_group='tas')], + "datasets": [Dataset(short_name="tas", variable_group="tas")], } # Create tasks tasks, failed = _recipe.Recipe._create_preprocessor_tasks( - recipe, diagnostic_name, diagnostic, [], True) + recipe, diagnostic_name, diagnostic, [], True + ) assert tasks == [resume_task] assert not failed @@ -99,105 +88,105 @@ class Session(dict): def create_esgf_search_results(): """Prepare some fake ESGF search results.""" dataset_id = ( - 'CMIP6.CMIP.EC-Earth-Consortium.EC-Earth3.historical.r1i1p1f1' - '.Amon.tas.gr.v20200310|esgf-data1.llnl.gov' + "CMIP6.CMIP.EC-Earth-Consortium.EC-Earth3.historical.r1i1p1f1" + ".Amon.tas.gr.v20200310|esgf-data1.llnl.gov" ) dataset_id_template = ( - 
'%(mip_era)s.%(activity_drs)s.%(institution_id)s.' - '%(source_id)s.%(experiment_id)s.%(member_id)s.%(table_id)s.' - '%(variable_id)s.%(grid_label)s' + "%(mip_era)s.%(activity_drs)s.%(institution_id)s." + "%(source_id)s.%(experiment_id)s.%(member_id)s.%(table_id)s." + "%(variable_id)s.%(grid_label)s" + ) + file0 = ESGFFile( + [ + pyesgf.search.results.FileResult( + json={ + "dataset_id": dataset_id, + "dataset_id_template_": [dataset_id_template], + "project": ["CMIP6"], + "size": 4745571, + "source_id": ["EC-Earth3"], + "title": "tas_Amon_EC-Earth3_historical_r1i1p1f1_gr_185001-185012.nc", + "url": [ + "http://esgf-data1.llnl.gov/thredds/fileServer/css03_data" + "/CMIP6/CMIP/EC-Earth-Consortium/EC-Earth3/historical" + "/r1i1p1f1/Amon/tas/gr/v20200310/tas_Amon_EC-Earth3" + "_historical_r1i1p1f1_gr_185001-185012.nc" + "|application/netcdf|HTTPServer", + ], + }, + context=None, + ) + ] + ) + file1 = ESGFFile( + [ + pyesgf.search.results.FileResult( + { + "dataset_id": dataset_id, + "dataset_id_template_": [dataset_id_template], + "project": ["CMIP6"], + "size": 4740192, + "source_id": ["EC-Earth3"], + "title": "tas_Amon_EC-Earth3_historical_r1i1p1f1_gr_185101-185112.nc", + "url": [ + "http://esgf-data1.llnl.gov/thredds/fileServer/css03_data" + "/CMIP6/CMIP/EC-Earth-Consortium/EC-Earth3/historical" + "/r1i1p1f1/Amon/tas/gr/v20200310/tas_Amon_EC-Earth3" + "_historical_r1i1p1f1_gr_185101-185112.nc" + "|application/netcdf|HTTPServer", + ], + }, + context=None, + ) + ] ) - file0 = ESGFFile([ - pyesgf.search.results.FileResult( - json={ - 'dataset_id': dataset_id, - 'dataset_id_template_': [dataset_id_template], - 'project': ['CMIP6'], - 'size': - 4745571, - 'source_id': ['EC-Earth3'], - 'title': - 'tas_Amon_EC-Earth3_historical_r1i1p1f1_gr_185001-185012.nc', - 'url': [ - 'http://esgf-data1.llnl.gov/thredds/fileServer/css03_data' - '/CMIP6/CMIP/EC-Earth-Consortium/EC-Earth3/historical' - '/r1i1p1f1/Amon/tas/gr/v20200310/tas_Amon_EC-Earth3' - '_historical_r1i1p1f1_gr_185001-185012.nc' - '|application/netcdf|HTTPServer', - ], - }, - context=None, - ) - ]) - file1 = ESGFFile([ - pyesgf.search.results.FileResult( - { - 'dataset_id': dataset_id, - 'dataset_id_template_': [dataset_id_template], - 'project': ['CMIP6'], - 'size': - 4740192, - 'source_id': ['EC-Earth3'], - 'title': - 'tas_Amon_EC-Earth3_historical_r1i1p1f1_gr_185101-185112.nc', - 'url': [ - 'http://esgf-data1.llnl.gov/thredds/fileServer/css03_data' - '/CMIP6/CMIP/EC-Earth-Consortium/EC-Earth3/historical' - '/r1i1p1f1/Amon/tas/gr/v20200310/tas_Amon_EC-Earth3' - '_historical_r1i1p1f1_gr_185101-185112.nc' - '|application/netcdf|HTTPServer', - ], - }, - context=None, - ) - ]) return [file0, file1] -@pytest.mark.parametrize("local_availability", ['all', 'partial', 'none']) +@pytest.mark.parametrize("local_availability", ["all", "partial", "none"]) def test_schedule_for_download(monkeypatch, tmp_path, local_availability): """Test that `_schedule_for_download` updates DOWNLOAD_FILES.""" esgf_files = create_esgf_search_results() - download_dir = tmp_path / 'download_dir' - local_dir = Path('/local_dir') + download_dir = tmp_path / "download_dir" + local_dir = Path("/local_dir") # Local files can cover the entire period, part of it, or nothing local_file_options = { - 'all': [f.local_file(local_dir) for f in esgf_files], - 'partial': [esgf_files[1].local_file(local_dir)], - 'none': [], + "all": [f.local_file(local_dir) for f in esgf_files], + "partial": [esgf_files[1].local_file(local_dir)], + "none": [], } local_files = 
local_file_options[local_availability]

     variable = {
-        'project': 'CMIP6',
-        'mip': 'Amon',
-        'frequency': 'mon',
-        'short_name': 'tas',
-        'dataset': 'EC.-Earth3',
-        'exp': 'historical',
-        'ensemble': 'r1i1p1f1',
-        'grid': 'gr',
-        'timerange': '1850/1851',
-        'alias': 'CMIP6_EC-Eeath3_tas',
+        "project": "CMIP6",
+        "mip": "Amon",
+        "frequency": "mon",
+        "short_name": "tas",
+        "dataset": "EC-Earth3",
+        "exp": "historical",
+        "ensemble": "r1i1p1f1",
+        "grid": "gr",
+        "timerange": "1850/1851",
+        "alias": "CMIP6_EC-Earth3_tas",
     }
     dataset = Dataset(**variable)
     files = {
-        'all': local_files,
-        'partial': local_files + esgf_files[:1],
-        'none': esgf_files,
+        "all": local_files,
+        "partial": local_files + esgf_files[:1],
+        "none": esgf_files,
     }
-    dataset.session = {'download_dir': download_dir}
+    dataset.session = {"download_dir": download_dir}
     dataset.files = list(files[local_availability])

-    monkeypatch.setattr(_recipe, 'DOWNLOAD_FILES', set())
+    monkeypatch.setattr(_recipe, "DOWNLOAD_FILES", set())
     _recipe._schedule_for_download([dataset])
     print(esgf_files)

     expected = {
-        'all': set(),
-        'partial': set(esgf_files[:1]),
-        'none': set(esgf_files),
+        "all": set(),
+        "partial": set(esgf_files[:1]),
+        "none": set(esgf_files),
     }
     assert _recipe.DOWNLOAD_FILES == expected[local_availability]


@@ -207,12 +196,12 @@ def test_write_html_summary(mocker, caplog):
     message = "Failed to look up references."
     recipe_output = mocker.patch.object(
         esmvalcore.experimental.recipe_output,
-        'RecipeOutput',
+        "RecipeOutput",
         create_autospec=True,
     )
     recipe_output.from_core_recipe_output.side_effect = LookupError(message)
     mock_recipe = mocker.create_autospec(_recipe.Recipe, instance=True)
-    caplog.set_level('WARNING')
+    caplog.set_level("WARNING")

     _recipe.Recipe.write_html_summary(mock_recipe)

@@ -224,390 +213,471 @@ def test_multi_model_filename_overlap():
     """Test timerange in multi-model filename is correct."""
     cube = iris.cube.Cube(np.array([1]))
     products = [
-        PreprocessorFile(cube, 'A', {'timerange': '19900101/19911010'}),
-        PreprocessorFile(cube, 'B', {'timerange': '19891212/19910505'}),
-        PreprocessorFile(cube, 'C', {'timerange': '19910202/19921111'}),
+        PreprocessorFile(cube, "A", {"timerange": "19900101/19911010"}),
+        PreprocessorFile(cube, "B", {"timerange": "19891212/19910505"}),
+        PreprocessorFile(cube, "C", {"timerange": "19910202/19921111"}),
     ]
     settings = {}  # the default setting for "span" is "overlap"
     attributes = _recipe._get_common_attributes(products, settings)
-    assert 'timerange' in attributes
-    assert attributes['timerange'] == '19910202/19910505'
-    assert attributes['start_year'] == 1991
-    assert attributes['end_year'] == 1991
+    assert "timerange" in attributes
+    assert attributes["timerange"] == "19910202/19910505"
+    assert attributes["start_year"] == 1991
+    assert attributes["end_year"] == 1991


 def test_multi_model_filename_full():
     """Test timerange in multi-model filename is correct."""
     cube = iris.cube.Cube(np.array([1]))
     products = [
-        PreprocessorFile(cube, 'A', {'timerange': '19900101/19911010'}),
-        PreprocessorFile(cube, 'B', {'timerange': '19891212/19910505'}),
-        PreprocessorFile(cube, 'C', {'timerange': '19910202/19921111'}),
+        PreprocessorFile(cube, "A", {"timerange": "19900101/19911010"}),
+        PreprocessorFile(cube, "B", {"timerange": "19891212/19910505"}),
+        PreprocessorFile(cube, "C", {"timerange": "19910202/19921111"}),
     ]
-    settings = {'span': 'full'}
+    settings = {"span": "full"}
     attributes = _recipe._get_common_attributes(products, settings)
-    assert 'timerange' in attributes
-    assert 
attributes['timerange'] == '19891212/19921111' - assert attributes['start_year'] == 1989 - assert attributes['end_year'] == 1992 + assert "timerange" in attributes + assert attributes["timerange"] == "19891212/19921111" + assert attributes["start_year"] == 1989 + assert attributes["end_year"] == 1992 def test_update_multiproduct_multi_model_statistics(): """Test ``_update_multiproduct``.""" - settings = {'multi_model_statistics': {'statistics': ['mean', 'std_dev']}} + settings = {"multi_model_statistics": {"statistics": ["mean", "std_dev"]}} common_attributes = { - 'project': 'CMIP6', - 'diagnostic': 'd', - 'variable_group': 'var', + "project": "CMIP6", + "diagnostic": "d", + "variable_group": "var", } cube = iris.cube.Cube(np.array([1])) products = [ - PreprocessorFile(cube, 'A', - attributes={'dataset': 'a', - 'timerange': '2000/2005', - **common_attributes}, - settings=settings), - PreprocessorFile(cube, 'B', - attributes={'dataset': 'b', - 'timerange': '2001/2004', - **common_attributes}, - settings=settings), - PreprocessorFile(cube, 'C', - attributes={'dataset': 'c', - 'timerange': '1999/2004', - **common_attributes}, - settings=settings), - PreprocessorFile(cube, 'D', - attributes={'dataset': 'd', - 'timerange': '2002/2010', - **common_attributes}, - settings=settings), + PreprocessorFile( + cube, + "A", + attributes={ + "dataset": "a", + "timerange": "2000/2005", + **common_attributes, + }, + settings=settings, + ), + PreprocessorFile( + cube, + "B", + attributes={ + "dataset": "b", + "timerange": "2001/2004", + **common_attributes, + }, + settings=settings, + ), + PreprocessorFile( + cube, + "C", + attributes={ + "dataset": "c", + "timerange": "1999/2004", + **common_attributes, + }, + settings=settings, + ), + PreprocessorFile( + cube, + "D", + attributes={ + "dataset": "d", + "timerange": "2002/2010", + **common_attributes, + }, + settings=settings, + ), ] - order = ('load', 'multi_model_statistics', 'save') - preproc_dir = '/preproc' - step = 'multi_model_statistics' + order = ("load", "multi_model_statistics", "save") + preproc_dir = "/preproc" + step = "multi_model_statistics" output, settings = _recipe._update_multiproduct( - products, order, preproc_dir, step) + products, order, preproc_dir, step + ) assert len(output) == 2 filenames = [p.filename for p in output] - assert Path( - '/preproc/d/var/CMIP6_MultiModelMean_2002-2004.nc') in filenames - assert Path( - '/preproc/d/var/CMIP6_MultiModelStd_Dev_2002-2004.nc') in filenames + assert ( + Path("/preproc/d/var/CMIP6_MultiModelMean_2002-2004.nc") in filenames + ) + assert ( + Path("/preproc/d/var/CMIP6_MultiModelStd_Dev_2002-2004.nc") + in filenames + ) for product in output: for attr in common_attributes: assert attr in product.attributes assert product.attributes[attr] == common_attributes[attr] - assert 'alias' in product.attributes - assert 'dataset' in product.attributes - assert 'multi_model_statistics' in product.attributes - assert 'timerange' in product.attributes - assert product.attributes['timerange'] == '2002/2004' - assert 'start_year' in product.attributes - assert product.attributes['start_year'] == 2002 - assert 'end_year' in product.attributes - assert product.attributes['end_year'] == 2004 - if 'MultiModelStd_Dev' in str(product.filename): - assert product.attributes['alias'] == 'MultiModelStd_Dev' - assert product.attributes['dataset'] == 'MultiModelStd_Dev' - assert (product.attributes['multi_model_statistics'] == - 'MultiModelStd_Dev') - elif 'MultiModelMean' in str(product.filename): - assert 
product.attributes['alias'] == 'MultiModelMean' - assert product.attributes['dataset'] == 'MultiModelMean' - assert (product.attributes['multi_model_statistics'] == - 'MultiModelMean') + assert "alias" in product.attributes + assert "dataset" in product.attributes + assert "multi_model_statistics" in product.attributes + assert "timerange" in product.attributes + assert product.attributes["timerange"] == "2002/2004" + assert "start_year" in product.attributes + assert product.attributes["start_year"] == 2002 + assert "end_year" in product.attributes + assert product.attributes["end_year"] == 2004 + if "MultiModelStd_Dev" in str(product.filename): + assert product.attributes["alias"] == "MultiModelStd_Dev" + assert product.attributes["dataset"] == "MultiModelStd_Dev" + assert ( + product.attributes["multi_model_statistics"] + == "MultiModelStd_Dev" + ) + elif "MultiModelMean" in str(product.filename): + assert product.attributes["alias"] == "MultiModelMean" + assert product.attributes["dataset"] == "MultiModelMean" + assert ( + product.attributes["multi_model_statistics"] + == "MultiModelMean" + ) assert len(settings) == 1 - output_products = settings['output_products'] + output_products = settings["output_products"] assert len(output_products) == 1 - stats = output_products[''] + stats = output_products[""] assert len(stats) == 2 - assert 'mean' in stats - assert 'std_dev' in stats - assert 'MultiModelMean' in str(stats['mean'].filename) - assert 'MultiModelStd_Dev' in str(stats['std_dev'].filename) + assert "mean" in stats + assert "std_dev" in stats + assert "MultiModelMean" in str(stats["mean"].filename) + assert "MultiModelStd_Dev" in str(stats["std_dev"].filename) def test_update_multiproduct_multi_model_statistics_percentile(): """Test ``_update_multiproduct``.""" settings = { - 'multi_model_statistics': { - 'statistics': [ - {'operator': 'percentile', 'percent': 5.0}, - {'operator': 'percentile', 'percent': 95.0}, + "multi_model_statistics": { + "statistics": [ + {"operator": "percentile", "percent": 5.0}, + {"operator": "percentile", "percent": 95.0}, ] }, } common_attributes = { - 'project': 'CMIP6', - 'diagnostic': 'd', - 'variable_group': 'var', + "project": "CMIP6", + "diagnostic": "d", + "variable_group": "var", } cube = iris.cube.Cube(np.array([1])) products = [ - PreprocessorFile(cube, 'A', - attributes={'dataset': 'a', - 'timerange': '2000/2005', - **common_attributes}, - settings=settings), - PreprocessorFile(cube, 'B', - attributes={'dataset': 'b', - 'timerange': '2001/2004', - **common_attributes}, - settings=settings), - PreprocessorFile(cube, 'C', - attributes={'dataset': 'c', - 'timerange': '1999/2004', - **common_attributes}, - settings=settings), - PreprocessorFile(cube, 'D', - attributes={'dataset': 'd', - 'timerange': '2002/2010', - **common_attributes}, - settings=settings), + PreprocessorFile( + cube, + "A", + attributes={ + "dataset": "a", + "timerange": "2000/2005", + **common_attributes, + }, + settings=settings, + ), + PreprocessorFile( + cube, + "B", + attributes={ + "dataset": "b", + "timerange": "2001/2004", + **common_attributes, + }, + settings=settings, + ), + PreprocessorFile( + cube, + "C", + attributes={ + "dataset": "c", + "timerange": "1999/2004", + **common_attributes, + }, + settings=settings, + ), + PreprocessorFile( + cube, + "D", + attributes={ + "dataset": "d", + "timerange": "2002/2010", + **common_attributes, + }, + settings=settings, + ), ] - order = ('load', 'multi_model_statistics', 'save') - preproc_dir = '/preproc' - step = 
'multi_model_statistics' + order = ("load", "multi_model_statistics", "save") + preproc_dir = "/preproc" + step = "multi_model_statistics" output, settings = _recipe._update_multiproduct( - products, order, preproc_dir, step) + products, order, preproc_dir, step + ) assert len(output) == 2 filenames = [p.filename for p in output] assert ( - Path('/preproc/d/var/CMIP6_MultiModelPercentile5-0_2002-2004.nc') in - filenames + Path("/preproc/d/var/CMIP6_MultiModelPercentile5-0_2002-2004.nc") + in filenames ) assert ( - Path('/preproc/d/var/CMIP6_MultiModelPercentile95-0_2002-2004.nc') in - filenames + Path("/preproc/d/var/CMIP6_MultiModelPercentile95-0_2002-2004.nc") + in filenames ) for product in output: for attr in common_attributes: assert attr in product.attributes assert product.attributes[attr] == common_attributes[attr] - assert 'alias' in product.attributes - assert 'dataset' in product.attributes - assert 'multi_model_statistics' in product.attributes - assert 'timerange' in product.attributes - assert product.attributes['timerange'] == '2002/2004' - assert 'start_year' in product.attributes - assert product.attributes['start_year'] == 2002 - assert 'end_year' in product.attributes - assert product.attributes['end_year'] == 2004 - if 'MultiModelPercentile5-0' in str(product.filename): - assert product.attributes['alias'] == 'MultiModelPercentile5-0' - assert product.attributes['dataset'] == 'MultiModelPercentile5-0' - assert (product.attributes['multi_model_statistics'] == - 'MultiModelPercentile5-0') - elif 'MultiModelPercentile95-0' in str(product.filename): - assert product.attributes['alias'] == 'MultiModelPercentile95-0' - assert product.attributes['dataset'] == 'MultiModelPercentile95-0' - assert (product.attributes['multi_model_statistics'] == - 'MultiModelPercentile95-0') + assert "alias" in product.attributes + assert "dataset" in product.attributes + assert "multi_model_statistics" in product.attributes + assert "timerange" in product.attributes + assert product.attributes["timerange"] == "2002/2004" + assert "start_year" in product.attributes + assert product.attributes["start_year"] == 2002 + assert "end_year" in product.attributes + assert product.attributes["end_year"] == 2004 + if "MultiModelPercentile5-0" in str(product.filename): + assert product.attributes["alias"] == "MultiModelPercentile5-0" + assert product.attributes["dataset"] == "MultiModelPercentile5-0" + assert ( + product.attributes["multi_model_statistics"] + == "MultiModelPercentile5-0" + ) + elif "MultiModelPercentile95-0" in str(product.filename): + assert product.attributes["alias"] == "MultiModelPercentile95-0" + assert product.attributes["dataset"] == "MultiModelPercentile95-0" + assert ( + product.attributes["multi_model_statistics"] + == "MultiModelPercentile95-0" + ) assert len(settings) == 1 - output_products = settings['output_products'] + output_products = settings["output_products"] assert len(output_products) == 1 - stats = output_products[''] + stats = output_products[""] assert len(stats) == 2 - assert 'percentile5.0' in stats - assert 'percentile95.0' in stats - assert 'MultiModelPercentile5-0' in str(stats['percentile5.0'].filename) - assert 'MultiModelPercentile95-0' in str(stats['percentile95.0'].filename) + assert "percentile5.0" in stats + assert "percentile95.0" in stats + assert "MultiModelPercentile5-0" in str(stats["percentile5.0"].filename) + assert "MultiModelPercentile95-0" in str(stats["percentile95.0"].filename) def test_update_multiproduct_ensemble_statistics(): """Test 
``_update_multiproduct``.""" - settings = {'ensemble_statistics': {'statistics': ['median'], - 'span': 'full'}} + settings = { + "ensemble_statistics": {"statistics": ["median"], "span": "full"} + } common_attributes = { - 'dataset': 'CanESM2', - 'project': 'CMIP6', - 'timerange': '2000/2000', - 'diagnostic': 'd', - 'variable_group': 'var', + "dataset": "CanESM2", + "project": "CMIP6", + "timerange": "2000/2000", + "diagnostic": "d", + "variable_group": "var", } cube = iris.cube.Cube(np.array([1])) products = [ - PreprocessorFile(cube, 'A', - attributes=common_attributes, - settings=settings), - PreprocessorFile(cube, 'B', - attributes=common_attributes, - settings=settings), - PreprocessorFile(cube, 'C', - attributes=common_attributes, - settings=settings), - PreprocessorFile(cube, 'D', - attributes=common_attributes, - settings=settings), + PreprocessorFile( + cube, "A", attributes=common_attributes, settings=settings + ), + PreprocessorFile( + cube, "B", attributes=common_attributes, settings=settings + ), + PreprocessorFile( + cube, "C", attributes=common_attributes, settings=settings + ), + PreprocessorFile( + cube, "D", attributes=common_attributes, settings=settings + ), ] - order = ('load', 'ensemble_statistics', 'save') - preproc_dir = '/preproc' - step = 'ensemble_statistics' + order = ("load", "ensemble_statistics", "save") + preproc_dir = "/preproc" + step = "ensemble_statistics" output, settings = _recipe._update_multiproduct( - products, order, preproc_dir, step) + products, order, preproc_dir, step + ) assert len(output) == 1 product = list(output)[0] assert product.filename == Path( - '/preproc/d/var/CMIP6_CanESM2_EnsembleMedian_2000-2000.nc') + "/preproc/d/var/CMIP6_CanESM2_EnsembleMedian_2000-2000.nc" + ) for attr in common_attributes: assert attr in product.attributes assert product.attributes[attr] == common_attributes[attr] - assert 'alias' in product.attributes - assert product.attributes['alias'] == 'EnsembleMedian' - assert 'dataset' in product.attributes - assert product.attributes['dataset'] == 'CanESM2' - assert 'ensemble_statistics' in product.attributes - assert product.attributes['ensemble_statistics'] == 'EnsembleMedian' - assert 'start_year' in product.attributes - assert product.attributes['start_year'] == 2000 - assert 'end_year' in product.attributes - assert product.attributes['end_year'] == 2000 + assert "alias" in product.attributes + assert product.attributes["alias"] == "EnsembleMedian" + assert "dataset" in product.attributes + assert product.attributes["dataset"] == "CanESM2" + assert "ensemble_statistics" in product.attributes + assert product.attributes["ensemble_statistics"] == "EnsembleMedian" + assert "start_year" in product.attributes + assert product.attributes["start_year"] == 2000 + assert "end_year" in product.attributes + assert product.attributes["end_year"] == 2000 assert len(settings) == 1 - output_products = settings['output_products'] + output_products = settings["output_products"] assert len(output_products) == 1 - stats = output_products['CMIP6_CanESM2'] + stats = output_products["CMIP6_CanESM2"] assert len(stats) == 1 - assert 'median' in stats - assert stats['median'].filename == Path( - '/preproc/d/var/CMIP6_CanESM2_EnsembleMedian_2000-2000.nc') + assert "median" in stats + assert stats["median"].filename == Path( + "/preproc/d/var/CMIP6_CanESM2_EnsembleMedian_2000-2000.nc" + ) def test_update_multiproduct_ensemble_statistics_percentile(): """Test ``_update_multiproduct``.""" settings = { - 'ensemble_statistics': { - 
'statistics': [ - {'operator': 'percentile', 'percent': 5}, + "ensemble_statistics": { + "statistics": [ + {"operator": "percentile", "percent": 5}, ], - 'span': 'full', + "span": "full", }, } common_attributes = { - 'dataset': 'CanESM2', - 'project': 'CMIP6', - 'timerange': '2000/2000', - 'diagnostic': 'd', - 'variable_group': 'var', + "dataset": "CanESM2", + "project": "CMIP6", + "timerange": "2000/2000", + "diagnostic": "d", + "variable_group": "var", } cube = iris.cube.Cube(np.array([1])) products = [ - PreprocessorFile(cube, 'A', - attributes=common_attributes, - settings=settings), - PreprocessorFile(cube, 'B', - attributes=common_attributes, - settings=settings), - PreprocessorFile(cube, 'C', - attributes=common_attributes, - settings=settings), - PreprocessorFile(cube, 'D', - attributes=common_attributes, - settings=settings), + PreprocessorFile( + cube, "A", attributes=common_attributes, settings=settings + ), + PreprocessorFile( + cube, "B", attributes=common_attributes, settings=settings + ), + PreprocessorFile( + cube, "C", attributes=common_attributes, settings=settings + ), + PreprocessorFile( + cube, "D", attributes=common_attributes, settings=settings + ), ] - order = ('load', 'ensemble_statistics', 'save') - preproc_dir = '/preproc' - step = 'ensemble_statistics' + order = ("load", "ensemble_statistics", "save") + preproc_dir = "/preproc" + step = "ensemble_statistics" output, settings = _recipe._update_multiproduct( - products, order, preproc_dir, step) + products, order, preproc_dir, step + ) assert len(output) == 1 product = list(output)[0] assert product.filename == Path( - '/preproc/d/var/CMIP6_CanESM2_EnsemblePercentile5_2000-2000.nc') + "/preproc/d/var/CMIP6_CanESM2_EnsemblePercentile5_2000-2000.nc" + ) for attr in common_attributes: assert attr in product.attributes assert product.attributes[attr] == common_attributes[attr] - assert 'alias' in product.attributes - assert product.attributes['alias'] == 'EnsemblePercentile5' - assert 'dataset' in product.attributes - assert product.attributes['dataset'] == 'CanESM2' - assert 'ensemble_statistics' in product.attributes - assert product.attributes['ensemble_statistics'] == ( - 'EnsemblePercentile5' + assert "alias" in product.attributes + assert product.attributes["alias"] == "EnsemblePercentile5" + assert "dataset" in product.attributes + assert product.attributes["dataset"] == "CanESM2" + assert "ensemble_statistics" in product.attributes + assert product.attributes["ensemble_statistics"] == ( + "EnsemblePercentile5" ) - assert 'start_year' in product.attributes - assert product.attributes['start_year'] == 2000 - assert 'end_year' in product.attributes - assert product.attributes['end_year'] == 2000 + assert "start_year" in product.attributes + assert product.attributes["start_year"] == 2000 + assert "end_year" in product.attributes + assert product.attributes["end_year"] == 2000 assert len(settings) == 1 - output_products = settings['output_products'] + output_products = settings["output_products"] assert len(output_products) == 1 - stats = output_products['CMIP6_CanESM2'] + stats = output_products["CMIP6_CanESM2"] assert len(stats) == 1 - assert 'percentile5' in stats - assert stats['percentile5'].filename == Path( - '/preproc/d/var/CMIP6_CanESM2_EnsemblePercentile5_2000-2000.nc') + assert "percentile5" in stats + assert stats["percentile5"].filename == Path( + "/preproc/d/var/CMIP6_CanESM2_EnsemblePercentile5_2000-2000.nc" + ) def test_update_multiproduct_no_product(): cube = iris.cube.Cube(np.array([1])) products 
= [ - PreprocessorFile(cube, 'A', attributes=None, settings={'step': {}})] - order = ('load', 'save') - preproc_dir = '/preproc_dir' - step = 'multi_model_statistics' + PreprocessorFile(cube, "A", attributes=None, settings={"step": {}}) + ] + order = ("load", "save") + preproc_dir = "/preproc_dir" + step = "multi_model_statistics" output, settings = _recipe._update_multiproduct( - products, order, preproc_dir, step) + products, order, preproc_dir, step + ) assert output == products assert settings == {} SCRIPTS_CFG = { - 'output_dir': mock.sentinel.output_dir, - 'script': mock.sentinel.script, - 'settings': mock.sentinel.settings, + "output_dir": mock.sentinel.output_dir, + "script": mock.sentinel.script, + "settings": mock.sentinel.settings, } DIAGNOSTICS = { - 'd1': {'scripts': {'s1': {'ancestors': [], **SCRIPTS_CFG}}}, - 'd2': {'scripts': {'s1': {'ancestors': ['d1/pr', 'd1/s1'], - **SCRIPTS_CFG}}}, - 'd3': {'scripts': {'s1': {'ancestors': ['d2/s1'], **SCRIPTS_CFG}}}, - 'd4': {'scripts': { - 's1': {'ancestors': 'd1/pr d1/tas', **SCRIPTS_CFG}, - 's2': {'ancestors': ['d4/pr', 'd4/tas'], **SCRIPTS_CFG}, - 's3': {'ancestors': ['d3/s1'], **SCRIPTS_CFG}, - }}, + "d1": {"scripts": {"s1": {"ancestors": [], **SCRIPTS_CFG}}}, + "d2": { + "scripts": {"s1": {"ancestors": ["d1/pr", "d1/s1"], **SCRIPTS_CFG}} + }, + "d3": {"scripts": {"s1": {"ancestors": ["d2/s1"], **SCRIPTS_CFG}}}, + "d4": { + "scripts": { + "s1": {"ancestors": "d1/pr d1/tas", **SCRIPTS_CFG}, + "s2": {"ancestors": ["d4/pr", "d4/tas"], **SCRIPTS_CFG}, + "s3": {"ancestors": ["d3/s1"], **SCRIPTS_CFG}, + } + }, } TEST_GET_TASKS_TO_RUN = [ (None, None), - ({''}, {''}), - ({'wrong_task/*'}, {'wrong_task/*'}), - ({'d1/*'}, {'d1/*'}), - ({'d2/*'}, {'d2/*', 'd1/pr', 'd1/s1'}), - ({'d3/*'}, {'d3/*', 'd2/s1', 'd1/pr', 'd1/s1'}), - ({'d4/*'}, {'d4/*', 'd1/pr', 'd1/tas', 'd4/pr', 'd4/tas', 'd3/s1', - 'd2/s1', 'd1/s1'}), - ({'wrong_task/*', 'd1/*'}, {'wrong_task/*', 'd1/*'}), - ({'d1/ta'}, {'d1/ta'}), - ({'d4/s2'}, {'d4/s2', 'd4/pr', 'd4/tas'}), - ({'d2/s1', 'd3/ta', 'd1/s1'}, {'d2/s1', 'd1/pr', 'd1/s1', 'd3/ta'}), - ({'d4/s1', 'd4/s2'}, {'d4/s1', 'd1/pr', 'd1/tas', 'd4/s2', 'd4/pr', - 'd4/tas'}), - ({'d4/s3', 'd3/ta'}, {'d4/s3', 'd3/s1', 'd2/s1', 'd1/pr', 'd1/s1', - 'd3/ta'}), + ({""}, {""}), + ({"wrong_task/*"}, {"wrong_task/*"}), + ({"d1/*"}, {"d1/*"}), + ({"d2/*"}, {"d2/*", "d1/pr", "d1/s1"}), + ({"d3/*"}, {"d3/*", "d2/s1", "d1/pr", "d1/s1"}), + ( + {"d4/*"}, + { + "d4/*", + "d1/pr", + "d1/tas", + "d4/pr", + "d4/tas", + "d3/s1", + "d2/s1", + "d1/s1", + }, + ), + ({"wrong_task/*", "d1/*"}, {"wrong_task/*", "d1/*"}), + ({"d1/ta"}, {"d1/ta"}), + ({"d4/s2"}, {"d4/s2", "d4/pr", "d4/tas"}), + ({"d2/s1", "d3/ta", "d1/s1"}, {"d2/s1", "d1/pr", "d1/s1", "d3/ta"}), + ( + {"d4/s1", "d4/s2"}, + {"d4/s1", "d1/pr", "d1/tas", "d4/s2", "d4/pr", "d4/tas"}, + ), + ( + {"d4/s3", "d3/ta"}, + {"d4/s3", "d3/s1", "d2/s1", "d1/pr", "d1/s1", "d3/ta"}, + ), ] -@pytest.mark.parametrize('diags_to_run,tasknames_to_run', - TEST_GET_TASKS_TO_RUN) +@pytest.mark.parametrize( + "diags_to_run,tasknames_to_run", TEST_GET_TASKS_TO_RUN +) def test_get_tasks_to_run(diags_to_run, tasknames_to_run): """Test ``Recipe._get_tasks_to_run``.""" - cfg = {'diagnostics': diags_to_run} + cfg = {"diagnostics": diags_to_run} recipe = MockRecipe(cfg, DIAGNOSTICS) tasks_to_run = recipe._get_tasks_to_run() @@ -616,24 +686,25 @@ def test_get_tasks_to_run(diags_to_run, tasknames_to_run): TEST_CREATE_DIAGNOSTIC_TASKS = [ - (set(), ['s1', 's2', 's3']), - ({'d4/*'}, ['s1', 's2', 's3']), - ({'d4/s1'}, 
['s1']),
-    ({'d4/s1', 'd3/*'}, ['s1']),
-    ({'d4/s1', 'd4/s2'}, ['s1', 's2']),
-    ({''}, []),
-    ({'d3/*'}, []),
+    (set(), ["s1", "s2", "s3"]),
+    ({"d4/*"}, ["s1", "s2", "s3"]),
+    ({"d4/s1"}, ["s1"]),
+    ({"d4/s1", "d3/*"}, ["s1"]),
+    ({"d4/s1", "d4/s2"}, ["s1", "s2"]),
+    ({""}, []),
+    ({"d3/*"}, []),
 ]


-@pytest.mark.parametrize('tasks_to_run,tasks_run',
-                         TEST_CREATE_DIAGNOSTIC_TASKS)
-@mock.patch('esmvalcore._recipe.recipe.DiagnosticTask', autospec=True)
+@pytest.mark.parametrize(
+    "tasks_to_run,tasks_run", TEST_CREATE_DIAGNOSTIC_TASKS
+)
+@mock.patch("esmvalcore._recipe.recipe.DiagnosticTask", autospec=True)
 def test_create_diagnostic_tasks(mock_diag_task, tasks_to_run, tasks_run):
     """Test ``Recipe._create_diagnostic_tasks``."""
-    cfg = {'run_diagnostic': True}
-    diag_name = 'd4'
-    diag_cfg = DIAGNOSTICS['d4']
+    cfg = {"run_diagnostic": True}
+    diag_name = "d4"
+    diag_cfg = DIAGNOSTICS["d4"]
     n_tasks = len(tasks_run)

     recipe = MockRecipe(cfg, DIAGNOSTICS)
@@ -646,36 +717,35 @@ def test_create_diagnostic_tasks(mock_diag_task, tasks_to_run, tasks_run):
             script=mock.sentinel.script,
             output_dir=mock.sentinel.output_dir,
             settings=mock.sentinel.settings,
-            name=f'{diag_name}{_recipe.TASKSEP}{task_name}',
+            name=f"{diag_name}{_recipe.TASKSEP}{task_name}",
         )
         assert expected_call in mock_diag_task.mock_calls


 def test_update_regrid_time():
     """Test ``_update_regrid_time``."""
-    dataset = Dataset(frequency='mon')
-    settings = {'regrid_time': {}}
+    dataset = Dataset(frequency="mon")
+    settings = {"regrid_time": {}}
     _recipe._update_regrid_time(dataset, settings)
-    assert settings == {'regrid_time': {'frequency': 'mon'}}
+    assert settings == {"regrid_time": {"frequency": "mon"}}


 def test_select_dataset_fails():
     dataset = Dataset(
-        dataset='dataset1',
-        diagnostic='diagnostic1',
-        variable_group='tas',
+        dataset="dataset1",
+        diagnostic="diagnostic1",
+        variable_group="tas",
     )
     with pytest.raises(RecipeError):
-        _recipe._select_dataset('dataset2', [dataset])
+        _recipe._select_dataset("dataset2", [dataset])


 def test_limit_datasets():
-
     datasets = [
-        Dataset(dataset='dataset1', alias='dataset1'),
-        Dataset(dataset='dataset2', alias='dataset2'),
+        Dataset(dataset="dataset1", alias="dataset1"),
+        Dataset(dataset="dataset2", alias="dataset2"),
     ]
-    datasets[0].session = {'max_datasets': 1}
+    datasets[0].session = {"max_datasets": 1}

     result = _recipe._limit_datasets(datasets, {})

@@ -685,100 +755,99 @@ def test_limit_datasets():


 def test_get_default_settings(mocker):
     mocker.patch.object(
         _recipe,
-        '_get_output_file',
+        "_get_output_file",
         autospec=True,
-        return_value=Path('/path/to/file.nc'),
+        return_value=Path("/path/to/file.nc"),
     )
     session = mocker.create_autospec(esmvalcore.config.Session, instance=True)
     session.__getitem__.return_value = False
     dataset = Dataset(
-        short_name='sic',
-        original_short_name='siconc',
-        mip='Amon',
-        project='CMIP6',
+        short_name="sic",
+        original_short_name="siconc",
+        mip="Amon",
+        project="CMIP6",
     )
     dataset.session = session

     settings = _recipe._get_default_settings(dataset)
     assert settings == {
-        'remove_supplementary_variables': {},
-        'save': {'compress': False, 'alias': 'sic'},
+        "remove_supplementary_variables": {},
+        "save": {"compress": False, "alias": "sic"},
     }


 def test_set_version(mocker):
-
-    dataset = Dataset(short_name='tas')
-    supplementary = Dataset(short_name='areacella')
+    dataset = Dataset(short_name="tas")
+    supplementary = Dataset(short_name="areacella")
     dataset.supplementaries = [supplementary]
-    input_dataset = Dataset(short_name='tas')
+    input_dataset =
Dataset(short_name="tas") file1 = mocker.Mock() - file1.facets = {'version': 'v1'} + file1.facets = {"version": "v1"} file2 = mocker.Mock() - file2.facets = {'version': 'v2'} + file2.facets = {"version": "v2"} input_dataset.files = [file1, file2] file3 = mocker.Mock() - file3.facets = {'version': 'v3'} + file3.facets = {"version": "v3"} supplementary.files = [file3] _recipe._set_version(dataset, [input_dataset]) print(dataset) - assert dataset.facets['version'] == ['v1', 'v2'] - assert dataset.supplementaries[0].facets['version'] == 'v3' + assert dataset.facets["version"] == ["v1", "v2"] + assert dataset.supplementaries[0].facets["version"] == "v3" def test_extract_preprocessor_order(): profile = { - 'custom_order': True, - 'regrid': { - 'target_grid': '1x1' - }, - 'derive': { - 'long_name': 'albedo at the surface', - 'short_name': 'alb', - 'standard_name': '', - 'units': '1' + "custom_order": True, + "regrid": {"target_grid": "1x1"}, + "derive": { + "long_name": "albedo at the surface", + "short_name": "alb", + "standard_name": "", + "units": "1", }, } order = _recipe._extract_preprocessor_order(profile) - assert any(order[i:i + 2] == ('regrid', 'derive') - for i in range(len(order) - 1)) + assert any( + order[i : i + 2] == ("regrid", "derive") for i in range(len(order) - 1) + ) def test_update_extract_shape_abs_shapefile(session, tmp_path): """Test ``_update_extract_shape``.""" - session['auxiliary_data_dir'] = '/aux/dir' - shapefile = tmp_path / 'my_custom_shapefile.shp' + session["auxiliary_data_dir"] = "/aux/dir" + shapefile = tmp_path / "my_custom_shapefile.shp" shapefile.write_text("") # create empty file - settings = {'extract_shape': {'shapefile': str(shapefile)}} + settings = {"extract_shape": {"shapefile": str(shapefile)}} _recipe._update_extract_shape(settings, session) - assert isinstance(settings['extract_shape']['shapefile'], Path) - assert settings['extract_shape']['shapefile'] == shapefile + assert isinstance(settings["extract_shape"]["shapefile"], Path) + assert settings["extract_shape"]["shapefile"] == shapefile @pytest.mark.parametrize( - 'shapefile', ['aux_dir/ar6.shp', 'ar6.shp', 'ar6', 'AR6', 'aR6'] + "shapefile", ["aux_dir/ar6.shp", "ar6.shp", "ar6", "AR6", "aR6"] ) def test_update_extract_shape_rel_shapefile(shapefile, session, tmp_path): """Test ``_update_extract_shape``.""" - session['auxiliary_data_dir'] = tmp_path - (tmp_path / 'aux_dir').mkdir(parents=True, exist_ok=True) - aux_dir_shapefile = tmp_path / 'aux_dir' / 'ar6.shp' + session["auxiliary_data_dir"] = tmp_path + (tmp_path / "aux_dir").mkdir(parents=True, exist_ok=True) + aux_dir_shapefile = tmp_path / "aux_dir" / "ar6.shp" aux_dir_shapefile.write_text("") # create empty file - settings = {'extract_shape': {'shapefile': shapefile}} + settings = {"extract_shape": {"shapefile": shapefile}} _recipe._update_extract_shape(settings, session) - if 'aux_dir' in shapefile: - assert settings['extract_shape']['shapefile'] == tmp_path / shapefile + if "aux_dir" in shapefile: + assert settings["extract_shape"]["shapefile"] == tmp_path / shapefile else: ar6_file = ( - Path(esmvalcore.preprocessor.__file__).parent / 'shapefiles' / - 'ar6.shp' + Path(esmvalcore.preprocessor.__file__).parent + / "shapefiles" + / "ar6.shp" ) - assert settings['extract_shape']['shapefile'] == ar6_file + assert settings["extract_shape"]["shapefile"] == ar6_file diff --git a/tests/unit/recipe/test_to_datasets.py b/tests/unit/recipe/test_to_datasets.py index d20f2c0d85..12828dc929 100644 --- a/tests/unit/recipe/test_to_datasets.py +++ 
b/tests/unit/recipe/test_to_datasets.py @@ -58,71 +58,71 @@ def test_from_recipe(session): reference = [ Dataset( - alias='CMIP3', - dataset='cccma_cgcm3_1', - diagnostic='diagnostic1', - ensemble='run1', - exp='historical', - frequency='mon', - mip='A1', - preprocessor='preprocessor1', - project='CMIP3', + alias="CMIP3", + dataset="cccma_cgcm3_1", + diagnostic="diagnostic1", + ensemble="run1", + exp="historical", + frequency="mon", + mip="A1", + preprocessor="preprocessor1", + project="CMIP3", recipe_dataset_index=0, - short_name='ta', - variable_group='ta850', + short_name="ta", + variable_group="ta850", ), Dataset( - alias='CMIP5', - dataset='EC-EARTH', - diagnostic='diagnostic1', - ensemble='r1i1p1', - exp='historical', - mip='Amon', - preprocessor='preprocessor1', - project='CMIP5', + alias="CMIP5", + dataset="EC-EARTH", + diagnostic="diagnostic1", + ensemble="r1i1p1", + exp="historical", + mip="Amon", + preprocessor="preprocessor1", + project="CMIP5", recipe_dataset_index=1, - short_name='ta', - variable_group='ta850', + short_name="ta", + variable_group="ta850", ), Dataset( - alias='CMIP6', - dataset='AWI-ESM-1-1-LR', - diagnostic='diagnostic1', - ensemble='r1i1p1f1', - exp='historical', - grid='gn', - mip='Amon', - preprocessor='preprocessor1', - project='CMIP6', + alias="CMIP6", + dataset="AWI-ESM-1-1-LR", + diagnostic="diagnostic1", + ensemble="r1i1p1f1", + exp="historical", + grid="gn", + mip="Amon", + preprocessor="preprocessor1", + project="CMIP6", recipe_dataset_index=2, - short_name='ta', - variable_group='ta850', + short_name="ta", + variable_group="ta850", ), Dataset( - alias='CORDEX', - dataset='RACMO22E', - diagnostic='diagnostic1', - driver='MOHC-HadGEM2-ES', - domain='EUR-11', - ensemble='r1i1p1', - exp='historical', - mip='mon', - preprocessor='preprocessor1', - project='CORDEX', + alias="CORDEX", + dataset="RACMO22E", + diagnostic="diagnostic1", + driver="MOHC-HadGEM2-ES", + domain="EUR-11", + ensemble="r1i1p1", + exp="historical", + mip="mon", + preprocessor="preprocessor1", + project="CORDEX", recipe_dataset_index=3, - short_name='ta', - variable_group='ta850', + short_name="ta", + variable_group="ta850", ), Dataset( - alias='obs4MIPs', - dataset='CERES-EBAF', - diagnostic='diagnostic1', - mip='Amon', - preprocessor='preprocessor1', - project='obs4MIPs', + alias="obs4MIPs", + dataset="CERES-EBAF", + diagnostic="diagnostic1", + mip="Amon", + preprocessor="preprocessor1", + project="obs4MIPs", recipe_dataset_index=4, - short_name='ta', - variable_group='ta850', + short_name="ta", + variable_group="ta850", ), ] for ref_ds in reference: @@ -131,9 +131,9 @@ def test_from_recipe(session): assert datasets == reference -@pytest.mark.parametrize('path_type', [str, Path]) +@pytest.mark.parametrize("path_type", [str, Path]) def test_from_recipe_file(tmp_path, session, path_type): - recipe_file = tmp_path / 'recipe_test.yml' + recipe_file = tmp_path / "recipe_test.yml" recipe_txt = textwrap.dedent(""" datasets: - dataset: AWI-ESM-1-1-LR @@ -149,7 +149,7 @@ def test_from_recipe_file(tmp_path, session, path_type): project: CMIP6 """) - recipe_file.write_text(recipe_txt, encoding='utf-8') + recipe_file.write_text(recipe_txt, encoding="utf-8") datasets = Dataset.from_recipe( path_type(recipe_file), session, @@ -213,8 +213,8 @@ def test_merge_supplementaries_dataset_takes_priority(session): print(datasets) assert len(datasets) == 2 assert all(len(ds.supplementaries) == 1 for ds in datasets) - assert datasets[0].supplementaries[0].facets['exp'] == 'historical' - assert 
datasets[1].supplementaries[0].facets['exp'] == '1pctCO2' + assert datasets[0].supplementaries[0].facets["exp"] == "historical" + assert datasets[1].supplementaries[0].facets["exp"] == "1pctCO2" def test_merge_supplementaries_combine_dataset_with_variable(session): @@ -252,8 +252,8 @@ def test_merge_supplementaries_combine_dataset_with_variable(session): print(datasets) assert len(datasets) == 1 assert len(datasets[0].supplementaries) == 2 - assert datasets[0].supplementaries[0].facets['short_name'] == 'areacella' - assert datasets[0].supplementaries[1].facets['short_name'] == 'sftlf' + assert datasets[0].supplementaries[0].facets["short_name"] == "areacella" + assert datasets[0].supplementaries[1].facets["short_name"] == "sftlf" def test_merge_supplementaries_missing_short_name_fails(session): @@ -279,26 +279,26 @@ def test_merge_supplementaries_missing_short_name_fails(session): def test_get_input_datasets_derive(session): dataset = Dataset( - dataset='ERA5', - project='native6', - mip='E1hr', - short_name='rlus', - alias='ERA5', + dataset="ERA5", + project="native6", + mip="E1hr", + short_name="rlus", + alias="ERA5", derive=True, force_derivation=True, - frequency='1hr', + frequency="1hr", recipe_dataset_index=0, - tier='3', - type='reanaly', - version='v1', + tier="3", + type="reanaly", + version="v1", ) rlds, rlns = to_datasets._get_input_datasets(dataset) - assert rlds['short_name'] == 'rlds' - assert rlds['long_name'] == 'Surface Downwelling Longwave Radiation' - assert rlds['frequency'] == '1hr' - assert rlns['short_name'] == 'rlns' - assert rlns['long_name'] == 'Surface Net downward Longwave Radiation' - assert rlns['frequency'] == '1hr' + assert rlds["short_name"] == "rlds" + assert rlds["long_name"] == "Surface Downwelling Longwave Radiation" + assert rlds["frequency"] == "1hr" + assert rlns["short_name"] == "rlns" + assert rlns["long_name"] == "Surface Net downward Longwave Radiation" + assert rlns["frequency"] == "1hr" def test_max_years(session): @@ -317,30 +317,29 @@ def test_max_years(session): - dataset: AWI-ESM-1-1-LR grid: gn """) - session['max_years'] = 2 + session["max_years"] = 2 datasets = Dataset.from_recipe(recipe_txt, session) - assert datasets[0].facets['timerange'] == '2000/2001' + assert datasets[0].facets["timerange"] == "2000/2001" -@pytest.mark.parametrize('found_files', [True, False]) +@pytest.mark.parametrize("found_files", [True, False]) def test_dataset_from_files_fails(monkeypatch, found_files): - def from_files(_): - file = LocalFile('/path/to/file') - file.facets = {'facets1': 'value1'} + file = LocalFile("/path/to/file") + file.facets = {"facets1": "value1"} dataset = Dataset( - dataset='*', - short_name='tas', + dataset="*", + short_name="tas", ) dataset.files = [file] if found_files else [] - dataset._file_globs = ['/path/to/tas_*.nc'] + dataset._file_globs = ["/path/to/tas_*.nc"] return [dataset] - monkeypatch.setattr(Dataset, 'from_files', from_files) + monkeypatch.setattr(Dataset, "from_files", from_files) dataset = Dataset( - dataset='*', - short_name='tas', + dataset="*", + short_name="tas", ) with pytest.raises(RecipeError, match="Unable to replace dataset.*"): @@ -348,91 +347,86 @@ def from_files(_): def test_fix_cmip5_fx_ensemble(monkeypatch): - def find_files(self): - if self.facets['ensemble'] == 'r0i0p0': - self._files = ['file1.nc'] + if self.facets["ensemble"] == "r0i0p0": + self._files = ["file1.nc"] - monkeypatch.setattr(Dataset, 'find_files', find_files) + monkeypatch.setattr(Dataset, "find_files", find_files) dataset = Dataset( - 
dataset='dataset1', - short_name='orog', - mip='fx', - project='CMIP5', - ensemble='r1i1p1', + dataset="dataset1", + short_name="orog", + mip="fx", + project="CMIP5", + ensemble="r1i1p1", ) to_datasets._fix_cmip5_fx_ensemble(dataset) - assert dataset['ensemble'] == 'r0i0p0' + assert dataset["ensemble"] == "r0i0p0" def test_get_supplementary_short_names(monkeypatch): - def _update_cmor_facets(facets): - facets['modeling_realm'] = 'atmos' + facets["modeling_realm"] = "atmos" monkeypatch.setattr( to_datasets, - '_update_cmor_facets', + "_update_cmor_facets", _update_cmor_facets, ) facets = { - 'short_name': 'tas', + "short_name": "tas", } - result = to_datasets._get_supplementary_short_names(facets, 'mask_landsea') - assert result == ['sftlf'] + result = to_datasets._get_supplementary_short_names(facets, "mask_landsea") + assert result == ["sftlf"] def test_append_missing_supplementaries(): supplementaries = [ { - 'short_name': 'areacella', + "short_name": "areacella", }, ] facets = { - 'short_name': 'tas', - 'project': 'CMIP6', - 'mip': 'Amon', + "short_name": "tas", + "project": "CMIP6", + "mip": "Amon", } settings = { - 'mask_landsea': { - 'mask_out': 'land' - }, - 'area_statistics': { - 'operator': 'mean' - }, + "mask_landsea": {"mask_out": "land"}, + "area_statistics": {"operator": "mean"}, } - to_datasets._append_missing_supplementaries(supplementaries, facets, - settings) + to_datasets._append_missing_supplementaries( + supplementaries, facets, settings + ) - short_names = {f['short_name'] for f in supplementaries} - assert short_names == {'areacella', 'sftlf'} + short_names = {f["short_name"] for f in supplementaries} + assert short_names == {"areacella", "sftlf"} def test_report_unexpanded_globs(mocker): dataset = Dataset( - alias='CMIP5', - dataset='*', - diagnostic='diagnostic1', - ensemble='r1i1p1', - exp='historical', - mip='Amon', - preprocessor='preprocessor1', - project='CMIP5', + alias="CMIP5", + dataset="*", + diagnostic="diagnostic1", + ensemble="r1i1p1", + exp="historical", + mip="Amon", + preprocessor="preprocessor1", + project="CMIP5", recipe_dataset_index=1, - short_name='ta', - variable_group='ta850', + short_name="ta", + variable_group="ta850", ) - file = mocker.Mock(facets={'dataset': '*'}) + file = mocker.Mock(facets={"dataset": "*"}) dataset.files = [file] - unexpanded_globs = {'dataset': '*'} + unexpanded_globs = {"dataset": "*"} msg = to_datasets._report_unexpanded_globs( dataset, dataset, unexpanded_globs ) - assert 'paths to the' not in msg + assert "paths to the" not in msg diff --git a/tests/unit/task/test_diagnostic_task.py b/tests/unit/task/test_diagnostic_task.py index 7e6867dd74..78b509b040 100644 --- a/tests/unit/task/test_diagnostic_task.py +++ b/tests/unit/task/test_diagnostic_task.py @@ -14,48 +14,50 @@ def test_write_ncl_settings(tmp_path): """Test minimally write_ncl_settings().""" settings = { - 'run_dir': str(tmp_path / 'run_dir'), - 'diag_script_info': {'profile_diagnostic': False}, - 'var_name': 'tas', + "run_dir": str(tmp_path / "run_dir"), + "diag_script_info": {"profile_diagnostic": False}, + "var_name": "tas", } file_name = tmp_path / "settings" write_ncl_settings(settings, file_name) - with open(file_name, 'r', encoding='utf-8') as file: + with open(file_name, "r", encoding="utf-8") as file: lines = file.readlines() assert 'var_name = "tas"\n' in lines assert 'if (isvar("profile_diagnostic")) then\n' not in lines settings = { - 'run_dir': str(tmp_path / 'run_dir'), - 'profile_diagnostic': True, - 'var_name': 'tas', + "run_dir": str(tmp_path 
/ "run_dir"), + "profile_diagnostic": True, + "var_name": "tas", } file_name = tmp_path / "settings" write_ncl_settings(settings, file_name) - with open(file_name, 'r', encoding='utf-8') as file: + with open(file_name, "r", encoding="utf-8") as file: lines = file.readlines() assert 'var_name = "tas"\n' in lines - assert 'profile_diagnostic' not in lines + assert "profile_diagnostic" not in lines -@pytest.mark.parametrize("ext", ['.jl', '.py', '.ncl', '.R']) +@pytest.mark.parametrize("ext", [".jl", ".py", ".ncl", ".R"]) def test_initialize_env(ext, tmp_path, monkeypatch): """Test that the environmental variables are set correctly.""" - monkeypatch.setattr(esmvalcore._task.DiagnosticTask, '_initialize_cmd', - lambda self: None) + monkeypatch.setattr( + esmvalcore._task.DiagnosticTask, "_initialize_cmd", lambda self: None + ) - esmvaltool_path = tmp_path / 'esmvaltool' - monkeypatch.setattr(esmvalcore.config._diagnostics.DIAGNOSTICS, 'path', - esmvaltool_path) + esmvaltool_path = tmp_path / "esmvaltool" + monkeypatch.setattr( + esmvalcore.config._diagnostics.DIAGNOSTICS, "path", esmvaltool_path + ) - diagnostics_path = esmvaltool_path / 'diag_scripts' + diagnostics_path = esmvaltool_path / "diag_scripts" diagnostics_path.mkdir(parents=True) - script = diagnostics_path / ('test' + ext) + script = diagnostics_path / ("test" + ext) script.touch() settings = { - 'run_dir': str(tmp_path / 'run_dir'), - 'profile_diagnostic': False, + "run_dir": str(tmp_path / "run_dir"), + "profile_diagnostic": False, } task = esmvalcore._task.DiagnosticTask( script, @@ -66,78 +68,78 @@ def test_initialize_env(ext, tmp_path, monkeypatch): # Create correct environment env = {} test_env = copy.deepcopy(task.env) - if ext in ('.jl', '.py'): - env['MPLBACKEND'] = 'Agg' - if ext == '.jl': - env['JULIA_LOAD_PATH'] = f"{esmvaltool_path / 'install' / 'Julia'}" + if ext in (".jl", ".py"): + env["MPLBACKEND"] = "Agg" + if ext == ".jl": + env["JULIA_LOAD_PATH"] = f"{esmvaltool_path / 'install' / 'Julia'}" # check for new type of JULIA_LOAD_PATH # and cut away new path arguments @:@$CONDA_ENV:@stdlib # see https://github.com/ESMValGroup/ESMValCore/issues/1443 - test_env['JULIA_LOAD_PATH'] = \ - task.env['JULIA_LOAD_PATH'].split(":")[0] - if ext in ('.ncl', '.R'): - env['diag_scripts'] = str(diagnostics_path) + test_env["JULIA_LOAD_PATH"] = task.env["JULIA_LOAD_PATH"].split(":")[0] + if ext in (".ncl", ".R"): + env["diag_scripts"] = str(diagnostics_path) assert test_env == env CMD = { # ext, profile: expected command - ('.py', False): ['python'], - ('.py', True): ['python', '-m', 'vprof', '-o'], - ('.ncl', False): ['ncl', '-n', '-p'], - ('.ncl', True): ['ncl', '-n', '-p'], - ('.R', False): ['Rscript'], - ('.R', True): ['Rscript'], - ('.jl', False): ['julia'], - ('.jl', True): ['julia'], - ('', False): [], - ('', True): [], + (".py", False): ["python"], + (".py", True): ["python", "-m", "vprof", "-o"], + (".ncl", False): ["ncl", "-n", "-p"], + (".ncl", True): ["ncl", "-n", "-p"], + (".R", False): ["Rscript"], + (".R", True): ["Rscript"], + (".jl", False): ["julia"], + (".jl", True): ["julia"], + ("", False): [], + ("", True): [], } @pytest.mark.parametrize("ext_profile,cmd", CMD.items()) def test_initialize_cmd(ext_profile, cmd, tmp_path, monkeypatch): """Test creating the command to run the diagnostic script.""" - monkeypatch.setattr(esmvalcore._task.DiagnosticTask, '_initialize_env', - lambda self: None) + monkeypatch.setattr( + esmvalcore._task.DiagnosticTask, "_initialize_env", lambda self: None + ) ext, profile = 
ext_profile - script = tmp_path / ('test' + ext) + script = tmp_path / ("test" + ext) script.touch() - if ext == '': + if ext == "": # test case where file is executable script.chmod(stat.S_IEXEC) - run_dir = tmp_path / 'run_dir' + run_dir = tmp_path / "run_dir" settings = { - 'run_dir': str(run_dir), - 'profile_diagnostic': profile, + "run_dir": str(run_dir), + "profile_diagnostic": profile, } - monkeypatch.setattr(esmvalcore._task, 'which', lambda x: x) - monkeypatch.setattr(esmvalcore._task.sys, 'executable', 'python') + monkeypatch.setattr(esmvalcore._task, "which", lambda x: x) + monkeypatch.setattr(esmvalcore._task.sys, "executable", "python") - task = esmvalcore._task.DiagnosticTask(script, - settings, - output_dir=str(tmp_path)) + task = esmvalcore._task.DiagnosticTask( + script, settings, output_dir=str(tmp_path) + ) # Append filenames to expected command - if ext == '.py' and profile: - cmd.append(str(run_dir / 'profile.json')) - cmd.append('-c') - cmd.append('c') + if ext == ".py" and profile: + cmd.append(str(run_dir / "profile.json")) + cmd.append("-c") + cmd.append("c") cmd.append(str(script)) assert task.cmd == cmd # test for no executable - monkeypatch.setattr(esmvalcore._task, 'which', lambda x: None) - if ext_profile[0] != '' and ext_profile[0] != '.py': + monkeypatch.setattr(esmvalcore._task, "which", lambda x: None) + if ext_profile[0] != "" and ext_profile[0] != ".py": with pytest.raises(DiagnosticError) as err_mssg: - esmvalcore._task.DiagnosticTask(script, - settings, - output_dir=str(tmp_path)) + esmvalcore._task.DiagnosticTask( + script, settings, output_dir=str(tmp_path) + ) exp_mssg1 = "Cannot execute script " exp_mssg2 = "program '{}' not installed.".format(CMD[ext_profile][0]) assert exp_mssg1 in str(err_mssg.value) @@ -149,42 +151,46 @@ def diagnostic_task(mocker, tmp_path): class TrackedFile(esmvalcore._task.TrackedFile): provenance = None - mocker.patch.object(esmvalcore._task, 'TrackedFile', autospec=TrackedFile) - tags = TagsManager({'plot_type': {'tag': 'tag_value'}}) + mocker.patch.object(esmvalcore._task, "TrackedFile", autospec=TrackedFile) + tags = TagsManager({"plot_type": {"tag": "tag_value"}}) mocker.patch.dict(esmvalcore._task.TAGS, tags) - mocker.patch.object(esmvalcore._task, - '_write_citation_files', - autospec=True) + mocker.patch.object( + esmvalcore._task, "_write_citation_files", autospec=True + ) - mocker.patch.object(esmvalcore._task.DiagnosticTask, '_initialize_cmd') - mocker.patch.object(esmvalcore._task.DiagnosticTask, '_initialize_env') + mocker.patch.object(esmvalcore._task.DiagnosticTask, "_initialize_cmd") + mocker.patch.object(esmvalcore._task.DiagnosticTask, "_initialize_env") settings = { - 'run_dir': str(tmp_path / 'run_dir'), - 'profile_diagnostic': False, - 'some_diagnostic_setting': True, + "run_dir": str(tmp_path / "run_dir"), + "profile_diagnostic": False, + "some_diagnostic_setting": True, } - task = esmvalcore._task.DiagnosticTask('test.py', - settings, - output_dir=str(tmp_path), - name='some-diagnostic-task') + task = esmvalcore._task.DiagnosticTask( + "test.py", + settings, + output_dir=str(tmp_path), + name="some-diagnostic-task", + ) return task def write_mock_provenance(diagnostic_task, record): - run_dir = Path(diagnostic_task.settings['run_dir']) + run_dir = Path(diagnostic_task.settings["run_dir"]) run_dir.mkdir(parents=True) - provenance_file = run_dir / 'diagnostic_provenance.yml' + provenance_file = run_dir / "diagnostic_provenance.yml" provenance_file.write_text(yaml.safe_dump(record)) def 
test_collect_provenance(mocker, diagnostic_task): tracked_file_instance = mocker.Mock() tracked_file_class = mocker.patch.object( - esmvalcore._task, 'TrackedFile', return_value=tracked_file_instance) - write_citation = mocker.patch.object(esmvalcore._task, - '_write_citation_files') + esmvalcore._task, "TrackedFile", return_value=tracked_file_instance + ) + write_citation = mocker.patch.object( + esmvalcore._task, "_write_citation_files" + ) record = { "test.png": { @@ -211,17 +217,19 @@ def test_collect_provenance(mocker, diagnostic_task): "test.png", { "caption": "Some figure", - "plot_type": ("tag_value", ), + "plot_type": ("tag_value",), "script_file": "test.py", "some_diagnostic_setting": True, }, {ancestor_product}, ) tracked_file_instance.initialize_provenance.assert_called_once_with( - diagnostic_task.activity) + diagnostic_task.activity + ) tracked_file_instance.save_provenance.assert_called_once() - write_citation.assert_called_once_with(tracked_file_instance.filename, - tracked_file_instance.provenance) + write_citation.assert_called_once_with( + tracked_file_instance.filename, tracked_file_instance.provenance + ) diagnostic_task.products.add.assert_called_once_with(tracked_file_instance) @@ -234,13 +242,11 @@ def assert_warned(log, msgs): def test_collect_no_provenance(caplog, diagnostic_task): - diagnostic_task._collect_provenance() assert_warned(caplog, [["No provenance information was written"]]) def test_collect_provenance_no_ancestors(caplog, diagnostic_task): - caplog.set_level(logging.INFO) record = { @@ -253,14 +259,16 @@ def test_collect_provenance_no_ancestors(caplog, diagnostic_task): diagnostic_task._collect_provenance() - assert_warned(caplog, [ - ["No ancestor files specified", "test.png"], - ["Valid ancestor files"], - ]) + assert_warned( + caplog, + [ + ["No ancestor files specified", "test.png"], + ["Valid ancestor files"], + ], + ) def test_collect_provenance_invalid_ancestors(caplog, diagnostic_task): - caplog.set_level(logging.INFO) record = { @@ -274,14 +282,16 @@ def test_collect_provenance_invalid_ancestors(caplog, diagnostic_task): diagnostic_task._collect_provenance() - assert_warned(caplog, [ - ["Invalid ancestor file", "test.png"], - ["Valid ancestor files"], - ]) + assert_warned( + caplog, + [ + ["Invalid ancestor file", "test.png"], + ["Valid ancestor files"], + ], + ) def test_collect_provenance_ancestor_hint(mocker, caplog, diagnostic_task): - caplog.set_level(logging.INFO) record = { @@ -305,7 +315,10 @@ def test_collect_provenance_ancestor_hint(mocker, caplog, diagnostic_task): diagnostic_task.ancestors = [ancestor_task] diagnostic_task._collect_provenance() - assert_warned(caplog, [ - ["Invalid ancestor file", "abc.nc", "test.nc"], - ["Valid ancestor files", "xyz.nc"], - ]) + assert_warned( + caplog, + [ + ["Invalid ancestor file", "abc.nc", "test.nc"], + ["Valid ancestor files", "xyz.nc"], + ], + ) diff --git a/tests/unit/task/test_print.py b/tests/unit/task/test_print.py index 605f017416..0137486877 100644 --- a/tests/unit/task/test_print.py +++ b/tests/unit/task/test_print.py @@ -1,4 +1,5 @@ """Test that a task tree can be printed in a human readable form.""" + import copy import textwrap @@ -11,16 +12,13 @@ @pytest.fixture def preproc_file(): - dataset = Dataset(short_name='tas') - dataset.files = ['/path/to/input_file.nc'] + dataset = Dataset(short_name="tas") + dataset.files = ["/path/to/input_file.nc"] return PreprocessorFile( - filename='/output/preproc/file.nc', - attributes={'short_name': 'tas'}, + filename="/output/preproc/file.nc", 
+ attributes={"short_name": "tas"}, settings={ - 'extract_levels': { - 'scheme': 'linear', - 'levels': [95000] - }, + "extract_levels": {"scheme": "linear", "levels": [95000]}, }, datasets=[dataset], ) @@ -33,20 +31,20 @@ def preproc_task(preproc_file): @pytest.fixture def diagnostic_task(tmp_path): - mock_script = tmp_path / 'script.py' + mock_script = tmp_path / "script.py" mock_script.touch() settings = { - 'run_dir': str('/output/run'), - 'profile_diagnostic': False, + "run_dir": str("/output/run"), + "profile_diagnostic": False, } - task = DiagnosticTask(mock_script, settings, output_dir='/output/run') - task.script = '/some/where/esmvaltool/diag_scripts/test.py' + task = DiagnosticTask(mock_script, settings, output_dir="/output/run") + task.script = "/some/where/esmvaltool/diag_scripts/test.py" return task def test_repr_preproc_task(preproc_task): """Test printing a preprocessor task.""" - preproc_task.name = 'diag_1/tas' + preproc_task.name = "diag_1/tas" result = str(preproc_task) print(result) @@ -66,7 +64,7 @@ def test_repr_preproc_task(preproc_task): def test_repr_diagnostic_task(diagnostic_task): """Test printing a diagnostic task.""" - diagnostic_task.name = 'diag_1/script_1' + diagnostic_task.name = "diag_1/script_1" result = str(diagnostic_task) print(result) @@ -84,8 +82,8 @@ def test_repr_diagnostic_task(diagnostic_task): def test_repr_simple_tree(preproc_task, diagnostic_task): """Test the most common task tree.""" - preproc_task.name = 'diag_1/tas' - diagnostic_task.name = 'diag_1/script_1' + preproc_task.name = "diag_1/tas" + diagnostic_task.name = "diag_1/script_1" diagnostic_task.ancestors = [preproc_task] result = str(diagnostic_task) print(result) @@ -110,21 +108,21 @@ def test_repr_simple_tree(preproc_task, diagnostic_task): def test_repr_full_tree(preproc_task, diagnostic_task): - """Test a more comlicated task tree.""" + """Test a more complicated task tree.""" derive_input_task_1 = copy.deepcopy(preproc_task) - derive_input_task_1.name = 'diag_1/tas_derive_input_1' + derive_input_task_1.name = "diag_1/tas_derive_input_1" derive_input_task_2 = copy.deepcopy(preproc_task) - derive_input_task_2.name = 'diag_1/tas_derive_input_2' + derive_input_task_2.name = "diag_1/tas_derive_input_2" - preproc_task.name = 'diag_1/tas' + preproc_task.name = "diag_1/tas" preproc_task.ancestors = [derive_input_task_1, derive_input_task_2] diagnostic_task_1 = copy.deepcopy(diagnostic_task) - diagnostic_task_1.name = 'diag_1/script_1' + diagnostic_task_1.name = "diag_1/script_1" diagnostic_task_1.ancestors = [preproc_task] - diagnostic_task.name = 'diag_1/script_2' + diagnostic_task.name = "diag_1/script_2" diagnostic_task.ancestors = [diagnostic_task_1] result = str(diagnostic_task) print(result) diff --git a/tests/unit/task/test_resume_task.py b/tests/unit/task/test_resume_task.py index c5f2693970..eb71ab8fc9 100644 --- a/tests/unit/task/test_resume_task.py +++ b/tests/unit/task/test_resume_task.py @@ -5,22 +5,22 @@ def test_run(tmp_path): """Test `esmvalcore._task.ResumeTask.run`.""" - task_name = 'diagnostic_name/var_name' - prev_output_dir = tmp_path / 'recipe_test_20210911_102100' - prev_preproc_dir = prev_output_dir / 'preproc' / task_name + task_name = "diagnostic_name/var_name" + prev_output_dir = tmp_path / "recipe_test_20210911_102100" + prev_preproc_dir = prev_output_dir / "preproc" / task_name prev_preproc_dir.mkdir(parents=True) prev_metadata = { - f'/original/recipe_output/preproc/{task_name}/file.nc': { - 'filename': 
f'/original/recipe_output/preproc/{task_name}/file.nc', - 'attribute1': 'value1', + f"/original/recipe_output/preproc/{task_name}/file.nc": { + "filename": f"/original/recipe_output/preproc/{task_name}/file.nc", + "attribute1": "value1", } } - prev_metadata_file = prev_preproc_dir / 'metadata.yml' - with prev_metadata_file.open('w', encoding='utf-8') as file: + prev_metadata_file = prev_preproc_dir / "metadata.yml" + with prev_metadata_file.open("w", encoding="utf-8") as file: yaml.safe_dump(prev_metadata, file) - output_dir = tmp_path / 'recipe_test_20211001_092100' - preproc_dir = output_dir / 'preproc' / task_name + output_dir = tmp_path / "recipe_test_20211001_092100" + preproc_dir = output_dir / "preproc" / task_name task = ResumeTask( prev_preproc_dir, @@ -30,15 +30,15 @@ def test_run(tmp_path): result = task.run() - metadata_file = preproc_dir / 'metadata.yml' + metadata_file = preproc_dir / "metadata.yml" assert result == [str(metadata_file)] - with metadata_file.open('rb') as file: + with metadata_file.open("rb") as file: metadata = yaml.safe_load(file) assert metadata == { - str(prev_preproc_dir / 'file.nc'): { - 'filename': str(prev_preproc_dir / 'file.nc'), - 'attribute1': 'value1', + str(prev_preproc_dir / "file.nc"): { + "filename": str(prev_preproc_dir / "file.nc"), + "attribute1": "value1", }, } diff --git a/tests/unit/test_citation.py b/tests/unit/test_citation.py index 4bd8bbdb1b..3905da3fd3 100644 --- a/tests/unit/test_citation.py +++ b/tests/unit/test_citation.py @@ -4,4 +4,4 @@ def test_extract_tags(): tags = "['example1', 'example_2', 'example-3']" result = _citation._extract_tags(tags) - assert result == {'example1', 'example_2', 'example-3'} + assert result == {"example1", "example_2", "example-3"} diff --git a/tests/unit/test_cmor_api.py b/tests/unit/test_cmor_api.py index ac61c5006f..cce1fab9d8 100644 --- a/tests/unit/test_cmor_api.py +++ b/tests/unit/test_cmor_api.py @@ -18,11 +18,10 @@ def test_cmor_check_metadata(mocker): """Test ``cmor_check_metadata``""" mock_get_cmor_checker = mocker.patch.object( - esmvalcore.cmor.check, '_get_cmor_checker', autospec=True + esmvalcore.cmor.check, "_get_cmor_checker", autospec=True ) ( - mock_get_cmor_checker.return_value.return_value.check_metadata. - return_value + mock_get_cmor_checker.return_value.return_value.check_metadata.return_value ) = sentinel.checked_cube cube = cmor_check_metadata( @@ -43,8 +42,7 @@ def test_cmor_check_metadata(mocker): ) mock_get_cmor_checker.return_value.assert_called_once_with(sentinel.cube) ( - mock_get_cmor_checker.return_value.return_value.check_metadata. - assert_called_once_with() + mock_get_cmor_checker.return_value.return_value.check_metadata.assert_called_once_with() ) assert cube == sentinel.checked_cube @@ -52,11 +50,10 @@ def test_cmor_check_metadata(mocker): def test_cmor_check_data(mocker): """Test ``cmor_check_data``""" mock_get_cmor_checker = mocker.patch.object( - esmvalcore.cmor.check, '_get_cmor_checker', autospec=True + esmvalcore.cmor.check, "_get_cmor_checker", autospec=True ) ( - mock_get_cmor_checker.return_value.return_value.check_data. - return_value + mock_get_cmor_checker.return_value.return_value.check_data.return_value ) = sentinel.checked_cube cube = cmor_check_data( @@ -77,8 +74,7 @@ def test_cmor_check_data(mocker): ) mock_get_cmor_checker.return_value.assert_called_once_with(sentinel.cube) ( - mock_get_cmor_checker.return_value.return_value.check_data. 
- assert_called_once_with() + mock_get_cmor_checker.return_value.return_value.check_data.assert_called_once_with() ) assert cube == sentinel.checked_cube @@ -87,13 +83,13 @@ def test_cmor_check(mocker): """Test ``cmor_check``""" mock_cmor_check_metadata = mocker.patch.object( esmvalcore.cmor.check, - 'cmor_check_metadata', + "cmor_check_metadata", autospec=True, return_value=sentinel.cube_after_check_metadata, ) mock_cmor_check_data = mocker.patch.object( esmvalcore.cmor.check, - 'cmor_check_data', + "cmor_check_data", autospec=True, return_value=sentinel.cube_after_check_data, ) diff --git a/tests/unit/test_dataset.py b/tests/unit/test_dataset.py index 25129dbf5d..66d23306ec 100644 --- a/tests/unit/test_dataset.py +++ b/tests/unit/test_dataset.py @@ -16,57 +16,66 @@ def test_repr(): - ds = Dataset(short_name='tas', dataset='dataset1') + ds = Dataset(short_name="tas", dataset="dataset1") - assert repr(ds) == textwrap.dedent(""" + assert ( + repr(ds) + == textwrap.dedent(""" Dataset: {'dataset': 'dataset1', 'short_name': 'tas'} """).strip() + ) def test_repr_session(mocker): - ds = Dataset(short_name='tas', dataset='dataset1') + ds = Dataset(short_name="tas", dataset="dataset1") ds.session = mocker.Mock() - ds.session.session_name = 'test-session' - assert repr(ds) == textwrap.dedent(""" + ds.session.session_name = "test-session" + assert ( + repr(ds) + == textwrap.dedent(""" Dataset: {'dataset': 'dataset1', 'short_name': 'tas'} session: 'test-session' """).strip() + ) def test_repr_supplementary(): - ds = Dataset(dataset='dataset1', short_name='tas') - ds.add_supplementary(short_name='areacella') + ds = Dataset(dataset="dataset1", short_name="tas") + ds.add_supplementary(short_name="areacella") - assert repr(ds) == textwrap.dedent(""" + assert ( + repr(ds) + == textwrap.dedent(""" Dataset: {'dataset': 'dataset1', 'short_name': 'tas'} supplementaries: {'dataset': 'dataset1', 'short_name': 'areacella'} """).strip() + ) @pytest.mark.parametrize( "separator,join_lists,output", [ - ('_', False, "1_d_dom_a_('e1', 'e2')_['ens2', 'ens1']_g1_v1"), - ('_', True, "1_d_dom_a_e1-e2_ens2-ens1_g1_v1"), - (' ', False, "1 d dom a ('e1', 'e2') ['ens2', 'ens1'] g1 v1"), - (' ', True, "1 d dom a e1-e2 ens2-ens1 g1 v1"), - ] + ("_", False, "1_d_dom_a_('e1', 'e2')_['ens2', 'ens1']_g1_v1"), + ("_", True, "1_d_dom_a_e1-e2_ens2-ens1_g1_v1"), + (" ", False, "1 d dom a ('e1', 'e2') ['ens2', 'ens1'] g1 v1"), + (" ", True, "1 d dom a e1-e2 ens2-ens1 g1 v1"), + ], ) def test_get_joined_summary_facet(separator, join_lists, output): ds = Dataset( - test='this should not appear', - rcm_version='1', - driver='d', - domain='dom', - activity='a', - exp=('e1', 'e2'), - ensemble=['ens2', 'ens1'], - grid='g1', - version='v1', + test="this should not appear", + rcm_version="1", + driver="d", + domain="dom", + activity="a", + exp=("e1", "e2"), + ensemble=["ens2", "ens1"], + grid="g1", + version="v1", ) joined_str = ds._get_joined_summary_facets( separator, join_lists=join_lists @@ -76,26 +85,28 @@ def test_get_joined_summary_facet(separator, join_lists, output): def test_short_summary(): ds = Dataset( - project='CMIP6', - dataset='dataset1', - short_name='tos', - mip='Omon', - ) - ds.add_supplementary(short_name='areacello', mip='Ofx') - ds.add_supplementary(short_name='volcello') - expected = ("Dataset: tos, Omon, CMIP6, dataset1, " - "supplementaries: areacello, Ofx; volcello") + project="CMIP6", + dataset="dataset1", + short_name="tos", + mip="Omon", + ) + ds.add_supplementary(short_name="areacello", mip="Ofx") + 
ds.add_supplementary(short_name="volcello") + expected = ( + "Dataset: tos, Omon, CMIP6, dataset1, " + "supplementaries: areacello, Ofx; volcello" + ) assert ds.summary(shorten=True) == expected def test_long_summary(): - ds = Dataset(dataset='dataset1', short_name='tas') + ds = Dataset(dataset="dataset1", short_name="tas") assert ds.summary(shorten=False) == repr(ds) def test_session_setter(): - ds = Dataset(short_name='tas') - ds.add_supplementary(short_name='areacella') + ds = Dataset(short_name="tas") + ds.add_supplementary(short_name="areacella") assert ds._session is None assert ds.supplementaries[0]._session is None @@ -106,121 +117,121 @@ def test_session_setter(): @pytest.mark.parametrize( - 'facets,added_facets', + "facets,added_facets", [ [ { - 'short_name': 'areacella', - 'project': 'ICON', - 'mip': 'fx', - 'dataset': 'ICON', + "short_name": "areacella", + "project": "ICON", + "mip": "fx", + "dataset": "ICON", }, { # Added from CMOR table - 'original_short_name': 'areacella', - 'standard_name': 'cell_area', - 'long_name': 'Grid-Cell Area for Atmospheric Grid Variables', - 'units': 'm2', - 'modeling_realm': ['atmos', 'land'], - 'frequency': 'fx', + "original_short_name": "areacella", + "standard_name": "cell_area", + "long_name": "Grid-Cell Area for Atmospheric Grid Variables", + "units": "m2", + "modeling_realm": ["atmos", "land"], + "frequency": "fx", # Added from extra facets YAML file - 'latitude': 'grid_latitude', - 'longitude': 'grid_longitude', - 'raw_name': 'cell_area', + "latitude": "grid_latitude", + "longitude": "grid_longitude", + "raw_name": "cell_area", }, ], [ { - 'short_name': 'zg', - 'mip': 'A1', - 'project': 'CMIP3', - 'dataset': 'bccr_bcm2_0', - 'frequency': 'mon', - 'exp': 'historical', - 'ensemble': 'r1i1p1', - 'modeling_realm': 'atmos', + "short_name": "zg", + "mip": "A1", + "project": "CMIP3", + "dataset": "bccr_bcm2_0", + "frequency": "mon", + "exp": "historical", + "ensemble": "r1i1p1", + "modeling_realm": "atmos", }, { # Added from CMOR table - 'original_short_name': 'zg', - 'long_name': 'Geopotential Height', - 'standard_name': 'geopotential_height', - 'units': 'm', + "original_short_name": "zg", + "long_name": "Geopotential Height", + "standard_name": "geopotential_height", + "units": "m", # Added from extra facets YAML file - 'institute': ['BCCR'], + "institute": ["BCCR"], }, ], [ { - 'short_name': 'pr', - 'mip': '3hr', - 'project': 'CMIP5', - 'dataset': 'CNRM-CM5', - 'exp': 'historical', - 'ensemble': 'r1i1p1', - 'timerange': '2000/2000', + "short_name": "pr", + "mip": "3hr", + "project": "CMIP5", + "dataset": "CNRM-CM5", + "exp": "historical", + "ensemble": "r1i1p1", + "timerange": "2000/2000", }, { # Added from CMOR table - 'original_short_name': 'pr', - 'frequency': '3hr', - 'long_name': 'Precipitation', - 'modeling_realm': ['atmos'], - 'standard_name': 'precipitation_flux', - 'units': 'kg m-2 s-1', + "original_short_name": "pr", + "frequency": "3hr", + "long_name": "Precipitation", + "modeling_realm": ["atmos"], + "standard_name": "precipitation_flux", + "units": "kg m-2 s-1", # Added from extra facets YAML file - 'institute': ['CNRM-CERFACS'], - 'product': ['output1', 'output2'], + "institute": ["CNRM-CERFACS"], + "product": ["output1", "output2"], }, ], [ { - 'short_name': 'pr', - 'mip': '3hr', - 'project': 'CMIP6', - 'dataset': 'HadGEM3-GC31-LL', - 'exp': 'historical', - 'ensemble': 'r2i1p1f1', - 'grid': 'gn', - 'timerange': '2000/2001', + "short_name": "pr", + "mip": "3hr", + "project": "CMIP6", + "dataset": "HadGEM3-GC31-LL", + "exp": 
"historical", + "ensemble": "r2i1p1f1", + "grid": "gn", + "timerange": "2000/2001", }, { # Added from CMOR table - 'activity': 'CMIP', - 'frequency': '3hr', - 'institute': ['MOHC', 'NERC'], - 'long_name': 'Precipitation', - 'modeling_realm': ['atmos'], - 'original_short_name': 'pr', - 'standard_name': 'precipitation_flux', - 'timerange': '2000/2001', - 'units': 'kg m-2 s-1', - } + "activity": "CMIP", + "frequency": "3hr", + "institute": ["MOHC", "NERC"], + "long_name": "Precipitation", + "modeling_realm": ["atmos"], + "original_short_name": "pr", + "standard_name": "precipitation_flux", + "timerange": "2000/2001", + "units": "kg m-2 s-1", + }, ], [ { - 'short_name': 'tas', - 'mip': 'mon', - 'project': 'CORDEX', - 'dataset': 'MOHC-HadGEM3-RA', - 'product': 'output', - 'domain': 'AFR-44', - 'driver': 'ECMWF-ERAINT', - 'exp': 'evaluation', - 'ensemble': 'r1i1p1', - 'institute': 'MOHC', - 'rcm_version': 'v1', - 'timerange': '1991/1993', + "short_name": "tas", + "mip": "mon", + "project": "CORDEX", + "dataset": "MOHC-HadGEM3-RA", + "product": "output", + "domain": "AFR-44", + "driver": "ECMWF-ERAINT", + "exp": "evaluation", + "ensemble": "r1i1p1", + "institute": "MOHC", + "rcm_version": "v1", + "timerange": "1991/1993", }, { # Added from CMOR table - 'frequency': 'mon', - 'long_name': 'Near-Surface Air Temperature', - 'modeling_realm': ['atmos'], - 'original_short_name': 'tas', - 'standard_name': 'air_temperature', - 'timerange': '1991/1993', - 'units': 'K', + "frequency": "mon", + "long_name": "Near-Surface Air Temperature", + "modeling_realm": ["atmos"], + "original_short_name": "tas", + "standard_name": "air_temperature", + "timerange": "1991/1993", + "units": "K", }, ], ], @@ -248,17 +259,17 @@ def test_from_recipe(session, tmp_path): additional_datasets: - {dataset: dataset1} """) - recipe = tmp_path / 'recipe_test.yml' - recipe.write_text(recipe_txt, encoding='utf-8') + recipe = tmp_path / "recipe_test.yml" + recipe.write_text(recipe_txt, encoding="utf-8") dataset = Dataset( - diagnostic='diagnostic1', - variable_group='tas', - short_name='tas', - dataset='dataset1', - project='CMIP5', - mip='Amon', - alias='dataset1', + diagnostic="diagnostic1", + variable_group="tas", + short_name="tas", + dataset="dataset1", + project="CMIP5", + mip="Amon", + alias="dataset1", recipe_dataset_index=0, ) dataset.session = session @@ -290,68 +301,68 @@ def test_from_recipe_advanced(session, tmp_path): tos: mip: Omon """) - recipe = tmp_path / 'recipe_test.yml' - recipe.write_text(recipe_txt, encoding='utf-8') + recipe = tmp_path / "recipe_test.yml" + recipe.write_text(recipe_txt, encoding="utf-8") datasets = [ Dataset( - diagnostic='diagnostic1', - variable_group='ta', - short_name='ta', - dataset='dataset1', - project='CMIP6', - mip='Amon', - alias='CMIP6_dataset1', + diagnostic="diagnostic1", + variable_group="ta", + short_name="ta", + dataset="dataset1", + project="CMIP6", + mip="Amon", + alias="CMIP6_dataset1", recipe_dataset_index=0, ), Dataset( - diagnostic='diagnostic1', - variable_group='ta', - short_name='ta', - dataset='dataset2', - project='CMIP6', - mip='Amon', - alias='CMIP6_dataset2', + diagnostic="diagnostic1", + variable_group="ta", + short_name="ta", + dataset="dataset2", + project="CMIP6", + mip="Amon", + alias="CMIP6_dataset2", recipe_dataset_index=1, ), Dataset( - diagnostic='diagnostic1', - variable_group='pr', - short_name='pr', - dataset='dataset1', - project='CMIP6', - mip='Amon', - alias='CMIP6_dataset1', + diagnostic="diagnostic1", + variable_group="pr", + short_name="pr", + 
dataset="dataset1", + project="CMIP6", + mip="Amon", + alias="CMIP6_dataset1", recipe_dataset_index=0, ), Dataset( - diagnostic='diagnostic1', - variable_group='pr', - short_name='pr', - dataset='dataset2', - project='CMIP6', - mip='Amon', - alias='CMIP6_dataset2', + diagnostic="diagnostic1", + variable_group="pr", + short_name="pr", + dataset="dataset2", + project="CMIP6", + mip="Amon", + alias="CMIP6_dataset2", recipe_dataset_index=1, ), Dataset( - diagnostic='diagnostic1', - variable_group='pr', - short_name='pr', - dataset='dataset3', - project='CMIP5', - mip='Amon', - alias='CMIP5', + diagnostic="diagnostic1", + variable_group="pr", + short_name="pr", + dataset="dataset3", + project="CMIP5", + mip="Amon", + alias="CMIP5", recipe_dataset_index=2, ), Dataset( - diagnostic='diagnostic2', - variable_group='tos', - short_name='tos', - dataset='dataset1', - project='CMIP6', - mip='Omon', - alias='dataset1', + diagnostic="diagnostic2", + variable_group="tos", + short_name="tos", + dataset="dataset1", + project="CMIP6", + mip="Omon", + alias="dataset1", recipe_dataset_index=0, ), ] @@ -374,30 +385,30 @@ def test_from_recipe_with_ranges(session, tmp_path): mip: Amon project: CMIP6 """) - recipe = tmp_path / 'recipe_test.yml' - recipe.write_text(recipe_txt, encoding='utf-8') + recipe = tmp_path / "recipe_test.yml" + recipe.write_text(recipe_txt, encoding="utf-8") datasets = [ Dataset( - diagnostic='diagnostic1', - variable_group='ta', - short_name='ta', - dataset='dataset1', - ensemble='r1i1p1', - project='CMIP6', - mip='Amon', - alias='r1i1p1', + diagnostic="diagnostic1", + variable_group="ta", + short_name="ta", + dataset="dataset1", + ensemble="r1i1p1", + project="CMIP6", + mip="Amon", + alias="r1i1p1", recipe_dataset_index=0, ), Dataset( - diagnostic='diagnostic1', - variable_group='ta', - short_name='ta', - dataset='dataset1', - ensemble='r2i1p1', - project='CMIP6', - mip='Amon', - alias='r2i1p1', + diagnostic="diagnostic1", + variable_group="ta", + short_name="ta", + dataset="dataset1", + ensemble="r2i1p1", + project="CMIP6", + mip="Amon", + alias="r2i1p1", recipe_dataset_index=1, ), ] @@ -423,27 +434,27 @@ def test_from_recipe_with_supplementary(session, tmp_path): - short_name: sftof mip: fx """) - recipe = tmp_path / 'recipe_test.yml' - recipe.write_text(recipe_txt, encoding='utf-8') + recipe = tmp_path / "recipe_test.yml" + recipe.write_text(recipe_txt, encoding="utf-8") dataset = Dataset( - diagnostic='diagnostic1', - variable_group='tos', - short_name='tos', - dataset='dataset1', - ensemble='r1i1p1', - project='CMIP5', - mip='Omon', - alias='dataset1', + diagnostic="diagnostic1", + variable_group="tos", + short_name="tos", + dataset="dataset1", + ensemble="r1i1p1", + project="CMIP5", + mip="Omon", + alias="dataset1", recipe_dataset_index=0, ) dataset.supplementaries = [ Dataset( - short_name='sftof', - dataset='dataset1', - ensemble='r1i1p1', - project='CMIP5', - mip='fx', + short_name="sftof", + dataset="dataset1", + ensemble="r1i1p1", + project="CMIP5", + mip="fx", ), ] dataset.session = session @@ -469,27 +480,27 @@ def test_from_recipe_with_skip_supplementary(session, tmp_path): - short_name: areacello skip: true """) - recipe = tmp_path / 'recipe_test.yml' - recipe.write_text(recipe_txt, encoding='utf-8') + recipe = tmp_path / "recipe_test.yml" + recipe.write_text(recipe_txt, encoding="utf-8") dataset = Dataset( - diagnostic='diagnostic1', - variable_group='tos', - short_name='tos', - dataset='dataset1', - ensemble='r1i1p1', - project='CMIP5', - mip='Omon', - alias='dataset1', 
+ diagnostic="diagnostic1", + variable_group="tos", + short_name="tos", + dataset="dataset1", + ensemble="r1i1p1", + project="CMIP5", + mip="Omon", + alias="dataset1", recipe_dataset_index=0, ) dataset.supplementaries = [ Dataset( - short_name='sftof', - dataset='dataset1', - ensemble='r1i1p1', - project='CMIP5', - mip='fx', + short_name="sftof", + dataset="dataset1", + ensemble="r1i1p1", + project="CMIP5", + mip="fx", ), ] dataset.session = session @@ -497,29 +508,29 @@ def test_from_recipe_with_skip_supplementary(session, tmp_path): assert Dataset.from_recipe(recipe, session) == [dataset] -def test_from_recipe_with_automatic_supplementary(session, tmp_path, - monkeypatch): - +def test_from_recipe_with_automatic_supplementary( + session, tmp_path, monkeypatch +): def _find_files(self): - if self.facets['short_name'] == 'areacello': + if self.facets["short_name"] == "areacello": file = esmvalcore.local.LocalFile() file.facets = { - 'short_name': 'areacello', - 'mip': 'fx', - 'project': 'CMIP5', - 'dataset': 'dataset1', - 'ensemble': 'r0i0p0', - 'exp': 'piControl', - 'institute': 'X', - 'product': 'output1', - 'version': 'v2', + "short_name": "areacello", + "mip": "fx", + "project": "CMIP5", + "dataset": "dataset1", + "ensemble": "r0i0p0", + "exp": "piControl", + "institute": "X", + "product": "output1", + "version": "v2", } files = [file] else: files = [] self._files = files - monkeypatch.setattr(Dataset, '_find_files', _find_files) + monkeypatch.setattr(Dataset, "_find_files", _find_files) recipe_txt = textwrap.dedent(""" preprocessors: @@ -540,34 +551,34 @@ def _find_files(self): preprocessor: global_mean version: v1 """) - recipe = tmp_path / 'recipe_test.yml' - recipe.write_text(recipe_txt, encoding='utf-8') + recipe = tmp_path / "recipe_test.yml" + recipe.write_text(recipe_txt, encoding="utf-8") dataset = Dataset( - diagnostic='diagnostic1', - variable_group='tos', - short_name='tos', - dataset='dataset1', - ensemble='r1i1p1', - exp='historical', - preprocessor='global_mean', - project='CMIP5', - version='v1', - mip='Omon', - alias='dataset1', + diagnostic="diagnostic1", + variable_group="tos", + short_name="tos", + dataset="dataset1", + ensemble="r1i1p1", + exp="historical", + preprocessor="global_mean", + project="CMIP5", + version="v1", + mip="Omon", + alias="dataset1", recipe_dataset_index=0, ) dataset.supplementaries = [ Dataset( - short_name='areacello', - dataset='dataset1', - institute='X', - product='output1', - ensemble='r0i0p0', - exp='piControl', - project='CMIP5', - version='v2', - mip='fx', + short_name="areacello", + dataset="dataset1", + institute="X", + product="output1", + ensemble="r0i0p0", + exp="piControl", + project="CMIP5", + version="v2", + mip="fx", ), ] dataset.session = session @@ -575,12 +586,15 @@ def _find_files(self): assert Dataset.from_recipe(recipe, session) == [dataset] -@pytest.mark.parametrize('pattern,result', ( - ['a', False], - ['*', True], - ['r?i1p1', True], - ['r[1-3]i1p1*', True], -)) +@pytest.mark.parametrize( + "pattern,result", + ( + ["a", False], + ["*", True], + ["r?i1p1", True], + ["r[1-3]i1p1*", True], + ), +) def test_isglob(pattern, result): assert esmvalcore.dataset._isglob(pattern) == result @@ -588,104 +602,102 @@ def test_isglob(pattern, result): def mock_find_files(*files): files_map = defaultdict(list) for file in files: - files_map[file.facets['short_name']].append(file) + files_map[file.facets["short_name"]].append(file) def find_files(self): - self.files = files_map[self['short_name']] + self.files = 
files_map[self["short_name"]] for supplementary in self.supplementaries: - supplementary.files = files_map[supplementary['short_name']] + supplementary.files = files_map[supplementary["short_name"]] return find_files def test_from_files(session, monkeypatch): - rootpath = Path('/path/to/data') + rootpath = Path("/path/to/data") file1 = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'CMIP', - 'CAS', - 'FGOALS-g3', - 'historical', - 'r3i1p1f1', - 'Amon', - 'tas', - 'gn', - 'v20190827', - 'tas_Amon_FGOALS-g3_historical_r3i1p1f1_gn_199001-199912.nc', + "CMIP6", + "CMIP", + "CAS", + "FGOALS-g3", + "historical", + "r3i1p1f1", + "Amon", + "tas", + "gn", + "v20190827", + "tas_Amon_FGOALS-g3_historical_r3i1p1f1_gn_199001-199912.nc", ) file1.facets = { - 'activity': 'CMIP', - 'institute': 'CAS', - 'dataset': 'FGOALS-g3', - 'exp': 'historical', - 'mip': 'Amon', - 'ensemble': 'r3i1p1f1', - 'short_name': 'tas', - 'grid': 'gn', - 'version': 'v20190827', + "activity": "CMIP", + "institute": "CAS", + "dataset": "FGOALS-g3", + "exp": "historical", + "mip": "Amon", + "ensemble": "r3i1p1f1", + "short_name": "tas", + "grid": "gn", + "version": "v20190827", } file2 = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'CMIP', - 'CAS', - 'FGOALS-g3', - 'historical', - 'r3i1p1f1', - 'Amon', - 'tas', - 'gn', - 'v20190827', - 'tas_Amon_FGOALS-g3_historical_r3i1p1f1_gn_200001-200912.nc', + "CMIP6", + "CMIP", + "CAS", + "FGOALS-g3", + "historical", + "r3i1p1f1", + "Amon", + "tas", + "gn", + "v20190827", + "tas_Amon_FGOALS-g3_historical_r3i1p1f1_gn_200001-200912.nc", ) file2.facets = dict(file1.facets) file3 = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'CMIP', - 'NCC', - 'NorESM2-LM', - 'historical', - 'r1i1p1f1', - 'Amon', - 'tas', - 'gn', - 'v20190815', - 'tas_Amon_NorESM2-LM_historical_r1i1p1f1_gn_200001-201412.nc', + "CMIP6", + "CMIP", + "NCC", + "NorESM2-LM", + "historical", + "r1i1p1f1", + "Amon", + "tas", + "gn", + "v20190815", + "tas_Amon_NorESM2-LM_historical_r1i1p1f1_gn_200001-201412.nc", ) file3.facets = { - 'activity': 'CMIP', - 'institute': 'NCC', - 'dataset': 'NorESM2-LM', - 'exp': 'historical', - 'mip': 'Amon', - 'ensemble': 'r1i1p1f1', - 'short_name': 'tas', - 'grid': 'gn', - 'version': 'v20190815', + "activity": "CMIP", + "institute": "NCC", + "dataset": "NorESM2-LM", + "exp": "historical", + "mip": "Amon", + "ensemble": "r1i1p1f1", + "short_name": "tas", + "grid": "gn", + "version": "v20190815", } find_files = mock_find_files(file1, file2, file3) - monkeypatch.setattr(Dataset, 'find_files', find_files) + monkeypatch.setattr(Dataset, "find_files", find_files) dataset = Dataset( - short_name='tas', - mip='Amon', - project='CMIP6', - dataset='*', + short_name="tas", + mip="Amon", + project="CMIP6", + dataset="*", ) dataset.session = session datasets = list(dataset.from_files()) expected = [ - Dataset(short_name='tas', - mip='Amon', - project='CMIP6', - dataset='FGOALS-g3'), - Dataset(short_name='tas', - mip='Amon', - project='CMIP6', - dataset='NorESM2-LM'), + Dataset( + short_name="tas", mip="Amon", project="CMIP6", dataset="FGOALS-g3" + ), + Dataset( + short_name="tas", mip="Amon", project="CMIP6", dataset="NorESM2-LM" + ), ] for expected_ds in expected: expected_ds.session = session @@ -695,193 +707,195 @@ def test_from_files(session, monkeypatch): def test_from_files_with_supplementary(session, monkeypatch): - rootpath = Path('/path/to/data') - file = esmvalcore.local.LocalFile( + rootpath = Path("/path/to/data") + file1 = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'CMIP', - 
'CAS', - 'FGOALS-g3', - 'historical', - 'r3i1p1f1', - 'Amon', - 'tas', - 'gn', - 'v20190827', - 'tas_Amon_FGOALS-g3_historical_r3i1p1f1_gn_199001-199912.nc', + "CMIP6", + "CMIP", + "CAS", + "FGOALS-g3", + "historical", + "r3i1p1f1", + "Amon", + "tas", + "gn", + "v20190827", + "tas_Amon_FGOALS-g3_historical_r3i1p1f1_gn_199001-199912.nc", ) - file.facets = { - 'activity': 'CMIP', - 'institute': 'CAS', - 'dataset': 'FGOALS-g3', - 'exp': 'historical', - 'mip': 'Amon', - 'ensemble': 'r3i1p1f1', - 'short_name': 'tas', - 'grid': 'gn', - 'version': 'v20190827', + file1.facets = { + "activity": "CMIP", + "institute": "CAS", + "dataset": "FGOALS-g3", + "exp": "historical", + "mip": "Amon", + "ensemble": "r3i1p1f1", + "short_name": "tas", + "grid": "gn", + "version": "v20190827", } - afile = esmvalcore.local.LocalFile( + file2 = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'CMIP', - 'CAS', - 'FGOALS-g3', - 'historical', - 'r1i1p1f1', - 'fx', - 'tas', - 'gn', - 'v20210615', - 'areacella_fx_FGOALS-g3_historical_r1i1p1f1_gn.nc', - ) - afile.facets = { - 'activity': 'CMIP', - 'institute': 'CAS', - 'dataset': 'FGOALS-g3', - 'exp': 'historical', - 'mip': 'fx', - 'ensemble': 'r1i1p1f1', - 'short_name': 'areacella', - 'grid': 'gn', - 'version': 'v20210615', + "CMIP6", + "CMIP", + "CAS", + "FGOALS-g3", + "historical", + "r1i1p1f1", + "fx", + "tas", + "gn", + "v20210615", + "areacella_fx_FGOALS-g3_historical_r1i1p1f1_gn.nc", + ) + file2.facets = { + "activity": "CMIP", + "institute": "CAS", + "dataset": "FGOALS-g3", + "exp": "historical", + "mip": "fx", + "ensemble": "r1i1p1f1", + "short_name": "areacella", + "grid": "gn", + "version": "v20210615", } - monkeypatch.setattr(Dataset, 'find_files', mock_find_files(file, afile)) + monkeypatch.setattr(Dataset, "find_files", mock_find_files(file1, file2)) dataset = Dataset( - short_name='tas', - mip='Amon', - project='CMIP6', - dataset='FGOALS-g3', - ensemble='*', + short_name="tas", + mip="Amon", + project="CMIP6", + dataset="FGOALS-g3", + ensemble="*", ) dataset.session = session - dataset.add_supplementary(short_name='areacella', mip='*', ensemble='*') + dataset.add_supplementary(short_name="areacella", mip="*", ensemble="*") expected = Dataset( - short_name='tas', - mip='Amon', - project='CMIP6', - dataset='FGOALS-g3', - ensemble='r3i1p1f1', + short_name="tas", + mip="Amon", + project="CMIP6", + dataset="FGOALS-g3", + ensemble="r3i1p1f1", ) expected.session = session expected.add_supplementary( - short_name='areacella', - mip='fx', - ensemble='r1i1p1f1', + short_name="areacella", + mip="fx", + ensemble="r1i1p1f1", ) datasets = list(dataset.from_files()) assert all(ds.session == session for ds in datasets) - assert all(ads.session == session for ds in datasets - for ads in ds.supplementaries) + assert all( + ads.session == session for ds in datasets for ads in ds.supplementaries + ) assert datasets == [expected] def test_from_files_with_globs(monkeypatch, session): """Test `from_files` with wildcards in dataset and supplementary.""" - rootpath = Path('/path/to/data') - file = esmvalcore.local.LocalFile( + rootpath = Path("/path/to/data") + file1 = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'CMIP', - 'BCC', - 'BCC-CSM2-MR', - 'historical', - 'r1i1p1f1', - 'Amon', - 'tas', - 'gn', - 'v20181126', - 'tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc', + "CMIP6", + "CMIP", + "BCC", + "BCC-CSM2-MR", + "historical", + "r1i1p1f1", + "Amon", + "tas", + "gn", + "v20181126", + "tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc", ) - 
file.facets = { - 'activity': 'CMIP', - 'dataset': 'BCC-CSM2-MR', - 'exp': 'historical', - 'ensemble': 'r1i1p1f1', - 'grid': 'gn', - 'institute': 'BCC', - 'mip': 'Amon', - 'project': 'CMIP6', - 'short_name': 'tas', - 'version': 'v20181126', + file1.facets = { + "activity": "CMIP", + "dataset": "BCC-CSM2-MR", + "exp": "historical", + "ensemble": "r1i1p1f1", + "grid": "gn", + "institute": "BCC", + "mip": "Amon", + "project": "CMIP6", + "short_name": "tas", + "version": "v20181126", } - afile = esmvalcore.local.LocalFile( + file2 = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'GMMIP', - 'BCC', - 'BCC-CSM2-MR', - 'hist-resIPO', - 'r1i1p1f1', - 'fx', - 'areacella', - 'gn', - 'v20190613', - 'areacella_fx_BCC-CSM2-MR_hist-resIPO_r1i1p1f1_gn.nc', - ) - afile.facets = { - 'activity': 'GMMIP', - 'dataset': 'BCC-CSM2-MR', - 'ensemble': 'r1i1p1f1', - 'exp': 'hist-resIPO', - 'grid': 'gn', - 'institute': 'BCC', - 'mip': 'fx', - 'project': 'CMIP6', - 'short_name': 'areacella', - 'version': 'v20190613', + "CMIP6", + "GMMIP", + "BCC", + "BCC-CSM2-MR", + "hist-resIPO", + "r1i1p1f1", + "fx", + "areacella", + "gn", + "v20190613", + "areacella_fx_BCC-CSM2-MR_hist-resIPO_r1i1p1f1_gn.nc", + ) + file2.facets = { + "activity": "GMMIP", + "dataset": "BCC-CSM2-MR", + "ensemble": "r1i1p1f1", + "exp": "hist-resIPO", + "grid": "gn", + "institute": "BCC", + "mip": "fx", + "project": "CMIP6", + "short_name": "areacella", + "version": "v20190613", } dataset = Dataset( - activity='CMIP', - dataset='*', - ensemble='r1i1p1f1', - exp='historical', - grid='gn', - institute='*', - mip='Amon', - project='CMIP6', - short_name='tas', + activity="CMIP", + dataset="*", + ensemble="r1i1p1f1", + exp="historical", + grid="gn", + institute="*", + mip="Amon", + project="CMIP6", + short_name="tas", ) dataset.add_supplementary( - short_name='areacella', - mip='fx', - activity='*', - exp='*', + short_name="areacella", + mip="fx", + activity="*", + exp="*", ) - dataset.facets['timerange'] = '*' + dataset.facets["timerange"] = "*" dataset.session = session print(dataset) - monkeypatch.setattr(Dataset, 'find_files', mock_find_files(file, afile)) + monkeypatch.setattr(Dataset, "find_files", mock_find_files(file1, file2)) datasets = list(dataset.from_files()) assert all(ds.session == session for ds in datasets) - assert all(ads.session == session for ds in datasets - for ads in ds.supplementaries) + assert all( + ads.session == session for ds in datasets for ads in ds.supplementaries + ) expected = Dataset( - activity='CMIP', - dataset='BCC-CSM2-MR', - ensemble='r1i1p1f1', - exp='historical', - grid='gn', - institute='BCC', - mip='Amon', - project='CMIP6', - short_name='tas', + activity="CMIP", + dataset="BCC-CSM2-MR", + ensemble="r1i1p1f1", + exp="historical", + grid="gn", + institute="BCC", + mip="Amon", + project="CMIP6", + short_name="tas", ) expected.add_supplementary( - short_name='areacella', - mip='fx', - activity='GMMIP', - exp='hist-resIPO', + short_name="areacella", + mip="fx", + activity="GMMIP", + exp="hist-resIPO", ) - expected.facets['timerange'] = '185001/201412' + expected.facets["timerange"] = "185001/201412" expected.session = session assert datasets == [expected] @@ -892,77 +906,78 @@ def test_from_files_with_globs_and_missing_facets(monkeypatch, session): Tests a combination of files with complete facets and missing facets. 
""" - rootpath = Path('/path/to/data') + rootpath = Path("/path/to/data") file1 = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'CMIP', - 'BCC', - 'BCC-CSM2-MR', - 'historical', - 'r1i1p1f1', - 'Amon', - 'tas', - 'gn', - 'v20181126', - 'tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc', + "CMIP6", + "CMIP", + "BCC", + "BCC-CSM2-MR", + "historical", + "r1i1p1f1", + "Amon", + "tas", + "gn", + "v20181126", + "tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc", ) file1.facets = { - 'activity': 'CMIP', - 'dataset': 'BCC-CSM2-MR', - 'exp': 'historical', - 'ensemble': 'r1i1p1f1', - 'grid': 'gn', - 'institute': 'BCC', - 'mip': 'Amon', - 'project': 'CMIP6', - 'short_name': 'tas', - 'version': 'v20181126', + "activity": "CMIP", + "dataset": "BCC-CSM2-MR", + "exp": "historical", + "ensemble": "r1i1p1f1", + "grid": "gn", + "institute": "BCC", + "mip": "Amon", + "project": "CMIP6", + "short_name": "tas", + "version": "v20181126", } file2 = esmvalcore.local.LocalFile( rootpath, - 'tas', - 'tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc', + "tas", + "tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc", ) file2.facets = { - 'short_name': 'tas', + "short_name": "tas", } dataset = Dataset( - activity='CMIP', - dataset='*', - ensemble='r1i1p1f1', - exp='historical', - grid='gn', - institute='*', - mip='Amon', - project='CMIP6', - short_name='tas', - timerange='*', + activity="CMIP", + dataset="*", + ensemble="r1i1p1f1", + exp="historical", + grid="gn", + institute="*", + mip="Amon", + project="CMIP6", + short_name="tas", + timerange="*", ) dataset.session = session print(dataset) - monkeypatch.setattr(Dataset, 'find_files', mock_find_files(file1, file2)) + monkeypatch.setattr(Dataset, "find_files", mock_find_files(file1, file2)) datasets = list(dataset.from_files()) assert all(ds.session == session for ds in datasets) - assert all(ads.session == session for ds in datasets - for ads in ds.supplementaries) + assert all( + ads.session == session for ds in datasets for ads in ds.supplementaries + ) expected = Dataset( - activity='CMIP', - dataset='BCC-CSM2-MR', - ensemble='r1i1p1f1', - exp='historical', - grid='gn', - institute='BCC', - mip='Amon', - project='CMIP6', - short_name='tas', - timerange='185001/201412', + activity="CMIP", + dataset="BCC-CSM2-MR", + ensemble="r1i1p1f1", + exp="historical", + grid="gn", + institute="BCC", + mip="Amon", + project="CMIP6", + short_name="tas", + timerange="185001/201412", ) expected.session = session @@ -976,64 +991,65 @@ def test_from_files_with_globs_and_automatic_missing(monkeypatch, session): Test with wildcards and files with missing facets that can be automatically added. 
""" - rootpath = Path('/path/to/data') + rootpath = Path("/path/to/data") file = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'BCC-CSM2-MR', - 'historical', - 'r1i1p1f1', - 'Amon', - 'tas', - 'gn', - 'v20181126', - 'tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc', + "CMIP6", + "BCC-CSM2-MR", + "historical", + "r1i1p1f1", + "Amon", + "tas", + "gn", + "v20181126", + "tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc", ) file.facets = { - 'dataset': 'BCC-CSM2-MR', - 'exp': 'historical', - 'ensemble': 'r1i1p1f1', - 'grid': 'gn', - 'mip': 'Amon', - 'project': 'CMIP6', - 'short_name': 'tas', - 'version': 'v20181126', + "dataset": "BCC-CSM2-MR", + "exp": "historical", + "ensemble": "r1i1p1f1", + "grid": "gn", + "mip": "Amon", + "project": "CMIP6", + "short_name": "tas", + "version": "v20181126", } dataset = Dataset( - activity='CMIP', - dataset='*', - ensemble='r1i1p1f1', - exp='historical', - grid='gn', - institute='*', - mip='Amon', - project='CMIP6', - short_name='tas', - timerange='*', + activity="CMIP", + dataset="*", + ensemble="r1i1p1f1", + exp="historical", + grid="gn", + institute="*", + mip="Amon", + project="CMIP6", + short_name="tas", + timerange="*", ) dataset.session = session print(dataset) - monkeypatch.setattr(Dataset, 'find_files', mock_find_files(file)) + monkeypatch.setattr(Dataset, "find_files", mock_find_files(file)) datasets = list(dataset.from_files()) assert all(ds.session == session for ds in datasets) - assert all(ads.session == session for ds in datasets - for ads in ds.supplementaries) + assert all( + ads.session == session for ds in datasets for ads in ds.supplementaries + ) expected = Dataset( - activity='CMIP', - dataset='BCC-CSM2-MR', - ensemble='r1i1p1f1', - exp='historical', - grid='gn', - mip='Amon', - project='CMIP6', - short_name='tas', - timerange='185001/201412', + activity="CMIP", + dataset="BCC-CSM2-MR", + ensemble="r1i1p1f1", + exp="historical", + grid="gn", + mip="Amon", + project="CMIP6", + short_name="tas", + timerange="185001/201412", ) expected.session = session @@ -1043,67 +1059,68 @@ def test_from_files_with_globs_and_automatic_missing(monkeypatch, session): def test_from_files_with_globs_and_only_missing_facets(monkeypatch, session): """Test `from_files` with wildcards and only files with missing facets.""" - rootpath = Path('/path/to/data') + rootpath = Path("/path/to/data") file = esmvalcore.local.LocalFile( rootpath, - 'CMIP6', - 'CMIP', - 'BCC', - 'BCC-CSM2-MR', - 'historical', - 'r1i1p1f1', - 'Amon', - 'tas', - 'v20181126', - 'tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc', + "CMIP6", + "CMIP", + "BCC", + "BCC-CSM2-MR", + "historical", + "r1i1p1f1", + "Amon", + "tas", + "v20181126", + "tas_Amon_BCC-CSM2-MR_historical_r1i1p1f1_gn_185001-201412.nc", ) file.facets = { - 'activity': 'CMIP', - 'dataset': 'BCC-CSM2-MR', - 'exp': 'historical', - 'ensemble': 'r1i1p1f1', - 'institute': 'BCC', - 'mip': 'Amon', - 'project': 'CMIP6', - 'short_name': 'tas', - 'version': 'v20181126', + "activity": "CMIP", + "dataset": "BCC-CSM2-MR", + "exp": "historical", + "ensemble": "r1i1p1f1", + "institute": "BCC", + "mip": "Amon", + "project": "CMIP6", + "short_name": "tas", + "version": "v20181126", } dataset = Dataset( - activity='CMIP', - dataset='*', - ensemble='r1i1p1f1', - exp='historical', - grid='*', - institute='*', - mip='Amon', - project='CMIP6', - short_name='tas', - timerange='*', + activity="CMIP", + dataset="*", + ensemble="r1i1p1f1", + exp="historical", + grid="*", + institute="*", + mip="Amon", + 
project="CMIP6", + short_name="tas", + timerange="*", ) dataset.session = session print(dataset) - monkeypatch.setattr(Dataset, 'find_files', mock_find_files(file)) + monkeypatch.setattr(Dataset, "find_files", mock_find_files(file)) datasets = list(dataset.from_files()) assert all(ds.session == session for ds in datasets) - assert all(ads.session == session for ds in datasets - for ads in ds.supplementaries) + assert all( + ads.session == session for ds in datasets for ads in ds.supplementaries + ) expected = Dataset( - activity='CMIP', - dataset='BCC-CSM2-MR', - ensemble='r1i1p1f1', - exp='historical', - grid='*', - institute='BCC', - mip='Amon', - project='CMIP6', - short_name='tas', - timerange='*', + activity="CMIP", + dataset="BCC-CSM2-MR", + ensemble="r1i1p1f1", + exp="historical", + grid="*", + institute="BCC", + mip="Amon", + project="CMIP6", + short_name="tas", + timerange="*", ) expected.session = session @@ -1113,16 +1130,16 @@ def test_from_files_with_globs_and_only_missing_facets(monkeypatch, session): def test_match(): dataset1 = Dataset( - short_name='areacella', - ensemble=['r1i1p1f1'], - exp='historical', - modeling_realm=['atmos', 'land'], + short_name="areacella", + ensemble=["r1i1p1f1"], + exp="historical", + modeling_realm=["atmos", "land"], ) dataset2 = Dataset( - short_name='tas', - ensemble='r1i1p1f1', - exp=['historical', 'ssp585'], - modeling_realm=['atmos'], + short_name="tas", + ensemble="r1i1p1f1", + exp=["historical", "ssp585"], + modeling_realm=["atmos"], ) score = dataset1._match(dataset2) @@ -1131,15 +1148,15 @@ def test_match(): def test_remove_duplicate_supplementaries(): dataset = Dataset( - dataset='dataset1', - short_name='tas', - mip='Amon', - project='CMIP6', - exp='historical', + dataset="dataset1", + short_name="tas", + mip="Amon", + project="CMIP6", + exp="historical", ) - supplementary1 = dataset.copy(short_name='areacella') + supplementary1 = dataset.copy(short_name="areacella") supplementary2 = supplementary1.copy() - supplementary1.facets['exp'] = '1pctCO2' + supplementary1.facets["exp"] = "1pctCO2" dataset.supplementaries = [supplementary1, supplementary2] dataset._remove_duplicate_supplementaries() @@ -1150,34 +1167,34 @@ def test_remove_duplicate_supplementaries(): def test_remove_not_found_supplementaries(): dataset = Dataset( - dataset='dataset1', - short_name='tas', - mip='Amon', - project='CMIP6', - exp='historical', + dataset="dataset1", + short_name="tas", + mip="Amon", + project="CMIP6", + exp="historical", ) - dataset.add_supplementary(short_name='areacella', mip='fx', exp='*') + dataset.add_supplementary(short_name="areacella", mip="fx", exp="*") dataset._remove_unexpanded_supplementaries() assert len(dataset.supplementaries) == 0 def test_concatenating_historical_and_future_exps(mocker): - mocker.patch.object(Dataset, 'files', True) + mocker.patch.object(Dataset, "files", True) dataset = Dataset( - dataset='dataset1', - short_name='tas', - mip='Amon', - frequency='mon', - project='CMIP6', - exp=['historical', 'ssp585'], - ) - dataset.add_supplementary(short_name='areacella', mip='fx', frequency='fx') + dataset="dataset1", + short_name="tas", + mip="Amon", + frequency="mon", + project="CMIP6", + exp=["historical", "ssp585"], + ) + dataset.add_supplementary(short_name="areacella", mip="fx", frequency="fx") dataset._fix_fx_exp() assert len(dataset.supplementaries) == 1 - assert dataset.facets['exp'] == ['historical', 'ssp585'] - assert dataset.supplementaries[0].facets['exp'] == 'historical' + assert dataset.facets["exp"] == 
["historical", "ssp585"] + assert dataset.supplementaries[0].facets["exp"] == "historical" def test_from_recipe_with_glob(tmp_path, session, mocker): @@ -1194,11 +1211,11 @@ def test_from_recipe_with_glob(tmp_path, session, mocker): additional_datasets: - {dataset: '*', institute: '*'} """) - recipe = tmp_path / 'recipe_test.yml' - recipe.write_text(recipe_txt, encoding='utf-8') + recipe = tmp_path / "recipe_test.yml" + recipe.write_text(recipe_txt, encoding="utf-8") - session['drs']['CMIP5'] = 'ESGF' - CFG['rootpath']['CMIP5'] = [tmp_path] + session["drs"]["CMIP5"] = "ESGF" + CFG["rootpath"]["CMIP5"] = [tmp_path] filenames = [ "cmip5/output1/CSIRO-QCCCE/CSIRO-Mk3-6-0/rcp85/mon/atmos/Amon/r1i1p1/" "v20120323/tas_Amon_CSIRO-Mk3-6-0_rcp85_r1i1p1_200601-210012.nc", @@ -1208,34 +1225,34 @@ def test_from_recipe_with_glob(tmp_path, session, mocker): for filename in filenames: path = tmp_path / filename path.parent.mkdir(parents=True, exist_ok=True) - path.write_text('') + path.write_text("") definitions = [ { - 'diagnostic': 'diagnostic1', - 'variable_group': 'tas', - 'dataset': 'CSIRO-Mk3-6-0', - 'project': 'CMIP5', - 'mip': 'Amon', - 'short_name': 'tas', - 'alias': 'CSIRO-Mk3-6-0', - 'recipe_dataset_index': 0, - 'exp': 'rcp85', - 'ensemble': 'r1i1p1', - 'institute': 'CSIRO-QCCCE', + "diagnostic": "diagnostic1", + "variable_group": "tas", + "dataset": "CSIRO-Mk3-6-0", + "project": "CMIP5", + "mip": "Amon", + "short_name": "tas", + "alias": "CSIRO-Mk3-6-0", + "recipe_dataset_index": 0, + "exp": "rcp85", + "ensemble": "r1i1p1", + "institute": "CSIRO-QCCCE", }, { - 'diagnostic': 'diagnostic1', - 'variable_group': 'tas', - 'dataset': 'HadGEM2-AO', - 'project': 'CMIP5', - 'mip': 'Amon', - 'short_name': 'tas', - 'alias': 'HadGEM2-AO', - 'recipe_dataset_index': 1, - 'exp': 'rcp85', - 'ensemble': 'r1i1p1', - 'institute': 'NIMR-KMA', + "diagnostic": "diagnostic1", + "variable_group": "tas", + "dataset": "HadGEM2-AO", + "project": "CMIP5", + "mip": "Amon", + "short_name": "tas", + "alias": "HadGEM2-AO", + "recipe_dataset_index": 1, + "exp": "rcp85", + "ensemble": "r1i1p1", + "institute": "NIMR-KMA", }, ] expected = [] @@ -1252,92 +1269,89 @@ def test_from_recipe_with_glob(tmp_path, session, mocker): def test_from_ranges(): - dataset = Dataset(ensemble='r(1:2)i1p1f1') + dataset = Dataset(ensemble="r(1:2)i1p1f1") expected = [ - Dataset(ensemble='r1i1p1f1'), - Dataset(ensemble='r2i1p1f1'), + Dataset(ensemble="r1i1p1f1"), + Dataset(ensemble="r2i1p1f1"), ] assert dataset.from_ranges() == expected def test_expand_ensemble(): - dataset = Dataset(ensemble='r(1:2)i(2:3)p(3:4)') + dataset = Dataset(ensemble="r(1:2)i(2:3)p(3:4)") - expanded = dataset._expand_range('ensemble') + expanded = dataset._expand_range("ensemble") ensembles = [ - 'r1i2p3', - 'r1i2p4', - 'r1i3p3', - 'r1i3p4', - 'r2i2p3', - 'r2i2p4', - 'r2i3p3', - 'r2i3p4', + "r1i2p3", + "r1i2p4", + "r1i3p3", + "r1i3p4", + "r2i2p3", + "r2i2p4", + "r2i3p3", + "r2i3p4", ] assert expanded == ensembles def test_expand_subexperiment(): - dataset = Dataset(sub_experiment='s(1998:2005)') + dataset = Dataset(sub_experiment="s(1998:2005)") - expanded = dataset._expand_range('sub_experiment') + expanded = dataset._expand_range("sub_experiment") subexperiments = [ - 's1998', - 's1999', - 's2000', - 's2001', - 's2002', - 's2003', - 's2004', - 's2005', + "s1998", + "s1999", + "s2000", + "s2001", + "s2002", + "s2003", + "s2004", + "s2005", ] assert expanded == subexperiments def test_expand_ensemble_list_ok(): - dataset = Dataset(ensemble=['r0i0p0', 'r1i1p1']) + dataset = 
Dataset(ensemble=["r0i0p0", "r1i1p1"]) - expected = [['r0i0p0', 'r1i1p1']] + expected = [["r0i0p0", "r1i1p1"]] - assert dataset._expand_range('ensemble') == expected + assert dataset._expand_range("ensemble") == expected def test_expand_ensemble_nolist(): dataset = Dataset( - dataset='XYZ', - ensemble=['r1i1p1', 'r(1:2)i1p1'], + dataset="XYZ", + ensemble=["r1i1p1", "r(1:2)i1p1"], ) with pytest.raises(RecipeError): - dataset._expand_range('ensemble') + dataset._expand_range("ensemble") def create_esgf_file(timerange, version): """Prepare some fake ESGF search results.""" json = { - 'dataset_id': - 'CMIP6.CMIP.EC-Earth-Consortium.EC-Earth3.historical' - f'.r1i1p1f1.Amon.tas.gr.{version}|esgf-data1.llnl.gov', - 'dataset_id_template_': [ - '%(mip_era)s.%(activity_drs)s.%(institution_id)s.' - '%(source_id)s.%(experiment_id)s.%(member_id)s.' - '%(table_id)s.%(variable_id)s.%(grid_label)s' + "dataset_id": "CMIP6.CMIP.EC-Earth-Consortium.EC-Earth3.historical" + f".r1i1p1f1.Amon.tas.gr.{version}|esgf-data1.llnl.gov", + "dataset_id_template_": [ + "%(mip_era)s.%(activity_drs)s.%(institution_id)s." + "%(source_id)s.%(experiment_id)s.%(member_id)s." + "%(table_id)s.%(variable_id)s.%(grid_label)s" ], - 'project': ['CMIP6'], - 'size': - 4745571, - 'source_id': ['EC-Earth3'], - 'title': - f'tas_Amon_EC-Earth3_historical_r1i1p1f1_gr_{timerange}.nc', - 'url': [ - 'http://esgf-data1.llnl.gov/thredds/fileServer/css03_data' - '/CMIP6/CMIP/EC-Earth-Consortium/EC-Earth3/historical' - f'/r1i1p1f1/Amon/tas/gr/{version}/tas_Amon_EC-Earth3' - f'_historical_r1i1p1f1_gr_{timerange}.nc' - '|application/netcdf|HTTPServer', + "project": ["CMIP6"], + "size": 4745571, + "source_id": ["EC-Earth3"], + "title": f"tas_Amon_EC-Earth3_historical_r1i1p1f1_gr_{timerange}.nc", + "url": [ + "http://esgf-data1.llnl.gov/thredds/fileServer/css03_data" + "/CMIP6/CMIP/EC-Earth-Consortium/EC-Earth3/historical" + f"/r1i1p1f1/Amon/tas/gr/{version}/tas_Amon_EC-Earth3" + f"_historical_r1i1p1f1_gr_{timerange}.nc" + "|application/netcdf|HTTPServer", ], } @@ -1349,132 +1363,133 @@ def create_esgf_file(timerange, version): @pytest.fixture def dataset(): dataset = Dataset( - project='CMIP6', - mip='Amon', - frequency='mon', - short_name='tas', - dataset='EC.-Earth3', - exp='historical', - ensemble='r1i1p1f1', - grid='gr', - timerange='1850/1851', - alias='CMIP6_EC-Eeath3_tas', + project="CMIP6", + mip="Amon", + frequency="mon", + short_name="tas", + dataset="EC.-Earth3", + exp="historical", + ensemble="r1i1p1f1", + grid="gr", + timerange="1850/1851", + alias="CMIP6_EC-Eeath3_tas", ) dataset.session = { - 'search_esgf': 'when_missing', - 'download_dir': Path('/download_dir'), - 'rootpath': None, - 'drs': {}, + "search_esgf": "when_missing", + "download_dir": Path("/download_dir"), + "rootpath": None, + "drs": {}, } return dataset -@pytest.mark.parametrize("local_availability", ['all', 'partial', 'none']) +@pytest.mark.parametrize("local_availability", ["all", "partial", "none"]) def test_find_files(mocker, dataset, local_availability): """Test `find_files`.""" esgf_files = [ - create_esgf_file(version='v1', timerange='185001-185012'), - create_esgf_file(version='v1', timerange='185101-185112'), + create_esgf_file(version="v1", timerange="185001-185012"), + create_esgf_file(version="v1", timerange="185101-185112"), ] - local_dir = Path('/local_dir') + local_dir = Path("/local_dir") # Local files can cover the entire period, part of it, or nothing local_file_options = { - 'all': [f.local_file(local_dir) for f in esgf_files], - 'partial': 
[esgf_files[1].local_file(local_dir)],
-        'none': [],
+        "all": [f.local_file(local_dir) for f in esgf_files],
+        "partial": [esgf_files[1].local_file(local_dir)],
+        "none": [],
     }
     local_files = local_file_options[local_availability]
 
     mocker.patch.object(
         esmvalcore.dataset.Dataset,
-        'augment_facets',
+        "augment_facets",
         autospec=True,
     )
     mocker.patch.object(
         esmvalcore.dataset.local,
-        'find_files',
+        "find_files",
         autospec=True,
         return_value=(list(local_files), []),
     )
     mocker.patch.object(
         esmvalcore.dataset.esgf,
-        'find_files',
+        "find_files",
         autospec=True,
         return_value=list(esgf_files),
    )
 
     expected = {
-        'all': local_files,
-        'partial': local_files + esgf_files[:1],
-        'none': esgf_files,
+        "all": local_files,
+        "partial": local_files + esgf_files[:1],
+        "none": esgf_files,
     }
     assert dataset.files == expected[local_availability]
 
 
 def test_find_files_wildcard_timerange(mocker, dataset):
     """Test that `find_files` works with a '*' in the timerange."""
-    dataset.facets['timerange'] = '*'
+    dataset.facets["timerange"] = "*"
     esgf_files = [
-        create_esgf_file(version='v1', timerange='185001-185012'),
-        create_esgf_file(version='v1', timerange='185101-185112'),
+        create_esgf_file(version="v1", timerange="185001-185012"),
+        create_esgf_file(version="v1", timerange="185101-185112"),
     ]
     local_files = []
     mocker.patch.object(
         esmvalcore.dataset.Dataset,
-        'augment_facets',
+        "augment_facets",
         autospec=True,
     )
     mocker.patch.object(
         esmvalcore.dataset.local,
-        'find_files',
+        "find_files",
         autospec=True,
         return_value=(local_files, []),
     )
     mocker.patch.object(
         esmvalcore.dataset.esgf,
-        'find_files',
+        "find_files",
         autospec=True,
         return_value=list(esgf_files),
     )
 
     assert dataset.files == esgf_files
-    assert dataset.facets['timerange'] == '185001/185112'
+    assert dataset.facets["timerange"] == "185001/185112"
 
 
 def test_find_files_outdated_local(mocker, dataset):
     """Test newer files from ESGF are found when local data is incomplete."""
     esgf_files = [
-        create_esgf_file(version='v2', timerange='185001-185012'),
-        create_esgf_file(version='v2', timerange='185101-185112'),
+        create_esgf_file(version="v2", timerange="185001-185012"),
+        create_esgf_file(version="v2", timerange="185101-185112"),
     ]
-    local_dir = Path('/local_dir')
+    local_dir = Path("/local_dir")
     local_files = [
-        create_esgf_file(version='v1',
-                         timerange='185001-185012').local_file(local_dir),
+        create_esgf_file(version="v1", timerange="185001-185012").local_file(
+            local_dir
+        ),
     ]
     mocker.patch.object(
         esmvalcore.dataset.Dataset,
-        'augment_facets',
+        "augment_facets",
         autospec=True,
     )
     mocker.patch.object(
         esmvalcore.dataset.local,
-        'find_files',
+        "find_files",
         autospec=True,
         return_value=(local_files, []),
     )
     mocker.patch.object(
         esmvalcore.dataset.esgf,
-        'find_files',
+        "find_files",
         autospec=True,
         return_value=list(esgf_files),
     )
@@ -1483,52 +1498,52 @@
 
 
 @pytest.mark.parametrize(
-    'project',
-    ['CESM', 'EMAC', 'ICON', 'IPSLCM', 'OBS', 'OBS6', 'ana4mips', 'native6'],
+    "project",
+    ["CESM", "EMAC", "ICON", "IPSLCM", "OBS", "OBS6", "ana4mips", "native6"],
 )
 def test_find_files_non_esgf_projects(mocker, project, monkeypatch):
     """Test that find_files never downloads files for non-ESGF projects."""
-    monkeypatch.setitem(CFG, 'search_esgf', 'always')
+    monkeypatch.setitem(CFG, "search_esgf", "always")
     mock_local_find_files = mocker.patch.object(
         esmvalcore.dataset.local,
-        'find_files',
+        "find_files",
         autospec=True,
         return_value=(mock.sentinel.files, mock.sentinel.file_globs),
     )
mock_esgf_find_files = mocker.patch.object( esmvalcore.dataset.esgf, - 'find_files', + "find_files", autospec=True, ) tas = Dataset( - short_name='tas', - mip='Amon', + short_name="tas", + mip="Amon", project=project, - dataset='MY_DATASET', - timerange='2000/2000', - account='account', - case='case', - channel='channel', - dir='dir', - exp='amip', - freq='freq', - gcomp='gcomp', - group='group', - ipsl_varname='ipsl_varname', - model='model', - out='out', - root='root', - scomp='scomp', - simulation='simulation', - status='status', - string='string', - tag='tag', - tdir='tdir', + dataset="MY_DATASET", + timerange="2000/2000", + account="account", + case="case", + channel="channel", + dir="dir", + exp="amip", + freq="freq", + gcomp="gcomp", + group="group", + ipsl_varname="ipsl_varname", + model="model", + out="out", + root="root", + scomp="scomp", + simulation="simulation", + status="status", + string="string", + tag="tag", + tdir="tdir", tier=3, - tperiod='tperiod', - type='sat', - var_type='var_type', + tperiod="tperiod", + type="sat", + var_type="var_type", version=1, ) tas.augment_facets() @@ -1542,89 +1557,89 @@ def test_find_files_non_esgf_projects(mocker, project, monkeypatch): def test_set_version(): - dataset = Dataset(short_name='tas') - dataset.add_supplementary(short_name='areacella') - file_v1 = esmvalcore.local.LocalFile('/path/to/v1/tas.nc') - file_v1.facets['version'] = 'v1' - file_v2 = esmvalcore.local.LocalFile('/path/to/v2/tas.nc') - file_v2.facets['version'] = 'v2' - afile = esmvalcore.local.LocalFile('/path/to/v3/areacella.nc') - afile.facets['version'] = 'v3' + dataset = Dataset(short_name="tas") + dataset.add_supplementary(short_name="areacella") + file_v1 = esmvalcore.local.LocalFile("/path/to/v1/tas.nc") + file_v1.facets["version"] = "v1" + file_v2 = esmvalcore.local.LocalFile("/path/to/v2/tas.nc") + file_v2.facets["version"] = "v2" + areacella_file = esmvalcore.local.LocalFile("/path/to/v3/areacella.nc") + areacella_file.facets["version"] = "v3" dataset.files = [file_v2, file_v1] - dataset.supplementaries[0].files = [afile] + dataset.supplementaries[0].files = [areacella_file] dataset.set_version() - assert dataset.facets['version'] == ['v1', 'v2'] - assert dataset.supplementaries[0].facets['version'] == 'v3' + assert dataset.facets["version"] == ["v1", "v2"] + assert dataset.supplementaries[0].facets["version"] == "v3" -@pytest.mark.parametrize('timerange', ['*', '185001/*', '*/185112']) +@pytest.mark.parametrize("timerange", ["*", "185001/*", "*/185112"]) def test_update_timerange_from_esgf(mocker, timerange): esgf_files = [ - create_esgf_file(version='v20200310', timerange='185001-185012'), - create_esgf_file(version='v20200310', timerange='185101-185112'), + create_esgf_file(version="v20200310", timerange="185001-185012"), + create_esgf_file(version="v20200310", timerange="185101-185112"), ] variable = { - 'project': 'CMIP6', - 'mip': 'Amon', - 'frequency': 'mon', - 'short_name': 'tas', - 'dataset': 'EC.-Earth3', - 'exp': 'historical', - 'ensemble': 'r1i1p1f1', - 'grid': 'gr', - 'timerange': timerange, + "project": "CMIP6", + "mip": "Amon", + "frequency": "mon", + "short_name": "tas", + "dataset": "EC.-Earth3", + "exp": "historical", + "ensemble": "r1i1p1f1", + "grid": "gr", + "timerange": timerange, } mocker.patch.object( Dataset, - 'files', + "files", new_callable=mocker.PropertyMock, return_value=esgf_files, ) dataset = Dataset(**variable) dataset._update_timerange() - assert dataset['timerange'] == '185001/185112' + assert dataset["timerange"] == 
"185001/185112" TEST_YEAR_FORMAT = [ - ('1/301', '0001/0301'), - ('10/P2Y', '0010/P2Y'), - ('P2Y/10', 'P2Y/0010'), + ("1/301", "0001/0301"), + ("10/P2Y", "0010/P2Y"), + ("P2Y/10", "P2Y/0010"), ] -@pytest.mark.parametrize('input_time,output_time', TEST_YEAR_FORMAT) +@pytest.mark.parametrize("input_time,output_time", TEST_YEAR_FORMAT) def test_update_timerange_year_format(session, input_time, output_time): variable = { - 'project': 'CMIP6', - 'mip': 'Amon', - 'short_name': 'tas', - 'dataset': 'HadGEM3-GC31-LL', - 'exp': 'historical', - 'ensemble': 'r2i1p1f1', - 'grid': 'gr', - 'timerange': input_time + "project": "CMIP6", + "mip": "Amon", + "short_name": "tas", + "dataset": "HadGEM3-GC31-LL", + "exp": "historical", + "ensemble": "r2i1p1f1", + "grid": "gr", + "timerange": input_time, } dataset = Dataset(**variable) dataset.session = session dataset._update_timerange() - assert dataset['timerange'] == output_time + assert dataset["timerange"] == output_time -@pytest.mark.parametrize('search_esgf', ['never', 'when_missing', 'always']) +@pytest.mark.parametrize("search_esgf", ["never", "when_missing", "always"]) def test_update_timerange_no_files(session, search_esgf): - session['search_esgf'] = search_esgf + session["search_esgf"] = search_esgf variable = { - 'alias': 'CMIP6', - 'project': 'CMIP6', - 'mip': 'Amon', - 'short_name': 'tas', - 'original_short_name': 'tas', - 'dataset': 'HadGEM3-GC31-LL', - 'exp': 'historical', - 'ensemble': 'r2i1p1f1', - 'grid': 'gr', - 'timerange': '*/2000', + "alias": "CMIP6", + "project": "CMIP6", + "mip": "Amon", + "short_name": "tas", + "original_short_name": "tas", + "dataset": "HadGEM3-GC31-LL", + "exp": "historical", + "ensemble": "r2i1p1f1", + "grid": "gr", + "timerange": "*/2000", } dataset = Dataset(**variable) dataset.files = [] @@ -1635,10 +1650,10 @@ def test_update_timerange_no_files(session, search_esgf): def test_update_timerange_typeerror(): dataset = Dataset( - short_name='tas', - mip='Amon', - project='CMIP6', - dataset='dataset1', + short_name="tas", + mip="Amon", + project="CMIP6", + dataset="dataset1", timerange=42, ) msg = r"timerange should be a string, got '42'" @@ -1648,38 +1663,39 @@ def test_update_timerange_typeerror(): def test_load(mocker, session): dataset = Dataset( - short_name='chl', - mip='Oyr', - project='CMIP5', - dataset='CanESM2', - exp='historical', - frequency='yr', - timerange='2000/2005', - ensemble='r1i1p1', + short_name="chl", + mip="Oyr", + project="CMIP5", + dataset="CanESM2", + exp="historical", + frequency="yr", + timerange="2000/2005", + ensemble="r1i1p1", ) dataset.session = session - output_file = Path('/path/to/output.nc') + output_file = Path("/path/to/output.nc") fix_dir_prefix = Path( session.preproc_dir, - 'fixed_files', - 'chl_Oyr_CMIP5_CanESM2_historical_r1i1p1_', + "fixed_files", + "chl_Oyr_CMIP5_CanESM2_historical_r1i1p1_", ) _get_output_file = mocker.patch.object( esmvalcore.dataset, - '_get_output_file', + "_get_output_file", create_autospec=True, return_value=output_file, ) args = {} order = [] - def mock_preprocess(items, step, input_files, output_file, debug, - **kwargs): + def mock_preprocess( + items, step, input_files, output_file, debug, **kwargs + ): order.append(step) args[step] = kwargs return items - mocker.patch.object(esmvalcore.dataset, 'preprocess', mock_preprocess) + mocker.patch.object(esmvalcore.dataset, "preprocess", mock_preprocess) items = [mocker.sentinel.file] dataset.files = items @@ -1689,81 +1705,81 @@ def mock_preprocess(items, step, input_files, output_file, debug, assert 
cube == items[0] load_order = [ - 'fix_file', - 'load', - 'fix_metadata', - 'concatenate', - 'cmor_check_metadata', - 'clip_timerange', - 'fix_data', - 'cmor_check_data', - 'add_supplementary_variables', + "fix_file", + "load", + "fix_metadata", + "concatenate", + "cmor_check_metadata", + "clip_timerange", + "fix_data", + "cmor_check_data", + "add_supplementary_variables", ] assert order == load_order load_args = { - 'load': { - 'ignore_warnings': None, + "load": { + "ignore_warnings": None, }, - 'fix_file': { - 'add_unique_suffix': True, - 'session': session, - 'dataset': 'CanESM2', - 'ensemble': 'r1i1p1', - 'exp': 'historical', - 'frequency': 'yr', - 'mip': 'Oyr', - 'output_dir': fix_dir_prefix, - 'project': 'CMIP5', - 'short_name': 'chl', - 'timerange': '2000/2005', + "fix_file": { + "add_unique_suffix": True, + "session": session, + "dataset": "CanESM2", + "ensemble": "r1i1p1", + "exp": "historical", + "frequency": "yr", + "mip": "Oyr", + "output_dir": fix_dir_prefix, + "project": "CMIP5", + "short_name": "chl", + "timerange": "2000/2005", }, - 'fix_metadata': { - 'check_level': CheckLevels.DEFAULT, - 'session': session, - 'dataset': 'CanESM2', - 'ensemble': 'r1i1p1', - 'exp': 'historical', - 'frequency': 'yr', - 'mip': 'Oyr', - 'project': 'CMIP5', - 'short_name': 'chl', - 'timerange': '2000/2005', + "fix_metadata": { + "check_level": CheckLevels.DEFAULT, + "session": session, + "dataset": "CanESM2", + "ensemble": "r1i1p1", + "exp": "historical", + "frequency": "yr", + "mip": "Oyr", + "project": "CMIP5", + "short_name": "chl", + "timerange": "2000/2005", }, - 'cmor_check_metadata': { - 'check_level': CheckLevels.DEFAULT, - 'cmor_table': 'CMIP5', - 'mip': 'Oyr', - 'short_name': 'chl', - 'frequency': 'yr', + "cmor_check_metadata": { + "check_level": CheckLevels.DEFAULT, + "cmor_table": "CMIP5", + "mip": "Oyr", + "short_name": "chl", + "frequency": "yr", }, - 'clip_timerange': { - 'timerange': '2000/2005', + "clip_timerange": { + "timerange": "2000/2005", }, - 'fix_data': { - 'check_level': CheckLevels.DEFAULT, - 'session': session, - 'dataset': 'CanESM2', - 'ensemble': 'r1i1p1', - 'exp': 'historical', - 'frequency': 'yr', - 'mip': 'Oyr', - 'project': 'CMIP5', - 'short_name': 'chl', - 'timerange': '2000/2005', + "fix_data": { + "check_level": CheckLevels.DEFAULT, + "session": session, + "dataset": "CanESM2", + "ensemble": "r1i1p1", + "exp": "historical", + "frequency": "yr", + "mip": "Oyr", + "project": "CMIP5", + "short_name": "chl", + "timerange": "2000/2005", }, - 'cmor_check_data': { - 'check_level': CheckLevels.DEFAULT, - 'cmor_table': 'CMIP5', - 'mip': 'Oyr', - 'short_name': 'chl', - 'frequency': 'yr', + "cmor_check_data": { + "check_level": CheckLevels.DEFAULT, + "cmor_table": "CMIP5", + "mip": "Oyr", + "short_name": "chl", + "frequency": "yr", }, - 'concatenate': { - 'check_level': CheckLevels.DEFAULT, + "concatenate": { + "check_level": CheckLevels.DEFAULT, }, - 'add_supplementary_variables': { - 'supplementary_cubes': [], + "add_supplementary_variables": { + "supplementary_cubes": [], }, } @@ -1775,7 +1791,7 @@ def mock_preprocess(items, step, input_files, output_file, debug, def test_load_fail(session): dataset = Dataset() dataset.session = session - dataset.session['search_esgf'] = 'when_missing' + dataset.session["search_esgf"] = "when_missing" dataset.files = [] with pytest.raises(InputFilesNotFound): dataset.load() diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py index 1d73412cd3..1718c03b76 100644 --- a/tests/unit/test_exceptions.py +++ 
b/tests/unit/test_exceptions.py @@ -5,10 +5,10 @@ from esmvalcore.exceptions import SuppressedError -@pytest.mark.parametrize('exception', [SuppressedError, ValueError]) +@pytest.mark.parametrize("exception", [SuppressedError, ValueError]) def test_suppressedhook(capsys, exception): try: - raise exception('error') + raise exception("error") except exception: args = sys.exc_info() sys.excepthook(*args) diff --git a/tests/unit/test_iris_helpers.py b/tests/unit/test_iris_helpers.py index e49b6b803a..ccfd6fbbf6 100644 --- a/tests/unit/test_iris_helpers.py +++ b/tests/unit/test_iris_helpers.py @@ -1,4 +1,5 @@ """Tests for :mod:`esmvalcore.iris_helpers`.""" + import datetime from copy import deepcopy from itertools import permutations @@ -33,66 +34,68 @@ @pytest.fixture def cubes(): """Test cubes.""" - cubes = CubeList([ - Cube(0.0, var_name='a', long_name='a'), - Cube(0.0, var_name='a', long_name='b'), - Cube(0.0, var_name='c', long_name='d'), - ]) + cubes = CubeList( + [ + Cube(0.0, var_name="a", long_name="a"), + Cube(0.0, var_name="a", long_name="b"), + Cube(0.0, var_name="c", long_name="d"), + ] + ) return cubes @pytest.fixture def units(): - return Unit('days since 0001-01-01', calendar='proleptic_gregorian') + return Unit("days since 0001-01-01", calendar="proleptic_gregorian") def test_add_leading_dim_to_cube(): """Test :func:`esmvalcore.iris_helpers.add_leading_dim_to_cube`.""" lat_coord = DimCoord( [0.0, 1.0, 2.0], - var_name='lat', - standard_name='latitude', - long_name='latitude', - units='degrees_north', + var_name="lat", + standard_name="latitude", + long_name="latitude", + units="degrees_north", ) lon_coord = DimCoord( [0.0, 1.0], - var_name='lon', - standard_name='longitude', - long_name='longitude', - units='degrees_east', + var_name="lon", + standard_name="longitude", + long_name="longitude", + units="degrees_east", ) height_coord = AuxCoord( [2.0, 1.0], - var_name='height', - standard_name='height', - long_name='height', - units='m', - attributes={'positive': 'up'}, + var_name="height", + standard_name="height", + long_name="height", + units="m", + attributes={"positive": "up"}, ) land_mask = AncillaryVariable( [0.5, 0.2], - var_name='sftlf', + var_name="sftlf", standard_name=None, - long_name='Land fraction', - units='1', + long_name="Land fraction", + units="1", ) cell_area = CellMeasure( [1.0, 2.0], - var_name='areacella', - standard_name='cell_area', - long_name='Cell Area', - units='m2', - measure='area', + var_name="areacella", + standard_name="cell_area", + long_name="Cell Area", + units="m2", + measure="area", ) cube = Cube( [1, 42], - var_name='ta', - standard_name='air_temperature', - long_name='Air Temperature', - units='K', - attributes={'model_name': 'ESM'}, - cell_methods=[CellMethod('mean', coords='time')], + var_name="ta", + standard_name="air_temperature", + long_name="Air Temperature", + units="K", + attributes={"model_name": "ESM"}, + cell_methods=[CellMethod("mean", coords="time")], aux_coords_and_dims=[(height_coord, 0)], dim_coords_and_dims=[(lon_coord, 0)], ancillary_variables_and_dims=[(land_mask, 0)], @@ -103,12 +106,12 @@ def test_add_leading_dim_to_cube(): np.testing.assert_equal(new_cube.data, [[1, 42], [1, 42], [1, 42]]) - assert new_cube.var_name == 'ta' - assert new_cube.standard_name == 'air_temperature' - assert new_cube.long_name == 'Air Temperature' - assert new_cube.units == 'K' - assert new_cube.cell_methods == (CellMethod('mean', coords='time'),) - assert new_cube.attributes == {'model_name': 'ESM'} + assert new_cube.var_name == "ta" + 
@@ -125,17 +128,20 @@ def test_add_leading_dim_to_cube():

 def test_add_leading_dim_to_cube_non_1d():
     """Test :func:`esmvalcore.iris_helpers.add_leading_dim_to_cube`."""
-    coord_2d = AuxCoord([[0, 1], [2, 3]], var_name='coord_2d')
+    coord_2d = AuxCoord([[0, 1], [2, 3]], var_name="coord_2d")
     msg = "Multi-dimensional coordinate not supported: 'coord_2d'"
     with pytest.raises(CoordinateMultiDimError, match=msg):
         add_leading_dim_to_cube(mock.sentinel.cube, coord_2d)


-@pytest.mark.parametrize("date, dtype, expected", [
-    (datetime.datetime(1, 1, 1), np.float64, 0.0),
-    (datetime.datetime(1, 1, 1), int, 0.0),
-    (datetime.datetime(1, 1, 2, 12), np.float64, 1.5),
-])
+@pytest.mark.parametrize(
+    "date, dtype, expected",
+    [
+        (datetime.datetime(1, 1, 1), np.float64, 0.0),
+        (datetime.datetime(1, 1, 1), int, 0.0),
+        (datetime.datetime(1, 1, 2, 12), np.float64, 1.5),
+    ],
+)
 def test_date2num_scalar(date, dtype, expected, units):
     num = date2num(date, units, dtype=dtype)
     assert num == expected
@@ -147,7 +153,7 @@ def assert_attributes_equal(attrs_1: dict, attrs_2: dict) -> None:
     print(pformat(dict(attrs_1)))
     print(pformat(dict(attrs_2)))
     assert len(attrs_1) == len(attrs_2)
-    for (attr, val) in attrs_1.items():
+    for attr, val in attrs_1.items():
         assert attr in attrs_2
         np.testing.assert_array_equal(attrs_2[attr], val)

@@ -156,22 +162,20 @@ def make_cube_with_attrs(index):
     """Make cube that contains different types of attributes."""
     attributes = {
         # Identical attribute values across cubes
-        'int': 42,
-        'float': 3.1415,
-        'bool': True,
-        'str': 'Hello, world',
-        'list': [1, 1, 2, 3, 5, 8, 13],
-        'tuple': (1, 2, 3, 4, 5),
-        'nparray': np.arange(42),
-
+        "int": 42,
+        "float": 3.1415,
+        "bool": True,
+        "str": "Hello, world",
+        "list": [1, 1, 2, 3, 5, 8, 13],
+        "tuple": (1, 2, 3, 4, 5),
+        "nparray": np.arange(42),
         # Differing attribute values across cubes
-        'diff_int': index,
-        'diff_str': 'abc'[index],
-        'diff_nparray': np.arange(index),
-        'mix': np.arange(3) if index == 0 else index,
-        'diff_list': [index, index],
-        'diff_tuple': (index, index),
-
+        "diff_int": index,
+        "diff_str": "abc"[index],
+        "diff_nparray": np.arange(index),
+        "mix": np.arange(3) if index == 0 else index,
+        "diff_list": [index, index],
+        "diff_tuple": (index, index),
         # Differing attribute keys across cubes
         str(index + 1000): index,
         str(index % 2 + 100): index,
@@ -188,26 +192,26 @@ def make_cube_with_attrs(index):
 def test_merge_cube_attributes(cubes):
     """Test `merge_cube_attributes`."""
     expected_attributes = {
-        'int': 42,
-        'float': 3.1415,
-        'bool': True,
-        'str': 'Hello, world',
-        'list': [1, 1, 2, 3, 5, 8, 13],
-        'tuple': (1, 2, 3, 4, 5),
-        'nparray': np.arange(42),
-        'diff_int': '0 1 2',
-        'diff_str': 'a b c',
-        'diff_nparray': '[0 1] [0] []',
-        'mix': '1 2 [0 1 2]',
-        'diff_list': '[0, 0] [1, 1] [2, 2]',
-        'diff_tuple': '(0, 0) (1, 1) (2, 2)',
-        '1000': 0,
-        '1001': 1,
-        '1002': 2,
-        '100': '0 2',
-        '101': 1,
-        '0': 0,
-        '1': 1,
+        "int": 42,
+        "float": 3.1415,
+        "bool": True,
+        "str": "Hello, world",
+        "list": [1, 1, 2, 3, 5, 8, 13],
+        "tuple": (1, 2, 3, 4, 5),
+        "nparray": np.arange(42),
+        "diff_int": "0 1 2",
+        "diff_str": "a b c",
+        "diff_nparray": "[0 1] [0] []",
+        "mix": "1 2 [0 1 2]",
+        "diff_list": "[0, 0] [1, 1] [2, 2]",
+        "diff_tuple": "(0, 0) (1, 1) (2, 2)",
+        "1000": 0,
+        "1001": 1,
+        "1002": 2,
+        "100": "0 2",
+        "101": 1,
+        "0": 0,
+        "1": 1,
     }
     cubes = deepcopy(cubes)
     merge_cube_attributes(cubes)
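The expected_attributes above encode the merge semantics of merge_cube_attributes: values that are identical across all cubes are kept unchanged, while differing values are stringified and space-joined on every cube, in place. A small sketch under those same assumptions:

    from iris.cube import Cube

    from esmvalcore.iris_helpers import merge_cube_attributes

    cubes = [
        Cube(0.0, attributes={"source": "ESM", "index": i}) for i in range(3)
    ]
    merge_cube_attributes(cubes)

    # "source" was identical and survives as-is; "index" differed and is
    # replaced by the space-joined string "0 1 2" on all three cubes.
    assert cubes[0].attributes["source"] == "ESM"
    assert cubes[0].attributes["index"] == "0 1 2"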
"1 2 [0 1 2]", + "diff_list": "[0, 0] [1, 1] [2, 2]", + "diff_tuple": "(0, 0) (1, 1) (2, 2)", + "1000": 0, + "1001": 1, + "1002": 2, + "100": "0 2", + "101": 1, + "0": 0, + "1": 1, } cubes = deepcopy(cubes) merge_cube_attributes(cubes) @@ -233,13 +237,13 @@ def test_merge_cube_attributes_1_cube(): def test_merge_cube_attributes_global_local(): cube1 = CUBES[0].copy() cube2 = CUBES[1].copy() - cube1.attributes.globals['attr1'] = 1 - cube1.attributes.globals['attr2'] = 1 - cube1.attributes.globals['attr3'] = 1 - cube2.attributes.locals['attr1'] = 1 + cube1.attributes.globals["attr1"] = 1 + cube1.attributes.globals["attr2"] = 1 + cube1.attributes.globals["attr3"] = 1 + cube2.attributes.locals["attr1"] = 1 merge_cube_attributes([cube1, cube2]) for cube in [cube1, cube2]: - for attr in ['attr1', 'attr2', 'attr3']: + for attr in ["attr1", "attr2", "attr3"]: assert attr in cube.attributes.globals @@ -247,24 +251,24 @@ def test_merge_cube_attributes_global_local(): def cube_3d(): """3D sample cube.""" # DimCoords - x = DimCoord([0, 1, 2], var_name='x') - y = DimCoord([0, 1, 2], var_name='y') - z = DimCoord([0, 1, 2, 3], var_name='z') + x = DimCoord([0, 1, 2], var_name="x") + y = DimCoord([0, 1, 2], var_name="y") + z = DimCoord([0, 1, 2, 3], var_name="z") # AuxCoords aux_x = AuxCoord( da.ones(3, chunks=1), bounds=da.ones((3, 3), chunks=(1, 1)), - var_name='aux_x', + var_name="aux_x", ) - aux_z = AuxCoord(da.ones(4, chunks=1), var_name='aux_z') - aux_xy = AuxCoord(da.ones((3, 3), chunks=(1, 1)), var_name='xy') - aux_xz = AuxCoord(da.ones((3, 4), chunks=(1, 1)), var_name='xz') - aux_yz = AuxCoord(da.ones((3, 4), chunks=(1, 1)), var_name='yz') + aux_z = AuxCoord(da.ones(4, chunks=1), var_name="aux_z") + aux_xy = AuxCoord(da.ones((3, 3), chunks=(1, 1)), var_name="xy") + aux_xz = AuxCoord(da.ones((3, 4), chunks=(1, 1)), var_name="xz") + aux_yz = AuxCoord(da.ones((3, 4), chunks=(1, 1)), var_name="yz") aux_xyz = AuxCoord( da.ones((3, 3, 4), chunks=(1, 1, 1)), bounds=da.ones((3, 3, 4, 3), chunks=(1, 1, 1, 1)), - var_name='xyz', + var_name="xyz", ) aux_coords_and_dims = [ (aux_x, 0), @@ -277,15 +281,15 @@ def cube_3d(): # CellMeasures and AncillaryVariables cell_measure = CellMeasure( - da.ones((3, 4), chunks=(1, 1)), var_name='cell_measure' + da.ones((3, 4), chunks=(1, 1)), var_name="cell_measure" ) anc_var = AncillaryVariable( - da.ones((3, 4), chunks=(1, 1)), var_name='anc_var' + da.ones((3, 4), chunks=(1, 1)), var_name="anc_var" ) return Cube( da.ones((3, 3, 4), chunks=(1, 1, 1)), - var_name='cube', + var_name="cube", dim_coords_and_dims=[(x, 0), (y, 1), (z, 2)], aux_coords_and_dims=aux_coords_and_dims, cell_measures_and_dims=[(cell_measure, (1, 2))], @@ -297,82 +301,82 @@ def test_rechunk_cube_fully_lazy(cube_3d): """Test ``rechunk_cube``.""" input_cube = cube_3d.copy() - x_coord = input_cube.coord('x') - result = rechunk_cube(input_cube, [x_coord, 'y'], remaining_dims=2) + x_coord = input_cube.coord("x") + result = rechunk_cube(input_cube, [x_coord, "y"], remaining_dims=2) assert input_cube == cube_3d assert result == cube_3d assert result.core_data().chunksize == (3, 3, 2) - assert result.coord('aux_x').core_points().chunksize == (3,) - assert result.coord('aux_z').core_points().chunksize == (1,) - assert result.coord('xy').core_points().chunksize == (3, 3) - assert result.coord('xz').core_points().chunksize == (3, 2) - assert result.coord('yz').core_points().chunksize == (3, 2) - assert result.coord('xyz').core_points().chunksize == (3, 3, 2) - assert result.coord('aux_x').core_bounds().chunksize == 
-    assert result.coord('aux_z').core_bounds() is None
-    assert result.coord('xy').core_bounds() is None
-    assert result.coord('xz').core_bounds() is None
-    assert result.coord('yz').core_bounds() is None
-    assert result.coord('xyz').core_bounds().chunksize == (3, 3, 2, 2)
-    assert result.cell_measure('cell_measure').core_data().chunksize == (3, 2)
-    assert result.ancillary_variable('anc_var').core_data().chunksize == (3, 2)
-
-
-@pytest.mark.parametrize('complete_dims', [['x', 'y'], ['xy']])
+    assert result.coord("aux_x").core_points().chunksize == (3,)
+    assert result.coord("aux_z").core_points().chunksize == (1,)
+    assert result.coord("xy").core_points().chunksize == (3, 3)
+    assert result.coord("xz").core_points().chunksize == (3, 2)
+    assert result.coord("yz").core_points().chunksize == (3, 2)
+    assert result.coord("xyz").core_points().chunksize == (3, 3, 2)
+    assert result.coord("aux_x").core_bounds().chunksize == (3, 2)
+    assert result.coord("aux_z").core_bounds() is None
+    assert result.coord("xy").core_bounds() is None
+    assert result.coord("xz").core_bounds() is None
+    assert result.coord("yz").core_bounds() is None
+    assert result.coord("xyz").core_bounds().chunksize == (3, 3, 2, 2)
+    assert result.cell_measure("cell_measure").core_data().chunksize == (3, 2)
+    assert result.ancillary_variable("anc_var").core_data().chunksize == (3, 2)
+
+
+@pytest.mark.parametrize("complete_dims", [["x", "y"], ["xy"]])
 def test_rechunk_cube_partly_lazy(cube_3d, complete_dims):
     """Test ``rechunk_cube``."""
     input_cube = cube_3d.copy()

     # Realize some arrays
     input_cube.data
-    input_cube.coord('xyz').points
-    input_cube.coord('xyz').bounds
-    input_cube.cell_measure('cell_measure').data
+    input_cube.coord("xyz").points
+    input_cube.coord("xyz").bounds
+    input_cube.cell_measure("cell_measure").data

     result = rechunk_cube(input_cube, complete_dims, remaining_dims=2)

     assert input_cube == cube_3d
     assert result == cube_3d

     assert not result.has_lazy_data()
-    assert result.coord('aux_x').core_points().chunksize == (3,)
-    assert result.coord('aux_z').core_points().chunksize == (1,)
-    assert result.coord('xy').core_points().chunksize == (3, 3)
-    assert result.coord('xz').core_points().chunksize == (3, 2)
-    assert result.coord('yz').core_points().chunksize == (3, 2)
-    assert not result.coord('xyz').has_lazy_points()
-    assert result.coord('aux_x').core_bounds().chunksize == (3, 2)
-    assert result.coord('aux_z').core_bounds() is None
-    assert result.coord('xy').core_bounds() is None
-    assert result.coord('xz').core_bounds() is None
-    assert result.coord('yz').core_bounds() is None
-    assert not result.coord('xyz').has_lazy_bounds()
-    assert not result.cell_measure('cell_measure').has_lazy_data()
-    assert result.ancillary_variable('anc_var').core_data().chunksize == (3, 2)
+    assert result.coord("aux_x").core_points().chunksize == (3,)
+    assert result.coord("aux_z").core_points().chunksize == (1,)
+    assert result.coord("xy").core_points().chunksize == (3, 3)
+    assert result.coord("xz").core_points().chunksize == (3, 2)
+    assert result.coord("yz").core_points().chunksize == (3, 2)
+    assert not result.coord("xyz").has_lazy_points()
+    assert result.coord("aux_x").core_bounds().chunksize == (3, 2)
+    assert result.coord("aux_z").core_bounds() is None
+    assert result.coord("xy").core_bounds() is None
+    assert result.coord("xz").core_bounds() is None
+    assert result.coord("yz").core_bounds() is None
+    assert not result.coord("xyz").has_lazy_bounds()
+    assert not result.cell_measure("cell_measure").has_lazy_data()
+    assert result.ancillary_variable("anc_var").core_data().chunksize == (3, 2)


 @pytest.fixture
 def lat_coord_1d():
     """1D latitude coordinate."""
-    return DimCoord([0, 1], standard_name='latitude')
+    return DimCoord([0, 1], standard_name="latitude")


 @pytest.fixture
 def lon_coord_1d():
     """1D longitude coordinate."""
-    return DimCoord([0, 1], standard_name='longitude')
+    return DimCoord([0, 1], standard_name="longitude")


 @pytest.fixture
 def lat_coord_2d():
     """2D latitude coordinate."""
-    return AuxCoord([[0, 1]], standard_name='latitude')
+    return AuxCoord([[0, 1]], standard_name="latitude")


 @pytest.fixture
 def lon_coord_2d():
     """2D longitude coordinate."""
-    return AuxCoord([[0, 1]], standard_name='longitude')
+    return AuxCoord([[0, 1]], standard_name="longitude")


 def test_has_regular_grid_no_lat_lon():
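Read together, the chunk-size assertions in these two tests describe the contract of rechunk_cube: the listed dimensions end up in one complete chunk each, every other dimension is limited to at most remaining_dims chunks, and lazy coordinates, cell measures and ancillary variables are rechunked along with the data while already-realized arrays are left alone. A condensed sketch of the fully lazy case, assuming the same signature the tests use:

    import dask.array as da
    from iris.coords import DimCoord
    from iris.cube import Cube

    from esmvalcore.iris_helpers import rechunk_cube

    x = DimCoord([0, 1, 2], var_name="x")
    y = DimCoord([0, 1, 2], var_name="y")
    z = DimCoord([0, 1, 2, 3], var_name="z")
    cube = Cube(
        da.ones((3, 3, 4), chunks=(1, 1, 1)),
        var_name="cube",
        dim_coords_and_dims=[(x, 0), (y, 1), (z, 2)],
    )

    result = rechunk_cube(cube, ["x", "y"], remaining_dims=2)
    # x and y are single complete chunks; z is split into two chunks.
    assert result.core_data().chunksize == (3, 3, 2)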
diff --git a/tests/unit/test_iris_io.py b/tests/unit/test_iris_io.py
index 1b81c78d2f..f7ca0611ef 100644
--- a/tests/unit/test_iris_io.py
+++ b/tests/unit/test_iris_io.py
@@ -1,4 +1,5 @@
 """Test various issues we discovered with iris over time."""
+
 import dask.array as da
 import iris
 import numpy as np
@@ -8,34 +9,34 @@
 def create_fully_lazy_cube():
     """Create cube with lazy aux coord and aggregate over this dimension."""
-    dim_coord = DimCoord(da.arange(10), var_name='time')
+    dim_coord = DimCoord(da.arange(10), var_name="time")
     # fully lazy coord points
-    aux_coord = AuxCoord(da.arange(10), var_name='year')
+    aux_coord = AuxCoord(da.arange(10), var_name="year")
     cube = Cube(
         da.arange(10),
-        var_name='x',
+        var_name="x",
         dim_coords_and_dims=[(dim_coord, 0)],
         aux_coords_and_dims=[(aux_coord, 0)],
     )
-    cube = cube.collapsed('time', iris.analysis.MEAN)
+    cube = cube.collapsed("time", iris.analysis.MEAN)
     return cube


 def create_regular_cube():
     """Create cube with lazy aux coord and aggregate over this dimension."""
-    dim_coord = DimCoord(np.arange(10), var_name='time')
+    dim_coord = DimCoord(np.arange(10), var_name="time")
     # fully lazy coord points
-    aux_coord = AuxCoord(np.arange(10), var_name='year')
+    aux_coord = AuxCoord(np.arange(10), var_name="year")
     cube = Cube(
         np.arange(10),
-        var_name='x',
+        var_name="x",
         dim_coords_and_dims=[(dim_coord, 0)],
         aux_coords_and_dims=[(aux_coord, 0)],
     )
-    cube = cube.collapsed('time', iris.analysis.MEAN)
+    cube = cube.collapsed("time", iris.analysis.MEAN)
     return cube

@@ -48,7 +49,7 @@ def test_iris_save_with_lazy_coordinate(tmp_path):
     """
     print("iris version:", iris.__version__)
     cube = create_fully_lazy_cube()
-    save_path = tmp_path / 'test_iris_v32.nc'
+    save_path = tmp_path / "test_iris_v32.nc"
     iris.save(cube, save_path)
     print("Attempted to load ", save_path)
     loaded_cube = iris.load_cube(save_path.as_posix())
@@ -63,7 +64,7 @@ def test_iris_save_with_regular_coordinate(tmp_path):
     """
     print("iris version:", iris.__version__)
     cube = create_regular_cube()
-    save_path = tmp_path / 'test_iris_v32.nc'
+    save_path = tmp_path / "test_iris_v32.nc"
     iris.save(cube, save_path)
     print("Attempted to load ", save_path)
     loaded_cube = iris.load_cube(save_path.as_posix())
diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py
index f83ced8b2e..ee326103c3 100644
--- a/tests/unit/test_logging.py
+++ b/tests/unit/test_logging.py
@@ -8,7 +8,7 @@
 from esmvalcore.config._logging import FilterMultipleNames, configure_logging


-@pytest.mark.parametrize('level', (None, 'INFO', 'DEBUG'))
+@pytest.mark.parametrize("level", (None, "INFO", "DEBUG"))
 def test_logging_with_level(level):
     """Test log level configuration."""
     ret = configure_logging(console_log_level=level)
@@ -35,32 +35,32 @@ def test_logging_with_output_dir(tmp_path):

 def test_logging_log_level_invalid():
     """Test failure condition for invalid level specification."""
     with pytest.raises(ValueError):
-        configure_logging(console_log_level='FAIL')
+        configure_logging(console_log_level="FAIL")


 @pytest.mark.parametrize(
-    'names,mode,output',
+    "names,mode,output",
     [
-        (['test'], 'allow', False),
-        (['test'], 'disallow', True),
-        (['test', 'another.test'], 'allow', False),
-        (['test', 'another.test'], 'disallow', True),
-        (['test', 'm.a.b.c'], 'allow', False),
-        (['test', 'm.a.b.c'], 'disallow', True),
-        (['a.b.c'], 'allow', True),
-        (['a.b.c'], 'disallow', False),
-        (['a'], 'allow', True),
-        (['a'], 'disallow', False),
-        (['a.b', 'test'], 'allow', True),
-        (['a.b', 'test'], 'disallow', False),
-        (['a.b', 'a.b.c'], 'allow', True),
-        (['a.b', 'a.b.c'], 'disallow', False),
-    ]
+        (["test"], "allow", False),
+        (["test"], "disallow", True),
+        (["test", "another.test"], "allow", False),
+        (["test", "another.test"], "disallow", True),
+        (["test", "m.a.b.c"], "allow", False),
+        (["test", "m.a.b.c"], "disallow", True),
+        (["a.b.c"], "allow", True),
+        (["a.b.c"], "disallow", False),
+        (["a"], "allow", True),
+        (["a"], "disallow", False),
+        (["a.b", "test"], "allow", True),
+        (["a.b", "test"], "disallow", False),
+        (["a.b", "a.b.c"], "allow", True),
+        (["a.b", "a.b.c"], "disallow", False),
+    ],
 )
 def test_filter_multiple_names(names, mode, output):
     """Test `FilterMultipleNames`."""
     filter = FilterMultipleNames(names, mode)
     record = logging.LogRecord(
-        'a.b.c', 'level', 'path', 'lineno', 'msg', [], 'exc_info'
+        "a.b.c", "level", "path", "lineno", "msg", [], "exc_info"
     )
     assert filter.filter(record) is output
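Judging by the cases above, FilterMultipleNames matches logger names hierarchically, like the standard logging filters: a record from logger "a.b.c" passes in "allow" mode exactly when one of the given names is "a.b.c" itself or an ancestor of it, and "disallow" inverts that. A sketch of one row of the table (the LogRecord arguments here are only illustrative):

    import logging

    from esmvalcore.config._logging import FilterMultipleNames

    log_filter = FilterMultipleNames(["a.b", "test"], "allow")
    record = logging.LogRecord(
        "a.b.c", logging.INFO, "path", 1, "msg", (), None
    )
    assert log_filter.filter(record) is True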
+ " Please rename it " + "(Windows reserved names are: {})" + "".format(dirpath, ",".join(reserved_names)) + ) self.assertTrue(reserved_names.isdisjoint(dirnames), error_msg) self.assertTrue(reserved_names.isdisjoint(filenames), error_msg) - without_extensions = (os.path.splitext(filename)[0] - for filename in filenames) + without_extensions = ( + os.path.splitext(filename)[0] for filename in filenames + ) self.assertTrue( - reserved_names.isdisjoint(without_extensions), error_msg) + reserved_names.isdisjoint(without_extensions), error_msg + ) def test_avoid_casing_collisions(self): """ @@ -45,11 +68,11 @@ def test_avoid_casing_collisions(self): for dirpath, dirnames, filenames in os.walk(self.esmvaltool_folder): self.assertEqual( len(filenames) + len(dirnames), - len({name.lower() - for name in filenames + dirnames}), - 'Colliding names found at {0}. Please do not ' - 'use names that only differ in ' - 'capitalization'.format(dirpath)) + len({name.lower() for name in filenames + dirnames}), + "Colliding names found at {0}. Please do not " + "use names that only differ in " + "capitalization".format(dirpath), + ) def test_no_namelist(self): """ @@ -57,15 +80,18 @@ def test_no_namelist(self): This will help us to avoid bad merges with stale branches """ - exclude_paths = ['esmvaltool/diag_scripts/cvdp/cvdp'] + exclude_paths = ["esmvaltool/diag_scripts/cvdp/cvdp"] for dirpath, dirnames, filenames in os.walk(self.esmvaltool_folder): - if '.git' in dirpath.split(os.sep): + if ".git" in dirpath.split(os.sep): continue if any([item in dirpath for item in exclude_paths]): continue self.assertFalse( - any('namelist' in name.lower() - for name in filenames + dirnames), - 'Namelist reference found at {}. Please use "recipe" instead'. - format(dirpath)) + any( + "namelist" in name.lower() for name in filenames + dirnames + ), + 'Namelist reference found at {}. Please use "recipe" instead'.format( + dirpath + ), + ) diff --git a/tests/unit/test_provenance.py b/tests/unit/test_provenance.py index 5dba8c9bab..b6c20dbc2e 100644 --- a/tests/unit/test_provenance.py +++ b/tests/unit/test_provenance.py @@ -1,24 +1,25 @@ """Test `esmvalcore._provenance`.""" + from esmvalcore._provenance import TrackedFile def test_set(): assert { - TrackedFile('file1.nc', attributes={}), - TrackedFile('file1.nc', attributes={}), - TrackedFile('file2.nc', attributes={}), + TrackedFile("file1.nc", attributes={}), + TrackedFile("file1.nc", attributes={}), + TrackedFile("file2.nc", attributes={}), } == { - TrackedFile('file1.nc', attributes={}), - TrackedFile('file2.nc', attributes={}), + TrackedFile("file1.nc", attributes={}), + TrackedFile("file2.nc", attributes={}), } def test_sort(): - file1 = TrackedFile('file1.nc', attributes={}) - file2 = TrackedFile('file2.nc', attributes={}) + file1 = TrackedFile("file1.nc", attributes={}) + file2 = TrackedFile("file2.nc", attributes={}) assert sorted([file2, file1]) == [file1, file2] def test_equals(): - file = TrackedFile('file.nc', attributes={}) - assert file == TrackedFile('file.nc', attributes={}) + file = TrackedFile("file.nc", attributes={}) + assert file == TrackedFile("file.nc", attributes={}) diff --git a/tests/unit/test_version.py b/tests/unit/test_version.py index b825114b76..3dca6d14ba 100644 --- a/tests/unit/test_version.py +++ b/tests/unit/test_version.py @@ -1,9 +1,9 @@ """Test that esmvalcore.__version__ returns a version number.""" + import re import esmvalcore def test_version(): - assert re.match(r"^\d+\.\d+\.\d+\S*$", esmvalcore.__version__)