diff --git a/.github/jobs/docker_setup.sh b/.github/jobs/docker_setup.sh index 46e1e61e84..f5c923dad9 100755 --- a/.github/jobs/docker_setup.sh +++ b/.github/jobs/docker_setup.sh @@ -36,11 +36,17 @@ MET_TAG=`${GITHUB_WORKSPACE}/scripts/docker/hooks/get_met_version` MET_DOCKER_REPO=met-dev if [ "${MET_TAG}" != "develop" ]; then MET_DOCKER_REPO=met +elif [ "${EXTERNAL_TRIGGER}" == "true" ]; then + # if MET tag is develop and external repo triggered workflow + # then append -lite to MET tag name to use tag generated by + # MET GHA workflow that does not include MET unit test tools + MET_TAG=${MET_TAG}-lite fi # if MET_FORCE_TAG variable is set and not empty, use that version instead if [ ! -z "$MET_FORCE_TAG" ]; then MET_TAG=$MET_FORCE_TAG + MET_DOCKER_REPO=met fi echo Using MET_DOCKER_REPO=$MET_DOCKER_REPO diff --git a/.github/jobs/get_use_cases_to_run.sh b/.github/jobs/get_use_cases_to_run.sh index bfd2991038..39c250474c 100755 --- a/.github/jobs/get_use_cases_to_run.sh +++ b/.github/jobs/get_use_cases_to_run.sh @@ -9,7 +9,7 @@ run_all_use_cases=$2 run_unit_tests=$3 echo Run use cases: $run_use_cases -echo Run All use cases: $run_all_use_cases +echo Run all use cases: $run_all_use_cases echo Run unit tests: $run_unit_tests # if running use cases, generate JQ filter to use diff --git a/.github/jobs/set_job_controls.sh b/.github/jobs/set_job_controls.sh index 007adb50eb..e8015992c1 100755 --- a/.github/jobs/set_job_controls.sh +++ b/.github/jobs/set_job_controls.sh @@ -94,3 +94,11 @@ echo ::set-output name=branch_name::$branch_name # get use cases to run .github/jobs/get_use_cases_to_run.sh $run_use_cases $run_all_use_cases $run_unit_tests + +# echo output variables to review in logs +echo branch_name: $branch_name +echo run_diff: $run_diff +echo run_save_truth_data: $run_save_truth_data +echo external_trigger: $external_trigger +echo run_get_image: $run_get_image +echo run_get_input_data: $run_get_input_data diff --git a/.github/parm/use_case_groups.json 
b/.github/parm/use_case_groups.json index 025054ccc9..372caf3779 100644 --- a/.github/parm/use_case_groups.json +++ b/.github/parm/use_case_groups.json @@ -61,12 +61,12 @@ }, { "category": "marine_and_cryosphere", - "index_list": "3-4", + "index_list": "3-5", "run": false }, { "category": "marine_and_cryosphere", - "index_list": "5", + "index_list": "6", "run": false }, { diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 858fb6fbe2..1b47909874 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -84,6 +84,7 @@ jobs: env: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} + EXTERNAL_TRIGGER: ${{ needs.job_control.outputs.external_trigger }} #MET_FORCE_TAG: 10.0.0 update_data_volumes: diff --git a/docs/Release_Guide/met_official.rst b/docs/Release_Guide/met_official.rst index 1cf1457509..7bef52530a 100644 --- a/docs/Release_Guide/met_official.rst +++ b/docs/Release_Guide/met_official.rst @@ -13,6 +13,7 @@ Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/update_release_notes_official.rst .. include:: release_steps/rotate_authorship.rst .. include:: release_steps/merge_release_issue.rst +.. include:: release_steps/met/create_release_reference_branch.rst .. include:: release_steps/create_release_branch.rst .. include:: release_steps/create_release_on_github.rst .. include:: release_steps/create_release_extra.rst diff --git a/docs/Release_Guide/metplus_official.rst b/docs/Release_Guide/metplus_official.rst index caf0b4deda..47a1ba344d 100644 --- a/docs/Release_Guide/metplus_official.rst +++ b/docs/Release_Guide/metplus_official.rst @@ -15,7 +15,6 @@ Create a new vX.Y.Z official release from the develop branch. .. include:: release_steps/metplus/update_manage_externals.rst .. include:: release_steps/merge_release_issue.rst .. include:: release_steps/metplus/create_release_reference_branch.rst -.. 
include:: release_steps/metplus/update_release_content.rst .. include:: release_steps/push_release_branch.rst .. include:: release_steps/create_release_on_github.rst .. include:: release_steps/metplus/create_release_extra.rst diff --git a/docs/Release_Guide/release_steps/create_release_on_github.rst b/docs/Release_Guide/release_steps/create_release_on_github.rst index a20cd4f28a..c1346b2cd0 100644 --- a/docs/Release_Guide/release_steps/create_release_on_github.rst +++ b/docs/Release_Guide/release_steps/create_release_on_github.rst @@ -13,7 +13,6 @@ Create Release on GitHub https://|projectRepo|.readthedocs.io/en/vX.Y.Z-betaN/Users_Guide/release-notes.html (Note: the URL will not be active until the release is created) -* Add a link to the PDF of the |projectRepo| User's Guide, if available. - The PDF can be downloaded from ReadTheDocs if it is available, i.e. +* Add links to the HTML and PDF versions of the |projectRepo| User's Guide on ReadTheDocs. https://|projectRepo|.readthedocs.io/_/downloads/en/vX.Y.Z-betaN/pdf/ (Note: the URL will not be active until the release is created) diff --git a/docs/Release_Guide/release_steps/finalize_release_on_github_official.rst b/docs/Release_Guide/release_steps/finalize_release_on_github_official.rst index ca2abafbd7..45123c695d 100644 --- a/docs/Release_Guide/release_steps/finalize_release_on_github_official.rst +++ b/docs/Release_Guide/release_steps/finalize_release_on_github_official.rst @@ -1,14 +1,33 @@ Finalize Release on GitHub -------------------------- -* Close the GitHub issue for creating this official release. -* Edit the milestone for the current release by updating the *Due date* with the actual release date. -* If necessary, create a new milestone for the next official release (e.g. next vX.Y.Z release). -* If necessary, reassign any remaining issues for the current milestone to the next one. -* Close the current milestone. -* Confirm that all existing development projects for the current milestone are closed. 
-* If necessary, create development projects for the next milestone (e.g. |projectRepo|-X.Y.Z-beta1, beta2, beta3). -* Update the repository settings by resetting the *Default branch* to the new main_vX.Y branch: +* Update milestones: + + * Edit the milestone for the current release by updating the *Due date* with the actual release date. + + * Close the current milestone. + + * Create a new milestone for the first bugfix release (e.g. first vX.Y.1 (bugfix) release). + + * If necessary, create a new milestone for the next official release (e.g. next vX.Y.Z release). + +* Update issues: + + * Close the GitHub issue for creating this official release. + + * If necessary, reassign any remaining issues for the current milestone to other milestones. + +* Update projects: + + * Confirm that all existing development projects for the current milestone are closed. + + * If necessary, create development projects for the next milestone (e.g. |projectRepo|-X.Y.Z-beta1, beta2, beta3). + +* Update branches: + + * Remove any remaining stale development branches from the new release. + + * Update the repository settings by resetting the *Default branch* to the new main_vX.Y branch: .. parsed-literal:: @@ -16,4 +35,3 @@ Finalize Release on GitHub -> Settings -> Branches (tab on left) -> change the drop down to new branch - diff --git a/docs/Release_Guide/release_steps/met/create_release_reference_branch.rst b/docs/Release_Guide/release_steps/met/create_release_reference_branch.rst new file mode 100644 index 0000000000..c4018922da --- /dev/null +++ b/docs/Release_Guide/release_steps/met/create_release_reference_branch.rst @@ -0,0 +1,35 @@ +Create Release Reference Branch +------------------------------- + +* Create a branch from the develop branch for the reference branch for the + new official release and push it to GitHub. The branch name should match + the format main_vX.Y-ref where X.Y is the major/minor release number. + +.. 
parsed-literal:: + + cd |projectRepo| + git checkout develop + git pull + git checkout -b main_vX.Y-ref + +Push Reference Branch to GitHub +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + git push -u origin main_vX.Y-ref + +Pushing this branch to GitHub should trigger the GitHub Actions automation +that runs all of the use cases and creates Docker data volumes with the output +data. These data will be used to verify that any bugfixes applied to the +main_vX.Y branch do not break any existing logic. + +Monitor GitHub Actions Workflow +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Navigate to https://github.com/dtcenter/MET/actions and verify that a +*Testing* workflow was triggered on the *main_vX.Y-ref* branch. + +* Wait until the entire workflow has run successfully. The final job entitled + "Create Output Docker Data Volumes" should create Docker data volumes for + each use case category on DockerHub (dtcenter/met-dev). diff --git a/docs/Release_Guide/release_steps/metplus/create_release_reference_branch.rst b/docs/Release_Guide/release_steps/metplus/create_release_reference_branch.rst index 3ff40d7d75..db2a4db7ec 100644 --- a/docs/Release_Guide/release_steps/metplus/create_release_reference_branch.rst +++ b/docs/Release_Guide/release_steps/metplus/create_release_reference_branch.rst @@ -18,6 +18,31 @@ Create Release Reference Branch git checkout develop git pull git checkout -b main_vX.Y-ref + +Update the version number +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Update the content that should go into the release version but remain unchanged +in the develop branch. + +Remove **-dev** from the version number: + +* As of METplus 4.0.0, we are naming releases with X.Y.Z format even if Z is 0. +* As of METplus v4.0.0, the file containing the version number is located at + **metplus/VERSION** (in earlier releases, the file was located at + docs/version or doc/version).
+* In the develop branch, the version should match the upcoming release + with -dev added to the end like X.Y.Z-betaN-dev, i.e. 4.0.0-beta1-dev +* Remove **-dev** from the version number so that it matches the release + you are creating. + +Commit these changes. + +Push Reference Branch to GitHub +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + git push -u origin main_vX.Y-ref Pushing this branch to GitHub should trigger the GitHub Actions automation @@ -25,6 +50,9 @@ that runs all of the use cases and creates Docker data volumes with the output data. These data will be used to verify that any bugfixes applied to the main_vX.Y branch does not break any of existing logic. +Monitor GitHub Actions Workflow +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + Navigate to https://github.com/dtcenter/METplus/actions and verify that a *Testing* workflow was triggered on the *main_vX.Y-ref* branch. @@ -35,6 +63,9 @@ Navigate to https://github.com/dtcenter/METplus/actions and verify that a each use case category on DockerHub (dtcenter/metplus-data-dev). The names of these volumes start with *output-*. +Create main branch and push to GitHub +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + * After the truth data volumes have been generated, create the main_vX.Y branch off of the -ref branch. diff --git a/docs/Release_Guide/release_steps/metplus/update_release_content.rst b/docs/Release_Guide/release_steps/metplus/update_release_content.rst deleted file mode 100644 index 3137e22794..0000000000 --- a/docs/Release_Guide/release_steps/metplus/update_release_content.rst +++ /dev/null @@ -1,19 +0,0 @@ -Update Release Content ----------------------- - -Update content that should go into the release version but remain unchanged -in the develop branch. - -Update the version number -^^^^^^^^^^^^^^^^^^^^^^^^^ - -Remove **-dev** from the version number: - -* As of METplus 4.0.0, we are naming releases with X.Y.Z format even if Z is 0. 
-* As of METplus v4.0.0, the file containing the version number is located at - **metplus/VERSION** (in earlier releases, the file was located at - docs/version or doc/version). -* In the develop branch, the version should match the upcoming release - with -dev added to the end like X.Y.Z-betaN-dev, i.e. 4.0.0-beta1-dev -* Remove **-dev** from the version number so that it matches the release - you are creating. diff --git a/docs/Release_Guide/release_steps/open_release_issue.rst b/docs/Release_Guide/release_steps/open_release_issue.rst index a1ea1abaec..46cdc15d83 100644 --- a/docs/Release_Guide/release_steps/open_release_issue.rst +++ b/docs/Release_Guide/release_steps/open_release_issue.rst @@ -7,7 +7,7 @@ Open Release Issue * "Create the |projectRepo| vX.Y.Z-betaN (or -rcN) release" for a development release. -* For an official or development release, assign the issue to the current |projectRepo| development project. +* For an official or development release, assign the issue to the corresponding |projectRepo| development project. * For a bugfix release, assign the issue to the organization level support project. 
diff --git a/docs/_static/marine_and_cryosphere-UserScript_fcstRTOFS_obsAOML_calcTransport.png b/docs/_static/marine_and_cryosphere-UserScript_fcstRTOFS_obsAOML_calcTransport.png new file mode 100644 index 0000000000..68df6c1143 Binary files /dev/null and b/docs/_static/marine_and_cryosphere-UserScript_fcstRTOFS_obsAOML_calcTransport.png differ diff --git a/docs/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.py b/docs/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.py new file mode 100644 index 0000000000..73709c5a56 --- /dev/null +++ b/docs/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.py @@ -0,0 +1,159 @@ +""" +UserScript: Python Script to compute cable transport +======================================================= + +model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.conf + +""" +############################################################################## +# Scientific Objective +# -------------------- +# The Florida Current flows northward along the eastern Florida coast and feeds into the Gulf Stream. More info can +# be obtained from: https://www.aoml.noaa.gov/phod/floridacurrent/index.php +# +# This use case utilizes a Python script to calculate transport (units Sv) variations of the Florida Current +# using a submarine cable and snapshot estimates made by shipboard instruments. The code computes the transport +# using RTOFS data and compares it with the AOML cable transport data and computes BIAS, RMSE, CORRELATION, and +# Scatter Index. The operational code utilizes 21 days of data and computes 7 day statistics. +# For the use case 3 days of data are utilized. The valid date is passed through an argument. The valid date +# is the last processed day i.e. the code grabs 3 previous days of data.
+ +############################################################################## +# Datasets +# --------------------- +# +# | **Forecast:** RTOFS u(3zuio) amd ,v(3zvio) files via Python Embedding script/file +# +# | **Observations:** AOML Florida Current data via Python Embedding script/file +# +# +# | **Location:** All of the input data required for this use case can be found in the met_test sample data tarball. Click here to the METplus releases page and download sample data for the appropriate release: https://github.com/dtcenter/METplus/releases +# | This tarball should be unpacked into the directory that you will set the value of INPUT_BASE. See `Running METplus`_ section for more information. +# +# | **Data Source:** NOMADS RTOFS Global + Daily mean transport (https://www.aoml.noaa.gov/phod/floridacurrent/data_access.php)+ Eightmilecable (static, provided with the use case) + +############################################################################## +# External Dependencies +# --------------------- +# +# You will need to use a version of Python 3.6+ that has the following packages installed: +# +# * scikit-learn +# * pyproj +# +# If the version of Python used to compile MET did not have these libraries at the time of compilation, you will need to add these packages or create a new Python environment with these packages. +# +# If this is the case, you will need to set the MET_PYTHON_EXE environment variable to the path of the version of Python you want to use. If you want this version of Python to only apply to this use case, set it in the [user_env_vars] section of a METplus configuration file.:: +# +# [user_env_vars] +# MET_PYTHON_EXE = /path/to/python/with/required/packages/bin/python + +############################################################################## +# METplus Components +# ------------------ +# +# This use case utilizes the METplus UserScript wrapper to generate a +# command to run with Python Embedding for the specified valid time. 
+ +############################################################################## +# METplus Workflow +# ---------------- +# +# This use case uses UserScript. All the gridded data being pulled from the files via Python Embedding. +# All of the desired statistics are in the log file. +# It processes the following run time: +# +# | **Valid:** 2021-10-28 +# +# The code grabs the 20211028, 20211027, and 20211026 24 hour RTOFS files. + +############################################################################## +# METplus Configuration +# --------------------- +# +# METplus first loads all of the configuration files found in parm/metplus_config, +# then it loads any configuration files passed to METplus via the command line +# i.e. -c parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.conf +# +# .. highlight:: bash +# .. literalinclude:: ../../../../parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.conf + +############################################################################## +# MET Configuration +# --------------------- +# +# None. All of the processing is completed in the UserScript +# + +############################################################################## +# User Script +# ---------------- +# +# This use case uses one Python script to read forecast and observation data +# as well as processing the desired statistics. +# +# parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport/read_aomlcable_rtofs_transport.py +# +# .. highlight:: python +# .. 
literalinclude:: ../../../../parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport/read_aomlcable_rtofs_transport.py +# + +############################################################################## +# Running METplus +# --------------- +# +# This use case can be run two ways: +# +# 1) Passing in UserScript_fcstRTOFS_obsAOML_calcTransport.conf then a user-specific system configuration file:: +# +# run_metplus.py /path/to/METplus/parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.conf /path/to/user_system.conf +# +# 2) Modifying the configurations in parm/metplus_config, then passing in UserScript_fcstRTOFS_obsAOML_calcTransport.conf:: +# +# run_metplus.py /path/to/METplus/parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.conf +# +# The former method is recommended. Whether you add them to a user-specific configuration file or modify the metplus_config files, the following variables must be set correctly: +# +# * **INPUT_BASE** - Path to directory where sample data tarballs are unpacked (See Datasets section to obtain tarballs). This is not required to run METplus, but it is required to run the examples in parm/use_cases +# * **OUTPUT_BASE** - Path where METplus output will be written. This must be in a location where you have write permissions +# * **MET_INSTALL_DIR** - Path to location where MET is installed locally +# +# Example User Configuration File:: +# +# [config] +# INPUT_BASE = /path/to/sample/input/data +# OUTPUT_BASE = /path/to/output/dir +# MET_INSTALL_DIR = /path/to/met-X.Y +# +# + +############################################################################## +# Expected Output +# --------------- +# +# A successful run will output the following both to the screen and to the logfile:: +# +# INFO: METplus has successfully finished running. 
+# +# Refer to the value set for **OUTPUT_BASE** to find where the output data was generated. +# Output for use case will be found in calc_transport (relative to **OUTPUT_BASE**) +# and will contain the following files: +# +# * calc_transport.log + +############################################################################## +# Keywords +# -------- +# +# .. note:: +# +# * UserScriptUseCase +# * PythonEmbeddingFileUseCase +# * MarineAndCryosphereAppUseCase +# +# Navigate to the :ref:`quick-search` page to discover other similar use cases. +# +# +# +# sphinx_gallery_thumbnail_path = '_static/marine_and_cryosphere-UserScript_fcstRTOFS_obsAOML_calcTransport.png' + diff --git a/internal_tests/use_cases/all_use_cases.txt b/internal_tests/use_cases/all_use_cases.txt index f4bfbed186..32c4609407 100644 --- a/internal_tests/use_cases/all_use_cases.txt +++ b/internal_tests/use_cases/all_use_cases.txt @@ -93,6 +93,7 @@ Category: marine_and_cryosphere 3::GridStat_fcstRTOFS_obsSMOS_climWOA_sss::model_applications/marine_and_cryosphere/GridStat_fcstRTOFS_obsSMOS_climWOA_sss.conf:: icecover_env, py_embed 4::GridStat_fcstRTOFS_obsSMAP_climWOA_sss::model_applications/marine_and_cryosphere/GridStat_fcstRTOFS_obsSMAP_climWOA_sss.conf:: icecover_env, py_embed 5::GridStat_fcstRTOFS_obsAVISO_climHYCOM_ssh::model_applications/marine_and_cryosphere/GridStat_fcstRTOFS_obsAVISO_climHYCOM_ssh.conf:: icecover_env, py_embed +6::UserScript_fcstRTOFS_obsAOML_calcTransport::model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.conf:: icecover_env, py_embed #X::GridStat_fcstRTOFS_obsGHRSST_climWOA_sst::model_applications/marine_and_cryosphere/GridStat_fcstRTOFS_obsGHRSST_climWOA_sst.conf, model_applications/marine_and_cryosphere/GridStat_fcstRTOFS_obsGHRSST_climWOA_sst/ci_overrides.conf:: icecover_env, py_embed diff --git a/metplus/VERSION b/metplus/VERSION index 0e7ebd0348..3d298f67af 100644 --- a/metplus/VERSION +++ b/metplus/VERSION @@ -1 +1 @@ -4.1.0-dev \ No 
newline at end of file +5.0.0-beta1-dev diff --git a/parm/use_cases/met_tool_wrapper/Example/Example.conf b/parm/use_cases/met_tool_wrapper/Example/Example.conf index 023c6d5acd..6286e47ae5 100644 --- a/parm/use_cases/met_tool_wrapper/Example/Example.conf +++ b/parm/use_cases/met_tool_wrapper/Example/Example.conf @@ -58,4 +58,4 @@ EXAMPLE_INPUT_DIR = /dir/containing/example/data [filename_templates] # Fake template to use to look for input data. This template is substituted with the time information of each # run time that is executed -EXAMPLE_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/file_{init?fmt=%Y%m%d}_{init?fmt=%2H}_F{lead?fmt=%3H}.{custom?fmt=%s} \ No newline at end of file +EXAMPLE_INPUT_TEMPLATE = {init?fmt=%Y%m%d}/file_{init?fmt=%Y%m%d}_{init?fmt=%H}_F{lead?fmt=%3H}.{custom?fmt=%s} \ No newline at end of file diff --git a/parm/use_cases/met_tool_wrapper/PB2NC/PB2NC.conf b/parm/use_cases/met_tool_wrapper/PB2NC/PB2NC.conf index 591058aa8a..a10a30caee 100644 --- a/parm/use_cases/met_tool_wrapper/PB2NC/PB2NC.conf +++ b/parm/use_cases/met_tool_wrapper/PB2NC/PB2NC.conf @@ -15,7 +15,7 @@ PB2NC_OFFSETS = 12 PB2NC_SKIP_IF_OUTPUT_EXISTS = True PB2NC_INPUT_DIR = {INPUT_BASE}/met_test/data/sample_obs/prepbufr -PB2NC_INPUT_TEMPLATE = ndas.t{da_init?fmt=%2H}z.prepbufr.tm{offset?fmt=%2H}.{da_init?fmt=%Y%m%d}.nr +PB2NC_INPUT_TEMPLATE = ndas.t{da_init?fmt=%H}z.prepbufr.tm{offset?fmt=%2H}.{da_init?fmt=%Y%m%d}.nr PB2NC_OUTPUT_DIR = {OUTPUT_BASE}/pb2nc PB2NC_OUTPUT_TEMPLATE = sample_pb.nc diff --git a/parm/use_cases/met_tool_wrapper/PlotDataPlane/PlotDataPlane_grib1.conf b/parm/use_cases/met_tool_wrapper/PlotDataPlane/PlotDataPlane_grib1.conf index fcb19721f2..5a2a113c74 100644 --- a/parm/use_cases/met_tool_wrapper/PlotDataPlane/PlotDataPlane_grib1.conf +++ b/parm/use_cases/met_tool_wrapper/PlotDataPlane/PlotDataPlane_grib1.conf @@ -56,7 +56,7 @@ PLOT_DATA_PLANE_OUTPUT_DIR = [filename_templates] # Template to look for input to PlotDataPlane relative to PLOT_DATA_PLANE_INPUT_DIR 
-PLOT_DATA_PLANE_INPUT_TEMPLATE = {INPUT_BASE}/met_test/data/sample_fcst/{valid?fmt=%Y%m%d%H}/nam.t{valid?fmt=%2H}z.awip1236.tm00.{valid?fmt=%Y%m%d}.grb +PLOT_DATA_PLANE_INPUT_TEMPLATE = {INPUT_BASE}/met_test/data/sample_fcst/{valid?fmt=%Y%m%d%H}/nam.t{valid?fmt=%H}z.awip1236.tm00.{valid?fmt=%Y%m%d}.grb # Template to use to write output from PlotDataPlane -PLOT_DATA_PLANE_OUTPUT_TEMPLATE = {OUTPUT_BASE}/met_tool_wrapper/plot_data_plane/nam.t00z.awip1236.tm{valid?fmt=%2H}.{valid?fmt=%Y%m%d}_TMPZ2.ps +PLOT_DATA_PLANE_OUTPUT_TEMPLATE = {OUTPUT_BASE}/met_tool_wrapper/plot_data_plane/nam.t00z.awip1236.tm{valid?fmt=%H}.{valid?fmt=%Y%m%d}_TMPZ2.ps diff --git a/parm/use_cases/model_applications/convection_allowing_models/EnsembleStat_fcstHRRRE_obsHRRRE_Sfc_MultiField.conf b/parm/use_cases/model_applications/convection_allowing_models/EnsembleStat_fcstHRRRE_obsHRRRE_Sfc_MultiField.conf index f3dc96a313..03adfa304b 100644 --- a/parm/use_cases/model_applications/convection_allowing_models/EnsembleStat_fcstHRRRE_obsHRRRE_Sfc_MultiField.conf +++ b/parm/use_cases/model_applications/convection_allowing_models/EnsembleStat_fcstHRRRE_obsHRRRE_Sfc_MultiField.conf @@ -204,7 +204,7 @@ ENSEMBLE_STAT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/convection_allowing_ [filename_templates] # input and output templates for pb2nc -PB2NC_INPUT_TEMPLATE = {da_init?fmt=%Y%m%d}/{da_init?fmt=%Y%j%H%M}.rap.t{da_init?fmt=%2H}z.prepbufr.tm{offset?fmt=%2H}.{da_init?fmt=%Y%m%d} +PB2NC_INPUT_TEMPLATE = {da_init?fmt=%Y%m%d}/{da_init?fmt=%Y%j%H%M}.rap.t{da_init?fmt=%H}z.prepbufr.tm{offset?fmt=%2H}.{da_init?fmt=%Y%m%d} PB2NC_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/{valid?fmt=%Y%m%d%H}.rap.nc diff --git a/parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.conf b/parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.conf new file mode 100644 index 0000000000..cd7bfe6489 --- /dev/null +++ 
b/parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport.conf @@ -0,0 +1,73 @@ +[config] + +# List of applications to run +PROCESS_LIST = UserScript + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = VALID + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +VALID_TIME_FMT = %Y%m%d + +# Start time for METplus run - must match VALID_TIME_FMT +VALID_BEG = 20211028 + +# Increment between METplus runs (in seconds if no units are specified) +# Must be >= 60 seconds +VALID_INCREMENT = 24H + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = + +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. 
+# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run + +LOOP_ORDER = processes + +USER_SCRIPT_RUNTIME_FREQ = RUN_ONCE + +USER_SCRIPT_OUTPUT_DIR = {OUTPUT_BASE}/model_applications/marine_and_cryosphere/calc_transport + +USER_SCRIPT_INPUT_TEMPLATE = {VALID_BEG} + +# The valid date increments backwards +USER_SCRIPT_COMMAND = {PARM_BASE}/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport/read_aomlcable_rtofs_transport.py {USER_SCRIPT_INPUT_TEMPLATE} + + +[user_env_vars] + +# Calc Transport specific variables + +CALC_TRANSPORT_RTOFS_DIRNAME = {INPUT_BASE}/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport/RTOFS + +CALC_TRANSPORT_CABLE_FILENAME = {INPUT_BASE}/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport/FC_cable_transport_2021.dat + +CALC_TRANSPORT_EIGHTMILE_FILENAME = {INPUT_BASE}/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport/eightmilecable.dat + +CALC_TRANSPORT_LEAD_TIME = 24 + +# Calculate stats for number of days. The operational website uses 21 days +# of data and then calculates 7 day stats. For the use case both of them are 3 days each. +# The code calculates the number of subdirectories +# under RTOFS directory, however, CALC_TRANSPORT_STATS_DAY is the number of days the statistics +# will be calculated. 
+CALC_TRANSPORT_STATS_DAY = 3 + +CALC_TRANSPORT_LOG_FILE = calc_transport.log + +OUTPUT_DIR = {OUTPUT_BASE}/model_applications/marine_and_cryosphere/calc_transport diff --git a/parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport/read_aomlcable_rtofs_transport.py b/parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport/read_aomlcable_rtofs_transport.py new file mode 100755 index 0000000000..4d92710c68 --- /dev/null +++ b/parm/use_cases/model_applications/marine_and_cryosphere/UserScript_fcstRTOFS_obsAOML_calcTransport/read_aomlcable_rtofs_transport.py @@ -0,0 +1,188 @@ +#! /usr/bin/env python3 +""" +Florida Cable Transport Class-4 Validation System +Adapted from Todd Spindler's code +""" + +from netCDF4 import Dataset +import numpy as np +from pyproj import Geod +import math +from sklearn.metrics import mean_squared_error +from datetime import datetime, timedelta +import pandas as pd +import sys, os +import logging + +vDate=datetime.strptime(sys.argv[1],'%Y%m%d') +rtofsdir = os.environ.get('CALC_TRANSPORT_RTOFS_DIRNAME') +cablefile = os.environ.get('CALC_TRANSPORT_CABLE_FILENAME') +eightmilefile = os.environ.get('CALC_TRANSPORT_EIGHTMILE_FILENAME') + +print('Starting Cable V&V at',datetime.now(),'for',vDate) + + + +if not os.path.exists(cablefile): + print('missing AOML Cable transport file for',vDate) + +#----------------------------------------------- +# read cable transport data from AOML +#----------------------------------------------- + +# read the AOML dataset +names=['year','month','day','transport'] +cable=pd.read_csv(cablefile,comment='%',names=names,delimiter=' ', + skipinitialspace=True,header=None,usecols=list(range(4))) +cable['date']=pd.to_datetime(cable[['year','month','day']]) +cable.index=cable.date +cable['error']=2.0 +del cable['year'], cable['month'], cable['day'], cable['date'] +print(cable) + +#----------------------------------------------- +# full 
cross-section transport calculation +#----------------------------------------------- +def calc_transport(dates,fcst): + """ + Calculate the transport of water across the Florida Straits + This extracts the section and integrates the flow through it. + """ + transport=[] + fcst_str='f{:03d}'.format(fcst) + cable_loc=np.loadtxt(eightmilefile,dtype='int',usecols=(0,1)) + eightmile_lat = 26.5167 + eightmile_lon = -78.7833%360 + wpb_lat = 26.7153425 + wpb_lon = -80.0533746%360 + cable_angle = math.atan((eightmile_lat-wpb_lat)/(eightmile_lon-wpb_lon)) + g=Geod(ellps='WGS84') + + for date in dates: + print('DATE :', date, ' DATES :',dates) + print('processing',date.strftime('%Y%m%d'),'fcst',fcst) + rundate=date-timedelta(fcst/24.) # calc rundate from fcst and date + ufile=rtofsdir+'/'+rundate.strftime('%Y%m%d')+'/rtofs_glo_3dz_'+fcst_str+'_daily_3zuio.nc' + vfile=rtofsdir+'/'+rundate.strftime('%Y%m%d')+'/rtofs_glo_3dz_'+fcst_str+'_daily_3zvio.nc' + + print(ufile) + print(vfile) + + udata=Dataset(ufile) + vdata=Dataset(vfile) + + lon=udata['Longitude'][:] + lat=udata['Latitude'][:] + depth=udata['Depth'][:] + + usection=np.zeros((depth.shape[0],cable_loc.shape[0])) + vsection=np.zeros((depth.shape[0],cable_loc.shape[0])) + + udata=udata['u'][:].squeeze() + vdata=vdata['v'][:].squeeze() + + for ncol,(row,col) in enumerate(cable_loc): + usection[:,ncol]=udata[:,row,col].filled(fill_value=0.0) + vsection[:,ncol]=vdata[:,row,col].filled(fill_value=0.0) + + lon=lon[cable_loc[:,0],cable_loc[:,1]] + lat=lat[cable_loc[:,0],cable_loc[:,1]] + + # compute the distances along the track + _,_,dist=g.inv(lon[0:-1],lat[0:-1],lon[1:],lat[1:]) + depth=np.diff(depth) + usection=usection[:-1,:-1] + vsection=vsection[:-1,:-1] + + dist,depth=np.meshgrid(dist,depth) + u,v=rotate(usection,vsection,cable_angle) + trans1=(v*dist*depth).sum()/1e6 + #print(date.strftime('%Y-%m-%d'),' transport:',transport,'Sv') + transport.append(trans1) + + return transport + 
+#----------------------------------------------- +# retrieve model data +#----------------------------------------------- +def get_model(dates,fcsts): + + transport={'dates':dates} + + + for fcst in fcsts: + transport[fcst]=calc_transport(dates,fcst) + + model=pd.DataFrame(transport) + model.index=model.dates + del model['dates'] + #del model['validDates'] + + print(model) + return model +#----------------------------------------------- +# coordinate rotation +#----------------------------------------------- +def rotate(u,v,phi): + # phi is in radians + u2 = u*math.cos(phi) + v*math.sin(phi) + v2 = -u*math.sin(phi) + v*math.cos(phi) + return u2,v2 + +#----------------------------------------------- +if __name__ == "__main__": + + want_date=vDate + DateSet=True + + fcst = int(os.environ.get('CALC_TRANSPORT_LEAD_TIME')) + no_of_fcst_stat_days = int(os.environ.get('CALC_TRANSPORT_STATS_DAY')) + + fcsts=list(range(fcst,fcst+1,24)) + + start_date=want_date + stop_date=want_date + cable=cable[:stop_date] + + # Count the number in the subdirs RTOFS dir + path, dirs, files = next(os.walk(rtofsdir)) + dir_count = len(dirs) + dir_count + + """ + Setup logging + """ + logfile = os.environ.get('CALC_TRANSPORT_LOG_FILE') + + + for end_date in pd.date_range(start_date,stop_date): + dates=pd.date_range(end=end_date,periods=dir_count) + model=get_model(dates,fcsts) + + both=pd.merge(cable,model,left_index=True,right_index=True,how='inner') + print("both :", both) + both=both[both.index.max()-timedelta(no_of_fcst_stat_days):] + + diff=both[fcst] - both.transport + bias=diff.mean() + rmse=mean_squared_error(both.transport,both[fcst])**0.5 + if both[fcst].mean() != 0.0: + scatter_index=100.0*(((diff**2).mean())**0.5 - bias**2)/both.transport.mean() + else: + scatter_index=np.nan + + corr=both[fcst].corr(both.transport) + +# print("BIAS :",bias, "RMSE :",rmse, "CORR :",corr, "SCATTER INDEX :",scatter_index) + + outdir = os.environ.get('OUTPUT_DIR') + + if not os.path.exists(outdir): 
+ print(f"Creating output directory: {outdir}") + os.makedirs(outdir) + + expected_file = os.path.join(outdir,logfile) + print(expected_file) + + with open(expected_file, 'w') as f: + print("BIAS :",bias, "RMSE :",rmse, "CORR :",corr, "SCATTER INDEX :",scatter_index, file=f) diff --git a/parm/use_cases/model_applications/medium_range/PointStat_fcstGFS_obsNAM_Sfc_MultiField_PrepBufr.conf b/parm/use_cases/model_applications/medium_range/PointStat_fcstGFS_obsNAM_Sfc_MultiField_PrepBufr.conf index bf86801b45..224833c82c 100644 --- a/parm/use_cases/model_applications/medium_range/PointStat_fcstGFS_obsNAM_Sfc_MultiField_PrepBufr.conf +++ b/parm/use_cases/model_applications/medium_range/PointStat_fcstGFS_obsNAM_Sfc_MultiField_PrepBufr.conf @@ -130,7 +130,7 @@ OBS_POINT_STAT_INPUT_DIR = {PB2NC_OUTPUT_DIR} POINT_STAT_OUTPUT_DIR = {OUTPUT_BASE}/{OBTYPE} [filename_templates] -PB2NC_INPUT_TEMPLATE = nam.{da_init?fmt=%Y%m%d}/nam.t{da_init?fmt=%2H}z.prepbufr.tm{offset?fmt=%2H} +PB2NC_INPUT_TEMPLATE = nam.{da_init?fmt=%Y%m%d}/nam.t{da_init?fmt=%H}z.prepbufr.tm{offset?fmt=%2H} PB2NC_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/nam.{valid?fmt=%Y%m%d%H}.nc diff --git a/parm/use_cases/model_applications/precipitation/PointStat_fcstMULTI_obsMETAR_PtypeComparisons.conf b/parm/use_cases/model_applications/precipitation/PointStat_fcstMULTI_obsMETAR_PtypeComparisons.conf index aaf0a32504..738d1c8d03 100644 --- a/parm/use_cases/model_applications/precipitation/PointStat_fcstMULTI_obsMETAR_PtypeComparisons.conf +++ b/parm/use_cases/model_applications/precipitation/PointStat_fcstMULTI_obsMETAR_PtypeComparisons.conf @@ -16,10 +16,10 @@ PB2NC_OFFSETS = 0, 12 PB2NC_SKIP_IF_OUTPUT_EXISTS = True PB2NC_INPUT_DIR = {INPUT_BASE}/model_applications/precipitation/PointStat_fcstMULTI_obsMETAR_PtypeComparisons -PB2NC_INPUT_TEMPLATE = nam.{valid?fmt=%Y%m%d}.t{valid?fmt=%2H}z.prepbufr.tm00 +PB2NC_INPUT_TEMPLATE = nam.{valid?fmt=%Y%m%d}.t{valid?fmt=%H}z.prepbufr.tm00 PB2NC_OUTPUT_DIR = 
{OUTPUT_BASE}/model_applications/precipitation -PB2NC_OUTPUT_TEMPLATE = nam.obsfile_sfc_prwe.{valid?fmt=%m%d%Y}_{valid?fmt=%2H}z.nc +PB2NC_OUTPUT_TEMPLATE = nam.obsfile_sfc_prwe.{valid?fmt=%m%d%Y}_{valid?fmt=%H}z.nc PB2NC_CONFIG_FILE = {PARM_BASE}/met_config/PB2NCConfig_wrapped diff --git a/parm/use_cases/model_applications/s2s/GridStat_SeriesAnalysis_fcstNMME_obsCPC_seasonal_forecast.conf b/parm/use_cases/model_applications/s2s/GridStat_SeriesAnalysis_fcstNMME_obsCPC_seasonal_forecast.conf index 160291463f..6a59ac2da9 100644 --- a/parm/use_cases/model_applications/s2s/GridStat_SeriesAnalysis_fcstNMME_obsCPC_seasonal_forecast.conf +++ b/parm/use_cases/model_applications/s2s/GridStat_SeriesAnalysis_fcstNMME_obsCPC_seasonal_forecast.conf @@ -70,7 +70,7 @@ FCST_IS_PROB = false # Increase verbosity of MET tools #LOG_MET_VERBOSITY=4 -GRID_STAT_OUTPUT_PREFIX = {MODEL}-hindcast_{CURRENT_OBS_NAME}_vs_{OBTYPE}_IC{init?fmt=%Y%b}_V{valid?fmt=%Y%2m%d} +GRID_STAT_OUTPUT_PREFIX = {MODEL}-hindcast_{CURRENT_OBS_NAME}_vs_{OBTYPE}_IC{init?fmt=%Y%b}_V{valid?fmt=%Y%m%d} # sets the desc variable in the SeriesAnalysis config file SERIES_ANALYSIS_DESC = hindcast @@ -137,12 +137,12 @@ SERIES_ANALYSIS_CLIMO_MEAN_INPUT_DIR = # format of filenames # FCST -FCST_GRID_STAT_INPUT_TEMPLATE = nmme_pr_hcst_{init?fmt=%b}IC_{valid?fmt=%2m}_*.nc +FCST_GRID_STAT_INPUT_TEMPLATE = nmme_pr_hcst_{init?fmt=%b}IC_{valid?fmt=%m}_*.nc # ANLYS OBS_GRID_STAT_INPUT_TEMPLATE = obs_cpc_pp.1x1.nc -BOTH_SERIES_ANALYSIS_INPUT_TEMPLATE = grid_stat_{MODEL}-hindcast_precip_vs_{OBTYPE}_IC{init?fmt=%Y%b}_V{valid?fmt=%Y%2m}01_*pairs.nc +BOTH_SERIES_ANALYSIS_INPUT_TEMPLATE = grid_stat_{MODEL}-hindcast_precip_vs_{OBTYPE}_IC{init?fmt=%Y%b}_V{valid?fmt=%Y%m}01_*pairs.nc SERIES_ANALYSIS_OUTPUT_TEMPLATE = series_analysis_{MODEL}_{OBTYPE}_stats_F{lead?fmt=%2m}_{instance?fmt=%s}.nc diff --git a/parm/use_cases/model_applications/s2s/UserScript_fcstGFS_obsERA_WeatherRegime.conf 
b/parm/use_cases/model_applications/s2s/UserScript_fcstGFS_obsERA_WeatherRegime.conf index d064182945..02f48837cb 100644 --- a/parm/use_cases/model_applications/s2s/UserScript_fcstGFS_obsERA_WeatherRegime.conf +++ b/parm/use_cases/model_applications/s2s/UserScript_fcstGFS_obsERA_WeatherRegime.conf @@ -179,9 +179,13 @@ FCST_WR_FREQ = {OBS_WR_FREQ} # These variables control reordering the forecast weather regime to match the # observations if their orders are different -# It is recommended to set this to False if this is the first time running the +# REORDER_FCST_MANUAL will use the order in FCST_ORDER, whereas REORDER_FCST will +# use a pattern correlation to reorder +# It is recommended to set REORDER_FCST_MANUAL to False if this is the first time running the # case REORDER_FCST = True +REORDER_FCST_MANUAL = False +#Order to use if REORDER_FCST_MANUAL = True; will be ignored if REORDER_FCST_MANUAL = False FCST_ORDER = 1,3,4,2,5,6 # Type, name and directory of Output File for weather regime classification @@ -213,9 +217,9 @@ FCST_KMEANS_PLOT_OUTPUT_NAME = fcst_kmeans KMEANS_PLOT_LEVELS = -80, -70, -60, -50, -40, -30, -20, -10, 0, 10, 20, 30, 40, 50, 60, 70, 80 # Frequency Plot title and output file name -OBS_FREQ_PLOT_TITLE = ERA Seasonal Cycle of WR Days/Week (1979-2017) +OBS_FREQ_PLOT_TITLE = ERA Seasonal Cycle of WR Days/Week (2000-2017) OBS_FREQ_PLOT_OUTPUT_NAME = obs_freq -FCST_FREQ_PLOT_TITLE = GFS Seasonal Cycle of WR Days/Week (1979-2017) +FCST_FREQ_PLOT_TITLE = GFS Seasonal Cycle of WR Days/Week (2000-2017) FCST_FREQ_PLOT_OUTPUT_NAME = fcst_freq # MPR file information diff --git a/parm/use_cases/model_applications/s2s/UserScript_obsERA_obsOnly_WeatherRegime/WeatherRegime_driver.py b/parm/use_cases/model_applications/s2s/UserScript_obsERA_obsOnly_WeatherRegime/WeatherRegime_driver.py index e91a3082af..66ddff0002 100755 --- a/parm/use_cases/model_applications/s2s/UserScript_obsERA_obsOnly_WeatherRegime/WeatherRegime_driver.py +++ 
b/parm/use_cases/model_applications/s2s/UserScript_obsERA_obsOnly_WeatherRegime/WeatherRegime_driver.py @@ -6,7 +6,7 @@ import warnings from metcalcpy.contributed.blocking_weather_regime.WeatherRegime import WeatherRegimeCalculation -from metcalcpy.contributed.blocking_weather_regime.Blocking_WeatherRegime_util import parse_steps, read_nc_met, write_mpr_file, reorder_fcst_regimes +from metcalcpy.contributed.blocking_weather_regime.Blocking_WeatherRegime_util import parse_steps, read_nc_met, write_mpr_file, reorder_fcst_regimes,reorder_fcst_regimes_correlate from metplotpy.contributed.weather_regime import plot_weather_regime as pwr @@ -16,7 +16,8 @@ def main(): if not steps_list_obs and not steps_list_fcst: warnings.warn('No processing steps requested for either the model or observations,') - warnings.warn('No data will be processed') + warnings.warn(' nothing will be run') + warnings.warn('Set FCST_STEPS and/or OBS_STEPS in the [user_env_vars] section to process data') ###################################################################### @@ -129,21 +130,22 @@ def main(): if ("KMEANS" in steps_list_obs): print('Running Obs K Means') kmeans_obs,wrnum_obs,perc_obs,wrc_obs= steps_obs.run_K_means(z500_detrend_2d_obs,timedict_obs,z500_obs.shape) + steps_obs.write_K_means_file(timedict_obs,wrc_obs) if ("KMEANS" in steps_list_fcst): print('Running Forecast K Means') kmeans_fcst,wrnum_fcst,perc_fcst,wrc_fcst = steps_fcst.run_K_means(z500_detrend_2d_fcst,timedict_fcst, z500_fcst.shape) - - if ("KMEANS" in steps_list_obs) and ("KMEANS" in steps_list_fcst): - # Check to see if reordering the data so that the weather regime patterns match between - # the forecast and observations, is needed - #TODO: make this automated based on spatial correlations reorder_fcst = os.environ.get('REORDER_FCST','False').lower() - fcst_order_str = os.environ['FCST_ORDER'].split(',') - fcst_order = [int(fo) for fo in fcst_order_str] - if reorder_fcst == 'true': + reorder_fcst_manual = 
os.environ.get('REORDER_FCST_MANUAL','False').lower() + if (reorder_fcst == 'true') and ("KMEANS" in steps_list_obs): + kmeans_fcst,perc_fcst,wrc_fcst = reorder_fcst_regimes_correlate(kmeans_obs,kmeans_fcst,perc_fcst,wrc_fcst,wrnum_fcst) + if reorder_fcst_manual == 'true': + fcst_order_str = os.environ['FCST_ORDER'].split(',') + fcst_order = [int(fo) for fo in fcst_order_str] kmeans_fcst,perc_fcst,wrc_fcst = reorder_fcst_regimes(kmeans_fcst,perc_fcst,wrc_fcst,wrnum_fcst,fcst_order) + steps_fcst.write_K_means_file(timedict_fcst,wrc_fcst) + # Write matched pair output for weather regime classification modname = os.environ.get('MODEL_NAME','GFS') @@ -177,9 +179,13 @@ def main(): if ("TIMEFREQ" in steps_list_obs): + if not ("KMEANS" in steps_list_obs): + raise Exception('Must run observed Kmeans before running frequencies.') wrfreq_obs,dlen_obs,ts_diff_obs = steps_obs.compute_wr_freq(wrc_obs) if ("TIMEFREQ" in steps_list_fcst): + if not ("KMEANS" in steps_list_fcst): + raise Exception('Must run forecast Kmeans before running frequencies.') wrfreq_fcst,dlen_fcst,ts_diff_fcst = steps_fcst.compute_wr_freq(wrc_fcst) if ("TIMEFREQ" in steps_list_obs) and ("TIMEFREQ" in steps_list_fcst):