diff --git a/.github/workflows/markdown-profile.yml b/.github/workflows/markdown-profile.yml
index 90b987f8f9e..55d17578508 100644
--- a/.github/workflows/markdown-profile.yml
+++ b/.github/workflows/markdown-profile.yml
@@ -52,12 +52,7 @@ jobs:
           cd $GITHUB_WORKSPACE/external/example_scripts/notebooks
           ls ./markdown
           echo "Move profile markdown examples"
-          mv ./markdown/profile/0._profile_introduction.md $GITHUB_WORKSPACE/site/content/en/docs/Examples/Notebooks/Profile/
-          mv ./markdown/profile/1._analysis_preprocess_en4.md $GITHUB_WORKSPACE/site/content/en/docs/Examples/Notebooks/Profile/
-          mv ./markdown/profile/2._analysis_extract_and_compare.md $GITHUB_WORKSPACE/site/content/en/docs/Examples/Notebooks/Profile/
-          mv ./markdown/profile/3._analysis_extract_and_compare_single_process_tutorial.md $GITHUB_WORKSPACE/site/content/en/docs/Examples/Notebooks/Profile/
-          mv ./markdown/profile/4._analysis_mask_means_tutorial.md $GITHUB_WORKSPACE/site/content/en/docs/Examples/Notebooks/Profile/
-          mv ./markdown/profile/5._analysis_average_into_grid_boxes.md $GITHUB_WORKSPACE/site/content/en/docs/Examples/Notebooks/Profile/
+          mv ./markdown/profile/introduction_to_profile_class.md $GITHUB_WORKSPACE/site/content/en/docs/Examples/Notebooks/Profile/
           mv ./markdown/profile/wod_bgc_ragged_example.md $GITHUB_WORKSPACE/site/content/en/docs/Examples/Notebooks/Profile/
           echo "Move images"
           rsync -r ./markdown_images/profile/ $GITHUB_WORKSPACE/site/static
diff --git a/content/en/docs/Examples/Notebooks/Profile/1._analysis_preprocess_en4.md b/content/en/docs/Examples/Notebooks/Profile/1._analysis_preprocess_en4.md
deleted file mode 100644
index 90441e7b523..00000000000
--- a/content/en/docs/Examples/Notebooks/Profile/1._analysis_preprocess_en4.md
+++ /dev/null
@@ -1,232 +0,0 @@
----
-    title: "1. analysis preprocess en4"
-    linkTitle: "1. analysis preprocess en4"
-    weight: 5
-
-    description: >
-        1. analysis preprocess en4 example.
----
-Script for processing raw EN4 data prior to analysis.
-See the docstring of `Profile.process_en4()` for more specifics on what it does.
-
-This script will just load modules, read in raw EN4 data, cut out a geographical box, call the processing routine and write the processed data to file.
-
-You don't have to do this for each EN4 file individually if you don't want to; you can read in multiple files by setting `multiple = True` when creating the Profile object. However, if analysing model data in parallel chunks, you may want to split the processing into smaller files to make the analysis faster.
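
A minimal sketch of the `multiple = True` route mentioned above (the monthly file names here are hypothetical, and exactly which input forms `read_en4` accepts — a list, a wildcard or a single string — should be checked against the `Profile` docstring):

```python
import coast

fn_cfg_prof = "./config/example_en4_profiles.json"

# Hypothetical list of raw monthly EN4 files to read in one go
fn_profs = [
    "./example_files/coast_example_en4_201008.nc",
    "./example_files/coast_example_en4_201009.nc",
]

profile = coast.Profile(config=fn_cfg_prof)
profile.read_en4(fn_profs, multiple=True)  # read and concatenate all files
```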
-
-
-```python
-import sys
-
-# IF USING A DEVELOPMENT BRANCH OF COAST, ADD THE REPOSITORY TO PATH:
-# sys.path.append('')
-import coast
-from datetime import datetime
-import pandas as pd
-```
-
-Create a Profile object and read in the raw EN4 data.
-
-
-```python
-profile = coast.Profile(config=fn_cfg_prof)
-profile.read_en4(fn_prof, multiple=multiple)
-```
-
-
-    ---------------------------------------------------------------------------
-
-    FileNotFoundError                         Traceback (most recent call last)
-
-    Cell In[4], line 1
-    ----> 1 profile = coast.Profile(config=fn_cfg_prof)
-          2 profile.read_en4(fn_prof, multiple=multiple)
-
-
-    File /usr/share/miniconda/envs/coast/lib/python3.8/site-packages/coast/data/profile.py:64, in Profile.__init__(self, dataset, config)
-         62 debug(f"Creating a new {get_slug(self)}")
-         63 self.config = config
-    ---> 64 super().__init__(self.config)
-         66 # If dataset is provided, put inside this object
-         67 if dataset is not None:
-
-
-    File /usr/share/miniconda/envs/coast/lib/python3.8/site-packages/coast/data/index.py:34, in Indexed.__init__(self, config)
-         32 if config:
-         33     print(config)
-    ---> 34 self.json_config = ConfigParser(config)
-         35 self.chunks = self.json_config.config.chunks
-         36 self.dim_mapping = self.json_config.config.dataset.dimension_map
-
-
-    File /usr/share/miniconda/envs/coast/lib/python3.8/site-packages/coast/data/config_parser.py:18, in ConfigParser.__init__(self, json_path)
-         12 def __init__(self, json_path: Union[Path, str]):
-         13     """Config parser constructor.
-         14
-         15     Args:
-         16         json_path (Union[Path, str]): path to json config file.
-         17     """
-    ---> 18 with open(json_path, "r") as j:
-         19     json_content = json.loads(j.read())
-         20 conf_type = ConfigTypes(json_content[ConfigKeys.TYPE])
-
-
-    FileNotFoundError: [Errno 2] No such file or directory: '/Users/jeff/gitHub/COAsT/config/example_en4_profiles.json'
-
-
-
-```python
-profile.dataset
-```
-
-
-    ---------------------------------------------------------------------------
-
-    NameError                                 Traceback (most recent call last)
-
-    Cell In[5], line 1
-    ----> 1 profile.dataset
-
-
-    NameError: name 'profile' is not defined
-
-
-Get geographical indices to extract.
-
-
-```python
-profile = profile.subset_indices_lonlat_box(longitude_bounds, latitude_bounds)
-```
-
-
-    ---------------------------------------------------------------------------
-
-    NameError                                 Traceback (most recent call last)
-
-    Cell In[6], line 1
-    ----> 1 profile = profile.subset_indices_lonlat_box(longitude_bounds, latitude_bounds)
-
-
-    NameError: name 'profile' is not defined
-
-
-
-```python
-profile.quick_plot()
-```
-
-
-    ---------------------------------------------------------------------------
-
-    NameError                                 Traceback (most recent call last)
-
-    Cell In[7], line 1
-    ----> 1 profile.quick_plot()
-
-
-    NameError: name 'profile' is not defined
-
-
-Cut out a time slice of the data.
-
-
-```python
-profile = profile.time_slice(date0=datetime(2010, 1, 1), date1=datetime(2010, 1, 20))
-```
-
-
-    ---------------------------------------------------------------------------
-
-    NameError                                 Traceback (most recent call last)
-
-    Cell In[8], line 1
-    ----> 1 profile = profile.time_slice(date0=datetime(2010, 1, 1), date1=datetime(2010, 1, 20))
-
-
-    NameError: name 'profile' is not defined
-
-
-Process the extracted data into a new processed Profile.
-
-
-```python
-processed_profile = profile.process_en4()
-```
-
-
-    ---------------------------------------------------------------------------
-
-    NameError                                 Traceback (most recent call last)
-
-    Cell In[9], line 1
-    ----> 1 processed_profile = profile.process_en4()
-
-
-    NameError: name 'profile' is not defined
-
-
-Sometimes the following line is needed to avoid an error:
-`processed_profile.dataset["time"] = ("id_dim", pd.to_datetime(processed_profile.dataset.time.values))`
-
-Write processed profiles to file.
-
-
-```python
-processed_profile.dataset.to_netcdf(fn_out)
-```
-
-
-    ---------------------------------------------------------------------------
-
-    NameError                                 Traceback (most recent call last)
-
-    Cell In[10], line 1
-    ----> 1 processed_profile.dataset.to_netcdf(fn_out)
-
-
-    NameError: name 'processed_profile' is not defined
-
-
-
-```python
-
-```
diff --git a/content/en/docs/Examples/Notebooks/Profile/2._analysis_extract_and_compare.md b/content/en/docs/Examples/Notebooks/Profile/2._analysis_extract_and_compare.md
deleted file mode 100644
index 94bbc1f54c4..00000000000
--- a/content/en/docs/Examples/Notebooks/Profile/2._analysis_extract_and_compare.md
+++ /dev/null
@@ -1,303 +0,0 @@
----
-    title: "2. analysis extract and compare"
-    linkTitle: "2. analysis extract and compare"
-    weight: 5
-
-    description: >
-        2. analysis extract and compare example.
----
-This script demonstrates how to use the Profile and Gridded objects to extract model profiles and do some comparisons with observed profiles.
-It will do a nearest neighbour extraction of model data (with time interpolation of your choice). It will then calculate differences between the model and obs, and average profiles and errors into surface and bottom layers.
-
-This script will result in six new files being written:
-- 1. extracted_profiles: Model data on model levels extracted at obs locs
-- 2. interpolated_profiles: Model data on ref depth levels
-- 3. interpolated_obs: Obs data on ref depth levels
-- 4. profile_errors: Differences between interpolated_profiles and _obs
-- 5. surface_data: Surface data and errors
-- 6. bottom_data: Bottom data and errors
-
-If you are dealing with very large datasets, you should take a look at the script `analysis_extract_and_compare_single_process_tutorial.ipynb`. This script demonstrates a single process that can be used to build a parallel scheme.
-
-This script can be used with COAsT example data. Please set:
-
-fn_dom = path.join('./example_files', "coast_example_nemo_domain.nc")
-fn_dat = path.join('./example_files', "coast_example_nemo_data.nc")
-dn_out = "./example_files"
-fn_prof = path.join('./example_files', "coast_example_en4_201008.nc")
-fn_cfg_nemo = path.join('./config', "example_nemo_grid_t.json")
-fn_cfg_prof = path.join('./config', "example_en4_profiles.json")
-
-
-```python
-import sys
-
-# IF USING A DEVELOPMENT BRANCH OF COAST, ADD THE REPOSITORY TO PATH:
-# sys.path.append('')
-import coast
-import xarray as xr
-import numpy as np
-import datetime
-from dateutil.relativedelta import relativedelta
-import os.path as path
-
-print("Modules loaded", flush=True)
-
-# Name of the run -- used mainly for naming output files
-run_name = "co7"
-```
-
-    Modules loaded
-
-
-Figure out what the date range is for this analysis process.
-
-
-```python
-start_date = datetime.datetime(2007, 1, 1)
-end_date = datetime.datetime(2010, 12, 1)
-print("Analysis Range: {0} -->> {1}".format(start_date.strftime("%Y%m%d"), end_date.strftime("%Y%m%d")), flush=True)
-```
-
-    Analysis Range: 20070101 -->> 20101201
-
-
-Depth averaging settings.
-
-
-```python
-ref_depth = np.concatenate((np.arange(1, 100, 2), np.arange(100, 300, 5), np.arange(300, 1000, 50)))
-surface_def = 5  # in metres
-bottom_height = [10, 30, 100]  # Use bottom heights of 10m, 30m and 100m for...
-bottom_thresh = [100, 500, np.inf]  # ...depths less than 100m, 500m and infinite
-```
-
-File paths (all) -- use the format suggestions in the comments.
-
-
-```python
-"""
-fn_dom = ""
-fn_dat = ""  # .format(run_name, start_date.year)
-dn_out = ""  # .format(run_name)
-fn_prof = ""
-fn_cfg_nemo = ""
-fn_cfg_prof = ""
-"""
-
-fn_dom = path.join('./example_files', "coast_example_nemo_domain.nc")
-fn_dat = path.join('./example_files', "coast_example_nemo_data.nc")
-dn_out = "./example_files"
-fn_prof = path.join('./example_files', "coast_example_en4_201008.nc")
-fn_cfg_nemo = path.join('./config', "example_nemo_grid_t.json")
-fn_cfg_prof = path.join('./config', "example_en4_profiles.json")
-```
-
-Create NEMO object and read in NEMO data, then extract the latitude and longitude arrays.
-
-
-```python
-print("Reading model data..", flush=True)
-nemo = coast.Gridded(fn_dat, fn_dom, multiple=True, config=fn_cfg_nemo)
-lon = nemo.dataset.longitude.values.squeeze()
-lat = nemo.dataset.latitude.values.squeeze()
-print("NEMO object created", flush=True)
-```
-
-    Reading model data..
-    NEMO object created
-
-
-Extract time indices between start and end dates.
-
-
-```python
-nemo = nemo.time_slice(start_date, end_date)
-```
-
-Create a landmask array -- important for obs_operator. We can get a landmask from bottom_level.
-
-
-```python
-nemo.dataset["landmask"] = nemo.dataset.bottom_level == 0
-nemo.dataset = nemo.dataset.rename({"depth_0": "depth"})
-print("Landmask calculated", flush=True)
-```
-
-    Landmask calculated
-
-
-Create EN4 Profile object.
-If you have not already processed the data:
-
-
-```python
-profile = coast.Profile(config=fn_cfg_prof)
-profile.read_en4(fn_prof)
-profile = profile.process_en4()
-```
-
-    ./config/example_en4_profiles.json
-
-
-If you have already processed the data, then instead use:
-`profile = coast.Profile(dataset=xr.open_dataset(fn_prof, chunks={"id_dim": 10000}))`
-
-
-```python
-print("Profile object created", flush=True)
-```
-
-    Profile object created
-
-
-Slice out the Profile times.
-
-
-```python
-profile = profile.time_slice(start_date, end_date)
-```
-
-Extract only the variables that we want.
-
-
-```python
-nemo.dataset = nemo.dataset[["temperature", "bathymetry", "bottom_level", "landmask"]]
-profile.dataset = profile.dataset[["potential_temperature", "practical_salinity", "depth"]]
-profile.dataset = profile.dataset.rename({"potential_temperature": "temperature", "practical_salinity": "salinity"})
-```
-
-Create Profile analysis object.
-
-
-```python
-profile_analysis = coast.ProfileAnalysis()
-```
-
-Interpolate model to obs using obs_operator().
-
-
-```python
-model_profiles = profile.obs_operator(nemo)
-print("Obs_operator successful.", flush=True)
-```
-
-    Obs_operator successful.
-
-
-Throw away profiles where the interpolation distance is larger than 5 km.
-
-
-```python
-keep_indices = model_profiles.dataset.interp_dist <= 5
-model_profiles = model_profiles.isel(id_dim=keep_indices)
-profile = profile.isel(id_dim=keep_indices)
-```
-
-Load the profiles (careful with memory).
-
-
-```python
-profile.dataset.load()
-print("Model interpolated to obs locations", flush=True)
-```
-
-    Model interpolated to obs locations
-
-
-Vertical interpolation of model profiles to obs depths.
-
-
-```python
-model_profiles_interp = profile_analysis.interpolate_vertical(model_profiles, profile, interp_method="linear")
-print("Model interpolated to obs depths", flush=True)
-```
-
-    Model interpolated to obs depths
-
-
-Vertical interpolation of model profiles to reference depths.
-
-
-```python
-model_profiles_interp = profile_analysis.interpolate_vertical(model_profiles_interp, ref_depth)
-model_profiles.dataset.to_netcdf(
-    dn_out
-    + "/extracted_profiles_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-model_profiles_interp.dataset.to_netcdf(
-    dn_out
-    + "/interpolated_profiles_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-print("Model interpolated to ref depths", flush=True)
-```
-
-    Model interpolated to ref depths
-
-
-Interpolation of obs profiles to reference depths.
-
-
-```python
-profile_interp = profile_analysis.interpolate_vertical(profile, ref_depth)
-profile_interp.dataset.to_netcdf(
-    dn_out + "/interpolated_obs_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-print("Obs interpolated to reference depths", flush=True)
-```
-
-    Obs interpolated to reference depths
-
-
-Difference between Model and Obs.
-
-
-```python
-differences = profile_analysis.difference(profile_interp, model_profiles_interp)
-differences.dataset.load()
-differences.dataset.to_netcdf(
-    dn_out + "/profile_errors_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-print("Calculated errors and written", flush=True)
-```
-
-    Calculated errors and written
-
-
-Surface values and errors.
-
-
-```python
-model_profiles_surface = profile_analysis.depth_means(model_profiles, [0, surface_def])
-obs_profiles_surface = profile_analysis.depth_means(profile, [0, surface_def])
-surface_errors = profile_analysis.difference(obs_profiles_surface, model_profiles_surface)
-surface_data = xr.merge(
-    (surface_errors.dataset, model_profiles_surface.dataset, obs_profiles_surface.dataset), compat="override"
-)
-surface_data.to_netcdf(
-    dn_out + "/surface_data_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-```
-
-Bottom values and errors.
-
-
-```python
-model_profiles_bottom = profile_analysis.bottom_means(model_profiles, bottom_height, bottom_thresh)
-obs_bathymetry = model_profiles.dataset["bathymetry"].values
-profile.dataset["bathymetry"] = (["id_dim"], obs_bathymetry)
-obs_profiles_bottom = profile_analysis.bottom_means(profile, bottom_height, bottom_thresh)
-bottom_errors = profile_analysis.difference(model_profiles_bottom, obs_profiles_bottom)
-bottom_data = xr.merge(
-    (bottom_errors.dataset, model_profiles_bottom.dataset, obs_profiles_bottom.dataset), compat="override"
-)
-bottom_data.to_netcdf(
-    dn_out + "/bottom_data_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-print("Bottom and surface data estimated and written", flush=True)
-print("DONE", flush=True)
-```
-
-    Bottom and surface data estimated and written
-    DONE
-
diff --git a/content/en/docs/Examples/Notebooks/Profile/3._analysis_extract_and_compare_single_process_tutorial.md b/content/en/docs/Examples/Notebooks/Profile/3._analysis_extract_and_compare_single_process_tutorial.md
deleted file mode 100644
index 688eb87ff61..00000000000
--- a/content/en/docs/Examples/Notebooks/Profile/3._analysis_extract_and_compare_single_process_tutorial.md
+++ /dev/null
@@ -1,241 +0,0 @@
----
-    title: "3. analysis extract and compare single process tutorial"
-    linkTitle: "3. analysis extract and compare single process tutorial"
-    weight: 5
-
-    description: >
-        3. analysis extract and compare single process tutorial example.
----
-This runs the same analysis as `analysis_extract_and_compare.py`; however, it
-does so in time blocks (multiples of months) to avoid memory problems, and cannot be run with the `example_files`.
-At the top of this file are three variables called min_date, end_date
-and freq_months. These are the dates between which this script will run an
-analysis, and the number of months to run per block. You must pass an index
-to this file at the command line, telling the script which month index to run.
-
-For example, with freq_months = 1: if the dates are between 20040101 and 20050101, then an index
-of 0 will run for the period 20040101 -> 20040201. An index of 4 will run
-for the period 20040501 -> 20040601.
-
-This script exists to be used as part of a parallel scheme on a platform like
-JASMIN, for example using a command interface such as jug. This script
-should be run on each process, being passed just a single index.
-
-If unedited, this script will output six files PER PROCESS to the output
-directory specified by dn_out:
-
-1) extracted_profiles: Model data on model levels extracted at obs locs
-2) interpolated_profiles: Model data on ref depth levels
-3) interpolated_obs: Obs data on ref depth levels
-4) profile_errors: Differences between interpolated_profiles and _obs
-5) surface_data: Surface data and errors
-6) bottom_data: Bottom data and errors
-
-The files can then be concatenated and given to an averaging routine such
-as `analysis_mask_means.py` or `analysis_average_into_grid.py`.
-
-### Import relevant packages
-
-```
-import sys
-import coast
-import xarray as xr
-import numpy as np
-import datetime
-from dateutil.relativedelta import relativedelta
-```
-
-### Define settings
-
-```
-index = 1
-# Name of the run -- used mainly for naming output files
-run_name = "co7"
-# Start and end dates for the analysis. The script will cut down model
-# and EN4 data to be within this range.
-min_date = datetime.datetime(2004, 1, 1)
-freq_months = 12
-end_date = datetime.datetime(2004, 3, 1)
-```
-
-### Figure out what the date range is for this analysis process
-
-```
-start_date = min_date + relativedelta(months=int(index * freq_months))
-end_date = start_date + relativedelta(months=int(freq_months))
-print("Analysis Range: {0} -->> {1}".format(start_date.strftime("%Y%m%d"), end_date.strftime("%Y%m%d")), flush=True)
-```
-
-### Set depth averaging settings
-
-```
-ref_depth = np.concatenate((np.arange(1, 100, 2), np.arange(100, 300, 5), np.arange(300, 1000, 50)))
-surface_def = 5  # in metres
-bottom_height = [10, 30, 100]  # Use bottom heights of 10m, 30m and 100m for...
-bottom_thresh = [100, 500, np.inf]  # ...depths less than 100m, 500m and infinite
-```
-
-### Set file paths
-
-```
-# define some file paths
-
-fn_dom = ""
-fn_dat = ""  # .format(run_name, start_date.year)
-dn_out = ""  # .format(run_name)
-fn_prof = ""
-fn_cfg_nemo = ""
-fn_cfg_prof = ""
-```
-
-### Create NEMO object and read in NEMO data.
-
-```
-nemo = coast.Gridded(fn_dat, fn_dom, multiple=True, config=fn_cfg_nemo)
-```
-
-### Extract latitude and longitude
-
-```
-lat = nemo.dataset.latitude.values.squeeze()
-lon = nemo.dataset.longitude.values.squeeze()
-```
-
-### Extract time indices between start and end dates
-
-```
-nemo = nemo.time_slice(start_date, end_date)
-```
-
-```
-nemo.dataset.temperature.values
-```
-
-### Create a landmask array
-This is important for obs_operator. We can get a landmask from bottom_level.
-
-```
-nemo.dataset["landmask"] = nemo.dataset.bottom_level == 0
-nemo.dataset = nemo.dataset.rename({"depth_0": "depth"})
-print("Landmask calculated", flush=True)
-```
-
-### Create EN4 Profile object
-
-```
-# CREATE EN4 PROFILE OBJECT containing processed data. We just need to
-# create a Profile object and place the data straight into its dataset
-profile = coast.Profile()
-profile.dataset = xr.open_dataset(fn_prof, chunks={"id_dim": 10000})
-profile = profile.time_slice(start_date, end_date)
-print("Profile object created", flush=True)
-```
-
-### Extract only the variables that we want
-
-```
-nemo.dataset = nemo.dataset[["temperature", "salinity", "bathymetry", "bottom_level", "landmask"]]
-profile.dataset = profile.dataset[["potential_temperature", "practical_salinity", "depth"]]
-profile.dataset = profile.dataset.rename({"potential_temperature": "temperature", "practical_salinity": "salinity"})
-```
-
-### Create Profile analysis object
-
-```
-profile_analysis = coast.ProfileAnalysis()
-```
-
-### Interpolate model to obs using obs_operator()
-
-```
-model_profiles = profile.obs_operator(nemo)
-print("Obs_operator successful.", flush=True)
-```
-
-### Throw away profiles where the interpolation distance is larger than 5 km.
-
-```
-keep_indices = model_profiles.dataset.interp_dist <= 5
-model_profiles = model_profiles.isel(id_dim=keep_indices)
-profile = profile.isel(id_dim=keep_indices)
-```
-
-### Load the profiles (careful with memory)
-
-```
-profile.dataset.load()
-print("Model interpolated to obs locations", flush=True)
-```
-
-### Vertical interpolation of model profiles to obs depths
-
-```
-model_profiles_interp = profile_analysis.interpolate_vertical(model_profiles, profile, interp_method="linear")
-print("Model interpolated to obs depths", flush=True)
-```
-
-### Vertical interpolation of model profiles to reference depths
-
-```
-model_profiles_interp = profile_analysis.interpolate_vertical(model_profiles_interp, ref_depth)
-model_profiles.dataset.to_netcdf(
-    dn_out
-    + "/extracted_profiles_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-model_profiles_interp.dataset.to_netcdf(
-    dn_out
-    + "/interpolated_profiles_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-print("Model interpolated to ref depths", flush=True)
-```
-
-### Interpolation of obs profiles to reference depths
-
-```
-profile_interp = profile_analysis.interpolate_vertical(profile, ref_depth)
-profile_interp.dataset.to_netcdf(
-    dn_out + "/interpolated_obs_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-print("Obs interpolated to reference depths", flush=True)
-```
-
-### Get difference between Model and Obs
-
-```
-differences = profile_analysis.difference(profile_interp, model_profiles_interp)
-differences.dataset.load()
-differences.dataset.to_netcdf(
-    dn_out + "/profile_errors_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-print("Calculated errors and written", flush=True)
-```
-
-### Get surface values and errors
-
-```
-model_profiles_surface = profile_analysis.depth_means(model_profiles, [0, surface_def])
-obs_profiles_surface = profile_analysis.depth_means(profile, [0, surface_def])
-surface_errors = profile_analysis.difference(obs_profiles_surface, model_profiles_surface)
-surface_data = xr.merge(
-    (surface_errors.dataset, model_profiles_surface.dataset, obs_profiles_surface.dataset), compat="override"
-)
-surface_data.to_netcdf(
-    dn_out + "/surface_data_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-```
-
-### Get bottom values and errors
-
-```
-model_profiles_bottom = profile_analysis.bottom_means(model_profiles, bottom_height, bottom_thresh)
-obs_bathymetry = model_profiles.dataset["bathymetry"].values
-profile.dataset["bathymetry"] = (["id_dim"], obs_bathymetry)
-obs_profiles_bottom = profile_analysis.bottom_means(profile, bottom_height, bottom_thresh)
-bottom_errors = profile_analysis.difference(model_profiles_bottom, obs_profiles_bottom)
-bottom_data = xr.merge(
-    (bottom_errors.dataset, model_profiles_bottom.dataset, obs_profiles_bottom.dataset), compat="override"
-)
-bottom_data.to_netcdf(
-    dn_out + "/bottom_data_{0}_{1}_{2}.nc".format(run_name, start_date.strftime("%Y%m"), end_date.strftime("%Y%m"))
-)
-```
diff --git a/content/en/docs/Examples/Notebooks/Profile/4._analysis_mask_means_tutorial.md b/content/en/docs/Examples/Notebooks/Profile/4._analysis_mask_means_tutorial.md
deleted file mode 100644
index 21e8574de09..00000000000
--- a/content/en/docs/Examples/Notebooks/Profile/4._analysis_mask_means_tutorial.md
+++ /dev/null
@@ -1,177 +0,0 @@
----
-    title: "4. analysis mask means tutorial"
-    linkTitle: "4. analysis mask means tutorial"
-    weight: 5
-
-    description: >
-        4. analysis mask means tutorial example.
----
-Tutorial to calculate mask means (regional means) of variables within a Profile object.
-
-Provide paths to five files:
-
-    fn_dom : NEMO domain file defining mask lon/lat.
-    fn_cfg_nemo : NEMO config file.
-    fn_cfg_prof : Profile config file.
-    fn_prof : Path to netCDF containing profile data.
-    fn_out : Path to netCDF output file.
-
-You can use this script with example files by setting:
-
-    fn_dom = path.join('./example_files', "coast_example_nemo_domain.nc")
-    fn_prof = path.join('./example_files', "coast_example_en4_201008.nc")
-    fn_cfg_nemo = path.join('./config', "example_nemo_grid_t.json")
-    fn_cfg_prof = path.join('./config', "example_en4_profiles.json")
-
-### Import relevant packages
-
-
-```python
-import coast
-import numpy as np
-from os import path
-```
-
-### Set filepaths to data and configuration
-
-
-```python
-"""
-fn_dom = ""
-fn_cfg_nemo = ""
-fn_cfg_prof = ""
-fn_prof = ""
-fn_out = ""
-"""
-
-fn_out = "./output.nc"
-fn_dom = path.join('./example_files', "coast_example_nemo_domain.nc")
-fn_prof = path.join('./example_files', "coast_example_en4_201008.nc")
-fn_cfg_nemo = path.join('./config', "example_nemo_grid_t.json")
-fn_cfg_prof = path.join('./config', "example_en4_profiles.json")
-```
-
-### Create NEMO object and read in NEMO data
-
-
-```python
-nemo = coast.Gridded(fn_domain=fn_dom, multiple=True, config=fn_cfg_nemo)
-```
-
-### Extract latitude and longitude array
-
-
-```python
-lon = nemo.dataset.longitude.values.squeeze()
-lat = nemo.dataset.latitude.values.squeeze()
-```
-
-### Create analysis object
-
-
-```python
-profile_analysis = coast.ProfileAnalysis()
-```
-
-### Make Profile object and read data
-
-
-```python
-profile = coast.Profile(config=fn_cfg_prof)
-profile.read_en4(fn_prof)
-```
-
-    ./config/example_en4_profiles.json
-
-
-### Make MaskMaker object and define Regional Masks
-
-
-```python
-# Make MaskMaker object
-mm = coast.MaskMaker()
-
-# Define Regional Masks
-regional_masks = []
-bath = nemo.dataset.bathymetry.values
-regional_masks.append(np.ones(lon.shape))
-regional_masks.append(mm.region_def_nws_north_sea(lon, lat, bath))
-regional_masks.append(mm.region_def_nws_outer_shelf(lon, lat, bath))
-regional_masks.append(mm.region_def_nws_english_channel(lon, lat, bath))
-regional_masks.append(mm.region_def_nws_norwegian_trench(lon, lat, bath))
-regional_masks.append(mm.region_def_kattegat(lon, lat, bath))
-regional_masks.append(mm.region_def_south_north_sea(lon, lat, bath))
-off_shelf = mm.region_def_off_shelf(lon, lat, bath)
-off_shelf[regional_masks[3].astype(bool)] = 0
-off_shelf[regional_masks[4].astype(bool)] = 0
-regional_masks.append(off_shelf)
-regional_masks.append(mm.region_def_irish_sea(lon, lat, bath))
-
-region_names = [
-    "whole_domain",
-    "north_sea",
-    "outer_shelf",
-    "eng_channel",
-    "nor_trench",
-    "kattegat",
-    "southern_north_sea",
-    "off_shelf",
-    "irish_sea",
-]
-
-mask_list = mm.make_mask_dataset(lon, lat, regional_masks)
-mask_indices = profile_analysis.determine_mask_indices(profile, mask_list)
-```
-
-
-    ---------------------------------------------------------------------------
-
-    AttributeError                            Traceback (most recent call last)
-
-    Cell In[7], line 8
-          6 bath = nemo.dataset.bathymetry.values
-          7 regional_masks.append(np.ones(lon.shape))
-    ----> 8 regional_masks.append(mm.region_def_nws_north_sea(lon, lat, bath))
-          9 regional_masks.append(mm.region_def_nws_outer_shelf(lon, lat, bath))
-         10 regional_masks.append(mm.region_def_nws_english_channel(lon, lat, bath))
-
-
-    AttributeError: 'MaskMaker' object has no attribute 'region_def_nws_north_sea'
-
-
-### Do mask averaging
-
-
-```python
-mask_means = profile_analysis.mask_means(profile, mask_indices)
-```
-
-
-    ---------------------------------------------------------------------------
-
-    NameError                                 Traceback (most recent call last)
-
-    Cell In[8], line 1
-    ----> 1 mask_means = profile_analysis.mask_means(profile, mask_indices)
-
-
-    NameError: name 'mask_indices' is not defined
-
-
-### Save mask dataset to file
-
-
-```python
-mask_means.to_netcdf(fn_out)
-```
-
-
-    ---------------------------------------------------------------------------
-
-    NameError                                 Traceback (most recent call last)
-
-    Cell In[9], line 1
-    ----> 1 mask_means.to_netcdf(fn_out)
-
-
-    NameError: name 'mask_means' is not defined
-
diff --git a/content/en/docs/Examples/Notebooks/Profile/5._analysis_average_into_grid_boxes.md b/content/en/docs/Examples/Notebooks/Profile/5._analysis_average_into_grid_boxes.md
deleted file mode 100644
index 43a2c69b429..00000000000
--- a/content/en/docs/Examples/Notebooks/Profile/5._analysis_average_into_grid_boxes.md
+++ /dev/null
@@ -1,115 +0,0 @@
----
-    title: "5. analysis average into grid boxes"
-    linkTitle: "5. analysis average into grid boxes"
-    weight: 5
-
-    description: >
-        5. analysis average into grid boxes example.
----
-Script showing the use of `ProfileAnalysis.average_into_grid_boxes()`. This routine
-takes all data in a Profile object and averages it into lat/lon grid boxes.
-
-This script can be used for comparing observed and modelled climatologies.
-It should be run AFTER the nearest profiles have been extracted from the model
-data, as shown in `analysis_extract_and_compare.py`.
-
-Input and output files should be provided as a list. If you only have
-one input file, then just enclose the string in [].
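
Conceptually, the routine assigns each profile to a lat/lon box and then averages the values falling inside each box. A rough NumPy sketch of that binning idea (the sample values are invented, and this is illustrative only, not the actual COAsT implementation):

```python
import numpy as np

# Hypothetical scattered profile positions, each with one surface temperature
obs_lon = np.array([-5.2, -4.8, 2.1])
obs_lat = np.array([50.1, 50.3, 54.9])
obs_tem = np.array([11.2, 11.6, 9.8])

grid_lon = np.arange(-15, 15, 0.5)
grid_lat = np.arange(45, 65, 0.5)

# Assign each observation to a grid box index...
ix = np.digitize(obs_lon, grid_lon) - 1  # box index along longitude
iy = np.digitize(obs_lat, grid_lat) - 1  # box index along latitude

# ...then accumulate sums and counts per box and divide for the box mean
sums = np.zeros((grid_lat.size, grid_lon.size))
counts = np.zeros_like(sums)
np.add.at(sums, (iy, ix), obs_tem)
np.add.at(counts, (iy, ix), 1.0)
box_mean = np.where(counts > 0, sums / np.maximum(counts, 1), np.nan)
```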
-
-### Relevant imports and filepath configuration
-
-
-```python
-import coast
-import numpy as np
-import xarray as xr
-import os
-from os import path
-
-# Input file(s) -- provide multiple files as a list
-fn_prof = path.join('./example_files', "coast_example_en4_201008.nc")
-fn_cfg_prof = path.join('./config', "example_en4_profiles.json")  # If needed
-fn_out = path.join('./example_files', 'mask_mean.nc')  # Name of output file (corresponding to the input), include ".nc"
-```
-
-Define longitude and latitude grid.
-
-
-```python
-grid_lon = np.arange(-15, 15, 0.5)
-grid_lat = np.arange(45, 65, 0.5)
-```
-
-### Load the data
-Load in data for averaging (e.g. surface data).
-
-
-```python
-prof_data = coast.Profile(config=fn_cfg_prof)
-prof_data.read_en4(fn_prof)
-profile_analysis = coast.ProfileAnalysis()
-```
-
-    ./config/example_en4_profiles.json
-
-
-Take just the data we want, so it is faster.
-
-
-```python
-prof_data.dataset = prof_data.dataset[["temperature", "practical_salinity"]]
-```
-
-### Process, merge and save
-
-Average all data across all seasons.
-
-
-```python
-prof_gridded = profile_analysis.average_into_grid_boxes(prof_data, grid_lon, grid_lat)
-```
-
-Average data for each season.
-
-
-```python
-prof_gridded_DJF = profile_analysis.average_into_grid_boxes(
-    prof_data, grid_lon, grid_lat, season="DJF", var_modifier="_DJF"
-)
-prof_gridded_MAM = profile_analysis.average_into_grid_boxes(
-    prof_data, grid_lon, grid_lat, season="MAM", var_modifier="_MAM"
-)
-prof_gridded_JJA = profile_analysis.average_into_grid_boxes(
-    prof_data, grid_lon, grid_lat, season="JJA", var_modifier="_JJA"
-)
-prof_gridded_SON = profile_analysis.average_into_grid_boxes(
-    prof_data, grid_lon, grid_lat, season="SON", var_modifier="_SON"
-)
-```
-
-Merge together.
-
-
-```python
-ds_prof_gridded = xr.merge(
-    (
-        prof_gridded.dataset,
-        prof_gridded_DJF.dataset,
-        prof_gridded_MAM.dataset,
-        prof_gridded_JJA.dataset,
-        prof_gridded_SON.dataset,
-    )
-)
-```
-
-Save to file.
-
-
-```python
-ds_prof_gridded.to_netcdf(fn_out)
-```
-
-
-```python
-
-```
diff --git a/content/en/docs/Examples/Notebooks/Profile/0._profile_introduction.md b/content/en/docs/Examples/Notebooks/Profile/introduction_to_profile_class.md
similarity index 99%
rename from content/en/docs/Examples/Notebooks/Profile/0._profile_introduction.md
rename to content/en/docs/Examples/Notebooks/Profile/introduction_to_profile_class.md
index 0847e7ad531..0116c2bc964 100644
--- a/content/en/docs/Examples/Notebooks/Profile/0._profile_introduction.md
+++ b/content/en/docs/Examples/Notebooks/Profile/introduction_to_profile_class.md
@@ -1,10 +1,10 @@
 ---
-    title: "0. profile introduction"
-    linkTitle: "0. profile introduction"
+    title: "Introduction to profile class"
+    linkTitle: "Introduction to profile class"
     weight: 5
 
     description: >
-        0. profile introduction example.
+        Introduction to profile class example.
 ---
 
 ## Example useage of Profile object.
diff --git a/content/en/docs/Reference/_index.md b/content/en/docs/Reference/_index.md
deleted file mode 100644
index c45eabe8aa5..00000000000
--- a/content/en/docs/Reference/_index.md
+++ /dev/null
@@ -1,7 +0,0 @@
----
-title: "Reference"
-linkTitle: "Reference"
-weight: 5
-description: >
-  Low level reference docs.
----
\ No newline at end of file