diff --git a/.gitignore b/.gitignore index d7a9a9f..27eaaaa 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,7 @@ test/dr.xml /test/run.out *.pyc *.pyc +pycallgraph_tree/ +dr2xml_*.xml +pycallgraph_use.py +tests/ \ No newline at end of file diff --git a/DR2xml.ipynb b/DR2xml.ipynb index f2d56cc..ef1a345 100644 --- a/DR2xml.ipynb +++ b/DR2xml.ipynb @@ -333,7 +333,7 @@ }, "outputs": [], "source": [ - " simulation_settings={ \n", + "simulation_settings={ \n", " #--- Dictionnary describing the necessary attributes for a given simulation\n", " #--- Warning : some lines are commented out in this example but should be \n", " #--- un-commented in some cases. See comments\n", diff --git a/Xparse.py b/Xparse.py index 10671f5..b936072 100644 --- a/Xparse.py +++ b/Xparse.py @@ -1,22 +1,29 @@ +#!/usr/bin/python # coding: utf-8 -# Whats' necessary for reading XIOS xml file and process attribute's inheritance for -# being able to request the grid_ref for any valid XIOS 'field' object +""" +Whats' necessary for reading XIOS xml file and process attribute's inheritance for +being able to request the grid_ref for any valid XIOS 'field' object -# Main useful functions : -# context = init_context(context_name,printout=False) -# grid = id2grid(field_id,context,printout=False) +Main useful functions : + context = init_context(context_name,printout=False) + grid = id2grid(field_id,context,printout=False) +""" + +from __future__ import print_function, division, absolute_import, unicode_literals + +from collections import OrderedDict import os import os.path -import re import sys -from xml_interface import get_root_of_xml_file, create_string_from_xml_element +# Interface to xml tools +from xml_interface import get_root_of_xml_file # Define for each object kind those attributes useful for grid inheritance -attributes = dict() +attributes = OrderedDict() attributes['field'] = ['grid_ref', 'field_ref'] attributes['field_definition'] = attributes['field'] attributes['field_group'] = 
attributes['field'] @@ -52,14 +59,14 @@ def read_src(elt, path_parse, printout=False, level=0, dont_read=[]): if skip: continue if printout: - print level * "\t" + "Reading %s" % filen + print(level * "\t" + "Reading %s" % filen) et = get_root_of_xml_file(filen) if printout: - print level * "\t" + "Reading %s, %s=%s" % (filen, et.tag, gattrib(et, 'id', 'no_id')) + print(level * "\t" + "Reading %s, %s=%s" % (filen, et.tag, gattrib(et, 'id', 'no_id'))) for el in et: if printout: - print (level + 1) * "\t" + "Storing %s in %s id=%s" % ( - el.tag, child.tag, gattrib(child, 'id', 'no_id')) + print((level + 1) * "\t" + "Storing %s in %s id=%s" % (el.tag, child.tag, + gattrib(child, 'id', 'no_id'))) child.append(el) for child in elt: # print level*"\t"+"Recursing on %s %s"%(child.tag,gattrib(child,'id','no_id')) @@ -67,6 +74,13 @@ def read_src(elt, path_parse, printout=False, level=0, dont_read=[]): def gattrib(e, attrib_name, default=None): + """ + Get the value of an attribute of an element. 
+ :param e: xml element + :param attrib_name: name of the attribute + :param default: default value if attribute is missing + :return: the value of the attribute or default + """ if attrib_name in e.attrib: return e.attrib[attrib_name] else: @@ -80,7 +94,7 @@ def merge_sons(elt, printout=False, level=0): """ toremove = [] # Using a dict with first instance of an elt for each tag (or tag+id) - bytag = dict() + bytag = OrderedDict() tags_to_merge = ['context', 'file_definition', 'field_definition', 'axis_definition', 'grid_definition', 'calendar', 'field', 'field_group', 'file_group'] @@ -106,7 +120,7 @@ def merge_sons(elt, printout=False, level=0): else: name = 'no_id' if printout: - print level * "\t" + "Moving %s %s content to %s" % (child.tag, name, tag) + print(level * "\t" + "Moving %s %s content to %s" % (child.tag, name, tag)) # # Move childs from secondary entry to first entry (brother) for sub in child: @@ -117,12 +131,12 @@ def merge_sons(elt, printout=False, level=0): toremove.append(child) for child in toremove: if printout: - print "removing one %s child : %s" % (`elt`, `child`) + print("removing one %s child : %s" % (repr(elt), repr(child))) elt.remove(child) # Recursion for child in elt: if printout: - print level * "\t" + "%s %s" % (child.tag, child.attrib.get('id', 'no_id')) + print(level * "\t" + "%s %s" % (child.tag, child.attrib.get('id', 'no_id'))) merge_sons(child, printout, level + 1) @@ -134,23 +148,23 @@ def solve_downward(attrib, elt, value=None, printout=False, level=0): for child in elt: value_down = value if printout: - print level * "\t" + " solving on " + `child`, + print(level * "\t" + " solving on " + repr(child),) if attrib in attributes.get(child.tag, []): if attrib not in child.attrib: if value is not None: child.attrib[attrib] = value if printout: - print " set :" + value + print(" set :" + value) else: if printout: - print " pass" + print(" pass") else: value_down = child.attrib[attrib] if printout: - print " get :" + value_down 
+ print(" get :" + value_down) else: if printout: - print + print() solve_downward(attrib, child, value_down, printout, level + 1) @@ -161,15 +175,15 @@ def make_index(elt, index=None, printout=False, level=0): crossing their id multiple times """ if index is None: - index = dict() + index = OrderedDict() for child in elt: if 'id' in child.attrib: the_id = child.attrib['id'] if printout: - print level * "\t" + " indexing " + the_id, + print(level * "\t" + " indexing " + the_id,) if the_id in index: if printout: - print " (merging)" + print(" (merging)") # Update indexed object with current attributes for a in child.attrib: index[the_id].attrib[a] = child.attrib[a] @@ -178,7 +192,7 @@ def make_index(elt, index=None, printout=False, level=0): index[the_id].append(sub) else: if printout: - print " init index" + print(" init index") index[the_id] = child # else: # if printout : print @@ -196,13 +210,13 @@ def attrib_by_ref(elt, attrib, index, printout, level): if '_ref' in a: refid = elt.attrib[a] if printout: - print "\n" + (level + 1) * "\t" + a + " -> " + refid, + print("\n" + (level + 1) * "\t" + a + " -> " + refid,) try: ref = index[refid] if attrib in ref.attrib: rep = ref.attrib[attrib] if printout: - print " ---> !! GOT : " + rep + " !!!" + print(" ---> !! 
GOT : " + rep + " !!!") return rep else: rep = attrib_by_ref(ref, attrib, index, printout, level + 1) @@ -210,7 +224,7 @@ def attrib_by_ref(elt, attrib, index, printout, level): return rep except: if not refid.startswith("dummy_"): - print "Error : reference '%s' is invalid" % refid + print("Error : reference '%s' is invalid" % refid) sys.exit(1) @@ -220,13 +234,13 @@ def solve_by_ref(attrib, index, elt, printout=False, level=0): """ got_one = 0 for child in elt: - if type(child) != type('') and child.tag != 'variable': + if not isinstance(child, str) and child.tag != 'variable': if 'id' in child.attrib: name = child.attrib['id'] else: - name = `child` + name = repr(child) if printout: - print level * "\t" + attrib + " by_ref on " + name, + print(level * "\t" + attrib + " by_ref on " + name,) # if child.tag in attributes and attrib in attributes[child.tag]: if attrib not in child.attrib: @@ -237,31 +251,44 @@ def solve_by_ref(attrib, index, elt, printout=False, level=0): got_one = got_one + 1 else: if printout: - print + print() else: if printout: - print ", already set : %s" % child.attrib[attrib] + print(", already set : %s" % child.attrib[attrib]) got = solve_by_ref(attrib, index, child, printout, level + 1) got_one = got_one + got else: if printout: - print " : N/A" + print(" : N/A") return got_one def select_context(rootel, context_id): + """ + Find the context corresponding to context_id + :param rootel: root of xml element + :param context_id: id of the context to find + :return: context corresponding to context_id in rootel + """ for context in rootel: if 'id' in context.attrib and context.attrib['id'] == context_id: return context def init_context(context_id, path_parse="./", printout=False): + """ + Create the index for xml elements + :param context_id: id of the context of the index + :param path_parse: directory of the xml iodef + :param printout: boolean to active verbose log + :return: the index of the context + """ xmldef = path_parse + "iodef.xml" 
if printout: - print "Parsing %s ..." % xmldef, + print("Parsing %s ..." % xmldef,) rootel = get_root_of_xml_file(xmldef) if printout: - print "sourcing files ...", + print("sourcing files ...",) read_src(rootel, path_parse, printout=printout, dont_read=["dr2xml_"]) merge_sons(rootel, printout) rootel = select_context(rootel, context_id) @@ -275,16 +302,23 @@ def init_context(context_id, path_parse="./", printout=False): while True: n = solve_by_ref(ref, index, rootel, printout) if printout: - print "%d refs solved" % n + print("%d refs solved" % n) if n == 0: break # ET.dump(rootel) return index else: - print "Xparse::init_context : context %s not found in %s" % (context_id, xmldef) + print("Xparse::init_context : context %s not found in %s" % (context_id, xmldef)) def id2gridid(field_id, index, printout=False): + """ + Call to id2grid and get "id" parameter + :param field_id: id of the field + :param index: index of the xml elements + :param printout: boolean to active verbose log + :return: the id of the grid corresponding to the entry parameters. 
+ """ grid = id2grid(field_id, index, printout=printout) return grid.attrib['id'] @@ -299,7 +333,7 @@ def id2grid(field_id, index, printout=False): grid_ref_field_id = attrib['grid_ref'] if grid_ref_field_id in index: if printout: - print "grid_ref value for %s is %s" % (grid_ref_field_id, `index[grid_ref_field_id]`) + print("grid_ref value for %s is %s" % (grid_ref_field_id, repr(index[grid_ref_field_id]))) return index[grid_ref_field_id] else: # if printout: print("field %s grid reference is %s @@ -323,7 +357,7 @@ def idHasExprWithAt(field_id, index, printout=False): attrib = index[field_id].attrib if 'expr' in attrib: if printout: - print "In withAt, for %s, expr=%s" % (field_id, attrib['expr']) + print("In withAt, for %s, expr=%s" % (field_id, attrib['expr'])) return '@' in attrib['expr'] else: # if printout : print "In withAt, for %s, no expr"%(field_id) @@ -336,26 +370,29 @@ def idHasExprWithAt(field_id, index, printout=False): if False: nemo = init_context('nemo', "./", False) - # print nemo.keys() + # print list(nemo) grid = id2grid("CMIP6_O18sw", nemo, True) - print grid.attrib['id'] - print + print(grid.attrib['id']) + print() arpsfx = init_context('arpsfx', "./", False) grid = id2grid("CMIP6_cdnc", arpsfx, True) # grid=None if grid is not None: # print "Grid id is :"+grid.attrib['id'] - print create_string_from_xml_element(grid) + print(create_string_from_xml_element(grid)) grid_string = create_string_from_xml_element(grid) new_grid_string = re.sub('axis_ref= *.([\w_])*.', 'axis_ref="axis_autre"', grid_string) - print new_grid_string + print(new_grid_string) class Xparse_error(Exception): + """ + Xparse exceptions class. + """ def __init__(self, valeur): self.valeur = valeur def __str__(self): - return `self.valeur` + return repr(self.valeur) diff --git a/Xwrite.py b/Xwrite.py index f9b01f7..822e134 100644 --- a/Xwrite.py +++ b/Xwrite.py @@ -4,47 +4,82 @@ """ XIOS writing files tools. 
""" + +from __future__ import print_function, division, absolute_import, unicode_literals + +# To access reduce function in python3 +from functools import reduce +# To have ordered dictionaries +from collections import OrderedDict + import json import re import datetime +# Utilities +from utils import dr2xml_error -from Xparse import id2gridid, idHasExprWithAt -from cfsites import cfsites_domain_id, add_cfsites_in_defs +# Global variables and configuration tools from config import get_config_variable + +# Interface to settings dictionaries from settings_interface import get_variable_from_lset_with_default, get_variable_from_lset_without_default, \ get_variable_from_sset_with_default, get_source_id_and_type, get_variable_from_sset_without_default, \ - get_variable_from_sset_else_lset_with_default, is_key_in_lset, is_key_in_sset -from vars_cmor import ping_alias + get_variable_from_sset_else_lset_with_default, is_key_in_lset, is_key_in_sset, get_lset_iteritems, \ + get_sset_iteritems +# Interface to Data Request from dr_interface import get_DR_version -from grids import change_domain_in_grid, change_axes_in_grid, get_grid_def_with_lset -from postprocessing import process_vertical_interpolation, process_zonal_mean, process_diurnal_cycle + +# Settings tools from analyzer import DRgrid2gridatts, analyze_cell_time_method, freq2datefmt, longest_possible_period, \ Cmip6Freq2XiosFreq -from file_splitting import split_frequency_for_variable -from utils import dr2xml_error + +# CFsites tools +from cfsites import cfsites_domain_id, add_cfsites_in_defs, cfsites_grid_id, cfsites_input_filedef + +# Tools to deal with ping files +from pingfiles_interface import check_for_file_input + +# Grids tools +from grids import change_domain_in_grid, change_axes_in_grid, get_grid_def_with_lset, create_standard_domains + +# Variables tools +from vars_cmor import ping_alias from vars_home import get_simplevar -from vars_selection import get_sc, endyear_for_CMORvar +from vars_selection import 
get_sc, endyear_for_CMORvar, get_grid_choice + +# Post-processing tools +from postprocessing import process_vertical_interpolation, process_zonal_mean, process_diurnal_cycle + +# XIOS tools +from Xparse import id2gridid, idHasExprWithAt + +# File splitting tools +from file_splitting import split_frequency_for_variable warnings_for_optimisation = [] def wr(out, key, dic_or_val=None, num_type="string", default=None): - if not get_variable_from_lset_with_default("print_variables", True): - return """ - Short cut for a repetitive pattern : writing in 'out' + Short cut for a repetitive pattern : writing in 'out' a string variable name and value - If dic_or_val is not None - if dic_or_val is a dict, - if key is in value is dic_or_val[key], + If dic_or_val is not None + if dic_or_val is a dict, + if key is in value is dic_or_val[key], otherwise use default as value , except if default is False otherwise, use arg dic_or_val as value if not None nor False, otherwise use value of local variable 'key' """ + print_variables = get_variable_from_lset_with_default("print_variables", True) + if not print_variables: + return + elif isinstance(print_variables, list) and key not in print_variables: + return + val = None - if type(dic_or_val) == type({}): + if isinstance(dic_or_val, (dict, OrderedDict)): if key in dic_or_val: val = dic_or_val[key] else: @@ -52,12 +87,12 @@ def wr(out, key, dic_or_val=None, num_type="string", default=None): if default is not False: val = default else: - print 'error : %s not in dic and default is None' % key + print('error : %s not in dic and default is None' % key) else: if dic_or_val is not None: val = dic_or_val else: - print 'error in wr, no value provided for %s' % key + print('error in wr, no value provided for %s' % key) if val: if num_type == "string": # val=val.replace(">",">").replace("<","<").replace("&","&").replace("'","&apos").replace('"',""").strip() @@ -69,11 +104,11 @@ def wr(out, key, dic_or_val=None, num_type="string", default=None): 
out.write(' \n') -def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, - field_defs, axis_defs, grid_defs, domain_defs, scalar_defs, file_defs, - dummies, skipped_vars_per_table, actually_written_vars, - prefix, context, grid, pingvars=None, enddate=None, - attributes=[], debug=[]): +def write_xios_file_def_for_svar(sv, year, table, lset, sset, out, cvspath, + field_defs, axis_defs, grid_defs, domain_defs, scalar_defs, file_defs, + dummies, skipped_vars_per_table, actually_written_vars, + prefix, context, grid, pingvars=None, enddate=None, + attributes=[], debug=[]): """ Generate an XIOS file_def entry in out for : - a dict for laboratory settings @@ -133,7 +168,7 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, else: alias = get_variable_from_lset_without_default("ping_variables_prefix") + "tau_stress" if sv.label in debug: - print "write_xios_file_def ... processing %s, alias=%s" % (sv.label, alias) + print("write_xios_file_def_for_svar ... processing %s, alias=%s" % (sv.label, alias)) # suppression des terminaisons en "Clim" pour l'alias : elles concernent uniquement les cas # d'absence de variation inter-annuelle sur les GHG. Peut-etre genant pour IPSL ? 
@@ -190,8 +225,17 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, if grid == "": # either native or close-to-native grid_choice = get_variable_from_lset_without_default('grid_choice', source_id) - grid_label, target_hgrid_id, zgrid_id, grid_resolution, grid_description = \ - get_variable_from_lset_without_default('grids', grid_choice, context) + if sv.type == "dev": + grid_ref = sv.description.split('|')[1] + if grid_ref == "native": + grid_label, target_hgrid_id, zgrid_id, grid_resolution, grid_description = \ + get_variable_from_lset_without_default('grids_dev', sv.label, grid_choice, context) + else: + grid_label, target_hgrid_id, zgrid_id, grid_resolution, grid_description = \ + get_variable_from_lset_without_default('grids', grid_choice, context) + else: + grid_label, target_hgrid_id, zgrid_id, grid_resolution, grid_description = \ + get_variable_from_lset_without_default('grids', grid_choice, context) else: if grid == 'cfsites': target_hgrid_id = cfsites_domain_id @@ -199,7 +243,8 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, else: target_hgrid_id = get_variable_from_lset_without_default("ping_variables_prefix") + grid zgrid_id = "TBD : Should create zonal grid for CMIP6 standard grid %s" % grid - grid_label, grid_resolution, grid_description = DRgrid2gridatts(grid) + grid_label, grid_resolution, grid_description = DRgrid2gridatts(grid, is_dev=(grid == "native" and + sv.type == "dev")) if table[-1:] == "Z": # e.g. 
'AERmonZ','EmonZ', 'EdayZ' grid_label += "z" @@ -247,13 +292,13 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, elif is_key_in_lset("parent_activity_id"): parent_activity_id = get_variable_from_lset_without_default("parent_activity_id") else: - parent_activity_id= get_variable_from_sset_with_default("activity_id", exp_entry["parent_activity_id"]) - if type(parent_activity_id) == type([]): + parent_activity_id = get_variable_from_sset_with_default("activity_id", exp_entry["parent_activity_id"]) + if isinstance(parent_activity_id, list) and len(parent_activity_id) > 1: parent_activity_id = reduce(lambda x, y: x+" "+y, parent_activity_id) - parent_experiment_id = \ - get_variable_from_sset_else_lset_with_default("parent_experiment_id", - default=reduce(lambda x, y: - x+" "+y, exp_entry['parent_experiment_id'])) + parent_experiment_id = get_variable_from_sset_else_lset_with_default("parent_experiment_id", + default=exp_entry['parent_experiment_id']) + if isinstance(parent_experiment_id, list) and len(parent_experiment_id) > 1: + parent_experiment_id = reduce(lambda x, y: x+" "+y, parent_experiment_id) required_components = exp_entry['required_model_components'] # .split(" ") allowed_components = exp_entry['additional_allowed_model_components'] # .split(" ") # @@ -263,13 +308,13 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, for c in required_components: if c not in actual_components: ok = False - print "Model component %s is required by CMIP6 CV for experiment %s and not present (present=%s)" % \ - (c, experiment_id, `actual_components`) + print("Model component %s is required by CMIP6 CV for experiment %s and not present (present=%s)" % + (c, experiment_id, repr(actual_components))) for c in actual_components: if c not in allowed_components and c not in required_components: ok = False or get_variable_from_sset_with_default('bypass_CV_components', False) - print "Warning: Model component %s is present but not required nor 
allowed (%s)" % \ - (c, `allowed_components`) + print("Warning: Model component %s is present but not required nor allowed (%s)" % + (c, repr(allowed_components))) if not ok: raise dr2xml_error("Issue with model components") # @@ -285,6 +330,7 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, if "fx" in sv.frequency: filename = "%s%s_%s_%s_%s_%s_%s" % \ (prefix, sv.label, table, source_id, expid_in_filename, member_id, grid_label) + varname_for_filename = "%s%s" % (prefix, sv.label) else: varname_for_filename = sv.mipVarLabel if get_variable_from_lset_with_default('use_cmorvar_label_in_filename', False): @@ -301,6 +347,12 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, filename = "%s%s_%s_%s_%s_%s_%s_%s%s" % \ (prefix, varname_for_filename, table, source_id, expid_in_filename, member_id, grid_label, date_range, suffix) + # Create an other file which will contain the list of file names of perso and dev variables + list_perso_and_dev_file_name = "dr2xml_list_perso_and_dev_file_names" + if sv.type in ["perso", "dev"]: + with open(list_perso_and_dev_file_name, mode="a") as list_perso_and_dev: + list_perso_and_dev.write("*%s_%s_%s_%s_%s_%s*\n" % (varname_for_filename, table, source_id, expid_in_filename, + member_id, grid_label)) # if not (is_key_in_lset('mip_era') or is_key_in_sset("mip_era")): further_info_url = "https://furtherinfo.es-doc.org/%s.%s.%s.%s.%s.%s" % ( @@ -367,8 +419,8 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, else: # Use requestItems-based end date as the latest possible date when it is earlier than run end date if sv.label in debug: - print "split_last_date year %d derived from DR for variable %s in table %s " \ - "for year %d" % (lastyear, sv.label, table, year) + print("split_last_date year %d derived from DR for variable %s in table %s for year %d" % + (lastyear, sv.label, table, year)) endyear = "%04d" % (lastyear + 1) if lastyear < 1000: dr2xml_error( @@ -390,7 +442,7 @@ def 
write_xios_file_def(sv, year, table, lset, sset, out, cvspath, # (lset['source_id'],sset['experiment_id'],sset.get('project',"CMIP6"))) out.write(' >\n') # - if type(activity_id) == type([]): + if isinstance(activity_id, list): activity_idr = reduce(lambda x, y: x + " " + y, activity_id) else: activity_idr = activity_id @@ -444,8 +496,8 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, wr(out, 'grid', grid_description) wr(out, 'grid_label', grid_label) wr(out, 'nominal_resolution', grid_resolution) - comment = get_variable_from_lset_with_default('comment', '') + " " + \ - get_variable_from_sset_with_default('comment', '') + dynamic_comment + comment = get_variable_from_lset_with_default('comment', '') +\ + " " + get_variable_from_sset_with_default('comment', '') + dynamic_comment wr(out, 'comment', comment) wr(out, 'history', sset, default='none') wr(out, "initialization_index", initialization_index, num_type="int") @@ -528,7 +580,7 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, raise dr2xml_error("Fatal: source for %s not found in CMIP6_CV at %s, nor in lset" % (source_id, cvspath)) wr(out, 'source', source) wr(out, 'source_id', source_id) - if type(source_type) == type([]): + if isinstance(source_type, list): source_type = reduce(lambda x, y: x + " " + y, source_type) wr(out, 'source_type', source_type) # @@ -556,10 +608,10 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, if variant_info != "": wr(out, "variant_info", variant_info) wr(out, "variant_label", variant_label) - for name, value in attributes: + for name, value in sorted(list(attributes)): wr(out, name, value) - non_stand_att = get_variable_from_lset_with_default("non_standard_attributes", dict()) - for name in non_stand_att: + non_stand_att = get_variable_from_lset_with_default("non_standard_attributes", OrderedDict()) + for name in sorted(list(non_stand_att)): wr(out, name, non_stand_att[name]) # # 
-------------------------------------------------------------------- @@ -581,29 +633,29 @@ def write_xios_file_def(sv, year, table, lset, sset, out, cvspath, # get_DR_version() psol_field = create_xios_aux_elmts_defs(sv_psol, get_variable_from_lset_without_default("ping_variables_prefix") - + "ps", table, field_defs, - axis_defs, grid_defs, domain_defs, scalar_defs, dummies, context, - target_hgrid_id, zgrid_id, pingvars) + + "ps", table, field_defs, axis_defs, grid_defs, domain_defs, + scalar_defs, dummies, context, target_hgrid_id, zgrid_id, pingvars) out.write(psol_field) else: - print "Warning: Cannot complement model levels with psol for variable %s and table %s" % \ - (sv.label, sv.frequency) + print("Warning: Cannot complement model levels with psol for variable %s and table %s" % + (sv.label, sv.frequency)) # - names = {} - if sv.spatial_shp == 'XY-A' or sv.spatial_shp == 'S-A': + names = OrderedDict() + if sv.spatial_shp in ['XY-A', 'S-A']: # add entries for auxilliary variables : ap, ap_bnds, b, b_bnds - names = {"ap": "vertical coordinate formula term: ap(k)", - "ap_bnds": "vertical coordinate formula term: ap(k+1/2)", - "b": "vertical coordinate formula term: b(k)", - "b_bnds": "vertical coordinate formula term: b(k+1/2)"} - if sv.spatial_shp == 'XY-AH' or sv.spatial_shp == 'S-AH': + names["ap"] = "vertical coordinate formula term: ap(k)" + names["ap_bnds"] = "vertical coordinate formula term: ap(k+1/2)" + names["b"] = "vertical coordinate formula term: b(k)" + names["b_bnds"] = "vertical coordinate formula term: b(k+1/2)" + elif sv.spatial_shp in ['XY-AH', 'S-AH']: # add entries for auxilliary variables : ap, ap_bnds, b, b_bnds - names = {"ahp": "vertical coordinate formula term: ap(k)", - "ahp_bnds": "vertical coordinate formula term: ap(k+1/2)", - "bh": "vertical coordinate formula term: b(k)", - "bh_bnds": "vertical coordinate formula term: b(k+1/2)"} - for tab in names: + names["ahp"] = "vertical coordinate formula term: ap(k)" + names["ahp_bnds"] 
= "vertical coordinate formula term: ap(k+1/2)" + names["bh"] = "vertical coordinate formula term: b(k)" + names["bh_bnds"] = "vertical coordinate formula term: b(k+1/2)" + + for tab in list(names): out.write('\t\n' % (get_variable_from_lset_without_default("ping_variables_prefix"), tab, tab.replace('h', ''), names[tab])) @@ -639,7 +691,7 @@ def create_xios_aux_elmts_defs(sv, alias, table, field_defs, axis_defs, grid_def # Build XIOS axis elements (stored in axis_defs) # Proceed with vertical interpolation if needed # --- - # Build XIOS auxilliary field elements (stored in field_defs) + # Build XIOS auxiliary field elements (stored in field_defs) # -------------------------------------------------------------------- ssh = sv.spatial_shp prefix = get_variable_from_lset_without_default("ping_variables_prefix") @@ -657,8 +709,14 @@ def create_xios_aux_elmts_defs(sv, alias, table, field_defs, axis_defs, grid_def else: (grid_id, grid_ref) = sv.description.split("|") sv.description = None - field_defs[alias_ping] = ' %s ' % (name, num_type, value) + \ @@ -1087,3 +1168,113 @@ def make_source_string(sources, source_id): if description != "none": rep = rep + "\n" + realm + ": " + description return rep + + +def write_xios_file_def(filename, svars_per_table, year, lset, sset, cvs_path, field_defs, axis_defs, grid_defs, + scalar_defs, file_defs, dummies, skipped_vars_per_table, actually_written_vars, prefix, context, + pingvars=None, enddate=None, attributes=[]): + """ + Write XIOS file_def. 
+ """ + # -------------------------------------------------------------------- + # Start writing XIOS file_def file: + # file_definition node, including field child-nodes + # -------------------------------------------------------------------- + with open(filename, "w") as out: + out.write(' \n' % context) + out.write(' \n' % get_DR_version()) + out.write(' \n' % "??") + out.write(' \n' % get_config_variable("CMIP6_conventions_version")) + out.write(' \n' % get_config_variable("version")) + out.write('\n') + out.write('\n') + out.write(' \n' % year) + # + domain_defs = OrderedDict() + # for table in ['day'] : + out.write('\n \n') + foo, sourcetype = get_source_id_and_type() + for table in sorted(list(svars_per_table)): + count = OrderedDict() + for svar in sorted(svars_per_table[table], key=lambda x: (x.label + "_" + table)): + if get_variable_from_lset_with_default("allow_duplicates_in_same_table", False) \ + or svar.mipVarLabel not in count: + if not get_variable_from_lset_with_default("use_cmorvar_label_in_filename", False) \ + and svar.mipVarLabel in count: + form = "If you really want to actually produce both %s and %s in table %s, " + \ + "you must set 'use_cmorvar_label_in_filename' to True in lab settings" + raise dr2xml_error(form % (svar.label, count[svar.mipVarLabel].label, table)) + count[svar.mipVarLabel] = svar + for grid in svar.grids: + a, hgrid, b, c, d = get_variable_from_lset_without_default('grids', get_grid_choice(), context) + check_for_file_input(svar, hgrid, pingvars, field_defs, grid_defs, domain_defs, file_defs) + write_xios_file_def_for_svar(svar, year, table, lset, sset, out, cvs_path, + field_defs, axis_defs, grid_defs, domain_defs, scalar_defs, file_defs, + dummies, skipped_vars_per_table, actually_written_vars, + prefix, context, grid, pingvars, enddate, attributes) + else: + print("Duplicate variable %s,%s in table %s is skipped, preferred is %s" % + (svar.label, svar.mipVarLabel, table, count[svar.mipVarLabel].label)) + + if 
cfsites_grid_id in grid_defs: + out.write(cfsites_input_filedef()) + for file_def in file_defs: + out.write(file_defs[file_def]) + out.write('\n \n') + # + # -------------------------------------------------------------------- + # End writing XIOS file_def file: + # field_definition, axis_definition, grid_definition + # and domain_definition auxilliary nodes + # -------------------------------------------------------------------- + # Write all domain, axis, field defs needed for these file_defs + out.write(' \n') + if get_variable_from_lset_with_default("nemo_sources_management_policy_master_of_the_world", False) \ + and context == 'nemo': + out.write('\n') + for obj in list(field_defs): + out.write("\t" + field_defs[obj] + "\n") + if get_variable_from_lset_with_default("nemo_sources_management_policy_master_of_the_world", False) \ + and context == 'nemo': + out.write('\n') + out.write('\n \n') + # + out.write('\n \n') + out.write('\n') + for obj in list(axis_defs): + out.write("\t" + axis_defs[obj] + "\n") + if False and get_variable_from_lset_with_default('use_union_zoom', False): + for obj in sorted(list(union_axis_defs)): + out.write("\t" + union_axis_defs[obj] + "\n") + out.write('\n') + out.write(' \n') + # + out.write('\n \n') + out.write('\n') + if get_variable_from_lset_without_default('grid_policy') != "native": + create_standard_domains(domain_defs) + for obj in list(domain_defs): + out.write("\t" + domain_defs[obj] + "\n") + out.write('\n') + out.write(' \n') + # + out.write('\n \n') + for obj in list(grid_defs): + out.write("\t" + grid_defs[obj]) + if False and get_variable_from_lset_with_default('use_union_zoom', False): + for obj in list(union_grid_defs): + out.write("\t" + union_grid_defs[obj] + "\n") + out.write(' \n') + # + out.write('\n \n') + for obj in list(scalar_defs): + out.write("\t" + scalar_defs[obj] + "\n") + out.write(' \n') + # + out.write(' \n') diff --git a/analyzer.py b/analyzer.py index 05205c4..230b14b 100644 --- a/analyzer.py +++ 
b/analyzer.py @@ -13,23 +13,20 @@ and the corresponding frequency (albeit this is instrumental in DRS), and because we need to translate anyway to XIOS syntax """ +from __future__ import print_function, division, absolute_import, unicode_literals + import sys -from settings_interface import get_variable_from_lset_with_default -from dr_interface import print_DR_errors +# Utilities from utils import dr2xml_error +# Global variables and configuration tools +from config import add_value_in_list_config_variable -cell_method_warnings = [] - - -def initialize_cell_method_warnings(init): - global cell_method_warnings - cell_method_warnings = init - - -def get_cell_method_warnings(): - return cell_method_warnings +# Interface to settings dictionaries +from settings_interface import get_variable_from_lset_with_default +# Interface to Data Request +from dr_interface import print_DR_errors def freq2datefmt(in_freq, operation, table): @@ -138,199 +135,220 @@ def analyze_cell_time_method(cm, label, table, printout=False): "where sea-ice", "where cloud" since we suppose fields required in this way are physically undefined oustide of "where something". 
""" - global cell_method_warnings operation = None detect_missing = False clim = False # if cm is None: - if print_DR_errors: - print "DR Error: cell_time_method is None for %15s in table %s, averaging" % (label, table) - operation = "average" + if "fx" in table: + # Case of fixed fields required by home data request + operation = "once" + else: + if print_DR_errors: + print("DR Error: cell_time_method is None for %15s in table %s, averaging" % (label, table)) + operation = "average" # ---------------------------------------------------------------------------------------------------------------- elif "time: mean (with samples weighted by snow mass)" in cm: # [amnla-tmnsn]: Snow Mass Weighted (LImon : agesnow, tsnLi) - cell_method_warnings.append(('Cannot yet handle time: mean (with samples weighted by snow mass)', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('Cannot yet handle time: mean (with samples weighted by snow mass)', + label, table)) if printout: - print "Will not explicitly handle time: mean (with samples weighted by snow mass) for " + \ - "%15s in table %s -> averaging" % (label, table) + print("Will not explicitly handle time: mean (with samples weighted by snow mass) for " + \ + "%15s in table %s -> averaging" % (label, table)) operation = "average" # ---------------------------------------------------------------------------------------------------------------- elif "time: mean where cloud" in cm: # [amncl-twm]: Weighted Time Mean on Cloud (2 variables ISSCP # albisccp et pctisccp, en emDay et emMon) - cell_method_warnings.append(('Will not explicitly handle time: mean where cloud', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('Will not explicitly handle time: mean where cloud', label, table)) if printout: - print "Note : assuming that " + \ + print("Note : assuming that " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation 
= "average" detect_missing = True # ------------------------------------------------------------------------------------- elif "time: mean where sea_ice_melt_pound" in cm: # [amnnsimp-twmm]: Weighted Time Mean in Sea-ice Melt Pounds (uniquement des # variables en SImon) - cell_method_warnings.append(('time: mean where sea_ice_melt_pound', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where sea_ice_melt_pound', label, table)) if printout: - print "Note : assuming that 'time: mean where sea_ice_melt_pound' " + \ + print("Note : assuming that 'time: mean where sea_ice_melt_pound' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # ------------------------------------------------------------------------------------------------- elif "time: mean where sea_ice" in cm: # [amnsi-twm]: Weighted Time Mean on Sea-ice (presque que des # variables en SImon, sauf sispeed et sithick en SIday) - cell_method_warnings.append(('time: mean where sea_ice', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where sea_ice', label, table)) if printout: - print "Note : assuming that 'time: mean where sea_ice' " + \ + print("Note : assuming that 'time: mean where sea_ice' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True elif "time: mean where sea" in cm: # [amnesi-tmn]: # Area Mean of Ext. Prop. on Sea Ice : pas utilisee - print "time: mean where sea is not supposed to be used (%s,%s)" % (label, table) + print("time: mean where sea is not supposed to be used (%s,%s)" % (label, table)) # ------------------------------------------------------------------------------------- elif "time: mean where sea" in cm: # [amnesi-tmn]: # Area Mean of Ext. Prop. 
on Sea Ice : pas utilisee - print "time: mean where sea is not supposed to be used (%s,%s)" % (label, table) + print("time: mean where sea is not supposed to be used (%s,%s)" % (label, table)) # ------------------------------------------------------------------------------------- elif "time: mean where floating_ice_shelf" in cm: # [amnfi-twmn]: Weighted Time Mean on Floating Ice Shelf (presque que des # variables en Imon, Iyr, sauf sftflt en LImon !?) - cell_method_warnings.append(('time: mean where floating_ice_shelf', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where floating_ice_shelf', label, table)) if printout: - print "Note : assuming that 'time: mean where floating_ice_shelf' " + \ + print("Note : assuming that 'time: mean where floating_ice_shelf' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # ---------------------------------------------------------------------------------------------------------------- elif "time: mean where grounded_ice_sheet" in cm: # [amngi-twm]: Weighted Time Mean on Grounded Ice Shelf (uniquement des # variables en Imon, Iyr) - cell_method_warnings.append(('time: mean where grounded_ice_sheet', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where grounded_ice_sheet', label, table)) if printout: - print "Note : assuming that 'time: mean where grounded_ice_sheet' " + \ + print("Note : assuming that 'time: mean where grounded_ice_sheet' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # ---------------------------------------------------------------------------------------------------------------- elif "time: mean where ice_sheet" in cm: # [amnni-twmn]: Weighted Time Mean on Ice Shelf (uniquement des # variables en Imon, Iyr) - 
cell_method_warnings.append(('time: mean where ice_sheet', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where ice_sheet', label, table)) if printout: - print "Note : assuming that 'time: mean where ice_sheet' " + \ + print("Note : assuming that 'time: mean where ice_sheet' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # ---------------------------------------------------------------------------------------------------------------- elif "time: mean where landuse" in cm: # [amlu-twm]: Weighted Time Mean on Land Use Tiles (uniquement des # variables suffixees en 'Lut') - cell_method_warnings.append(('time: mean where land_use', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where land_use', label, table)) if printout: - print "Note : assuming that 'time: mean where landuse' " + \ + print("Note : assuming that 'time: mean where landuse' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # ---------------------------------------------------------------------------------------------------------------- elif "time: mean where crops" in cm: # [amc-twm]: Weighted Time Mean on Crops (uniquement des # variables suffixees en 'Crop') - cell_method_warnings.append(('time: mean where crops', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where crops', label, table)) if printout: - print "Note : assuming that 'time: mean where crops' " + \ + print("Note : assuming that 'time: mean where crops' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # 
---------------------------------------------------------------------------------------------------------------- elif "time: mean where natural_grasses" in cm: # [amng-twm]: Weighted Time Mean on Natural Grasses (uniquement des # variables suffixees en 'Grass') - cell_method_warnings.append(('time: mean where natural_grasses', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where natural_grasses', label, table)) if printout: - print "Note : assuming that 'time: mean where natural_grasses' " + \ + print("Note : assuming that 'time: mean where natural_grasses' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # ---------------------------------------------------------------------------------------------------------------- elif "time: mean where shrubs" in cm: # [ams-twm]: Weighted Time Mean on Shrubs (uniquement des # variables suffixees en 'Shrub') - cell_method_warnings.append(('time: mean where shrubs', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where shrubs', label, table)) if printout: - print "Note : assuming that 'time: mean where shrubs' " + \ + print("Note : assuming that 'time: mean where shrubs' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # ---------------------------------------------------------------------------------------------------------------- elif "time: mean where trees" in cm: # [amtr-twm]: Weighted Time Mean on Bare Ground (uniquement des # variables suffixees en 'Tree') - cell_method_warnings.append(('time: mean where trees', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where trees', label, table)) if printout: - print "Note : assuming that 'time: mean where trees' " + \ + print("Note : assuming that 'time: 
mean where trees' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # ---------------------------------------------------------------------------------------------------------------- elif "time: mean where vegetation" in cm: - # [amv-twm]: Weighted Time Mean on Vegetation (pas de varibles concernees) - cell_method_warnings.append(('time: mean where vegetation', label, table)) + # [amv-twm]: Weighted Time Mean on Vegetation (pas de variables concernees) + add_value_in_list_config_variable("cell_method_warnings", + ('time: mean where vegetation', label, table)) if printout: - print "Note : assuming that 'time: mean where vegetation' " + \ + print("Note : assuming that 'time: mean where vegetation' " + \ " for %15s in table %s is well handled by 'detect_missing'" \ - % (label, table) + % (label, table)) operation = "average" detect_missing = True # ---------------------------------------------------------------------------------------------------------------- elif "time: maximum within days time: mean over days" in cm: # [dmax]: Daily Maximum : tasmax Amon seulement if label != 'tasmax' and label != 'sfcWindmax': - print "Error: issue with variable %s in table %s " % (label, table) + \ - "and cell method time: maximum within days time: mean over days" + print("Error: issue with variable %s in table %s " % (label, table) + \ + "and cell method time: maximum within days time: mean over days") # we assume that pingfile provides a reference field which already implements "max within days" operation = "average" # ---------------------------------------------------------------------------------------------------------------- elif "time: minimum within days time: mean over days" in cm: # [dmin]: Daily Minimum : tasmin Amon seulement if label != 'tasmin': - print "Error: issue with variable %s in table %s " % (label, table) + \ - "and cell method time: minimum within days 
time: mean over days" + print("Error: issue with variable %s in table %s " % (label, table) + \ + "and cell method time: minimum within days time: mean over days") # we assume that pingfile provides a reference field which already implements "min within days" operation = "average" # ---------------------------------------------------------------------------------------------------------------- elif "time: mean within years time: mean over years" in cm: # [aclim]: Annual Climatology - cell_method_warnings.append(('Cannot yet compute annual climatology - must do it as a postpro', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('Cannot yet compute annual climatology - must do it as a postpro', + label, table)) if printout: - print "Cannot yet compute annual climatology for " + \ - "%15s in table %s -> averaging" % (label, table) + print("Cannot yet compute annual climatology for " + \ + "%15s in table %s -> averaging" % (label, table)) # Could transform in monthly fields to be post-processed operation = "average" # ---------------------------------------------------------------------------------------------------------------- elif "time: mean within days time: mean over days" in cm: # [amn-tdnl]: Mean Diurnal Cycle - cell_method_warnings.append(('File structure for diurnal cycle is not yet CF-compliant', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('File structure for diurnal cycle is not yet CF-compliant', label, table)) operation = "average" clim = True # ---------------------------------------------------------------------------------------------------------------- # mpmoine_correction:analyze_cell_time_method: ajout du cas 'Maximum Hourly Rate' elif "time: mean within hours time: maximum over hours" in cm: - cell_method_warnings.append(('Cannot yet compute maximum hourly rate', label, table)) + add_value_in_list_config_variable("cell_method_warnings", + ('Cannot yet compute maximum hourly rate', label, 
table)) if printout: - print "TBD: Cannot yet compute maximum hourly rate for " + \ - " %15s in table %s -> averaging" % (label, table) + print("TBD: Cannot yet compute maximum hourly rate for " + \ + " %15s in table %s -> averaging" % (label, table)) # Could output a time average of 24 hourly fields at 01 UTC, 2UTC ... operation = "average" # ---------------------------------------------------------------------------------------------------------------- @@ -356,8 +374,8 @@ def analyze_cell_time_method(cm, label, table, printout=False): operation = "once" # ---------------------------------------------------------------------------------------------------------------- else: - print "Warning: issue when analyzing cell_time_method " + \ - "%s for %15s in table %s, assuming it is once" % (cm, label, table) + print("Warning: issue when analyzing cell_time_method " + \ + "%s for %15s in table %s, assuming it is once" % (cm, label, table)) operation = "once" if not operation: @@ -378,7 +396,7 @@ def Cmip6Freq2XiosFreq(freq, table): if table == "CFsubhr": rep = get_variable_from_lset_with_default("CFsubhr_frequency", "1ts") elif table is None: - print "Issue in dr2xml with table None and freq=", freq + print("Issue in dr2xml with table None and freq=", freq) sys.exit(0) else: rep = "1ts" @@ -443,7 +461,7 @@ def guess_freq_from_table_name(table): elif "fx" in table: return "fx" else: - print "ERROR in guess_freq_from_table : cannot deduce frequency from table named %s" % table + print("ERROR in guess_freq_from_table : cannot deduce frequency from table named %s" % table) sys.exit(1) @@ -514,7 +532,7 @@ def cellmethod2area(method): return "isf" -def DRgrid2gridatts(grid): +def DRgrid2gridatts(grid, is_dev=False): """ Returns label, resolution, description for a DR grid name""" if grid == "cfsites": return "gn", "100 km", "data sampled in model native grid by nearest neighbour method " diff --git a/cfsites.py b/cfsites.py index ded480a..c94dec5 100644 --- a/cfsites.py +++ 
b/cfsites.py @@ -1,7 +1,16 @@ -# CFsites-related elements (CFMIP) -# A file named cfsites_grid_file_name must be provided at runtime, which -# includes a field named cfsites_grid_field_id, defined on an unstructured -# grid which is composed of CF sites +#!/usr/bin/python +# -*- coding: utf-8 -*- + +""" +CFsites-related elements (CFMIP) +A file named cfsites_grid_file_name must be provided at runtime, which +includes a field named cfsites_grid_field_id, defined on an unstructured +grid which is composed of CF sites +""" + +from __future__ import print_function, division, absolute_import, unicode_literals + + cfsites_radix = "cfsites" cfsites_domain_id = cfsites_radix + "_domain" cfsites_grid_id = cfsites_radix + "_grid" diff --git a/config.py b/config.py index 4029a10..95a3ba2 100644 --- a/config.py +++ b/config.py @@ -5,34 +5,72 @@ Configuration variables and associated tools. """ +from __future__ import print_function, division, absolute_import, unicode_literals + +import sys + +# Utilities from utils import dr2xml_error +# Python version +python_version = "python"+sys.version[0] + # General variables -version = "1.16" # dr2xml version +version = "2.0" # dr2xml version # CMIP6 variables conventions = "CF-1.7 CMIP-6.2" +# The current code should comply with this version of spec doc at +# https://docs.google.com/document/d/1h0r8RZr_f3-8egBMMh7aqLwy3snpD6_MrDz1q8n5XUk/edit +CMIP6_conventions_version = "v6.2.4" - +# Variable used for storing index of xml files context_index = None +# Variable used to store cell method warnings +cell_method_warnings = list() + # Functions to deal with those configuration variables def set_config_variable(variable, value): + """ + Set the value of the indicated global variable + """ if variable == "context_index": global context_index context_index = value + elif variable == "cell_method_warnings": + global cell_method_warnings + cell_method_warnings = value else: - raise dr2xml_error("Unknown configuration variable %s." 
% variable) + raise dr2xml_error("Can not set configuration variable %s." % variable) def get_config_variable(variable): + """ + Get the value of the indicated global variable. + """ if variable == "context_index": return context_index elif variable == "conventions": return conventions elif variable == "version": return version + elif variable == "CMIP6_conventions_version": + return CMIP6_conventions_version + elif variable == "cell_method_warnings": + return cell_method_warnings else: raise dr2xml_error("Unknown configuration variable %s." % variable) + + +def add_value_in_list_config_variable(variable, value): + """ + Add a value to a list-type configuration variable. + """ + if variable == "cell_method_warnings": + global cell_method_warnings + cell_method_warnings.append(value) + else: + raise dr2xml_error("Could not add a value to configuration variable %s." % variable) diff --git a/create_ping_files.ipynb b/create_ping_files.ipynb index 9bf05b2..bdc03f6 100644 --- a/create_ping_files.ipynb +++ b/create_ping_files.ipynb @@ -188,7 +188,7 @@ ], "source": [ "#from dr2xml import select_CMORvars_for_lab, pingFileForRealmsList\n", - "from dr2xml import pingFileForRealmsList\n", + "from pingfiles_interface import pingFileForRealmsList\n", "from vars_selection import gather_AllSimpleVars" ] }, diff --git a/doc/DR2xml.py b/doc/DR2xml.py index 154ab5b..a971e85 100644 --- a/doc/DR2xml.py +++ b/doc/DR2xml.py @@ -1,7 +1,12 @@ -# coding: utf-8 +#!/usr/bin/python +# -*- coding: utf-8 -*- # # An example for generating the XIOS file_def for a given lab, model, experiment, year and XIOS context +from __future__ import print_function, division, absolute_import, unicode_literals + +from dr2xml import generate_file_defs + # In[ ]: # --- Select your laboratory: 'cnrm', 'cerfacs' or'ipsl' @@ -389,30 +394,28 @@ # In[ ]: note = "VERSIONS:" -print note -print "-" * len(note) - -from dr2xml import generate_file_defs +print(note) +print("-" * len(note)) # In[ ]: note = "\nLAB AND MODEL 
SETTINGS:" -print note -print "-" * len(note) +print(note) +print("-" * len(note)) for k, v in lab_and_model_settings.items(): - print "* ", k, "=", v + print("* ", k, "=", v) note = "\nSIMULATION SETTINGS:" -print note -print "-" * len(note) +print(note) +print("-" * len(note)) # Path to local copy of CMIP6 CVs, which you can get from https://github.com/WCRP-CMIP/CMIP6_CVs # my_cvspath="/Users/moine/Codes/MyDevel_Codes/CMIP6_DATA_SUITE/CMIP6_CVs/" my_cvspath = "~/dev/CMIP6_CVs" for k, v in simulation_settings.items(): - print "* ", k, "=", v + print("* ", k, "=", v) # In[ ]: @@ -439,26 +442,24 @@ # In[ ]: note = "VERSIONS:" -print note -print "-" * len(note) - -from dr2xml import generate_file_defs +print(note) +print("-" * len(note)) # In[ ]: note = "\nLAB AND MODEL SETTINGS:" -print note -print "-" * len(note) +print(note) +print("-" * len(note)) for k, v in lab_and_model_settings.items(): - print "* ", k, "=", v + print("* ", k, "=", v) note = "\nSIMULATION SETTINGS:" -print note -print "-" * len(note) +print(note) +print("-" * len(note)) for k, v in simulation_settings.items(): - print "* ", k, "=", v + print("* ", k, "=", v) # In[ ]: diff --git a/doc/create_ping_files.py b/doc/create_ping_files.py index 13bf2cb..b889f8d 100644 --- a/doc/create_ping_files.py +++ b/doc/create_ping_files.py @@ -1,7 +1,14 @@ -# coding: utf-8 +#!/usr/bin/python +# -*- coding: utf-8 -*- # # Create ping files based on lab choices +from __future__ import print_function, division, absolute_import, unicode_literals + +# from dr2xml import select_CMORvars_for_lab, pingFileForRealmsList +from pingfiles_interface import pingFileForRealmsList +from vars_selection import gather_AllSimpleVars + # In[ ]: # Select your laboratory among: 'cnrm', 'cerfacs', 'ipsl' @@ -107,10 +114,6 @@ # In[ ]: -# from dr2xml import select_CMORvars_for_lab, pingFileForRealmsList -from dr2xml import pingFileForRealmsList -from vars_selection import gather_AllSimpleVars - # ## Select all variables to consider, 
based on lab settings # In[ ]: @@ -124,7 +127,9 @@ help(pingFileForRealmsList) -# ### When using function create_ping_files with argument exact=False, each ping file will adress all variables which realm includes or is included in one of the strings in a realms set

e.g for set ['ocean','seaIce'], ping file 'ping_ocean_seaIce.xml' will includes variables which realm is either 'ocean' or 'seaIce' or 'ocean seaIce' +# When using function create_ping_files with argument exact=False, each ping file will adress all variables which +# realm includes or is included in one of the strings in a realms set

e.g for set ['ocean','seaIce'], +# ping file 'ping_ocean_seaIce.xml' will includes variables which realm is either 'ocean' or 'seaIce' or 'ocean seaIce' # ## Create various ping files for various sets of realms @@ -137,7 +142,7 @@ # Generate one ping file per context: for my_context in settings["realms_per_context"].keys(): - print "=== CREATING PINGFILE FOR CONTEXT", my_context + print("=== CREATING PINGFILE FOR CONTEXT", my_context) realms = settings['realms_per_context'][my_context] pingFileForRealmsList(my_context, realms, svars, settings["path_special_defs"], comments=" ", exact=False, dummy=True, @@ -155,7 +160,7 @@ single_realms = [['ocean'], ['seaIce'], ['ocnBgchem'], ['atmos'], ['land'], ['landIce'], ['atmosChem'], ['aerosol']] for rs in single_realms: # print rs[0] - print "=== CREATING PINGFILE FOR SINGLE REALM", rs + print("=== CREATING PINGFILE FOR SINGLE REALM", rs) pingFileForRealmsList(rs[0], rs, svars, settings["path_special_defs"], prefix=settings['ping_variables_prefix'], comments=" ", exact=False, dummy=True, dummy_with_shape=True, filename=my_dir + 'ping_%s.xml' % rs[0]) diff --git a/dr2xml.py b/dr2xml.py index c7ba859..877b97f 100644 --- a/dr2xml.py +++ b/dr2xml.py @@ -49,79 +49,55 @@ #################################### # End of pre-requisites #################################### -import json -import datetime -import re -import collections -import sys -import os -import glob -# Utilities -from utils import dr2xml_error +from __future__ import print_function, division, absolute_import, unicode_literals + +import sys -# Settings and config -from config import get_config_variable, set_config_variable -from analyzer import freq2datefmt, analyze_cell_time_method, Cmip6Freq2XiosFreq, longest_possible_period, \ - initialize_cell_method_warnings, get_cell_method_warnings, DRgrid2gridatts +import cProfile +import pstats +import io -# Data request interface -from dr_interface import get_DR_version, initialize_sc, get_collection, get_uid, 
get_request_by_id_by_sect, \ - get_experiment_label, print_DR_errors +from collections import OrderedDict -# XML interface -from xml_interface import create_xml_element_from_string, create_string_from_xml_element, get_root_of_xml_file, \ - create_xml_element, create_xml_string +# Global variables and configuration tools +from config import get_config_variable, set_config_variable, python_version -# Simulations and laboratory settings dictionnaries interface +# Interface to settings dictionaries from settings_interface import initialize_dict, get_variable_from_lset_with_default, \ - is_key_in_sset, get_variable_from_sset_without_default, is_sset_not_None, get_source_id_and_type, \ - get_variable_from_sset_and_lset_without_default, get_variable_from_sset_with_default_in_sset, \ - get_variable_from_sset_with_default, is_key_in_lset, get_variable_from_sset_else_lset_with_default, \ - get_lset_iteritems, get_sset_iteritems, get_variable_from_lset_without_default - -# XIOS linked modules -from Xparse import init_context, id2grid, id2gridid, idHasExprWithAt -from Xwrite import wr, write_xios_file_def - -# Grids modules -from grids import get_grid_def, guess_simple_domain_grid_def, create_grid_def, create_axis_def, change_domain_in_grid, \ - get_grid_def_with_lset, change_axes_in_grid, isVertDim, scalar_vertical_dimension -from grids_selection import decide_for_grids - -# Variables modules -from vars_home import process_homeVars, complement_svar_using_cmorvar, \ - multi_plev_suffixes, single_plev_suffixes, get_simplevar -from vars_cmor import simple_CMORvar, simple_Dim -from vars_selection import endyear_for_CMORvar, RequestItem_applies_for_exp_and_year, select_CMORvars_for_lab, \ - gather_AllSimpleVars, get_sc, initialize_sn_issues, get_grid_choice - -# Split frequencies module -from file_splitting import split_frequency_for_variable, timesteps_per_freq_and_duration - -# Statistics module -from infos import print_SomeStats + get_variable_from_lset_without_default +# 
Interface to Data Request +from dr_interface import get_DR_version, get_uid, get_request_by_id_by_sect + +# Tools to deal with ping files +from pingfiles_interface import read_pingfiles_variables -# CFsites handling has its own module -from cfsites import cfsites_domain_id, cfsites_grid_id, cfsites_input_filedef, add_cfsites_in_defs +# Tools to deal with computation of used pressure levels +from plevs_unions import create_xios_axis_and_grids_for_plevs_unions -# Post-processing modules -from postprocessing import process_vertical_interpolation, process_zonal_mean, process_diurnal_cycle +# Variables tools +from vars_home import multi_plev_suffixes, single_plev_suffixes +from vars_selection import initialize_sn_issues, select_variables_to_be_processed -print "\n", 50 * "*", "\n*" -print "* %29s" % "dr2xml version: ", get_config_variable("version") +# XIOS reading and writing tools +from Xparse import init_context +from Xwrite import write_xios_file_def -# The current code should comply with this version of spec doc at -# https://docs.google.com/document/d/1h0r8RZr_f3-8egBMMh7aqLwy3snpD6_MrDz1q8n5XUk/edit -CMIP6_conventions_version = "v6.2.4" -print "* %29s" % "CMIP6 conventions version: ", CMIP6_conventions_version +# Info printing tools +from infos import print_SomeStats + + +print("\n", 50 * "*", "\n*") +print("* %29s" % "dr2xml version: ", get_config_variable("version")) + +print("* %29s" % "CMIP6 conventions version: ", get_config_variable("CMIP6_conventions_version")) # mpmoine_merge_dev2_v0.12: posixpath.dirname ne marche pas chez moi # TBS# from os import path as os_path # TBS# prog_path=os_path.abspath(os_path.split(__file__)[0]) -print "* %29s" % "CMIP6 Data Request version: ", get_DR_version() -print "\n*\n", 50 * "*" +print("* %29s" % "CMIP6 Data Request version: ", get_DR_version()) +print("\n*\n", 50 * "*") """ An example/template of settings for a lab and a model""" example_lab_and_model_settings = { @@ -606,16 +582,18 @@ "max_split_freq": None, 
'unused_contexts': [], # If you havn't set a 'configuration', you may fine tune here + # perso_sdims_description variable should be a dictionnary which described each element of the + # custom sdim shape + 'perso_sdims_description': {}, } def generate_file_defs(lset, sset, year, enddate, context, cvs_path, pingfiles=None, dummies='include', printout=False, dirname="./", prefix="", attributes=[], select="on_expt_and_year"): - # A wrapper for profiling top-level function : generate_file_defs_inner - import cProfile - import pstats - import StringIO + """ + A wrapper for profiling top-level function : generate_file_defs_inner + """ pr = cProfile.Profile() pr.enable() # Initialize lset and sset variables for all functions @@ -624,7 +602,10 @@ def generate_file_defs(lset, sset, year, enddate, context, cvs_path, pingfiles=N dummies=dummies, printout=printout, dirname=dirname, prefix=prefix, attributes=attributes, select=select) pr.disable() - s = StringIO.StringIO() + if python_version == "python2": + s = io.BytesIO() + else: + s = io.StringIO() sortby = 'cumulative' ps = pstats.Stats(pr, stream=s).sort_stats(sortby) ps.print_stats() @@ -662,173 +643,49 @@ def generate_file_defs_inner(lset, sset, year, enddate, context, cvs_path, pingf debug = False cmvk = "CMIP6_CV_version" if cmvk in attributes: - print "* %s: %s" % (cmvk, attributes[cmvk]) + print("* %s: %s" % (cmvk, attributes[cmvk])) # -------------------------------------------------------------------- # Parse XIOS settings file for the context # -------------------------------------------------------------------- - print "\n", 50 * "*", "\n" - print "Processing context ", context - print "\n", 50 * "*", "\n" + print() + print(50 * "*") + print() + print("Processing context ", context) + print() + print(50 * "*") + print() set_config_variable("context_index", init_context(context, get_variable_from_lset_with_default("path_to_parse", "./"), printout=get_variable_from_lset_with_default("debug_parsing", False))) if 
get_config_variable("context_index") is None: sys.exit(1) - initialize_cell_method_warnings([]) + set_config_variable("cell_method_warnings", list()) warnings_for_optimisation = [] - initialize_sn_issues(dict()) - + initialize_sn_issues(OrderedDict()) # # -------------------------------------------------------------------- - # Extract CMOR variables for the experiment and year and lab settings + # Select variables that should be processed # -------------------------------------------------------------------- - skipped_vars_per_table = {} - actually_written_vars = [] - mip_vars_list = gather_AllSimpleVars(year, printout, select) - # Group CMOR vars per realm - svars_per_realm = dict() - for svar in mip_vars_list: - realm = svar.modeling_realm - if realm not in svars_per_realm: - svars_per_realm[realm] = [] - if svar not in svars_per_realm[realm]: - add = True - for ovar in svars_per_realm[realm]: - if ovar.label == svar.label and ovar.spatial_shp == svar.spatial_shp \ - and ovar.frequency == svar.frequency and ovar.cell_methods == svar.cell_methods: - add = False - # Settings may allow for duplicate var in two tables. 
In DR01.00.21, this actually - # applies to very few fields (ps-Aermon, tas-ImonAnt, areacellg) - if get_variable_from_lset_with_default('allow_duplicates', True) or add: - svars_per_realm[realm].append(svar) - else: - print "Not adding duplicate %s (from %s) for realm %s" % (svar.label, svar.mipTable, realm) - else: - old = svars_per_realm[realm][0] - print "Duplicate svar %s %s %s %s" % (old.label, old.grid, svar.label, svar.grid) - pass - if printout: - print "\nRealms for these CMORvars :", svars_per_realm.keys() - # - # -------------------------------------------------------------------- - # Select on context realms, grouping by table - # Excluding 'excluded_vars' and 'excluded_spshapes' lists - # -------------------------------------------------------------------- - svars_per_table = dict() - context_realms = get_variable_from_lset_without_default('realms_per_context', context) - processed_realms = [] - for realm in context_realms: - if realm in processed_realms: - continue - processed_realms.append(realm) - excludedv = dict() - print "Processing realm '%s' of context '%s'" % (realm, context) - # print 50*"_" - excludedv = dict() - if realm in svars_per_realm: - for svar in svars_per_realm[realm]: - # exclusion de certaines spatial shapes (ex. 
Polar Stereograpic Antarctic/Groenland) - if svar.label not in get_variable_from_lset_without_default('excluded_vars') and \ - svar.spatial_shp and \ - svar.spatial_shp not in get_variable_from_lset_without_default("excluded_spshapes"): - if svar.mipTable not in svars_per_table: - svars_per_table[svar.mipTable] = [] - svars_per_table[svar.mipTable].append(svar) - else: - if printout: - reason = "unknown reason" - if svar.label in get_variable_from_lset_without_default('excluded_vars'): - reason = "They are in exclusion list " - if not svar.spatial_shp: - reason = "They have no spatial shape " - if svar.spatial_shp in get_variable_from_lset_without_default("excluded_spshapes"): - reason = "They have excluded spatial shape : %s" % svar.spatial_shp - if reason not in excludedv: - excludedv[reason] = [] - excludedv[reason].append((svar.label, svar.mipTable)) - if printout and len(excludedv.keys()) > 0: - print "The following pairs (variable,table) have been excluded for these reasons :" - for reason in excludedv: - print "\t", reason, ":", excludedv[reason] - if debug: - print "For table AMon: ", [v.label for v in svars_per_table["Amon"]] - # - # -------------------------------------------------------------------- - # Add svars belonging to the orphan list - # -------------------------------------------------------------------- - if context in get_variable_from_lset_without_default('orphan_variables'): - orphans = get_variable_from_lset_without_default('orphan_variables', context) - for svar in mip_vars_list: - if svar.label in orphans: - if svar.label not in get_variable_from_lset_without_default('excluded_vars') and svar.spatial_shp and \ - svar.spatial_shp not in get_variable_from_lset_without_default("excluded_spshapes"): - if svar.mipTable not in svars_per_table: - svars_per_table[svar.mipTable] = [] - svars_per_table[svar.mipTable].append(svar) - # - # -------------------------------------------------------------------- - # Remove svars belonging to other 
contexts' orphan lists - # -------------------------------------------------------------------- - for other_context in get_variable_from_lset_without_default('orphan_variables'): - if other_context != context: - orphans = get_variable_from_lset_without_default('orphan_variables', other_context) - for table in svars_per_table: - toremove = [] - for svar in svars_per_table[table]: - if svar.label in orphans: - toremove.append(svar) - for svar in toremove: - svars_per_table[table].remove(svar) - if debug: - print "Pour table AMon: ", [v.label for v in svars_per_table["Amon"]] + skipped_vars_per_table = OrderedDict() + actually_written_vars = list() + svars_per_table = select_variables_to_be_processed(year, context, select, printout, debug) # # -------------------------------------------------------------------- # Read ping_file defined variables # -------------------------------------------------------------------- - pingvars = [] - all_ping_refs = {} - if pingfiles is not None: - all_pingvars = [] - # print "pingfiles=",pingfiles - for pingfile in pingfiles.split(): - ping_refs = read_xml_elmt_or_attrib(pingfile, tag='field', attrib='field_ref') - # ping_refs=read_xml_elmt_or_attrib(pingfile, tag='field') - if ping_refs is None: - print "Error: issue accessing pingfile " + pingfile - return - all_ping_refs.update(ping_refs) - if dummies == "include": - pingvars = ping_refs.keys() - else: - pingvars = [v for v in ping_refs if 'dummy' not in ping_refs[v]] - if dummies == "forbid": - if len(pingvars) != len(ping_refs): - for v in ping_refs: - if v not in pingvars: - print v, - print - raise dr2xml_error("They are still dummies in %s , while option is 'forbid' :" % pingfile) - else: - pingvars = ping_refs.keys() - elif dummies == "skip": - pass - else: - print "Forbidden option for dummies : " + dummies - sys.exit(1) - all_pingvars.extend(pingvars) - pingvars = all_pingvars + pingvars, all_ping_refs = read_pingfiles_variables(pingfiles, dummies) # - field_defs = dict() - 
axis_defs = dict() - grid_defs = dict() - file_defs = dict() - scalar_defs = dict() + field_defs = OrderedDict() + axis_defs = OrderedDict() + grid_defs = OrderedDict() + file_defs = OrderedDict() + scalar_defs = OrderedDict() # # -------------------------------------------------------------------- # Build all plev union axis and grids # -------------------------------------------------------------------- if get_variable_from_lset_with_default('use_union_zoom', False): - svars_full_list = [] + svars_full_list = list() for svl in svars_per_table.values(): svars_full_list.extend(svl) create_xios_axis_and_grids_for_plevs_unions(svars_full_list, multi_plev_suffixes.union(single_plev_suffixes), @@ -836,651 +693,40 @@ def generate_file_defs_inner(lset, sset, year, enddate, context, cvs_path, pingf printout=False) # # -------------------------------------------------------------------- - # Start writing XIOS file_def file: - # file_definition node, including field child-nodes + # Write XIOS file_def # -------------------------------------------------------------------- # filename=dirname+"filedefs_%s.xml"%context filename = dirname + "dr2xml_%s.xml" % context - with open(filename, "w") as out: - out.write(' \n' % context) - out.write(' \n' % get_DR_version()) - out.write(' \n' % "??") - out.write(' \n' % CMIP6_conventions_version) - out.write(' \n' % get_config_variable("version")) - out.write('\n') - out.write('\n') - out.write(' \n' % year) - # - domain_defs = dict() - # for table in ['day'] : - out.write('\n \n') - foo, sourcetype = get_source_id_and_type() - for table in sorted(svars_per_table.keys()): - count = dict() - for svar in sorted(svars_per_table[table], key=lambda x: (x.label + "_" + table)): - if get_variable_from_lset_with_default("allow_duplicates_in_same_table", False) \ - or svar.mipVarLabel not in count: - if not get_variable_from_lset_with_default("use_cmorvar_label_in_filename", False) \ - and svar.mipVarLabel in count: - form = "If you really want 
to actually produce both %s and %s in table %s, " + \ - "you must set 'use_cmorvar_label_in_filename' to True in lab settings" - raise dr2xml_error(form % (svar.label, count[svar.mipVarLabel].label, table)) - count[svar.mipVarLabel] = svar - for grid in svar.grids: - a, hgrid, b, c, d = get_variable_from_lset_without_default('grids', get_grid_choice(), context) - check_for_file_input(svar, hgrid, pingvars, field_defs, grid_defs, domain_defs, file_defs) - write_xios_file_def(svar, year, table, lset, sset, out, cvs_path, - field_defs, axis_defs, grid_defs, domain_defs, scalar_defs, file_defs, - dummies, skipped_vars_per_table, actually_written_vars, - prefix, context, grid, pingvars, enddate, attributes) - else: - print "Duplicate variable %s,%s in table %s is skipped, preferred is %s" % \ - (svar.label, svar.mipVarLabel, table, count[svar.mipVarLabel].label) - - if cfsites_grid_id in grid_defs: - out.write(cfsites_input_filedef()) - for file_def in file_defs: - out.write(file_defs[file_def]) - out.write('\n \n') - # - # -------------------------------------------------------------------- - # End writing XIOS file_def file: - # field_definition, axis_definition, grid_definition - # and domain_definition auxilliary nodes - # -------------------------------------------------------------------- - # Write all domain, axis, field defs needed for these file_defs - out.write(' \n') - if get_variable_from_lset_with_default("nemo_sources_management_policy_master_of_the_world", False) \ - and context == 'nemo': - out.write('\n') - for obj in sorted(field_defs.keys()): - out.write("\t" + field_defs[obj] + "\n") - if get_variable_from_lset_with_default("nemo_sources_management_policy_master_of_the_world", False) \ - and context == 'nemo': - out.write('\n') - out.write('\n \n') - # - out.write('\n \n') - out.write('\n') - for obj in sorted(axis_defs.keys()): - out.write("\t" + axis_defs[obj] + "\n") - if False and get_variable_from_lset_with_default('use_union_zoom', False): - 
for obj in sorted(union_axis_defs.keys()): - out.write("\t" + union_axis_defs[obj] + "\n") - out.write('\n') - out.write(' \n') - # - out.write('\n \n') - out.write('\n') - if get_variable_from_lset_without_default('grid_policy') != "native": - create_standard_domains(domain_defs) - for obj in sorted(domain_defs.keys()): - out.write("\t" + domain_defs[obj] + "\n") - out.write('\n') - out.write(' \n') - # - out.write('\n \n') - for obj in grid_defs.keys(): - out.write("\t" + grid_defs[obj]) - if False and get_variable_from_lset_with_default('use_union_zoom', False): - for obj in sorted(union_grid_defs.keys()): - out.write("\t" + union_grid_defs[obj] + "\n") - out.write(' \n') - # - out.write('\n \n') - for obj in sorted(scalar_defs.keys()): - out.write("\t" + scalar_defs[obj] + "\n") - out.write(' \n') - # - out.write(' \n') + write_xios_file_def(filename, svars_per_table, year, lset, sset, cvs_path, field_defs, axis_defs, grid_defs, + scalar_defs, file_defs, dummies, skipped_vars_per_table, actually_written_vars, prefix, context, + pingvars, enddate, attributes) if printout: - print "\nfile_def written as %s" % filename + print("\nfile_def written as %s" % filename) + # + # -------------------------------------------------------------------- + # Print infos about the run + # -------------------------------------------------------------------- # mpmoine_petitplus:generate_file_defs: pour sortir des stats sur ce que l'on sort reelement # SS - non : gros plus if printout: print_SomeStats(context, svars_per_table, skipped_vars_per_table, actually_written_vars, get_variable_from_lset_with_default("print_stats_per_var_label", False)) - warn = dict() - for warning, label, table in get_cell_method_warnings(): + warn = OrderedDict() + for warning, label, table in get_config_variable("cell_method_warnings"): if warning not in warn: warn[warning] = set() warn[warning].add(label) if len(warn) > 0: - print "\nWarnings about cell methods (with var list)" + print("\nWarnings 
about cell methods (with var list)") for w in warn: - print "\t", w, " for vars : ", warn[w] + print("\t", w, " for vars : ", warn[w]) if len(warnings_for_optimisation) > 0: - print "Warning for fields which cannot be optimised (i.e. average before remap) because of an expr with @\n\t", + print("Warning for fields which cannot be optimised (i.e. average before remap) because of an expr with @\n\t",) for w in warnings_for_optimisation: - print w.replace(get_variable_from_lset_without_default('ping_variables_prefix'), ""), - print - - -def create_xios_axis_and_grids_for_plevs_unions(svars, plev_sfxs, dummies, axis_defs, grid_defs, field_defs, ping_refs, - printout=False): - """ - Objective of this function is to optimize Xios vertical interpolation requested in pressure levels. - Process in 2 steps: - * First, search pressure levels unions for each simple variable label without psuffix and build a dictionnary : - dict_plevs is a 3-level intelaced dictionnary containing for each var (key=svar label_without_psuffix), - the list of svar (key=svar label,value=svar object) per pressure levels set (key=sdim label): - { "varX": - { "plevA": {"svar1":svar1,"svar2":svar2,"svar3":svar3}, - "plevB": {"svar4":svar4,"svar5":svar5}, - "plevC": {"svar6":svar6} }, - "varY": - { "plevA": {"svar7":svar7}, - "plevD": {"svar8":svar8,"svar9":svar9} } - } - * Second, create create all of the Xios union axis (axis id: union_plevs_) - """ - # - prefix = get_variable_from_lset_without_default("ping_variables_prefix") - # First, search plev unions for each label_without_psuffix and build dict_plevs - dict_plevs = {} - for sv in svars: - if not sv.modeling_realm: - print "Warning: no modeling_realm associated to:", sv.label, sv.mipTable, sv.mip_era - for sd in sv.sdims.values(): - # couvre les dimensions verticales de type 'plev7h' ou 'p850' - if sd.label.startswith("p") and any(sd.label.endswith(s) for s in plev_sfxs) and sd.label != 'pl700': - lwps = sv.label_without_psuffix - if lwps: - 
present_in_ping = (prefix + lwps) in ping_refs - dummy_in_ping = None - if present_in_ping: - dummy_in_ping = ("dummy" in ping_refs[prefix + lwps]) - - if present_in_ping and (not dummy_in_ping or dummies == 'include'): - sv.sdims[sd.label].is_zoom_of = "union_plevs_" + lwps - if lwps not in dict_plevs: - dict_plevs[lwps] = {sd.label: {sv.label: sv}} - else: - if sd.label not in dict_plevs[lwps]: - dict_plevs[lwps].update({sd.label: {sv.label: sv}}) - else: - if sv.label not in dict_plevs[lwps][sd.label].keys(): - dict_plevs[lwps][sd.label].update({sv.label: sv}) - else: - # TBS# print sv.label,"in table",sv.mipTable,"already listed for",sd.label - pass - else: - if printout: - print "Info: ", lwps, "not taken into account for building plevs union axis because ", \ - prefix + lwps, - if not present_in_ping: - print "is not an entry in the pingfile" - else: - print "has a dummy reference in the pingfile" - - # svar will be expected on a zoom axis of the union. Corresponding vertical dim must - # have a zoom_label named plevXX_ (multiple pressure levels) - # or pXX_ (single pressure level) - sv.sdims[sd.label].zoom_label = 'zoom_' + sd.label + "_" + lwps - else: - print "Warning: dim is pressure but label_without_psuffix=", lwps, \ - "for", sv.label, sv.mipTable, sv.mip_era - # else : - # print "for var %s/%s, dim %s is not related to pressure"%(sv.label,sv.label_without_psuffix,sd.label) - # - # Second, create xios axis for union of plevs - union_axis_defs = axis_defs - union_grid_defs = grid_defs - # union_axis_defs={} - # union_grid_defs={} - for lwps in dict_plevs.keys(): - sdim_union = simple_Dim() - plevs_union_xios = "" - plevs_union = set() - for plev in dict_plevs[lwps].keys(): - plev_values = [] - for sv in dict_plevs[lwps][plev].values(): - if not plev_values: - # svar is the first one with this plev => get its level values - # on reecrase les attributs de sdim_union a chaque nouveau plev. 
Pas utile mais - # c'est la facon la plus simple de faire - sdsv = sv.sdims[plev] - if sdsv.stdname: - sdim_union.stdname = sdsv.stdname - if sdsv.long_name: - sdim_union.long_name = sdsv.long_name - if sdsv.positive: - sdim_union.positive = sdsv.positive - if sdsv.out_name: - sdim_union.out_name = sdsv.out_name - if sdsv.units: - sdim_union.units = sdsv.units - if sdsv.requested: - # case of multi pressure levels - plev_values = set(sdsv.requested.split()) - sdim_union.is_union_for.append(sv.label + "_" + sd.label) - elif sdsv.value: - # case of single pressure level - plev_values = set(sdsv.value.split()) - sdim_union.is_union_for.append(sv.label + "_" + sd.label) - else: - print "Warning: No requested nor value found for", svar.label, "with vertical dimesion", plev - plevs_union = plevs_union.union(plev_values) - if printout: - print " -- on", plev, ":", plev_values - if printout: - print " *", sv.label, "(", sv.mipTable, ")" - list_plevs_union = list(plevs_union) - list_plevs_union_num = [float(lev) for lev in list_plevs_union] - list_plevs_union_num.sort(reverse=True) - list_plevs_union = [str(lev) for lev in list_plevs_union_num] - for lev in list_plevs_union: - plevs_union_xios += " " + lev - if printout: - print ">>> XIOS plevs union:", plevs_union_xios - sdim_union.label = "union_plevs_" + lwps - if len(list_plevs_union) > 1: - sdim_union.requested = plevs_union_xios - if len(list_plevs_union) == 1: - sdim_union.value = plevs_union_xios - if printout: - print "creating axis def for union :%s" % sdim_union.label - axis_def = create_axis_def(sdim_union, union_axis_defs, field_defs) - create_grid_def(union_grid_defs, axis_def, sdim_union.out_name, - id2gridid(prefix + lwps, get_config_variable("context_index"))) - # - # return (union_axis_defs,union_grid_defs) - - -# -def pingFileForRealmsList(settings, context, lrealms, svars, path_special, dummy="field_atm", - dummy_with_shape=False, exact=False, - comments=False, prefix="CV_", filename=None, debug=[]): - 
"""Based on a list of realms LREALMS and a list of simplified vars - SVARS, create the ping file which name is ~ - ping_.xml, which defines fields for all vars in - SVARS, with a field_ref which is either 'dummy' or '?' - (depending on logical DUMMY) - - If EXACT is True, the match between variable realm string and one - of the realm string in the list must be exact. Otherwise, the - variable realm must be included in (or include) one of the realm list - strings - - COMMENTS, if not False nor "", will drive the writing of variable - description and units as an xml comment. If it is a string, it - will be printed before this comment string (and this allows for a - line break) - - DUMMY, if not false, should be either 'True', for a standard dummy - label or a string used as the name of all field_refs. If False, - the field_refs look like ?. - - If DUMMY is True and DUMMY_WITH_SHAPE is True, dummy labels wiill - include the highest rank shape requested by the DR, for - information - - Field ids do include the provided PREFIX - - The ping file includes a construct - - For those MIP varnames which have a corresponding field_definition - in a file named like ./inputs/DX_field_defs_.xml (path being - relative to source code location), this latter field_def is - inserted in the ping file (rather than a default one). 
This brings - a set of 'standard' definitions fo variables which can be derived - from DR-standard ones - - """ - name = "" - for r in lrealms: - name += "_" + r.replace(" ", "%") - lvars = [] - for v in svars: - if exact: - if any([v.modeling_realm == r for r in lrealms]): - lvars.append(v) - else: - var_realms = v.modeling_realm.split(" ") - if any([v.modeling_realm == r or r in var_realms - for r in lrealms]): - lvars.append(v) - if context in settings['orphan_variables'] and \ - v.label in settings['orphan_variables'][context]: - lvars.append(v) - lvars.sort(key=lambda x: x.label_without_psuffix) - - # Remove duplicates : want to get one single entry for all variables having - # the same label without psuffix, and one for each having different non-ambiguous label - # Keep the one with the best piority - uniques = [] - best_prio = dict() - for v in lvars: - lna = v.label_non_ambiguous - lwps = v.label_without_psuffix - if (lna not in best_prio) or (lna in best_prio and v.Priority < best_prio[lna].Priority): - best_prio[lna] = v - elif (lwps not in best_prio) or (lwps in best_prio and v.Priority < best_prio[lwps].Priority): - best_prio[lwps] = v - # elif not v.label_without_psuffix in labels : - # uniques.append(v); labels.append(v.label_without_psuffix) - - # lvars=uniques - lvars = best_prio.values() - lvars.sort(key=lambda x: x.label_without_psuffix) - # - if filename is None: - filename = "ping" + name + ".xml" - if filename[-4:] != ".xml": - filename += ".xml" - # - if path_special: - specials = read_special_fields_defs(lrealms, path_special) - else: - specials = False - with open(filename, "w") as fp: - fp.write('\n' % (get_config_variable("varsion"), - get_DR_version())) - fp.write('\n' % `lrealms`) - fp.write('\n' % `exact`) - fp.write(' \n\n') - fp.write('\n' % context) - fp.write("\n") - if settings.get("nemo_sources_management_policy_master_of_the_world", False) and context == 'nemo': - out.write('\n') - if exact: - fp.write("\n") - else: - 
fp.write("\n") - for v in lvars: - if v.label_non_ambiguous: - label = v.label_non_ambiguous - else: - label = v.label_without_psuffix - if v.label in debug: - print "pingFile ... processing %s in table %s, label=%s" % (v.label, v.mipTable, label) - - if specials and label in specials: - line = create_string_from_xml_element(specials[label]).replace("DX_", prefix) - # if 'ta' in label : print "ta is special : "+line - line = line.replace("\n", "").replace("\t", "") - fp.write(' ') - fp.write(line) - else: - fp.write(' ' % (dummys + '"')) - else: - fp.write('?%-16s' % (label + '"') + ' />') - if comments: - # Add units, stdname and long_name as a comment string - if type(comments) == type(""): - fp.write(comments) - fp.write("" % (v.Priority, v.units, v.stdname, v.description)) - fp.write("\n") - if 'atmos' in lrealms or 'atmosChem' in lrealms or 'aerosol' in lrealms: - for tab in ["ap", "ap_bnds", "b", "b_bnds"]: - fp.write('\t\n' - % (prefix, tab)) - if settings.get("nemo_sources_management_policy_master_of_the_world", False) and context == 'nemo': - out.write('\n') - fp.write("\n") - # - print "%3d variables written for %s" % (len(lvars), filename) - # - # Write axis_defs, domain_defs, ... 
read from relevant input/DX_ files - if path_special: - for obj in ["axis", "domain", "grid", "field"]: - copy_obj_from_DX_file(fp, obj, prefix, lrealms, path_special) - fp.write('\n') - - -def copy_obj_from_DX_file(fp, obj, prefix, lrealms, path_special): - # Insert content of DX__defs files (changing prefix) - # print "copying %s defs :"%obj, - subrealms_seen = [] - for realm in lrealms: - for subrealm in realm.split(): - if subrealm in subrealms_seen: - continue - subrealms_seen.append(subrealm) - # print "\tand realm %s"%subrealm, - defs = DX_defs_filename(obj, subrealm, path_special) - if os.path.exists(defs): - with open(defs, "r") as fields: - # print "from %s"%defs - fp.write("\n<%s_definition>\n" % obj) - lines = fields.readlines() - for line in lines: - if not obj + "_definition" in line: - fp.write(line.replace("DX_", prefix)) - fp.write("\n" % obj) - else: - pass - print " no file :%s " % defs - - -def DX_defs_filename(obj, realm, path_special): - # TBS# return prog_path+"/inputs/DX_%s_defs_%s.xml"%(obj,realm) - return path_special + "/DX_%s_defs_%s.xml" % (obj, realm) - - -def get_xml_childs(elt, tag='field', groups=['context', 'field_group', - 'field_definition', 'axis_definition', 'axis', 'domain_definition', - 'domain', 'grid_definition', 'grid', 'interpolate_axis']): - """ - Returns a list of elements in tree ELT - which have tag TAG, by digging in sub-elements - named as in GROUPS - """ - if elt.tag in groups: - rep = [] - for child in elt: - rep.extend(get_xml_childs(child, tag)) - return rep - elif elt.tag == tag: - return [elt] - else: - # print 'Syntax error : tag %s not allowed'%elt.tag - # Case of an unkown tag : don't dig in - return [] - - -def read_xml_elmt_or_attrib(filename, tag='field', attrib=None, printout=False): - """ - Returns a dict of objects tagged TAG in FILENAME, which - - keys are ids - - values depend on ATTRIB - * if ATTRIB is None : object (elt) - * else : values of attribute ATTRIB (None if field does not have attribute 
ATTRIB) - Returns None if filename does not exist - """ - # - rep = dict() - if printout: - print "processing file %s :" % filename, - if os.path.exists(filename): - if printout: - print "OK", filename - root = get_root_of_xml_file(filename) - defs = get_xml_childs(root, tag) - if defs: - for field in defs: - if printout: - print ".", - key = field.attrib['id'] - if attrib is None: - value = field - else: - value = field.attrib.get(attrib, None) - rep[key] = value - if printout: - print - return rep - else: - if printout: - print "No file " - return None - - -def read_special_fields_defs(realms, path_special, printout=False): - special = dict() - subrealms_seen = [] - for realm in realms: - for subrealm in realm.split(): - if subrealm in subrealms_seen: - continue - subrealms_seen.append(subrealm) - d = read_xml_elmt_or_attrib(DX_defs_filename("field", subrealm, path_special), tag='field', - printout=printout) - if d: - special.update(d) - rep = dict() - # Use raw label as key - for r in special: - rep[r.replace("DX_", "")] = special[r] - return rep - - -def highest_rank(svar): - """Returns the shape with the highest needed rank among the CMORvars - referencing a MIPvar with this label - This, assuming dr2xml would handle all needed shape reductions - """ - # mipvarlabel=svar.label_without_area - mipvarlabel = svar.label_without_psuffix - shapes = [] - altdims = set() - for cvar in get_collection('CMORvar').items: - v = get_uid(cvar.vid) - if v.label == mipvarlabel: - try: - st = get_uid(cvar.stid) - try: - sp = get_uid(st.spid) - shape = sp.label - except: - if print_DR_errors: - print "DR Error: issue with spid for " + \ - st.label + " " + v.label + str(cvar.mipTable) - # One known case in DR 1.0.2: hus in 6hPlev - shape = "XY" - if "odims" in st.__dict__: - try: - map(altdims.add, st.odims.split("|")) - except: - print "Issue with odims for " + v.label + " st=" + st.label - except: - print "DR Error: issue with stid for :" + v.label + " in table section :" + 
str(cvar.mipTableSection) - shape = "?st" - else: - # Pour recuperer le spatial_shp pour le cas de variables qui n'ont - # pas un label CMORvar de la DR (ex. HOMEvar ou EXTRAvar) - shape = svar.spatial_shp - if shape: - shapes.append(shape) - # if not shapes : shape="??" - if len(shapes) == 0: - shape = "XY" - elif any(["XY-A" in s for s in shapes]): - shape = "XYA" - elif any(["XY-O" in s for s in shapes]): - shape = "XYO" - elif any(["XY-AH" in s for s in shapes]): - shape = "XYAh" # Zhalf - elif any(["XY-SN" in s for s in shapes]): - shape = "XYSn" # snow levels - elif any(["XY-S" in s for s in shapes]): - shape = "XYSo" # soil levels - elif any(["XY-P" in s for s in shapes]): - shape = "XYA" - elif any(["XY-H" in s for s in shapes]): - shape = "XYA" - # - elif any(["XY-na" in s for s in shapes]): - shape = "XY" # analyser realm, pb possible sur ambiguite singleton - # - elif any(["YB-na" in s for s in shapes]): - shape = "basin_zonal_mean" - elif any(["YB-O" in s for s in shapes]): - shape = "basin_merid_section" - elif any(["YB-R" in s for s in shapes]): - shape = "basin_merid_section_density" - elif any(["S-A" in s for s in shapes]): - shape = "COSP-A" - elif any(["S-AH" in s for s in shapes]): - shape = "COSP-AH" - elif any(["na-A" in s for s in shapes]): - shape = "site-A" - elif any(["Y-A" in s for s in shapes]): - shape = "XYA" # lat-A - elif any(["Y-P" in s for s in shapes]): - shape = "XYA" # lat-P - elif any(["Y-na" in s for s in shapes]): - shape = "lat" - elif any(["TRS-na" in s for s in shapes]): - shape = "TRS" - elif any(["TR-na" in s for s in shapes]): - shape = "TR" - elif any(["L-na" in s for s in shapes]): - shape = "COSPcurtain" - elif any(["L-H40" in s for s in shapes]): - shape = "COSPcurtainH40" - elif any(["S-na" in s for s in shapes]): - shape = "XY" # fine once remapped - elif any(["na-na" in s for s in shapes]): - shape = "0d" # analyser realm - # else : shape="??" 
- else: - shape = "XY" - # - for d in altdims: - dims = d.split(' ') - for dim in dims: - shape += "_" + dim - # - return shape - - -def create_standard_domains(domain_defs): - """ - Add to dictionnary domain_defs the Xios string representation for DR-standard horizontal grids, such as '1deg' - - """ - # Next definition is just for letting the workflow work when using option dummy='include' - # Actually, ping_files for production run at CNRM do not activate variables on that grid (IceSheet vars) - domain_defs['25km'] = create_standard_domain('25km', 1440, 720) - domain_defs['50km'] = create_standard_domain('50km', 720, 360) - domain_defs['100km'] = create_standard_domain('100km', 360, 180) - domain_defs['1deg'] = create_standard_domain('1deg', 360, 180) - domain_defs['2deg'] = create_standard_domain('2deg', 180, 90) - - -def create_standard_domain(resol, ni, nj): - return ' ' % (resol, ni, nj) + \ - ' ' + \ - ' ' - # return ' - # '%(resol,ni,nj) +\ - # ' '+\ - # ' ' + print(w.replace(get_variable_from_lset_without_default('ping_variables_prefix'), ""),) + print() def RequestItemInclude(ri, var_label, freq): @@ -1533,54 +779,3 @@ def realm_is_processed(realm, source_type): return rep -def check_for_file_input(sv, hgrid, pingvars, field_defs, grid_defs, domain_defs, file_defs, printout=False): - """ - - - Add an entry in pingvars - """ - externs = get_variable_from_lset_with_default('fx_from_file', []) - # print "/// sv.label=%s"%sv.label, sv.label in externs ,"hgrid=",hgrid - if sv.label in externs and \ - any([d == hgrid for d in externs[sv.label]]): - pingvar = get_variable_from_lset_without_default('ping_variables_prefix') + sv.label - pingvars.append(pingvar) - # Add a grid made of domain hgrid only - grid_id = "grid_" + hgrid - grid_def = '\n' % (grid_id, hgrid) - - # Add a grid and domain for reading the file (don't use grid above to avoid reampping) - file_domain_id = "remapped_%s_file_domain" % sv.label - domain_defs[file_domain_id] = '' % file_domain_id + \ 
- '' - file_grid_id = "remapped_%s_file_grid" % sv.label - grid_defs[file_grid_id] = '\n' % (file_grid_id, file_domain_id) - if printout: - print domain_defs[file_domain_id] - if printout: - print grid_defs[file_grid_id] - - # Create xml for reading the variable - filename = externs[sv.label][hgrid][get_grid_choice()] - file_id = "remapped_%s_file" % sv.label - field_in_file_id = "%s_%s" % (sv.label, hgrid) - # field_in_file_id=sv.label - file_def = '\n' % \ - (file_id, filename) - file_def += '\n\t'\ - % (field_in_file_id, sv.label, file_grid_id) - file_def += '\n' - file_defs[file_id] = file_def - if printout: - print file_defs[file_id] - # - # field_def='%s'%\ - field_def = '' % (pingvar, grid_id, field_in_file_id) - field_defs[field_in_file_id] = field_def - context_index = get_config_variable("context_index") - context_index[pingvar] = create_xml_element_from_string(field_def) - - if printout: - print field_defs[field_in_file_id] - # diff --git a/dr_interface.py b/dr_interface.py index c5bd2df..4478f48 100644 --- a/dr_interface.py +++ b/dr_interface.py @@ -5,6 +5,7 @@ Interface between the Data Request and dr2xml. """ +from __future__ import print_function, division, absolute_import, unicode_literals from scope import dreqQuery import dreq @@ -16,6 +17,9 @@ def get_uid(id=None): + """ + Get the uid of an element if precised, else the list of all elements. + """ if id is None: return dq.inx.uid else: @@ -23,24 +27,42 @@ def get_uid(id=None): def get_request_by_id_by_sect(id, request): + """ + Get the attribute request of the element id. + """ return dq.inx.iref_by_sect[id].a[request] def get_experiment_label(experiment): + """ + Get the experiment from its label. + """ return dq.inx.experiment.label[experiment][0] def get_collection(collection): + """ + Get the collection corresponding to the collection id. + """ return dq.coll[collection] def get_CMORvarId_by_label(label): + """ + Get the id of the CMOR var corresponding to label. 
+ """ return dq.inx.CMORvar.label[label] def initialize_sc(tierMax): + """ + Initialize module sc variable + """ return dreqQuery(dq=dq, tierMax=tierMax) def get_DR_version(): + """ + Get the version of the DR + """ return dq.version diff --git a/file_splitting.py b/file_splitting.py index b938252..bbde7c7 100644 --- a/file_splitting.py +++ b/file_splitting.py @@ -5,9 +5,17 @@ Tools to compute split frequencies. """ -from settings_interface import get_variable_from_lset_with_default, get_variable_from_lset_without_default +from __future__ import print_function, division, absolute_import, unicode_literals + +from collections import OrderedDict + +# Utilities from utils import dr2xml_grid_error +# Interface to settings dictionaries +from settings_interface import get_variable_from_lset_with_default, get_variable_from_lset_without_default + + compression_factor = None splitfreqs = None @@ -23,13 +31,13 @@ def read_splitfreqs(): return try: freq = open("splitfreqs.dat", "r") - print "Reading split_freqs from file" + print("Reading split_freqs from file") except: splitfreqs = False return lines = freq.readlines() freq.close() - splitfreqs = dict() + splitfreqs = OrderedDict() for line in lines: if line[0] == '#': continue @@ -37,7 +45,7 @@ def read_splitfreqs(): table = line.split()[1] freq = line.split()[2] if varlabel not in splitfreqs: - splitfreqs[varlabel] = dict() + splitfreqs[varlabel] = OrderedDict() # Keep smallest factor for each variablelabel if table not in splitfreqs[varlabel]: splitfreqs[varlabel][table] = freq @@ -62,7 +70,7 @@ def read_compression_factors(): compression_factor = False return lines = fact.readlines() - compression_factor = dict() + compression_factor = OrderedDict() for line in lines: if line[0] == '#': continue @@ -70,7 +78,7 @@ def read_compression_factors(): table = line.split()[1] factor = float(line.split()[2]) if varlabel not in compression_factor: - compression_factor[varlabel] = dict() + compression_factor[varlabel] = 
OrderedDict() # Keep smallest factor for each variablelabel if table not in compression_factor[varlabel] or \ compression_factor[varlabel][table] > factor: @@ -103,11 +111,11 @@ def split_frequency_for_variable(svar, grid, mcfg, context, printout=False): if compression_factor and svar.label in compression_factor and \ svar.mipTable in compression_factor[svar.label]: if printout: - print "Dividing size of %s by %g : %g -> %g" % (svar.label, + print("Dividing size of %s by %g : %g -> %g" % (svar.label, compression_factor[svar.label][svar.mipTable], size, - (size + 0.) / compression_factor[svar.label][svar.mipTable]) + (size + 0.) / compression_factor[svar.label][svar.mipTable])) - size = (size + 0.) / compression_factor[svar.label][svar.mipTable] + size = (size + 0.) / compression_factor[svar.label][svar.mipTable] # else: # # Some COSP outputs are highly compressed # if 'cfad' in svar.label : size/=10. @@ -120,7 +128,7 @@ def split_frequency_for_variable(svar, grid, mcfg, context, printout=False): size_per_year = size * timesteps_per_freq_and_duration(freq, 365, sts) nbyears = max_size / float(size_per_year) if printout: - print "size per year=%s, size=%s, nbyears=%g" % (`size_per_year`, `size`, nbyears) + print("size per year=%s, size=%s, nbyears=%g" % (repr(size_per_year), repr(size), nbyears)) if nbyears > 1.: if nbyears > 500: return "500y" @@ -159,10 +167,10 @@ def split_frequency_for_variable(svar, grid, mcfg, context, printout=False): if nbdays > 1.: return "1d" else: - raise (dr2xml_grids_error("No way to put even a single day of data in %g for frequency %s, var %s," + raise (dr2xml_grid_error("No way to put even a single day of data in %g for frequency %s, var %s," " table %s" % (max_size, freq, svar.label, svar.mipTable))) else: - raise dr2xml_grids_error( + raise dr2xml_grid_error( "Warning: field_size returns 0 for var %s, cannot compute split frequency." 
% svar.label) @@ -206,7 +214,7 @@ def timesteps_per_freq_and_duration(freq, nbdays, sampling_tstep): elif freq == "1hrCM": return (int(float(nbdays) / 31) + 1) * 24. else: - raise (dr2xml_grids_error("Frequency %s is not handled" % freq)) + raise (dr2xml_grid_error("Frequency %s is not handled" % freq)) def field_size(svar, mcfg): @@ -312,6 +320,6 @@ def field_size(svar, mcfg): siz = 1 if siz == 0: - raise dr2xml_grids_error("Cannot compute field_size for var %s and shape %s" % (svar.label, s)) + raise dr2xml_grid_error("Cannot compute field_size for var %s and shape %s" % (svar.label, s)) return siz diff --git a/graphviz/dr2xml_total.py b/graphviz/dr2xml_total.py index c131229..efd51fe 100644 --- a/graphviz/dr2xml_total.py +++ b/graphviz/dr2xml_total.py @@ -470,7 +470,7 @@ def write_xios_file_def(cmv, table, lset, sset, out, cvspath, # global sc - # mpmoine_amelioration:write_xios_file_def: gestion ici des attributs pour lesquels on a recupere des chaines vides (" " est Faux mais est ecrit " "") + # mpmoine_amelioration:write_xios_file_def_for_svar: gestion ici des attributs pour lesquels on a recupere des chaines vides (" " est Faux mais est ecrit " "") # -------------------------------------------------------------------- # Set to NOT-SET field attributes that can be empty strings # -------------------------------------------------------------------- @@ -490,19 +490,19 @@ def write_xios_file_def(cmv, table, lset, sset, out, cvspath, if cmv.type == 'perso': alias = cmv.label else: - # mpmoine_correction:write_xios_file_def: si on a defini un label non ambigu alors on l'untilise comme alias (i.e. le field_ref) - # mpmoine_correction:write_xios_file_def: et pour l'alias seulement (le nom de variable dans le nom de fichier restant svar.label) + # mpmoine_correction:write_xios_file_def_for_svar: si on a defini un label non ambigu alors on l'untilise comme alias (i.e. 
le field_ref) + # mpmoine_correction:write_xios_file_def_for_svar: et pour l'alias seulement (le nom de variable dans le nom de fichier restant svar.label) if cmv.label_non_ambiguous: alias = lset["ping_variables_prefix"] + cmv.label_non_ambiguous else: alias = lset["ping_variables_prefix"] + cmv.label - # mpmoine_correction:write_xios_file_def: suppression des terminaisons en "Clim" pour l'alias (i.e. le field_ref) le cas echeant + # mpmoine_correction:write_xios_file_def_for_svar: suppression des terminaisons en "Clim" pour l'alias (i.e. le field_ref) le cas echeant split_alias = alias.split("Clim") alias = split_alias[0] if pingvars is not None: - # mpmoine_zoom_modif:write_xios_file_def: dans le pingfile, on attend plus les alias complets des variables (CMIP6_