diff --git a/fre/pp/tests/AM5_example/am5.yaml b/fre/pp/tests/AM5_example/am5.yaml
index 4f3e5a4d..15facbd2 100644
--- a/fre/pp/tests/AM5_example/am5.yaml
+++ b/fre/pp/tests/AM5_example/am5.yaml
@@ -38,12 +38,13 @@ fre_properties:
   - &FMSincludes "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include"
   - &momIncludes "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include"
 
+# compile information
+build:
+  compileYaml: "compile.yaml"
+  platformYaml: "yaml_include/platforms.yaml"
+
+
 shared:
-  # compile information
-  compile:
-    compileYaml: &compile_yaml "compile.yaml"
-    platformYaml: "yaml_include/platforms.yaml"
-
   # directories shared across tools
   directories: &shared_directories
     history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history]
@@ -70,46 +71,36 @@ experiments:
   - name: "c96L65_am5f7b12r1_amip"
     pp:
       - "yaml_include/pp.c96_amip.yaml"
-    compile: *compile_yaml
+      - "yaml_include/pp-test.c96_amip.yaml"
   - name: "c96L65_am5f7b12r1_pdclim1850F"
     pp:
       - "yaml_include/pp.c96_clim.yaml"
-    compile: *compile_yaml
   - name: "c96L65_am5f7b12r1_pdclim2010F"
     pp:
       - "yaml_include/pp.c96_clim.yaml"
-    compile: *compile_yaml
   - name: "c96L65_am5f7b12r1_pdclim2010AERF"
     pp:
       - "yaml_include/pp.c96_clim.yaml"
-    compile: *compile_yaml
   - name: "c384L65_am5f7b12r1_amip"
     pp:
       - "yaml_include/pp.c384_amip.yaml"
-    compile: *compile_yaml
   - name: "c384L65_am5f7b12r1_pdclim2010F"
     pp:
       - "yaml_include/pp.c384_clim.yaml"
-    compile: *compile_yaml
   - name: "c384L65_am5f7b12r1_pdclim1850F"
     pp:
       - "yaml_include/pp.c384_clim.yaml"
-    compile: *compile_yaml
   - name: "c384L65_am5f7b12r1_pdclim2010AERF"
     pp:
       - "yaml_include/pp.c384_clim.yaml"
-    compile: *compile_yaml
   - name: "c384L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg"
     pp:
       - "yaml_include/pp.c384_amip.yaml"
       - "yaml_include/pp.om4.yaml"
-    compile: *compile_yaml
   - name: "c96L65_am5f7b12r1_OM4_p25_piControl_noBLING_DynVeg"
     pp:
       - "yaml_include/pp.c96_amip.yaml"
       - "yaml_include/pp.om4.yaml"
-    compile: *compile_yaml
   - name: "c96L65_am5f7b12r1_amip_cosp"
     pp:
       - "yaml_include/pp.c96_amip.yaml"
-    compile: *compile_yaml
diff --git a/fre/pp/tests/AM5_example/compile.yaml b/fre/pp/tests/AM5_example/compile.yaml
deleted file mode 100644
index 5f9a361b..00000000
--- a/fre/pp/tests/AM5_example/compile.yaml
+++ /dev/null
@@ -1,67 +0,0 @@
-compile:
-  experiment: "am5"
-  container_addlibs:
-  baremetal_linkerflags:
-  src:
-    - component: "FMS"
-      repo: "https://github.com/NOAA-GFDL/FMS.git"
-      cppdefs: "-DINTERNAL_FILE_NML -Duse_libMPI -Duse_netCDF"
-      branch: "2022.01"
-      cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF"
-      otherFlags: *FMSincludes
-    - component: "am5_phys"
-      requires: ["FMS"]
-      repo: "https://gitlab.gfdl.noaa.gov/FMS/am5_phys.git"
-      branch: "2022.01"
-      otherFlags: *FMSincludes
-    - component: "GFDL_atmos_cubed_sphere"
-      requires: ["FMS", "am5_phys"]
-      repo: "https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git"
-      cppdefs: "-DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML"
-      branch: "2022.01"
-      paths: ["GFDL_atmos_cubed_sphere/driver/GFDL",
-              "GFDL_atmos_cubed_sphere/model",
-              "GFDL_atmos_cubed_sphere/driver/SHiELD/cloud_diagnosis.F90",
-              "GFDL_atmos_cubed_sphere/driver/SHiELD/gfdl_cloud_microphys.F90",
-              "GFDL_atmos_cubed_sphere/tools",
-              "GFDL_atmos_cubed_sphere/GFDL_tools"]
-      otherFlags: *FMSincludes
-    - component: "atmos_drivers"
-      requires: ["FMS", "am5_phys", "GFDL_atmos_cubed_sphere"]
-      repo: "https://github.com/NOAA-GFDL/atmos_drivers.git"
-      cppdefs: "-DSPMD -DCLIMATE_NUDGE"
-      branch: "2022.01"
-      paths: ["atmos_drivers/coupled"]
-      otherFlags: *FMSincludes
-    - component: "ice_sis"
-      requires: ["FMS", "ice_param", "mom6"]
-      repo: "https://gitlab.gfdl.noaa.gov/FMS/ice_sis.git"
-      branch: "2021.02"
-      otherFlags: !join [*FMSincludes, " ", *momIncludes]
-    - component: "ice_param"
-      repo: "https://github.com/NOAA-GFDL/ice_param.git"
-      cppdefs: "-Duse_yaml -Duse_libMPI -Duse_netCDF"
-      branch: "2021.02"
-      requires: ["FMS", "mom6"]
-      otherFlags: !join [*FMSincludes," ", *momIncludes]
-    - component: "land_lad2"
-      requires: ["FMS"]
-      repo: "https://gitlab.gfdl.noaa.gov/FMS/land_lad2.git"
-      branch: "2022.01"
-      branch: "land_lad2_2021.02"
-      doF90Cpp: True
-      cppdefs: "-DINTERNAL_FILE_NML"
-      otherFlags: *FMSincludes
-    - component: "mom6"
-      requires: ["FMS"]
-      paths: ["mom6/MOM6-examples/src/MOM6/config_src/dynamic", "mom6/MOM6-examples/src/MOM6/config_src/coupled_driver", "mom6/MOM6-examples/src/MOM6/src/*/", "mom6/MOM6-examples/src/MOM6/src/*/*/", "mom6/ocean_BGC/generic_tracers", "mom6/ocean_BGC/mocsy/src"]
-      branch: ["2021.02","dev/gfdl/2018.04.06"]
-      repo: ["https://github.com/NOAA-GFDL/ocean_BGC.git","https://github.com/NOAA-GFDL/MOM6-examples.git"]
-      makeOverrides: 'OPENMP=""'
-      otherFlags: !join [*FMSincludes, " ", *momIncludes]
-    - component: "FMScoupler"
-      paths: ["FMScoupler/full", "FMScoupler/shared"]
-      repo: "https://github.com/NOAA-GFDL/FMScoupler.git"
-      branch: "2022.01"
-      requires: ["FMS", "atmos_drivers", "am5_phys", "land_lad2", "ice_sis", "ice_param", "mom6"]
-      otherFlags: !join [*FMSincludes, " ", *momIncludes]
diff --git a/fre/pp/tests/AM5_example/yaml_include/platforms.yaml b/fre/pp/tests/AM5_example/yaml_include/platforms.yaml
deleted file mode 100644
index 7e1b9f49..00000000
--- a/fre/pp/tests/AM5_example/yaml_include/platforms.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-platforms:
-  - name: ncrc5.intel
-    compiler: intel
-    modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"]
-    modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3]
-    fc: ftn
-    cc: cc
-    mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk"
-    modelRoot: ${HOME}/fremake_canopy/test
-  - name: ncrc5.intel23
-    compiler: intel
-    modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"]
-    modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3]
-    fc: ftn
-    cc: cc
-    mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk"
-    modelRoot: ${HOME}/fremake_canopy/test
-  - name: hpcme.2023
-    compiler: intel
-    RUNenv: [". /spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"]
-    modelRoot: /apps
-    fc: mpiifort
-    cc: mpiicc
-    container: True
-    containerBuild: "podman"
-    containerRun: "apptainer"
diff --git a/fre/pp/tests/AM5_example/yaml_include/pp-test.c96_amip.yaml b/fre/pp/tests/AM5_example/yaml_include/pp-test.c96_amip.yaml
new file mode 100644
index 00000000..ff42a51e
--- /dev/null
+++ b/fre/pp/tests/AM5_example/yaml_include/pp-test.c96_amip.yaml
@@ -0,0 +1,44 @@
+# local reusable variable overrides
+fre_properties:
+  - &custom_interp "200,200"
+
+# directory overrides
+#c96_amip_directories:
+directories:
+  <<: *shared_directories
+  ptmp_dir: "/ptmp/$USER"
+  pp_grid_spec: *GRID_SPEC96
+
+#c96_amip_postprocess:
+postprocess:
+  # pp setting overrides
+  settings:
+    <<: *shared_settings
+    pp_start: *ANA_AMIP_START
+    pp_stop: *ANA_AMIP_END
+    pp_chunk_a: *PP_AMIP_CHUNK96
+    pp_components: "atmos atmos_scalar"
+  switches:
+    <<: *shared_switches
+    do_statics: False
+
+  # main pp instructions
+  components:
+    - type: "atmos_cmip-TEST"
+      sources: "atmos_month_cmip atmos_8xdaily_cmip atmos_daily_cmip"
+      sourceGrid: "cubedsphere"
+      xyInterp: *custom_interp
+      interpMethod: "conserve_order2"
+      inputRealm: 'atmos'
+    - type: "atmos-TEST"
+      sources: "atmos_month"
+      sourceGrid: "cubedsphere"
+      xyInterp: *PP_XYINTERP96
+      interpMethod: "conserve_order2"
+      inputRealm: 'atmos'
+    - type: "atmos_level_cmip-TEST"
+      sources: "atmos_level_cmip"
+      sourceGrid: "cubedsphere"
+      xyInterp: *PP_XYINTERP96
+      interpMethod: "conserve_order2"
+      inputRealm: 'atmos'
diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py
index caf7cad0..3177b3cd 100755
--- a/fre/yamltools/combine_yamls.py
+++ b/fre/yamltools/combine_yamls.py
@@ -44,8 +44,8 @@ def get_compile_paths(mainyaml_dir,comb):
         else:
             raise ValueError("Incorrect platform yaml path given; does not exist.")
     else:
+        py_path=None
         raise ValueError("No platform yaml path given!")
-        #py_path=None
 
     # set compile yaml filepath
     if comb_model["build"]["compileYaml"] is not None:
@@ -55,12 +55,11 @@ def get_compile_paths(mainyaml_dir,comb):
         else:
             raise ValueError("Incorrect compile yaml path given; does not exist.")
     else:
+        cy_path=None
         raise ValueError("No compile yaml path given!")
-        #cy_path=None
 
     return (py_path,cy_path)
 
-
 def experiment_check(mainyaml_dir,comb,experiment):
     """
     Check that the experiment given is an experiment listed in the model yaml.
@@ -102,17 +101,17 @@ def experiment_check(mainyaml_dir,comb,experiment):
             ay_path=[]
             for a in analysisyaml:
                 # prepend the directory containing the yaml
-                a = Path(mainyaml_dir, a)
-                if Path(a).exists():
+                if Path(os.path.join(mainyaml_dir, a)).exists():
                     ay=Path(os.path.join(mainyaml_dir,a))
                     ay_path.append(ay)
                 else:
-                    raise ValueError("Incorrect analysis yaml ath given; does not exist.")
+                    raise ValueError("Incorrect analysis yaml path given; does not exist.")
         else:
             ay_path=None
 
         return (ey_path,ay_path)
 
+###########################################################################################
 ## COMPILE CLASS ##
 class init_compile_yaml():
     """ class holding routines for initalizing compilation yamls """
@@ -134,7 +133,10 @@ def __init__(self,yamlfile,platform,target):
 
         # Name of the combined yaml
         base_name=f"combined-{self.namenopath}.yaml"
-        self.combined = base_name if len(self.mainyaml_dir) == 0 else f"{self.mainyaml_dir}/{base_name}"
+        if len(self.mainyaml_dir) == 0:
+            self.combined = base_name
+        else:
+            self.combined = f"{self.mainyaml_dir}/{base_name}"
 
         print("Combining yaml files: ")
 
@@ -206,6 +208,7 @@ def clean_yaml(self):
         print(f"Combined yaml located here: {os.path.abspath(self.combined)}")
         return self.combined
 
+###########################################################################################
 ## PP CLASS ##
 class init_pp_yaml():
     """ class holding routines for initalizing post-processing yamls """
@@ -248,38 +251,152 @@ def combine_model(self):
 
     def combine_experiment(self):
         """
-        Combine experiment yamls with the defined combined.yaml
+        Combine experiment yamls with the defined combined.yaml.
+        If more than 1 pp yaml defined, return a list of paths.
         """
         # Experiment Check
         (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name)
 
         ## COMBINE EXPERIMENT YAML INFO
-        if ey_path is not None:
+        # If only 1 pp yaml defined, combine with model yaml
+        if ey_path is not None and len(ey_path) == 1:
+            #expyaml_path = os.path.join(mainyaml_dir, i)
+            with open(self.combined,'a',encoding='UTF-8') as f1:
+                with open(ey_path[0],'r',encoding='UTF-8') as f2:
+                    #copy expyaml into combined
+                    shutil.copyfileobj(f2,f1)
+            print(f" experiment yaml: {ey_path[0]}")
+
+        # If more than 1 pp yaml listed, create an intermediate yaml folder to combine
+        # each model and pp yaml into own combined yaml file
+        # (Must be done for aliases defined)
+        elif ey_path is not None and len(ey_path) > 1:
+            pp_yamls = []
             for i in ey_path:
-                #expyaml_path = os.path.join(mainyaml_dir, i)
-                with open(self.combined,'a',encoding='UTF-8') as f1:
+                pp_exp = str(i).rsplit('/', maxsplit=1)[-1]
+
+                #create yamlfiles in folder
+                cwd=os.getcwd()
+                tmp_yaml_folder = os.path.join(cwd,"model_x_exp_yamls")
+                os.makedirs(tmp_yaml_folder, exist_ok=True)
+                shutil.copy(self.combined, os.path.join(tmp_yaml_folder,f"combined-{pp_exp}"))
+                with open(os.path.join(tmp_yaml_folder,f"combined-{pp_exp}"),'a',
+                          encoding='UTF-8') as f1:
                     with open(i,'r',encoding='UTF-8') as f2:
                         #copy expyaml into combined
                         shutil.copyfileobj(f2,f1)
-                print(f" experiment yaml: {i}")
+
+                pp_yamls.append(os.path.join(tmp_yaml_folder,f"combined-{pp_exp}"))
+
+            return pp_yamls
 
     def combine_analysis(self):
         """
         Combine analysis yamls with the defined combined.yaml
+        If more than 1 analysis yaml defined, return a list of paths.
""" # Experiment Check (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) - ## COMBINE EXPERIMENT YAML INFO - if ay_path is not None: + ## COMBINE ANALYSIS YAML INFO + # If only 1 analysis yaml listed, combine with model yaml + if ay_path is not None and len(ay_path) == 1: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(ay_path[0],'r',encoding='UTF-8') as f2: + #copy expyaml into combined + shutil.copyfileobj(f2,f1) + + # If more than 1 analysis yaml listed, create an intermediate yaml folder to combine + # each model and analysis yaml into own combined yaml file + elif ay_path is not None and len(ay_path) > 1: + analysis_yamls=[] for i in ay_path: - #analysisyaml_path = os.path.join(mainyaml_dir, i) - with open(self.combined,'a',encoding='UTF-8') as f1: + analysis = str(i).rsplit('/', maxsplit=1)[-1] + + #create yamlfiles in folder + cwd=os.getcwd() + tmp_yaml_folder = os.path.join(cwd,"model_x_analysis_yamls") + os.makedirs(tmp_yaml_folder, exist_ok=True) + + shutil.copy(self.combined, os.path.join(tmp_yaml_folder,f"combined-{analysis}")) + with open(os.path.join(tmp_yaml_folder,f"combined-{analysis}"),'a', + encoding='UTF-8') as f1: with open(i,'r',encoding='UTF-8') as f2: - #f1.write(f"\n### {i.upper()} settings ###\n") #copy expyaml into combined shutil.copyfileobj(f2,f1) - print(f" analysis yaml: {i}") + + analysis_yamls.append(os.path.join(tmp_yaml_folder,f"combined-{analysis}")) + + return analysis_yamls + + def merge_multiple_yamls(self, pp_list, analysis_list): + """ + Merge separately combined post-processing and analysis + yamls into fully combined yaml (without overwriting). + """ + result = {} + + # If more than one post-processing yaml is listed, update + # dictionary with content from 1st yaml in list + # Looping through rest of yamls listed, compare key value pairs. + # If instance of key is a dictionary in both result and loaded + # yamlfile, update the key in result to + # include the loaded yaml file's value. + if pp_list is not None and len(pp_list) > 1: + result.update(yaml_load(pp_list[0])) + for i in pp_list[1:]: + yf = yaml_load(i) + for key in result: + if key in yf: + if isinstance(result[key],dict) and isinstance(yf[key],dict): + if key == "postprocess": + result[key]["components"] = yf[key]["components"] + result[key]["components"] + # If only one post-processing yaml listed, do nothing + # (already combined in 'combine_experiments' function) + elif pp_list is not None and len(pp_list) == 1: + pass + + # If more than one analysis yaml is listed, update dictionary with content from 1st yaml + # Looping through rest of yamls listed, compare key value pairs. + # If instance of key is a dictionary in both result and loaded yamlfile, update the key + # in result to include the loaded yaml file's value. 
+        if analysis_list is not None and len(analysis_list) > 1:
+            result.update(yaml_load(analysis_list[0]))
+            for i in analysis_list[1:]:
+                yf = yaml_load(i)
+                for key in result:
+                    if key in yf:
+                        if isinstance(result[key],dict) and isinstance(yf[key],dict):
+                            if key == "analysis":
+                                result[key] = yf[key] | result[key]
+        # If only one analysis yaml listed, do nothing
+        # (already combined in 'combine_analysis' function)
+        elif analysis_list is not None and len(analysis_list) == 1:
+            pass
+
+        # Dump the updated result dictionary back into the final combined yaml file
+        with open(self.combined,'w',encoding='UTF-8') as f:
+            yaml.safe_dump(result,f,default_flow_style=False,sort_keys=False)
+        if pp_list is not None:
+            for i in pp_list:
+                exp = str(i).rsplit('/', maxsplit=1)[-1]
+                print(f" experiment yaml: {exp}")
+        if analysis_list is not None:
+            for i in analysis_list:
+                analysis = str(i).rsplit('/', maxsplit=1)[-1]
+                print(f" analysis yaml: {analysis}")
+
+    def remove_tmp_yamlfiles(self, exp_yamls, analysis_yamls):
+        """
+        Clean up separately created model/pp experiment and
+        model/analysis yamls. They are used for final combined
+        yaml but not needed separately.
+        """
+        # Remove intermediate model_x_exp_yamls folder if it is not empty
+        if exp_yamls is not None and Path(exp_yamls[0]).exists():
+            shutil.rmtree(os.path.dirname(exp_yamls[0]))
+        # Remove intermediate model_x_analysis_yamls if not empty
+        if analysis_yamls is not None and Path(analysis_yamls[0]).exists():
+            shutil.rmtree(os.path.dirname(analysis_yamls[0]))
 
     def clean_yaml(self):
         """
@@ -296,12 +413,14 @@ def clean_yaml(self):
         if kc in full_yaml.keys():
             del full_yaml[kc]
 
+        # Dump cleaned dictionary back into combined yaml file
         with open(self.combined,'w') as f:
             yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False)
 
         print(f"Combined yaml located here: {os.path.abspath(self.combined)}")
         return self.combined
 
+###########################################################################################
 ## Functions to combine the yaml files ##
 def get_combined_compileyaml(comb):
     """
@@ -310,9 +429,9 @@ def get_combined_compileyaml(comb):
     comb : combined yaml object
     """
     # Merge model into combined file
-    comb_model = comb.combine_model()
+    comb.combine_model()
     # Merge compile.yaml into combined file
-    comb_compile = comb.combine_compile()
+    comb.combine_compile()
     # Merge platforms.yaml into combined file
     full_combined = comb.combine_platforms()
     # Clean the yaml
@@ -339,6 +458,7 @@ def combined_compile_existcheck(combined,yml,platform,target):
 
     return full_combined
 
+###########################################################################################
 def get_combined_ppyaml(comb):
     """
     Combine the model, experiment, and analysis yamls
@@ -346,11 +466,16 @@ def get_combined_ppyaml(comb):
     comb : combined yaml object
     """
     # Merge model into combined file
-    comb_model = comb.combine_model()
+    comb.combine_model()
     # Merge pp experiment yamls into combined file
-    comb_exp = comb.combine_experiment()
-    # Merge pp analysis yamls, if defined, into combined file
+    comb_pp = comb.combine_experiment()
+    # Merge analysis yamls, if defined, into combined file
     comb_analysis = comb.combine_analysis()
+    # Merge model/pp and model/analysis yamls if more than 1 is defined
+    # (without overwriting the yaml)
+    comb.merge_multiple_yamls(comb_pp, comb_analysis)
+    # Remove separate combined pp yaml files
+    comb.remove_tmp_yamlfiles(comb_pp, comb_analysis)
     # Clean the yaml
     full_combined = comb.clean_yaml()
@@ -358,6 +483,10 @@ def get_combined_ppyaml(comb):
 
 ###########################################################################################
 def consolidate_yamls(yamlfile,experiment,platform,target,use):
+    """
+    Depending on `use` argument passed, either create the final
+    combined yaml for compilation or post-processing
+    """
     # Regsiter tag handler
     yaml.add_constructor('!join', join_constructor)
 
diff --git a/fre/yamltools/tests/AM5_example/am5.yaml b/fre/yamltools/tests/AM5_example/am5.yaml
index c57c997c..1cb07353 100644
--- a/fre/yamltools/tests/AM5_example/am5.yaml
+++ b/fre/yamltools/tests/AM5_example/am5.yaml
@@ -70,3 +70,7 @@ experiments:
   - name: "c96L65_am5f7b12r1_amip"
     pp:
       - "pp_yamls/pp.c96_amip.yaml"
+      - "pp_yamls/pp-TEST.c96_amip.yaml"
+    analysis:
+      - "analysis_yamls/clouds.yaml"
+      - "analysis_yamls/land.yaml"
diff --git a/fre/yamltools/tests/AM5_example/analysis_yamls/clouds.yaml b/fre/yamltools/tests/AM5_example/analysis_yamls/clouds.yaml
new file mode 100644
index 00000000..a8a64a3b
--- /dev/null
+++ b/fre/yamltools/tests/AM5_example/analysis_yamls/clouds.yaml
@@ -0,0 +1,11 @@
+analysis:
+  clouds-test:
+    required:
+      data_frequency: 'mon'
+      date_range: [*ANA_AMIP_START, *ANA_AMIP_END]
+      workflow:
+        components: ["atmos-test"]
+        cumulative: False
+      product: 't-test'
+      script_frequency: *PP_AMIP_CHUNK96
+      switch: True
diff --git a/fre/yamltools/tests/AM5_example/analysis_yamls/land.yaml b/fre/yamltools/tests/AM5_example/analysis_yamls/land.yaml
new file mode 100644
index 00000000..e23ff554
--- /dev/null
+++ b/fre/yamltools/tests/AM5_example/analysis_yamls/land.yaml
@@ -0,0 +1,11 @@
+analysis:
+  land-test:
+    required:
+      data_frequency: 'mon-test'
+      date_range: [*ANA_AMIP_START, *ANA_AMIP_END]
+      workflow:
+        components: ["land-test"]
+        cumulative: False
+      product: 'ts-test'
+      script_frequency: 'R1'
+      switch: True
diff --git a/fre/yamltools/tests/AM5_example/pp_yamls/pp-TEST.c96_amip.yaml b/fre/yamltools/tests/AM5_example/pp_yamls/pp-TEST.c96_amip.yaml
new file mode 100644
index 00000000..963f8e18
--- /dev/null
+++ b/fre/yamltools/tests/AM5_example/pp_yamls/pp-TEST.c96_amip.yaml
@@ -0,0 +1,38 @@
+# local reusable variable overrides
+fre_properties:
+  - &custom_interp "180,360"
+
+# directory overrides
+#c96_amip_directories:
+directories:
+  <<: *shared_directories
+  ptmp_dir: "/ptmp/$USER"
+  pp_grid_spec: *GRID_SPEC96
+
+#c96_amip_postprocess:
+postprocess:
+  # pp setting overrides
+  settings:
+    <<: *shared_settings
+    pp_start: *ANA_AMIP_START
+    pp_stop: *ANA_AMIP_END
+    pp_chunk_a: *PP_AMIP_CHUNK96
+    pp_components: "atmos atmos_scalar"
+  switches:
+    <<: *shared_switches
+    do_statics: False
+
+  # main pp instructions
+  components:
+    - type: "atmos_cmip-TEST"
+      sources: "atmos_month_cmip atmos_8xdaily_cmip atmos_daily_cmip"
+      sourceGrid: "cubedsphere"
+      xyInterp: *custom_interp
+      interpMethod: "conserve_order2"
+      inputRealm: 'atmos'
+    - type: "atmos-TEST"
+      sources: "atmos_month"
+      sourceGrid: "cubedsphere"
+      xyInterp: *PP_XYINTERP96
+      interpMethod: "conserve_order2"
+      inputRealm: 'atmos'
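
Note on the `!join` tag used throughout these yaml fragments: `consolidate_yamls()` registers a custom constructor with `yaml.add_constructor('!join', join_constructor)`, so anchored strings defined in the model yaml can be concatenated at load time inside the combined file. The snippet below is a minimal, self-contained sketch of that pattern for anyone poking at the combined yaml locally; the body of `join_constructor` here (plain string concatenation of the sequence items) is an illustrative assumption, not a copy of the implementation in fre/yamltools.

```python
import yaml

def join_constructor(loader, node):
    """Concatenate the items of a !join sequence into a single string."""
    seq = loader.construct_sequence(node)
    return "".join(str(item) for item in seq)

# Mirrors yaml.add_constructor('!join', join_constructor) in combine_yamls.py,
# but attaches the tag to SafeLoader so yaml.safe_load() can resolve it.
yaml.add_constructor("!join", join_constructor, Loader=yaml.SafeLoader)

doc = """
fre_properties:
  - &name "am5"
  - &platform "ncrc5.intel"
history_dir: !join [/archive/$USER/, *name, /, *platform, /history]
"""

print(yaml.safe_load(doc)["history_dir"])
# -> /archive/$USER/am5/ncrc5.intel/history
```

This is also why `combine_experiment()` and `combine_analysis()` concatenate the model yaml with each pp/analysis fragment before parsing ("Must be done for aliases defined"): the `*shared_directories`, `*GRID_SPEC96`, and `*ANA_AMIP_*` aliases only resolve once they sit in the same document as the anchors from the model yaml. When the parsed dictionaries are merged, `result[key] = yf[key] | result[key]` keeps the values already in `result` on key collisions (the right-hand operand of a Python 3.9+ dict union wins), which is what the "without overwriting" docstring refers to.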