From 4d77e450600a3bad899eb48261d1a610391c91d2 Mon Sep 17 00:00:00 2001 From: emilyhcliu <36091766+emilyhcliu@users.noreply.github.com> Date: Tue, 14 Jan 2025 14:28:23 -0500 Subject: [PATCH] add components for surface snow data (#14) This PR includes the following changes: 1. Add mapping and Python for surface snow data 2. Move test configuration from /spoc/dump/config to /spoc/ush/test 3. Remove an old test configuration file 4. Update README for test (/spoc/ush/test) 5. Change filename convention from `bufr2ioda` to `bufr` (more general) 6. Fix Python coding norm errors (ctest is being added to obsForge - in progress: [obsForge PR #4](https://github.com/NOAA-EMC/obsForge/pull/4)) --- ...r2ioda_script_backend_satwnd_amv_goes.yaml | 58 ------ dump/mapping/bufr_satwnd_amv_goes.py | 31 +-- .../mapping/bufr_satwnd_amv_goes_mapping.yaml | 15 +- dump/mapping/bufr_sfcsno.py | 189 ++++++++++++++++++ dump/mapping/bufr_sfcsno_mapping.yaml | 96 +++++++++ ush/test/README.md | 36 ++-- .../bufr_bufr4backend_satwnd_amv_goes.yaml | 7 +- ush/test/config/bufr_bufr4backend_sfcsno.yaml | 21 ++ .../bufr_script4backend_satwnd_amv_goes.yaml | 7 +- .../config/bufr_script4backend_sfcsno.yaml | 23 +++ ush/test/encodeBufr.sh | 12 +- 11 files changed, 393 insertions(+), 102 deletions(-) delete mode 100644 dump/config/bufr2ioda_script_backend_satwnd_amv_goes.yaml create mode 100755 dump/mapping/bufr_sfcsno.py create mode 100755 dump/mapping/bufr_sfcsno_mapping.yaml rename dump/config/bufr_bufr_backend_satwnd_amv_goes.yaml => ush/test/config/bufr_bufr4backend_satwnd_amv_goes.yaml (88%) create mode 100644 ush/test/config/bufr_bufr4backend_sfcsno.yaml rename dump/config/bufr_script_backend_satwnd_amv_goes.yaml => ush/test/config/bufr_script4backend_satwnd_amv_goes.yaml (89%) create mode 100644 ush/test/config/bufr_script4backend_sfcsno.yaml diff --git a/dump/config/bufr2ioda_script_backend_satwnd_amv_goes.yaml b/dump/config/bufr2ioda_script_backend_satwnd_amv_goes.yaml deleted file mode 100644 
index b9f19ce..0000000 --- a/dump/config/bufr2ioda_script_backend_satwnd_amv_goes.yaml +++ /dev/null @@ -1,58 +0,0 @@ -time window: - begin: "2018-04-14T21:00:00Z" - end: "2023-12-15T03:00:00Z" - -observations: -- obs space: - name: "satwind_goes-16" - observed variables: [windSpeed, windDirection] - derived variables: [windEastward, windNorthward] - simulated variables: [windEastward, windNorthward] - obsdatain: - engine: - type: script - script file: "bufr2ioda_satwnd_amv_goes.py" - args: - input_path: "./testinput/2021080100/gdas.t00z.satwnd.tm00.bufr_d" - mapping_path: "./bufr2ioda_satwnd_amv_goes_mapping.yaml" - category: "goes-16" - obsdataout: - engine: - type: H5File - obsfile: "./testoutput/2021080100/script_backend/gdas.t00z.satwnd.abi_goes-16.tm00.nc" - -- obs space: - name: "satwind_goes-17" - observed variables: [windSpeed, windDirection] - derived variables: [windEastward, windNorthward] - simulated variables: [windEastward, windNorthward] - obsdatain: - engine: - type: script - script file: "bufr2ioda_satwnd_amv_goes.py" - args: - input_path: "./testinput/2021080100/gdas.t00z.satwnd.tm00.bufr_d" - mapping_path: "./bufr2ioda_satwnd_amv_goes_mapping.yaml" - category: "goes-17" - obsdataout: - engine: - type: H5File - obsfile: "./testoutput/2021080100/script_backend/gdas.t00z.satwnd.abi_goes-17.tm00.nc" - -- obs space: - name: "satwind_goes-18" - observed variables: [windSpeed, windDirection] - derived variables: [windEastward, windNorthward] - simulated variables: [windEastward, windNorthward] - obsdatain: - engine: - type: script - script file: "bufr2ioda_satwnd_amv_goes.py" - args: - input_path: "./testinput/2021080100/gdas.t00z.satwnd.tm00.bufr_d" - mapping_path: "./bufr2ioda_satwnd_amv_goes_mapping.yaml" - category: "goes-18" - obsdataout: - engine: - type: H5File - obsfile: "./testoutput/2021080100/script_backend/gdas.t00z.satwnd.abi_goes-18.tm00.nc" diff --git a/dump/mapping/bufr_satwnd_amv_goes.py b/dump/mapping/bufr_satwnd_amv_goes.py index 
d062a40..a687221 100755 --- a/dump/mapping/bufr_satwnd_amv_goes.py +++ b/dump/mapping/bufr_satwnd_amv_goes.py @@ -5,8 +5,8 @@ import time import numpy as np import bufr -from pyioda.ioda.Engines.Bufr import Encoder as iodaEncoder -from bufr.encoders.netcdf import Encoder as netcdfEncoder +from pyioda.ioda.Engines.Bufr import Encoder as iodaEncoder +from bufr.encoders.netcdf import Encoder as netcdfEncoder from wxflow import Logger # Initialize Logger @@ -14,17 +14,18 @@ log_level = os.getenv('LOG_LEVEL', 'INFO') logger = Logger('BUFR2IODA_satwnd_amv_goes.py', level=log_level, colored_log=False) + def logging(comm, level, message): """ Logs a message to the console or log file, based on the specified logging level. - This function ensures that logging is only performed by the root process (`rank 0`) - in a distributed computing environment. The function maps the logging level to + This function ensures that logging is only performed by the root process (`rank 0`) + in a distributed computing environment. The function maps the logging level to appropriate logger methods and defaults to the 'INFO' level if an invalid level is provided. Parameters: comm: object - The communicator object, typically from a distributed computing framework + The communicator object, typically from a distributed computing framework (e.g., MPI). It must have a `rank()` method to determine the process rank. level: str The logging level as a string. Supported levels are: @@ -33,7 +34,7 @@ def logging(comm, level, message): - 'WARNING' - 'ERROR' - 'CRITICAL' - If an invalid level is provided, a warning will be logged, and the level + If an invalid level is provided, a warning will be logged, and the level will default to 'INFO'. message: str The message to be logged. 
@@ -73,6 +74,7 @@ def logging(comm, level, message): # Call the logging method log_method(message) + def _make_description(mapping_path, update=False): description = bufr.encoders.Description(mapping_path) @@ -116,6 +118,7 @@ def _make_description(mapping_path, update=False): return description + def compute_wind_components(wdir, wspd): """ Compute the U and V wind components from wind direction and wind speed. @@ -130,9 +133,10 @@ def compute_wind_components(wdir, wspd): wdir_rad = np.radians(wdir) # Convert degrees to radians u = -wspd * np.sin(wdir_rad) v = -wspd * np.cos(wdir_rad) - + return u.astype(np.float32), v.astype(np.float32) + def _get_obs_type(swcm, chanfreq): """ Determine the observation type based on `swcm` and `chanfreq`. @@ -164,6 +168,7 @@ def _get_obs_type(swcm, chanfreq): return obstype + def _make_obs(comm, input_path, mapping_path): # Get container from mapping file first @@ -175,7 +180,7 @@ def _make_obs(comm, input_path, mapping_path): logging(comm, 'DEBUG', f'category map = {container.get_category_map()}') # Add new/derived data into container - for cat in container.all_sub_categories(): + for cat in container.all_sub_categories(): logging(comm, 'DEBUG', f'category = {cat}') @@ -193,7 +198,7 @@ def _make_obs(comm, input_path, mapping_path): container.add('variables/windNorthward', wob, paths, cat) else: - # Add new variables: ObsType/windEastward & ObsType/windNorthward + # Add new variables: ObsType/windEastward & ObsType/windNorthward swcm = container.get('variables/windComputationMethod', cat) chanfreq = container.get('variables/sensorCentralFrequency', cat) @@ -209,7 +214,7 @@ def _make_obs(comm, input_path, mapping_path): container.add('variables/obstype_uwind', obstype, paths, cat) container.add('variables/obstype_vwind', obstype, paths, cat) - # Add new variables: ObsValue/windEastward & ObsValue/windNorthward + # Add new variables: ObsValue/windEastward & ObsValue/windNorthward wdir = container.get('variables/windDirection', cat) 
wspd = container.get('variables/windSpeed', cat) @@ -231,6 +236,7 @@ def _make_obs(comm, input_path, mapping_path): return container + def create_obs_group(input_path, mapping_path, category, env): comm = bufr.mpi.Comm(env["comm_name"]) @@ -250,7 +256,7 @@ def create_obs_group(input_path, mapping_path, category, env): container = _make_obs(comm, input_path, mapping_path) - # Gather data from all tasks into all tasks. Each task will have the complete record + # Gather data from all tasks into all tasks. Each task will have the complete record logging(comm, 'INFO', f'Gather data from all tasks into all tasks') container.all_gather(comm) @@ -269,6 +275,7 @@ def create_obs_group(input_path, mapping_path, category, env): logging(comm, 'INFO', f'Return the encoded data for {category}') return data + def create_obs_file(input_path, mapping_path, output_path): comm = bufr.mpi.Comm("world") @@ -279,7 +286,7 @@ def create_obs_file(input_path, mapping_path, output_path): # Encode the data if comm.rank() == 0: - netcdfEncoder(description).encode(container, output_path) + netcdfEncoder(description).encode(container, output_path) logging(comm, 'INFO', f'Return the encoded data') diff --git a/dump/mapping/bufr_satwnd_amv_goes_mapping.yaml b/dump/mapping/bufr_satwnd_amv_goes_mapping.yaml index 26a75f5..9d0a0f6 100755 --- a/dump/mapping/bufr_satwnd_amv_goes_mapping.yaml +++ b/dump/mapping/bufr_satwnd_amv_goes_mapping.yaml @@ -17,6 +17,14 @@ bufr: minute: "*/MINU" second: "*/SECO" + dataReceiptTime: + datetime: + year: "*/RCYR" + month: "*/RCMO" + day: "*/RCDY" + hour: "*/RCHR" + minute: "*/RCMI" + latitude: query: "*/CLATH" @@ -114,7 +122,12 @@ encoder: # MetaData - name: "MetaData/dateTime" source: variables/timestamp - longName: "Datetime" + longName: "Observation Time" + units: "seconds since 1970-01-01T00:00:00Z" + + - name: "MetaData/dataReceiptTime" + source: variables/dataReceiptTime + longName: "Observation Receipt Time" units: "seconds since 1970-01-01T00:00:00Z" - name: 
"MetaData/latitude" diff --git a/dump/mapping/bufr_sfcsno.py b/dump/mapping/bufr_sfcsno.py new file mode 100755 index 0000000..03a9102 --- /dev/null +++ b/dump/mapping/bufr_sfcsno.py @@ -0,0 +1,189 @@ +#!/usr/bin/env python3 +import sys +import os +import argparse +import time +import numpy as np +import bufr +from pyioda.ioda.Engines.Bufr import Encoder as iodaEncoder +from bufr.encoders.netcdf import Encoder as netcdfEncoder +from wxflow import Logger + +# Initialize Logger +# Get log level from the environment variable, default to 'INFO it not set +log_level = os.getenv('LOG_LEVEL', 'INFO') +logger = Logger('BUFR2IODA_sfcsno.py', level=log_level, colored_log=False) + + +def logging(comm, level, message): + """ + Logs a message to the console or log file, based on the specified logging level. + + This function ensures that logging is only performed by the root process (`rank 0`) + in a distributed computing environment. The function maps the logging level to + appropriate logger methods and defaults to the 'INFO' level if an invalid level is provided. + + Parameters: + comm: object + The communicator object, typically from a distributed computing framework + (e.g., MPI). It must have a `rank()` method to determine the process rank. + level: str + The logging level as a string. Supported levels are: + - 'DEBUG' + - 'INFO' + - 'WARNING' + - 'ERROR' + - 'CRITICAL' + If an invalid level is provided, a warning will be logged, and the level + will default to 'INFO'. + message: str + The message to be logged. + + Behavior: + - Logs messages only on the root process (`comm.rank() == 0`). + - Maps the provided logging level to a method of the logger object. + - Defaults to 'INFO' and logs a warning if an invalid logging level is given. + - Supports standard logging levels for granular control over log verbosity. 
+ + Example: + >>> logging(comm, 'DEBUG', 'This is a debug message.') + >>> logging(comm, 'ERROR', 'An error occurred!') + + Notes: + - Ensure that a global `logger` object is configured before using this function. + - The `comm` object should conform to MPI-like conventions (e.g., `rank()` method). + """ + + if comm.rank() == 0: + # Define a dictionary to map levels to logger methods + log_methods = { + 'DEBUG': logger.debug, + 'INFO': logger.info, + 'WARNING': logger.warning, + 'ERROR': logger.error, + 'CRITICAL': logger.critical, + } + + # Get the appropriate logging method, default to 'INFO' + log_method = log_methods.get(level.upper(), logger.info) + + if log_method == logger.info and level.upper() not in log_methods: + # Log a warning if the level is invalid + logger.warning(f'log level = {level}: not a valid level --> set to INFO') + + # Call the logging method + log_method(message) + + +def _mask_container(container, mask): + + new_container = bufr.DataContainer() + for var_name in container.list(): + var = container.get(var_name) + paths = container.get_paths(var_name) + new_container.add(var_name, var[mask], paths) + + return new_container + + +def _make_description(mapping_path, update=False): + + description = bufr.encoders.Description(mapping_path) + + return description + + +def _make_obs(comm, input_path, mapping_path): + """ + Create the ioda snow depth observations: + - reads state of ground (sogr) and snow depth (snod) + - applys sogr conditions to the missing snod values + - removes the filled/missing snow values and creates the masked container + + Parameters + ---------- + comm: object + The communicator object (e.g., MPI) + input_path: str + The input bufr file + mapping_path: str + The input bufr2ioda mapping file + """ + + # Get container from mapping file first + logging(comm, 'INFO', 'Get container from bufr') + container = bufr.Parser(input_path, mapping_path).parse(comm) + + logging(comm, 'DEBUG', f'container list (original): 
{container.list()}') + + # Add new/derived data into container + sogr = container.get('variables/groundState') + snod = container.get('variables/totalSnowDepth') + snod[(sogr <= 11.0) & snod.mask] = 0.0 + snod[(sogr == 15.0) & snod.mask] = 0.0 + container.replace('variables/totalSnowDepth', snod) + snod_upd = container.get('variables/totalSnowDepth') + + masked_container = _mask_container(container, (~snod.mask)) + + return masked_container + + +def create_obs_group(input_path, mapping_path, env): + + comm = bufr.mpi.Comm(env["comm_name"]) + + description = _make_description(mapping_path, update=False) + container = _make_obs(comm, input_path, mapping_path) + + # Gather data from all tasks into all tasks. Each task will have the complete record + logging(comm, 'INFO', f'Gather data from all tasks into all tasks') + container.all_gather(comm) + + # Encode the data + logging(comm, 'INFO', f'Encode data') + data = next(iter(iodaEncoder(mapping_path).encode(container).values())) + + logging(comm, 'INFO', f'Return the encoded data') + + return data + + +def create_obs_file(input_path, mapping_path, output_path): + + comm = bufr.mpi.Comm("world") + container = _make_obs(comm, input_path, mapping_path) + container.gather(comm) + + description = _make_description(mapping_path, update=False) + + # Encode the data + if comm.rank() == 0: + netcdfEncoder(description).encode(container, output_path) + + logging(comm, 'INFO', f'Return the encoded data') + + +if __name__ == '__main__': + + start_time = time.time() + + bufr.mpi.App(sys.argv) + comm = bufr.mpi.Comm("world") + + # Required input arguments as positional arguments + parser = argparse.ArgumentParser(description="Convert BUFR to NetCDF using a mapping file.") + parser.add_argument('input', type=str, help='Input BUFR file') + parser.add_argument('mapping', type=str, help='BUFR2IODA Mapping File') + parser.add_argument('output', type=str, help='Output NetCDF file') + + args = parser.parse_args() + mapping = args.mapping + 
infile = args.input + output = args.output + + create_obs_file(infile, mapping, output) + + end_time = time.time() + running_time = end_time - start_time + logging(comm, 'INFO', f'Total running time: {running_time}') diff --git a/dump/mapping/bufr_sfcsno_mapping.yaml b/dump/mapping/bufr_sfcsno_mapping.yaml new file mode 100755 index 0000000..adac740 --- /dev/null +++ b/dump/mapping/bufr_sfcsno_mapping.yaml @@ -0,0 +1,96 @@ +# (C) Copyright 2024 NOAA/NWS/NCEP/EMC +# + +bufr: + variables: + # MetaData + timestamp: + datetime: + year: "*/YEAR[1]" + month: "*/MNTH[1]" + day: "*/DAYS[1]" + hour: "*/HOUR[1]" + minute: "*/MINU[1]" + + dataReceiptTime: + datetime: + year: "[*/RCYR, */RCPTIM{1}/RCYR]" + month: "[*/RCMO, */RCPTIM{1}/RCMO]" + day: "[*/RCDY, */RCPTIM{1}/RCDY]" + hour: "[*/RCHR, */RCPTIM{1}/RCHR]" + minute: "[*/RCMI, */RCPTIM{1}/RCMI]" + + latitude: + query: "[*/CLAT, */CLATH]" + + longitude: + query: "[*/CLON, */CLONH]" + + stationIdentification: + query: "*/RPID" + + stationElevation: + query: "[*/SELV, */HSMSL]" + type: float + + # ObsValue + totalSnowDepth: + query: "[*/SNWSQ1/TOSD, */MTRMSC/TOSD, */STGDSNDM/TOSD]" + + groundState: + query: "[*/GRDSQ1/SOGR, */STGDSNDM/SOGR]" + +encoder: + variables: + + # MetaData + - name: "MetaData/dateTime" + coordinates: "longitude latitude" + source: variables/timestamp + longName: "Datetime" + units: "seconds since 1970-01-01T00:00:00Z" + + - name: "MetaData/dataReceiptTime" + source: variables/dataReceiptTime + longName: "Observation Receipt Time" + units: "seconds since 1970-01-01T00:00:00Z" + + - name: "MetaData/latitude" + coordinates: "longitude latitude" + source: variables/latitude + longName: "Latitude" + units: "degree_north" + range: [-90, 90] + + - name: "MetaData/longitude" + coordinates: "longitude latitude" + source: variables/longitude + longName: "Longitude" + units: "degree_east" + range: [-180, 180] + + - name: "MetaData/stationElevation" + coordinates: "longitude latitude" + source: 
variables/stationElevation + longName: "Height of Station" + units: "m" + + - name: "MetaData/stationIdentification" + coordinates: "longitude latitude" + source: variables/stationIdentification + longName: "Identification of Observing Location" + units: "index" + + # ObsValue + - name: "ObsValue/totalSnowDepth" + coordinates: "longitude latitude" + source: variables/totalSnowDepth + longName: "Total Snow Depth" + units: "m" + + - name: "ObsValue/groundState" + coordinates: "longitude latitude" + source: variables/groundState + longName: "STATE OF THE GROUND" + units: "index" + diff --git a/ush/test/README.md b/ush/test/README.md index 9254b93..4bb167a 100644 --- a/ush/test/README.md +++ b/ush/test/README.md @@ -1,5 +1,5 @@ ## Test bufr_query mapping, Python converter script, and ioda configuration YAML in obsForge -This is a prototype for testing BUFR to IODA conversion and is still evolving. +This is a prototype for testing BUFR to NETCDF and other formats (IODA, ZARR, ...etc.) conversion and is still evolving. ## Prerequisite - Clone and build obsForge @@ -17,10 +17,6 @@ This is a prototype for testing BUFR to IODA conversion and is still evolving. git checkout feature/bufr_in_parallel - cd ../spoc - - git checkout feature/dump_satwind_goes - cd ../../ ./build.sh @@ -33,20 +29,22 @@ This is a prototype for testing BUFR to IODA conversion and is still evolving. ``` ## Elements should be in the working directory from SPOC -- Required input files: +Creat a working directory (e.g. 
work_dir) + +- Required input files in ./work_dir: - - bufr_satwnd_amv_goes.py + - bufr_satwnd_amv_goes.py (copied /spoc/dump/mapping) - - bufr_satwnd_amv_goes_mapping.yaml + - bufr_satwnd_amv_goes_mapping.yaml (copied from /spoc/dump/mapping) - - bufr_bufr_backend_satwnd_amv_goes.yaml + - bufr_bufr4backend_satwnd_amv_goes.yaml (copied from /spoc/ush/test/config) - - bufr_script_backend_satwnd_amv_goes.yaml + - bufr_script4backend_satwnd_amv_goes.yaml (copied from /spoc/ush/test/config) - - testinput/2021080100/gdas.t00z.satwnd.tm00.bufr_d (copied from the global dump) + - /testinput/2021080100/gdas.t00z.satwnd.tm00.bufr_d (copied from the global dump) -- Processing shell script: - - encodeBufr.sh +- Processing shell script in ./work_dir : + - ./encodeBufr.sh (copied from /spoc/ush/test) ## How to run the test shell script - Get the help page for usage @@ -60,7 +58,7 @@ This is a prototype for testing BUFR to IODA conversion and is still evolving. : observation type to create (e.g., satwnd_amv_goes, atms, sfcsno) : sensor (e.g., abi, atms); for non-satellite dta, sensor is usually obstype (e.g., sfcsno) : split the data into multiple files based on category (false or true) - : mode of operation (e.g., bufr_backend, script_backend, bufr2netcdf, script2netcdf) + : mode of operation (e.g., bufr4backend, script4backend, bufr2netcdf, script2netcdf) : number of processors (positive integer to run with MPI, or zero for serial execution) ``` @@ -75,11 +73,11 @@ This is a prototype for testing BUFR to IODA conversion and is still evolving. 
``` obsforge_dir="/scratch1/NCEPDEV/da/Emily.Liu/EMC-obsForge/obsForge" - encodeBufr.sh ${obsforge_dir} 2021080100 satwnd satwnd_amv_goes abi true script_backend 4 + encodeBufr.sh ${obsforge_dir} 2021080100 satwnd satwnd_amv_goes abi true script4backend 4 - encodeBufr.sh ${obsforge_dir} 2021080100 sfcsno sfcsno sfcsno false script_backend 4 + encodeBufr.sh ${obsforge_dir} 2021080100 sfcsno sfcsno sfcsno false script4backend 4 - encodeBufr.sh ${obsforge_dir} 2021080100 atms atms atms true script_backend 4 + encodeBufr.sh ${obsforge_dir} 2021080100 atms atms atms true script4backend 4 ``` - Run with user-defined mode and number of processes @@ -89,7 +87,7 @@ This is a prototype for testing BUFR to IODA conversion and is still evolving. encodeBufr.sh "" "" "" "" "" "" script2netcdf" 0 - encodeBufr.sh "" "" "" "" "" "" bufr_backend" 12 + encodeBufr.sh "" "" "" "" "" "" bufr4backend" 12 - encodeBufr.sh "" "" "" "" "" "" script_backend" 4 + encodeBufr.sh "" "" "" "" "" "" script4backend" 4 ``` diff --git a/dump/config/bufr_bufr_backend_satwnd_amv_goes.yaml b/ush/test/config/bufr_bufr4backend_satwnd_amv_goes.yaml similarity index 88% rename from dump/config/bufr_bufr_backend_satwnd_amv_goes.yaml rename to ush/test/config/bufr_bufr4backend_satwnd_amv_goes.yaml index 7bbe7fa..aa3c090 100644 --- a/dump/config/bufr_bufr_backend_satwnd_amv_goes.yaml +++ b/ush/test/config/bufr_bufr4backend_satwnd_amv_goes.yaml @@ -1,6 +1,7 @@ time window: begin: "2018-04-14T21:00:00Z" end: "2023-12-15T03:00:00Z" + bound to include: begin observations: - obs space: @@ -19,7 +20,7 @@ observations: obsdataout: engine: type: H5File - obsfile: "./testoutput/2021080100/bufr_backend/gdas.t00z.satwnd.abi_goes-16.tm00.nc" + obsfile: "./testoutput/2021080100/bufr4backend/gdas.t00z.satwnd.abi_goes-16.tm00.nc" - obs space: name: "satwind_goes-17" @@ -37,7 +38,7 @@ observations: obsdataout: engine: type: H5File - obsfile: "./testoutput/2021080100/bufr_backend/gdas.t00z.satwnd.abi_goes-17.tm00.nc" + obsfile: 
"./testoutput/2021080100/bufr4backend/gdas.t00z.satwnd.abi_goes-17.tm00.nc" - obs space: name: "satwind_goes-18" @@ -55,4 +56,4 @@ observations: obsdataout: engine: type: H5File - obsfile: "./testoutput/2021080100/bufr_backend/gdas.t00z.satwnd.abi_goes-18.tm00.nc" + obsfile: "./testoutput/2021080100/bufr4backend/gdas.t00z.satwnd.abi_goes-18.tm00.nc" diff --git a/ush/test/config/bufr_bufr4backend_sfcsno.yaml b/ush/test/config/bufr_bufr4backend_sfcsno.yaml new file mode 100644 index 0000000..7c73900 --- /dev/null +++ b/ush/test/config/bufr_bufr4backend_sfcsno.yaml @@ -0,0 +1,21 @@ +time window: + begin: "2018-04-14T21:00:00Z" + end: "2023-12-15T03:00:00Z" + bound to include: begin + +observations: +- obs space: + name: "sfcsno_snow" + distribution: + name: Halo + halo size: 250e3 + simulated variables: [totalSnowDepth] + obsdatain: + engine: + type: bufr + obsfile: "./testinput/2021080100/gdas.t00z.sfcsno.tm00.bufr_d" + mapping file: "./bufr_sfcsno_mapping.yaml" + obsdataout: + engine: + type: H5File + obsfile: "./testoutput/2021080100/bufr4backend/gdas.t00z.sfcsno.tm00.nc" diff --git a/dump/config/bufr_script_backend_satwnd_amv_goes.yaml b/ush/test/config/bufr_script4backend_satwnd_amv_goes.yaml similarity index 89% rename from dump/config/bufr_script_backend_satwnd_amv_goes.yaml rename to ush/test/config/bufr_script4backend_satwnd_amv_goes.yaml index 6ddfa27..ccb8d77 100644 --- a/dump/config/bufr_script_backend_satwnd_amv_goes.yaml +++ b/ush/test/config/bufr_script4backend_satwnd_amv_goes.yaml @@ -1,6 +1,7 @@ time window: begin: "2018-04-14T21:00:00Z" end: "2023-12-15T03:00:00Z" + bound to include: begin observations: - obs space: @@ -19,7 +20,7 @@ observations: obsdataout: engine: type: H5File - obsfile: "./testoutput/2021080100/script_backend/gdas.t00z.satwnd.abi_goes-16.tm00.nc" + obsfile: "./testoutput/2021080100/script4backend/gdas.t00z.satwnd.abi_goes-16.tm00.nc" - obs space: name: "satwind_goes-17" @@ -37,7 +38,7 @@ observations: obsdataout: engine: 
type: H5File - obsfile: "./testoutput/2021080100/script_backend/gdas.t00z.satwnd.abi_goes-17.tm00.nc" + obsfile: "./testoutput/2021080100/script4backend/gdas.t00z.satwnd.abi_goes-17.tm00.nc" - obs space: name: "satwind_goes-18" @@ -55,4 +56,4 @@ observations: obsdataout: engine: type: H5File - obsfile: "./testoutput/2021080100/script_backend/gdas.t00z.satwnd.abi_goes-18.tm00.nc" + obsfile: "./testoutput/2021080100/script4backend/gdas.t00z.satwnd.abi_goes-18.tm00.nc" diff --git a/ush/test/config/bufr_script4backend_sfcsno.yaml b/ush/test/config/bufr_script4backend_sfcsno.yaml new file mode 100644 index 0000000..3c00255 --- /dev/null +++ b/ush/test/config/bufr_script4backend_sfcsno.yaml @@ -0,0 +1,23 @@ +time window: + begin: "2018-04-14T21:00:00Z" + end: "2023-12-15T03:00:00Z" + bound to include: begin + +observations: +- obs space: + name: "sfcsno_snow" + distribution: + name: Halo + halo size: 250e3 + simulated variables: [totalSnowDepth] + obsdatain: + engine: + type: script + script file: "bufr_sfcsno.py" + args: + input_path: "./testinput/2021080100/gdas.t00z.sfcsno.tm00.bufr_d" + mapping_path: "./bufr_sfcsno_mapping.yaml" + obsdataout: + engine: + type: H5File + obsfile: "./testoutput/2021080100/script4backend/gdas.t00z.sfcsno.tm00.nc" diff --git a/ush/test/encodeBufr.sh b/ush/test/encodeBufr.sh index 13e404f..ff0d621 100755 --- a/ush/test/encodeBufr.sh +++ b/ush/test/encodeBufr.sh @@ -9,7 +9,7 @@ bufrtype="${3:-satwnd}" obstype="${4:-satwnd_amv_goes}" sensor="${5:-abi}" split_by_category="${6:-true}" -mode="${7:-script_backend}" +mode="${7:-script4backend}" nproc="${8:-4}" # ========================== @@ -23,7 +23,7 @@ usage() { echo " : observation type to create (e.g., satwnd_amv_goes, atms, cris, sfcsno )" echo " : sensor (e.g., abi, atms); for non-satellite dta, sensor is usually obstype (e.g., sfcsno)" echo " : split the output file into multiple files based on category (false or true)" - echo " : mode of operation (e.g., bufr_backend, 
script_backend, bufr2netcdf, script2netcdf)" + echo " : mode of operation (e.g., bufr4backend, script4backend, bufr2netcdf, script2netcdf)" echo " : number of processors (positive integer to run with MPI, or zero for serial execution)" exit 1 } @@ -54,8 +54,8 @@ fi # ============== # Validate mode # ============== -if [[ "$mode" != "bufr_backend" && "$mode" != "script_backend" && "$mode" != "bufr2netcdf" && "$mode" != "script2netcdf" ]]; then - echo "Error: Invalid mode '$mode'. Expected 'bufr_backend' or 'script_backend' or 'bufr2netcdf' or 'script2netcdf'." +if [[ "$mode" != "bufr4backend" && "$mode" != "script4backend" && "$mode" != "bufr2netcdf" && "$mode" != "script2netcdf" ]]; then + echo "Error: Invalid mode '$mode'. Expected 'bufr4backend' or 'script4backend' or 'bufr2netcdf' or 'script2netcdf'." usage fi @@ -174,7 +174,7 @@ fi # ============================= # Run ioda bufr/script backend # ============================= -if [[ "$mode" == "bufr_backend" || "$mode" == "script_backend" ]]; then +if [[ "$mode" == "bufr4backend" || "$mode" == "script4backend" ]]; then if [[ ! -f "$ioda_config_yaml" ]]; then echo "Error: ioda configuration file not found: $ioda_config_yaml" exit 1 @@ -209,7 +209,7 @@ elif [[ "$mode" == "script2netcdf" ]]; then srun -n "$nproc" --mem 96G --time 00:30:00 python bufr_${obstype}.py "$input_file" "$mapping_file" "$output_file" || { echo "Error: MPI Python script2netcdf failed"; python bufr_${obstype}.py --help; exit 1; } fi else - echo Incorrect running mode ${mode} ... Valid modes are: bufr_backend, script_back, bufr2netcdf, or script2netcdf + echo Incorrect running mode ${mode} ... Valid modes are: bufr4backend, script4backend, bufr2netcdf, or script2netcdf fi