diff --git a/.gitignore b/.gitignore index 7669dac845..c2d9c9db45 100644 --- a/.gitignore +++ b/.gitignore @@ -81,6 +81,7 @@ parm/ufs/MOM6_data_table.IN parm/ufs/ice_in.IN parm/ufs/ufs.configure.*.IN parm/ufs/post_itag_gfs +parm/ufs/ww3_shel.nml.IN parm/wafs # Ignore sorc and logs folders from externals diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 0000000000..5044689f7e --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,27 @@ +# ------------------------------------------------------------------------- # +# Global Workflow +# ------------------------------------------------------------------------- # + +# Check for minimum cmake requirement +cmake_minimum_required( VERSION 3.20 FATAL_ERROR ) + +project(global_workflow VERSION 1.0.0) + +include(GNUInstallDirs) +enable_testing() + +# Build type. +if(NOT CMAKE_BUILD_TYPE MATCHES "^(Debug|Release|RelWithDebInfo|MinSizeRel)$") + message(STATUS "Setting build type to 'Release' as none was specified.") + set(CMAKE_BUILD_TYPE + "Release" + CACHE STRING "Choose the type of build." FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" + "MinSizeRel" "RelWithDebInfo") +endif() + +# Build global-workflow source codes +# add_subdirectory(sorc) + +# Setup tests +add_subdirectory(ctests) diff --git a/ci/platforms/config.hera b/ci/platforms/config.hera index 6d3e43c820..09e2e28ddb 100644 --- a/ci/platforms/config.hera +++ b/ci/platforms/config.hera @@ -2,6 +2,8 @@ export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT export ICSDIR_ROOT=/scratch1/NCEPDEV/global/glopara/data/ICSDIR + +export STAGED_TESTS_DIR=${GFS_CI_ROOT}/STAGED_TESTS_DIR export HPC_ACCOUNT=nems export max_concurrent_cases=5 export max_concurrent_pr=4 diff --git a/ci/platforms/config.orion b/ci/platforms/config.orion index 5171373127..507068d4e7 100644 --- a/ci/platforms/config.orion +++ b/ci/platforms/config.orion @@ -2,6 +2,7 @@ export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/ORION export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR +export STAGED_TESTS_DIR=${GFS_CI_ROOT}/STAGED_TESTS_DIR export HPC_ACCOUNT=nems export max_concurrent_cases=5 export max_concurrent_pr=4 diff --git a/ctests/CMakeLists.txt b/ctests/CMakeLists.txt new file mode 100644 index 0000000000..f8d928f456 --- /dev/null +++ b/ctests/CMakeLists.txt @@ -0,0 +1,106 @@ +# ------------------------------------------------------------------------- # +# CTests for Global Workflow +# ------------------------------------------------------------------------- # +# These ctests correspond to JJOBs (individual Rocoto jobs) that can be +# run independently, each requiring its own YAML definition of inputs +# and configurations. By integrating with Rocoto, these jobs can be +# validated, staged, and executed as self-contained tests using +# their own data and test parameters. 
+# ------------------------------------------------------------------------- # + +# Function to set a variable from an environment variable or default value +function(set_from_env_or_default VAR_NAME ENV_VAR DEFAULT_VALUE) + if (DEFINED ENV{${ENV_VAR}} AND NOT DEFINED ${VAR_NAME}) + set(${VAR_NAME} $ENV{${ENV_VAR}} CACHE STRING "Set from environment variable ${ENV_VAR}") + elseif(NOT DEFINED ${VAR_NAME} AND NOT ${DEFAULT_VALUE} STREQUAL "") + set(${VAR_NAME} ${DEFAULT_VALUE} CACHE STRING "Default value for ${VAR_NAME}") + endif() +endfunction() + +# Set HOMEgfs +if (NOT DEFINED HOMEgfs) + set(HOMEgfs ${PROJECT_SOURCE_DIR}) +endif() + +# Set RUNTESTS +set_from_env_or_default(RUNTESTS RUNTESTS "${CMAKE_CURRENT_BINARY_DIR}/RUNTESTS") + +# Set HPC_ACCOUNT +set_from_env_or_default(HPC_ACCOUNT HPC_ACCOUNT " ") +if (NOT DEFINED HPC_ACCOUNT) + message(WARNING "HPC_ACCOUNT must be set. CTests will not be created.") + return() +endif() + +# Set ICSDIR_ROOT +set_from_env_or_default(ICSDIR_ROOT ICSDIR_ROOT "") +if (NOT DEFINED ICSDIR_ROOT) + message(WARNING "ICSDIR_ROOT must be set. CTests will not be created.") + return() +endif() + +# Set STAGED_TESTS_DIR +set_from_env_or_default(STAGED_TESTS_DIR STAGED_TESTS_DIR "") +if (NOT DEFINED STAGED_TESTS_DIR) + message(WARNING "STAGED_TESTS_DIR must be set. CTests will not be created.") + return() +endif() + +message(STATUS "gw: global-workflow baselines will be used from: '${HOMEgfs}'") +message(STATUS "gw: global-workflow tests will be run at: '${RUNTESTS}'") +message(STATUS "gw: global-workflow tests will use the allocation: '${HPC_ACCOUNT}'") +message(STATUS "gw: global-workflow tests will use ICSDIR_ROOT: '${ICSDIR_ROOT}'") +message(STATUS "gw: global-workflow tests will use staged data from: '${STAGED_TESTS_DIR}'") + +# Prepare test scripts +configure_file(${CMAKE_CURRENT_SOURCE_DIR}/scripts/setup.sh.in + ${CMAKE_CURRENT_BINARY_DIR}/scripts/setup.sh @ONLY) +configure_file(${CMAKE_CURRENT_SOURCE_DIR}/scripts/stage.sh.in + ${CMAKE_CURRENT_BINARY_DIR}/scripts/stage.sh @ONLY) +configure_file(${CMAKE_CURRENT_SOURCE_DIR}/scripts/execute.sh.in + ${CMAKE_CURRENT_BINARY_DIR}/scripts/execute.sh @ONLY) +configure_file(${CMAKE_CURRENT_SOURCE_DIR}/scripts/validate.sh.in + ${CMAKE_CURRENT_BINARY_DIR}/scripts/validate.sh @ONLY) + +function(AddJJOBTest) + + set(prefix ARG) + set(novals NOTRAPFPE NOVALGRIND) + set(singlevals CASE JOB TEST_DATE) + set(multivals TEST_DEPENDS) + + cmake_parse_arguments(${prefix} + "${novals}" "${singlevals}" "${multivals}" + ${ARGN}) + + set(TEST_NAME ${ARG_CASE}_${ARG_JOB}) + set(CASE_PATH ${HOMEgfs}/ci/cases/pr) + set(CASE_YAML ${CASE_PATH}/${ARG_CASE}.yaml) + + add_test(NAME test_${TEST_NAME}_setup + COMMAND ./setup.sh ${TEST_NAME} ${CASE_YAML} ${ARG_TEST_DATE} + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/scripts) + set_tests_properties(test_${TEST_NAME}_setup PROPERTIES LABELS "${ARG_CASE};${ARG_JOB}") + + add_test(NAME test_${TEST_NAME}_stage + COMMAND ./stage.sh ${TEST_NAME} ${ARG_TEST_DATE} + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/scripts) + set_tests_properties(test_${TEST_NAME}_stage PROPERTIES DEPENDS test_${TEST_NAME}_setup LABELS "${ARG_CASE};${ARG_JOB}") + + add_test(NAME test_${TEST_NAME}_execute + COMMAND ./execute.sh ${TEST_NAME} ${ARG_JOB} ${ARG_TEST_DATE} + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/scripts) + set_tests_properties(test_${TEST_NAME}_execute PROPERTIES DEPENDS test_${TEST_NAME}_stage LABELS "${ARG_CASE};${ARG_JOB}") + + # TODO - This is a stub for the validation step + add_test(NAME 
test_${TEST_NAME}_validate
+           COMMAND ./validate.sh ${TEST_NAME} ${CASE_YAML}
+           WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/scripts)
+  set_tests_properties(test_${TEST_NAME}_validate PROPERTIES DEPENDS test_${TEST_NAME}_execute LABELS "${ARG_CASE};${ARG_JOB}")
+endfunction()
+
+AddJJOBTest(
+    CASE "C48_ATM"
+    JOB "gfs_fcst_seg0"
+    TEST_DATE "2021032312"
+)
diff --git a/ctests/README.md b/ctests/README.md
new file mode 100644
index 0000000000..95a32cd952
--- /dev/null
+++ b/ctests/README.md
@@ -0,0 +1,58 @@
+# CTest Framework for NOAA Global Workflow
+
+This directory contains the CTest framework for testing Rocoto JJOBs. The framework allows you to stage, execute, and validate individual JJOBs independently from other jobs in the workflow. Each test requires its own YAML definition of inputs and configurations.
+
+## Overview
+
+The CTest framework consists of the following scripts:
+- **setup.sh.in**: Prepares the environment and creates the experiment.
+- **stage.sh.in**: Stages the input files needed to run a JJOB.
+- **execute.sh.in**: Executes the JJOB and monitors its status.
+- **validate.sh.in**: (TODO) Validates the results of the JJOB.
+
+## Usage
+
+### CMake Configuration
+
+To configure the CTest framework, provide the following variables, either exported in the environment or passed to cmake with `-D`. Here is an example of how to configure the project:
+
+```bash
+# Set environment variables (may also be passed on the command line with -D)
+export HPC_ACCOUNT="your_hpc_account"
+export ICSDIR_ROOT="/path/to/icsdir_root"
+export STAGED_TESTS_DIR="/path/to/staged_tests_dir"
+
+# Run CMake to configure the ctest framework
+cmake -S /path/to/HOMEgfs -B /path/to/build -DRUNTESTS=/path/to/runtests
+```
+
+### Running Tests with CTest
+
+Once the project is configured, you can run the tests using CTest. Here are some examples:
+
+#### Run All Tests
+
+```bash
+cd /path/to/build
+ctest
+```
+
+#### Run Tests for a Specific Case
+
+You can use the `-L` option with CTest to run tests for a specific case. For example, to run tests for the `C48_ATM` case:
+
+```bash
+cd /path/to/build
+ctest -L C48_ATM
+```
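+
+Each step registered by **AddJJOBTest()** is its own CTest named `test_<CASE>_<JOB>_<step>`, where `<step>` is one of `setup`, `stage`, `execute`, or `validate`, so individual steps can also be selected by name with the `-R` regex option. A usage sketch for the `C48_ATM`/`gfs_fcst_seg0` case defined below:
+
+```bash
+cd /path/to/build
+# List the tests for the C48_ATM case without running them
+ctest -N -L C48_ATM
+# Run only the setup and stage steps, printing output when a test fails
+ctest -R "test_C48_ATM_gfs_fcst_seg0_(setup|stage)" --output-on-failure
+```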
+
+To add a new test, use the **AddJJOBTest()** function at the end of the `$HOMEgfs/ctests/CMakeLists.txt` file as follows:
+```cmake
+AddJJOBTest(
+    CASE "C48_ATM"
+    JOB "gfs_fcst_seg0"
+    TEST_DATE "2021032312"
+)
+```
+Then create a new YAML file describing the required staged input files, following the example in `$HOMEgfs/ctests/cases/C48_ATM_gfs_fcst_seg0.yaml`.
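+
+For reference, a hand-rendered sketch of how the first `copy` entry of that case YAML expands when `stage.py` parses it with `TEST_DATE=2021032312` for the `C48_ATM_gfs_fcst_seg0` test (`getenv` reads the environment, `to_YMD` and `strftime` format the test date; the paths below are placeholders for whatever `STAGED_TESTS_DIR` and `RUNTESTS` are set to):
+
+```yaml
+input_files:
+  mkdir:
+    - "/path/to/runtests/COMROOT/C48_ATM_gfs_fcst_seg0/gfs.20210323/12/model/atmos/input"
+  copy:
+    - ["/path/to/staged_tests_dir/C48_ATM_gfs_fcst_seg0/input_files/gfs.20210323/12/model/atmos/input/gfs_ctrl.nc",
+       "/path/to/runtests/COMROOT/C48_ATM_gfs_fcst_seg0/gfs.20210323/12/model/atmos/input/gfs_ctrl.nc"]
+```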
diff --git a/ctests/cases/C48_ATM_gfs_fcst_seg0.yaml b/ctests/cases/C48_ATM_gfs_fcst_seg0.yaml
new file mode 100644
index 0000000000..ec0ce88ff1
--- /dev/null
+++ b/ctests/cases/C48_ATM_gfs_fcst_seg0.yaml
@@ -0,0 +1,17 @@
+input_files:
+  mkdir:
+    - "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input"
+  copy:
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/gfs_ctrl.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/gfs_ctrl.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/gfs_data.tile1.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/gfs_data.tile1.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/gfs_data.tile2.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/gfs_data.tile2.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/gfs_data.tile3.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/gfs_data.tile3.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/gfs_data.tile4.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/gfs_data.tile4.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/gfs_data.tile5.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/gfs_data.tile5.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/gfs_data.tile6.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/gfs_data.tile6.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/sfc_data.tile1.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/sfc_data.tile1.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/sfc_data.tile2.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/sfc_data.tile2.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/sfc_data.tile3.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/sfc_data.tile3.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/sfc_data.tile4.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/sfc_data.tile4.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/sfc_data.tile5.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/sfc_data.tile5.nc"]
+    - ["{{ 'STAGED_TESTS_DIR' | getenv }}/{{ 'TEST_NAME' | getenv }}/input_files/gfs.{{ TEST_DATE | to_YMD }}/{{ TEST_DATE | strftime('%H') }}/model/atmos/input/sfc_data.tile6.nc", "{{ 'RUNTESTS' | getenv }}/COMROOT/{{ 'TEST_NAME' | getenv }}/gfs.{{TEST_DATE | to_YMD}}/{{TEST_DATE | strftime('%H')}}/model/atmos/input/sfc_data.tile6.nc"]
diff --git a/ctests/scripts/execute.sh.in b/ctests/scripts/execute.sh.in
new file mode 100755
index 0000000000..9cf3ef5917
--- /dev/null
+++ b/ctests/scripts/execute.sh.in
@@ -0,0 +1,66 @@
+#!/usr/bin/env bash
+
+set -xe
+
+TEST_NAME=${1:?"Name of the test is required"}
+JOB=${2:?"Job name is required"}
+# TODO - adding idate by hand for now, need to get this from the test somehow
+idate=$3
+
+#TODO - add rocotoboot_dryrun to repo somehow
+rocotoboot_dryrun=/work2/noaa/global/mterry/rocoto_dryrun/bin/rocotoboot
+CASEDIR="@CMAKE_CURRENT_BINARY_DIR@/RUNTESTS/EXPDIR/${TEST_NAME}"
+cd "${CASEDIR}"
+rm -f ./*.db
+rm -f ./jobcard
+
+yes | "${rocotoboot_dryrun}" -d "${TEST_NAME}.db" -w "${TEST_NAME}.xml" -v 10 -c "${idate}00" -t "${JOB}" 2> jobcard || true
+sed '/^{{\|^}}/d' < jobcard | sed '1d' > "${TEST_NAME}.sub" || true
+
+#TODO - Generalize for batch system (hard-coded to Slurm)
+
+output=$(sbatch "${TEST_NAME}.sub")
+job_id=$(echo "${output}" | awk '{print $4}')
+echo "Job ${job_id} submitted for test ${TEST_NAME} with job name ${JOB}"
+
+# First loop: wait until job appears
+lack_of_job_count=0
+LACK_OF_JOB_LIMIT=5
+
+while true; do
+  job_status=$(sacct -j "${job_id}" --format=State --noheader -n | head -1) || true
+  if [[ -n "${job_status}" ]]; then
+    echo "Job ${job_id} found in sacct."
+    break
+  fi
+  echo "Job ${job_id} not in sacct yet, attempt ${lack_of_job_count}/${LACK_OF_JOB_LIMIT}."
+  lack_of_job_count=$((lack_of_job_count + 1))
+  if [[ "${lack_of_job_count}" -ge "${LACK_OF_JOB_LIMIT}" ]]; then
+    echo "Job ${job_id} not found after ${lack_of_job_count} attempts. Exiting."
+    exit 1
+  fi
+  sleep 30
+done
+
+# Second loop: monitor job status until completion or failure
+timeout=0
+TIMEOUT=60
+while true; do
+  # Trim trailing spaces from job_status
+  job_status=$(sacct -j "${job_id}" --format=State --noheader -n | head -1 | xargs) || true
+  if [[ "${job_status}" == "COMPLETED" ]]; then
+    echo "Job ${job_id} completed successfully."
+    break
+  elif [[ "${job_status}" =~ ^(FAILED|CANCELLED|TIMEOUT)$ ]]; then
+    echo "Job ${job_id} failed with status: ${job_status}."
+    exit 1
+  else
+    echo "Job ${job_id} is still running with status: ${job_status}."
+    sleep 60
+    timeout=$((timeout + 1))
+    if [[ "${timeout}" -gt "${TIMEOUT}" ]]; then
+      echo "Job ${job_id} has been running for more than ${TIMEOUT} minutes. Exiting."
+      exit 1
+    fi
+  fi
+done
diff --git a/ctests/scripts/setup.sh.in b/ctests/scripts/setup.sh.in
new file mode 100755
index 0000000000..6c4a772b65
--- /dev/null
+++ b/ctests/scripts/setup.sh.in
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+
+set -ux
+
+TEST_NAME=${1:?"Name of the test is required"}
+YAML_FILE=${2:?"Name of the CI yaml file for the test"}
+
+# CMake to fill these variables
+HOMEgfs="@PROJECT_SOURCE_DIR@"
+RUNTESTS="@RUNTESTS@"
+ICSDIR_ROOT="@ICSDIR_ROOT@"
+HPC_ACCOUNT="@HPC_ACCOUNT@"
+
+set +x
+source "${HOMEgfs}/workflow/gw_setup.sh"
+set -x
+
+pslot="${TEST_NAME}" \
+RUNTESTS="${RUNTESTS}" \
+ICSDIR_ROOT="${ICSDIR_ROOT}" \
+HPC_ACCOUNT="${HPC_ACCOUNT}" \
+"${HOMEgfs}/workflow/create_experiment.py" --yaml "${YAML_FILE}" --overwrite
+rc=$?
+if [[ "${rc}" -ne 0 ]]; then
+  set +x
+  echo "Failed to create test experiment for '${TEST_NAME}' with yaml file '${YAML_FILE}'"
+  set -x
+  exit "${rc}"
+fi
+
+exit 0
diff --git a/ctests/scripts/stage.py b/ctests/scripts/stage.py
new file mode 100755
index 0000000000..b8a77a120d
--- /dev/null
+++ b/ctests/scripts/stage.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+
+"""
+stage.py
+
+This script is part of the ctest framework for testing Rocoto JJOBs; it stages the
+input files needed to run a JJOB independently of other jobs in the workflow.
+The YAML file specified at the command line contains the paths to the staged input files
+and their corresponding directories under the COMROOT of the experiment for the JJOB.
+
+Usage:
+    stage.py -y <yaml_file> [-d <test_date>]
+
+Arguments:
+    -y, --yaml       Path to the YAML file describing the job test configuration (required)
+    -d, --test_date  Test date in YYYYMMDDHH format (optional)
+
+Example:
+    ./stage.py -y /path/to/config.yaml -d 2021032312
+"""
+
+import os
+import datetime
+
+from argparse import ArgumentParser
+from pathlib import Path
+from wxflow import parse_j2yaml, FileHandler, Logger
+
+# Initialize logger with environment variable for logging level
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=False)
+
+
+def parse_args():
+    """
+    Parse command line arguments.
+
+    Returns
+    -------
+    argparse.Namespace
+        The parsed command line arguments, including:
+        - yaml: Path to the YAML file describing the job test configuration.
+        - test_date: Optional test date in YYYYMMDDHH format.
+    """
+    description = """Stage input files for running a Rocoto JJOB ctest
+    """
+    parser = ArgumentParser(description=description)
+
+    # Add argument for YAML file path
+    parser.add_argument('-y', '--yaml', help='full path to yaml file describing the job test configuration', type=Path, required=True)
+    # Add optional argument for test date
+    parser.add_argument('-d', '--test_date', help='test date in YYYYMMDDHH format', type=str, required=False)
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+
+    # Parse command line arguments
+    args = parse_args()
+
+    data = {}
+    if args.test_date:
+        # Parse test date from string to datetime object
+        data['TEST_DATE'] = datetime.datetime.strptime(args.test_date, '%Y%m%d%H')
+    # Parse YAML configuration file with optional data
+    case_cfg = parse_j2yaml(path=args.yaml, data=data)
+    # Synchronize input files as per the parsed configuration
+    FileHandler(case_cfg.input_files).sync()
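A minimal sketch of the wxflow call that stage.py ends with: FileHandler consumes a dict with "mkdir" and "copy" keys (the same shape as the input_files block in the case YAML), and sync() creates the listed directories and copies each [src, dest] pair. Paths here are illustrative only:

```python
#!/usr/bin/env python3
# Illustrative sketch; assumes wxflow is importable, as stage.sh arranges via PYTHONPATH
from wxflow import FileHandler

config = {
    "mkdir": ["/tmp/ctests_demo/input"],  # directories to create
    "copy": [["/tmp/staged/gfs_ctrl.nc", "/tmp/ctests_demo/input/gfs_ctrl.nc"]],  # [src, dest] pairs
}
FileHandler(config).sync()  # create the directories and copy the listed files
```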
+ """ + description = """Arguments for creating and updating error log files + """ + parser = ArgumentParser(description=description) + + # Add argument for YAML file path + parser.add_argument('-y', '--yaml', help='full path to yaml file describing the job test configuration', type=Path, required=True) + # Add optional argument for test date + parser.add_argument('-d', '--test_date', help='test date in YYYYMMDDHH format', type=str, required=False) + return parser.parse_args() + + +if __name__ == '__main__': + + # Parse command line arguments + args = parse_args() + + data = {} + if args.test_date: + # Parse test date from string to datetime object + data['TEST_DATE'] = datetime.datetime.strptime(args.test_date, '%Y%m%d%H') + # Parse YAML configuration file with optional data + case_cfg = parse_j2yaml(path=args.yaml, data=data) + # Synchronize input files as per the parsed configuration + FileHandler(case_cfg.input_files).sync() diff --git a/ctests/scripts/stage.sh.in b/ctests/scripts/stage.sh.in new file mode 100755 index 0000000000..9ced3d8f4e --- /dev/null +++ b/ctests/scripts/stage.sh.in @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +set -ux + +TEST_NAME=${1:?"Name of the test is required"} +TEST_DATE=${2:?"idate of the test is required"} + +# CMake to fill these variables +STAGED_TESTS_DIR="@STAGED_TESTS_DIR@" +RUNTESTS="@RUNTESTS@" +HOMEgfs="@PROJECT_SOURCE_DIR@" + +# Load the runtime environment for this script (needs wxflow and its dependencies) +set +x +source "${HOMEgfs}/workflow/gw_setup.sh" +rc=$? +[[ "${rc}" -ne 0 ]] && exit "${status}" +set -x +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/sorc/wxflow/src" +export PYTHONPATH + +INPUTS_YAML="${HOMEgfs}/ctests/cases/${TEST_NAME}.yaml" + +TEST_NAME="${TEST_NAME}" \ +RUNTESTS="${RUNTESTS}" \ +STAGED_TESTS_DIR="${STAGED_TESTS_DIR}" \ +"${HOMEgfs}/ctests/scripts/stage.py" --yaml "${INPUTS_YAML}" --test_date "${TEST_DATE}" +rc=$? 
+if [[ "${rc}" -ne 0 ]]; then + set +x + echo "Failed to stage inputs for '${TEST_NAME}' with '${INPUTS_YAML}'" + set -x + exit "${rc}" +fi + +exit 0 diff --git a/ctests/scripts/validate.sh.in b/ctests/scripts/validate.sh.in new file mode 100755 index 0000000000..0277699956 --- /dev/null +++ b/ctests/scripts/validate.sh.in @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +set -ux + +TEST_NAME=${1:?"Name of the test is required"} +YAML_FILE=${2:?"Name of the CI yaml file for validating the test"} + +echo "validating '${TEST_NAME}' with yaml file '${YAML_FILE}'" + +exit 0 \ No newline at end of file diff --git a/docs/source/wave.rst b/docs/source/wave.rst index 56aa34ce3b..52a984b6b3 100644 --- a/docs/source/wave.rst +++ b/docs/source/wave.rst @@ -21,6 +21,8 @@ Here are several regional naming conventions: +===========+=======================+ | glo | Global domain | +-----------+-----------------------+ +| uglo | Unstructured global | ++-----------+-----------------------+ | ak | Alaska | +-----------+-----------------------+ | ao or aoc | Arctic Ocean | diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 6c476cda5d..fe6acf88fb 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -13,11 +13,6 @@ step=$1 export launcher="mpiexec -l" export mpmd_opt="--cpu-bind verbose,core cfp" -# TODO: Add path to GDASApp libraries and cray-mpich as temporary patches -# TODO: Remove LD_LIBRARY_PATH lines as soon as permanent solutions are available -export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${HOMEgfs}/sorc/gdas.cd/build/lib" -export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/opt/cray/pe/mpich/8.1.19/ofi/intel/19.0/lib" - # Calculate common resource variables # Check first if the dependent variables are set if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then diff --git a/modulefiles/module_gwsetup.hercules.lua b/modulefiles/module_gwsetup.hercules.lua index 9d845fb71d..e7735e4aa1 100644 --- a/modulefiles/module_gwsetup.hercules.lua +++ b/modulefiles/module_gwsetup.hercules.lua @@ -9,6 +9,7 @@ prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.9.0" local python_ver=os.getenv("python_ver") or "3.11.6" +local cmake_ver=os.getenv("cmake_ver") or "3.23.1" load(pathJoin("stack-intel", stack_intel_ver)) load(pathJoin("python", python_ver)) diff --git a/modulefiles/module_gwsetup.orion.lua b/modulefiles/module_gwsetup.orion.lua index b8e2fc8a9f..5ffebc31a1 100644 --- a/modulefiles/module_gwsetup.orion.lua +++ b/modulefiles/module_gwsetup.orion.lua @@ -10,11 +10,13 @@ prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-st local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.9.0" local python_ver=os.getenv("python_ver") or "3.11.6" +local cmake_ver=os.getenv("cmake_ver") or "3.23.1" load(pathJoin("stack-intel", stack_intel_ver)) load(pathJoin("python", python_ver)) load("py-jinja2") load("py-pyyaml") load("py-numpy") +load(pathJoin("cmake", cmake_ver)) whatis("Description: GFS run setup environment") diff --git a/parm/archive/gdaswave.yaml.j2 b/parm/archive/gdaswave.yaml.j2 index 220770b38d..109c70c181 100644 --- a/parm/archive/gdaswave.yaml.j2 +++ b/parm/archive/gdaswave.yaml.j2 @@ -1,5 +1,5 @@ gdaswave: - {% set head = "gdas.wave.t" + cycle_HH + "z." %} + {% set head = "gdaswave.t" + cycle_HH + "z." 
%} name: "GDASWAVE" target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdaswave.tar" required: diff --git a/parm/archive/gefs_extracted_atmos.yaml.j2 b/parm/archive/gefs_extracted_atmos.yaml.j2 deleted file mode 100644 index 7ceba551bf..0000000000 --- a/parm/archive/gefs_extracted_atmos.yaml.j2 +++ /dev/null @@ -1,86 +0,0 @@ -{% set cycle_HH = current_cycle | strftime("%H") %} -{% set cycle_YMD = current_cycle | to_YMD %} -{% set cycle_YMDH = current_cycle | to_YMDH %} -{% set head = RUN + ".t" + cycle_HH + "z." %} - -gefs_atmos: - name: "GEFS_ATMOS" - target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gefs_atmos.tar" - required: -#select mem%03d and ensstat files required -{% set members = ["ensstat"] %} -{% for mem_nm in range(0, NMEM_ENS + 1) %} - {% do members.append("mem" ~ '%03d' % mem_nm ) %} -{% endfor %} - -{% if REPLAY_ICS %} - {% set ofst_hr = OFFSET_START_HOUR %} -{% else %} - {% set ofst_hr = FHMIN_GFS %} -{% endif %} - -{% for mem in members %} - {% for res in ['0p25', '0p50', '1p00'] %} - {% set tmpl_dict = ({ '${ROTDIR}':ROTDIR, - '${RUN}':RUN, - '${YMD}':cycle_YMD, - '${HH}':cycle_HH, - '${GRID}':res, - '${MEMDIR}':mem }) %} - - {% set COMIN_ATMOS_GRIB = COM_ATMOS_GRIB_GRID_TMPL | replace_tmpl(tmpl_dict) %} - -# Select pgrb and grib files to copy to the atardir - {% if path_exists(COMIN_ATMOS_GRIB) %} - {% if FHMAX_HF_GFS == 0 %} - {% for fhr in range(ofst_hr, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} - {% if mem=="ensstat" %} - {% set file_name = head ~ "mean.pres_." ~ res ~ ".f" ~ '%03d'|format(fhr) ~ ".grib2" %} - {% set file_path = COMIN_ATMOS_GRIB ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% else %} - {% set file_name = head ~ "pgrb2." ~ res ~ ".f" ~ '%03d'|format(fhr) %} - {% set file_path = COMIN_ATMOS_GRIB ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% set file_name = head ~ "pgrb2b." ~ res ~ ".f" ~ '%03d'|format(fhr) %} - {% set file_path = COMIN_ATMOS_GRIB ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% endif %} - {% endfor %} - {% else %} - {% if res == "0p25" %} - {% for fhr in range(ofst_hr, FHMAX_HF_GFS + FHOUT_HF_GFS, FHOUT_HF_GFS) %} - {% if mem=="ensstat" %} - {% set file_name = head ~ "mean.pres_." ~ res ~ ".f" ~ '%03d'|format(fhr) ~ ".grib2" %} - {% set file_path = COMIN_ATMOS_GRIB ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% else %} - {% set file_name = head ~ "pgrb2." ~ res ~ ".f" ~ '%03d'|format(fhr) %} - {% set file_path = COMIN_ATMOS_GRIB ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% set file_name = head ~ "pgrb2b." ~ res ~ ".f" ~ '%03d'|format(fhr) %} - {% set file_path = COMIN_ATMOS_GRIB ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% endif %} - {% endfor %} - {% endif %} - {% if res == "0p50" %} - {% for fhr in range(FHMAX_HF_GFS + FHOUT_GFS, FHMAX_GFS + FHOUT_GFS, FHOUT_GFS) %} - {% if mem=="ensstat" %} - {% set file_name = head ~ "mean.pres_." ~ res ~ ".f" ~ '%03d'|format(fhr) ~ ".grib2" %} - {% set file_path = COMIN_ATMOS_GRIB ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% else %} - {% set file_name = head ~ "pgrb2." ~ res ~ ".f" ~ '%03d'|format(fhr) %} - {% set file_path = COMIN_ATMOS_GRIB ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% set file_name = head ~ "pgrb2b." 
~ res ~ ".f" ~ '%03d'|format(fhr) %} - {% set file_path = COMIN_ATMOS_GRIB ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% endif %} - {% endfor %} - {% endif %} - {% endif %} - {% endif %} - {% endfor %} -{% endfor %} diff --git a/parm/archive/gefs_extracted_ice.yaml.j2 b/parm/archive/gefs_extracted_ice.yaml.j2 deleted file mode 100644 index 786d502f23..0000000000 --- a/parm/archive/gefs_extracted_ice.yaml.j2 +++ /dev/null @@ -1,33 +0,0 @@ -{% set cycle_HH = current_cycle | strftime("%H") %} -{% set cycle_YMD = current_cycle | to_YMD %} -{% set cycle_YMDH = current_cycle | to_YMDH %} -{% set head = RUN + ".ice.t" + cycle_HH + "z." %} - -gefs_ice: - name: "GEFS_ICE" - target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gefs_ice.tar" - required: -#select mem%03d and ensstat files required -{% set members = [] %} -{% for mem_nm in range(0, NMEM_ENS + 1) %} - {% do members.append("mem" ~ '%03d' % mem_nm ) %} -{% endfor %} - -{% for mem in members %} - {% set tmpl_dict = ({ '${ROTDIR}':ROTDIR, - '${RUN}':RUN, - '${YMD}':cycle_YMD, - '${HH}':cycle_HH, - '${MEMDIR}':mem }) %} - - {% set COMIN_ICE_HISTORY = COM_ICE_HISTORY_TMPL | replace_tmpl(tmpl_dict) %} - -# Select netcdf files to copy to the atardir - {% if path_exists(COMIN_ICE_HISTORY) %} - {% for fhr in range(FHMIN_GFS + FHOUT_ICE_GFS, FHMAX_GFS + FHOUT_ICE_GFS, FHOUT_ICE_GFS) %} - {% set file_name = head ~ FHOUT_ICE_GFS ~ "hr_avg" ~ ".f" ~ '%03d'|format(fhr) ~ ".nc" %} - {% set file_path = COMIN_ICE_HISTORY ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% endfor %} - {% endif %} -{% endfor %} diff --git a/parm/archive/gefs_extracted_ocean.yaml.j2 b/parm/archive/gefs_extracted_ocean.yaml.j2 deleted file mode 100644 index e5e3b36e3b..0000000000 --- a/parm/archive/gefs_extracted_ocean.yaml.j2 +++ /dev/null @@ -1,40 +0,0 @@ -{% set cycle_HH = current_cycle | strftime("%H") %} -{% set cycle_YMD = current_cycle | to_YMD %} -{% set cycle_YMDH = current_cycle | to_YMDH %} -{% set head = RUN + ".ocean.t" + cycle_HH + "z." 
%} - -gefs_ocean: - name: "GEFS_OCEAN" - target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gefs_ocean.tar" - required: -#select mem%03d and ensstat files required -{% set members = [] %} -{% for mem_nm in range(0, NMEM_ENS + 1) %} - {% do members.append("mem" ~ '%03d' % mem_nm ) %} -{% endfor %} - -{% if OCNRES == "025" %} - {% set res = "1p00" %} -{% else %} - {% set res = (OCNRES|string())[0] ~ "p" ~ (OCNRES|string())[-2:] %} -{% endif %} - -{% for mem in members %} - {% set tmpl_dict = ({ '${ROTDIR}':ROTDIR, - '${RUN}':RUN, - '${YMD}':cycle_YMD, - '${HH}':cycle_HH, - '${MEMDIR}':mem }) %} - - {% set COMIN_OCEAN_NETCDF = COM_OCEAN_NETCDF_TMPL | replace_tmpl(tmpl_dict) %} - - # Select netcdf files to copy to the atardir - {% set netcdf_grid_dir = COMIN_OCEAN_NETCDF ~ "/" ~ res %} - {% if path_exists(netcdf_grid_dir) %} - {% for fhr in range(FHMIN_GFS + FHOUT_OCN_GFS, FHMAX_GFS + FHOUT_OCN_GFS, FHOUT_OCN_GFS) %} - {% set file_name = head ~ res ~ ".f" ~ '%03d'|format(fhr) ~ ".nc" %} - {% set file_path = netcdf_grid_dir ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% endfor %} - {% endif %} -{% endfor %} diff --git a/parm/archive/gefs_extracted_wave.yaml.j2 b/parm/archive/gefs_extracted_wave.yaml.j2 deleted file mode 100644 index e0aa07c816..0000000000 --- a/parm/archive/gefs_extracted_wave.yaml.j2 +++ /dev/null @@ -1,51 +0,0 @@ -{% set cycle_HH = current_cycle | strftime("%H") %} -{% set cycle_YMD = current_cycle | to_YMD %} -{% set cycle_YMDH = current_cycle | to_YMDH %} -{% set head = RUN + "wave.t" + cycle_HH + "z." %} - -gefs_wave: - name: "GEFS_WAVE" - target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gefs_wave.tar" - required: -{% if REPLAY_ICS %} - {% set ofst_hr = OFFSET_START_HOUR %} -{% else %} - {% set ofst_hr = FHMIN_GFS %} -{% endif %} - -{% set res = (waveGRD[-3:])[0] ~ "p" ~ (waveGRD[-3:])[-2:] %} - -#select mem%03d and ensstat files required -{% set members = [] %} -{% for mem_nm in range(0, NMEM_ENS + 1) %} - {% do members.append("mem" ~ '%03d' % mem_nm ) %} -{% endfor %} - -{% for mem in members %} - {% set tmpl_dict = ({ '${ROTDIR}':ROTDIR, - '${RUN}':RUN, - '${YMD}':cycle_YMD, - '${HH}':cycle_HH, - '${MEMDIR}':mem }) %} - - {% set COMIN_WAVE_GRID = COM_WAVE_GRID_TMPL | replace_tmpl(tmpl_dict) %} - # Select grib2 files to copy to the atardir - {% if path_exists(COMIN_WAVE_GRID) %} - {% for fhr in range(ofst_hr, FHMAX_GFS + FHOUT_WAV, FHOUT_WAV) %} - {% set file_name = head ~ "global." 
~ res ~ ".f" ~ '%03d'|format(fhr) ~ ".grib2" %} - {% set file_path = COMIN_WAVE_GRID ~ "/" ~ file_name %} - - "{{ file_path | relpath(ROTDIR)}}" - {% endfor %} - {% endif %} - - {% set COMIN_WAVE_STATION = COM_WAVE_STATION_TMPL | replace_tmpl(tmpl_dict) %} - # Select station files to copy to the atardir - {% if path_exists(COMIN_WAVE_STATION) %} - {% set file_path = COMIN_WAVE_STATION ~ "/" ~ RUN ~ "wave.t" ~ cycle_HH ~ "z.spec_tar.gz" %} - - "{{ file_path | relpath(ROTDIR)}}" - {% set file_path = COMIN_WAVE_STATION ~ "/" ~ RUN ~ "wave.t" ~ cycle_HH ~ "z.cbull_tar" %} - - "{{ file_path | relpath(ROTDIR)}}" - {% set file_path = COMIN_WAVE_STATION ~ "/" ~ RUN ~ "wave.t" ~ cycle_HH ~ "z.bull_tar" %} - - "{{ file_path | relpath(ROTDIR)}}" - {% endif %} -{% endfor %} diff --git a/parm/archive/master_gefs.yaml.j2 b/parm/archive/master_gefs.yaml.j2 index e76d7c9f7a..e33215a23c 100644 --- a/parm/archive/master_gefs.yaml.j2 +++ b/parm/archive/master_gefs.yaml.j2 @@ -4,13 +4,6 @@ {% set cycle_YMDH = current_cycle | to_YMDH %} datasets: -{% filter indent(width=4) %} - {% include "gefs_extracted_atmos.yaml.j2" %} - {% include "gefs_extracted_ocean.yaml.j2" %} - {% include "gefs_extracted_ice.yaml.j2" %} - {% include "gefs_extracted_wave.yaml.j2" %} -{% endfilter %} - # Archive the EXPDIR if requested {% if archive_expdir %} {% filter indent(width=4) %} diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs index 3f931d7c0d..f2ee7b8619 100644 --- a/parm/config/gefs/config.ufs +++ b/parm/config/gefs/config.ufs @@ -512,9 +512,7 @@ fi # Set the name of the UFS (previously nems) configure template to use # Default ufs.configure templates for supported model configurations -if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then - tmpl_suffix="_esmf" -fi +# WW3 restart field variable is different for slow vs fast loop. Add WW3_RSTFLDS="ice" for slow loop variables based on coupling scheme. case "${model_list}" in atm) default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" @@ -533,9 +531,11 @@ case "${model_list}" in ;; atm.ocean.ice.wave) default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" + WW3_RSTFLDS="ice" ;; atm.ocean.ice.wave.aero) default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" + WW3_RSTFLDS="ice" ;; *) echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}" @@ -547,6 +547,9 @@ esac export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}} unset model_list default_template +#export wave restart variable: +export WW3_RSTFLDS=${WW3_RSTFLDS:-" "} + if [[ ! -r "${ufs_configure_template}" ]]; then echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." exit 17 diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index f8b23e69d2..d069c370b7 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -219,7 +219,7 @@ case "${CASE}" in export waveGRD='uglo_100km' ;; "C768" | "C1152") - export waveGRD='uglo_m1g16' + export waveGRD='uglo_15km' ;; *) echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!" 
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 34c645d5e9..a2db2c7cfe 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -198,8 +198,8 @@ case ${step} in threads_per_task=1 tasks_per_node=$(( max_tasks_per_node / threads_per_task )) NTASKS=${ntasks} - memory_gdas="10GB" - memory_gfs="10GB" + memory_gdas="20GB" + memory_gfs="20GB" ;; # The wavepost*pnt* jobs are I/O heavy and do not scale well to large nodes. diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index 0a16a75cb2..5f2b6675cf 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -15,7 +15,7 @@ if (( $# <= 1 )); then echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" echo "--mom6 500|100|025" echo "--cice6 500|100|025" - echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km" echo "--gocart" exit 1 @@ -605,12 +605,12 @@ if [[ "${skip_ww3}" == "false" ]]; then "mx025") ntasks_ww3=80 ;; - "uglo_100km") - ntasks_ww3=40 + "uglo_15km") + ntasks_ww3=1000 nthreads_ww3=1 ;; - "uglo_m1g16") - ntasks_ww3=1000 + "uglo_100km") + ntasks_ww3=40 nthreads_ww3=1 ;; *) @@ -630,9 +630,7 @@ fi # Set the name of the UFS (previously nems) configure template to use # Default ufs.configure templates for supported model configurations -if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then - tmpl_suffix="_esmf" -fi +# WW3 restart field variable is different for slow vs fast loop. Add WW3_RSTFLDS="ice" for slow loop variables based on coupling scheme. case "${model_list}" in atm) default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN" @@ -651,9 +649,11 @@ case "${model_list}" in ;; atm.ocean.ice.wave) default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN" + WW3_RSTFLDS="ice" ;; atm.ocean.ice.wave.aero) default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN" + WW3_RSTFLDS="ice" ;; *) echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}" @@ -665,6 +665,9 @@ esac export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}} unset model_list default_template +# export wave restart variable: +export WW3_RSTFLDS=${WW3_RSTFLDS:-" "} + if [[ ! -r "${ufs_configure_template}" ]]; then echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." exit 17 diff --git a/parm/config/gfs/config.wave b/parm/config/gfs/config.wave index e792f45473..722e1122e4 100644 --- a/parm/config/gfs/config.wave +++ b/parm/config/gfs/config.wave @@ -56,18 +56,18 @@ case "${waveGRD}" in export wavepostGRD='glo_500' export waveuoutpGRD=${waveGRD} ;; + "uglo_15km") + # unstructured 15km grid + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; "uglo_100km") #unstructured 100km grid export waveinterpGRD='glo_200' export wavepostGRD='' export waveuoutpGRD=${waveGRD} ;; - "uglo_m1g16") - #unstructured m1v16 grid - export waveinterpGRD='glo_15mxt' - export wavepostGRD='' - export waveuoutpGRD=${waveGRD} - ;; *) echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting." 
exit 1 diff --git a/parm/product/gefs.0p25.fFFF.paramlist.a.txt b/parm/product/gefs.0p25.fFFF.paramlist.a.txt index 4bb87c32ff..afe8464789 100644 --- a/parm/product/gefs.0p25.fFFF.paramlist.a.txt +++ b/parm/product/gefs.0p25.fFFF.paramlist.a.txt @@ -38,3 +38,6 @@ :MSLET:mean sea level: :VIS:surface: :HGT:cloud ceiling: +:MAXUW:10 m above ground: +:MAXVW:10 m above ground: +:WIND:10 m above ground: diff --git a/parm/product/gefs.0p25.fFFF.paramlist.b.txt b/parm/product/gefs.0p25.fFFF.paramlist.b.txt index 5c406ce34d..f0ed5b5cb9 100644 --- a/parm/product/gefs.0p25.fFFF.paramlist.b.txt +++ b/parm/product/gefs.0p25.fFFF.paramlist.b.txt @@ -73,10 +73,13 @@ :HGT:100 mb: :HGT:10 mb: :HGT:1 mb: +:HGT:125 mb: :HGT:150 mb: +:HGT:175 mb: :HGT:200 mb: :HGT:20 mb: :HGT:2 mb: +:HGT:225 mb: :HGT:250 mb: :HGT:300 mb: :HGT:30 mb: @@ -94,8 +97,11 @@ :HGT:70 mb: :HGT:7 mb: :HGT:750 mb: +:HGT:775 mb: :HGT:800 mb: +:HGT:825 mb: :HGT:850 mb: +:HGT:875 mb: :HGT:900 mb: :HGT:925 mb: :HGT:950 mb: @@ -238,11 +244,14 @@ :SPFH:1 mb: :SPFH:120-90 mb above ground: :SPFH:150-120 mb above ground: +:SPFH:125 mb: :SPFH:150 mb: +:SPFH:175 mb: :SPFH:180-150 mb above ground: :SPFH:200 mb: :SPFH:20 mb: :SPFH:2 mb: +:SPFH:225 mb: :SPFH:250 mb: :SPFH:2 m above ground: :SPFH:300 mb: @@ -263,9 +272,12 @@ :SPFH:70 mb: :SPFH:7 mb: :SPFH:750 mb: +:SPFH:775 mb: :SPFH:800 mb: +:SPFH:825 mb: :SPFH:80 m above ground: :SPFH:850 mb: +:SPFH:875 mb: :SPFH:900 mb: :SPFH:90-60 mb above ground: :SPFH:925 mb: @@ -286,12 +298,15 @@ :TMP:1 mb: :TMP:120-90 mb above ground: :TMP:150-120 mb above ground: +:TMP:125 mb: :TMP:150 mb: +:TMP:175 mb: :TMP:180-150 mb above ground: :TMP:1829 m above mean sea level: :TMP:200 mb: :TMP:20 mb: :TMP:2 mb: +:TMP:225 mb: :TMP:250 mb: :TMP:2743 m above mean sea level: :TMP:300 mb: @@ -321,9 +336,12 @@ :TMP:70 mb: :TMP:7 mb: :TMP:750 mb: +:TMP:775 mb: :TMP:800 mb: :TMP:80 m above ground: +:TMP:825 mb: :TMP:850 mb: +:TMP:875 mb: :TMP:900 mb: :TMP:90-60 mb above ground: :TMP:914 m above mean sea level: @@ -354,14 +372,17 @@ :UGRD:1000 mb: :UGRD:100 m above ground: :UGRD:100 mb: +:UGRD:125 mb: :UGRD:10 mb: :UGRD:1 mb: :UGRD:120-90 mb above ground: :UGRD:150-120 mb above ground: :UGRD:150 mb: +:UGRD:175 mb: :UGRD:180-150 mb above ground: :UGRD:1829 m above mean sea level: :UGRD:200 mb: +:UGRD:225 mb: :UGRD:20 mb: :UGRD:2 mb: :UGRD:250 mb: @@ -393,9 +414,12 @@ :UGRD:70 mb: :UGRD:7 mb: :UGRD:750 mb: +:UGRD:775 mb: :UGRD:800 mb: :UGRD:80 m above ground: +:UGRD:825 mb: :UGRD:850 mb: +:UGRD:875 mb: :UGRD:900 mb: :UGRD:90-60 mb above ground: :UGRD:914 m above mean sea level: @@ -422,14 +446,17 @@ :VGRD:1000 mb: :VGRD:100 m above ground: :VGRD:100 mb: +:VGRD:125 mb: :VGRD:10 mb: :VGRD:1 mb: :VGRD:120-90 mb above ground: :VGRD:150-120 mb above ground: :VGRD:150 mb: +:VGRD:175 mb: :VGRD:180-150 mb above ground: :VGRD:1829 m above mean sea level: :VGRD:200 mb: +:VGRD:225 mb: :VGRD:20 mb: :VGRD:2 mb: :VGRD:250 mb: @@ -461,9 +488,12 @@ :VGRD:70 mb: :VGRD:7 mb: :VGRD:750 mb: +:VGRD:775 mb: :VGRD:800 mb: :VGRD:80 m above ground: +:VGRD:825 mb: :VGRD:850 mb: +:VGRD:875 mb: :VGRD:900 mb: :VGRD:90-60 mb above ground: :VGRD:914 m above mean sea level: @@ -497,8 +527,11 @@ :VVEL:70 mb: :VVEL:1000 mb: :VVEL:100 mb: +:VVEL:125 mb: :VVEL:150 mb: +:VVEL:175 mb: :VVEL:200 mb: +:VVEL:225 mb: :VVEL:250 mb: :VVEL:300 mb: :VVEL:350 mb: @@ -510,8 +543,11 @@ :VVEL:650 mb: :VVEL:700 mb: :VVEL:750 mb: +:VVEL:775 mb: :VVEL:800 mb: +:VVEL:825 mb: :VVEL:850 mb: +:VVEL:875 mb: :VVEL:900 mb: :VVEL:925 mb: :VVEL:950 mb: diff --git a/parm/ufs/fv3/diag_table 
b/parm/ufs/fv3/diag_table index f44bfd82a4..ba4f9c793d 100644 --- a/parm/ufs/fv3/diag_table +++ b/parm/ufs/fv3/diag_table @@ -178,6 +178,9 @@ "gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "wind10mmax", "wind10m_max", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u10mmax", "u10m_max", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v10mmax", "v10m_max", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "pahi", "pahi", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "pah_ave", "pah_ave", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "ecan_acc", "ecan_acc", "fv3_history2d", "all", .false., "none", 2 diff --git a/parm/ufs/fv3/diag_table_replay b/parm/ufs/fv3/diag_table_replay index 01f2cf9794..e197510b34 100644 --- a/parm/ufs/fv3/diag_table_replay +++ b/parm/ufs/fv3/diag_table_replay @@ -232,6 +232,9 @@ ocean_model, "Heat_PmE", "Heat_PmE", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo "gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "wind10mmax", "wind10m_max", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u10mmax", "u10m_max", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v10mmax", "v10m_max", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "pahi", "pahi", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "pah_ave", "pah_ave", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "ecan_acc", "ecan_acc", "fv3_history2d", "all", .false., "none", 2 diff --git a/parm/wave/ak_10m_interp.inp.tmpl b/parm/wave/ak_10m_interp.inp.tmpl index 225ab3628d..c397e17e5d 100755 --- a/parm/wave/ak_10m_interp.inp.tmpl +++ b/parm/wave/ak_10m_interp.inp.tmpl @@ -3,10 +3,9 @@ $------------------------------------------------ $ Start Time DT NSteps TIME DT NSTEPS $ Total number of grids - 3 + 2 $ Grid extensions - 'gnh_10m' - 'aoc_9km' + 'uglo_15km' 'ak_10m' $ 0 diff --git a/parm/wave/at_10m_interp.inp.tmpl b/parm/wave/at_10m_interp.inp.tmpl index 6f4c1f7099..903f49504f 100755 --- a/parm/wave/at_10m_interp.inp.tmpl +++ b/parm/wave/at_10m_interp.inp.tmpl @@ -5,7 +5,7 @@ $ Start Time DT NSteps $ Total number of grids 2 $ Grid extensions - 'uglo_m1g16' + 'uglo_15km' 'at_10m' $ 0 diff --git a/parm/wave/ep_10m_interp.inp.tmpl b/parm/wave/ep_10m_interp.inp.tmpl index 23cfd50c2e..292522325c 100755 --- a/parm/wave/ep_10m_interp.inp.tmpl +++ b/parm/wave/ep_10m_interp.inp.tmpl @@ -5,7 +5,7 @@ $ Start Time DT NSteps $ Total number of grids 2 $ Grid extensions - 'uglo_m1g16' + 'uglo_15km' 'ep_10m' $ 0 diff --git a/parm/wave/glo_15mxt_interp.inp.tmpl b/parm/wave/glo_15mxt_interp.inp.tmpl index 19e9dae684..e9ba00ef23 100755 --- a/parm/wave/glo_15mxt_interp.inp.tmpl +++ b/parm/wave/glo_15mxt_interp.inp.tmpl @@ -5,7 +5,7 @@ $ Start Time DT NSteps $ Total number of grids 2 $ Grid extensions - 'uglo_m1g16' + 'uglo_15km' 'glo_15mxt' $ 0 diff --git a/parm/wave/glo_30m_interp.inp.tmpl b/parm/wave/glo_30m_interp.inp.tmpl index c62881202c..611907fb15 100755 --- a/parm/wave/glo_30m_interp.inp.tmpl +++ b/parm/wave/glo_30m_interp.inp.tmpl @@ -5,7 +5,7 @@ $ Start Time DT NSteps $ Total number of grids 2 $ Grid extensions - 'uglo_m1g16' + 'uglo_15km' 'glo_30m' $ 0 diff --git a/parm/wave/wc_10m_interp.inp.tmpl b/parm/wave/wc_10m_interp.inp.tmpl index 8338c91d0c..234a9a1f93 100755 --- a/parm/wave/wc_10m_interp.inp.tmpl +++ 
b/parm/wave/wc_10m_interp.inp.tmpl @@ -5,7 +5,7 @@ $ Start Time DT NSteps $ Total number of grids 2 $ Grid extensions - 'uglo_m1g16' + 'uglo_15km' 'wc_10m' $ 0 diff --git a/parm/wave/ww3_shel.inp.tmpl b/parm/wave/ww3_shel.inp.tmpl deleted file mode 100644 index 0b9b335e1b..0000000000 --- a/parm/wave/ww3_shel.inp.tmpl +++ /dev/null @@ -1,42 +0,0 @@ -$ -------------------------------------------------------------------- $ -$ WAVEWATCH III shel input file $ -$ -------------------------------------------------------------------- $ -$ Include ice and mud parameters only if IC1/2/3/4 used : - F F Water levels - CURRLINE - WINDLINE - ICELINE - F F Atmospheric momentum - F F Air density - F Assimilation data : Mean parameters - F Assimilation data : 1-D spectra - F Assimilation data : 2-D spectra -$ - RUN_BEG - RUN_END -$ -$ IOSTYP - IOSRV -$ - OUT_BEG DTFLD OUT_END GOFILETYPE - N - OUTPARS -$ - OUT_BEG DTPNT OUT_END POFILETYPE -BUOY_FILE -$ - OUT_BEG 0 OUT_END -$ -$ Keep next two lines formatting as is to allow proper parsing - RST_BEG DTRST RST_END RSTTYPE -RST_2_BEG DT_2_RST RST_2_END -$ - OUT_BEG 0 OUT_END -$ - OUT_BEG 0 OUT_END -$ - 'the_end' 0 -$ - 'STP' -$ -$ End of input file diff --git a/scripts/exglobal_atmos_products.sh b/scripts/exglobal_atmos_products.sh index 51e1a108bb..e963f4b2f5 100755 --- a/scripts/exglobal_atmos_products.sh +++ b/scripts/exglobal_atmos_products.sh @@ -104,7 +104,7 @@ for (( nset=1 ; nset <= downset ; nset++ )); do # grep returns 1 if no match is found, so temporarily turn off exit on non-zero rc set +e # shellcheck disable=SC2312 - ${WGRIB2} -d "${last}" "${tmpfile}" | grep -E -i "ugrd|ustm|uflx|u-gwd|land" + ${WGRIB2} -d "${last}" "${tmpfile}" | grep -E -i "ugrd|ustm|uflx|u-gwd|land|maxuw" rc=$? set_strict if (( rc == 0 )); then # Matched the grep diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 1988fe60f6..95067ca4b4 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -175,19 +175,13 @@ declare -a ufs_templates=("model_configure.IN" "input_global_nest.nml.IN" "MOM6_data_table.IN" "ice_in.IN" "ufs.configure.atm.IN" - "ufs.configure.atm_esmf.IN" "ufs.configure.atmaero.IN" - "ufs.configure.atmaero_esmf.IN" "ufs.configure.s2s.IN" - "ufs.configure.s2s_esmf.IN" "ufs.configure.s2sa.IN" - "ufs.configure.s2sa_esmf.IN" "ufs.configure.s2sw.IN" - "ufs.configure.s2sw_esmf.IN" "ufs.configure.s2swa.IN" - "ufs.configure.s2swa_esmf.IN" "ufs.configure.leapfrog_atm_wav.IN" - "ufs.configure.leapfrog_atm_wav_esmf.IN" + "ww3_shel.nml.IN" "post_itag_gfs") for file in "${ufs_templates[@]}"; do [[ -s "${file}" ]] && rm -f "${file}" diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index 63ace62a36..76471dc6b7 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit 63ace62a36a263f03b914a92fc5536509e862dbc +Subproject commit 76471dc6b7bfc3342416d1a3402f360724f7c0fa diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 64cb14a3ec..7b9bd0ee48 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -341,10 +341,9 @@ FV3_out() { # shellcheck disable=SC2034 WW3_postdet() { echo "SUB ${FUNCNAME[0]}: Linking input data for WW3" - local ww3_grid first_ww3_restart_out ww3_restart_file # Copy initial condition files: - local restart_date restart_dir + local restart_date restart_dir seconds if [[ "${RERUN}" == "YES" ]]; then restart_date="${RERUN_DATE}" restart_dir="${DATArestart}/WW3_RESTART" @@ -354,29 +353,64 @@ WW3_postdet() { fi echo "Copying WW3 restarts for 'RUN=${RUN}' at '${restart_date}' from 
'${restart_dir}'" - ww3_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3" - if [[ -s "${ww3_restart_file}" ]]; then - ${NCP} "${ww3_restart_file}" "${DATA}/restart.ww3" \ - || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 ) - first_ww3_restart_out=$(date --utc -d "${restart_date:0:8} ${restart_date:8:2} + ${restart_interval} hours" +%Y%m%d%H) + + #First check to see if netcdf restart exists: + local ww3_binary_restart_file ww3_netcdf_restart_file + ww3_binary_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3" + ww3_netcdf_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3.nc" + if [[ -s "${ww3_netcdf_restart_file}" ]]; then + export WW3_restart_from_binary=false + seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds + local ww3_restart_dest_file="ufs.cpld.ww3.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" + ${NCP} "${ww3_netcdf_restart_file}" "${DATA}/${ww3_restart_dest_file}" \ + || ( echo "FATAL ERROR: Unable to copy netcdf WW3 IC, ABORT!"; exit 1 ) + elif [[ -s "${ww3_binary_restart_file}" ]]; then + # found binary ww3 restart file + export WW3_restart_from_binary=true + if [[ -f "${DATA}/ufs.cpld.cpl.r.nc" ]]; then + #if this is a cmeps continue then the wave restart name is different + seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds + local ww3_restart_dest_file="ufs.cpld.ww3.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}" + ${NCP} "${ww3_binary_restart_file}" "${DATA}/${ww3_restart_dest_file}" \ + || ( echo "FATAL ERROR: Unable to copy binary WW3 IC, ABORT!"; exit 1 ) + else + ${NCP} "${ww3_binary_restart_file}" "${DATA}/restart.ww3" \ + || ( echo "FATAL ERROR: Unable to copy binary WW3 IC, ABORT!"; exit 1 ) + fi else if [[ "${RERUN}" == "YES" ]]; then # In the case of a RERUN, the WW3 restart file is required - echo "FATAL ERROR: WW3 restart file '${ww3_restart_file}' not found for RERUN='${RERUN}', ABORT!" + echo "FATAL ERROR: WW3 binary | netcdf restart file '${ww3_binary_restart_file}' | '${ww3_netcdf_restart_file}' not found for RERUN='${RERUN}', ABORT!" exit 1 else - echo "WARNING: WW3 restart file '${ww3_restart_file}' not found for warm_start='${warm_start}', will start from rest!" - first_ww3_restart_out=${model_start_date_current_cycle} + echo "WARNING: WW3 binary | netcdf restart file '${ww3_binary_restart_file}' | '${ww3_netcdf_restart_file}' not found for warm_start='${warm_start}', will start from rest!" 
+ export WW3_restart_from_binary=true fi fi + first_ww3_restart_out=$(date --utc -d "${restart_date:0:8} ${restart_date:8:2} + ${restart_interval} hours" +%Y%m%d%H) + if [[ "${DOIAU:-NO}" == "YES" ]]; then + first_ww3_restart_out=$(date --utc -d "${first_ww3_restart_out:0:8} ${first_ww3_restart_out:8:2} + ${half_window} hours" +%Y%m%d%H) + fi + # Link restart files for (( vdate = first_ww3_restart_out; vdate <= forecast_end_cycle; vdate = $(date --utc -d "${vdate:0:8} ${vdate:8:2} + ${restart_interval} hours" +%Y%m%d%H) )); do - ww3_restart_file="${vdate:0:8}.${vdate:8:2}0000.restart.ww3" - ${NLN} "${DATArestart}/WW3_RESTART/${ww3_restart_file}" "${ww3_restart_file}" + seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds + ww3_restart_ufs_file="ufs.cpld.ww3.r.${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}.nc" + ww3_netcdf_restart_file="${vdate:0:8}.${vdate:8:2}0000.restart.ww3.nc" + ${NLN} "${DATArestart}/WW3_RESTART/${ww3_netcdf_restart_file}" "${ww3_restart_ufs_file}" done + # TO DO: link GEFS restart for next cycle IC + #if [[ "${RUN}" == "gefs" ]]; then + # vdate=${model_start_date_next_cycle} + # seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds + # ww3_restart_ufs_file="ufs.cpld.ww3.r.${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}.nc" + # ww3_netcdf_restart_file="${vdate:0:8}.${vdate:8:2}0000.restart.ww3.nc" + # ${NLN} "${DATArestart}/WW3_RESTART/${ww3_netcdf_restart_file}" "${ww3_restart_ufs_file}" + #fi + # Link output files local wavprfx="${RUN}wave${WAV_MEMBER:-}" ${NLN} "${COMOUT_WAVE_HISTORY}/${wavprfx}.log.${waveGRD}.${PDY}${cyc}" "log.ww3" @@ -414,7 +448,42 @@ WW3_nml() { WW3_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for WW3" - # TODO: Need to add logic to copy restarts from DATArestart/WW3_RESTART to COMOUT_WAVE_RESTART + + # Copy wave namelist from DATA to COMOUT_CONF after the forecast is run (and successfull) + ${NCP} "${DATA}/ww3_shel.nml" "${COMOUT_CONF}/ufs.ww3_shel.nml" + + # Copy WW3 restarts at the end of the forecast segment to COM for RUN=gfs|gefs + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + local restart_file + if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then + echo "Copying WW3 restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.restart.ww3.nc" + ${NCP} "${DATArestart}/WW3_RESTART/${restart_file}" \ + "${COMOUT_WAVE_RESTART}/${restart_file}" + fi + fi + + # Copy restarts for next cycle for RUN=gdas|gefs + #TO DO: GEFS needs to be added here + if [[ "${RUN}" == "gdas" ]]; then + local restart_date restart_file + restart_date="${model_start_date_next_cycle}" + echo "Copying WW3 restarts for 'RUN=${RUN}' at ${restart_date}" + restart_file="${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3.nc" + ${NCP} "${DATArestart}/WW3_RESTART/${restart_file}" \ + "${COMOUT_WAVE_RESTART}/${restart_file}" + fi + + # Copy restarts for downstream usage in HAFS + if [[ "${RUN}" == "gdas" ]]; then + local restart_date restart_file + restart_date="${next_cycle}" + echo "Copying WW3 restarts for 'RUN=${RUN}' at ${restart_date}" + restart_file="${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3.nc" + ${NCP} "${DATArestart}/WW3_RESTART/${restart_file}" \ + "${COMOUT_WAVE_RESTART}/${restart_file}" + fi + } diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 2b730fa7d6..e08b84d932 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -574,6 +574,18 @@ FV3_predet(){ ${NCP} 
"${PARMgfs}/post/sfs/postxconfig-NT-sfs.txt" "${DATA}/postxconfig-NT.txt" ${NCP} "${PARMgfs}/post/sfs/postxconfig-NT-sfs.txt" "${DATA}/postxconfig-NT_FH00.txt" fi + + # For gefs run, provide ensemble header information + if [[ "${RUN}" == "gefs" ]]; then + if [[ "${ENSMEM}" == "000" ]]; then + export e1=1 + else + export e1=3 + fi + export e2="${ENSMEM:1:2}" + export e3="${NMEM_ENS}" + fi + fi } @@ -695,6 +707,7 @@ MOM6_predet(){ } +# shellcheck disable=SC2178 CMEPS_predet(){ echo "SUB ${FUNCNAME[0]}: CMEPS before run type determination" @@ -703,6 +716,29 @@ CMEPS_predet(){ if [[ ! -d "${DATArestart}/CMEPS_RESTART" ]]; then mkdir -p "${DATArestart}/CMEPS_RESTART"; fi ${NLN} "${DATArestart}/CMEPS_RESTART" "${DATA}/CMEPS_RESTART" + # For CMEPS, CICE, MOM6 and WW3 determine restart writes + # Note FV3 has its own restart intervals + cmeps_restart_interval=${restart_interval:-${FHMAX}} + # restart_interval = 0 implies write restart at the END of the forecast i.e. at FHMAX + # Convert restart interval into an explicit list for FV3 + if (( cmeps_restart_interval == 0 )); then + if [[ "${DOIAU:-NO}" == "YES" ]]; then + CMEPS_RESTART_FH=$(( FHMAX + half_window )) + else + CMEPS_RESTART_FH=("${FHMAX}") + fi + else + if [[ "${DOIAU:-NO}" == "YES" ]]; then + local restart_interval_start=$(( cmeps_restart_interval + half_window )) + local restart_interval_end=$(( FHMAX + half_window )) + else + local restart_interval_start=${cmeps_restart_interval} + local restart_interval_end=${FHMAX} + fi + CMEPS_RESTART_FH="$(seq -s ' ' "${restart_interval_start}" "${cmeps_restart_interval}" "${restart_interval_end}")" + fi + export CMEPS_RESTART_FH + # TODO: For GEFS, once cycling waves "self-cycles" and therefore needs to have a restart at 6 hour } # shellcheck disable=SC2034 diff --git a/ush/load_ufsda_modules.sh b/ush/load_ufsda_modules.sh index 8117d3f359..9c7c57c330 100755 --- a/ush/load_ufsda_modules.sh +++ b/ush/load_ufsda_modules.sh @@ -35,6 +35,14 @@ module use "${HOMEgfs}/sorc/gdas.cd/modulefiles" case "${MACHINE_ID}" in ("hera" | "orion" | "hercules" | "wcoss2") + #TODO: Remove LMOD_TMOD_FIND_FIRST line when spack-stack on WCOSS2 + if [[ "${MACHINE_ID}" == "wcoss2" ]]; then + export LMOD_TMOD_FIND_FIRST=yes + # TODO: Add path to GDASApp libraries and cray-mpich as temporary patches + # TODO: Remove LD_LIBRARY_PATH lines as soon as permanent solutions are available + export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${HOMEgfs}/sorc/gdas.cd/build/lib" + export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/opt/cray/pe/mpich/8.1.19/ofi/intel/19.0/lib" + fi module load "${MODS}/${MACHINE_ID}" ncdump=$( command -v ncdump ) NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) diff --git a/ush/module-setup.sh b/ush/module-setup.sh index 366286d142..2429963d70 100755 --- a/ush/module-setup.sh +++ b/ush/module-setup.sh @@ -52,7 +52,7 @@ elif [[ ${MACHINE_ID} = s4* ]] ; then elif [[ ${MACHINE_ID} = wcoss2 ]]; then # We are on WCOSS2 # Ignore default modules of the same version lower in the search path (req'd by spack-stack) - export LMOD_TMOD_FIND_FIRST=yes + #export LMOD_TMOD_FIND_FIRST=yes #TODO: Uncomment this when using spack-stack module reset elif [[ ${MACHINE_ID} = cheyenne* ]] ; then diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh index 8033d7686a..d28048f098 100755 --- a/ush/parsing_model_configure_FV3.sh +++ b/ush/parsing_model_configure_FV3.sh @@ -25,6 +25,7 @@ local SHOUR=${model_start_date:8:2} local FHROT=${IAU_FHROT:-0} local DT_ATMOS=${DELTIM} local 
diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh
index 8033d7686a..d28048f098 100755
--- a/ush/parsing_model_configure_FV3.sh
+++ b/ush/parsing_model_configure_FV3.sh
@@ -25,6 +25,7 @@ local SHOUR=${model_start_date:8:2}
 local FHROT=${IAU_FHROT:-0}
 local DT_ATMOS=${DELTIM}
 local RESTART_INTERVAL="${FV3_RESTART_FH[*]}"
+local RESTART_FH="${CMEPS_RESTART_FH:-" "}"
 # QUILTING
 local QUILTING_RESTART="${QUILTING_RESTART:-${QUILTING}}"
 local WRITE_GROUP=${WRITE_GROUP:-1}
diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh
index bb6a204cc8..2628b03594 100755
--- a/ush/parsing_namelists_FV3.sh
+++ b/ush/parsing_namelists_FV3.sh
@@ -711,24 +711,6 @@ EOF
 EOF
   fi
-  if [[ "${DO_OCN_SPPT:-NO}" == "YES" ]]; then
-    cat >> input.nml << EOF
[... deleted heredoc namelist entries truncated in the source diff ...]
-EOF
-  fi
   cat >> input.nml << EOF
 /
 EOF
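The input.nml hunk above relies on conditionally appended heredocs (the deleted stochastic-physics entries themselves are truncated in this diff and are not reproduced here). A generic sketch of the pattern; MY_OPTION, MY_VALUE and example_nml are illustrative names only, not from the PR:

#!/usr/bin/env bash
# Sketch only: assemble a Fortran namelist from conditional heredoc appends,
# the same pattern parsing_namelists_FV3.sh uses for input.nml.
cat > input.nml << EOF
&example_nml
  base_setting = .true.
EOF

if [[ "${MY_OPTION:-NO}" == "YES" ]]; then
  cat >> input.nml << EOF
  my_setting = ${MY_VALUE:-1.0}
EOF
fi

# close the namelist group
cat >> input.nml << EOF
/
EOF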
diff --git a/ush/parsing_namelists_WW3.sh b/ush/parsing_namelists_WW3.sh
index 5ee4944c18..67bffb1967 100755
--- a/ush/parsing_namelists_WW3.sh
+++ b/ush/parsing_namelists_WW3.sh
@@ -4,158 +4,61 @@ WW3_namelists(){

 # WW3 namelists/input generation

-  FHMAX_WAV=${FHMAX_WAV:-384}
-
-  # Date and time stuff
-
-  # Beginning time for outpupt may differ from SDATE if DOIAU=YES
-  export date=$PDY
-  export YMDH=${PDY}${cyc}
-  # Roll back $IAU_FHROT hours of DOIAU=YES
-  if [ "$DOIAU" = "YES" ]
-  then
-    WAVHINDH=$(( WAVHINDH + IAU_FHROT ))
-  fi
-  # Set time stamps for model start and output
-  # For special case when IAU is on but this is an initial half cycle
-  if [ ${IAU_OFFSET:-0} = 0 ]; then
-    ymdh_beg=$YMDH
-  else
-    ymdh_beg=$($NDATE -$WAVHINDH $YMDH)
-  fi
-  time_beg="$(echo $ymdh_beg | cut -c1-8) $(echo $ymdh_beg | cut -c9-10)0000"
-  ymdh_end=$($NDATE $FHMAX_WAV $YMDH)
-  time_end="$(echo $ymdh_end | cut -c1-8) $(echo $ymdh_end | cut -c9-10)0000"
-  ymdh_beg_out=$YMDH
-  time_beg_out="$(echo $ymdh_beg_out | cut -c1-8) $(echo $ymdh_beg_out | cut -c9-10)0000"
-
-  # Restart file times (already has IAU_FHROT in WAVHINDH)
-  RSTOFFSET=$(( ${WAVHCYC} - ${WAVHINDH} ))
-  # Update restart time is added offset relative to model start
-  RSTOFFSET=$(( ${RSTOFFSET} + ${RSTIOFF_WAV} ))
-  ymdh_rst_ini=$($NDATE ${RSTOFFSET} $YMDH)
-  RST2OFFSET=$(( DT_2_RST_WAV / 3600 ))
-  ymdh_rst2_ini=$($NDATE ${RST2OFFSET} $YMDH) # DT2 relative to first-first-cycle restart file
-  # First restart file for cycling
-  time_rst_ini="$(echo $ymdh_rst_ini | cut -c1-8) $(echo $ymdh_rst_ini | cut -c9-10)0000"
-  if [ ${DT_1_RST_WAV} = 1 ]; then
-    time_rst1_end=${time_rst_ini}
-  else
-    RST1OFFSET=$(( DT_1_RST_WAV / 3600 ))
-    ymdh_rst1_end=$($NDATE $RST1OFFSET $ymdh_rst_ini)
-    time_rst1_end="$(echo $ymdh_rst1_end | cut -c1-8) $(echo $ymdh_rst1_end | cut -c9-10)0000"
-  fi
-  # Second restart file for checkpointing
-  if [ "${RSTTYPE_WAV}" = "T" ]; then
-    time_rst2_ini="$(echo $ymdh_rst2_ini | cut -c1-8) $(echo $ymdh_rst2_ini | cut -c9-10)0000"
-    time_rst2_end=$time_end
-    # Condition for gdas run or any other run when checkpoint stamp is > ymdh_end
-    if [ $ymdh_rst2_ini -ge $ymdh_end ]; then
-      ymdh_rst2_ini=$($NDATE 3 $ymdh_end)
-      time_rst2_ini="$(echo $ymdh_rst2_ini | cut -c1-8) $(echo $ymdh_rst2_ini | cut -c9-10)0000"
-      time_rst2_end=$time_rst2_ini
-    fi
-  else
-    time_rst2_ini="$"
-    time_rst2_end=
-    DT_2_RST_WAV=
-  fi
-
-
-  set +x
-  echo ' '
-  echo 'Times in wave model format :'
-  echo '----------------------------'
-  echo "   date / cycle  : $date $cycle"
-  echo "   starting time : $time_beg"
-  echo "   ending time   : $time_end"
-  echo ' '
-  set_trace
-
-
+  FHMAX_WAV="${FHMAX_WAV:-384}"

# --------------------------------------------------------------------------- #
# Create ww3_shel.inp

-  if [ -f "${PARMgfs}/wave/ww3_shel.inp.tmpl" ]; then
-    cp "${PARMgfs}/wave/ww3_shel.inp.tmpl" "ww3_shel.inp.tmpl"
-  fi
-  if [ ! -f ww3_shel.inp.tmpl ]; then
-    echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 SHEL INPUT FILE"
-    exit 12
-  fi

  # Buoy location file
-  if [ -f ${PARMgfs}/wave/wave_${NET}.buoys ]
+  if [ -f "${PARMgfs}/wave/wave_${NET}.buoys" ]
   then
-    cp ${PARMgfs}/wave/wave_${NET}.buoys buoy.loc
+    ${NCP} "${PARMgfs}/wave/wave_${NET}.buoys" "${DATA}/ww3_points.list"
   fi
-  if [ -f buoy.loc ]
+  if [ -f "${DATA}/ww3_points.list" ]
   then
     set +x
-    echo " buoy.loc copied (${PARMgfs}/wave/wave_${NET}.buoys)."
+    echo "ww3_points.list copied (${PARMgfs}/wave/wave_${NET}.buoys)."
     set_trace
   else
-    echo " FATAL ERROR : buoy.loc (${PARMgfs}/wave/wave_${NET}.buoys) NOT FOUND"
+    echo "FATAL ERROR : ww3_points.list (${PARMgfs}/wave/wave_${NET}.buoys) NOT FOUND"
     exit 12
   fi

-# Initialize inp file parameters
-ICELINE='F F'
-CURRLINE='F F'
-WINDLINE='F F'
-
-case ${WW3ATMINP} in
-  'YES' )
-    WINDLINE="T F";;
-  'CPL' )
-    WINDLINE="C F";;
-esac
-
-case ${WW3ICEINP} in
-  'YES' )
-    ICELINE="T F";;
-  'CPL' )
-    ICELINE="C F";;
-esac
-
-case ${WW3CURINP} in
-  'YES' )
-    CURRLINE="T F";;
-  'CPL' )
-    CURRLINE="C F";;
-esac
-
-sed -e "s/IOSRV/${IOSRV}/g" \
-    -e "s/OUTPARS/${OUTPARS_WAV}/g" \
-    -e "s/ICELINE/$ICELINE/g" \
-    -e "s/CURRLINE/$CURRLINE/g" \
-    -e "s/WINDLINE/$WINDLINE/g" \
-    -e "s/RUN_BEG/$time_beg/g" \
-    -e "s/RUN_END/$time_end/g" \
-    -e "s/OUT_BEG/$time_beg_out/g" \
-    -e "s/OUT_END/$time_end/g" \
-    -e "s/DTFLD/ $DTFLD_WAV/g" \
-    -e "s/GOFILETYPE/ $GOFILETYPE/g" \
-    -e "s/POFILETYPE/ $POFILETYPE/g" \
-    -e "s/DTPNT/ $DTPNT_WAV/g" \
-    -e "s/DTPNT/ $DTPNT_WAV/g" \
-    -e "/BUOY_FILE/r buoy.loc" \
-    -e "s/BUOY_FILE/DUMMY/g" \
-    -e "s/RST_BEG/$time_rst_ini/g" \
-    -e "s/RSTTYPE/$RSTTYPE_WAV/g" \
-    -e "s/RST_2_BEG/$time_rst2_ini/g" \
-    -e "s/DTRST/$DT_1_RST_WAV/g" \
-    -e "s/DT_2_RST/$DT_2_RST_WAV/g" \
-    -e "s/RST_END/$time_rst1_end/g" \
-    -e "s/RST_2_END/$time_rst2_end/g" \
-    ww3_shel.inp.tmpl | \
-sed -n "/DUMMY/!p" > ww3_shel.inp
-
-rm -f ww3_shel.inp.tmpl buoy.loc
-
-cat ww3_shel.inp
+  # Set coupling to ice/current
+  WW3_ICE="F"
+  WW3_CUR="F"
+
+  case ${WW3ICEINP} in
+    'YES' )
+      WW3_ICE="T";;
+    'CPL' )
+      WW3_ICE="C";;
+  esac
+
+  case ${WW3CURINP} in
+    'YES' )
+      WW3_CUR="T";;
+    'CPL' )
+      WW3_CUR="C";;
+  esac
+
+  # Variables used in atparse of shel template
+  export WW3_IC1="F"
+  export WW3_IC5="F"
+  export WW3_ICE
+  export WW3_CUR
+  export WW3_OUTPARS="${OUTPARS_WAV}"
+  export WW3_DTFLD="${DTFLD_WAV}"
+  export WW3_DTPNT="${DTPNT_WAV}"

+  # Ensure the template exists
+  local template=${WW3_INPUT_TEMPLATE:-"${PARMgfs}/ufs/ww3_shel.nml.IN"}
+  if [[ ! -f "${template}" ]]; then
+    echo "FATAL ERROR: template '${template}' does not exist, ABORT!"
+    exit 1
+  fi
+  rm -f "${DATA}/ww3_shel.nml"
+  atparse < "${template}" >> "${DATA}/ww3_shel.nml"
+  echo "Rendered ww3_shel.nml:"
+  cat "${DATA}/ww3_shel.nml"
}
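The rewritten function no longer drives sed over ww3_shel.inp.tmpl; it exports WW3_* variables and renders parm/ufs/ww3_shel.nml.IN through the workflow's atparse helper. As a rough illustration of that contract only, a stand-in that replaces @[VAR] tokens with like-named shell variables could look like the following; the real helper lives in ush/ and handles more edge cases, and the template lines in the demo are illustrative, not the actual ww3_shel.nml.IN contents:

#!/usr/bin/env bash
# Sketch only: minimal stand-in for an atparse-style template renderer.
atparse_sketch() {
  local line varname
  while IFS= read -r line; do
    while [[ "${line}" =~ @\[([A-Za-z_][A-Za-z_0-9]*)\] ]]; do
      varname="${BASH_REMATCH[1]}"
      line="${line//@\[${varname}\]/${!varname}}"  # unset variables render as empty
    done
    printf '%s\n' "${line}"
  done
}

export WW3_ICE="C" WW3_CUR="C" WW3_DTFLD="3600"
printf '%s\n' \
  "ice forcing flag: @[WW3_ICE]" \
  "current forcing flag: @[WW3_CUR]" \
  "field output stride: @[WW3_DTFLD]" | atparse_sketch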
diff --git a/ush/parsing_ufs_configure.sh b/ush/parsing_ufs_configure.sh
index 7ee699ef0a..f5e5857830 100755
--- a/ush/parsing_ufs_configure.sh
+++ b/ush/parsing_ufs_configure.sh
@@ -53,12 +53,14 @@ if [[ "${cplflx}" = ".true." ]]; then
   local CMEPS_RESTART_DIR="CMEPS_RESTART/"
   local CPLMODE="${cplmode}"
   local coupling_interval_fast_sec="${CPL_FAST}"
-  local RESTART_N="${restart_interval}"
+  local RESTART_N=999999
   local ocean_albedo_limit=0.06
   local ATMTILESIZE="${CASE:1}"
   local ocean_albedo_limit=0.06
   local pio_rearranger=${pio_rearranger:-"box"}
   local MED_history_n=1000000
+
+  local histaux_enabled=".false."
 fi

 if [[ "${cplice}" = ".true." ]]; then
@@ -74,12 +76,10 @@ if [[ "${cplwav}" = ".true." ]]; then
   local wav_model="ww3"
   local wav_petlist_bounds="$(( ATMPETS+OCNPETS+ICEPETS )) $(( ATMPETS+OCNPETS+ICEPETS+WAVPETS-1 ))"
   local wav_omp_num_threads="${WAVTHREADS}"
-  local WW3_user_sets_restname="false"
   local WW3_user_histname="false"
   local WW3_historync="false"
-  local WW3_restartnc="false"
-  local WW3_restart_from_binary="false"
+  local WW3_restartnc="true"
   local WW3_PIO_FORMAT="pnetcdf"
   local WW3_PIO_IOTASKS=-99
   local WW3_PIO_STRIDE=4
@@ -97,6 +97,13 @@ if [[ "${cplchm}" = ".true." ]]; then
 fi

+# Set the ESMF_THREADING variable for ufs.configure
+if [[ "${USE_ESMF_THREADING}" = "YES" ]]; then
+  local ESMF_THREADING="true"
+else
+  local ESMF_THREADING="false"
+fi
+
 # Ensure the template exists
 if [[ ! -r "${ufs_configure_template}" ]]; then
   echo "FATAL ERROR: template '${ufs_configure_template}' does not exist, ABORT!"
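ufs.configure is rendered from the local variables above by template substitution. A cheap post-render guard, not part of this PR and shown only as a sketch (the rendered path is hypothetical), is to fail fast if any @[...] tokens survive rendering:

#!/usr/bin/env bash
# Sketch only: detect unresolved @[...] tokens in a rendered config file.
rendered="${DATA:-.}/ufs.configure"  # hypothetical location of the rendered file
if grep -q '@\[' "${rendered}"; then
  echo "FATAL ERROR: unresolved tokens remain in '${rendered}':"
  grep -n '@\[' "${rendered}"
  exit 1
fi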
diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py
index aadf325531..cf9e6ccf4c 100644
--- a/workflow/applications/gefs.py
+++ b/workflow/applications/gefs.py
@@ -94,8 +94,8 @@ def get_task_names(self):
             tasks += ['wavepostpnt']

         if options['do_extractvars']:
-            tasks += ['extractvars', 'arch']
+            tasks += ['extractvars']

-        tasks += ['cleanup']
+        tasks += ['arch', 'cleanup']

         return {f"{self.run}": tasks}
diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index 725968241f..02f206fe61 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -122,7 +122,7 @@ def _get_app_configs(self, run):
             configs += ['awips', 'fbwind']

         if options['do_wave']:
-            configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt']
+            configs += ['waveinit', 'wavepostsbs', 'wavepostpnt']
             if options['do_wave_bnd']:
                 configs += ['wavepostbndpnt', 'wavepostbndpntbll']
             if options['do_gempak']:
@@ -190,7 +190,7 @@ def get_task_names(self):
         if options['do_jedisnowda']:
             task_names[run] += ['snowanl']

-        wave_prep_tasks = ['waveinit', 'waveprep']
+        wave_prep_tasks = ['waveinit']
         wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll']
         wave_post_tasks = ['wavepostsbs', 'wavepostpnt']
diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py
index 5b397c105b..de1c8cef27 100644
--- a/workflow/applications/gfs_forecast_only.py
+++ b/workflow/applications/gfs_forecast_only.py
@@ -67,7 +67,7 @@ def _get_app_configs(self, run):
             configs += ['oceanice_products']

         if options['do_wave']:
-            configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt']
+            configs += ['waveinit', 'wavepostsbs', 'wavepostpnt']
             if options['do_wave_bnd']:
                 configs += ['wavepostbndpnt', 'wavepostbndpntbll']
             if options['do_gempak']:
diff --git a/workflow/generate_workflows.sh b/workflow/generate_workflows.sh
index dbd360fda2..152e442dec 100755
--- a/workflow/generate_workflows.sh
+++ b/workflow/generate_workflows.sh
@@ -5,7 +5,7 @@ function _usage() {
    cat << EOF
    This script automates the experiment setup process for the global workflow.
    Options are also available to update submodules, build the workflow (with
-   specific build flags), specicy which YAMLs and YAML directory to run, and
+   specific build flags), specify which YAMLs and YAML directory to run, and
    whether to automatically update your crontab.

    Usage: generate_workflows.sh [OPTIONS] /path/to/RUNTESTS
diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py
index cbeba46e08..d89b6de1d8 100644
--- a/workflow/rocoto/gefs_tasks.py
+++ b/workflow/rocoto/gefs_tasks.py
@@ -295,7 +295,7 @@ def atmos_ensstat(self):
         for key, value in postenvar_dict.items():
             postenvars.append(rocoto.create_envar(name=key, value=str(value)))

-        task_name = f'gefs_atmos_ensstat_#fhr_label#'
+        task_name = 'gefs_atmos_ensstat_#fhr_label#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -306,7 +306,7 @@
                      'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
                      'maxtries': '&MAXTRIES;'}

-        fhr_metatask_dict = {'task_name': f'gefs_atmos_ensstat',
+        fhr_metatask_dict = {'task_name': 'gefs_atmos_ensstat',
                              'task_dict': task_dict,
                              'var_dict': fhr_var_dict}
@@ -351,7 +351,7 @@ def wavepostsbs(self):
         largest_group = max([len(grp.split(',')) for grp in fhr_var_dict['fhr_list'].split(' ')])
         resources['walltime'] = Tasks.multiply_HMS(resources['walltime'], largest_group)

-        task_name = f'gefs_wave_post_grid_mem#member#_#fhr_label#'
+        task_name = 'gefs_wave_post_grid_mem#member#_#fhr_label#'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'dependency': dependencies,
@@ -363,7 +363,7 @@
                      'maxtries': '&MAXTRIES;'
                      }

-        fhr_metatask_dict = {'task_name': f'gefs_wave_post_grid_#member#',
+        fhr_metatask_dict = {'task_name': 'gefs_wave_post_grid_#member#',
                              'task_dict': task_dict,
                              'var_dict': fhr_var_dict}
@@ -569,7 +569,7 @@ def arch(self):
         dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')
         resources = self.get_resource('arch')

-        task_name = 'arch'
+        task_name = 'gefs_arch'
         task_dict = {'task_name': task_name,
                      'resources': resources,
                      'envars': self.envars,
@@ -610,35 +610,13 @@ def globus(self):

     def cleanup(self):
         deps = []
-        if self.options['do_extractvars']:
-            dep_dict = {'type': 'task', 'name': 'gefs_arch'}
-            deps.append(rocoto.add_dependency(dep_dict))
-            if self.options['do_globusarch']:
-                dep_dict = {'type': 'task', 'name': 'gefs_globus'}
-                deps.append(rocoto.add_dependency(dep_dict))
-            dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')
-        else:
-            dep_dict = {'type': 'metatask', 'name': 'gefs_atmos_prod'}
-            deps.append(rocoto.add_dependency(dep_dict))
-            dep_dict = {'type': 'metatask', 'name': 'gefs_atmos_ensstat'}
+        dep_dict = {'type': 'task', 'name': 'gefs_arch'}
+        deps.append(rocoto.add_dependency(dep_dict))
+        dependencies = rocoto.create_dependency(dep=deps)
+        if self.options['do_globusarch']:
+            dep_dict = {'type': 'task', 'name': 'gefs_globus'}
             deps.append(rocoto.add_dependency(dep_dict))
-        if self.options['do_ice']:
-            dep_dict = {'type': 'metatask', 'name': 'gefs_ice_prod'}
-            deps.append(rocoto.add_dependency(dep_dict))
-        if self.options['do_ocean']:
-            dep_dict = {'type': 'metatask', 'name': 'gefs_ocean_prod'}
-            deps.append(rocoto.add_dependency(dep_dict))
-        if self.options['do_wave']:
-            dep_dict = {'type': 'metatask', 'name': 'gefs_wave_post_grid'}
-            deps.append(rocoto.add_dependency(dep_dict))
-            dep_dict = {'type': 'metatask', 'name': 'gefs_wave_post_pnt'}
-            deps.append(rocoto.add_dependency(dep_dict))
-            if self.options['do_wave_bnd']:
-                dep_dict = {'type': 'metatask', 'name': 'gefs_wave_post_bndpnt'}
-                deps.append(rocoto.add_dependency(dep_dict))
-                dep_dict = {'type': 'metatask', 'name': 'gefs_wave_post_bndpnt_bull'}
-                deps.append(rocoto.add_dependency(dep_dict))
-        dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')
+            dependencies = rocoto.create_dependency(dep=deps, dep_condition='and')

         resources = self.get_resource('cleanup')
         task_name = 'gefs_cleanup'
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 73c0ac06c7..3b90c48cdf 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -641,10 +641,10 @@ def prepoceanobs(self):

     def marineanlletkf(self):
         deps = []
-        dep_dict = {'type': 'metatask', 'name': 'enkfgdas_fcst', 'offset': f"-{timedelta_to_HMS(self._base['interval_gdas'])}"}
-        deps.append(rocoto.add_dependency(dep_dict))
         dep_dict = {'type': 'task', 'name': f'{self.run}_prepoceanobs'}
         deps.append(rocoto.add_dependency(dep_dict))
+        dep_dict = {'type': 'task', 'name': f'{self.run}_marinebmat'}
+        deps.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)

         resources = self.get_resource('marineanlletkf')
@@ -931,6 +931,11 @@ def _fcst_cycled(self):
         dep = rocoto.add_dependency(dep_dict)
         dependencies = rocoto.create_dependency(dep=dep)

+        if self.options['do_wave']:
+            wave_job = 'waveprep' if self.options['app'] in ['ATMW'] else 'waveinit'
+            dep_dict = {'type': 'task', 'name': f'{self.run}_{wave_job}'}
+            dependencies.append(rocoto.add_dependency(dep_dict))
+
         if self.options['do_jediocnvar']:
             dep_dict = {'type': 'task', 'name': f'{self.run}_marineanlfinal'}
             dependencies.append(rocoto.add_dependency(dep_dict))
@@ -950,11 +955,6 @@ def _fcst_cycled(self):
             dependencies.append(rocoto.add_dependency(dep_dict))
         dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies)

-        if self.options['do_wave']:
-            dep_dict = {'type': 'task', 'name': f'{self.run}_waveprep'}
-            dependencies.append(rocoto.add_dependency(dep_dict))
-            dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
-
         cycledef = 'gdas_half,gdas' if self.run in ['gdas'] else self.run

         if self.run in ['gfs']: