diff --git a/.github/workflows/build_conda.yml b/.github/workflows/build_conda.yml index d9ba9162..94acc688 100644 --- a/.github/workflows/build_conda.yml +++ b/.github/workflows/build_conda.yml @@ -7,10 +7,15 @@ jobs: build: runs-on: ubuntu-latest container: - image: continuumio/miniconda3:latest + image: ghcr.io/noaa-gfdl/fre-cli:miniconda24.7.1_gcc14.2.0 steps: - name: Checkout Files uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Add mkmf to PATH + run: | + echo $PWD/mkmf/bin >> $GITHUB_PATH - name: Run Conda to Build run: | conda config --append channels conda-forge diff --git a/.github/workflows/create_test_conda_env.yml b/.github/workflows/create_test_conda_env.yml index ce5de814..18ff20f0 100644 --- a/.github/workflows/create_test_conda_env.yml +++ b/.github/workflows/create_test_conda_env.yml @@ -1,70 +1,58 @@ name: create_test_conda_env -on: [push] +on: [pull_request] jobs: build-linux: runs-on: ubuntu-latest + container: + image: ghcr.io/noaa-gfdl/fre-cli:miniconda24.7.1_gcc14.2.0 steps: - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 with: - python-version: '>=3.9' - - - name: Add conda to system path - run: | - # $CONDA is an env var pointing to root of miniconda dir - echo $CONDA/bin >> $GITHUB_PATH - + submodules: 'recursive' - name: Create fre-cli environment run: | # create environment containing all dependencies # the env cannot be explicitly activated in github CI/CD conda env create -f environment.yml --name fre-cli - # add conda env's executables to github's PATH equiv. + # sets CONDA to wherever it may be on the image + source /root/.bashrc + + # add conda env's executables and mkmf to github's PATH equiv. echo $CONDA/envs/fre-cli/bin >> $GITHUB_PATH - + echo $PWD/mkmf/bin >> $GITHUB_PATH + # use *conda environment's pip* to install fre-cli # called w/ full path to conda's python for explicitness # called as a module (-m pip) for explicitness - $CONDA/envs/fre-cli/bin/python -m pip install --prefix $CONDA/envs/fre-cli . + $CONDA/envs/fre-cli/bin/python -m pip install --prefix $CONDA/envs/fre-cli . - name: Run pytest in fre-cli environment run: | - # try to make sure the right things are in GITHUB_PATH - echo $CONDA/envs/fre-cli/bin >> $GITHUB_PATH - - # are we talking to the right python? - which python - python --version - $CONDA/envs/fre-cli/bin/python --version - + # add spack installed binaries to front of path so that + # conda's netcdf/hdf5 installs don't break compilation tests + export path_save=$PATH + export PATH="/opt/views/view/bin:$PATH" + # run pytest pytest --junit-xml=pytest_results.xml --config-file=fre/pytest.ini --cov-config=fre/coveragerc --cov-report=xml --cov=fre fre/ - - # install genbadge to generate coverage badge based on xml + + # restore original path and install genbadge to generate coverage badge based on xml + export PATH="$path_save" pip install genbadge genbadge coverage -v -i coverage.xml -o docs/cov_badge.svg genbadge tests -v -i pytest_results.xml -o docs/pytest_badge.svg - + - name: Run pylint in fre-cli environment run: | - # try to make sure the right things are in GITHUB_PATH - echo $CONDA/envs/fre-cli/bin >> $GITHUB_PATH - - # are we talking to the right python? 
- which python - python --version - $CONDA/envs/fre-cli/bin/python --version - - # run pylint, ignored modules avoid warnings arising from code internal to those modules + # run pylint, ignored modules avoid warnings arising from code internal to those modules pylint --max-args 6 -ry --ignored-modules netCDF4,cmor fre/ || echo "pylint returned non-zero exit code. preventing workflow from dying with this echo." - + - name: Install Sphinx and Build Documentation run: | - pip install sphinx renku-sphinx-theme sphinx-rtd-theme + pip install sphinx renku-sphinx-theme sphinx-rtd-theme pip install --upgrade sphinx-rtd-theme sphinx-apidoc --output-dir docs fre/ --separate sphinx-build docs build diff --git a/.github/workflows/publish_conda.yml b/.github/workflows/publish_conda.yml index e36a72ea..26b1825a 100644 --- a/.github/workflows/publish_conda.yml +++ b/.github/workflows/publish_conda.yml @@ -7,10 +7,15 @@ jobs: publish: runs-on: ubuntu-latest container: - image: continuumio/miniconda3:latest + image: ghcr.io/noaa-gfdl/fre-cli:miniconda24.7.1_gcc14.2.0 steps: - name: Checkout Files uses: actions/checkout@v4 + with: + submodules: 'recursive' + - name: Add mkmf to PATH + run: | + echo $PWD/mkmf/bin >> $GITHUB_PATH - name: Run Conda to Build and Publish run: | conda config --append channels conda-forge diff --git a/.gitignore b/.gitignore index 71828433..5212a9a5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,8 @@ +# fremake generated files that should be ignored +combined-null_model.yaml +combined-am5.yaml +tmp +makefile_out # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml deleted file mode 100644 index fef13ef2..00000000 --- a/.gitlab-ci.yml +++ /dev/null @@ -1,13 +0,0 @@ -# You can override the included template(s) by including variable overrides -# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings -# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings -# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings -# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings -# Note that environment variables can be set in several places -# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence -stages: -- test -sast: - stage: test -include: -- template: Security/SAST.gitlab-ci.yml diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..b95f9e27 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,6 @@ +[submodule "mkmf"] + path = mkmf + url = https://github.com/NOAA-GFDL/mkmf +[submodule "fre/gfdl_msd_schemas"] + path = fre/gfdl_msd_schemas + url = https://github.com/NOAA-GFDL/gfdl_msd_schemas diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a7bd7b6c..c37953c9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,7 +1,7 @@ ## **For Developers** * Developers are free to use this repository's `README.md` to familiarize with the CLI and save time from having to install any dependencies, but development within a Conda environment is heavily recommended regardless -* Gain access to the repository with `git clone git@github.com:NOAA-GFDL/fre-cli.git` or your fork's link (recommended) and an SSH RSA key +* Gain access to the repository with `git clone --recursive 
git@github.com:NOAA-GFDL/fre-cli.git` or your fork's link (recommended) and an SSH RSA key - Once inside the repository, developers can test local changes by running a `pip install .` inside of the root directory to install the fre-cli package locally with the newest local changes on top of the installed Conda fre-cli dependencies - Test as a normal user would use the CLI * Create a GitHub issue to reflect your contribution's background and reference it with Git commits diff --git a/docs/FAQ.rst b/docs/FAQ.rst deleted file mode 100644 index cde88d85..00000000 --- a/docs/FAQ.rst +++ /dev/null @@ -1,2 +0,0 @@ -FAQ -=== diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 00000000..a3655f99 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,4 @@ +============= +API +============= +Auto-harvested goodness, coming soon. diff --git a/docs/badges.rst b/docs/badges.rst index 542793d4..3fbc5d66 100644 --- a/docs/badges.rst +++ b/docs/badges.rst @@ -1,3 +1,5 @@ +.. this file is explicitly for the hyperlinkage in the base README.md to the badge image files +====== Badges ====== diff --git a/docs/contributing_to_doc.rst b/docs/contributing_to_doc.rst new file mode 100644 index 00000000..15721886 --- /dev/null +++ b/docs/contributing_to_doc.rst @@ -0,0 +1,62 @@ +.. last updated early Nov 2024. + could use some refinement +=========================== +Documentation-Documentation +=========================== + +Welcome to ``fre-cli``'s Documentation-documentation- where we document how the documentation is +documented + +How to Contribute to ``fre-cli``'s documentation +================================================ + + + +fork and poke at the settings +----------------------------- + +* Fork ``fre-cli`` on github + +* On github, navigate to your ``fre-cli`` fork, and click “settings” + +* In “settings”, click “pages” + +* In “pages”, under “build and deployment”, make sure “source” is set to “Deploy from a branch” + +* Under that, find “Branch”, make sure the branch selected is ``gh-pages`` + +* The branch ``gh-pages`` is "automagic”- i.e. do not change anything about it nor create a new one, + nor interact with anything in that branch directly + + +enable workflows for your fork +------------------------------ + +note: this step may depend on user-specific settings! 
+ +* Back on top where you found “settings”, find and click “actions” to the left + +* Enable running the workflow actions associated with the ``fre-cli`` repo under ``.github/workflows`` + + +run your fork's first workflow +------------------------------ + +* The documentation builds as the last steps to ``create_test_conda_env.yml`` when there's a push to ``main`` + +* To get your first workflow run on your fork, comment out the ``github.ref == 'refs/heads/main'`` bit + so that it runs when you push to any branch, and make a small, trivial commit somewhere to your + remote fork + +* You should be able to find the deployed webpage from a successful workflow at + https://your_username.github.io/fre-cli (if you did not change the fork’s name from ``fre-cli``, that is) + +* If you’re only editing docs, you can make the turn-around time on your workflow ~3 min faster by + commenting-out the ``pylint`` and ``pytest`` steps in ``create_test_conda_env.yml``, and disabling the + ``build_conda.yml`` workflow + + + +Other Helpful Things +==================== +`restructured text cheat-sheet `_ diff --git a/docs/for-developers.rst b/docs/for-developers.rst new file mode 100644 index 00000000..92feada5 --- /dev/null +++ b/docs/for-developers.rst @@ -0,0 +1,118 @@ +=============== +For developers +=============== + +Developers are free to use the user guide above to familiarize themselves with the CLI and save time from +having to install any dependencies, but development within a Conda environment is heavily +recommended regardless. + +Gain access to the repository with ``git clone --recursive git@github.com:NOAA-GFDL/fre-cli.git`` or your fork's +link (recommended) and an SSH RSA key. Once inside the repository, developers can test local changes +by running a ``pip install .`` inside of the root directory to install the ``fre-cli`` package locally +with the newest local changes. Test as a normal user would use the CLI. + + +Adding New Tools +================ + + +From Other Repositories +----------------------- + +Currently, the solution to this task is to approach it using Conda packages. The tool that is being +added must reside within a repository that contains a ``meta.yaml`` that includes Conda dependencies +like the one in this repository and ideally a ``setup.py`` (may be subject to change due to deprecation) +that may include any potentially needed pip dependencies + +* Once published as a Conda package, ideally on the `NOAA-GFDL conda channel <https://anaconda.org/NOAA-GFDL>`_, + an addition can be made to the ``run`` section under ``requirements`` in ``meta.yaml`` of the ``fre-cli`` + following the syntax ``channel::package`` + +* On pushes to the main branch, the package located at https://anaconda.org/NOAA-GFDL/fre-cli will automatically + be updated by the workflow defined in ``.github/workflows/publish_conda.yml`` + + +Checklist +--------- + +For the new tool you are trying to develop, there are a few criteria to satisfy: + +1. Create a subdirectory for the tool group inside the ``fre/`` directory; i.e. ``fre/<tool>`` + +2. Add an ``__init__.py`` inside of ``fre/<tool>`` + +* typically this file should be empty, but it depends on the ``<tool>``'s needs +* even if empty, the file facilitates module importability and must be present + +3. Add a file named ``fre/<tool>/fre<tool>.py``. This will serve as the main entry point for ``fre`` + into the ``<tool>``'s functionality + +4. Add a ``click`` group named after ``<tool>`` within ``fre/<tool>/fre<tool>.py`` + +* This ``click`` group will contain all the functionality under the ``<tool>`` + +5. 
Create separate files as needed for different commands; do not code out the full + implementation of ``<tool>`` inside of a ``click`` command within ``fre/<tool>/fre<tool>.py``. + +* better yet, consider what structure your tool may need in the future for maintainability's sake +* if you need, specify a ``<subtool>`` like ``fre/<tool>/<subtool>``. ``fre/app`` currently has + this structure + +6. Be sure to import the contents of the needed subcommand scripts inside of ``fre<tool>.py`` + +* i.e. from ``fre.<tool>.toolCommandScript import *`` + +7. At this point, you can copy and paste the parts of your main ``click`` command from its script + into ``fre<tool>.py`` when implementing the function reflective of the command function + +* Everything will remain the same; i.e. arguments, options, etc. + +* However, this new function within ``fre<tool>.py`` must have a new line after the arguments, options, + and other command components: ``@click.pass_context`` + +* Along with this, a new argument ``context`` must now be added to the parameters of the command + (preferably at the beginning, but it won't break it if it's not) + +8. From here, all that needs to be added after defining the command with a name is + ``context.forward(mainFunctionOfToolCommand)``, and done! (a minimal sketch of this pattern follows the checklist) + +9. The last step is to replicate the command in the same way as done in ``fre<tool>.py`` + inside of ``fre.py``, but make sure to add ``from fre import <tool>`` and + ``from fre.<tool> import *`` + +Please refer to this issue when encountering naming issues: +`NOAA-GFDL#31 <https://github.com/NOAA-GFDL/fre-cli/issues/31>`_
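A minimal, self-contained sketch of the checklist's ``click`` pattern (steps 7 and 8); the names ``mytool``, ``greet``, and ``greet_impl`` are illustrative and not part of ``fre-cli``:

.. code-block:: python

    import click

    @click.command()
    @click.option("--uppercase", "-u", is_flag=True, help="Print in uppercase.")
    def greet_impl(uppercase):
        """The command's actual implementation, normally living in its own script."""
        statement = "hello from mytool"
        print(statement.upper() if uppercase else statement)

    @click.group()
    def mytool():
        """Click group named after the tool; fre.py would nest this group under fre."""

    @mytool.command(name="greet")
    @click.option("--uppercase", "-u", is_flag=True, help="Print in uppercase.")
    @click.pass_context
    def greet(context, uppercase):
        """Thin wrapper that mirrors the implementation's arguments and options."""
        # context.forward() re-invokes greet_impl with the parameter values
        # already parsed for this command (the option names must match)
        context.forward(greet_impl)

    if __name__ == "__main__":
        mytool()

Saved as ``mytool.py``, running ``python mytool.py greet -u`` prints ``HELLO FROM MYTOOL``.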
Example ``fre/<tool>`` Directory Structure +------------------------------------------- + +``fre/`` +├── ``__init__.py`` +├── ``fre.py`` +├── ``fre<tool>`` +│ ├── ``__init__.py`` +│ ├── ``toolCommandScript.py`` +│ └── ``fre<tool>.py`` + + +``MANIFEST.in`` +--------------- + +In the case where non-python files like templates, examples, and outputs are to be included in the ``fre-cli`` package, +``MANIFEST.in`` can provide the solution. Ensure that the file exists within the correct folder, and add a line to the +``MANIFEST.in`` file saying something like ``include fre/fre<tool>/fileName.fileExtension`` + +* For more efficiency, if there are multiple files of the same type needed, the ``MANIFEST.in`` addition can be something + like ``recursive-include fre/fre<tool> *.fileExtension`` which would recursively include every file matching that + ``fileExtension`` within the specified directory and its respective subdirectories. + + +Adding Documentation +-------------------- + +see section "Documentation-Documentation" + + + + diff --git a/docs/index.rst b/docs/index.rst index 77c46de6..07dcbeb9 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,23 +1,28 @@ -.. Fre-Cli documentation master file, created by - sphinx-quickstart on Wed Mar 6 22:28:21 2024. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. +.. Fre-Cli documentation master file, created by sphinx-quickstart on Wed Mar 6 22:28:21 2024. + You can adapt this file completely to your liking, but it should at least contain the root + \`toctree\` directive (no backslashes) + Some sphinx markdown examples: + https://gist.github.com/SMotaal/24006b13b354e6edad0c486749171a70 -Welcome to Fre-Cli's documentation! -=================================== +======================================= +Welcome to ``fre-cli``'s documentation! +======================================= +.. the entry in the toc must be the .rst filename. + what shows in the webpage is the first header or title .. toctree:: :maxdepth: 2 :caption: Contents: + what-is-fre setup usage - subtools - FAQ - badges + tools + api + for-developers -Indices and tables -================== +Indices +======= * :ref:`genindex` * :ref:`modindex` diff --git a/docs/setup.rst b/docs/setup.rst index cf06bb99..7fd26b14 100644 --- a/docs/setup.rst +++ b/docs/setup.rst @@ -1,32 +1,35 @@ +===== Setup ===== +fre-cli is conda-installable from the “noaa-gfdl” anaconda channel (https://anaconda.org/NOAA-GFDL/fre-cli) +and is deployed on GFDL systems as Environment Modules. + +On GFDL systems +======================== +If you are at GFDL (gaea, PP/AN, workstations), you may skip installation:: -Need to set up Conda environment first and foremost + module load fre/2024.01 -If on workstation: -module load conda + fre --help -Create new Conda environment -conda create -n [environmentName] +Generic +======================= +If you are outside GFDL or are a FRE developer, install with conda. If you're at GFDL, bring conda into your PATH:: -Append necessary channels -conda config --append channels noaa-gfdl -conda config --append channels conda-forge + module load miniforge -Run conda install on needed dependencies -conda install noaa-gfdl::fre-cli should install the CLI package located at https://anaconda.org/NOAA-GFDL/fre-cli created from the meta.yaml file +If you are outside GFDL, install the miniconda tool with the standard instructions (https://docs.anaconda.com/miniconda/miniconda-install/). -All other dependencies used by the tools are installed along with this install (configured inside the meta.yaml), with the exception of local modules -setup.py file allows fre.py to be ran with fre as the entry point on the command line instead of python fre.py +Once you have conda available, install the latest fre-cli from the NOAA-GFDL anaconda channel:: -Enter commands and follow --help messages for guidance (brief rundown of commands also provided below) + conda create --name fre --channel noaa-gfdl --channel conda-forge fre-cli -If the user just runs fre, it will list all the command groups following fre, such as run, make, pp, etc. and once the user specifies a command group, the list of available subcommands for that group will be shown +To install a specific version:: -Commands that require arguments to run will alert user about missing arguments, and will also list the rest of the optional parameters if --help is executed + conda create --name fre-202401 --channel noaa-gfdl --channel conda-forge fre-cli=2024.01 -Argument flags are not positional, can be specified in any order as long as they are specified +and activate it:: -Can run directly from any directory, no need to clone repository + conda activate fre -May need to deactivate environment and reactivate it in order for changes to apply + fre --help diff --git a/docs/subtools.rst b/docs/subtools.rst deleted file mode 100644 index ee6f663f..00000000 --- a/docs/subtools.rst +++ /dev/null @@ -1,55 +0,0 @@ -Subtools -======== - -fre app -------- - -fre catalog -------- - -The fre catalog tool brings the functionality of the `GFDL catalog builder `_ to fre users. The catalog builder is a python community package ecosystem that allows you to generate data catalogs compatible with intake-esm. 
- -**Subtools** - -Buildcatalog - Generate a data catalog - -Validate - Validate the catalog - -**Flags** - -Overwrite - Overwrite an existing catalog at the given output path - -Apend - Append (without headerlist) to an existing catalog at the given output path - -**Quickstart** - -Catalogs are generated by the following command: fre catalog buildcatalog - -(OUTPUT_PATH should end with the desired output filename WITHOUT a file ending) See example below. - -.. code-block:: console -fre catalog buildcatalog --overwrite /archive/path_to_data_dir ~/output - -fre check (not yet implemented) -------- - -fre cmor -------- - -fre list (not yet implemented) -------- - -fre make -------- - -fre pp ------ - -fre run (not yet implemented) -------- - -fre test (not yet implemented) -------- - -fre yamltools (not yet implemented) -------- diff --git a/docs/tools.rst b/docs/tools.rst new file mode 100644 index 00000000..c658b211 --- /dev/null +++ b/docs/tools.rst @@ -0,0 +1,65 @@ +============= +Tools +============= + +Notes on command-line interface +======================================== +The “cli” in fre-cli derives from the shell “fre SUBCOMMAND COMMAND” structure inspired by git, cylc, and other modern Linux command-line tools. This enables discovery of the tooling capability, useful for complex tools with multiple options. + +To discover subcommands, use ``--help``, e.g.:: + + fre --help + + fre make --help + + fre pp --help + +Commands that require arguments to run will alert the user about missing arguments, and will also list +the rest of the optional parameters if ``--help`` is executed, e.g.:: + + fre pp configure-yaml --help + +Argument flags are not positional and can be specified in any order. Some arguments expect sub-arguments. + +fre app +=========== + +.. include:: tools/app.rst + + +fre catalog +=============== + +.. include:: tools/catalog.rst + + +fre cmor +============ + +* See also, ``fre cmor``'s `README `_ +* See also, ``fre cmor``'s `project board `_ + +This set of tools leverages the external ``cmor`` python package within the ``fre`` ecosystem. ``cmor`` is an +acronym for "climate model output rewriter". The process of rewriting model-specific output files for model +intercomparisons (MIPs) using the ``cmor`` module is, quite cleverly, referred to as "CMORizing". + + +.. include:: tools/cmor.rst + + +fre make +============ + +.. include:: tools/make.rst + + +fre pp +========== + +.. include:: tools/pp.rst + + +fre yamltools +================= + +.. include:: tools/yamltools.rst diff --git a/docs/tools/app.rst b/docs/tools/app.rst new file mode 100644 index 00000000..252871a3 --- /dev/null +++ b/docs/tools/app.rst @@ -0,0 +1 @@ +``fre app`` tools are intended to be a collection of single-purpose tools. diff --git a/docs/tools/catalog.rst b/docs/tools/catalog.rst new file mode 100644 index 00000000..03eeca22 --- /dev/null +++ b/docs/tools/catalog.rst @@ -0,0 +1,12 @@ +.. NEEDS UPDATING #TODO +``builder`` +----------- + Generate a catalog +* Builds json and csv format catalogs from user input directory path +* Minimal Syntax: ``fre catalog builder -i [input path] -o [output path]`` +* Module(s) needed: n/a +* Example: ``fre catalog builder -i /archive/am5/am5/am5f3b1r0/c96L65_am5f3b1r0_pdclim1850F/gfdl.ncrc5-deploy-prod-openmp/pp -o ~/output --overwrite`` + +``validate`` +------------ +Validate the catalog diff --git a/docs/tools/cmor.rst b/docs/tools/cmor.rst new file mode 100644 index 00000000..ee55cf43 --- /dev/null +++ b/docs/tools/cmor.rst @@ -0,0 +1,91 @@ +.. 
last updated Nov 2024 + +``run`` +------- + +``fre cmor run`` rewrites climate model output files in a target directory in a CMIP-compliant manner +for downstream publishing. It accepts 6 arguments, only one being optional. A brief description of each: + + +arguments +~~~~~~~~~ + +* (required) ``-d, --indir TEXT``, input directory containing netCDF files to CMORize. + + - all netCDF files within ``indir`` will have their filename checked for local variables + specified in ``varlist`` as keys, and ISO datetime strings extracted and kept in a list + for later iteration over target files + + - a debugging-oriented boolean flag constant at the top of ``cmor_mixer.py``, if ``True``, + will process one file of all files found within ``indir`` and cease processing for that + variable after succeeding on one file + +* (required) ``-l, --varlist TEXT``, path to variable list dictionary. + + - each entry in the variable list dictionary corresponds to a key/value pair + + - the key (local variable) is used for ID'ing files within ``indir`` to be processed + + - associated with the key (local variable) is the value (target variable), which should + be the label attached to the data within the targeted file(s) + +* (required) ``-r, --table_config TEXT``, path to MIP json configuration holding variable + metadata. + + - typically, this is to be provided by a data-request associated with the MIP and + participating experiments + +* (required) ``-p, --exp_config TEXT``, path to json configuration holding experiment/model + metadata + + - contains e.g. ``grid_label``, and points to other important configuration files + associated with the MIP + + - the other configuration files typically house metadata associated with ``coordinates``, + ``formula_terms``, and controlled-vocabulary (``CV``). + +* (required) ``-o, --outdir TEXT``, path-prefix in which the output directory structure is created. + + - further output-directories and structure/template information is specified in ``exp_config`` + + - in addition to the output-structure/template used, an additional directory corresponding to the + date the CMORizing was done is created near the end of the directory tree structure + +* (optional) ``-v, --opt_var_name TEXT``, a specific variable to target for processing + + - largely a debugging convenience functionality, this can be helpful for targeting more specific + input files as desired. + + +examples +~~~~~~~~ +With a local clone of ``fre-cli``, the following call should work out-of-the-box from +the root directory of the repository. + +.. code-block:: console + + fre cmor run \ + -d fre/tests/test_files/ocean_sos_var_file \ + -l fre/tests/test_files/varlist \ + -r fre/tests/test_files/cmip6-cmor-tables/Tables/CMIP6_Omon.json \ + -p fre/tests/test_files/CMOR_input_example.json \ + -o fre/tests/test_files/outdir
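The ``varlist`` passed via ``-l`` above maps local variable names (keys) to target variable names (values), per the argument description. Assuming the JSON format used by the bundled test files, a hypothetical example of writing one (the ``t_surf``/``tas`` rename is purely illustrative and not taken from the test files):

.. code-block:: python

    import json

    # hypothetical variable list: local (model) variable name -> target (MIP) variable name
    varlist = {
        "sos": "sos",      # sea-surface salinity, name unchanged
        "t_surf": "tas",   # illustrative rename from a model name to a MIP name
    }
    with open("varlist", "w") as handle:
        json.dump(varlist, handle, indent=4)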
+ +background +~~~~~~~~~~ + +The bulk of this routine is housed in ``fre/cmor/cmor_mixer.py``, which is a rewritten version of +Sergey Malyshev's original ``CMORcommander.py`` script, utilized during GFDL's CMIP6 publishing run. + +This code is dependent on two primary json configuration files: a MIP +variable table and another containing experiment (i.e. model) specific metadata (e.g. grid) to append +to the output netCDF file headers, in addition to other configuration options such as output directory +name specification, output path templates, and specification of other json configuration files containing +controlled-vocabulary (CV), coordinate, and formula term conventions for rewriting the output metadata. + + + + + + + diff --git a/docs/tools/make.rst b/docs/tools/make.rst new file mode 100644 index 00000000..5a3e557b --- /dev/null +++ b/docs/tools/make.rst @@ -0,0 +1,59 @@ +``create-checkout`` +------------------- + +``fre make create-checkout [options]`` + - Purpose: Creates the checkout script and can check out source code (with execute option) + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + - `-j, --jobs [number of jobs to run simultaneously]` + - `-npc, --no-parallel-checkout (for container build)` + - `-e, --execute` + +``create-makefile`` +------------------- + +``fre make create-makefile [options]`` + - Purpose: Creates the makefile + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + +``create-compile`` +------------------ + +``fre make create-compile [options]`` + - Purpose: Creates the compile script and compiles the model (with execute option) + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + - `-j, --jobs [number of jobs to run simultaneously]` + - `-n, --parallel [number of concurrent model compiles]` + - `-e, --execute` + +``create-dockerfile`` +--------------------- + +``fre make create-dockerfile [options]`` + - Purpose: Creates the dockerfile and creates the container (with execute option) + - With the creation of the dockerfile, the Makefile, checkout script, and any other necessary script is copied into the container from a temporary location + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + +``run-fremake`` +--------------- + +``fre make run-fremake [options]`` + - Purpose: Creates the checkout script, Makefile, compile script, and dockerfile (platform dependent) for the compilation of the model + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + - `-npc, --no-parallel-checkout (for container build)` + - `-j, --jobs [number of jobs to run simultaneously]` + - `-n, --parallel [number of concurrent model compiles]` diff --git a/docs/tools/pp.rst b/docs/tools/pp.rst new file mode 100644 index 00000000..6160a1e5 --- /dev/null +++ b/docs/tools/pp.rst @@ -0,0 +1,16 @@ +.. 
NEEDS UPDATING #TODO +``configure`` +------------- + +* Postprocessing yaml configuration +* Minimal Syntax: ``fre pp configure -y [user-edit yaml file]`` +* Module(s) needed: n/a +* Example: ``fre pp configure -y /home/$user/pp/ue2/user-edits/edits.yaml`` + +``checkout`` +------------ + +* Check out the template file and clone the gitlab.gfdl.noaa.gov/fre2/workflows/postprocessing.git repository +* Minimal Syntax: ``fre pp checkout -e [experiment name] -p [platform name] -t [target name]`` +* Module(s) needed: n/a +* Example: ``fre pp checkout -e c96L65_am5f4b4r0_amip -p gfdl.ncrc5-deploy -t prod-openmp`` diff --git a/docs/tools/yamltools.rst b/docs/tools/yamltools.rst new file mode 100644 index 00000000..7ff42502 --- /dev/null +++ b/docs/tools/yamltools.rst @@ -0,0 +1,13 @@ +``combine-yamls`` +----------------- + +``fre yamltools combine-yamls [options]`` + - Purpose: Creates a combined yaml file for either compilation or post-processing. + If `--use compile`, the model yaml is combined with the compile and platforms yamls. + If `--use pp`, the model yaml is combined with post-processing yamls. + - Options: + - `-y, --yamlfile [experiment yaml] (required)` + - `-e, --experiment [experiment name]` + - `-p, --platform [platform] (required)` + - `-t, --target [target] (required)` + - `--use [compile|pp] (required)` diff --git a/docs/usage.rst b/docs/usage.rst index 95afbe5a..e49f106b 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -1,156 +1,28 @@ +============= Usage -===== +============= +Using a set of YAML configuration files, ``fre make`` compiles an FMS-based model, and ``fre pp`` postprocesses the history output and runs diagnostic analysis scripts. Please note that model running is not yet supported in FRE 2024; continue to use FRE Bronx frerun. -User Usage ---------- +YAML Framework +======================== +.. include:: usage/yaml_framework.rst -**Conda environment set up** +Build FMS model +======================= +.. include:: usage/compile.rst -Load Conda +Run FMS model +======================= +Check back in the latter half of 2025 or so. -.. code-block::console - module load conda +Postprocess FMS history output +============================== +.. include:: usage/postprocess.rst -Create new Conda environment +CMORize postprocessed output +============================ +.. include:: usage/cmor.rst - -.. code-block::console - conda create -n [environmentName] - -Append necessary channels - -.. code-block::console - conda config --append channels noaa-gfdl - conda config --append channels conda-forge - -Install needed dependencies - -.. code-block::console - conda install noaa-gfdl::fre-cli - -setup.py file allows fre.py to be ran with fre as the entry point on the command line instead of python fre.py - -Enter commands and follow *--help* messages for guidance (brief rundown of commands also provided below) - -If the user just runs *fre*, it will list all the command groups following *fre*, such as *run*, *make*, *pp*, etc. 
and once the user specifies a command group, the list of available subcommands for that group will be shown - -Commands that require arguments to run will alert user about missing arguments, and will also list the rest of the optional parameters if *--help* is executed - -Argument flags are not positional, can be specified in any order as long as they are specified - -Can run directly from any directory, no need to clone repository - -May need to deactivate environment and reactivate it in order for changes to apply - - -Tools ------ - -A few subtools are currently in development: - -**fre pp** - -1. configure - -* Postprocessing yaml configuration -* Minimal Syntax: *fre pp configure -y [user-edit yaml file]* -* Module(s) needed: n/a -* Example: *fre pp configure -y /home/$user/pp/ue2/user-edits/edits.yaml* - -2. checkout - -* Checkout template file and clone gitlab.gfdl.noaa.gov/fre2/workflows/postprocessing.git repository -* Minimal Syntax: *fre pp checkout -e [experiment name] -p [platform name] -t [target name]* -* Module(s) needed: n/a -* Example: *fre pp checkout -e c96L65_am5f4b4r0_amip -p gfdl.ncrc5-deploy -t prod-openmp* - - -**fre catalog** - -1. buildCatalog1 -* Builds json and csv format catalogs from user input directory path -* Minimal Syntax: *fre catalog buildCatalog -i [input path] -o [output path]* -* Module(s) needed: n/a -* Example: *fre catalog buildCatalog -i /archive/am5/am5/am5f3b1r0/c96L65_am5f3b1r0_pdclim1850F/gfdl.ncrc5-deploy-prod-openmp/pp -o ~/output --overwrite* - -**To be developed:** - -#. fre check -#. fre list -#. fre make -#. fre run -#. fre test -#. fre yamltools - - -Usage (Developers) ------------------- - -Developers are free to use the user guide above to familiarize with the CLI and save time from having to install any dependencies, but development within a Conda environment is heavily recommended regardless - -Gain access to the repository with *git clone git@github.com:NOAA-GFDL/fre-cli.git* or your fork's link (recommended) and an SSH RSA key - -Once inside the repository, developers can test local changes by running a *pip install .* inside of the root directory to install the fre-cli package locally with the newest local changes - -Test as a normal user would use the CLI - -**Adding New Tools - Checklist** - -If there is *no* subdirectory created for the new tool you are trying to develop, there are a few steps to follow: - -1. Create a subdirectory for the tool group inside the /fre folder; i.e. /fre/fre(subTool) - -2. Add an *__init__.py* inside of the new subdirectory - -* This will contain one line, *from fre.fre(subTool) import ** -* The purpose of this line is to allow the subTool module to include all the scripts and functions within it when invoked by fre - -3. Add a file named *fre(subTool).py*. This will serve as the main file to house all of the tool's related subcommands - -4. Add a Click group named after the subTool within *fre(subTool).py* - -* This group will contain all of the subcommands - -5. Create separate files to house the code for each different subcommand; do not code out the full implemetation of a function inside of a Click command within *fre(subTool).py* - -6. Be sure to import the contents of the needed subcommand scripts inside of fre(subTool).py - -* i.e. from fre.fre(subTool).subCommandScript import * - -7. 
At this point, you can copy and paste the parts of your main Click subcommand from its script into *fre(subTool).py* when implementing the function reflective of the subcommand function - -* Everything will remain the same; i.e. arguments, options, etc. - -* However, this new function within *fre(subTool).py* must a new line after the arguments, options, and other command components; *@click.pass_context* - -* Along with this, a new argument "context" must now be added to the parameters of the command (preferably at the beginning, but it won't break it if it's not) - -8. From here, all that needs to be added after defining the command with a name is *context.forward(mainFunctionOfSubcommand)*, and done! - -9. After this step, it is important to add *from fre.fre(subTool) import* to the *__init__.py* within the /fre folder - -10. The last step is to replicate the subcommand in the same way as done in *fre(subTool).py* inside of *fre.py*, but make sure to add *from fre import fre(subTool)* and *from fre.fre(subTool).fre(subTool) import ** - -Please refer to this issue when encountering naming issues: `NOAA-GFDL#31 `_ - -**Adding Tools From Other Repositories** - -Currently, the solution to this task is to approach it using Conda packages. The tool that is being added must reside within a repository that contains a meta.yaml that includes Conda dependencies like the one in this repository and ideally a setup.py (may be subject to change due to deprecation) that may include any potentially needed pip dependencies - -* Once published as a Conda package, ideally on the NOAA-GFDL channel at https://anaconda.org/NOAA-GFDL, an addition can be made to the "run" section under the "requirements" category in the meta.yaml of the fre-cli following the syntax channel::package - -* On pushes to the main branch, the package located at https://anaconda.org/NOAA-GFDL/fre-cli will automatically be updated using the workflow file - -**MANIFEST.in** - -In the case where non-python files like templates, examples, and outputs are to be included in the fre-cli package, MANIFEST.in can provide the solution. Ensure that the file exists within the correct folder, and add a line to the MANIFEST.in file saying something like *include fre/fre(subTool)/fileName.fileExtension* - -* For more efficiency, if there are multiple files of the same type needed, the MANIFEST.in addition can be something like *recursive-include fre/fre(subTool) *.fileExtension* which would recursively include every file matching that fileExtension within the specified directory and its respective subdirectories. - -**Example /fre Directory Structure** -. -├── __init__.py -├── fre.py -├── fre(subTool) -│ ├── __init__.py -│ ├── subCommandScript.py -│ └── fre(subTool).py +Generate data catalogs +====================== +.. include:: usage/catalogs.rst diff --git a/docs/usage/cmor.rst b/docs/usage/cmor.rst new file mode 100644 index 00000000..d886bf4b --- /dev/null +++ b/docs/usage/cmor.rst @@ -0,0 +1,20 @@ +Brief rundown of commands also provided below: + +* Enter commands and follow ``--help`` messages for guidance +* If the user just runs ``fre``, it will list all the command groups following ``fre``, such as + ``run``, ``make``, ``pp``, etc. 
and once the user specifies a command group, the list of available + subcommands for that group will be shown +* Commands that require arguments to run will alert user about missing arguments, and will also list + the rest of the optional parameters if ``--help`` is executed +* Argument flags are not positional, can be specified in any order as long as they are specified +* Can run directly from any directory, no need to clone repository +* May need to deactivate environment and reactivate it in order for changes to apply +* ``fre/setup.py`` allows ``fre/fre.py`` to be ran as ``fre`` on the command line by defining it as an + *entry point*. Without it, the call would be instead, something like ``python fre/fre.py`` + +* See also, ``fre cmor``'s `README `_ +* See also, ``fre cmor``'s `project board `_ + +This set of tools leverages the external ``cmor`` python package within the ``fre`` ecosystem. ``cmor`` is an +acronym for "climate model output rewriter". The process of rewriting model-specific output files for model +intercomparisons (MIPs) using the ``cmor`` module is, quite cleverly, referred to as "CMORizing". diff --git a/docs/usage/compile.rst b/docs/usage/compile.rst new file mode 100644 index 00000000..26473010 --- /dev/null +++ b/docs/usage/compile.rst @@ -0,0 +1,183 @@ +``fre make`` can compile a traditional "bare metal" executable or a containerized executable using a set of YAML configuration files. + +Through the fre-cli, ``fre make`` can be used to create and run a checkout script, makefile, and compile a model. + +Fremake Canopy Supports: + - multiple target use; ``-t`` flag to define each target (for multiple platform-target combinations) + - bare-metal build + - container creation + - parallel checkouts for bare-metal build + - parallel model builds + - one yaml format + - additional library support if needed + +**Note: Users will not be able to create containers without access to podman. To get access, submit a helpdesk ticket.** + +Required configuration files: + + - Model Yaml + - Compile Yaml + - Platforms yaml + +These yamls are combined and further parsed through the ``fre make`` tools. + +Compile Yaml +---------- +To create the compile yaml, reference the compile section on an XML. Certain fields should be included under "compile". These include ``experiment``, ``container_addlibs``, ``baremetal_linkerflags``, and ``src``. + + - The experiment can be explicitly defined or can be used in conjunction with defined ``fre_properties`` from the model yaml, as seen in the code block below + - ``container_addlibs``: list of strings of packages needed for the model to compile (used to create the link line in the Makefile) + - ``baremetal_linkerflags``: list of strings of linker flags (used to populate the link line in the Makefile + - ``src``: contains information about components needed for model compilation + +.. code-block:: + + compile: + experiment: !join [*group_version, "_compile"] + container_addlibs: "libraries and packages needed for linking in container" (string) + baremetal_linkerflags: "linker flags of libraries and packages needed" (string) + src: + +The ``src`` section is used to include component information. This will include: ``component``, ``repo``, ``cpdefs``, ``branch``, ``paths``, ``otherFlags``, and ``makeOverrides``. + +.. 
code-block:: + + src: + - component: "component name" (string) + requires: ["list of components that this component depends on"] (list of strings) + repo: "url of code repository" (string) + branch: "version of code to clone" (string / list of strings) + paths: ["paths in the component to compile"] (list of strings) + cppdefs: "CPPDEFS to include in compiling component" (string) + makeOverrides: "overrides openmp target for MOM6" ('OPENMP=""') (string) + otherFlags: "Include flags needed to retrieve other necessary code" (string) + doF90Cpp: True if the preprocessor needs to be run (boolean) + additionalInstructions: additional instructions to run after checkout (string) + +Guide +---------- +1. Bare-metal Build: + +.. code-block:: + + # Create checkout script + fre make create-checkout -y [model yaml file] -p [platform] -t [target] + + # Create and run checkout script + fre make create-checkout -y [model yaml file] -p [platform] -t [target] --execute + + # Create Makefile + fre make create-makefile -y [model yaml file] -p [platform] -t [target] + + # Create the compile script + fre make create-compile -y [model yaml file] -p [platform] -t [target] + + # Create and run the compile script + fre make create-compile -y [model yaml file] -p [platform] -t [target] --execute + + # Run all of fremake + fre make run-fremake -y [model yaml file] -p [platform] -t [target] [other options...] + +2. Container Build: + +For the container build, parallel checkouts are not supported, so the `-npc` option must be used for the checkout script. In addition, the platform must be a container platform. + +Gaea users will not be able to create containers unless they have requested and been given podman access. + +.. code-block:: + + # Create checkout script + fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] -npc + + # Create and run checkout script + fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] -npc --execute + + # Create Makefile + fre make create-makefile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] + + # Create a Dockerfile + fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] + + # Create and run the Dockerfile + fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] --execute + +Quickstart +---------- +The quickstart instructions can be used with the null model example located in the fre-cli repository: https://github.com/NOAA-GFDL/fre-cli/tree/main/fre/make/tests/null_example + +1. Bare-metal Build: + +.. code-block:: + + # Create checkout script + fre make create-checkout -y null_model.yaml -p ncrc5.intel23 -t prod + + # Create and run checkout script + fre make create-checkout -y null_model.yaml -p ncrc5.intel23 -t prod --execute + + # Create Makefile + fre make create-makefile -y null_model.yaml -p ncrc5.intel23 -t prod + + # Create the compile script + fre make create-compile -y null_model.yaml -p ncrc5.intel23 -t prod + + # Create and run the compile script + fre make create-compile -y null_model.yaml -p ncrc5.intel23 -t prod --execute + +2. Bare-metal Build Multi-target: + +.. 
code-block:: + + # Create checkout script + fre make create-checkout -y null_model.yaml -p ncrc5.intel23 -t prod -t debug + + # Create and run checkout script + fre make create-checkout -y null_model.yaml -p ncrc5.intel23 -t prod -t debug --execute + + # Create Makefile + fre make create-makefile -y null_model.yaml -p ncrc5.intel23 -t prod -t debug + + # Create the compile script + fre make create-compile -y null_model.yaml -p ncrc5.intel23 -t prod -t debug + + # Create and run the compile script + fre make create-compile -y null_model.yaml -p ncrc5.intel23 -t prod -t debug --execute + +3. Container Build: + +In order for the container to build successfully, a `-npc`, or `--no-parallel-checkout`, flag is needed. + +.. code-block:: + + # Create checkout script + fre make create-checkout -y null_model.yaml -p hpcme.2023 -t prod -npc + + # Create and run checkout script + fre make create-checkout -y null_model.yaml -p hpcme.2023 -t prod -npc --execute + + # Create Makefile + fre make create-makefile -y null_model.yaml -p hpcme.2023 -t prod + + # Create Dockerfile + fre make create-dockerfile -y null_model.yaml -p hpcme.2023 -t prod + + # Create and run the Dockerfile + fre make create-dockerfile -y null_model.yaml -p hpcme.2023 -t prod --execute + +4. Run all of fremake: + +`run-fremake` kicks off the compilation automatically. + +.. code-block:: + + # Bare-metal: create and run checkout script, create makefile, create compile script + fre make run-fremake -y null_model.yaml -p ncrc5.intel23 -t prod + + # Bare-metal: create and run checkout script, create makefile, create and run compile script + fre make run-fremake -y null_model.yaml -p ncrc5.intel23 -t prod --execute + + # Container: create checkout script, makefile, and dockerfile + fre make run-fremake -y null_model.yaml -p hpcme.2023 -t prod -npc + + # Container: create checkout script, makefile, create and run dockerfile to build container + fre make run-fremake -y null_model.yaml -p hpcme.2023 -t prod -npc --execute diff --git a/docs/usage/postprocess.rst b/docs/usage/postprocess.rst new file mode 100644 index 00000000..4a8cdf9d --- /dev/null +++ b/docs/usage/postprocess.rst @@ -0,0 +1,156 @@ +``fre pp`` regrids FMS history files and generates timeseries, climatologies, and static postprocessed files, with instructions specified in YAML. + +Bronx plug-in refineDiag and analysis scripts can also be used, and a reimagined analysis script ecosystem is being developed and is available now (for adventurous users). The new analysis script framework is independent of and compatible with FRE (https://github.com/NOAA-GFDL/analysis-scripts). The goal is to combine the ease-of-use of legacy FRE analysis scripts with the standardization of model output data catalogs and python virtual environments. + +In the future, output NetCDF files will be rewritten by CMOR by default, ready for publication to community archives (e.g. ESGF). Presently, standalone CMOR tooling is available as ``fre cmor``. + +By default, an intake-esm-compatible data catalog is generated and updated, containing a programmatic metadata-enriched searchable interface to the postprocessed output. The catalog tooling can be independently accessed as ``fre catalog``. + +FMS history files +----------------- +FRE experiments are run in segments of simulated time. The FMS diagnostic manager, as configured in +experiment configuration files (diag yamls), saves a set of diagnostic output files, or "history files." 
+The history files are organized by label and can contain one or more temporal or static diagnostics. +FRE (Bronx frerun) renames and combines the raw model output (that is usually on a distributed grid), +and saves the history files in one tarfile per segment, date-stamped with the date of the beginning of the segment. +The FMS diagnostic manager requires +that variables within one history file be the same temporal frequency (e.g. daily, monthly, annual), +but statics are allowed in any history file. Usually, variables in a history file +share a horizontal and vertical grid. + +Each history tarfile, again, is date-stamped with the date of the beginning of the segment, in YYYYMMDD format. +For example, for a 5-year experiment with 6-month segments, there will be 10 history tarfiles containing the +raw model output. Each history tarfile contains a segment's worth of time (in this case, 6 months):: + + 19790101.nc.tar 19800101.nc.tar 19810101.nc.tar 19820101.nc.tar 19830101.nc.tar + 19790701.nc.tar 19800701.nc.tar 19810701.nc.tar 19820701.nc.tar 19830701.nc.tar + +Each history file within the history tarfiles is also similarly date-stamped. Atmosphere and land history files +are on the native cubed-sphere grid, which has 6 tiles that represent the global domain. Ocean, ice, and +global scalar output have just one file that covers the global domain. + +For example, if the diagnostic manager were configured to save atmospheric and ocean annual and monthly history files, +the 19790101.nc.tar tarfile might contain:: + + tar -tf 19790101.nc.tar | sort + + ./19790101.atmos_month.tile1.nc + ./19790101.atmos_month.tile2.nc + ./19790101.atmos_month.tile3.nc + ./19790101.atmos_month.tile4.nc + ./19790101.atmos_month.tile5.nc + ./19790101.atmos_month.tile6.nc + ./19790101.atmos_annual.tile1.nc + ./19790101.atmos_annual.tile2.nc + ./19790101.atmos_annual.tile3.nc + ./19790101.atmos_annual.tile4.nc + ./19790101.atmos_annual.tile5.nc + ./19790101.atmos_annual.tile6.nc + ./19790101.ocean_month.nc + ./19790101.ocean_annual.nc + +The names of the history files, while often predictable, are arbitrary labels within the Diagnostic Manager configuration +(diag yamls). Each history file is a CF-standard NetCDF file that can be inspected with common NetCDF tools such as the NCO or CDO tools, or even ``ncdump``. + +Required configuration + +1. Set the history directory in your postprocessing yaml:: + + directories: + history: /arch5/am5/am5/am5f7c1r0/c96L65_am5f7c1r0_amip/gfdl.ncrc5-deploy-prod-openmp/history + +2. Set the segment size as an ISO8601 duration (e.g. P1Y is "one year"):: + + postprocess: + settings: + history_segment: P1Y + +3. Set the date range to postprocess as ISO8601 dates:: + + postprocess: + settings: + pp_start: 1979-01-01T0000Z + + pp_stop: 2020-01-01T0000Z + +Postprocess components +---------------------- +The history-file namespace is a single layer as shown above. By longtime tradition, FRE postprocessing namespaces are richer, with +a distinction for timeseries, timeaveraged, and static output datasets, and include frequency and chunk-size in the directory structure. + +Postprocessed files within a "component" share a horizontal grid, which can be the native grid or regridded to lat/lon. + +Required configuration + +4. 
Define the atmos and ocean postprocess components:: + + postprocess: + components: + - type: atmos + + sources: [atmos_month, atmos_annual] + - type: ocean + + sources: [ocean_month, ocean_annual] + +XY-regridding +------------- +Commonly, native grid history files are regridded during postprocessing. To regrid to a lat/lon grid, configure your +desired output grid, interpolation method, input grid type, and path to your FMS exchange grid definition. + +Optional configuration (i.e. if xy-regridding is desired) + +5. Regrid the atmos and ocean components to a 1x1 degree grid:: + + directories: + pp_grid_spec: /archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar + + postprocess: + components: + - type: atmos + + sources: [atmos_month, atmos_annual] + + sourceGrid: cubedsphere + + inputRealm: atmos + + xyInterp: [180, 360] + + interpMethod: conserve_order2 + - type: ocean + + sources: [ocean_month, ocean_annual] + + sourceGrid: tripolar + + inputRealm: ocean + + xyInterp: [180, 360] + + interpMethod: conserve_order1 + +Timeseries +---------- +Timeseries output is the most common type of postprocessed output. + +Climatologies +------------- +annual and monthly climatologies +less fine-grained than bronx +per-component switch coming +now it's one switch for entire pp + +Statics +------- +underbaked, known deficiency +currently, takes statics from "source" history files + +Analysis scripts +---------------- + +Surface masking for FMS pressure-level history +---------------------------------------------- + +Legacy refineDiag scripts +------------------------- diff --git a/docs/usage/yaml_framework.rst b/docs/usage/yaml_framework.rst new file mode 100644 index 00000000..3422044a --- /dev/null +++ b/docs/usage/yaml_framework.rst @@ -0,0 +1,152 @@ +In order to utilize FRE 2024.01 tools, a distributed YAML structure is required. This framework includes a main model yaml, a compile yaml, a platforms yaml, and post-processing yamls. Throughout the compilation and post-processing steps, combined yamls that will be parsed for information are created. Yamls follow a dictionary-like structure with ``[key]: [value]`` fields. + +Yaml Formatting +--------------- +Helpful information and format recommendations for creating yaml files. + +1. You can define a block of values as well as individual ``[key]: [value]`` pairs: + +.. code-block:: + + section name: + key: value + key: value + +2. ``[key]: [value]`` pairs can be made a list by utilizing a ``-``: + +.. code-block:: + + section name: + - key: value + - key: value + +3. If you want to associate information with a certain listed element, follow this structure: + +.. code-block:: + + section name: + - key: value + key: value + - key: value + key: value + +Where each dash indicates a list. + +4. Yamls also allow for the capability of reusable variables. These variables are defined by: + +.. code-block:: + + &ReusableVariable Value + +5. Users can apply a reusable variable to a block of values. For example, everything under "section" is associated with the reusable variable: + +.. code-block:: + + section: &ReusableVariable + - key: value + key: value + - key: value + +6. In order to use them as a reference elsewhere in either the same or other yamls, use ``*``: + +.. code-block:: + + *ReusableVariable + +7. If the reusable variable must be combined with other strings, the ``!join`` constructor is used. Example: + +.. code-block:: + + &version "2024.01" + &stem !join [FRE/, *version] + +In this example, the reusable variable ``stem`` will be parsed as ``FRE/2024.01``.
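``!join`` is not part of the core YAML spec; it works because the yaml tooling registers a custom constructor for that tag. A minimal PyYAML sketch of the mechanism (illustrative of how such a constructor behaves, not necessarily ``fre-cli``'s exact implementation):

.. code-block:: python

    import yaml

    def join_constructor(loader, node):
        """Concatenate the elements of a !join sequence into a single string."""
        return "".join(str(item) for item in loader.construct_sequence(node))

    yaml.add_constructor("!join", join_constructor, Loader=yaml.SafeLoader)

    document = """
    version: &version "2024.01"
    stem: !join [FRE/, *version]
    """
    print(yaml.safe_load(document)["stem"])  # prints: FRE/2024.01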
Model Yaml +---------- +The model yaml defines reusable variables, shared directories, switches, post-processing settings, and paths to compile and post-processing yamls. Required fields in the model yaml include: ``fre_properties``, ``build``, ``shared``, and ``experiments``. + +* **fre_properties**: Reusable variables + + - list of variables + - these values can be extracted from ``fre_properties`` in a group's XML, if available + - value type: string + + .. code-block:: + + - &variable1 "value1" (string) + - &variable2 "value2" (string) + +* **build**: paths to information needed for compilation + + - subsections: ``compileYaml``, ``platformYaml`` + - value type: string + + .. code-block:: + + build: + compileYaml: "path to the compile yaml in relation to model yaml" (string) + platformYaml: "path to platforms.yaml in relation to model yaml" (string) + +* **shared**: shared settings across experiments + + - subsections: ``directories``, ``postprocess`` + + .. code-block:: + + shared: + directories: &shared_directories + key: "value" (string) + + postprocess: + settings: &shared_settings + key: "value" (string) + switches: &shared_switches + key: True/False (boolean) + + * **Be sure to define directories, settings, and switches as reusable variables as well** + + + they will be "inherited" in the post-processing yamls created + +* **experiments**: list of post-processing experiments + + - subsections: ``name``, ``pp``, ``analysis`` + + .. code-block:: + + experiments: + - name: name of post-processing experiment (string) + pp: + - path/to/post-processing/yaml for that experiment in relation to model yaml (string) + analysis: + - path/to/analysis/yaml for that experiment in relation to model yaml (string) + +Compile Yaml +------------ +The compile yaml defines compilation information including component names, repos, branches, necessary flags, and necessary overrides. This is discussed more in the "Build FMS Model" section. + +Platforms Yaml +-------------- +The platform yaml contains user-defined information for both bare-metal and container platforms. Information includes the platform name, the compiler used, necessary modules to load, an mkmf template, fc, cc, container build, and container run. This yaml file is not model specific. + + .. code-block:: + + platforms: + - name: the platform name + compiler: the compiler you are using + modulesInit: ["array of commands that are needed to load modules.", "each command must end with a newline character"] + modules: [array of modules to load including compiler] + mkTemplate: The location of the mkmf make template + modelRoot: The root directory of the model (where src, exec, experiments will go) + - container platform: container platform name + compiler: compiler you are using + RUNenv: Commands needed at the beginning of a RUN in dockerfile + modelRoot: The root directory of the model (where src, exec, experiments will go) INSIDE of the container (/apps) + container: True if this is a container platform + containerBuild: "podman" - the container build program + containerRun: "apptainer" - the container run program + + +Post-Processing Yaml +-------------------- +The post-processing yamls include information specific to experiments, such as directories to data and other scripts used, switches, and component information. 
+Compile Yaml
+------------
+The compile yaml defines compilation information including component names, repos, branches, necessary flags, and necessary overrides. This is discussed more in the "Build FMS Model" section.
+
+Platforms Yaml
+--------------
+The platform yaml contains user defined information for both bare-metal and container platforms. Information includes the platform name, the compiler used, necessary modules to load, an mkmf template, the Fortran and C compilers (fc and cc), and the container build and run programs. This yaml file is not model specific.
+
+  .. code-block::
+
+    platforms:
+      - name: the platform name
+        compiler: the compiler you are using
+        modulesInit: ["array of commands that are needed to load modules." , "each command must end with a newline character"]
+        modules: [array of modules to load including compiler]
+        mkTemplate: The location of the mkmf make template
+        modelRoot: The root directory of the model (where src, exec, experiments will go)
+      - name: the container platform name
+        compiler: compiler you are using
+        RUNenv: Commands needed at the beginning of a RUN in dockerfile
+        modelRoot: The root directory of the model (where src, exec, experiments will go) INSIDE of the container (/apps)
+        container: True if this is a container platform
+        containerBuild: "podman" - the container build program
+        containerRun: "apptainer" - the container run program
+
+Post-Processing Yaml
+--------------------
+The post-processing yamls include information specific to experiments, such as directories to data and other scripts used, switches, and component information. The post-processing yaml can further define more ``fre_properties`` that may be experiment specific. If there are any repeated reusable variables, the ones set in this yaml will overwrite those set in the model yaml. This is discussed further in the "Postprocess FMS History Output" section.
diff --git a/docs/what-is-fre.rst b/docs/what-is-fre.rst
new file mode 100644
index 00000000..6e289ff6
--- /dev/null
+++ b/docs/what-is-fre.rst
@@ -0,0 +1,13 @@
+============
+What is FRE?
+============
+
+FRE, the FMS Runtime Environment, is the companion runtime workflow for FMS-based climate and earth system models, and contains scripts and batch job handlers to compile models, run experiments, and postprocess and analyze the output. Developed around 2004 by GFDL's Modeling Systems Division, FRE lived primarily in one repository ("fre-commands", https://github.com/NOAA-GFDL/FRE), used subtools in another repository (FRE-NCtools, https://github.com/NOAA-GFDL/fre-nctools), and was deployed using a set of Environment Modules (https://gitlab.gfdl.noaa.gov/fre/modulefiles). Originally, the major releases of FRE were rivers (Arkansas, Bronx) and the minor releases were numbers. In practice, though, the "Bronx" release name was retained and the number has been incremented over the years; e.g. Bronx-23 is the latest release.
+
+Over the last couple of years, MSD's workflow team has reengineered the compiling and postprocessing parts of FRE into a modern python- and Cylc-based ecosystem (running experiments is not yet possible with this new FRE; stay tuned). Following the semantic versioning adopted in other FMS repositories, the reengineered FRE is versioned with a year and an incrementing two-digit number; e.g. the first release of 2024 is 2024.01, the second 2024.02, and the first release next year will be 2025.01. (Optional minor releases are also available in the scheme; e.g. 2024.01.01 would be the first minor/patch release after 2024.01.) This version is used as tags in FRE repositories and in the corresponding conda (and in the future, container) release, and can be retrieved from ``fre --version``.
+
+fre-cli (this repository) can be considered a successor to the FRE Bronx "fre-commands" repository, and primarily contains user-facing tools and subtools. fre-workflows (https://github.com/NOAA-GFDL/fre-workflows) is a companion repository containing workflow definitions that can be run by the Cylc workflow engine. It contains workflow-specific elements previously in FRE Bronx, and allows flexibility to support multiple and more complex workflows. The two new FRE repositories are versioned with the same approach, and updates will be released together for some time to ensure compatibility.
+
+The "cli" in fre-cli derives from the shell "fre SUBCOMMAND COMMAND" structure inspired by git, cylc, and other modern Linux command-line tools. This enables discovery of the tooling capability, useful for complex tools with multiple options; e.g. ``fre --help``, ``fre make --help``, ``fre pp --help``.
+
+Underneath, fre-cli is python, and the workflows and tooling can be run through a Jupyter notebook, or through other python scripts. fre-cli is conda-installable from the "noaa-gfdl" channel (``conda install --channel noaa-gfdl fre-cli``).
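+A quick way to verify an installation and start exploring the command tree (shown for a
+hypothetical 2024.01 install; the exact version string will vary with the release):
+
+.. code-block::
+
+  $ conda install --channel noaa-gfdl fre-cli
+  $ fre --version
+  fre, version 2024.01
+  $ fre make --help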
diff --git a/environment.yml b/environment.yml
index f1564558..89e18766 100644
--- a/environment.yml
+++ b/environment.yml
@@ -12,6 +12,7 @@ dependencies:
   - jsonschema
   - noaa-gfdl::fre-nctools
   - noaa-gfdl::catalogbuilder
+  - noaa-gfdl::analysis_scripts
   - conda-forge::nccmp
   - conda-forge::cylc-flow>=8.2.0
   - conda-forge::cylc-rose
diff --git a/.public/.nojekyll b/fre/analysis/__init__.py
similarity index 100%
rename from .public/.nojekyll
rename to fre/analysis/__init__.py
diff --git a/fre/analysis/freanalysis.py b/fre/analysis/freanalysis.py
new file mode 100644
index 00000000..212b13d5
--- /dev/null
+++ b/fre/analysis/freanalysis.py
@@ -0,0 +1,64 @@
+import click
+
+from .subtools import install_analysis_package, list_plugins, run_analysis, \
+                      uninstall_analysis_package
+
+
+@click.group(help=click.style(" - access fre analysis subcommands", fg=(250, 154, 90)))
+def analysis_cli():
+    """Entry point to fre analysis click commands."""
+    pass
+
+
+@analysis_cli.command()
+@click.option("--url", type=str, required=True, help="URL of the github repository.")
+@click.option("--name", type=str, required=False, help="Subdirectory to pip install.")
+@click.option("--library-directory", type=str, required=False,
+              help="Path to a custom lib directory.")
+def install(url, name, library_directory):
+    """Installs an analysis package."""
+    install_analysis_package(url, name, library_directory)
+
+
+@analysis_cli.command(name="list")
+@click.option("--library-directory", type=str, required=False,
+              help="Path to a custom lib directory.")
+def list_packages(library_directory):
+    """List available plugins."""
+    plugins = list_plugins(library_directory)
+    if plugins:
+        print("Installed analysis packages:\n")
+        for plugin in plugins:
+            print(plugin)
+    else:
+        print("No analysis packages found.")
+
+
+@analysis_cli.command()
+@click.option("--name", type=str, required=True, help="Name of the analysis script.")
+@click.option("--catalog", type=str, required=True, help="Path to the data catalog.")
+@click.option("--output-directory", type=str, required=True,
+              help="Path to the output directory.")
+@click.option("--output-yaml", type=str, required=True, help="Path to the output yaml.")
+@click.option("--experiment-yaml", type=str, required=True, help="Path to the experiment yaml.")
+@click.option("--library-directory", type=str, required=False,
+              help="Path to a custom lib directory.")
+def run(name, catalog, output_directory, output_yaml, experiment_yaml,
+        library_directory):
+    """Runs the analysis script and writes the paths to the created figures to a yaml file."""
+    run_analysis(name, catalog, output_directory, output_yaml, experiment_yaml,
+                 library_directory)
+
+
+@analysis_cli.command()
+@click.option("--name", type=str, required=True, help="Name of package to uninstall.")
+@click.option("--library-directory", type=str, required=False,
+              help="Path to a custom lib directory.")
+def uninstall(name, library_directory):
+    """Uninstall an analysis package."""
+    uninstall_analysis_package(name, library_directory)
+
+
+if __name__ == "__main__":
+    analysis_cli()
diff --git a/fre/analysis/subtools.py b/fre/analysis/subtools.py
new file mode 100644
index 00000000..549060db
--- /dev/null
+++ b/fre/analysis/subtools.py
@@ -0,0 +1,123 @@
+from pathlib import Path
+from subprocess import run
+from tempfile import TemporaryDirectory
+
+from analysis_scripts import available_plugins, run_plugin, VirtualEnvManager
+from yaml import safe_load
+
+
+def install_analysis_package(url, name=None, library_directory=None):
+    """Installs the analysis package.
+
+    Args:
+        url: URL to the github repository for the analysis package.
+        name: String name of the analysis-script package.
+        library_directory: Directory to install the package in.
+    """
+    # Clean up the url if necessary.
+    if not url.startswith("https://"):
+        url = f"https://{url}"
+    if not url.endswith(".git"):
+        url = f"{url}.git"
+
+    # Get the absolute path of the input library_directory.
+    if library_directory:
+        library_directory = Path(library_directory).resolve()
+
+    if name:
+        # If a name is given, then expect that the analysis script is part of the noaa-gfdl
+        # github repository.
+        with TemporaryDirectory() as tmp:
+            tmp_path = Path(tmp)
+            run(["git", "clone", url, str(tmp_path / "scripts")], check=True)
+
+            if library_directory:
+                # If a library directory is given, install the analysis script in a virtual
+                # environment.
+                env = VirtualEnvManager(library_directory)
+                env.create_env()
+                env.install_package(str(tmp_path / "scripts" / "core" / "analysis_scripts"))
+                env.install_package(str(tmp_path / "scripts" / "core" / "figure_tools"))
+                env.install_package(str(tmp_path / "scripts" / "user-analysis-scripts" / name))
+            else:
+                run(["pip", "install", str(tmp_path / "scripts" / "core" / "figure_tools")],
+                    check=True)
+                run(["pip", "install", str(tmp_path / "scripts" / "user-analysis-scripts" / name)],
+                    check=True)
+    else:
+        if library_directory:
+            env = VirtualEnvManager(library_directory)
+            env.create_env()
+            # Clone the repository so the core analysis_scripts package can be installed
+            # into the virtual environment before the plugin itself.
+            with TemporaryDirectory() as tmp:
+                tmp_path = Path(tmp)
+                run(["git", "clone", url, str(tmp_path / "scripts")], check=True)
+                env.install_package(str(tmp_path / "scripts" / "core" / "analysis_scripts"))
+            env.install_package(f"{url}@main")
+        else:
+            # pip expects the git+ prefix when installing directly from a git repository.
+            run(["pip", "install", f"git+{url}@main"], check=True)
+
+
+def list_plugins(library_directory=None):
+    """Finds the list of analysis scripts.
+
+    Args:
+        library_directory: Directory where the analysis package is installed.
+
+    Returns:
+        List of string plugin names.
+    """
+    if library_directory:
+        env = VirtualEnvManager(library_directory)
+        return env.list_plugins()
+    else:
+        return available_plugins()
+
+
+def run_analysis(name, catalog, output_directory, output_yaml, experiment_yaml,
+                 library_directory=None):
+    """Runs the analysis script and writes the paths to the created figures to a yaml file.
+
+    Args:
+        name: String name of the analysis script.
+        catalog: Path to the data catalog.
+        output_directory: Path to the output directory.
+        output_yaml: Path to the output yaml.
+        experiment_yaml: Path to the experiment yaml.
+        library_directory: Directory where the analysis package is installed.
+    """
+
+    # Create the directory for the figures.
+    Path(output_directory).mkdir(parents=True, exist_ok=True)
+
+    # Parse the configuration out of the experiment yaml file.
+    with open(experiment_yaml) as file_:
+        config_yaml = safe_load(file_)
+    try:
+        configuration = config_yaml["analysis"][name]["required"]
+    except KeyError:
+        configuration = None
+
+    # Run the analysis.
+    if library_directory:
+        env = VirtualEnvManager(library_directory)
+        figure_paths = env.run_analysis_plugin(name, catalog, output_directory,
+                                               config=configuration)
+    else:
+        figure_paths = run_plugin(name, catalog, output_directory, config=configuration)
+
+    # Write out the figure paths to a file.
+    with open(output_yaml, "w") as output:
+        output.write("figure_paths:\n")
+        for path in figure_paths:
+            output.write(f"  - {Path(path).resolve()}\n")
+
+
+def uninstall_analysis_package(name, library_directory=None):
+    """Uninstalls the analysis package.
+
+    Args:
+        name: String name of the analysis-script package.
+        library_directory: Directory where the package was installed.
+    """
+    if library_directory:
+        env = VirtualEnvManager(library_directory)
+        env.uninstall_package(name)
+    else:
+        run(["pip", "uninstall", "-y", name], check=True)
diff --git a/fre/analysis/tests/__init__.py b/fre/analysis/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/fre/analysis/tests/test_subtools.py b/fre/analysis/tests/test_subtools.py
new file mode 100644
index 00000000..66815457
--- /dev/null
+++ b/fre/analysis/tests/test_subtools.py
@@ -0,0 +1,61 @@
+from pathlib import Path
+import pytest
+from subprocess import CalledProcessError
+from tempfile import TemporaryDirectory
+
+from analysis_scripts import UnknownPluginError
+from fre.analysis.subtools import install_analysis_package, list_plugins, run_analysis
+
+
+def make_experiment_yaml(path, name, whitespace=" "):
+    """Creates an experiment yaml configuration file for testing.
+
+    Args:
+        path: Path to the experiment yaml file that will be created.
+        name: String name of the analysis package.
+        whitespace: Amount of whitespace each block will be indented by.
+    """
+    with open(path, "w") as yaml_:
+        yaml_.write("analysis:\n")
+        yaml_.write(f"{whitespace}{name}:\n")
+        yaml_.write(f"{2*whitespace}required:\n")
+        yaml_.write(f"{3*whitespace}arg: value\n")
+
+
+def test_install_analysis_package():
+    """Tests installing an analysis package."""
+    url = "github.com/noaa-gfdl/analysis-scripts"
+    name = "freanalysis_clouds"
+    with TemporaryDirectory() as tmp:
+        install_analysis_package(url, name, tmp)
+        plugins = list_plugins(tmp)
+        assert name in plugins
+
+
+def test_run_analysis():
+    """Tests running an analysis package. Expects to fail because we don't make a catalog."""
+    name = "freanalysis_clouds"
+    with TemporaryDirectory() as tmp:
+        experiment_yaml = Path(tmp) / "experiment.yaml"
+        make_experiment_yaml(experiment_yaml, name)
+        library_directory = Path(tmp) / "env"
+        url = "github.com/noaa-gfdl/analysis-scripts"
+        catalog = Path(tmp) / "fake-catalog"
+        install_analysis_package(url, name, library_directory)
+        with pytest.raises(CalledProcessError) as err:
+            run_analysis(name, str(catalog), ".", "output.yaml", experiment_yaml,
+                         library_directory)
+        for line in err.value.output.decode("utf-8").split("\n"):
+            if f"No such file or directory: '{str(catalog)}'" in line:
+                return
+        pytest.fail("expected a missing-catalog error from run_analysis")
+
+
+def test_run_unknown_analysis():
+    """Get an UnknownPluginError when trying to run an uninstalled package."""
+    name = "freanalysis_clouds"
+    with TemporaryDirectory() as tmp:
+        experiment_yaml = Path(tmp) / "experiment.yaml"
+        make_experiment_yaml(experiment_yaml, name)
+        with pytest.raises(UnknownPluginError):
+            run_analysis(name, "fake-catalog", ".", "output.yaml", experiment_yaml)
diff --git a/fre/app/freapp.py b/fre/app/freapp.py
index 545356e4..29c34385 100644
--- a/fre/app/freapp.py
+++ b/fre/app/freapp.py
@@ -58,6 +58,7 @@ def app_cli():
 def regrid(context, input_dir, output_dir, begin, tmp_dir,
            remap_dir, source, grid_spec, def_xy_interp ):
+    # pylint: disable=unused-argument
     ''' regrid target netcdf file '''
     context.forward(_regrid_xy)
 
diff --git a/fre/app/generate_time_averages/tests/.unused_tests/test_multiply_duration.py b/fre/app/generate_time_averages/tests/.unused_tests/test_multiply_duration.py
deleted file mode 100644
index 704907b5..00000000
--- a/fre/app/generate_time_averages/tests/.unused_tests/test_multiply_duration.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from 
fre_python_tools.utilities.multiply_duration import multiply_duration -import metomi.isodatetime.parsers as parse - -def test_month(): - '''1 month x 2 = 2 months''' - two_months = parse.DurationParser().parse('P2M') - assert multiply_duration('P1M', 2) == two_months - -def test_minutes(): - '''12 minutes x 5 = 1 hour''' - hour = parse.DurationParser().parse('PT1H') - assert multiply_duration('PT12M', 5) == hour - -def test_fail(): - '''10 minutes x 5 != 1 hour''' - hour = parse.DurationParser().parse('PT1H') - assert multiply_duration('PT10M', 5) != hour diff --git a/fre/app/generate_time_averages/tests/.unused_tests/test_subtract_durations.py b/fre/app/generate_time_averages/tests/.unused_tests/test_subtract_durations.py deleted file mode 100644 index f495745d..00000000 --- a/fre/app/generate_time_averages/tests/.unused_tests/test_subtract_durations.py +++ /dev/null @@ -1,17 +0,0 @@ -from fre_python_tools.utilities.subtract_durations import subtract_durations -import metomi.isodatetime.parsers as parse - -def test_months(): - '''13 months - 3 months = 10 months''' - ten_months = parse.DurationParser().parse('P10M') - assert subtract_durations('P13M', 'P3M') == ten_months - -def test_hour(): - '''2 hours minus 30 minutes = 90 minutes''' - ninety_mins = parse.DurationParser().parse('PT90M') - assert subtract_durations('PT2H', 'PT30M') == ninety_mins - -def test_fail(): - '''2 hours minus 60 minutes != 90 minutes''' - ninety_mins = parse.DurationParser().parse('PT90M') - assert subtract_durations('PT2H', 'PT60M') != ninety_mins diff --git a/fre/app/generate_time_averages/tests/test_generate_time_averages.py b/fre/app/generate_time_averages/tests/test_generate_time_averages.py index 4ce3963d..7e1345d9 100644 --- a/fre/app/generate_time_averages/tests/test_generate_time_averages.py +++ b/fre/app/generate_time_averages/tests/test_generate_time_averages.py @@ -95,32 +95,32 @@ def test_cdo_time_unwgt_stddevs(): #def test_cdo_time_stddevs(): ## frepythontools avgs+stddevs, weighted+unweighted, all ------------------------ -def test_fre_python_tools_time_avgs(): - ''' generates a time averaged file using fre_python_tools's version ''' +def test_fre_cli_time_avgs(): + ''' generates a time averaged file using fre_cli's version ''' ''' weighted average, no std deviation ''' assert run_avgtype_pkg_calculations( infile = (time_avg_file_dir+test_file_name), outfile = (time_avg_file_dir+'frepytools_timavg_'+test_file_name), pkg='fre-python-tools',avg_type='all', unwgt=False ) -def test_fre_python_tools_time_unwgt_avgs(): - ''' generates a time averaged file using fre_python_tools's version ''' +def test_fre_cli_time_unwgt_avgs(): + ''' generates a time averaged file using fre_cli's version ''' ''' weighted average, no std deviation ''' assert run_avgtype_pkg_calculations( infile = (time_avg_file_dir+test_file_name), outfile = (time_avg_file_dir+'frepytools_unwgt_timavg_'+test_file_name), pkg='fre-python-tools',avg_type='all', unwgt=True ) -def test_fre_python_tools_time_avgs_stddevs(): - ''' generates a time averaged file using fre_python_tools's version ''' +def test_fre_cli_time_avgs_stddevs(): + ''' generates a time averaged file using fre_cli's version ''' ''' weighted average, no std deviation ''' assert run_avgtype_pkg_calculations( infile = (time_avg_file_dir+test_file_name), outfile = (time_avg_file_dir+'frepytools_stddev_'+test_file_name), pkg='fre-python-tools',avg_type='all', stddev_type='samp', unwgt=False ) -def test_fre_python_tools_time_unwgt_avgs_stddevs(): - ''' generates a time averaged 
file using fre_python_tools's version ''' +def test_fre_cli_time_unwgt_avgs_stddevs(): + ''' generates a time averaged file using fre_cli's version ''' ''' weighted average, no std deviation ''' assert run_avgtype_pkg_calculations( infile = (time_avg_file_dir+test_file_name), @@ -128,15 +128,15 @@ def test_fre_python_tools_time_unwgt_avgs_stddevs(): pkg='fre-python-tools',avg_type='all', stddev_type='samp', unwgt=True ) ## (TODO) WRITE THESE VERSIONS FOR FREPYTOOLSTIMEAVERAGER CLASS THEN MAKE THESE TESTS -#def test_monthly_fre_python_tools_time_avgs(): -#def test_monthly_fre_python_tools_time_unwgt_avgs(): -#def test_monthly_fre_python_tools_time_avgs_stddevs(): -#def test_monthly_fre_python_tools_time_unwgt_avgs_stddevs(): +#def test_monthly_fre_cli_time_avgs(): +#def test_monthly_fre_cli_time_unwgt_avgs(): +#def test_monthly_fre_cli_time_avgs_stddevs(): +#def test_monthly_fre_cli_time_unwgt_avgs_stddevs(): # -#def test_seasonal_fre_python_tools_time_avgs(): -#def test_seasonal_fre_python_tools_time_unwgt_avgs(): -#def test_seasonal_fre_python_tools_time_avgs_stddevs(): -#def test_seasonal_fre_python_tools_time_unwgt_avgs_stddevs(:) +#def test_seasonal_fre_cli_time_avgs(): +#def test_seasonal_fre_cli_time_unwgt_avgs(): +#def test_seasonal_fre_cli_time_avgs_stddevs(): +#def test_seasonal_fre_cli_time_unwgt_avgs_stddevs(:) @@ -144,7 +144,7 @@ def test_fre_python_tools_time_unwgt_avgs_stddevs(): #alt_str_fre_nctools_inf= \ # 'tests/time_avg_test_files/fre_nctools_timavg_CLI_test_r8_b_atmos_LWP_1979_5y.nc' #def test_fre_nctools_time_avgs(): -# ''' generates a time averaged file using fre_python_tools's version ''' +# ''' generates a time averaged file using fre_cli's version ''' # ''' weighted average, no std deviation ''' # infile =time_avg_file_dir+test_file_name # all_outfile=time_avg_file_dir+'frenctools_timavg_'+test_file_name @@ -153,7 +153,7 @@ def test_fre_python_tools_time_unwgt_avgs_stddevs(): # print('output test file exists. deleting before remaking.') # pl.Path(all_outfile).unlink() #delete file so we check that it can be recreated # -# from fre_python_tools.generate_time_averages import generate_time_averages as gtas +# from fre_cli.generate_time_averages import generate_time_averages as gtas # gtas.generate_time_average(infile = infile, outfile = all_outfile, # pkg='fre-nctools', unwgt=False, avg_type='all') # assert pl.Path(all_outfile).exists() @@ -169,8 +169,8 @@ def test_fre_python_tools_time_unwgt_avgs_stddevs(): str_unwgt_cdo_inf=time_avg_file_dir+'timmean_unwgt_'+test_file_name -def test_compare_fre_python_tools_to_fre_nctools(): - ''' compares fre_python_tools pkg answer to fre_nctools pkg answer ''' +def test_compare_fre_cli_to_fre_nctools(): + ''' compares fre_cli pkg answer to fre_nctools pkg answer ''' import numpy as np import netCDF4 as nc fre_pytools_inf=nc.Dataset(str_fre_pytools_inf,'r') @@ -208,8 +208,8 @@ def test_compare_fre_python_tools_to_fre_nctools(): assert not( (non_zero_count > 0.) or (non_zero_count < 0.) ) @pytest.mark.skip(reason='test fails b.c. cdo cannot bitwise-reproduce fre-nctools answer') -def test_compare_fre_python_tools_to_cdo(): - ''' compares fre_python_tools pkg answer to cdo pkg answer ''' +def test_compare_fre_cli_to_cdo(): + ''' compares fre_cli pkg answer to cdo pkg answer ''' import numpy as np import netCDF4 as nc fre_pytools_inf=nc.Dataset(str_fre_pytools_inf,'r') @@ -239,8 +239,8 @@ def test_compare_fre_python_tools_to_cdo(): assert not( (non_zero_count > 0.) or (non_zero_count < 0.) 
) -def test_compare_unwgt_fre_python_tools_to_unwgt_cdo(): - ''' compares fre_python_tools pkg answer to cdo pkg answer ''' +def test_compare_unwgt_fre_cli_to_unwgt_cdo(): + ''' compares fre_cli pkg answer to cdo pkg answer ''' import numpy as np import netCDF4 as nc fre_pytools_inf=nc.Dataset(str_unwgt_fre_pytools_inf,'r') diff --git a/fre/app/regrid_xy/regrid_xy.py b/fre/app/regrid_xy/regrid_xy.py index 8721b36a..7b363e32 100755 --- a/fre/app/regrid_xy/regrid_xy.py +++ b/fre/app/regrid_xy/regrid_xy.py @@ -164,7 +164,7 @@ def regrid_xy(input_dir = None, output_dir = None, begin = None, tmp_dir = None, """ ## rose config load check - config_name = os.getcwd() + config_name = os.getcwd() #REMOVE ME TODO config_name += '/rose-app-run.conf' #config_name += '/rose-app.conf' print(f'config_name = {config_name}') @@ -235,8 +235,6 @@ def regrid_xy(input_dir = None, output_dir = None, begin = None, tmp_dir = None, # grid_spec file management - #starting_dir = os.getcwd() - #os.chdir(work_dir) # i hate it if '.tar' in grid_spec: untar_sp = \ subprocess.run( ['tar', '-xvf', grid_spec, '-C', input_dir], @@ -463,7 +461,6 @@ def regrid_xy(input_dir = None, output_dir = None, begin = None, tmp_dir = None, continue # end of comp loop, exit or next one. - #os.chdir(starting_dir) # not clear this is necessary. print('done running regrid_xy()') return 0 diff --git a/fre/coveragerc b/fre/coveragerc index f95c3c41..0e936b95 100644 --- a/fre/coveragerc +++ b/fre/coveragerc @@ -1,3 +1,15 @@ +# https://pytest-cov.readthedocs.io/en/latest/config.html [run] omit = - */test_*py + fre/tests/* + fre/app/generate_time_averages/tests/* + fre/app/regrid_xy/tests/* + fre/catalog/tests/* + fre/check + fre/cmor/tests/* + fre/list + fre/make/tests/* + fre/pp/tests/* + fre/run + fre/test + fre/yamltools/tests/* diff --git a/fre/fre.py b/fre/fre.py index 9dd65eb0..61ebb73c 100644 --- a/fre/fre.py +++ b/fre/fre.py @@ -7,6 +7,21 @@ be called via this script. I.e. 'fre' is the entry point """ +#versioning... always fun... +# turn xxxx.y into xxxx.0y +import importlib.metadata +version_unexpanded = importlib.metadata.version('fre-cli') +version_unexpanded_split = version_unexpanded.split('.') +if len(version_unexpanded_split[1]) == 1: + version_minor = "0" + version_unexpanded_split[1] +else: + version_minor = version_unexpanded_split[1] +version = version_unexpanded_split[0] + '.' 
+ version_minor + + + + + import click from .lazy_group import LazyGroup @@ -21,21 +36,21 @@ "yamltools": ".yamltools.freyamltools.yamltools_cli", "make": ".make.fremake.make_cli", "app": ".app.freapp.app_cli", - "cmor": ".cmor.frecmor.cmor_cli" }, + "cmor": ".cmor.frecmor.cmor_cli", + "analysis": ".analysis.freanalysis.analysis_cli"}, help = click.style( "'fre' is the main CLI click group that houses the other tool groups as lazy subcommands.", fg='cyan') ) + @click.version_option( package_name = "fre-cli", - message = click.style("%(package)s | %(version)s", - fg = (155,255,172) ) + version=version ) def fre(): ''' entry point function to subgroup functions ''' - if __name__ == '__main__': fre() diff --git a/fre/gfdl_msd_schemas b/fre/gfdl_msd_schemas new file mode 160000 index 00000000..32b9d7ca --- /dev/null +++ b/fre/gfdl_msd_schemas @@ -0,0 +1 @@ +Subproject commit 32b9d7ca00aa314b781341dda4d241d48e588d18 diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py deleted file mode 100644 index fc4d2df4..00000000 --- a/fre/make/createCheckout.py +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import logging -import sys -import click -import fre.yamltools.combine_yamls as cy -from .gfdlfremake import varsfre, yamlfre, checkout, targetfre - -def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): - # Define variables - yml = yamlfile - name = yamlfile.split(".")[0] - run = execute - jobs = str(jobs) - pcheck = no_parallel_checkout - - if pcheck: - pc = "" - else: - pc = " &" - - if verbose: - logging.basicConfig(level=logging.INFO) - else: - logging.basicConfig(level=logging.ERROR) - - srcDir="src" - checkoutScriptName = "checkout.sh" - baremetalRun = False # This is needed if there are no bare metal runs - - ## Split and store the platforms and targets in a list - plist = platform - tlist = target - - # Combine model, compile, and platform yamls - # Default behavior - combine yamls / rewrite combined yaml - comb = cy.init_compile_yaml(yml,platform,target) - full_combined = cy.get_combined_compileyaml(comb) - - ## Get the variables in the model yaml - freVars = varsfre.frevars(full_combined) - - ## Open the yaml file, validate the yaml, and parse as fremakeYaml - modelYaml = yamlfre.freyaml(full_combined,freVars) - fremakeYaml = modelYaml.getCompileYaml() - - ## Error checking the targets - for targetName in tlist: - target = targetfre.fretarget(targetName) - - ## Loop through the platforms specified on the command line - ## If the platform is a baremetal platform, write the checkout script and run it once - ## This should be done separately and serially because bare metal platforms should all be using - ## the same source code. 
-    for platformName in plist:
-        if modelYaml.platforms.hasPlatform(platformName):
-            pass
-        else:
-            raise ValueError (platformName + " does not exist in platforms.yaml")
-        ( compiler, modules, modulesInit, fc, cc, modelRoot,
-          iscontainer, mkTemplate, containerBuild, ContainerRun,
-          RUNenv ) = modelYaml.platforms.getPlatformFromName(platformName)
-
-        ## Create the source directory for the platform
-        if iscontainer is False:
-            srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src"
-            # if the source directory does not exist, it is created
-            if not os.path.exists(srcDir):
-                os.system("mkdir -p " + srcDir)
-            # if the checkout script does not exist, it is created
-            if not os.path.exists(srcDir+"/checkout.sh"):
-                freCheckout = checkout.checkout("checkout.sh",srcDir)
-                freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc)
-                freCheckout.finish(pc)
-                # Make checkout script executable
-                os.chmod(srcDir+"/checkout.sh", 0o744)
-                print("\nCheckout script created in "+ srcDir + "/checkout.sh \n")
-
-                # Run the checkout script
-                if run is True:
-                    freCheckout.run()
-                else:
-                    sys.exit()
-            else:
-                print("\nCheckout script PREVIOUSLY created in "+ srcDir + "/checkout.sh \n")
-                if run == True:
-                    try:
-                        subprocess.run(args=[srcDir+"/checkout.sh"], check=True)
-                    except:
-                        print("\nThere was an error with the checkout script "+srcDir+"/checkout.sh.",
-                              "\nTry removing test folder: " + modelRoot +"\n")
-                        raise
-                else:
-                    sys.exit()
-
-        else:
-            image="ecpe4s/noaa-intel-prototype:2023.09.25"
-            bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec"
-            tmpDir = "tmp/"+platformName
-            freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir)
-            freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc)
-            freCheckout.finish(pc)
-            print("\nCheckout script created at " + tmpDir + "/checkout.sh" + "\n")
-
-@click.command()
-def _checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose):
-    '''
-    Decorator for calling checkout_create - allows the decorated version
-    of the function to be separate from the undecorated version
-    '''
-    return checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose)
-
-if __name__ == "__main__":
-    checkout_create()
diff --git a/fre/make/create_checkout_script.py b/fre/make/create_checkout_script.py
new file mode 100644
index 00000000..336a6b85
--- /dev/null
+++ b/fre/make/create_checkout_script.py
@@ -0,0 +1,118 @@
+'''
+creates a checkout script for a given model from the combined
+model, compile, and platform yamls, and optionally runs it
+'''
+
+import os
+import subprocess
+import logging
+import sys
+import click
+import fre.yamltools.combine_yamls as cy
+from .gfdlfremake import varsfre, yamlfre, checkout, targetfre
+
+def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose):
+    # Define variables
+    yml = yamlfile
+    name = yamlfile.split(".")[0]
+    run = execute
+    jobs = str(jobs)
+    pcheck = no_parallel_checkout
+
+    if pcheck:
+        pc = ""
+    else:
+        pc = " &"
+
+    if verbose:
+        logging.basicConfig(level=logging.INFO)
+    else:
+        logging.basicConfig(level=logging.ERROR)
+
+    src_dir="src"
+    checkout_script_name = "checkout.sh"
+    baremetal_run = False # This is needed if there are no bare metal runs
+
+    ## Split and store the platforms and targets in a list
+    plist = platform
+    tlist = target
+
+    # Combine model, compile, and platform yamls
+    # Default behavior - combine yamls / rewrite combined yaml
+    comb = cy.init_compile_yaml(yml,platform,target)
+    full_combined = cy.get_combined_compileyaml(comb)
+
+    ## Get the variables in the model yaml
+    fre_vars = varsfre.frevars(full_combined)
+
+    ## Open the yaml file, validate the yaml, and parse as fremake_yaml
+    model_yaml = yamlfre.freyaml(full_combined,fre_vars)
+    fremake_yaml = model_yaml.getCompileYaml()
+
+    ## Error checking the targets
+    for target_name in tlist:
+        target = targetfre.fretarget(target_name)
+
+    ## Loop through the platforms specified on the command line
+    ## If the platform is a baremetal platform, write the checkout script and run it once
+    ## This should be done separately and serially because bare metal platforms should all be using
+    ## the same source code.
+    for platform_name in plist:
+        if model_yaml.platforms.hasPlatform(platform_name):
+            pass
+        else:
+            raise ValueError (platform_name + " does not exist in platforms.yaml")
+
+        platform = model_yaml.platforms.getPlatformFromName(platform_name)
+
+        # create the source directory for the platform
+        if not platform["container"]:
+            src_dir = platform["modelRoot"] + "/" + fremake_yaml["experiment"] + "/src"
+            # if the source directory does not exist, it is created
+            if not os.path.exists(src_dir):
+                os.system("mkdir -p " + src_dir)
+            # if the checkout script does not exist, it is created
+            if not os.path.exists(src_dir+"/checkout.sh"):
+                fre_checkout = checkout.checkout("checkout.sh",src_dir)
+                fre_checkout.writeCheckout(model_yaml.compile.getCompileYaml(),jobs,pc)
+                fre_checkout.finish(pc)
+                # Make checkout script executable
+                os.chmod(src_dir+"/checkout.sh", 0o744)
+                print("\nCheckout script created in "+ src_dir + "/checkout.sh \n")
+
+                # Run the checkout script
+                if run:
+                    fre_checkout.run()
+                else:
+                    sys.exit()
+            else:
+                print("\nCheckout script PREVIOUSLY created in "+ src_dir + "/checkout.sh \n")
+                if run:
+                    try:
+                        subprocess.run(args=[src_dir+"/checkout.sh"], check=True)
+                    except:
+                        print("\nThere was an error with the checkout script "+src_dir+"/checkout.sh.",
+                              "\nTry removing test folder: " + platform["modelRoot"] +"\n")
+                        raise
+                else:
+                    sys.exit()
+
+        else:
+            src_dir = platform["modelRoot"] + "/" + fremake_yaml["experiment"] + "/src"
+            bld_dir = platform["modelRoot"] + "/" + fremake_yaml["experiment"] + "/exec"
+            tmp_dir = "tmp/"+platform_name
+            fre_checkout = checkout.checkoutForContainer("checkout.sh", src_dir, tmp_dir)
+            fre_checkout.writeCheckout(model_yaml.compile.getCompileYaml(),jobs,pc)
+            fre_checkout.finish(pc)
+            print("\nCheckout script created at " + tmp_dir + "/checkout.sh" + "\n")
+
+@click.command()
+def 
_checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): + ''' + Decorator for calling checkout_create - allows the decorated version + of the function to be separate from the undecorated version + ''' + return checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose) + +if __name__ == "__main__": + checkout_create() diff --git a/fre/make/createCompile.py b/fre/make/create_compile_script.py similarity index 89% rename from fre/make/createCompile.py rename to fre/make/create_compile_script.py index 36068329..6b1bd94a 100644 --- a/fre/make/createCompile.py +++ b/fre/make/create_compile_script.py @@ -58,22 +58,22 @@ def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): else: raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) - (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + platform=modelYaml.platforms.getPlatformFromName(platformName) ## Make the bldDir based on the modelRoot, the platform, and the target - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + srcDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/src" ## Check for type of build - if iscontainer is False: + if platform["container"] is False: baremetalRun = True - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + target.gettargetName() + "/exec" + bldDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + target.gettargetName() + "/exec" os.system("mkdir -p " + bldDir) ## Create a list of compile scripts to run in parallel fremakeBuild = buildBaremetal.buildBaremetal(exp = fremakeYaml["experiment"], - mkTemplatePath = mkTemplate, + mkTemplatePath = platform["mkTemplate"], srcDir = srcDir, bldDir = bldDir, target = target, - modules = modules, - modulesInit = modulesInit, + modules = platform["modules"], + modulesInit = platform["modulesInit"], jobs = jobs) for c in fremakeYaml['src']: fremakeBuild.writeBuildComponents(c) diff --git a/fre/make/createDocker.py b/fre/make/create_docker_script.py similarity index 73% rename from fre/make/createDocker.py rename to fre/make/create_docker_script.py index 59b73ee9..e785dc82 100644 --- a/fre/make/createDocker.py +++ b/fre/make/create_docker_script.py @@ -2,6 +2,7 @@ import os import sys +import subprocess from pathlib import Path import click #from .gfdlfremake import varsfre, targetfre, makefilefre, platformfre, yamlfre, buildDocker @@ -44,38 +45,39 @@ def dockerfile_create(yamlfile,platform,target,execute): raise ValueError (platformName + " does not exist in " + \ modelYaml.combined.get("compile").get("platformYaml")) - ( compiler, modules, modulesInit, fc, cc, modelRoot, - iscontainer, mkTemplate, containerBuild, containerRun, - RUNenv ) = modelYaml.platforms.getPlatformFromName(platformName) + platform = modelYaml.platforms.getPlatformFromName(platformName) ## Make the bldDir based on the modelRoot, the platform, and the target - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + srcDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/src" ## Check for type of build - if iscontainer is True: - image="ecpe4s/noaa-intel-prototype:2023.09.25" - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" + if platform["container"] is True: + image=modelYaml.platforms.getContainerImage(platformName) + bldDir = 
platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/exec" tmpDir = "tmp/"+platformName - dockerBuild = buildDocker.container(base = image, exp = fremakeYaml["experiment"], libs = fremakeYaml["container_addlibs"], - RUNenv = RUNenv, - target = targetObject) + RUNenv = platform["RUNenv"], + target = targetObject, + mkTemplate = platform["mkTemplate"]) dockerBuild.writeDockerfileCheckout("checkout.sh", tmpDir+"/checkout.sh") dockerBuild.writeDockerfileMakefile(tmpDir+"/Makefile", tmpDir+"/linkline.sh") for c in fremakeYaml['src']: dockerBuild.writeDockerfileMkmf(c) - dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") + dockerBuild.writeRunscript(platform["RUNenv"],platform["containerRun"],tmpDir+"/execrunscript.sh") currDir = os.getcwd() click.echo("\ntmpDir created in " + currDir + "/tmp") click.echo("Dockerfile created in " + currDir +"\n") - if run: - dockerBuild.build(containerBuild, containerRun) - else: - sys.exit() + # create build script for container + dockerBuild.createBuildScript(platform["containerBuild"], platform["containerRun"]) + print("Container build script created at "+dockerBuild.userScriptPath+"\n\n") + + # run the script if option is given + if run: + subprocess.run(args=[dockerBuild.userScriptPath], check=True) @click.command() def _dockerfile_create(yamlfile,platform,target,execute): diff --git a/fre/make/createMakefile.py b/fre/make/create_makefile_script.py similarity index 81% rename from fre/make/createMakefile.py rename to fre/make/create_makefile_script.py index eaf340dd..fe12e62d 100644 --- a/fre/make/createMakefile.py +++ b/fre/make/create_makefile_script.py @@ -41,41 +41,40 @@ def makefile_create(yamlfile,platform,target): else: raise ValueError (platformName + " does not exist in " + modelYaml.combined.get("compile").get("platformYaml")) - (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + platform=modelYaml.platforms.getPlatformFromName(platformName) ## Make the bldDir based on the modelRoot, the platform, and the target - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + srcDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/src" ## Check for type of build - if iscontainer is False: + if platform["container"] is False: baremetalRun = True - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + targetObject.gettargetName() + "/exec" + bldDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + targetObject.gettargetName() + "/exec" os.system("mkdir -p " + bldDir) ## Create the Makefile freMakefile = makefilefre.makefile(exp = fremakeYaml["experiment"], libs = fremakeYaml["baremetal_linkerflags"], srcDir = srcDir, bldDir = bldDir, - mkTemplatePath = mkTemplate) + mkTemplatePath = platform["mkTemplate"]) # Loop through components and send the component name, requires, and overrides for the Makefile for c in fremakeYaml['src']: freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) freMakefile.writeMakefile() click.echo("\nMakefile created at " + bldDir + "/Makefile" + "\n") else: - image="ecpe4s/noaa-intel-prototype:2023.09.25" - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" - tmpDir = "tmp/"+platformName + bldDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/exec" + tmpDir = "./tmp/"+platformName freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], libs = 
fremakeYaml["container_addlibs"],
                                                     srcDir = srcDir,
                                                     bldDir = bldDir,
-                                                    mkTemplatePath = mkTemplate,
+                                                    mkTemplatePath = platform["mkTemplate"],
                                                     tmpDir = tmpDir)
     # Loop through components and send the component name and requires for the Makefile
     for c in fremakeYaml['src']:
         freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides'])
     freMakefile.writeMakefile()
-    click.echo("\nMakefile created at " + bldDir + "/Makefile" + "\n")
+    click.echo("\nMakefile created at " + tmpDir + "/Makefile" + "\n")
 
 @click.command()
 def _makefile_create(yamlfile,platform,target):
diff --git a/fre/make/fremake.py b/fre/make/fremake.py
index f39a6be0..5b013457 100644
--- a/fre/make/fremake.py
+++ b/fre/make/fremake.py
@@ -1,31 +1,31 @@
 import click
-from fre.make import createCheckout
-from fre.make import createMakefile
-from fre.make import createCompile
-from fre.make import createDocker
-from fre.make import runFremake
+from fre.make import create_checkout_script
+from fre.make import create_makefile_script
+from fre.make import create_compile_script
+from fre.make import create_docker_script
+from fre.make import run_fremake_script
 
-yamlfile_opt_help = """Experiment yaml compile FILE
+YAMLFILE_OPT_HELP = """Experiment yaml compile FILE
 """
-experiment_opt_help = """Name of experiment"""
-platform_opt_help = """Hardware and software FRE platform space separated list of STRING(s).
+EXPERIMENT_OPT_HELP = """Name of experiment"""
+PLATFORM_OPT_HELP = """Hardware and software FRE platform space separated list of STRING(s).
                        This sets platform-specific data and instructions
 """
-target_opt_help = """a space separated list of STRING(s) that defines compilation settings and
+TARGET_OPT_HELP = """a space separated list of STRING(s) that defines compilation settings and
                      linkage directives for experiments. Predefined targets refer to groups of
                      directives that exist in the mkmf template file (referenced in buildDocker.py).
                      Possible predefined targets include 'prod', 'openmp', 'repro', 'debug', 'hdf5';
                      however 'prod', 'repro', and 'debug' are mutually exclusive (cannot use more
                      than one of these in the target list). Any number of targets can be used.
 """
-parallel_opt_help = """Number of concurrent model compiles (default 1)
+PARALLEL_OPT_HELP = """Number of concurrent model compiles (default 1)
 """
-jobs_opt_help = """Number of jobs to run simultaneously. Used for make -jJOBS and git clone
+JOBS_OPT_HELP = """Number of jobs to run simultaneously. Used for make -jJOBS and git clone
                    recursive --jobs=JOBS
 """
-no_parallel_checkout_opt_help = """Use this option if you do not want a parallel checkout.
+NO_PARALLEL_CHECKOUT_OPT_HELP = """Use this option if you do not want a parallel checkout.
                                    The default is to have parallel checkouts.
""" -verbose_opt_help = """Get verbose messages (repeat the option to increase verbosity level) +VERBOSE_OPT_HELP = """Get verbose messages (repeat the option to increase verbosity level) """ @@ -38,71 +38,77 @@ def make_cli(): @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = platform_opt_help, required = True) + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.option("-n", "--parallel", type = int, metavar = '', default = 1, - help = parallel_opt_help) + help = PARALLEL_OPT_HELP) @click.option("-j", "--jobs", type = int, metavar = '', default = 4, - help = jobs_opt_help) + help = JOBS_OPT_HELP) @click.option("-npc", "--no-parallel-checkout", is_flag = True, - help = no_parallel_checkout_opt_help) + help = NO_PARALLEL_CHECKOUT_OPT_HELP) +@click.option("-e", + "--execute", + is_flag = True, + default = False, + help = "Use this to run the created compilation script.") @click.option("-v", "--verbose", is_flag = True, - help = verbose_opt_help) + help = VERBOSE_OPT_HELP) @click.pass_context -def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel_checkout, verbose): +def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel_checkout, execute, verbose): + # pylint: disable=unused-argument """ - Perform all fremake functions to run checkout and compile model""" - context.forward(runFremake._fremake_run) + context.forward(run_fremake_script._fremake_run) #### @make_cli.command() @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = platform_opt_help, + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.option("-j", "--jobs", type = int, metavar = '', default = 4, - help = jobs_opt_help) + help = JOBS_OPT_HELP) @click.option("-npc", "--no-parallel-checkout", is_flag = True, - help = no_parallel_checkout_opt_help) + help = NO_PARALLEL_CHECKOUT_OPT_HELP) @click.option("--execute", is_flag = True, default = False, @@ -110,33 +116,35 @@ def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel @click.option("-v", "--verbose", is_flag = True, - help = verbose_opt_help) + help = VERBOSE_OPT_HELP) @click.pass_context def create_checkout(context,yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): + # pylint: disable=unused-argument """ - Write the checkout script """ - context.forward(createCheckout._checkout_create) + context.forward(create_checkout_script._checkout_create) ##### @make_cli.command @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = 
str, - help = platform_opt_help, required = True) + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.pass_context def create_makefile(context,yamlfile,platform,target): + # pylint: disable=unused-argument """ - Write the makefile """ - context.forward(createMakefile._makefile_create) + context.forward(create_makefile_script._makefile_create) ##### @@ -144,29 +152,29 @@ def create_makefile(context,yamlfile,platform,target): @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = platform_opt_help, required = True) + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.option("-j", "--jobs", type = int, metavar = '', default = 4, - help = jobs_opt_help) + help = JOBS_OPT_HELP) @click.option("-n", "--parallel", type = int, metavar = '', default = 1, - help = parallel_opt_help) + help = PARALLEL_OPT_HELP) @click.option("--execute", is_flag = True, default = False, @@ -174,35 +182,37 @@ def create_makefile(context,yamlfile,platform,target): @click.option("-v", "--verbose", is_flag = True, - help = verbose_opt_help) + help = VERBOSE_OPT_HELP) @click.pass_context def create_compile(context,yamlfile,platform,target,jobs,parallel,execute,verbose): + # pylint: disable=unused-argument """ - Write the compile script """ - context.forward(createCompile._compile_create) + context.forward(create_compile_script._compile_create) @make_cli.command @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = platform_opt_help, required = True) + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.option("--execute", is_flag = True, help = "Build Dockerfile that has been generated by create-docker.") @click.pass_context def create_dockerfile(context,yamlfile,platform,target,execute): + # pylint: disable=unused-argument """ - Write the dockerfile """ - context.forward(createDocker._dockerfile_create) + context.forward(create_docker_script._dockerfile_create) if __name__ == "__main__": make_cli() diff --git a/fre/make/gfdlfremake/.gitignore b/fre/make/gfdlfremake/.gitignore deleted file mode 100644 index a623caf0..00000000 --- a/fre/make/gfdlfremake/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -__pycache__/ -Dockerfile -checkout.sh -compile.sh -Makefile diff --git a/fre/make/gfdlfremake/.gitlab-ci.yml b/fre/make/gfdlfremake/.gitlab-ci.yml deleted file mode 100644 index 1e1b666c..00000000 --- a/fre/make/gfdlfremake/.gitlab-ci.yml +++ /dev/null @@ -1,12 +0,0 @@ -stages: - - test - -test_build_am5: - stage: test - script: -# conda env -# - /ncrc/sw/gaea-c5/python/3.9/anaconda-base/envs/noaa_py3.9 - - cd yamls/ - - ../fremake -y 
am5.yaml -p ncrc5.intel -t prod - tags: - - ncrc5 diff --git a/fre/make/gfdlfremake/buildBaremetal.py b/fre/make/gfdlfremake/buildBaremetal.py index fdb4e2d8..9e742980 100644 --- a/fre/make/gfdlfremake/buildBaremetal.py +++ b/fre/make/gfdlfremake/buildBaremetal.py @@ -10,14 +10,14 @@ def fremake_parallel(fremakeBuildList): """ Brief: Called for parallel execution purposes. Runs the builds. - Param: + Param: - fremakeBuildList : fremakeBuild object list passes by pool.map """ fremakeBuildList.run() class buildBaremetal(): """ - Brief: Creates the build script to compile the model + Brief: Creates the build script to compile the model Param: - self : The buildScript object - exp : The experiment name @@ -40,22 +40,22 @@ def __init__(self,exp,mkTemplatePath,srcDir,bldDir,target,modules,modulesInit,jo self.template = mkTemplatePath self.modules = "" for m in modules: - self.modules = self.modules +" "+ m + self.modules = f"{self.modules} {m}" ## Set up the top portion of the compile script self.setup=[ "#!/bin/sh -fx \n", - "bld_dir="+self.bld+"/ \n", - "src_dir="+self.src+"/ \n", - "mkmf_template="+self.template+" \n"] + f"bld_dir={self.bld}/ \n", + f"src_dir={self.src}/ \n", + f"mkmf_template={self.template} \n"] if self.modules != "": self.setup.extend(modulesInit) #extend - this is a list - self.setup.append("module load "+self.modules+" \n") # Append -this is a single string + self.setup.append(f"module load {self.modules} \n") # Append -this is a single string ## Create the build directory - os.system("mkdir -p "+self.bld) + os.system(f"mkdir -p {self.bld}") ## Create the compile script - self.f=open(self.bld+"/compile.sh","w") + self.f=open(f"{self.bld}/compile.sh","w") self.f.writelines(self.setup) def writeBuildComponents(self, c): @@ -69,7 +69,7 @@ def writeBuildComponents(self, c): comp = c["component"] # Make the component directory - self.f.write("\n mkdir -p $bld_dir/"+comp+"\n") + self.f.write(f"\n mkdir -p $bld_dir/{comp}\n") # Get the paths needed for compiling pstring = "" @@ -77,16 +77,22 @@ def writeBuildComponents(self, c): pstring = pstring+"$src_dir/"+paths+" " # Run list_paths - self.f.write(" list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+"\n") - self.f.write(" cd $bld_dir/"+comp+"\n") + self.f.write(f" list_paths -l -o $bld_dir/{comp}/pathnames_{comp} {pstring}\n") + self.f.write(f" cd $bld_dir/{comp}\n") # Create the mkmf line # If this lib doesnt have any code dependencies and # it requires the preprocessor (no -o and yes --use-cpp) if c["requires"] == [] and c["doF90Cpp"]: - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir " + "-p lib"+comp+".a -t $mkmf_template --use-cpp " + "-c \""+c["cppdefs"]+"\" "+c["otherFlags"] + +" $bld_dir/"+comp+"/pathnames_"+comp+" \n") elif c["requires"] == []: # If this lib doesnt have any code dependencies (no -o) - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir " + "-p lib"+comp+".a -t $mkmf_template -c \"" + +c["cppdefs"]+"\" "+c["otherFlags"] + +" $bld_dir/"+comp+"/pathnames_"+comp+" \n") else: #Has requirements #Set up the requirements as a string to inclue after the -o reqstring = "" @@ -95,9 +101,15 @@ def writeBuildComponents(self, 
c):
 
         #Figure out if we need the preprocessor
         if c["doF90Cpp"]:
-            self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+            self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir "
+                         "-p lib"+comp+".a -t $mkmf_template --use-cpp "
+                         "-c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "
+                         +c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
         else:
-            self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
+            self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir "
+                         "-p lib"+comp+".a -t $mkmf_template -c \""
+                         +c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]
+                         +" $bld_dir/"+comp+"/pathnames_"+comp+" \n")
 
 ##TODO: add targets input
     def writeScript(self):
@@ -106,8 +118,8 @@ def writeScript(self):
         Param:
             - self : The buildScript object
         """
-        self.f.write("cd "+self.bld+"\n")
-        self.f.write(self.make+"\n")
+        self.f.write(f"cd {self.bld}\n")
+        self.f.write(f"{self.make}\n")
         self.f.close()
 
         # Make compile script executable
@@ -120,10 +132,22 @@ def run(self):
         Param:
             - self : The dockerfile object
         """
-###### TODO make the Makefile
-        command = [self.bld+"/compile.sh","|","tee",self.bld+"/log.compile"]
-        try:
-            subprocess.run(args=command, check=True)
-        except:
-            print("There was an error running "+self.bld+"/compile.sh")
-            raise
+        command = [self.bld+"/compile.sh"]
+
+        # Run compile script
+        p1 = subprocess.Popen(command, stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
+
+        # Direct output to log file as well
+        p2 = subprocess.Popen(["tee",self.bld+"/log.compile"], stdin=p1.stdout)
+
+        # Allow process1 to receive SIGPIPE if process2 exits
+        p1.stdout.close()
+        p2.communicate()
+
+        # wait for process1 to finish before checking return code
+        p1.wait()
+        if p1.returncode != 0:
+            print(f"\nThere was an error running {self.bld}/compile.sh")
+            print(f"Check the log file: {self.bld}/log.compile")
+        else:
+            print(f"\nSuccessful run of {self.bld}/compile.sh")
diff --git a/fre/make/gfdlfremake/buildDocker.py b/fre/make/gfdlfremake/buildDocker.py
index 6d33d0d2..491082ef 100644
--- a/fre/make/gfdlfremake/buildDocker.py
+++ b/fre/make/gfdlfremake/buildDocker.py
@@ -9,16 +9,16 @@
 class container():
     """
     Brief: Opens the Dockerfile for writing
-    Param: 
+    Param:
         - self : The dockerfile object
         - base : The docker base image to start from
         - libs : Additional libraries defined by user
        - exp : The experiment name
-        - RUNenv : The commands that have to be run at 
+        - RUNenv : The commands that have to be run at
                    the beginning of a RUN in the dockerfile
                    to set up the environment
     """
-    def __init__(self,base,exp,libs,RUNenv,target):
+    def __init__(self,base,exp,libs,RUNenv,target,mkTemplate):
         """
         Initialize variables and write to the dockerfile
         """
@@ -29,7 +29,7 @@ def __init__(self,base,exp,libs,RUNenv,target):
         self.bld = "/apps/"+self.e+"/exec"
         self.mkmf = True
         self.target = target
-        self.template = "/apps/mkmf/templates/hpcme-intel21.mk"
+        self.template = mkTemplate
 
         # Set up spack loads in RUN commands in dockerfile
         if RUNenv == "":
@@ -54,11 +54,18 @@ def __init__(self,base,exp,libs,RUNenv,target):
         " && mkmf_template="+self.template+ " \\ \n"]
         self.d=open("Dockerfile","w")
         self.d.writelines("FROM "+self.base+" \n")
+        if self.base == "ecpe4s/noaa-intel-prototype:2023.09.25":
+            self.prebuild = '''RUN
+            '''
+            self.postbuild =
''' + ''' + self.secondstage = ''' + ''' def writeDockerfileCheckout(self, cScriptName, cOnDisk): """ Brief: writes to the checkout part of the Dockerfile and sets up the compile - Param: + Param: - self : The dockerfile object - cScriptName : The name of the checkout script in the container - cOnDisk : The relative path to the checkout script on disk @@ -67,14 +74,18 @@ def writeDockerfileCheckout(self, cScriptName, cOnDisk): self.d.write("COPY " + cOnDisk +" "+ self.checkoutPath +" \n") self.d.write("RUN chmod 744 "+self.src+"/checkout.sh \n") self.d.writelines(self.setup) - self.d.write(" && "+self.src+"/checkout.sh \n") + # Check if there is a RUNenv. If there is not, then do not use the && + if self.setup == ["RUN \\ \n"]: + self.d.write(self.src+"/checkout.sh \n") + else: + self.d.write(" && "+self.src+"/checkout.sh \n") # Clone mkmf self.d.writelines(self.mkmfclone) def writeDockerfileMakefile(self, makefileOnDiskPath, linklineonDiskPath): """ Brief: Copies the Makefile into the bldDir in the dockerfile - Param: + Param: - self : The dockerfile object - makefileOnDiskPath : The path to Makefile on the local disk - linklineonDiskPath : The path to the link line script on the local disk @@ -98,8 +109,8 @@ def writeDockerfileMakefile(self, makefileOnDiskPath, linklineonDiskPath): def writeDockerfileMkmf(self, c): """ - Brief: Adds components to the build part of the Dockerfile - Param: + Brief: Adds components to the build part of the Dockerfile + Param: - self : The dockerfile object - c : Component from the compile yaml """ @@ -141,34 +152,38 @@ def writeDockerfileMkmf(self, c): def writeRunscript(self,RUNenv,containerRun,runOnDisk): """ Brief: Writes a runscript to set up spack loads/environment - in order to run the executable in the container; + in order to run the executable in the container; runscript copied into container - Param: + Param: - self : The dockerfile object - - RUNEnv : The commands that have to be run at + - RUNEnv : The commands that have to be run at the beginning of a RUN in the dockerfile - - containerRun : The container platform used with `exec` - to run the container; apptainer + - containerRun : The container platform used with `exec` + to run the container; apptainer or singularity used - runOnDisk : The path to the run script on the local disk """ #create runscript in tmp - create spack environment, install necessary packages, - self.createscript = ["#!/bin/bash \n", - "export BACKUP_LD_LIBRARY_PATH=$LD_LIBRARY_PATH\n", - "# Set up spack loads\n", - RUNenv[0]+"\n"] - with open(runOnDisk,"w") as f: - f.writelines(self.createscript) - f.write("# Load spack packages\n") - for env in RUNenv[1:]: - f.write(env+"\n") - - if self.l: - for l in self.l: - self.spackloads = "spack load "+l+"\n" - f.write(self.spackloads) - - f.write("export LD_LIBRARY_PATH=$BACKUP_LD_LIBRARY_PATH:$LD_LIBRARY_PATH\n") + if isinstance(RUNenv, list): + self.createscript = ["#!/bin/bash \n", + "export BACKUP_LD_LIBRARY_PATH=$LD_LIBRARY_PATH\n", + "# Set up spack loads\n", + RUNenv[0]+"\n"] + with open(runOnDisk,"w") as f: + f.writelines(self.createscript) + f.write("# Load spack packages\n") + for env in RUNenv[1:]: + f.write(env+"\n") + if self.l: + for l in self.l: + self.spackloads = "spack load "+l+"\n" + f.write(self.spackloads) + f.write("export LD_LIBRARY_PATH=$BACKUP_LD_LIBRARY_PATH:$LD_LIBRARY_PATH\n") + else: + self.createscript = ["#!/bin/bash \n"] + with open(runOnDisk,"w") as f: + f.writelines(self.createscript) + with open(runOnDisk,"a") as f: f.write("# Run 
executable\n") f.write(self.bld+"/"+self.e+".x\n") #copy runscript into container in dockerfile @@ -180,21 +195,33 @@ def writeRunscript(self,RUNenv,containerRun,runOnDisk): self.d.write(" && ln -sf "+self.bld+"/execrunscript.sh "+"/apps/bin/execrunscript.sh \n") #finish the dockerfile self.d.writelines(self.setup) - self.d.write(" && cd "+self.bld+" && make -j 4 "+self.target.getmakeline_add()+"\n") + # Check if there is a RUNenv. If there is not, then do not use the && + if self.setup == ["RUN \\ \n"]: + self.d.write(" cd "+self.bld+" && make -j 4 "+self.target.getmakeline_add()+"\n") + else: + self.d.write(" && cd "+self.bld+" && make -j 4 "+self.target.getmakeline_add()+"\n") self.d.write('ENTRYPOINT ["/bin/bash"]') self.d.close() - def build(self,containerBuild,containerRun): + def createBuildScript(self,containerBuild,containerRun): """ - Brief: Builds the container image for the model - Param: + Brief: Writes out the build commands for the created dockerfile in a script, + which builds the dockerfile and then converts the format to a singularity image file. + Param: - self : The dockerfile object - - containerBuild : The tool used to build the container; + - containerBuild : The tool used to build the container; docker or podman used - - containerRun : The container platform used with `exec` to + - containerRun : The container platform used with `exec` to run the container; apptainer or singularity used """ - os.system(containerBuild+" build -f Dockerfile -t "+self.e+":"+self.target.gettargetName()) - os.system("rm -f "+self.e+".tar "+self.e+".sif") - os.system(containerBuild+" save -o "+self.e+"-"+self.target.gettargetName()+".tar localhost/"+self.e+":"+self.target.gettargetName()) - os.system(containerRun+" build --disable-cache "+self.e+"-"+self.target.gettargetName()+".sif docker-archive://"+self.e+"-"+self.target.gettargetName()+".tar") + self.userScript = ["#!/bin/bash\n"] + self.userScript.append(containerBuild+" build -f Dockerfile -t "+self.e+":"+self.target.gettargetName()+"\n") + self.userScript.append("rm -f "+self.e+".tar "+self.e+".sif\n") + self.userScript.append(containerBuild+" save -o "+self.e+"-"+self.target.gettargetName()+".tar localhost/"+self.e+":"+self.target.gettargetName()+"\n") + self.userScript.append(containerRun+" build --disable-cache "+self.e+"-"+self.target.gettargetName()+".sif docker-archive://"+self.e+"-"+self.target.gettargetName()+".tar\n") + self.userScriptFile = open("createContainer.sh","w") + self.userScriptFile.writelines(self.userScript) + self.userScriptFile.close() + os.chmod("createContainer.sh", 0o744) + self.userScriptPath = os.getcwd()+"/createContainer.sh" + diff --git a/fre/make/gfdlfremake/fremake b/fre/make/gfdlfremake/fremake deleted file mode 100755 index 6c245424..00000000 --- a/fre/make/gfdlfremake/fremake +++ /dev/null @@ -1,249 +0,0 @@ -#!/usr/bin/python3 -## \date 2023 -## \author Tom Robinson -## \author Dana Singh -## \description fremake is used to create and run a code checkout script and compile a model. - -import subprocess -import os -import yaml -import argparse -import logging -from . 
import targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal -from multiprocessing.dummy import Pool - -## Add in cli options -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Fremake is used to create a code checkout script to compile models for FRE experiments.') - parser.add_argument("-y", - "--yamlfile", - type=str, help="Experiment yaml compile FILE",required=True) - parser.add_argument("-p", - "--platform", - nargs='*', - type=str, help="Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions",required=True) - parser.add_argument("-t", - "--target", - nargs='*', - type=str, help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments.\n\nPredefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). Any number of targets can be used.",required=True) - parser.add_argument("-f", - "--force-checkout", - action="store_true", - help="Force checkout to get a fresh checkout to source directory in case the source directory exists") - parser.add_argument("-F", - "--force-compile", - action="store_true", - help="Force compile to compile a fresh executable in case the executable directory exists") - parser.add_argument("-K", - "--keep-compiled", - action="store_true", - help="Keep compiled files in the executable directory for future use") - parser.add_argument("--no-link", - action="store_true", - help="Do not link the executable") - parser.add_argument("-E", - "--execute", - action="store_true", - help="Execute all the created scripts in the current session") - parser.add_argument("-n", - "--parallel", - type=int, - metavar='', default=1, - help="Number of concurrent model compiles (default 1)") - parser.add_argument("-j", - "--jobs", - type=int, - metavar='', default=4, - help="Number of jobs to run simultaneously. Used for make -jJOBS and git clone recursive --jobs=JOBS") - parser.add_argument("-npc", - "--no-parallel-checkout", - action="store_true", - help="Use this option if you do not want a parallel checkout. 
The default is to have parallel checkouts.") - parser.add_argument("-s", - "--submit", - action="store_true", - help="Submit all the created scripts as batch jobs") - parser.add_argument("-v", - "--verbose", - action="store_true", - help="Get verbose messages (repeat the option to increase verbosity level)") - parser.add_argument("-w NUM", - "--walltime=NUM", - type=int, metavar='', - help="Maximum wall time NUM (in minutes) to use") - parser.add_argument("--mail-list=STRING", - action="store_true", - help="Email the comma=separated STRING list of emails rather than \$USER\@noaa.gov") - - ## Parse the arguments - args = parser.parse_args() - - ## Define arguments as variables - yml = args.yamlfile - ps = args.platform - ts = args.target - nparallel = args.parallel - jobs = str(args.jobs) - pcheck = args.no_parallel_checkout - - ## Define parallelism addition for checkouts - # If pcheck is defined, no parallel checkouts - # If pcheck is not defined, default is to have parallel checkouts - if pcheck: - pc = "" - else: - pc = " &" - - ## Define operation of option(s) above - if args.verbose: - logging.basicCOnfig(level=logging.INFO) - else: - logging.basicConfig(level=logging.ERROR) - -#### Main -srcDir="src" -checkoutScriptName = "checkout.sh" -baremetalRun = False # This is needed if there are no bare metal runs - -## Split and store the platforms and targets in a list -plist = args.platform -tlist = args.target - -## Get the variables in the model yaml -freVars = varsfre.frevars(yml) - -## Open the yaml file and parse as fremakeYaml -modelYaml = yamlfre.freyaml(yml,freVars) -fremakeYaml = modelYaml.getCompileYaml() - -## Error checking the targets -for targetName in tlist: - target = targetfre.fretarget(targetName) - -## Loop through the platforms specified on the command line -## If the platform is a baremetal platform, write the checkout script and run it once -## This should be done separately and serially because bare metal platforms should all be using -## the same source code. -for platformName in plist: - if modelYaml.platforms.hasPlatform(platformName): - pass - else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) - (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) - - ## Create the checkout script - if iscontainer == False: - ## Create the source directory for the platform - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" - if not os.path.exists(srcDir): - os.system("mkdir -p " + srcDir) - if not os.path.exists(srcDir+"/checkout.sh"): - freCheckout = checkout.checkout("checkout.sh",srcDir) - freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) - freCheckout.finish(pc) - -## TODO: Options for running on login cluster? 
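# Editor's aside: the old run() in buildBaremetal passed "|" and "tee" as literal
# arguments to subprocess.run, which never actually piped anything; the patched
# buildBaremetal.run() earlier in this diff builds a real two-process pipeline.
# A minimal, self-contained sketch of that pattern (the run_and_log name and the
# script/log paths are illustrative, not from the repo), assuming a POSIX system
# with `tee` on PATH and an executable ./compile.sh:
import subprocess

def run_and_log(script="./compile.sh", log="log.compile"):
    p1 = subprocess.Popen([script], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    p2 = subprocess.Popen(["tee", log], stdin=p1.stdout)
    p1.stdout.close()  # let p1 receive SIGPIPE if tee exits first
    p2.communicate()   # drain the pipe; tee mirrors output to stdout and the log
    p1.wait()          # then inspect the compile script's exit status
    return p1.returncode
# Doing the tee in Python rather than via shell=True keeps the compile script's
# return code observable, which the shell pipeline version would have masked.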
- freCheckout.run() - -fremakeBuildList = [] -## Loop through platforms and targets -for platformName in plist: - for targetName in tlist: - target = targetfre.fretarget(targetName) - if modelYaml.platforms.hasPlatform(platformName): - pass - else: - raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) - (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) - - ## Make the source directory based on the modelRoot and platform - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" - - ## Check for type of build - if iscontainer == False: - baremetalRun = True - ## Make the build directory based on the modelRoot, the platform, and the target - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + target.gettargetName() + "/exec" - os.system("mkdir -p " + bldDir) - - ## Create the Makefile - freMakefile = makefilefre.makefile(exp = fremakeYaml["experiment"], - libs = fremakeYaml["baremetal_linkerflags"], - srcDir = srcDir, - bldDir = bldDir, - mkTemplatePath = mkTemplate) - - - # Loop through components and send the component name, requires, and overrides for the Makefile - for c in fremakeYaml['src']: - freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) - freMakefile.writeMakefile() - -## Create a list of compile scripts to run in parallel - fremakeBuild = buildBaremetal.buildBaremetal(exp = fremakeYaml["experiment"], - mkTemplatePath = mkTemplate, - srcDir = srcDir, - bldDir = bldDir, - target = target, - modules = modules, - modulesInit = modulesInit, - jobs = jobs) - - for c in fremakeYaml['src']: - fremakeBuild.writeBuildComponents(c) - fremakeBuild.writeScript() - fremakeBuildList.append(fremakeBuild) - ## Run the build - fremakeBuild.run() - else: -#################################### container stuff below ########################################################### - ## Run the checkout script -# image="hpc-me-intel:2021.1.1" - image="ecpe4s/noaa-intel-prototype:2023.09.25" - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" - tmpDir = "tmp/"+platformName - - ## Create the checkout script - freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir) - freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) - freCheckout.finish(pc) - - ## Create the makefile -### Should this even be a separate class from "makefile" in makefilefre? 
~ ejs - freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], - libs = fremakeYaml["container_addlibs"], - srcDir = srcDir, - bldDir = bldDir, - mkTemplatePath = mkTemplate, - tmpDir = tmpDir) - - # Loop through components and send the component name and requires for the Makefile - for c in fremakeYaml['src']: - freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) - freMakefile.writeMakefile() - - ## Build the dockerfile - dockerBuild = buildDocker.container(base = image, - exp = fremakeYaml["experiment"], - libs = fremakeYaml["container_addlibs"], - RUNenv = RUNenv, - target = target) - - dockerBuild.writeDockerfileCheckout("checkout.sh", tmpDir+"/checkout.sh") - dockerBuild.writeDockerfileMakefile(freMakefile.getTmpDir() + "/Makefile", freMakefile.getTmpDir()+"/linkline.sh") - - for c in fremakeYaml['src']: - dockerBuild.writeDockerfileMkmf(c) - - dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") - - ## Run the dockerfile; build the container - dockerBuild.build(containerBuild,containerRun) - - #freCheckout.cleanup() - #buildDockerfile(fremakeYaml,image) - -if baremetalRun: - if __name__ == '__main__': - pool = Pool(processes=nparallel) # Create a multiprocessing Pool - pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) # process data_inputs iterable with pool diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py index fe8924f9..69820d46 100644 --- a/fre/make/gfdlfremake/platformfre.py +++ b/fre/make/gfdlfremake/platformfre.py @@ -23,15 +23,6 @@ def __init__(self,platforminfo): p["compiler"] except: raise Exception("You must specify a compiler in your "+p["name"]+" platform in the file "+fname+"\n") - ## Check for the Fortran (fc) and C (cc) compilers - try: - p["fc"] - except: - raise Exception("You must specify the name of the Fortran compiler as fc on the "+p["name"]+" platform in the file "+fname+"\n") - try: - p["cc"] - except: - raise Exception("You must specify the name of the Fortran compiler as cc on the "+p["name"]+" platform in the file "+fname+"\n") ## Check for modules to load try: p["modules"] @@ -52,9 +43,12 @@ def __init__(self,platforminfo): p["container"] except: p["container"] = False - p["RUNenv"] = "" + p["RUNenv"] = [""] p["containerBuild"] = "" p["containerRun"] = "" + p["containerViews"] = False + p["containerBase"] = "" + p["container2step"] = "" if p["container"]: ## Check the container builder try: @@ -63,7 +57,20 @@ def __init__(self,platforminfo): raise Exception("You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the file "+fname+"\n") if p["containerBuild"] != "podman" and p["containerBuild"] != "docker": raise ValueError("Container builds only supported with docker or podman, but you listed "+p["containerBuild"]+"\n") - ## Check for container environment set up for RUN commands + print (p["containerBuild"]) +## Check for container environment set up for RUN commands + try: + p["containerBase"] + except NameError: + print("You must specify the base container you wish to use to build your application") + try: + p["containerViews"] + except: + p["containerViews"] = False + try: + p["container2step"] + except: + p["container2step"] = "" try: p["RUNenv"] except: @@ -75,14 +82,11 @@ def __init__(self,platforminfo): raise Exception("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the file "+fname+"\n") if p["containerRun"] != "apptainer" and 
p["containerRun"] != "singularity": raise ValueError("Container builds only supported with apptainer, but you listed "+p["containerRun"]+"\n") - ## set the location of the mkTemplate. - ## In a container, it uses the hpc-me template cloned from mkmf - p["mkTemplate"] = "/apps/mkmf/templates/hpcme-intel21.mk" else: try: p["mkTemplate"] except: - raise ValueError("The non-container platform "+p["name"]+" must specify a mkTemplate \n") + raise ValueError("The platform "+p["name"]+" must specify a mkTemplate \n") def hasPlatform(self,name): """ @@ -105,4 +109,31 @@ def getPlatformFromName(self,name): """ for p in self.yaml: if p["name"] == name: - return (p["compiler"], p["modules"], p["modulesInit"], p["fc"], p["cc"], p["modelRoot"],p["container"], p["mkTemplate"],p["containerBuild"], p["containerRun"], p["RUNenv"]) + return p + def getContainerInfoFromName(self,name): + """ + Brief: Return a tuple of the container information + """ + for p in self.yaml: + if p["name"] == name: + return (p["container"], \ + p["RUNenv"], \ + p["containerBuild"], \ + p["containerRun"], \ + p["containerViews"], \ + p["containerBase"], \ + p["container2step"]) + def isContainer(self, name): + """ + Brief: Returns boolean of if this platform is a container based on the name + """ + for p in self.yaml: + if p["name"] == name: + return p["container"] + def getContainerImage(self,name): + """ + Brief: Returns the image name from the platform + """ + for p in self.yaml: + if p["name"] == name: + return p["containerBase"] diff --git a/fre/make/gfdlfremake/schema.json b/fre/make/gfdlfremake/schema.json deleted file mode 100644 index 751bb9db..00000000 --- a/fre/make/gfdlfremake/schema.json +++ /dev/null @@ -1,201 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-06/schema#", - "type": "object", - "additionalProperties": false, - "properties": { - "name": { - "description": "The name of the experiment", - "type": "string" - }, - "platform": { - "description": "The platforms listed in the command", - "type": "string" - }, - "target": { - "description": "The targets listed in the command", - "type": "string" - }, - "build": { - "type": "object", - "additionalProperties": false, - "properties": { - "compileYaml": { - "description": "Path to the compile yaml.", - "type": "string" - }, - "platformYaml": { - "description": "Path to the platform yaml.", - "type": "string" - } - } - }, - "compile": { - "description": "The source code descriptions", - "$ref": "#/definitions/Compile" - }, - "platforms": { - "description": "FRE platforms", - "type": "array", - "items": {"$ref": "#/definitions/Platform"} - } - }, - "definitions": { - "Compile": { - "type": "object", - "properties": { - "experiment": { - "description": "The name of the model", - "type": "string" - }, - "container_addlibs": { - "description": "Libraries and packages needed for linking in the container", - "type": ["array","string","null"] - }, - "baremetal_linkerflags": { - "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", - "type": ["array","string","null"] - }, - "src": { - "type": "array", - "items": {"$ref": "#/definitions/Src"} - } - } - }, - "Src": { - "type": "object", - "properties": { - "component": { - "description": "The name of the model component", - "type": "string" - }, - "repo": { - "anyOf": [ - { - "description": "The URL of the code repository", - "type": "array", - "items": { - "type": "string", - "format": "uri", - "qt-uri-protocols": [ - "https" - ], - "qt-uri-extensions": [ - ".git" - ] - } - 
}, - { - "description": "The URL of the code repository", - "type": "string", - "format": "uri", - "qt-uri-protocols": [ - "https" - ], - "qt-uri-extensions": [ - ".git" - ] - } - ] - }, - "cppdefs": { - "description": "String of CPPDEFs to include in compiling the component", - "type": "string" - }, - "branch": { - "anyOf": [ - { - "description": "The version of code to clone", - "type": "array", - "items": { - "type": "string" - } - }, - { - "description": "The version of code to clone", - "type": "string" - } - ] - }, - "otherFlags": { - "description": "String of Include flags necessary to retrieve other code needed", - "type": "string" - }, - "requires": { - "description": "list of componets that this component depends on", - "type": "array", - "items": {"type": "string"} - }, - "paths": { - "description": "A list of the paths in the component to compile", - "type": "array", - "items": {"type": "string"} - }, - "doF90Cpp": { - "description": "True if the preprocessor needs to be run", - "type": "boolean" - }, - "makeOverrides": { - "description": "Overrides openmp target for MOM6", - "type": "string" - } - } - }, - "Platform": { - "type": "object", - "properties": { - "name": { - "description": "The name of the platform", - "type": "string" - }, - "compiler": { - "description": "The compiler used to build the model", - "type": "string" - }, - "modulesInit": { - "description": "Array of commands to run before loading modules", - "type": "array", - "items": {"type": "string"} - }, - "modules": { - "description": "List (array) of modules to load", - "type": "array", - "items": { - "type": "string" - } - }, - "fc": { - "description": "The Fortran compiler", - "type": "string" - }, - "cc": { - "description": "The C compiler", - "type": "string" - }, - "mkTemplate": { - "description": "Path to the mk template file", - "type": "string" - }, - "modelRoot": { - "description": "Path to the root for all model install files", - "type": "string" - }, - "RUNenv": { - "description": "Commands needed at the beginning of a RUN in dockerfile", - "type": ["array","string"] - }, - "container": { - "description": "True/False if using container to compile", - "type": "boolean" - }, - "containerBuild": { - "description": "Program used to build the container", - "type": "string" - }, - "containerRun": { - "description": "Program used to run the container", - "type": "string" - } - } - } - } -} diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py index 6f638bbb..0683ad4e 100644 --- a/fre/make/gfdlfremake/yamlfre.py +++ b/fre/make/gfdlfremake/yamlfre.py @@ -1,5 +1,6 @@ import os import json +from pathlib import Path import yaml from jsonschema import validate, ValidationError, SchemaError from . import platformfre @@ -52,14 +53,14 @@ def __init__(self,compileinfo): """ # compile information from the combined yaml self.yaml = compileinfo - - ## Check the yaml for required things + # Check if self.yaml is None + if self.yaml is None: + raise ValueError("The provided compileinfo is None. 
It must be a valid dictionary.") ## Check for required experiment name try: self.yaml["experiment"] - except: - print("You must set an experiment name to compile \n") - raise + except KeyError: + raise KeyError("You must set an experiment name to compile \n") ## Check for optional libraries and packages for linking in container try: self.yaml["container_addlibs"] @@ -170,14 +171,15 @@ def __init__(self,combinedyaml,v): #get platform info self.platformsdict = self.freyaml.get("platforms") + print(self.platformsdict) self.platforms = platformfre.platforms(self.platformsdict) self.platformsyaml = self.platforms.getPlatformsYaml() #self.freyaml.update(self.platformsyaml) ## VALIDATION OF COMBINED YAML FOR COMPILATION - fremake_package_dir = os.path.dirname(os.path.abspath(__file__)) - schema_path = os.path.join(fremake_package_dir, 'schema.json') + fremake_package_dir = Path(__file__).resolve().parents[2] + schema_path = os.path.join(fremake_package_dir, 'gfdl_msd_schemas', 'FRE', 'fre_make.json') with open(schema_path, 'r') as f: s = f.read() schema = json.loads(s) diff --git a/fre/make/gfdlfremake/yamls/SHiELD_example/SHiELD.yaml b/fre/make/gfdlfremake/yamls/SHiELD_example/SHiELD.yaml deleted file mode 100644 index 9f063d0a..00000000 --- a/fre/make/gfdlfremake/yamls/SHiELD_example/SHiELD.yaml +++ /dev/null @@ -1,10 +0,0 @@ -platformYaml: platforms.yaml -compileYaml: compile.yaml -fv3_release: main -phy_release: main -fms_release: "2023.02" -drivers_release: main -coupler_release: "2023.02" -FMSincludes: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" -momIncludes: "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" -INTEL: intel-classic diff --git a/fre/make/gfdlfremake/yamls/SHiELD_example/compile.yaml b/fre/make/gfdlfremake/yamls/SHiELD_example/compile.yaml deleted file mode 100644 index a83bb1ce..00000000 --- a/fre/make/gfdlfremake/yamls/SHiELD_example/compile.yaml +++ /dev/null @@ -1,38 +0,0 @@ -experiment: shield_nh -compileInclude: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" -container_addlibs: ["bacio","sp","w3emc","w3nco"] -baremetal_addlibs: ["-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/bacio-2.4.1-wrykbu2/lib -lbacio_4", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/bacio-2.4.1-wrykbu2/lib -lbacio_8", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/sp-2.5.0-7bumbmx/lib64 -lsp_d", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/w3emc-2.10.0-zmuykep/lib64 -lw3emc_d", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/w3nco-2.4.1-76qm6h2/lib -lw3nco_d"] -src: - - component: "FMS" - repo: "https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-Duse_libMPI -Duse_netCDF -Duse_LARGEFILE -DHAVE_SCHED_GETAFFINITY -DINTERNAL_FILE_NML -DGFS_PHYS -DGFS_CONSTANTS -DHAVE_GETTID" - branch: "$(fms_release)" - - component: "SHiELD_physics" - requires: ["FMS"] - repo: "https://github.com/NOAA-GFDL/SHiELD_physics.git" - branch: "$(phy_release)" - paths: [SHiELD_physics/gsmphys, - SHiELD_physics/GFS_layer, - SHiELD_physics/IPD_layer] - cppdefs: "-Duse_libMPI -Duse_netCDF -DHAVE_SCHED_GETAFFINITY -DSPMD -Duse_LARGEFILE -DGFS_PHYS -DUSE_GFSL63 -DNEW_TAUCTMAX -DNEMS_GSM -DINTERNAL_FILE_NML -DMOIST_CAPPA -DUSE_COND" - otherFlags: "$(FMSincludes)" - - component: "fv3" - requires: ["FMS", "SHiELD_physics"] - repo: 
["https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git", - "https://github.com/NOAA-GFDL/atmos_drivers.git"] - cppdefs: "-Duse_libMPI -Duse_netCDF -DHAVE_SCHED_GETAFFINITY -DSPMD -Duse_LARGEFILE -DGFS_PHYS -DUSE_GFSL63 -DNEW_TAUCTMAX -DNEMS_GSM -DINTERNAL_FILE_NML -DMOIST_CAPPA -DUSE_COND" - branch: ["$(fv3_release)","$(drivers_release)"] - paths: [SHiELD_physics/FV3GFS/, - fv3/atmos_drivers/SHiELD/atmos_model.F90, - fv3/GFDL_atmos_cubed_sphere/driver/SHiELD/atmosphere.F90, - fv3/GFDL_atmos_cubed_sphere/tools/, - fv3/GFDL_atmos_cubed_sphere/model/, - fv3/GFDL_atmos_cubed_sphere/GFDL_tools/fv_diag_column.F90] - otherFlags: "$(FMSincludes)" - - component: "FMScoupler" - requires: ["FMS", "SHiELD_physics", "fv3"] - repo: "https://github.com/NOAA-GFDL/FMScoupler.git" - cppdefs: "-Duse_libMPI -Duse_netCDF -DHAVE_SCHED_GETAFFINITY -DSPMD -Duse_LARGEFILE -DGFS_PHYS -DUSE_GFSL63 -DNEW_TAUCTMAX -DNEMS_GSM -DINTERNAL_FILE_NML -DMOIST_CAPPA -DUSE_COND" - branch: "$(coupler_release)" - paths: ["FMScoupler/SHiELD/coupler_main.F90"] - otherFlags: "$(FMSincludes)" diff --git a/fre/make/gfdlfremake/yamls/SHiELD_example/platforms.yaml b/fre/make/gfdlfremake/yamls/SHiELD_example/platforms.yaml deleted file mode 100644 index 9f72043b..00000000 --- a/fre/make/gfdlfremake/yamls/SHiELD_example/platforms.yaml +++ /dev/null @@ -1,26 +0,0 @@ -platforms: - - name: ncrc5.intel - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2022.2.1","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: ${HOME}/fremake_canopy/SHiELDtest - - name: ncrc5.intel23 - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2023.1.0","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: ${HOME}/fremake_canopy/SHiELDtest - - name: hpcme.2023 - compiler: intel - RUNenv: [". 
/spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.12.1"] - modelRoot: /apps - fc: mpiifort - cc: mpiicc - container: True - containerBuild: "podman" - containerRun: "apptainer" diff --git a/fre/make/gfdlfremake/yamls/am5.yaml b/fre/make/gfdlfremake/yamls/am5.yaml deleted file mode 100644 index 4b7bf8d4..00000000 --- a/fre/make/gfdlfremake/yamls/am5.yaml +++ /dev/null @@ -1,6 +0,0 @@ -platformYaml: platforms.yaml -compileYaml: compile.yaml -release: f1a1r1 -INTEL: "intel-classic" -FMSincludes: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" -momIncludes: "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" diff --git a/fre/make/gfdlfremake/yamls/compile.yaml b/fre/make/gfdlfremake/yamls/compile.yaml deleted file mode 100644 index 5200599c..00000000 --- a/fre/make/gfdlfremake/yamls/compile.yaml +++ /dev/null @@ -1,66 +0,0 @@ -experiment: "am5" -container_addlibs: -baremetal_linkerflags: -src: - - component: "FMS" - repo: "https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-DINTERNAL_FILE_NML -Duse_libMPI -Duse_netCDF" - branch: "2022.01" - cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF" - otherFlags: "$(FMSincludes)" - - component: "am5_phys" - requires: ["FMS"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/am5_phys.git" - branch: "2022.01" - otherFlags: "$(FMSincludes)" - - component: "GFDL_atmos_cubed_sphere" - requires: ["FMS", "am5_phys"] - repo: "https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML" - branch: "2022.01" - paths: ["GFDL_atmos_cubed_sphere/driver/GFDL", - "GFDL_atmos_cubed_sphere/model", - "GFDL_atmos_cubed_sphere/driver/SHiELD/cloud_diagnosis.F90", - "GFDL_atmos_cubed_sphere/driver/SHiELD/gfdl_cloud_microphys.F90", - "GFDL_atmos_cubed_sphere/tools", - "GFDL_atmos_cubed_sphere/GFDL_tools"] - otherFlags: "$(FMSincludes)" - - component: "atmos_drivers" - requires: ["FMS", "am5_phys", "GFDL_atmos_cubed_sphere"] - repo: "https://github.com/NOAA-GFDL/atmos_drivers.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE" - branch: "2022.01" - paths: ["atmos_drivers/coupled"] - otherFlags: "$(FMSincludes)" - - component: "ice_sis" - requires: ["FMS", "ice_param", "mom6"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/ice_sis.git" - branch: "2021.02" - otherFlags: "$(FMSincludes) $(momIncludes)" - - component: "ice_param" - repo: "https://github.com/NOAA-GFDL/ice_param.git" - cppdefs: "-Duse_yaml -Duse_libMPI -Duse_netCDF" - branch: "2021.02" - requires: ["FMS", "mom6"] - otherFlags: "$(FMSincludes) $(momIncludes)" - - component: "land_lad2" - requires: ["FMS"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/land_lad2.git" - branch: "2022.01" - branch: "land_lad2_2021.02" - doF90Cpp: True - cppdefs: "-DINTERNAL_FILE_NML" - otherFlags: "$(FMSincludes)" - - component: "mom6" - requires: ["FMS"] - paths: ["mom6/MOM6-examples/src/MOM6/config_src/dynamic", "mom6/MOM6-examples/src/MOM6/config_src/coupled_driver", "mom6/MOM6-examples/src/MOM6/src/*/", "mom6/MOM6-examples/src/MOM6/src/*/*/", "mom6/ocean_BGC/generic_tracers", "mom6/ocean_BGC/mocsy/src"] - branch: ["2021.02","dev/gfdl/2018.04.06"] - repo: ["https://github.com/NOAA-GFDL/ocean_BGC.git","https://github.com/NOAA-GFDL/MOM6-examples.git"] - makeOverrides: 'OPENMP=""' - otherFlags: "$(FMSincludes) $(momIncludes)" - - component: "FMScoupler" - paths: ["FMScoupler/full", "FMScoupler/shared"] - repo: "https://github.com/NOAA-GFDL/FMScoupler.git" - branch: "2022.01" - requires: ["FMS", "atmos_drivers", "am5_phys", "land_lad2", 
"ice_sis", "ice_param", "mom6"] - otherFlags: "$(FMSincludes) $(momIncludes)" diff --git a/fre/make/gfdlfremake/yamls/platforms.yaml b/fre/make/gfdlfremake/yamls/platforms.yaml deleted file mode 100644 index 02b7d222..00000000 --- a/fre/make/gfdlfremake/yamls/platforms.yaml +++ /dev/null @@ -1,26 +0,0 @@ -platforms: - - name: ncrc5.intel - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2022.2.1","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: ${HOME}/fremake_canopy/test - - name: ncrc5.intel23 - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2023.1.0","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: ${HOME}/fremake_canopy/test - - name: hpcme.2023 - compiler: intel - RUNenv: [". /spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"] - modelRoot: /apps - fc: mpiifort - cc: mpiicc - container: True - containerBuild: "podman" - containerRun: "apptainer" diff --git a/fre/make/gfdlfremake/yamls/schema.json b/fre/make/gfdlfremake/yamls/schema.json deleted file mode 120000 index c92d7461..00000000 --- a/fre/make/gfdlfremake/yamls/schema.json +++ /dev/null @@ -1 +0,0 @@ -../schema.json \ No newline at end of file diff --git a/fre/make/runFremake.py b/fre/make/run_fremake_script.py similarity index 76% rename from fre/make/runFremake.py rename to fre/make/run_fremake_script.py index ffe2ec96..0d0ea534 100644 --- a/fre/make/runFremake.py +++ b/fre/make/run_fremake_script.py @@ -10,12 +10,13 @@ from multiprocessing.dummy import Pool from pathlib import Path import click +import subprocess import fre.yamltools.combine_yamls as cy from .gfdlfremake import ( targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal ) -def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): +def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,execute,verbose): ''' run fremake via click''' yml = yamlfile name = yamlfile.split(".")[0] @@ -71,15 +72,12 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb raise ValueError(f'{platformName} does not exist in ' f'{modelYaml.combined.get("compile").get("platformYaml")}') - ( compiler, modules, modulesInit, - fc, cc, modelRoot, iscontainer, - mkTemplate, containerBuild, ContainerRun, - RUNenv ) = modelYaml.platforms.getPlatformFromName(platformName) + platform = modelYaml.platforms.getPlatformFromName(platformName) ## Create the checkout script - if not iscontainer: + if not platform["container"]: ## Create the source directory for the platform - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + srcDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/src" if not os.path.exists(srcDir): os.system("mkdir -p " + srcDir) if not os.path.exists(srcDir+"/checkout.sh"): @@ -87,6 +85,7 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) freCheckout.finish(pc) os.chmod(srcDir+"/checkout.sh", 0o744) + print("\nCheckout script created at "+ srcDir + "/checkout.sh 
\n") ## TODO: Options for running on login cluster? freCheckout.run() @@ -99,19 +98,16 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb pass else: raise ValueError (platformName + " does not exist in " + modelYaml.platformsfile) - ( compiler, modules, modulesInit, - fc, cc, modelRoot, iscontainer, - mkTemplate, containerBuild, containerRun, - RUNenv ) = modelYaml.platforms.getPlatformFromName(platformName) + platform = modelYaml.platforms.getPlatformFromName(platformName) ## Make the source directory based on the modelRoot and platform - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + srcDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/src" ## Check for type of build - if not iscontainer: + if not platform["container"]: baremetalRun = True ## Make the build directory based on the modelRoot, the platform, and the target - bldDir = f'{modelRoot}/{fremakeYaml["experiment"]}/' + \ + bldDir = f'{platform["modelRoot"]}/{fremakeYaml["experiment"]}/' + \ f'{platformName}-{target.gettargetName()}/exec' os.system("mkdir -p " + bldDir) @@ -120,50 +116,52 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb libs = fremakeYaml["baremetal_linkerflags"], srcDir = srcDir, bldDir = bldDir, - mkTemplatePath = mkTemplate) + mkTemplatePath = platform["mkTemplate"]) # Loop through components, send component name/requires/overrides for Makefile for c in fremakeYaml['src']: freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) + print("\nMakefile created at " + bldDir + "/Makefile" + "\n") freMakefile.writeMakefile() ## Create a list of compile scripts to run in parallel fremakeBuild = buildBaremetal.buildBaremetal(exp = fremakeYaml["experiment"], - mkTemplatePath = mkTemplate, + mkTemplatePath = platform["mkTemplate"], srcDir = srcDir, bldDir = bldDir, target = target, - modules = modules, - modulesInit = modulesInit, + modules = platform["modules"], + modulesInit = platform["modulesInit"], jobs = jobs) for c in fremakeYaml['src']: fremakeBuild.writeBuildComponents(c) fremakeBuild.writeScript() fremakeBuildList.append(fremakeBuild) - ## Run the build - fremakeBuild.run() + ## Run the build if --execute option given, otherwise print out compile script path + if execute: + fremakeBuild.run() + else: + print("Compile script created at "+ bldDir+"/compile.sh\n\n") else: ###################### container stuff below ####################################### ## Run the checkout script - # image="hpc-me-intel:2021.1.1" - image="ecpe4s/noaa-intel-prototype:2023.09.25" - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" + image=modelYaml.platforms.getContainerImage(platformName) + srcDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/src" + bldDir = platform["modelRoot"] + "/" + fremakeYaml["experiment"] + "/exec" tmpDir = "tmp/"+platformName - ## Create the checkout script freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir) freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) freCheckout.finish(pc) - ## Create the makefile ### Should this even be a separate class from "makefile" in makefilefre? 
~ ejs freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], libs = fremakeYaml["container_addlibs"], srcDir = srcDir, bldDir = bldDir, - mkTemplatePath = mkTemplate, + mkTemplatePath = platform["mkTemplate"], tmpDir = tmpDir) # Loop through components and send the component name and requires for the Makefile @@ -175,9 +173,9 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb dockerBuild = buildDocker.container(base = image, exp = fremakeYaml["experiment"], libs = fremakeYaml["container_addlibs"], - RUNenv = RUNenv, - target = target) - + RUNenv = platform["RUNenv"], + target = target, + mkTemplate = platform["mkTemplate"]) dockerBuild.writeDockerfileCheckout("checkout.sh", tmpDir+"/checkout.sh") dockerBuild.writeDockerfileMakefile(freMakefile.getTmpDir() + "/Makefile", freMakefile.getTmpDir() + "/linkline.sh") @@ -185,28 +183,34 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb for c in fremakeYaml['src']: dockerBuild.writeDockerfileMkmf(c) - dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") + dockerBuild.writeRunscript(platform["RUNenv"],platform["containerRun"],tmpDir+"/execrunscript.sh") + + # Create build script for container + dockerBuild.createBuildScript(platform["containerBuild"], platform["containerRun"]) + print("Container build script created at "+dockerBuild.userScriptPath+"\n\n") - ## Run the dockerfile; build the container - dockerBuild.build(containerBuild,containerRun) + # Execute if flag is given + if execute: + subprocess.run(args=[dockerBuild.userScriptPath], check=True) #freCheckout.cleanup() #buildDockerfile(fremakeYaml,image) if baremetalRun: if __name__ == '__main__': - # Create a multiprocessing Pool - pool = Pool(processes=nparallel) - # process data_inputs iterable with pool - pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) + if execute: + # Create a multiprocessing Pool + pool = Pool(processes=nparallel) + # process data_inputs iterable with pool + pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) @click.command() -def _fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): +def _fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,execute,verbose): ''' Decorator for calling _fremake_run - allows the decorated version of the function to be separate from the undecorated version ''' - return fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose) + return fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,execute,verbose) if __name__ == "__main__": fremake_run() diff --git a/fre/make/tests/AM5_example/yaml_include/platforms.yaml b/fre/make/tests/AM5_example/yaml_include/platforms.yaml index 60d1aad2..0c8bac45 100644 --- a/fre/make/tests/AM5_example/yaml_include/platforms.yaml +++ b/fre/make/tests/AM5_example/yaml_include/platforms.yaml @@ -23,4 +23,6 @@ platforms: cc: mpiicc container: True containerBuild: "podman" - containerRun: "apptainer" + containerRun: "apptainer" + containerBase: "ecpe4s/noaa-intel-prototype:2023.09.25" + mkTemplate: "/apps/mkmf/templates/hpcme-intel21.mk" diff --git a/fre/make/tests/ESM4_example/compile.yaml b/fre/make/tests/ESM4_example/compile.yaml deleted file mode 100644 index ee51658d..00000000 --- a/fre/make/tests/ESM4_example/compile.yaml +++ /dev/null @@ -1,124 +0,0 @@ -compile: - experiment: "esm4" - container_addlibs: - baremetal_linkerflags: - src: - - component: "FMS" - repo: 
"https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-Duse_libMPI -Duse_netCDF -DMAXFIELDMETHODS_=500" - branch: *FMS_GIT_TAG - - component: "atmos_phys" - requires: ["FMS"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/atmos_phys.git" - branch: *ATM_PHYS_GIT_TAG - otherFlags: *FMSincludes - - component: "atmos_dyn" - requires: ["FMS", "atmos_phys"] - repo: "https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML" - otherFlags: *FMSincludes - branch: *ATM_FV3_GIT_TAG - paths: [ "atmos_dyn/driver/GFDL", - "atmos_dyn/model", - "atmos_dyn/model_nh_null", - "atmos_dyn/GFDL_tools", - "atmos_dyn/driver/SHiELD/cloud_diagnosis.F90", - "atmos_dyn/driver/SHiELD/gfdl_cloud_microphys.F90", - "atmos_dyn/tools" ] - - component: "atmos_drivers" - requires: ["FMS", "atmos_phys", "atmos_dyn"] - repo: "https://github.com/NOAA-GFDL/atmos_drivers.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE" - branch: *ATM_DRV_GIT_TAG - otherFlags: *FMSincludes - paths: ["atmos_drivers/coupled"] - - component: "lm4p" - requires: ["FMS"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/lm4p.git" - branch: *LAND_GIT_TAG - cppdefs: "-DINTERNAL_FILE_NML" - otherFlags: *FMSincludes - - component: "mom6" - requires: ["FMS"] - repo: ["https://github.com/NOAA-GFDL/MOM6-examples.git", - "https://github.com/NOAA-GFDL/ocean_BGC.git" ] - branch: [ "dev/gfdl", *OCEAN_BGC_GIT_TAG ] # cant use property for mom6 since its a commit hash instead of a branch - otherFlags: !join [ *FMSincludes, " ", *MOMincludes ] - additionalInstructions: | - pushd mom6/MOM6-examples - git checkout 40e3937 # this is just the value of MOM6_GIT_TAG property, can't seem to use variable - popd - # this step might be covered by initial checkout since the default is recursive - git submodule update --recursive --init mom6/MOM6-examples/src/MOM6 mom6/MOM6-examples/src/SIS2 mom6/MOM6-examples/src/icebergs - # checkout dev/gfdl on the icebergs submodule - pushd mom6/MOM6-examples/src/icebergs - git checkout dev/gfdl - popd - # this is different than the MOM6_GIT_TAG above since its pulling a submodule not the whole repo - if [[ $MOM6_GIT_FIX ]]; then - echo WARNING: Checking out MOM6_GIT_FIX, set to: $MOM6_GIT_FIX - pushd mom6/MOM6-examples/src/MOM6/ - git checkout $MOM6_GIT_FIX - popd - fi - if [[ $SIS2_GIT_FIX ]]; then - echo WARNING: Checking out SIS2_GIT_FIX, set to: $SIS2_GIT_FIX - pushd mom6/MOM6-examples/src/SIS2/ - git checkout $SIS2_GIT_FIX - popd - fi - # link in dataset - pushd mom6/MOM6-examples - # skip the check for orion - #set platform_domain = `perl -T -e "use Net::Domain(hostdomain) ; print hostdomain"` - #if ("${platform_domain}" =~ *"MsState"* ) then - # ln -s /work/noaa/gfdlscr/pdata/gfdl/gfdl_O/datasets/ .datasets - #else - ln -s /gpfs/f5/gfdl_o/world-shared/datasets .datasets - #endif - popd - test -e mom6/.datasets - if [[ $status != 0 ]]; then - echo ""; echo "" ; echo " WARNING: .datasets link in MOM6 examples directory is invalid"; echo ""; echo "" - fi - cppdefs: "-DMAX_FIELDS_=100 -DNOT_SET_AFFINITY -D_USE_MOM6_DIAG -D_USE_GENERIC_TRACER -DUSE_PRECISION=2" - paths: [ "mom6/MOM6-examples/src/MOM6/config_src/infra/FMS2", - "mom6/MOM6-examples/src/MOM6/config_src/memory/dynamic_symmetric", - "mom6/MOM6-examples/src/MOM6/config_src/drivers/FMS_cap", - "mom6/MOM6-examples/src/MOM6/src/ALE", - "mom6/MOM6-examples/src/MOM6/src/core", - "mom6/MOM6-examples/src/MOM6/src/diagnostics", - "mom6/MOM6-examples/src/MOM6/src/equation_of_state", - "mom6/MOM6-examples/src/MOM6/src/framework", - 
"mom6/MOM6-examples/src/MOM6/src/ice_shelf", - "mom6/MOM6-examples/src/MOM6/src/initialization", - "mom6/MOM6-examples/src/MOM6/src/ocean_data_assim", - "mom6/MOM6-examples/src/MOM6/src/parameterizations", - "mom6/MOM6-examples/src/MOM6/src/tracer", - "mom6/MOM6-examples/src/MOM6/src/user", - "mom6/MOM6-examples/src/MOM6/config_src/external/ODA_hooks", - "mom6/MOM6-examples/src/MOM6/config_src/external/database_comms", - "mom6/MOM6-examples/src/MOM6/config_src/external/drifters", - "mom6/MOM6-examples/src/MOM6/config_src/external/stochastic_physics", - "mom6/MOM6-examples/src/MOM6/config_src/external/stochastic_physics", - "mom6/ocean_BGC/generic_tracers", - "mom6/ocean_BGC/mocsy/src" ] - - component: "sis2" - requires: ["FMS", "mom6"] - repo: "https://github.com/NOAA-GFDL/ice_param.git" - branch: "2024.01" - cppdefs: "-DUSE_FMS2_IO" - otherFlags: !join [ *FMSincludes, " ", *MOMincludes ] - paths: [ "mom6/MOM6-examples/src/SIS2/config_src/dynamic_symmetric", - "mom6/MOM6-examples/src/SIS2/config_src/external/Icepack_interfaces", - "mom6/MOM6-examples/src/SIS2/src", - "mom6/MOM6-examples/src/icebergs/src", - "sis2" ] - - component: "coupler" - requires: ["FMS", "atmos_dyn", "atmos_drivers", "atmos_phys", "lm4p", "sis2", "mom6"] - repo: "https://github.com/NOAA-GFDL/FMScoupler" - branch: "2024.01" - otherFlags: !join [ *FMSincludes, " ", *MOMincludes ] - paths: [ "coupler/shared", - "coupler/full" ] - diff --git a/fre/make/tests/ESM4_example/esm4.yaml b/fre/make/tests/ESM4_example/esm4.yaml deleted file mode 100644 index 436672ab..00000000 --- a/fre/make/tests/ESM4_example/esm4.yaml +++ /dev/null @@ -1,86 +0,0 @@ -# esm4.2, based off ESM4p2_piControl_spinup_J_rts.xml -# this needs -npc (non-parallel checkout) flag during checkout script creation for additional checkouts to work properly -fre_properties: - - &RELEASE "2024.01" - - &FMS_GIT_TAG "2024.01" - - &ATM_PHYS_GIT_TAG "2024.01-alpha6" - - &ATM_FV3_GIT_TAG "2023.03" - - &ATM_DRV_GIT_TAG "2023.04" - - &LAND_GIT_TAG "2024.01" - - &ICE_PARAM_GIT_TAG "2023.04" - - &ESM4_VERSION "2024.01" - - &OCEAN_BGC_GIT_TAG "dev4.2_benthic" - - &MOM6_DATE "20231130" - - &MOM6_GIT_TAG "40e3937" - - &MOM6_GIT_FIX "" - - &SIS2_GIT_FIX "" - # compile - - &FRE_STEM !join [fre/FMS, *RELEASE, _mom6_, *MOM6_DATE] - - &INTEL "intel-classic" - - &FMSincludes "-IFMS/include" - - &MOMincludes "-Imom6/MOM6-examples/src/MOM6/src/framework" - # post processing - - &OM4_ANALYSIS "ice_ocean_SIS2/OM4_05" - - &PROD_SIMTIME "5" # simulation length in years - - &PP_CMIP_CHUNK_A "5yr" # smaller chunk length for pp/analysis - - &PP_CMIP_CHUNK_B "10yr" # bigger chunk length for pp/analysis - - &PP_START_YEAR "0001" # starting year - - &PROD_RUNTIME "16:00:00" # Maximum wall clock per simulation - - &PROD_SEGTIME "03:00:00" # Wall clock per segment (usually 1 year) - # MDBI settings - - &EXP_CPLD_START "0001" # simulation start year - - &EXP_CPLD_END "1000" # simulation end year - # this property wasn't actually used in the xml, but seems to be intended for specifying reference files to verify regression tests - - &reference_tag "FMS2022.03_mom6_20220703" - # these properties modify the build - - &BUILD_DATE "" # included as part of the src directory path - - &MODIFIER "" # appended to compile experiment name, can likely remove since compile experiments are no longer a thing - - &PLATINFO "" # used for awg_input - - &LIBS_ROOT "esm4.2_compile$(MODIFIER)" # used below - - &SRC_DIR !join [ $root/, *BUILD_DATE, /, *LIBS_ROOT, /src] # this ends up being used for input paths - - 
&MOM6_EXAMPLES !join [ $root/, *BUILD_DATE, /, *LIBS_ROOT, /src/mom6] # also for input paths - # input paths - - &AWG_INPUT_HOME_GAEA "awg_include" - - &AWG_INPUT_HOME_NESCC "awg_include" - - &AWG_INPUT_HOME_GFDL !join [ /nbhome/$USER/, *FRE_STEM, *PLATINFO, /$(name)/mdt_xml/awg_include] - - &USER_FILES_F2toF5 "/gpfs/f5/gfdl_f/world-shared/Niki.Zadeh/archive/input/f2_user_files_in_xmls" - - &append_to_setup_csh "" # The start year of forcing dataset. FRE hack to set fyear - -build: - compileYaml: "compile.yaml" - platformYaml: "platforms.yaml" - -shared: - # directories shared across tools - # shamelessly stolen from am5 example - directories: &shared_directories - history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] - pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] - analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] - ptmp_dir: "/xtmp/$USER/ptmp" - fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" - - # shared pp settings - # also shamelessly stolen from am5 example - postprocess: - settings: &shared_settings - history_segment: "P1Y" - site: "ppan" - switches: &shared_switches - do_statics: True - do_timeavgs: True - clean_work: True - do_refinediag: False - do_atmos_plevel_masking: True - do_preanalysis: False - do_analysis: True - -experiments: - - name: "ESM4p2_piControl_spinup_J" - pp: - - name: "ESM4p2_piControl_spinup_Jb" - pp: - - name: "ESM4p2_piControl_spinup_J_redoyr450_btmdiags" - pp: - - name: "ESM4p2_piControl_spinup_J_redoyr450" - pp: diff --git a/fre/make/tests/ESM4_example/platforms.yaml b/fre/make/tests/ESM4_example/platforms.yaml deleted file mode 100644 index 14d4dfff..00000000 --- a/fre/make/tests/ESM4_example/platforms.yaml +++ /dev/null @@ -1,26 +0,0 @@ -platforms: - - name: ncrc5.intel - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL,".mk"] - modelRoot: ${HOME}/fremake_canopy/test - - name: ncrc5.intel23 - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] - modelRoot: ${HOME}/fremake_canopy/test - - name: hpcme.2023 - compiler: intel - RUNenv: [". 
/spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"] - modelRoot: /apps - fc: mpiifort - cc: mpiicc - container: True - containerBuild: "podman" - containerRun: "apptainer" diff --git a/fre/make/tests/SHiELD_example/SHiELD.yaml b/fre/make/tests/SHiELD_example/SHiELD.yaml deleted file mode 100644 index 9f063d0a..00000000 --- a/fre/make/tests/SHiELD_example/SHiELD.yaml +++ /dev/null @@ -1,10 +0,0 @@ -platformYaml: platforms.yaml -compileYaml: compile.yaml -fv3_release: main -phy_release: main -fms_release: "2023.02" -drivers_release: main -coupler_release: "2023.02" -FMSincludes: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" -momIncludes: "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" -INTEL: intel-classic diff --git a/fre/make/tests/SHiELD_example/compile.yaml b/fre/make/tests/SHiELD_example/compile.yaml deleted file mode 100644 index a83bb1ce..00000000 --- a/fre/make/tests/SHiELD_example/compile.yaml +++ /dev/null @@ -1,38 +0,0 @@ -experiment: shield_nh -compileInclude: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" -container_addlibs: ["bacio","sp","w3emc","w3nco"] -baremetal_addlibs: ["-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/bacio-2.4.1-wrykbu2/lib -lbacio_4", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/bacio-2.4.1-wrykbu2/lib -lbacio_8", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/sp-2.5.0-7bumbmx/lib64 -lsp_d", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/w3emc-2.10.0-zmuykep/lib64 -lw3emc_d", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/w3nco-2.4.1-76qm6h2/lib -lw3nco_d"] -src: - - component: "FMS" - repo: "https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-Duse_libMPI -Duse_netCDF -Duse_LARGEFILE -DHAVE_SCHED_GETAFFINITY -DINTERNAL_FILE_NML -DGFS_PHYS -DGFS_CONSTANTS -DHAVE_GETTID" - branch: "$(fms_release)" - - component: "SHiELD_physics" - requires: ["FMS"] - repo: "https://github.com/NOAA-GFDL/SHiELD_physics.git" - branch: "$(phy_release)" - paths: [SHiELD_physics/gsmphys, - SHiELD_physics/GFS_layer, - SHiELD_physics/IPD_layer] - cppdefs: "-Duse_libMPI -Duse_netCDF -DHAVE_SCHED_GETAFFINITY -DSPMD -Duse_LARGEFILE -DGFS_PHYS -DUSE_GFSL63 -DNEW_TAUCTMAX -DNEMS_GSM -DINTERNAL_FILE_NML -DMOIST_CAPPA -DUSE_COND" - otherFlags: "$(FMSincludes)" - - component: "fv3" - requires: ["FMS", "SHiELD_physics"] - repo: ["https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git", - "https://github.com/NOAA-GFDL/atmos_drivers.git"] - cppdefs: "-Duse_libMPI -Duse_netCDF -DHAVE_SCHED_GETAFFINITY -DSPMD -Duse_LARGEFILE -DGFS_PHYS -DUSE_GFSL63 -DNEW_TAUCTMAX -DNEMS_GSM -DINTERNAL_FILE_NML -DMOIST_CAPPA -DUSE_COND" - branch: ["$(fv3_release)","$(drivers_release)"] - paths: [SHiELD_physics/FV3GFS/, - fv3/atmos_drivers/SHiELD/atmos_model.F90, - fv3/GFDL_atmos_cubed_sphere/driver/SHiELD/atmosphere.F90, - fv3/GFDL_atmos_cubed_sphere/tools/, - fv3/GFDL_atmos_cubed_sphere/model/, - fv3/GFDL_atmos_cubed_sphere/GFDL_tools/fv_diag_column.F90] - otherFlags: "$(FMSincludes)" - - component: "FMScoupler" - requires: ["FMS", "SHiELD_physics", "fv3"] - repo: "https://github.com/NOAA-GFDL/FMScoupler.git" - cppdefs: "-Duse_libMPI -Duse_netCDF -DHAVE_SCHED_GETAFFINITY -DSPMD -Duse_LARGEFILE -DGFS_PHYS -DUSE_GFSL63 -DNEW_TAUCTMAX -DNEMS_GSM 
-DINTERNAL_FILE_NML -DMOIST_CAPPA -DUSE_COND" - branch: "$(coupler_release)" - paths: ["FMScoupler/SHiELD/coupler_main.F90"] - otherFlags: "$(FMSincludes)" diff --git a/fre/make/tests/SHiELD_example/platforms.yaml b/fre/make/tests/SHiELD_example/platforms.yaml deleted file mode 100644 index 9f72043b..00000000 --- a/fre/make/tests/SHiELD_example/platforms.yaml +++ /dev/null @@ -1,26 +0,0 @@ -platforms: - - name: ncrc5.intel - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2022.2.1","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: ${HOME}/fremake_canopy/SHiELDtest - - name: ncrc5.intel23 - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2023.1.0","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: ${HOME}/fremake_canopy/SHiELDtest - - name: hpcme.2023 - compiler: intel - RUNenv: [". /spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.12.1"] - modelRoot: /apps - fc: mpiifort - cc: mpiicc - container: True - containerBuild: "podman" - containerRun: "apptainer" diff --git a/fre/make/tests/compilation/test_run_fremake_builds.py b/fre/make/tests/compilation/test_run_fremake_builds.py new file mode 100644 index 00000000..9fe6169e --- /dev/null +++ b/fre/make/tests/compilation/test_run_fremake_builds.py @@ -0,0 +1,53 @@ +''' this file holds any run-fremake tests that actually compile the model code''' +''' these tests assume your os is the ci image (gcc 14 + mpich on rocky 8)''' +''' you may need to add mkmf to your path or make other adjustments to the mkmf template to run elsewhere''' + +import os +from shutil import rmtree +from pathlib import Path + +import pytest + +from fre.make import run_fremake_script + +# command options +YAMLDIR = "fre/make/tests/null_example" +YAMLFILE = "null_model.yaml" +YAMLPATH = f"{YAMLDIR}/{YAMLFILE}" +PLATFORM = [ "ci.gnu" ] +CONTAINER_PLATFORM = ["hpcme.2023"] +TARGET = ["debug"] +EXPERIMENT = "null_model_full" +VERBOSE = False + +# set up some paths for the tests to build in +# the TEST_BUILD_DIR env var is used in the null model's platform.yaml +# so the model root directory path can be changed +currPath=os.getcwd() +SERIAL_TEST_PATH=f"{currPath}/fre/make/tests/compilation/serial_build" +MULTIJOB_TEST_PATH=f"{currPath}/fre/make/tests/compilation/multijob_build" +Path(SERIAL_TEST_PATH).mkdir(parents=True,exist_ok=True) +Path(MULTIJOB_TEST_PATH).mkdir(parents=True,exist_ok=True) + + +# test building the null model using gnu compilers +def test_run_fremake_serial_compile(): + ''' run fre make with run-fremake subcommand and build the null model experiment with gnu''' + os.environ["TEST_BUILD_DIR"] = SERIAL_TEST_PATH + run_fremake_script.fremake_run(YAMLPATH, PLATFORM, TARGET, False, 1, False, True, VERBOSE) + assert Path(f"{SERIAL_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{TARGET[0]}/exec/{EXPERIMENT}.x").exists() + +# same test with a parallel build +def test_run_fremake_multijob_compile(): + ''' test run-fremake parallel compile with gnu''' + os.environ["TEST_BUILD_DIR"] = MULTIJOB_TEST_PATH + run_fremake_script.fremake_run(YAMLPATH, PLATFORM, TARGET, True, 
4, False, True, VERBOSE) + assert Path(f"{MULTIJOB_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{TARGET[0]}/exec/{EXPERIMENT}.x").exists() + +# containerized build +@pytest.mark.skip(reason="podman fails to pull image base on CI runner") +def test_run_fremake_container_build(): + ''' checks image creation for the container build''' + run_fremake_script.fremake_run(YAMLPATH, CONTAINER_PLATFORM, TARGET, False, 1, True, True, VERBOSE) + assert Path("null_model_full-debug.sif").exists() + diff --git a/fre/make/tests/null_example/compile.yaml b/fre/make/tests/null_example/compile.yaml index ab5052a0..68c151f8 100644 --- a/fre/make/tests/null_example/compile.yaml +++ b/fre/make/tests/null_example/compile.yaml @@ -5,7 +5,7 @@ compile: src: - component: "FMS" repo: "https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-Duse_netCDF -Duse_libMPI -DMAXFIELDS_=200 -DMAXFIELDMETHODS_=200 -DINTERNAL_FILE_NML -DHAVE_GETTID" # gettid flag is platform specific + cppdefs: "-Duse_netCDF -Duse_libMPI -DMAXFIELDS_=200 -DMAXFIELDMETHODS_=200 -DINTERNAL_FILE_NML -DHAVE_GETTID" otherFlags: "-fallow-argument-mismatch" # only needed for gcc branch: *branch - component: "atmos_null" diff --git a/fre/make/tests/null_example/platforms.yaml b/fre/make/tests/null_example/platforms.yaml index 60d1aad2..e7d9e5ae 100644 --- a/fre/make/tests/null_example/platforms.yaml +++ b/fre/make/tests/null_example/platforms.yaml @@ -1,26 +1,20 @@ platforms: - - name: ncrc5.intel - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] - modelRoot: ${HOME}/fremake_canopy/test - name: ncrc5.intel23 compiler: intel modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc + modules: [!join [*INTEL, "/2023.2.0"],"fre/bronx-21",cray-hdf5/1.12.2.11, cray-netcdf/4.9.0.11] mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] modelRoot: ${HOME}/fremake_canopy/test - name: hpcme.2023 compiler: intel RUNenv: [". 
/spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"] modelRoot: /apps - fc: mpiifort - cc: mpiicc container: True containerBuild: "podman" containerRun: "apptainer" + containerBase: "ecpe4s/noaa-intel-prototype:2023.09.25" + mkTemplate: "/apps/mkmf/templates/hpcme-intel21.mk" + - name: ci.gnu + compiler: gnu + mkTemplate: /__w/fre-cli/fre-cli/mkmf/templates/linux-ubuntu-xenial-gnu.mk + modelRoot: ${TEST_BUILD_DIR}/fremake_canopy/test diff --git a/fre/make/tests/test_create_makefile.py b/fre/make/tests/test_create_makefile.py index 36188b33..dd180262 100644 --- a/fre/make/tests/test_create_makefile.py +++ b/fre/make/tests/test_create_makefile.py @@ -4,10 +4,10 @@ import os import shutil from pathlib import Path -from fre.make import createMakefile +from fre.make import create_makefile_script # SET-UP -test_dir = Path("fre/make/tests") +TEST_DIR = Path("fre/make/tests") NM_EXAMPLE = Path("null_example") YAMLFILE = "null_model.yaml" BM_PLATFORM = ["ncrc5.intel23"] @@ -16,57 +16,60 @@ EXPERIMENT = "null_model_full" # Create output location -out = f"{test_dir}/makefile_out" -if Path(out).exists(): +OUT = f"{TEST_DIR}/makefile_out" +if Path(OUT).exists(): # remove - shutil.rmtree(out) + shutil.rmtree(OUT) # create output directory - Path(out).mkdir(parents=True,exist_ok=True) + Path(OUT).mkdir(parents=True,exist_ok=True) else: - Path(out).mkdir(parents=True,exist_ok=True) + Path(OUT).mkdir(parents=True,exist_ok=True) # Set output directory as home for fre make output -#os.environ["HOME"]=str(Path(out)) +#os.environ["HOME"]=str(Path(OUT)) def test_modelyaml_exists(): """ Check the model yaml exists """ - assert Path(f"{test_dir}/{NM_EXAMPLE}/{YAMLFILE}").exists() + assert Path(f"{TEST_DIR}/{NM_EXAMPLE}/{YAMLFILE}").exists() def test_compileyaml_exists(): """ Check the compile yaml exists """ - assert Path(f"{test_dir}/{NM_EXAMPLE}/compile.yaml").exists() + assert Path(f"{TEST_DIR}/{NM_EXAMPLE}/compile.yaml").exists() def test_platformyaml_exists(): """ Check the platform yaml exists """ - assert Path(f"{test_dir}/{NM_EXAMPLE}/platforms.yaml").exists() + assert Path(f"{TEST_DIR}/{NM_EXAMPLE}/platforms.yaml").exists() def test_bm_makefile_creation(): """ Check the makefile is created when a bare-metal platform is used """ # Set output directory as home for fre make output - os.environ["HOME"]=str(Path(out)) + def_home = str(os.environ["HOME"]) + os.environ["HOME"]=OUT#str(Path(OUT)) bm_plat = BM_PLATFORM[0] targ = TARGET[0] - yamlfile_path = f"{test_dir}/{NM_EXAMPLE}/{YAMLFILE}" + yamlfile_path = f"{TEST_DIR}/{NM_EXAMPLE}/{YAMLFILE}" - createMakefile.makefile_create(yamlfile_path,BM_PLATFORM,TARGET) + create_makefile_script.makefile_create(yamlfile_path,BM_PLATFORM,TARGET) - assert Path(f"{out}/fremake_canopy/test/{EXPERIMENT}/{bm_plat}-{targ}/exec/Makefile").exists() + assert Path(f"{OUT}/fremake_canopy/test/{EXPERIMENT}/{bm_plat}-{targ}/exec/Makefile").exists() + os.environ["HOME"] = def_home + assert os.environ["HOME"] == def_home def test_container_makefile_creation(): """ Check the makefile is created when the container platform is used """ container_plat = CONTAINER_PLATFORM[0] - yamlfile_path = f"{test_dir}/{NM_EXAMPLE}/{YAMLFILE}" - createMakefile.makefile_create(yamlfile_path,CONTAINER_PLATFORM,TARGET) + yamlfile_path = f"{TEST_DIR}/{NM_EXAMPLE}/{YAMLFILE}" + create_makefile_script.makefile_create(yamlfile_path,CONTAINER_PLATFORM,TARGET) assert Path(f"tmp/{container_plat}/Makefile").exists() diff --git 
a/fre/make/tests/test_run_fremake.py b/fre/make/tests/test_run_fremake.py new file mode 100644 index 00000000..4447acf6 --- /dev/null +++ b/fre/make/tests/test_run_fremake.py @@ -0,0 +1,156 @@ +''' test "fre make run-fremake" calls without actual compilation ''' + +import os +from shutil import rmtree +from pathlib import Path + +from click.testing import CliRunner + +import pytest + +from fre import fre +from fre.make import run_fremake_script + +runner=CliRunner() + +# command options +YAMLDIR = "fre/make/tests/null_example" +YAMLFILE = "null_model.yaml" +YAMLPATH = f"{YAMLDIR}/{YAMLFILE}" +PLATFORM = [ "ci.gnu" ] +CONTAINER_PLATFORM = ["hpcme.2023"] +TARGET = ["debug"] +BADOPT = ["foo"] +EXPERIMENT = "null_model_full" +VERBOSE = False + +# possible targets +targets = ["debug", "prod", "repro", "debug-openmp", "prod-openmp", "repro-openmp"] + +# set up some paths for the tests +SERIAL_TEST_PATH="fre/make/tests/test_run_fremake_serial" +MULTIJOB_TEST_PATH="fre/make/tests/test_run_fremake_multijob" +MULTITARGET_TEST_PATH="fre/make/tests/test_run_fremake_multitarget" +Path(SERIAL_TEST_PATH).mkdir(parents=True,exist_ok=True) +Path(MULTIJOB_TEST_PATH).mkdir(parents=True,exist_ok=True) +Path(MULTITARGET_TEST_PATH).mkdir(parents=True,exist_ok=True) + +##def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,execute,verbose): + +# yaml file checks +def test_modelyaml_exists(): + assert Path(f"{YAMLDIR}/{YAMLFILE}").exists() + +def test_compileyaml_exists(): + assert Path(f"{YAMLDIR}/compile.yaml").exists() + +def test_platformyaml_exists(): + assert Path(f"{YAMLDIR}/platforms.yaml").exists() + +# expected failures for incorrect options +@pytest.mark.xfail() +def test_bad_platform_option(): + ''' test run-fremake with an invalid platform option''' + run_fremake_script.fremake_run(YAMLPATH, BADOPT, TARGET, False, 1, False, False, VERBOSE) + +@pytest.mark.xfail() +def test_bad_target_option(): + ''' test run-fremake with an invalid target option''' + run_fremake_script.fremake_run(YAMLPATH, PLATFORM, BADOPT, False, 1, False, False, VERBOSE) + +@pytest.mark.xfail() +def test_bad_yamlpath_option(): + ''' test run-fremake with an invalid yaml path option''' + run_fremake_script.fremake_run(BADOPT[0], PLATFORM, TARGET, False, 1, False, False, VERBOSE) + +# tests script/makefile creation without executing (serial compile) +# first test runs the run-fremake command, subsequent tests check for creation of scripts +def test_run_fremake_serial(): + ''' run fre make with run-fremake subcommand and set up the null model experiment with gnu''' + os.environ["TEST_BUILD_DIR"] = SERIAL_TEST_PATH + run_fremake_script.fremake_run(YAMLPATH, PLATFORM, TARGET, False, 1, False, False, VERBOSE) + +def test_run_fremake_compile_script_creation_serial(): + ''' check for compile script creation from previous test ''' + assert Path(f"{SERIAL_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{TARGET[0]}/exec/compile.sh").exists() + +def test_run_fremake_checkout_script_creation_serial(): + ''' check for checkout script creation from previous test ''' + assert Path(f"{SERIAL_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/src/checkout.sh").exists() + +def test_run_fremake_makefile_creation_serial(): + ''' check for makefile creation from previous test ''' + assert Path(f"{SERIAL_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{TARGET[0]}/exec/Makefile").exists() + +# same tests with multijob compile and non-parallel-checkout options enabled +def test_run_fremake_multijob(): + ''' run fre make with run-fremake subcommand and set up the null model experiment with gnu''' + os.environ["TEST_BUILD_DIR"] = MULTIJOB_TEST_PATH + run_fremake_script.fremake_run(YAMLPATH, PLATFORM, TARGET, True, 4, True, False, VERBOSE) + +def test_run_fremake_compile_script_creation_multijob(): + ''' check for compile script creation from previous test ''' + assert Path(f"{MULTIJOB_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{TARGET[0]}/exec/compile.sh").exists() + +def test_run_fremake_checkout_script_creation_multijob(): + ''' check for checkout script creation from previous test ''' + assert Path(f"{MULTIJOB_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/src/checkout.sh").exists() + +def test_run_fremake_makefile_creation_multijob(): + ''' check for makefile creation from previous test ''' + assert Path(f"{MULTIJOB_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{TARGET[0]}/exec/Makefile").exists() + +# tests container build script/makefile/dockerfile creation +def test_run_fremake_container(): + '''run run-fremake with options for containerized build''' + run_fremake_script.fremake_run(YAMLPATH, CONTAINER_PLATFORM, TARGET, False, 1, True, False, VERBOSE) + +def test_run_fremake_build_script_creation_container(): + ''' checks container build script creation from previous test ''' + assert Path("createContainer.sh").exists() + +def test_run_fremake_dockerfile_creation_container(): + ''' checks dockerfile creation from previous test ''' + assert Path("Dockerfile").exists() + +def test_run_fremake_checkout_script_creation_container(): + ''' checks checkout script creation from previous test ''' + assert Path(f"tmp/{CONTAINER_PLATFORM[0]}/checkout.sh").exists() + +def test_run_fremake_makefile_creation_container(): + ''' checks makefile creation from previous test ''' + assert Path(f"tmp/{CONTAINER_PLATFORM[0]}/Makefile").exists() + +def test_run_fremake_run_script_creation_container(): + ''' checks (internal) container run script creation from previous test ''' + assert Path(f"tmp/{CONTAINER_PLATFORM[0]}/execrunscript.sh").exists() + +# tests for builds with multiple targets + +def test_run_fremake_bad_target(): + ''' checks invalid target returns an error ''' + os.environ["TEST_BUILD_DIR"] = MULTITARGET_TEST_PATH + result = runner.invoke(fre.fre, args=["make", "run-fremake", "-y", YAMLPATH, "-p", PLATFORM[0], "-t", "prod-repro"]) + assert result.exit_code == 1 + +def test_run_fremake_multiple_targets(): + ''' passes all valid targets for a build ''' + result = runner.invoke(fre.fre, args=["make", "run-fremake", "-y", YAMLPATH, "-p", PLATFORM[0], "-t", \ + "debug", "-t", "prod", "-t", "repro", "-t", "debug-openmp", "-t",\ + "prod-openmp", "-t", "repro-openmp"]) + assert result.exit_code == 0 + +def test_run_fremake_compile_script_creation_multitarget(): + ''' check compile scripts for all targets exist from previous test''' + for t in targets: + assert Path(f"{MULTITARGET_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{t}/exec/compile.sh").exists() + +def test_run_fremake_checkout_script_creation_multitarget(): + ''' check for checkout script creation for multi-target build''' + # check checkout script exists from previous test + assert Path(f"{MULTITARGET_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/src/checkout.sh").exists() + +def test_run_fremake_makefile_creation_multitarget(): + ''' check for makefile creation from previous test ''' + for t in targets: + assert Path(f"{MULTITARGET_TEST_PATH}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{t}/exec/Makefile").exists()
diff --git a/fre/pp/checkoutScript.py b/fre/pp/checkoutScript.py deleted file mode 100644 index 57036634..00000000 --- a/fre/pp/checkoutScript.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python - -# Author: Bennett Chang -# Description: - -import os -import subprocess -from subprocess import PIPE -from subprocess import STDOUT -import re -import click - -############################################# - -package_dir = os.path.dirname(os.path.abspath(__file__)) - -############################################# - -def _checkoutTemplate(experiment, platform, target, branch='main'): - """ - Checkout the workflow template files from the repo - """ - # Create the directory if it doesn't exist - directory = os.path.expanduser("~/cylc-src") - os.makedirs(directory, exist_ok=True) - - # Change the current working directory - os.chdir(directory) - - # Set the name of the directory - name = f"{experiment}__{platform}__{target}" - - # Clone the repository with depth=1; check for errors - click.echo("cloning experiment into directory " + directory + "/" + name) - clonecmd = ( - f"git clone -b {branch} --single-branch --depth=1 --recursive " - f"https://github.com/NOAA-GFDL/fre-workflows.git {name}" ) - preexist_error = f"fatal: destination path '{name}' exists and is not an empty directory." - click.echo(clonecmd) - cloneproc = subprocess.run(clonecmd, shell=True, check=False, stdout=PIPE, stderr=STDOUT) - if not cloneproc.returncode == 0: - if re.search(preexist_error.encode('ASCII'),cloneproc.stdout) is not None: - argstring = f" -e {experiment} -p {platform} -t {target}" - stop_report = ( - "Error in checkoutTemplate: the workflow definition specified by -e/-p/-t already" - f" exists at the location ~/cylc-src/{name}!\n" - f"In the future, we will confirm that ~/cylc-src/{name} is usable and will check " - "whether it is up-to-date.\n" - "But for now, if you wish to proceed, you must delete the workflow definition.\n" - "To start over, try:\n" - f"\t cylc stop {name}\n" - f"\t cylc clean {name}\n" - f"\t rm -r ~/cylc-src/{name}" ) - click.echo(stop_report) - return 1 - else: - #if not identified, just print the error - click.echo(clonecmd) - click.echo(cloneproc.stdout) - return 1 - -############################################# - -@click.command() -def checkoutTemplate(experiment, platform, target, branch="main"): - ''' - Wrapper script for calling checkoutTemplate - allows the decorated version - of the function to be separate from the undecorated version - ''' - return _checkoutTemplate(experiment, platform, target, branch) - - -if __name__ == '__main__': - checkoutTemplate() diff --git a/fre/pp/checkout_script.py b/fre/pp/checkout_script.py new file mode 100644 index 00000000..02dca1f7 --- /dev/null +++ b/fre/pp/checkout_script.py @@ -0,0 +1,95 @@ +''' +Description: Checkout script which accounts for 4 different scenarios: +1. branch not given, folder does not exist, +2. branch given, folder does not exist, +3. branch not given, folder exists, +4. 
branch given and folder exists ''' +import os +import sys +import subprocess + +import click + +from fre import fre + +FRE_WORKFLOWS_URL = 'https://github.com/NOAA-GFDL/fre-workflows.git' + +def checkout_template(experiment = None, platform = None, target = None, branch = None): + """ + Checkout the workflow template files from the repo + """ + ## Chdir back to here before we exit this routine + go_back_here = os.getcwd() + + # branch and version parameters + default_tag = fre.version + git_clone_branch_arg = branch if branch is not None else default_tag + if branch is None: + print(f"(checkout_script) default tag is '{default_tag}'") + else: + print(f"(checkout_script) requested branch/tag is '{branch}'") + + # check args + set the name of the directory + if None in [experiment, platform, target]: + raise ValueError( 'one of these is None: experiment / platform / target = \n' + f'{experiment} / {platform} / {target}' ) + name = f"{experiment}__{platform}__{target}" + + # Create the directory if it doesn't exist + directory = os.path.expanduser("~/cylc-src") + try: + os.makedirs(directory, exist_ok = True) + except Exception as exc: + raise OSError( + f'(checkout_script) directory {directory} was not able to be created. exit!') from exc + + checkout_exists = os.path.isdir(f'{directory}/{name}') + + if not checkout_exists: # scenarios 1+2, checkout doesn't exist, branch specified (or not) + print('(checkout_script) checkout does not yet exist; will create now') + clone_output = subprocess.run( ['git', 'clone','--recursive', + f'--branch={git_clone_branch_arg}', + FRE_WORKFLOWS_URL, f'{directory}/{name}'], + capture_output = True, text = True, check = True) + print(f'(checkout_script) {clone_output}') + + else: # the repo checkout does exist, scenarios 3 and 4. + os.chdir(f'{directory}/{name}') + + # capture the branch and tag + # if either matches git_clone_branch_arg, then success. otherwise, fail.
+ + current_tag = subprocess.run(["git","describe","--tags"], + capture_output = True, + text = True, check = True).stdout.strip() + current_branch = subprocess.run(["git", "branch", "--show-current"], + capture_output = True, + text = True, check = True).stdout.strip() + + if current_tag == git_clone_branch_arg or current_branch == git_clone_branch_arg: + print(f"(checkout_script) checkout exists ('{directory}/{name}'), and matches '{git_clone_branch_arg}'") + else: + print(f"(checkout_script) ERROR: checkout exists ('{directory}/{name}') and does not match '{git_clone_branch_arg}'") + print(f"(checkout_script) ERROR: current branch is '{current_branch}', current tag-describe is '{current_tag}'") + sys.exit(1) + + # make sure we are back where we should be + if os.getcwd() != go_back_here: + os.chdir(go_back_here) + + return 0 + +############################################# + +@click.command() +def _checkout_template(experiment, platform, target, branch): + ''' + Wrapper script for calling checkout_template - allows the decorated version + of the function to be separate from the undecorated version + ''' + return checkout_template(experiment, platform, target, branch) + + +if __name__ == '__main__': + checkout_template() diff --git a/fre/pp/configure_script_xml.py b/fre/pp/configure_script_xml.py index 899b4830..4674a63b 100644 --- a/fre/pp/configure_script_xml.py +++ b/fre/pp/configure_script_xml.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 ''' Primary Usage: fre-bronx-to-canopy -x XML -e EXP -p PLATFORM -t TARGET @@ -686,11 +685,11 @@ def format_req_pp_year(pp_year): ############################################## @click.command() -def convert(): +def _convert(): ''' Wrapper for convert call - allows users to call without command-line args ''' - _convert() + convert() if __name__ == '__main__': convert() diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index b782e3de..25507c14 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Script creates rose-apps and rose-suite files for the workflow from the pp yaml.
@@ -147,7 +146,7 @@ def set_rose_apps(yamlfile,rose_regrid,rose_remap): value=f'{interp_split[0]}_{interp_split[1]}.{interp_method}') #################### -def yamlInfo(yamlfile,experiment,platform,target): +def yaml_info(yamlfile,experiment,platform,target): """ Using a valid pp.yaml, the rose-app and rose-suite configuration files are created in the cylc-src @@ -200,13 +199,13 @@ def yamlInfo(yamlfile,experiment,platform,target): print(" " + outfile) @click.command() -def _yamlInfo(yamlfile,experiment,platform,target): +def _yaml_info(yamlfile,experiment,platform,target): ''' - Wrapper script for calling yamlInfo - allows the decorated version + Wrapper script for calling yaml_info - allows the decorated version of the function to be separate from the undecorated version ''' - return yamlInfo(yamlfile,experiment,platform,target) + return yaml_info(yamlfile,experiment,platform,target) # Use parseyaml function to parse created edits.yaml if __name__ == '__main__': - yamlInfo() + yaml_info() diff --git a/fre/pp/frepp.py b/fre/pp/frepp.py index b3456e31..55939232 100644 --- a/fre/pp/frepp.py +++ b/fre/pp/frepp.py @@ -1,14 +1,16 @@ ''' fre pp ''' import click -from .checkoutScript import checkoutTemplate -from .configure_script_yaml import yamlInfo -from .configure_script_xml import convert -from .validate import validate_subtool -from .install import install_subtool -from .run import pp_run_subtool -from .status import status_subtool -from .wrapper import runFre2pp + +from fre.pp import checkout_script +from fre.pp import configure_script_yaml +from fre.pp import configure_script_xml +from fre.pp import validate_script +from fre.pp import install_script +from fre.pp import run_script +from fre.pp import trigger_script +from fre.pp import status_script +from fre.pp import wrapper_script @click.group(help=click.style(" - access fre pp subcommands", fg=(57,139,210))) def pp_cli(): @@ -30,7 +32,7 @@ def pp_cli(): def status(context, experiment, platform, target): # pylint: disable=unused-argument """ - Report status of PP configuration""" - context.forward(status_subtool) + context.forward(status_script._status_subtool) # fre pp run @pp_cli.command() @@ -47,7 +49,7 @@ def status(context, experiment, platform, target): def run(context, experiment, platform, target): # pylint: disable=unused-argument """ - Run PP configuration""" - context.forward(pp_run_subtool) + context.forward(run_script._pp_run_subtool) # fre pp validate @pp_cli.command() @@ -64,7 +66,7 @@ def run(context, experiment, platform, target): def validate(context, experiment, platform, target): # pylint: disable=unused-argument """ - Validate PP configuration""" - context.forward(validate_subtool) + context.forward(validate_script._validate_subtool) # fre pp install @pp_cli.command() @@ -81,7 +83,7 @@ def validate(context, experiment, platform, target): def install(context, experiment, platform, target): # pylint: disable=unused-argument """ - Install PP configuration""" - context.forward(install_subtool) + context.forward(install_script._install_subtool) @pp_cli.command() @click.option("-y", "--yamlfile", type=str, @@ -100,7 +102,7 @@ def install(context, experiment, platform, target): def configure_yaml(context,yamlfile,experiment,platform,target): # pylint: disable=unused-argument """ - Execute fre pp configure """ - context.forward(yamlInfo) + context.forward(configure_script_yaml._yaml_info) @pp_cli.command() @click.option("-e", "--experiment", type=str, @@ -112,17 +114,14 @@ def 
configure_yaml(context,yamlfile,experiment,platform,target): @click.option("-t", "--target", type=str, help="Target name", required=True) -@click.option("-b", "--branch", - show_default=True, - default="main", type=str, - help="Name of fre2/workflows/postproc branch to clone; " \ - "defaults to 'main'. Not intended for production use, " \ - "but needed for branch testing." ) +@click.option("-b", "--branch", type =str, + required=False, default = None, + help="fre-workflows branch/tag to clone; default is $(fre --version)") @click.pass_context -def checkout(context, experiment, platform, target, branch='main'): +def checkout(context, experiment, platform, target, branch=None): # pylint: disable=unused-argument """ - Execute fre pp checkout """ - context.forward(checkoutTemplate) + context.forward(checkout_script._checkout_template) @pp_cli.command() @click.option('-x', '--xml', @@ -154,11 +153,11 @@ def checkout(context, experiment, platform, target, branch='main'): is_flag=True, default=False, help="Optional. Process refineDiag scripts") -@click.option('--pp_start', +@click.option('--pp_start', type=str, default='0000', help="Optional. Starting year of postprocessing. " \ "If not specified, a default value of '0000' " \ "will be set and must be changed in rose-suite.conf") -@click.option('--pp_stop', +@click.option('--pp_stop', type=str, default='0000', help="Optional. Ending year of postprocessing. " \ "If not specified, a default value of '0000' " \ "will be set and must be changed in rose-suite.conf") @@ -180,7 +179,7 @@ def configure_xml(context, xml, platform, target, experiment, do_analysis, histo ppdir, do_refinediag, pp_start, pp_stop, validate, verbose, quiet, dual): # pylint: disable=unused-argument """ - Converts a Bronx XML to a Canopy rose-suite.conf """ - context.forward(convert) + context.forward(configure_script_xml._convert) #fre pp wrapper @pp_cli.command() @@ -190,23 +189,44 @@ def configure_xml(context, xml, platform, target, experiment, do_analysis, histo @click.option("-p", "--platform", type=str, help="Platform name", required=True) -@click.option("-t", "--target", type=str, +@click.option("-T", "--target", type=str, help="Target name", required=True) @click.option("-c", "--config-file", type=str, help="Path to a configuration file in either XML or YAML", required=True) @click.option("-b", "--branch", - show_default=True, - default="main", type=str, - help="Name of fre2/workflows/postproc branch to clone; " \ - "defaults to 'main'. Not intended for production use, " \ - "but needed for branch testing." 
) + required=False, default=None, + help="fre-workflows branch/tag to clone; default is $(fre --version)") +@click.option("-t", "--time", + required=False, default=None, + help="Time whose history files are ready") @click.pass_context -def wrapper(context, experiment, platform, target, config_file, branch='main'): +def wrapper(context, experiment, platform, target, config_file, branch, time): # pylint: disable=unused-argument """ - Execute fre pp steps in order """ - context.forward(runFre2pp) + print('(frepp.wrapper) about to forward context to wrapper.run_all_fre_pp_steps via click...') + context.forward(wrapper_script._run_all_fre_pp_steps) + print('(frepp.wrapper) done forwarding context to wrapper.run_all_fre_pp_steps via click.') + +@pp_cli.command() +@click.option("-e", "--experiment", type=str, + help="Experiment name", + required=True) +@click.option("-p", "--platform", type=str, + help="Platform name", + required=True) +@click.option("-T", "--target", type=str, + help="Target name", + required=True) +@click.option("-t", "--time", + required=True, + help="Time whose history files are ready") +@click.pass_context +def trigger(context, experiment, platform, target, time): + # pylint: disable=unused-argument + """ - Start postprocessing for a particular time """ + context.forward(trigger_script._trigger) if __name__ == "__main__": ''' entry point for click to fre pp commands ''' diff --git a/fre/pp/install.py b/fre/pp/install.py deleted file mode 100644 index 9ffc00ee..00000000 --- a/fre/pp/install.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python -''' fre pp install ''' - -import subprocess -import click - -def _install_subtool(experiment, platform, target): - """ - Install the Cylc workflow definition located in - ~/cylc-src/____ - to - ~/cylc-run/____ - """ - - name = experiment + '__' + platform + '__' + target - cmd = f"cylc install --no-run-name {name}" - subprocess.run(cmd, shell=True, check=True) - -@click.command() -def install_subtool(experiment, platform, target): - ''' entry point to install for click ''' - return _install_subtool(experiment, platform, target) diff --git a/fre/pp/install_script.py b/fre/pp/install_script.py new file mode 100644 index 00000000..95f28eb5 --- /dev/null +++ b/fre/pp/install_script.py @@ -0,0 +1,41 @@ +''' fre pp install ''' + +from pathlib import Path +import os +import subprocess +import click + +def install_subtool(experiment, platform, target): + """ + Install the Cylc workflow definition located in + ~/cylc-src/____ + to + ~/cylc-run/____ + """ + + name = experiment + '__' + platform + '__' + target + # if the cylc-run directory already exists, + # then check whether the cylc expanded definition (cylc config) + # is identical. If the same, good. If not, bad.
+ source_dir = Path(os.path.expanduser("~/cylc-src"), name) + install_dir = Path(os.path.expanduser("~/cylc-run"), name) + if os.path.isdir(install_dir): + installed_def = subprocess.run(["cylc", "config", name],capture_output=True).stdout + go_back_here = os.getcwd() + os.chdir(source_dir) + source_def = subprocess.run(['cylc', 'config', '.'], capture_output=True).stdout + if installed_def == source_def: + print(f"NOTE: Workflow '{install_dir}' already installed, and the definition is unchanged") + else: + print(f"ERROR: Workflow '{install_dir}' already installed, and the definition has changed!") + print(f"ERROR: Please remove installed workflow with 'cylc clean {name}' or move the workflow run directory '{install_dir}'") + raise SystemExit(1) + else: + print(f"NOTE: About to install workflow into ~/cylc-run/{name}") + cmd = f"cylc install --no-run-name {name}" + subprocess.run(cmd, shell=True, check=True) + +@click.command() +def _install_subtool(experiment, platform, target): + ''' entry point to install for click ''' + return install_subtool(experiment, platform, target) diff --git a/fre/pp/run.py b/fre/pp/run_script.py similarity index 81% rename from fre/pp/run.py rename to fre/pp/run_script.py index 57f2c427..b6ca607a 100644 --- a/fre/pp/run.py +++ b/fre/pp/run_script.py @@ -1,10 +1,9 @@ -#!/usr/bin/env python ''' fre pp run ''' import subprocess import click -def _pp_run_subtool(experiment, platform, target): +def pp_run_subtool(experiment, platform, target): """ Start or restart the Cylc workflow identified by: ____ @@ -15,6 +14,10 @@ def _pp_run_subtool(experiment, platform, target): subprocess.run(cmd, shell=True, check=True) @click.command() -def pp_run_subtool(experiment, platform, target): +def _pp_run_subtool(experiment, platform, target): ''' entry point to run for click ''' - return _pp_run_subtool(experiment, platform, target) + return pp_run_subtool(experiment, platform, target) + + +if __name__ == "__main__": + pp_run_subtool() diff --git a/fre/pp/status.py b/fre/pp/status_script.py similarity index 68% rename from fre/pp/status.py rename to fre/pp/status_script.py index 6e2c07e1..b4f579c7 100644 --- a/fre/pp/status.py +++ b/fre/pp/status_script.py @@ -1,10 +1,11 @@ -#!/usr/bin/env python ''' fre pp status ''' import subprocess import click -def _status_subtool(experiment, platform, target): +TIMEOUT_SECS = 120 # raised from 30 + +def status_subtool(experiment, platform, target): """ Report workflow state for the Cylc workflow ____ @@ -12,10 +13,13 @@ def _status_subtool(experiment, platform, target): name = experiment + '__' + platform + '__' + target cmd = f"cylc workflow-state {name}" - subprocess.run(cmd, shell=True, check=True, timeout=30) + subprocess.run(cmd, shell=True, check=True, timeout=TIMEOUT_SECS) @click.command() -def status_subtool(experiment, platform, target): +def _status_subtool(experiment, platform, target): ''' entry point to status for click ''' - return _status_subtool(experiment, platform, target) + return status_subtool(experiment, platform, target) + +if __name__ == "__main__": + status_subtool() diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py index eaf1fc2e..e6391513 100644 --- a/fre/pp/tests/test_configure_script_yaml.py +++ b/fre/pp/tests/test_configure_script_yaml.py @@ -37,7 +37,7 @@ def test_configure_script(): model_yaml = str(Path(f"{test_dir}/{test_yaml}")) # Invoke configure_yaml_script.py - csy.yamlInfo(model_yaml,EXPERIMENT,PLATFORM,TARGET) + csy.yaml_info(model_yaml,EXPERIMENT,PLATFORM,TARGET) # Check for
configuration creation and final combined yaml assert all([Path(f"{out_dir}/{EXPERIMENT}.yaml").exists(), diff --git a/fre/pp/tests/test_rose_quoting.py b/fre/pp/tests/test_rose_quoting.py index bd5353b1..13b3141a 100644 --- a/fre/pp/tests/test_rose_quoting.py +++ b/fre/pp/tests/test_rose_quoting.py @@ -1,7 +1,10 @@ +''' quick tests to make sure rose handles certain types of values with quotes correctly ''' from fre.pp.configure_script_yaml import quote_rose_values def test_boolean(): + ''' check that boolean values with quotes are handled correctly by rose''' assert quote_rose_values(True) == 'True' def test_string(): + ''' check that string values with quotes are handled correctly by rose''' assert quote_rose_values('foo') == "'foo'" diff --git a/fre/pp/trigger_script.py b/fre/pp/trigger_script.py new file mode 100644 index 00000000..14ce4291 --- /dev/null +++ b/fre/pp/trigger_script.py @@ -0,0 +1,22 @@ +''' fre pp trigger ''' + +import subprocess +import click + +def trigger(experiment, platform, target, time): + """ + Trigger the pp-starter task for the time indicated + """ + + name = experiment + '__' + platform + '__' + target + cmd = f"cylc trigger {name}//{time}/pp-starter" + subprocess.run(cmd, shell=True, check=True, timeout=30) + + +@click.command() +def _trigger(experiment, platform, target, time): + ''' entry point to trigger for click ''' + return trigger(experiment, platform, target, time) + +if __name__ == "__main__": + trigger() diff --git a/fre/pp/validate.py b/fre/pp/validate_script.py similarity index 81% rename from fre/pp/validate.py rename to fre/pp/validate_script.py index 9c07340f..d48f5f47 100644 --- a/fre/pp/validate.py +++ b/fre/pp/validate_script.py @@ -1,16 +1,15 @@ -#!/usr/bin/env python ''' fre pp validate ''' import os import subprocess import click -def _validate_subtool(experiment, platform, target): +def validate_subtool(experiment, platform, target): """ Validate the Cylc workflow definition located in ~/cylc-src/____ """ - + go_back_here = os.getcwd() directory = os.path.expanduser('~/cylc-src/' + experiment + '__' + platform + '__' + target) # Change the current working directory @@ -23,8 +22,12 @@ def _validate_subtool(experiment, platform, target): # Validate the Cylc configuration cmd = "cylc validate ." 
subprocess.run(cmd, shell=True, check=True) + os.chdir(go_back_here) @click.command() -def validate_subtool(experiment, platform, target): +def _validate_subtool(experiment, platform, target): ''' entry point to validate for click ''' - return _validate_subtool(experiment, platform, target) + return validate_subtool(experiment, platform, target) + +if __name__ == "__main__": + validate_subtool() diff --git a/fre/pp/wrapper.py b/fre/pp/wrapper.py deleted file mode 100644 index 0f0aad39..00000000 --- a/fre/pp/wrapper.py +++ /dev/null @@ -1,85 +0,0 @@ -""" -frepp.py, a replacement for the frepp bash script located at: -https://gitlab.gfdl.noaa.gov/fre2/system-settings/-/blob/main/bin/frepp -Author: Carolyn.Whitlock -""" - -#todo: -# add relative path import to rest of pp tools -# add command-line args using same format as fre.py -# include arg for pp start / stop -# test yaml path -# error handling - -import os -import time -import click - -# Import from the local packages -from .checkoutScript import _checkoutTemplate -from .configure_script_xml import _convert -from .configure_script_yaml import _yamlInfo -from .validate import _validate_subtool -from .install import _install_subtool -from .run import _pp_run_subtool -from .status import _status_subtool - -@click.command() -def runFre2pp(experiment, platform, target, config_file, branch): - ''' - Wrapper script for calling a FRE2 pp experiment with the canopy-style - infrastructure and fre-cli - time=0000 - ''' - - config_file = os.path.abspath(config_file) - - #env_setup - #todo: check for experiment existing, call frepp_stop to clean experiment, - try: - print("calling _checkoutTemplate") - _checkoutTemplate(experiment, platform, target, branch) - except Exception as err: - raise - - #dumb xml check;does it need to be smarter? - is_xml = config_file[-3:] == "xml" - if is_xml: - #TODO: should this prompt for pp start/stop years? 
- try: - _convert(config_file, platform, target, experiment, do_analysis=False) - #note: arg list for this function is a looooot longer, but all those - #args can be deduced from the xml when given default vals - except Exception as err: - raise - try: - _validate_subtool(experiment, platform, target) - #See notes in main() function - except Exception as err: - raise - else: - try: - _yamlInfo(config_file, experiment, platform, target) - except Exception as err: - raise - - try: - _install_subtool(experiment, platform, target) - except: - raise - - try: - _pp_run_subtool(experiment, platform, target) - except Exception as err: - raise - - #send off a watcher script that reports on how it's going - for n in range(1,12): - try: - _status_subtool(experiment, platform, target) - except Exception as err: - raise - time.sleep(300) - -if __name__ == '__main__': - runFre2pp() diff --git a/fre/pp/wrapper_script.py b/fre/pp/wrapper_script.py new file mode 100644 index 00000000..15a48dea --- /dev/null +++ b/fre/pp/wrapper_script.py @@ -0,0 +1,64 @@ +""" +frepp.py, a replacement for the frepp bash script located at: +https://gitlab.gfdl.noaa.gov/fre2/system-settings/-/blob/main/bin/frepp +Author: Carolyn.Whitlock +""" + +# add relative path import to rest of pp tools +# add command-line args using same format as fre.py +# include arg for pp start / stop +# test yaml path +# error handling + +import os +#import time +import click + +# Import from the local packages +from fre.pp.checkout_script import checkout_template +from fre.pp.configure_script_yaml import yaml_info +from fre.pp.install_script import install_subtool +from fre.pp.run_script import pp_run_subtool +from fre.pp.trigger_script import trigger +from fre.pp.status_script import status_subtool + +def run_all_fre_pp_steps(experiment, platform, target, config_file, branch=None, time=None): + ''' + Wrapper script for calling a FRE2 pp experiment with the canopy-style + infrastructure and fre-cli + ''' + print('(run_all_fre_pp_steps) config_file path resolving...') + config_file = os.path.abspath(config_file) + print(f' config_file={config_file}') + + print('(run_all_fre_pp_steps) calling checkout_template') + checkout_template(experiment, platform, target, branch) + + print('(run_all_fre_pp_steps) calling yaml_info') + yaml_info(config_file, experiment, platform, target) + + print('(run_all_fre_pp_steps) calling install_subtool') + install_subtool(experiment, platform, target) + + print('(run_all_fre_pp_steps) calling pp_run_subtool') + pp_run_subtool(experiment, platform, target) + + if time is not None: + print('(run_all_fre_pp_steps) calling trigger') + trigger(experiment, platform, target, time) + + print('(run_all_fre_pp_steps) calling status_subtool') + status_subtool(experiment, platform, target) + + print('(run_all_fre_pp_steps) done.') + + +@click.command() +def _run_all_fre_pp_steps(experiment, platform, target, config_file, branch, time): + ''' + click entry point for run_all_fre_pp_steps. 
+ ''' + return run_all_fre_pp_steps(experiment, platform, target, config_file, branch, time) + +if __name__ == '__main__': + run_all_fre_pp_steps() diff --git a/fre/pp/wrapperscript b/fre/pp/wrapperscript deleted file mode 100755 index f98d9cd6..00000000 --- a/fre/pp/wrapperscript +++ /dev/null @@ -1,320 +0,0 @@ -#!/bin/bash -set -euo pipefail -set -x - -# https://stackoverflow.com/questions/402377/using-getopts-to-process-long-and-short-command-line-options -TEMP=$(getopt -o x:p:P:T:t:shvc:D:d: --long xml:,platform:,target:,time:,help,mppnccombine-opts:,mail-list: -n 'frepp' -- "$@") -eval set -- "$TEMP" - -# defaults -xml= -platform= -target= -time= -help= - -# arg parsing -while true; do - case "$1" in - # required - -x | --xml ) xml="$2"; shift 2 ;; - -p | -P | --platform ) platform="$2"; shift 2 ;; - -T | --target ) target="$2"; shift 2 ;; - -t | --time ) time="$2"; shift 2 ;; - - # optional - -h | --help ) help=true; shift ;; - - # ignored - -v ) shift ;; - -c ) shift 2 ;; - -D ) shift 2 ;; - -d ) shift 2 ;; - -s ) shift ;; - --mppnccombine-opts ) shift 2 ;; - --mail-list ) shift 2 ;; - - -- ) shift; break ;; - * ) break ;; - esac -done -if [[ -n ${1-} ]]; then - expname=$1 -else - expname= -fi - -# If $FRE_DUALPP is set, then take two different actions -# 1. Append "_canopy" to pp, analysis, and history_refined directories created through the XML converter -# 2. Submit Bronx frepp as well -set +u -if [[ $FRE_DUALPP ]]; then - dual=true -else - dual=false -fi -set -u - -# Help -usage="Usage: frepp --xml=XML --platform=PLATFORM --target=TARGET --time=YYYY EXP" -if [[ $help ]]; then - echo $usage - cat << EOF -################################################################################ -FRE Canopy frepp wrapper to start Canopy postprocessing workflow with -traditional Bronx frepp usage. - -Cylc implementation current settings used by this wrapper: -1. Workflow name is ____ -e.g. use cylc commands such as: - -cylc workflow-state ____ - -This is somewhat overly verbose and also not verbose enough -(i.e. does not include FRE STEM). -If you have suggestions please let the FRE team know. - -2. Will not use unique run directories. -If the run directory exists you will need to remove it before re-installing. - -################################################################################ -What does this script do? -1. If workflow run-dir was previously installed, - start postprocessing for a history file segment: - -- Check if the workflow is running -- Check the task states -- Start cylc scheduler -- Trigger requested processing (-t YYYY) -- Exit - -2. Otherwise, if workflow src-dir does not exist, - configure the postprocessing: - -- Checkout a fresh PP template -- Run the XML converter - -3. Then, install and start the postprocessing for a history file segment -- Run the validation scripts -- Install the workflow -- Start cylc scheduler -- Trigger requested processing (-t YYYY) - -################################################################################ -Recovery steps and scenarios: -1. Something is terribly wrong with PP and you want to reconfigure and try again -- Stop cylc scheduler with "cylc stop --kill " -- Remove run directory with "cylc clean " -- Edit the configuration files in ~/cylc-src/ -- Run frepp again to reinstall and run the updated PP configuration. - -2. Something is terribly wrong and you want a complete fresh start, - or you want an update from the pp template repo. 
-- Stop cylc scheduler with "cylc stop --kill" -- Remove run directory with "cylc clean " -- Remove src directory with "rm -rf ~/cylc-src/" -- Run frepp again to recheckout pp template, run xml converter, and install/run - -################################################################################ -Specific suggestions to recover from task failures: - -1. refineDiag script failures are likely with a XML-converted configs - for two reasons, so you will probably need to either adjust or remove them. - To disable refineDiag, - - set DO_REFINEDIAG=False, and - - comment out HISTORY_DIR_REFINED - -a. It may use something in the XML, using an xml shell variable that does not - exist now. In these cases, you could rewrite the refineDiag script to - not use the xmlDir shell variable or not use the script. - For "refineDiag_atmos_cmip6.csh", it was included in the postprocessing - template checkout with a small modification. Use this location: - '\$CYLC_WORKFLOW_RUN_DIR/etc/refineDiag/refineDiag_atmos_cmip6.csh'. - - set REFINEDIAG_SCRIPTS to that location - -b. It may be a refineDiag script that does not generate .nc files - as it was expected to do. FRE Bronx allows these side-effect refineDiags, - and instead a new mechanism was invented for these scripts that - do not generate netcdf output: - - set DO_PREANALYSIS=True, and - - PREANALYSIS_SCRIPT="/paath/to/script". - -2. Many PP components in Bronx XMLs are doomed (in terms of failing to - produce output and job failing) caused by using history files that do not - exist, but do not cause problems for the other components. Currently, - the Canopy pp template is not robust in terms of this error mode, - so it's best to not process history files that do not exist. - - In the future, diag manager metadata output will provide a catalog - of history output that the validators will check against. For now, - a simple checker exists, but you must manually generate the - history output list ("history-manifest" file). - - Generate the file with a simple listing of the history tarfile. - You can append a history_refined tarfile as well. Then, the validator - will report on PP components you have specified - (PP_COMPONENTS) but that do not exist in the history-manifest file. - - tar -tf /path/to/history/YYYYMMDD.nc.tar | sort > history-manifest - - To run the configuration validation: - -cd ~/cylc-src/ -rose macro --validate - - It is a good idea to not include pp components (PP_COMPONENTS) that - include history files that do not exist. - - In all cases it is recommended to remove validation errors. - See README.md for general configuration instructions. 
-EOF - exit 0 -fi - -# check for all options -if [[ $xml ]]; then - xml=$(readlink -f $xml) - if [[ -f $xml ]]; then - echo "using $xml" - else - echo "XML '$xml' does not exist" - exit 1 - fi -else - echo $usage - exit 1 -fi - -if [[ $platform ]]; then - echo "using $platform" -else - echo $usage - exit 1 -fi - -if [[ $target ]]; then - echo "using $target" -else - echo $usage - exit 1 -fi - -if [[ $time ]]; then - echo "using $time" -else - echo $usage - exit 1 -fi - -if [[ $expname ]]; then - echo "using $expname" -else - echo $usage - exit 1 -fi - -cylc --version -if cylc cycle-point $time; then - time_iso=$(cylc cycle-point $time --template CCYYMMDDT0000Z) -else - echo "Time '$time' not a valid ISO8601 date" - exit 1 -fi - -# Start bronx dual-pp -if [[ $dual == true ]]; then - $FRE_COMMANDS_HOME/bin/frepp -x $xml -P $platform -T $target -t $time -D '' $expname -v -s -fi - -# Set the cylc workflow name to __ -# use the default workflow source convention -name=${expname}__${platform}__$target -rundir="$HOME/cylc-run/$name" -srcdir="$HOME/cylc-src/$name" -echo Workflow name: $name -echo Run directory: $rundir -echo Src directory: $srcdir - -# Start postprocessing for a history file segment (workflow was previously installed) -if [[ -d $rundir ]]; then - echo "Run directory '$rundir' exists, so will now try to start it" - cylc scan - cylc workflow-state $name - if cylc workflow-state $name | grep failed; then - cat << EOF -################################################################################ -Unfortunately, there are failed tasks, probably caused by refineDiag errors -or try to use a history file that does not exist. - -While Cylc workflows can be configured to handle failure gracefully, -this workflow is not yet set to do this, so currently it's recommended -to reconfigure your postprocessing to remove task errors. - -For some suggestions to recover from the above most common errors, see: - -frepp --help -################################################################################ -EOF - fi - # sometimes this hangs for unknown reasons - # So for now we'll add --debug to try to diagnose it, and - # use /bin/timeout to exit after 10 min - timeout 10m cylc play --debug $name - sleep 20 - cylc trigger $name//$time_iso/pp-starter - exit 0 -fi - -# Checkout postprocessing template and configure -if [[ ! -d $srcdir ]]; then - echo "Workflow source directory '$srcdir' does not exist, so will now try to checkout template" - - # checkout - mkdir -p $HOME/cylc-src - cd $HOME/cylc-src - # try to reduce checkout size with depth=1 - #git clone --depth=1 --recursive git@gitlab.gfdl.noaa.gov:fre2/workflows/postprocessing.git $name - git clone --depth=1 --recursive https://gitlab.gfdl.noaa.gov/fre2/workflows/postprocessing.git $name - - # xml converter - cd $srcdir - if [[ $dual == true ]]; then - bin/fre-bronx-to-canopy.py -x $xml -p $platform -t $target -e $expname -v --dual - else - bin/fre-bronx-to-canopy.py -x $xml -p $platform -t $target -e $expname -v - fi -fi - -# validate configuration -cd $srcdir -if ! rose macro --validate; then - cat << EOF -################################################################################ -Configuration may not be valid. - -In general, Canopy configurations should pass all available validation scripts. -To run them, - -cd $HOME/cylc-src/$name -rose macro --validate - -Most validation errors reflect configurations problems that should be corrected. -The exceptions are: -1. PP_DIR will be created if it does not exist -2. 
HISTORY_DIR_REFINED will be created if it does not exist, - assuming DO_REFINEDIAG is also set - -See README.md for general configuration instructions. -################################################################################ -EOF -fi -cylc validate . - -# Install -cylc install --no-run-name $name - -# Start -cylc play $name -sleep 20 -cylc trigger $name//$time_iso/pp-starter -exit 0 diff --git a/fre/pytest.ini b/fre/pytest.ini index 131a2523..e8822023 100644 --- a/fre/pytest.ini +++ b/fre/pytest.ini @@ -9,6 +9,9 @@ testpaths = fre/pp/tests # fre/run/tests # fre/test/tests -# fre/yamltools/tests + fre/yamltools/tests # fre/app/tests fre/app/generate_time_averages/tests + fre/app/regrid_xy/tests + fre/analysis/tests + diff --git a/fre/tests/test_fre_analysis_cli.py b/fre/tests/test_fre_analysis_cli.py new file mode 100644 index 00000000..39be0235 --- /dev/null +++ b/fre/tests/test_fre_analysis_cli.py @@ -0,0 +1,102 @@ +"""Test fre analysis cli.""" +from click.testing import CliRunner + +from fre import fre + + +runner = CliRunner() + + +def test_cli_fre_analysis(): + """Most basic invocation of fre analysis.""" + result = runner.invoke(fre.fre, args=["analysis"]) + assert result.exit_code == 0 + + +def test_cli_fre_analysis_help(): + """Make sure fre analysis --help runs.""" + result = runner.invoke(fre.fre, args=["analysis", "--help"]) + assert result.exit_code == 0 + + +def test_cli_fre_analysis_install_help(): + """Make sure fre analysis install --help runs.""" + result = runner.invoke(fre.fre, args=["analysis", "install", "--help"]) + assert result.exit_code == 0 + + +def test_cli_fre_analysis_install_missing_url(): + """Missing the fre analysis install --url argument.""" + result = runner.invoke(fre.fre, args=["analysis", "install"]) + assert result.exit_code == 2 + + +def test_cli_fre_analysis_install_unknown_argument(): + """Using an unknown argument with fre analysis install.""" + result = runner.invoke(fre.fre, args=["analysis", "install", "bad-arg",]) + assert result.exit_code == 2 + + +def test_cli_fre_analysis_run_help(): + """Make sure fre analysis run --help runs.""" + result = runner.invoke(fre.fre, args=["analysis", "run", "--help"]) + assert result.exit_code == 0 + + +def test_cli_fre_analysis_run_missing_name(): + """Missing the fre analysis run --name argument.""" + result = runner.invoke(fre.fre, args=["analysis", "run"]) + assert result.exit_code == 2 + + +def test_cli_fre_analysis_run_missing_catalog(): + """Missing the fre analysis run --catalog argument.""" + result = runner.invoke(fre.fre, args=["analysis", "run", "--name", "name"]) + assert result.exit_code == 2 + + +def test_cli_fre_analysis_run_missing_output_directory(): + """Missing the fre analysis run --output-directory argument.""" + args = ["analysis", "run", "--name", "name", "--catalog", "catalog"] + result = runner.invoke(fre.fre, args=args) + assert result.exit_code == 2 + + +def test_cli_fre_analysis_run_missing_output_yaml(): + """Missing the fre analysis run --output-yaml argument.""" + args = ["analysis", "run", "--name", "name", "--catalog", "catalog", + "--output-directory", "dir",] + result = runner.invoke(fre.fre, args=args) + assert result.exit_code == 2 + + +def test_cli_fre_analysis_run_missing_experiment_yaml(): + """Missing the fre analysis run --experiment-yaml argument.""" + args = ["analysis", "run", "--name", "name", "--catalog", "catalog", + "--output-directory", "dir", "--output-yaml", "yaml"] + result = runner.invoke(fre.fre, args=args) + assert result.exit_code == 2 + + +def 
test_cli_fre_analysis_run_unknown_argument(): + """Using an unknown argument with fre analysis run.""" + result = runner.invoke(fre.fre, args=["analysis", "run", "bad-arg",]) + assert result.exit_code == 2 + + +def test_cli_fre_analysis_uninstall_help(): + """Make sure fre analysis uninstall --help runs.""" + result = runner.invoke(fre.fre, args=["analysis", "uninstall", "--help"]) + assert result.exit_code == 0 + + +def test_cli_fre_analysis_uninstall_missing_name(): + """Missing the fre analysis uninstall --name argument.""" + result = runner.invoke(fre.fre, args=["analysis", "uninstall"]) + assert result.exit_code == 2 + + +def test_cli_fre_analysis_uninstall_unknown_argument(): + """Using an unknown argument with fre analysis uninstall.""" + result = runner.invoke(fre.fre, args=["analysis", "uninstall", "bad-arg",]) + assert result.exit_code == 2 diff --git a/fre/tests/test_fre_cli.py b/fre/tests/test_fre_cli.py index 5c5bdb12..e7bad1d0 100644 --- a/fre/tests/test_fre_cli.py +++ b/fre/tests/test_fre_cli.py @@ -20,3 +20,15 @@ def test_cli_fre_option_dne(): ''' fre optionDNE ''' result = runner.invoke(fre.fre, args='optionDNE') assert result.exit_code == 2 + + +def test_fre_version(): + ''' module import flavor of below cli test ''' + assert '2024.01' == fre.version + +def test_cli_fre_version(): + ''' fre --version ''' + result = runner.invoke(fre.fre, args='--version') + expected_out = 'fre, version 2024.01' + assert all( [ result.exit_code == 0, + expected_out in result.stdout.split('\n') ] ) diff --git a/fre/tests/test_fre_pp_cli.py b/fre/tests/test_fre_pp_cli.py index 399c8e3e..e60d012d 100644 --- a/fre/tests/test_fre_pp_cli.py +++ b/fre/tests/test_fre_pp_cli.py @@ -1,5 +1,9 @@ ''' test "fre pp" calls ''' +import os +import shutil +from pathlib import Path + from click.testing import CliRunner from fre import fre @@ -39,6 +43,18 @@ def test_cli_fre_pp_checkout_opt_dne(): result = runner.invoke(fre.fre, args=["pp", "checkout", "optionDNE"]) assert result.exit_code == 2 +def test_cli_fre_pp_checkout_case(): + ''' fre pp checkout -e FOO -p BAR -t BAZ''' + directory = os.path.expanduser("~/cylc-src")+'/FOO__BAR__BAZ' + if Path(directory).exists(): + shutil.rmtree(directory) + result = runner.invoke(fre.fre, args=["pp", "checkout", + "-e", "FOO", + "-p", "BAR", + "-t", "BAZ"] ) + assert all( [ result.exit_code == 0, + Path(directory).exists()] ) + #-- fre pp configure-xml def test_cli_fre_pp_configure_xml(): ''' fre pp configure-xml ''' @@ -71,6 +87,18 @@ def test_cli_fre_pp_configure_yaml_opt_dne(): result = runner.invoke(fre.fre, args=["pp", "configure-yaml", "optionDNE"]) assert result.exit_code == 2 +def test_cli_fre_pp_configure_yaml_fail1(): + ''' fre pp configure-yaml ''' + result = runner.invoke(fre.fre, args = [ "pp", "configure-yaml", + "-e", "FOO", + "-p", "BAR", + "-t", "BAZ", + "-y", "BOO" ] ) + assert all( [ result.exit_code == 1, + isinstance(result.exception, FileNotFoundError ) + ] ) + + #-- fre pp install def test_cli_fre_pp_install(): ''' fre pp install ''' diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index b2b6540f..caf7cad0 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -94,13 +94,15 @@ def experiment_check(mainyaml_dir,comb,experiment): ey=Path(os.path.join(mainyaml_dir,e)) ey_path.append(ey) else: - raise ValueError("Incorrect experiment yaml path given; does not exist.") + raise ValueError(f"Experiment yaml path given ({e}) does not exist.") else: raise ValueError("No experiment yaml path 
given!") if analysisyaml is not None: ay_path=[] for a in analysisyaml: + # prepend the directory containing the yaml + a = Path(mainyaml_dir, a) if Path(a).exists(): ay=Path(os.path.join(mainyaml_dir,a)) ay_path.append(ay) @@ -113,185 +115,192 @@ def experiment_check(mainyaml_dir,comb,experiment): ## COMPILE CLASS ## class init_compile_yaml(): - def __init__(self,yamlfile,platform,target): - """ - Process to combine yamls applicable to compilation - """ - self.yml = yamlfile - self.name = yamlfile.split(".")[0] - self.namenopath = self.name.split("/")[-1].split(".")[0] - self.platform = platform - self.target = target + """ class holding routines for initializing compilation yamls """ + def __init__(self,yamlfile,platform,target): + """ + Process to combine yamls applicable to compilation + """ + self.yml = yamlfile + self.name = yamlfile.split(".")[0] + self.namenopath = self.name.split("/")[-1].split(".")[0] + self.platform = platform + self.target = target + + # Register tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + + # Name of the combined yaml + base_name=f"combined-{self.namenopath}.yaml" + self.combined = base_name if len(self.mainyaml_dir) == 0 else f"{self.mainyaml_dir}/{base_name}" + + print("Combining yaml files: ") + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + """ + # copy model yaml info into combined yaml + with open(self.combined,'w+',encoding='UTF-8') as f1: + f1.write(f'name: &name "{self.name}"\n') + f1.write(f'platform: &platform "{self.platform}"\n') + f1.write(f'target: &target "{self.target}"\n\n') + try: + with open(self.yml,'r',encoding='UTF-8') as f2: + f1.write("### MODEL YAML SETTINGS ###\n") + shutil.copyfileobj(f2,f1) + except Exception as exc: + raise FileNotFoundError(f'{self.yml} not found') from exc + print(f" model yaml: {self.yml}") + + def combine_compile(self): + """ + Combine compile yaml with the defined combined.yaml + """ + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) + + # copy compile yaml info into combined yaml + if cy_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(cy_path,'r',encoding='UTF-8') as f2: + f1.write("\n### COMPILE INFO ###\n") + shutil.copyfileobj(f2,f1) + print(f" compile yaml: {cy_path}") - # Register tag handler - yaml.add_constructor('!join', join_constructor) + def combine_platforms(self): + """ + Combine platforms yaml with the defined combined.yaml + """ + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) - # Path to the main model yaml - self.mainyaml_dir = os.path.dirname(self.yml) + # combine platform yaml + if py_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(py_path,'r',encoding='UTF-8') as f2: + f1.write("\n### PLATFORM INFO ###\n") + shutil.copyfileobj(f2,f1) + print(f" platforms yaml: {py_path}") - # Name of the combined yaml - self.combined= f"combined-{self.namenopath}.yaml" if len(self.mainyaml_dir) == 0 else f"{self.mainyaml_dir}/combined-{self.namenopath}.yaml" + def clean_yaml(self): + """ + Clean the yaml; remove unnecessary sections in + final combined yaml.
+        """
+        # Load the fully combined yaml
+        full_yaml = yaml_load(self.combined)
 
-    print("Combining yaml files: ")
+        # Clean the yaml
+        # If keys exist, delete:
+        keys_clean=["fre_properties", "shared", "experiments"]
+        for kc in keys_clean:
+            if kc in full_yaml.keys():
+                del full_yaml[kc]
 
-    def combine_model(self):
-        """
-        Create the combined.yaml and merge it with the model yaml
-        """
-        # copy model yaml info into combined yaml
-        with open(self.combined,'w+',encoding='UTF-8') as f1:
-            f1.write(f'name: &name "{self.name}"\n')
-            f1.write(f'platform: &platform "{self.platform}"\n')
-            f1.write(f'target: &target "{self.target}"\n\n')
-            with open(self.yml,'r',encoding='UTF-8') as f2:
-                f1.write("### MODEL YAML SETTINGS ###\n")
-                shutil.copyfileobj(f2,f1)
-
-        print(f" model yaml: {self.yml}")
-
-    def combine_compile(self):
-        """
-        Combine compile yaml with the defined combined.yaml
-        """
-        # Get compile info
-        (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined)
-
-        # copy compile yaml info into combined yaml
-        if cy_path is not None:
-            with open(self.combined,'a',encoding='UTF-8') as f1:
-                with open(cy_path,'r',encoding='UTF-8') as f2:
-                    f1.write("\n### COMPILE INFO ###\n")
-                    shutil.copyfileobj(f2,f1)
-            print(f" compile yaml: {cy_path}")
-
-    def combine_platforms(self):
-        """
-        Combine platforms yaml with the defined combined.yaml
-        """
-        # Get compile info
-        (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined)
-
-        # combine platform yaml
-        if py_path is not None:
-            with open(self.combined,'a',encoding='UTF-8') as f1:
-                with open(py_path,'r',encoding='UTF-8') as f2:
-                    f1.write("\n### PLATFORM INFO ###\n")
-                    shutil.copyfileobj(f2,f1)
-            print(f" platforms yaml: {py_path}")
-
-    def clean_yaml(self):
-        """
-        Clean the yaml; remove unnecessary sections in
-        final combined yaml.
-        """
-        # Load the fully combined yaml
-        full_yaml = yaml_load(self.combined)
-
-        # Clean the yaml
-        # If keys exists, delete:
-        keys_clean=["fre_properties", "shared", "experiments"]
-        for kc in keys_clean:
-            if kc in full_yaml.keys():
-                del full_yaml[kc]
-
-        with open(self.combined,'w',encoding='UTF-8') as f:
-            yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False)
-
-        print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}")
-        return self.combined
+        with open(self.combined,'w',encoding='UTF-8') as f:
+            yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False)
+
+        print(f"Combined yaml located here: {os.path.abspath(self.combined)}")
+        return self.combined
 
 ## PP CLASS ##
 class init_pp_yaml():
-    def __init__(self,yamlfile,experiment,platform,target):
-        """
-        Process to combine the applicable yamls for post-processing
-        """
-        self.yml = yamlfile
-        self.name = experiment
-        self.platform = platform
-        self.target = target
-
-        # Regsiter tag handler
-        yaml.add_constructor('!join', join_constructor)
-
-        # Path to the main model yaml
-        self.mainyaml_dir = os.path.dirname(self.yml)
-
-        # Name of the combined yaml
-        self.combined=f"combined-{self.name}.yaml"
-
-        print("Combining yaml files: ")
-
-    def combine_model(self):
-        """
-        Create the combined.yaml and merge it with the model yaml
-        """
-        # copy model yaml info into combined yaml
-        with open(self.combined,'w+',encoding='UTF-8') as f1:
-            f1.write(f'name: &name "{self.name}"\n')
-            f1.write(f'platform: &platform "{self.platform}"\n')
-            f1.write(f'target: &target "{self.target}"\n\n')
-            with open(self.yml,'r',encoding='UTF-8') as f2:
-                f1.write("### MODEL YAML SETTINGS ###\n")
-                shutil.copyfileobj(f2,f1)
-
-        print(f" model yaml: {self.yml}")
-
-    def combine_experiment(self):
-        """
-        Combine experiment yamls with the defined combined.yaml
-        """
-        # Experiment Check
-        (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name)
-
-        ## COMBINE EXPERIMENT YAML INFO
-        if ey_path is not None:
-            for i in ey_path:
-                #expyaml_path = os.path.join(mainyaml_dir, i)
-                with open(self.combined,'a',encoding='UTF-8') as f1:
-                    with open(i,'r',encoding='UTF-8') as f2:
-                        #copy expyaml into combined
-                        shutil.copyfileobj(f2,f1)
-                print(f" experiment yaml: {i}")
-
-    def combine_analysis(self):
-        """
-        Combine analysis yamls with the defined combined.yaml
-        """
-        # Experiment Check
-        (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name)
-
-        ## COMBINE EXPERIMENT YAML INFO
-        if ay_path is not None:
-            for i in ay_path:
-                #analysisyaml_path = os.path.join(mainyaml_dir, i)
-                with open(self.combined,'a',encoding='UTF-8') as f1:
-                    with open(i,'r',encoding='UTF-8') as f2:
-                        #f1.write(f"\n### {i.upper()} settings ###\n")
-                        #copy expyaml into combined
+    """ class holding routines for initializing post-processing yamls """
+    def __init__(self,yamlfile,experiment,platform,target):
+        """
+        Process to combine the applicable yamls for post-processing
+        """
+        self.yml = yamlfile
+        self.name = experiment
+        self.platform = platform
+        self.target = target
+
+        # Register tag handler
+        yaml.add_constructor('!join', join_constructor)
+
+        # Path to the main model yaml
+        self.mainyaml_dir = os.path.dirname(self.yml)
+
+        # Name of the combined yaml
+        self.combined=f"combined-{self.name}.yaml"
+
+        print("Combining yaml files: ")
+
+    def combine_model(self):
+        """
+        Create the combined.yaml and merge it with the model yaml
+        """
+        # copy model yaml info into combined yaml
+        with open(self.combined,'w+',encoding='UTF-8') as f1:
+            f1.write(f'name: &name "{self.name}"\n')
+            f1.write(f'platform: &platform "{self.platform}"\n')
+            f1.write(f'target: &target "{self.target}"\n\n')
+            try:
+                with open(self.yml,'r',encoding='UTF-8') as f2:
+                    f1.write("### MODEL YAML SETTINGS ###\n")
                     shutil.copyfileobj(f2,f1)
-                print(f" analysis yaml: {i}")
-
-    def clean_yaml(self):
-        """
-        Clean the yaml; remove unnecessary sections in
-        final combined yaml.
-        """
-        # Load the fully combined yaml
-        full_yaml = yaml_load(self.combined)
-
-        # Clean the yaml
-        # If keys exists, delete:
-        keys_clean=["fre_properties", "shared", "experiments"]
-        for kc in keys_clean:
-            if kc in full_yaml.keys():
-                del full_yaml[kc]
-
-        with open(self.combined,'w') as f:
-            yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False)
-
-        print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}")
-        return self.combined
+            except Exception as exc:
+                raise FileNotFoundError(f'{self.yml} not found') from exc
+        print(f" model yaml: {self.yml}")
+
+    def combine_experiment(self):
+        """
+        Combine experiment yamls with the defined combined.yaml
+        """
+        # Experiment Check
+        (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name)
+
+        ## COMBINE EXPERIMENT YAML INFO
+        if ey_path is not None:
+            for i in ey_path:
+                #expyaml_path = os.path.join(mainyaml_dir, i)
+                with open(self.combined,'a',encoding='UTF-8') as f1:
+                    with open(i,'r',encoding='UTF-8') as f2:
+                        #copy expyaml into combined
+                        shutil.copyfileobj(f2,f1)
+                print(f" experiment yaml: {i}")
+
+    def combine_analysis(self):
+        """
+        Combine analysis yamls with the defined combined.yaml
+        """
+        # Experiment Check
+        (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name)
+
+        ## COMBINE ANALYSIS YAML INFO
+        if ay_path is not None:
+            for i in ay_path:
+                #analysisyaml_path = os.path.join(mainyaml_dir, i)
+                with open(self.combined,'a',encoding='UTF-8') as f1:
+                    with open(i,'r',encoding='UTF-8') as f2:
+                        #f1.write(f"\n### {i.upper()} settings ###\n")
+                        #copy analysis yaml into combined
+                        shutil.copyfileobj(f2,f1)
+                print(f" analysis yaml: {i}")
+
+    def clean_yaml(self):
+        """
+        Clean the yaml; remove unnecessary sections in
+        final combined yaml.
+        """
+        # Load the fully combined yaml
+        full_yaml = yaml_load(self.combined)
+
+        # Clean the yaml
+        # If keys exist, delete:
+        keys_clean=["fre_properties", "shared", "experiments"]
+        for kc in keys_clean:
+            if kc in full_yaml.keys():
+                del full_yaml[kc]
+
+        with open(self.combined,'w',encoding='UTF-8') as f:
+            yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False)
+
+        print(f"Combined yaml located here: {os.path.abspath(self.combined)}")
+        return self.combined
 
 ## Functions to combine the yaml files ##
 def get_combined_compileyaml(comb):
diff --git a/fre/yamltools/freyamltools.py b/fre/yamltools/freyamltools.py
index 3ca3ba93..97a06721 100644
--- a/fre/yamltools/freyamltools.py
+++ b/fre/yamltools/freyamltools.py
@@ -1,21 +1,12 @@
 ''' fre yamltools '''
 
 import click
 
-from .freyamltoolsexample import yamltools_test_function
 from .combine_yamls import _consolidate_yamls
 
 @click.group(help=click.style(" - access fre yamltools subcommands", fg=(202,177,95)))
 def yamltools_cli():
     ''' entry point to fre yamltools click commands '''
 
-@yamltools_cli.command()
-@click.option('--uppercase', '-u', is_flag=True, help = 'Print statement in uppercase.')
-@click.pass_context
-def function(context, uppercase):
-    # pylint: disable=unused-argument
-    """
-    Execute fre yamltools test """
-    context.forward(yamltools_test_function)
-
 @yamltools_cli.command()
 @click.option("-y",
               "--yamlfile",
@@ -42,6 +33,7 @@ def function(context, uppercase):
               required=True)
 @click.pass_context
 def combine_yamls(context,yamlfile,experiment,platform,target,use):
+    # pylint: disable=unused-argument
     """
     - Combine the model yaml with the compile, platform, experiment, and analysis yamls
diff --git a/fre/yamltools/freyamltoolsexample.py b/fre/yamltools/freyamltoolsexample.py
deleted file mode 100644
index e86fb206..00000000
--- a/fre/yamltools/freyamltoolsexample.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""
-experimentation file for integrating one file's functions into main prototype fre file
-authored by Bennett.Chang@noaa.gov | bcc2761
-NOAA | GFDL
-"""
-
-import click
-
-@click.command()
-def yamltools_test_function(uppercase=None):
-    """Execute fre list testfunction2."""
-    statement = "testingtestingtestingtesting"
-    if uppercase:
-        statement = statement.upper()
-    click.echo(statement)
-
-if __name__ == '__main__':
-    yamltools_test_function()
diff --git a/fre/yamltools/tests/test_combine_yamls.py b/fre/yamltools/tests/test_combine_yamls.py
index 129f0b85..7df6eb36 100644
--- a/fre/yamltools/tests/test_combine_yamls.py
+++ b/fre/yamltools/tests/test_combine_yamls.py
@@ -16,6 +16,7 @@
 #CWD = Path.cwd()
 TEST_DIR = Path("fre/yamltools/tests")
 IN_DIR = Path(f"{TEST_DIR}/AM5_example")
+SCHEMA_DIR = Path("fre/gfdl_msd_schemas/FRE")
 
 # Create output directories
 COMP_OUT_DIR = Path(f"{TEST_DIR}/combine_yamls_out/compile")
@@ -81,8 +82,8 @@ def test_combined_compileyaml_validation():
     Validate the combined compile yaml
     """
     combined_yamlfile =f"{COMP_OUT_DIR}/combined-{COMP_EXPERIMENT}.yaml"
-    schema_file = os.path.join(f"{IN_DIR}","compile_yamls","schema.json")
-
+    schema_file = os.path.join(SCHEMA_DIR, "fre_make.json")
+
     with open(combined_yamlfile,'r') as cf:
         yml = yaml.safe_load(cf)
 
@@ -134,7 +135,7 @@ def test_combined_compileyaml_validatefail():
 
     # Validate against schema; should fail
     wrong_combined = Path(f"{COMP_OUT_DIR}/combined-am5-wrong_datatype.yaml")
-    schema_file = os.path.join(f"{IN_DIR}","compile_yamls","schema.json")
+    schema_file = os.path.join(SCHEMA_DIR, "fre_make.json")
 
     # Open/load combined yaml file
    with open(wrong_combined,'r') 
as cf: diff --git a/ideas.md b/ideas.md deleted file mode 100644 index 5862d04b..00000000 --- a/ideas.md +++ /dev/null @@ -1,34 +0,0 @@ -# Ideas for Implementation: - -## Helpful Click Decorators & Utilities -* click's `--help` option will be ideal for users -* `click.option()`: this will be very useful for flags that may be used - - will be able to use commands like `is_flag`, `flag_value`, `count`, `help`, etc. -* `click.group()`: this will allow FRE to be broken up into parts and subparts for each part - - will be able to use commands like `add_command` -* `click.progressbar()`: potential for the user to see progress while something runs like `fre run` -* `click.confirm()`: potential for the user to verify actions and proceed -* `click.style()`: can style text with wanted configurations if needed (can use click.secho()) -* `click.pause()`: stops executing current command and waits for user input to continue -* `click.pass_context`: allows use of `context.forward(command)` and `context.invoke(command, args)` for discouraged yet possible invocation of commands from another command, probably what is going to be the solution to running all of something like `fre make` at once - -## Potential Errors -* `click.confirm()` actions will be hard for users to script - -## Questions for Users/Devs -* do we want to use flags (`click.option()`), confirmations (`click.confirm()`), or a mix of both to allow users to run what they want, how they want? - - this means that users can either use certain flags (i.e `--execute`), which will be included and explained in the `--help` feature, or they will just be prompted for what features they want and can decide if they want it with [y/N] - -## Things to Consider/Implement -* use of classes, arguments (necessary) vs. flags (optional) - - arguments can be used for specific cases; i.e need to parse specific file -* NOAA GFDL Conda channel to get this into userspace (Conda > pip/venv) - -## Required Changes to Make -* `fre pp configure -y file.yaml` only works when inside folder containing schema at the moment -* want to polish up .gitignore file -* deployment via GitLab -* is there a way to check that all python dependencies needed by fre-cli are available in the current python envioronment? Like "python fre.py" or something? - -## Potential Additional Uses for Click -* program using BeautifulSoup to scrape GFDL pages for immediate tutorial guidance after prompting for GFDL login \ No newline at end of file diff --git a/meta.yaml b/meta.yaml index 07f76686..71c8b213 100644 --- a/meta.yaml +++ b/meta.yaml @@ -5,13 +5,10 @@ package: version: '{{ environ.get("GIT_DESCRIBE_TAG", data.get("version")) }}' source: - path: . -# ideally we want this git_url path, but it messes with conda publish -# where it builds only the sourced url and not local/branch changes -# git_url: https://github.com/NOAA-GFDL/fre-cli + git_url: https://github.com/NOAA-GFDL/fre-cli.git build: - script: + script: - {{ PYTHON }} -m pip install . 
-vv number: 1 noarch: python @@ -33,6 +30,7 @@ requirements: - jsonschema - noaa-gfdl::catalogbuilder - noaa-gfdl::fre-nctools + - noaa-gfdl::analysis_scripts - conda-forge::nccmp - conda-forge::cylc-flow>=8.2.0 - conda-forge::cylc-rose @@ -45,47 +43,41 @@ requirements: - conda-forge::cdo>=2.0.0 test: - source_files: + requires: + - pip + - pylint + - pytest + - pytest-cov + source_files: - fre/ imports: - - click - - pytest - - pylint - fre - - fre.pp - - fre.pp.install - - fre.pp.status - - fre.pp.run - - fre.pp.validate - - fre.make - fre.app - - fre.cmor - fre.catalog + # - fre.check + - fre.cmor + # - fre.list + - fre.make + - fre.pp + # - fre.run + # - fre.test + - fre.yamltools commands: - pylint --max-args 6 -ry --ignored-modules netCDF4,cmor fre/ || echo "pylint returned non-zero exit code and will kill the workflow. guarding against this now." - - pip install GitPython && pytest --config-file=fre/pytest.ini --cov-config=fre/coveragerc --cov=fre fre/ + # run pytest but ignore any tests that require compilation + - pip install GitPython && pytest --ignore=fre/make/tests/compilation --config-file=fre/pytest.ini --cov-report term-missing --cov-config=fre/coveragerc --cov=fre fre/ - fre --help - - fre pp --help - - fre pp install --help - - fre pp status --help - - fre pp run --help - - fre pp validate --help - - fre make --help - - fre make create-checkout --help - - fre make create-compile --help - - fre make create-dockerfile --help - - fre make create-makefile --help - - fre make run-fremake --help - fre app --help - - fre app mask-atmos-plevel --help + - fre catalog --help + # - fre check --help - fre cmor --help - - fre cmor run --help + # - fre list --help + - fre make --help + - fre pp --help + # - fre run --help + # - fre test --help + - fre yamltools --help - requires: - - pip - - pylint - - pytest - - pytest-cov about: home: https://github.com/NOAA-GFDL/fre-cli diff --git a/mkmf b/mkmf new file mode 160000 index 00000000..9830f1ac --- /dev/null +++ b/mkmf @@ -0,0 +1 @@ +Subproject commit 9830f1ac08566ec94e6b28555c921df28b6d0fea
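
Reviewer note on the !join tag: both init_compile_yaml.__init__ and init_pp_yaml.__init__ above call yaml.add_constructor('!join', join_constructor) before any files are combined. The constructor itself is defined elsewhere in combine_yamls.py, so the snippet below is a minimal sketch of the usual PyYAML string-concatenation pattern rather than a copy of the real implementation; the document and paths in the example are made up for illustration.

    import yaml

    def join_constructor(loader, node):
        # build the tagged sequence, then concatenate its elements into one string
        seq = loader.construct_sequence(node)
        return ''.join(str(part) for part in seq)

    # register the handler for the !join tag, as the __init__ methods above do
    yaml.add_constructor('!join', join_constructor)

    doc = (
        'name: &name "am5"\n'
        'platform: &platform "ncrc5.intel"\n'
        'build_dir: !join [/some/root/, *name, "/", *platform]\n'
    )
    print(yaml.load(doc, Loader=yaml.Loader)['build_dir'])
    # expected output: /some/root/am5/ncrc5.intel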
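Reviewer note on the schema move: test_combine_yamls.py now validates combined compile yamls against fre_make.json from the gfdl_msd_schemas submodule instead of the old per-test schema.json, so the suite needs a checkout made with git clone --recursive (or git submodule update --init). Below is a hedged sketch of the jsonschema check those two tests perform; the file paths are illustrative assumptions, not values taken from the tests verbatim.

    import json

    import yaml
    from jsonschema import validate, ValidationError

    # illustrative paths; fre_make.json ships in the gfdl_msd_schemas submodule
    schema_file = "fre/gfdl_msd_schemas/FRE/fre_make.json"
    combined_yamlfile = "fre/yamltools/tests/combine_yamls_out/compile/combined-am5.yaml"

    with open(combined_yamlfile, 'r', encoding='UTF-8') as cf:
        yml = yaml.safe_load(cf)
    with open(schema_file, 'r', encoding='UTF-8') as sf:
        schema = json.load(sf)

    try:
        validate(instance=yml, schema=schema)
        print("combined yaml validates against fre_make.json")
    except ValidationError as err:
        # test_combined_compileyaml_validatefail expects this failing branch
        print(f"validation failed: {err.message}")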