diff --git a/.github/workflows/create_test_conda_env.yml b/.github/workflows/create_test_conda_env.yml index 25d382a5..18ff20f0 100644 --- a/.github/workflows/create_test_conda_env.yml +++ b/.github/workflows/create_test_conda_env.yml @@ -1,6 +1,6 @@ name: create_test_conda_env -on: [push] +on: [pull_request] jobs: build-linux: diff --git a/docs/index.rst b/docs/index.rst index 9c65e98a..07dcbeb9 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -11,7 +11,7 @@ Welcome to ``fre-cli``'s documentation! .. the entry in the toc must be the .rst filename. what shows in the webpage is the first header or title .. toctree:: - :maxdepth: 1 + :maxdepth: 2 :caption: Contents: what-is-fre diff --git a/docs/tool_guides.rst b/docs/tool_guides.rst deleted file mode 100644 index 9a6459db..00000000 --- a/docs/tool_guides.rst +++ /dev/null @@ -1,156 +0,0 @@ -.. NEEDS UPDATING #TODO -============= -Tool Guides -============= - -Guides for the process in which subtools are used with tools. - - -``fre app`` -============ - -``fre catalog`` -============ - -``fre cmor`` -============ - -.. _fre-make-guide: - -``fre make guide`` -============ - -1. Bare-metal Build: - -.. code-block:: - - # Create checkout script - fre make create-checkout -y [model yaml file] -p [platform] -t [target] - - # Create and run checkout script - fre make create-checkout -y [model yaml file] -p [platform] -t [target] --execute - - # Create Makefile - fre make create-makefile -y [model yaml file] -p [platform] -t [target] - - # Creat the compile script - fre make create-compile -y [model yaml file] -p [platform] -t [target] - - # Create and run the compile script - fre make create-compile -y [model yaml file] -p [platform] -t [target] --execute - - # Run all of fremake - fre make run-fremake -y [model yaml file] -p [platform] -t [target] [other options...] - -2. Container Build: - -For the container build, parallel checkouts are not supported, so the `-npc` options must be used for the checkout script. In addition the platform must be a container platform. - -Users will not be able to create containers unless they have podman access on gaea. - -.. code-block:: - - # Create checkout script - fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] -npc - - # Create and run checkout script - fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] --execute - - # Create Makefile - fre make create-makefile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] - - # Create a Dockerfile - fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] - - # Create and run the Dockerfile - fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] --execute - - -**Quickstart** - -1. Bare-metal Build: - -.. code-block:: - - # Create checkout script - fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod - - # Create and run checkout script - fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod --execute - - # Create Makefile - fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod - - # Create the compile script - fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod - - # Create and run the compile script - fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod --execute - -2. Bare-metal Build Multi-target: - -.. 
code-block:: - - # Create checkout script - fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug - - # Create and run checkout script - fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute - - # Create Makefile - fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod -t debug - - # Create the compile script - fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug - - # Create and run the compile script - fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute - -3. Container Build: - -In order for the container to build successfully, a `-npc`, or `--no-parallel-checkout` is needed. - -.. code-block:: - - # Create checkout script - fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc - - # Create and run checkout script - fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc --execute - - # Create Makefile - fre make create-makefile -y am5.yaml -p hpcme.2023 -t prod - - # Create Dockerfile - fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod - - # Create and run the Dockerfile - fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod --execute - -4. Run all of fremake: - -.. code-block:: - - # Bare-metal - fre make run-fremake -y am5.yaml -p ncrc5.intel23 -t prod - - # Container - fre make run-fremake -y am5.yaml -p hpcme.2023 -t prod -npc - -``fre pp`` -============ - -``fre yamltools`` -============ - -``fre check`` -============ - -``fre list`` -============ - -``fre run`` -============ - -``fre test`` -============ diff --git a/docs/usage.rst b/docs/usage.rst index bf5be2a6..e49f106b 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -3,6 +3,10 @@ Usage ============= Using a set of YAML configuration files, ``fre make`` compiles a FMS-based model, and ``fre pp`` postprocesses the history output and runs diagnostic analysis scripts. Please note that model running is not yet supported in FRE 2024; continue to use FRE Bronx frerun. +YAML Framework +======================== +.. include:: usage/yaml_framework.rst + Build FMS model ======================= .. include:: usage/compile.rst diff --git a/docs/usage/compile.rst b/docs/usage/compile.rst index 8f9dfbe5..4d66c637 100644 --- a/docs/usage/compile.rst +++ b/docs/usage/compile.rst @@ -1,19 +1,177 @@ ``fre make`` can compile a traditional "bare metal" executable or a containerized executable using a set of YAML configuration files. -Through the fre-cli, `fre make` can be used to create and run a checkout script, makefile, and compile a model. +Through the fre-cli, ``fre make`` can be used to create and run a checkout script, makefile, and compile a model. Fremake Canopy Supports: - - multiple targets; use `-t` flag to define each target - - bare-metal build - - container creation - - parallel checkouts for bare-metal build** + - multiple target use; ``-t`` flag to define each target (for multiple platform-target combinations) + - bare-metal build + - container creation + - parallel checkouts for bare-metal build + - parallel model builds + - one yaml format + - additional library support if needed -** **Note: Users will not be able to create containers without access to podman** +**Note: Users will not be able to create containers without access to podman. To get access, submit a helpdesk ticket.** -.. 
include:: fre_make.rst
+Required configuration files:
 
-Guide and quickstart to `fre make` subtools:
+  - Model Yaml
+  - Compile Yaml
+  - Platforms Yaml
 
-:ref:`fre-make-guide`
+These yamls are combined and further parsed through the ``fre make`` tools.
 
-https://github.com/NOAA-GFDL/fre-cli/blob/main/fre/make/README.md
+Compile Yaml
+------------
+To create the compile yaml, reference the compile section of an XML. Certain fields should be included under "compile": ``experiment``, ``container_addlibs``, ``baremetal_linkerflags``, and ``src``.
+
+  - The experiment can be explicitly defined or can be used in conjunction with defined ``fre_properties`` from the model yaml, as seen in the code block below
+  - ``container_addlibs``: list of strings of packages needed for the model to compile (used to create the link line in the Makefile)
+  - ``baremetal_linkerflags``: list of strings of linker flags (used to populate the link line in the Makefile)
+  - ``src``: contains information about components needed for model compilation
+
+.. code-block::
+
+  compile:
+    experiment: !join [*group_version, "_compile"]
+    container_addlibs: "libraries and packages needed for linking in container" (string)
+    baremetal_linkerflags: "linker flags of libraries and packages needed" (string)
+    src:
+
+The ``src`` section is used to include component information. This will include: ``component``, ``requires``, ``repo``, ``branch``, ``paths``, ``cppdefs``, ``makeOverrides``, ``otherFlags``, ``doF90Cpp``, and ``additionalInstructions``.
+
+.. code-block::
+
+  src:
+    - component: "component name" (string)
+      requires: ["list of components that this component depends on"] (list of strings)
+      repo: "url of code repository" (string)
+      branch: "version of code to clone" (string / list of strings)
+      paths: ["paths in the component to compile"] (list of strings)
+      cppdefs: "CPPDEFS to include in compiling component" (string)
+      makeOverrides: "overrides openmp target for MOM6" ('OPENMP=""') (string)
+      otherFlags: "include flags needed to retrieve other necessary code" (string)
+      doF90Cpp: True if the preprocessor needs to be run (boolean)
+      additionalInstructions: additional instructions to run after checkout (string)
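+
+For example, a minimal two-component ``src`` list might look like the following (an illustrative sketch only; repository URLs, tags, and flags vary by model):
+
+.. code-block::
+
+  src:
+    - component: "FMS"
+      repo: "https://github.com/NOAA-GFDL/FMS.git"
+      branch: "2024.01"
+      cppdefs: "-Duse_libMPI -Duse_netCDF"
+    - component: "atmos_phys"
+      requires: ["FMS"]
+      repo: "https://gitlab.gfdl.noaa.gov/FMS/atmos_phys.git"
+      branch: "2024.01"
+      otherFlags: "-IFMS/include"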
+
+Guide
+----------
+1. Bare-metal Build:
+
+.. code-block::
+
+  # Create checkout script
+  fre make create-checkout -y [model yaml file] -p [platform] -t [target]
+
+  # Create and run checkout script
+  fre make create-checkout -y [model yaml file] -p [platform] -t [target] --execute
+
+  # Create Makefile
+  fre make create-makefile -y [model yaml file] -p [platform] -t [target]
+
+  # Create the compile script
+  fre make create-compile -y [model yaml file] -p [platform] -t [target]
+
+  # Create and run the compile script
+  fre make create-compile -y [model yaml file] -p [platform] -t [target] --execute
+
+  # Run all of fremake
+  fre make run-fremake -y [model yaml file] -p [platform] -t [target] [other options...]
+
+2. Container Build:
+
+For the container build, parallel checkouts are not supported, so the ``-npc`` option must be used for the checkout script. In addition, the platform must be a container platform.
+
+Users will not be able to create containers unless they have podman access on gaea.
+
+.. code-block::
+
+  # Create checkout script
+  fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] -npc
+
+  # Create and run checkout script
+  fre make create-checkout -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] -npc --execute
+
+  # Create Makefile
+  fre make create-makefile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target]
+
+  # Create a Dockerfile
+  fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target]
+
+  # Create and run the Dockerfile
+  fre make create-dockerfile -y [model yaml file] -p [CONTAINER PLATFORM] -t [target] --execute
+
+Quickstart
+----------
+The quickstart instructions can be used with the am5-compile examples located in the fre-examples repository: https://github.com/NOAA-GFDL/fre-examples/tree/main/AM5/am5-compile
+
+1. Bare-metal Build:
+
+.. code-block::
+
+  # Create checkout script
+  fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod
+
+  # Create and run checkout script
+  fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod --execute
+
+  # Create Makefile
+  fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod
+
+  # Create the compile script
+  fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod
+
+  # Create and run the compile script
+  fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod --execute
+
+2. Bare-metal Build Multi-target:
+
+.. code-block::
+
+  # Create checkout script
+  fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug
+
+  # Create and run checkout script
+  fre make create-checkout -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute
+
+  # Create Makefile
+  fre make create-makefile -y am5.yaml -p ncrc5.intel23 -t prod -t debug
+
+  # Create the compile script
+  fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug
+
+  # Create and run the compile script
+  fre make create-compile -y am5.yaml -p ncrc5.intel23 -t prod -t debug --execute
+
+3. Container Build:
+
+In order for the container to build successfully, the ``-npc`` (``--no-parallel-checkout``) flag is needed.
+
+.. code-block::
+
+  # Create checkout script
+  fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc
+
+  # Create and run checkout script
+  fre make create-checkout -y am5.yaml -p hpcme.2023 -t prod -npc --execute
+
+  # Create Makefile
+  fre make create-makefile -y am5.yaml -p hpcme.2023 -t prod
+
+  # Create Dockerfile
+  fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod
+
+  # Create and run the Dockerfile
+  fre make create-dockerfile -y am5.yaml -p hpcme.2023 -t prod --execute
+
+4. Run all of fremake:
+
+``run-fremake`` creates the checkout script, Makefile, and compile script (or Dockerfile) in one step; like the other subtools, it only kicks off the compilation itself when ``--execute`` is given.
+
+.. code-block::
+
+  # Bare-metal
+  fre make run-fremake -y am5.yaml -p ncrc5.intel23 -t prod
+
+  # Container
+  fre make run-fremake -y am5.yaml -p hpcme.2023 -t prod -npc
diff --git a/docs/usage/postprocess.rst b/docs/usage/postprocess.rst
index 994a4f1b..4a8cdf9d 100644
--- a/docs/usage/postprocess.rst
+++ b/docs/usage/postprocess.rst
@@ -52,19 +52,87 @@ the 19790101.nc.tar tarfile might contain::
 
 The name of the history file, while often predictably named, are arbitrary labels within the Diagnostic Manager configuration (diag yamls). Each history file is a CF-standard NetCDF file that can be inspected with common NetCDF tools such as the NCO or CDO tools, or even ``ncdump``.
 
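+For example, one can take a header-only look at a single extracted history file with ``ncdump`` (file name illustrative)::
+
+    ncdump -h 19790101.atmos_month.nc
+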
+Required configuration
+
+1. Set the history directory in your postprocessing yaml::
+
+    directories:
+      history: /arch5/am5/am5/am5f7c1r0/c96L65_am5f7c1r0_amip/gfdl.ncrc5-deploy-prod-openmp/history
+
+2. Set the segment size as an ISO8601 duration (e.g. P1Y is "one year")::
+
+    postprocess:
+      settings:
+        history_segment: P1Y
+
+3. Set the date range to postprocess as ISO8601 dates::
+
+    postprocess:
+      settings:
+        pp_start: 1979-01-01T0000Z
+        pp_stop: 2020-01-01T0000Z
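+
+Putting steps 1-3 together, a minimal postprocessing yaml skeleton looks like this (directory path illustrative)::
+
+    directories:
+      history: /arch5/am5/am5/am5f7c1r0/c96L65_am5f7c1r0_amip/gfdl.ncrc5-deploy-prod-openmp/history
+
+    postprocess:
+      settings:
+        history_segment: P1Y
+        pp_start: 1979-01-01T0000Z
+        pp_stop: 2020-01-01T0000Z
+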
 Postprocess components
 ----------------------
-History files are not immediately convenient for analysis.
-On native grid, named in a single namespace.
-Desire: regridded, renamed, ts
+The history-file namespace is a single layer, as shown above. By longtime tradition, FRE postprocessing namespaces are richer, with
+distinctions among timeseries, time-averaged, and static output datasets, and they include frequency and chunk size in the directory structure.
 
-Timeseries
-----------
-Set chunk_a, and chunk_b if desired.
+Postprocessed files within a "component" share a horizontal grid, which can be the native grid or regridded to lat/lon.
+
+Required configuration
+
+4. Define the atmos and ocean postprocess components::
+
+    postprocess:
+      components:
+        - type: atmos
+          sources: [atmos_month, atmos_annual]
+        - type: ocean
+          sources: [ocean_month, ocean_annual]
 
 XY-regridding
 -------------
-blahblah
+Commonly, native grid history files are regridded during postprocessing. To regrid to a lat/lon grid, configure your
+desired output grid, interpolation method, input grid type, and path to your FMS exchange grid definition.
+
+Optional configuration (i.e., if xy-regridding is desired)
+
+5. Regrid the atmos and ocean components to a 1x1 degree grid::
+
+    directories:
+      pp_grid_spec: /archive/oar.gfdl.am5/model_gen5/inputs/c96_grid/c96_OM4_025_grid_No_mg_drag_v20160808.tar
+
+    postprocess:
+      components:
+        - type: atmos
+          sources: [atmos_month, atmos_annual]
+          sourceGrid: cubedsphere
+          inputRealm: atmos
+          xyInterp: [180, 360]
+          interpMethod: conserve_order2
+        - type: ocean
+          sources: [ocean_month, ocean_annual]
+          sourceGrid: tripolar
+          inputRealm: ocean
+          xyInterp: [180, 360]
+          interpMethod: conserve_order1
+
+Timeseries
+----------
+Timeseries output is the most common type of postprocessed output.
 
 Climatologies
 -------------
diff --git a/docs/usage/yaml_framework.rst b/docs/usage/yaml_framework.rst
new file mode 100644
index 00000000..9acc6742
--- /dev/null
+++ b/docs/usage/yaml_framework.rst
@@ -0,0 +1,156 @@
+In order to utilize FRE 2024.01 tools, a distributed YAML structure is required. This framework includes a main model yaml, a compile yaml, a platforms yaml, and post-processing yamls. Throughout the compilation and post-processing steps, combined yamls that will be parsed for information are created. Yamls follow a dictionary-like structure with ``[key]: [value]`` fields.
+
+Yaml Formatting
+---------------
+Helpful information and format recommendations for creating yaml files.
+
+1. You can define a block of values as well as individual ``[key]: [value]`` pairs:
+
+.. code-block::
+
+  section name:
+    key: value
+    key: value
+
+2. ``[key]: [value]`` pairs can be made a list by utilizing a ``-``:
+
+.. code-block::
+
+  section name:
+    - key: value
+    - key: value
+
+3. If you want to associate information with a certain listed element, follow this structure:
+
+.. code-block::
+
+  section name:
+    - key: value
+      key: value
+    - key: value
+      key: value
+
+Where each dash indicates a list element.
+
+4. Yamls also support reusable variables, defined with ``&``:
+
+.. code-block::
+
+  &ReusableVariable Value
+
+5. Users can apply a reusable variable to a block of values. For example, everything under "section" is associated with the reusable variable:
+
+.. code-block::
+
+  section: &ReusableVariable
+    - key: value
+      key: value
+    - key: value
+
+6. To reference a reusable variable elsewhere, in either the same or another yaml, use ``*``:
+
+.. code-block::
+
+  *ReusableVariable
+
+7. If the reusable variable must be combined with other strings, the ``!join`` constructor is used. Example:
+
+.. code-block::
+
+  &version "2024.01"
+  &stem !join [FRE/, *version]
+
+In this example, the reusable variable ``stem`` will be parsed as ``FRE/2024.01``.
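+
+For readers curious how such a tag works, below is a minimal sketch of a ``!join`` constructor registered with PyYAML (an illustration only; fre-cli's actual constructor may differ in detail):
+
+.. code-block:: python
+
+  import yaml
+
+  def join_constructor(loader, node):
+      """Concatenate the resolved parts of a !join sequence into one string."""
+      return ''.join(str(part) for part in loader.construct_sequence(node))
+
+  # Register the custom tag on the safe loader.
+  yaml.add_constructor('!join', join_constructor, Loader=yaml.SafeLoader)
+
+  doc = """
+  fre_properties:
+    - &version "2024.01"
+    - &stem !join [FRE/, *version]
+  """
+  print(yaml.safe_load(doc))
+  # {'fre_properties': ['2024.01', 'FRE/2024.01']}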
, "each command must end with a newline character"] + modules: [array of modules to load including compiler] + fc: the name of the fortran compiler + cc: the name of the C compiler + mkTemplate: The location of the mkmf make template + modelRoot: The root directory of the model (where src, exec, experiments will go) + - container platform: container platform name + compiler: compiler you are using + RUNenv: Commands needed at the beginning of a RUN in dockerfile + modelRoot: The root directory of the model (where src, exec, experiments will go) INSIDE of the container (/apps) + fc: name of fortan compiler + cc: name of C compiler + container: True if this is a container platform + containerBuild: "podman" - the container build program + containerRun: "apptainer" - the container run program + + +Post-Processing Yaml +---------- +The post-processing yamls include information specific to experiments, such as directories to data and other scripts used, switches, and component information. The post-processing yaml can further define more ``fre_properties`` that may be experiment specific. If there are any repeated reusable variables, the ones set in this yaml will overwrite those set in the model yaml. This is discussed further in the "Postprocess FMS History Output" section. diff --git a/fre/app/freapp.py b/fre/app/freapp.py index 545356e4..29c34385 100644 --- a/fre/app/freapp.py +++ b/fre/app/freapp.py @@ -58,6 +58,7 @@ def app_cli(): def regrid(context, input_dir, output_dir, begin, tmp_dir, remap_dir, source, grid_spec, def_xy_interp ): + # pylint: disable=unused-argument ''' regrid target netcdf file ''' context.forward(_regrid_xy) diff --git a/fre/app/regrid_xy/regrid_xy.py b/fre/app/regrid_xy/regrid_xy.py index 8721b36a..7b363e32 100755 --- a/fre/app/regrid_xy/regrid_xy.py +++ b/fre/app/regrid_xy/regrid_xy.py @@ -164,7 +164,7 @@ def regrid_xy(input_dir = None, output_dir = None, begin = None, tmp_dir = None, """ ## rose config load check - config_name = os.getcwd() + config_name = os.getcwd() #REMOVE ME TODO config_name += '/rose-app-run.conf' #config_name += '/rose-app.conf' print(f'config_name = {config_name}') @@ -235,8 +235,6 @@ def regrid_xy(input_dir = None, output_dir = None, begin = None, tmp_dir = None, # grid_spec file management - #starting_dir = os.getcwd() - #os.chdir(work_dir) # i hate it if '.tar' in grid_spec: untar_sp = \ subprocess.run( ['tar', '-xvf', grid_spec, '-C', input_dir], @@ -463,7 +461,6 @@ def regrid_xy(input_dir = None, output_dir = None, begin = None, tmp_dir = None, continue # end of comp loop, exit or next one. - #os.chdir(starting_dir) # not clear this is necessary. print('done running regrid_xy()') return 0 diff --git a/fre/fre.py b/fre/fre.py index 9dd65eb0..8e5695c4 100644 --- a/fre/fre.py +++ b/fre/fre.py @@ -7,6 +7,21 @@ be called via this script. I.e. 'fre' is the entry point """ +#versioning... always fun... +# turn xxxx.y into xxxx.0y +import importlib.metadata +version_unexpanded = importlib.metadata.version('fre-cli') +version_unexpanded_split = version_unexpanded.split('.') +if len(version_unexpanded_split[1]) == 1: + version_minor = "0" + version_unexpanded_split[1] +else: + version_minor = version_unexpanded_split[1] +version = version_unexpanded_split[0] + '.' 
+ version_minor + + + + + import click from .lazy_group import LazyGroup @@ -27,15 +42,14 @@ fg='cyan') ) + @click.version_option( package_name = "fre-cli", - message = click.style("%(package)s | %(version)s", - fg = (155,255,172) ) + version=version ) def fre(): ''' entry point function to subgroup functions ''' - if __name__ == '__main__': fre() diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py deleted file mode 100644 index fc4d2df4..00000000 --- a/fre/make/createCheckout.py +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/python3 - -import os -import subprocess -import logging -import sys -import click -import fre.yamltools.combine_yamls as cy -from .gfdlfremake import varsfre, yamlfre, checkout, targetfre - -def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): - # Define variables - yml = yamlfile - name = yamlfile.split(".")[0] - run = execute - jobs = str(jobs) - pcheck = no_parallel_checkout - - if pcheck: - pc = "" - else: - pc = " &" - - if verbose: - logging.basicConfig(level=logging.INFO) - else: - logging.basicConfig(level=logging.ERROR) - - srcDir="src" - checkoutScriptName = "checkout.sh" - baremetalRun = False # This is needed if there are no bare metal runs - - ## Split and store the platforms and targets in a list - plist = platform - tlist = target - - # Combine model, compile, and platform yamls - # Default behavior - combine yamls / rewrite combined yaml - comb = cy.init_compile_yaml(yml,platform,target) - full_combined = cy.get_combined_compileyaml(comb) - - ## Get the variables in the model yaml - freVars = varsfre.frevars(full_combined) - - ## Open the yaml file, validate the yaml, and parse as fremakeYaml - modelYaml = yamlfre.freyaml(full_combined,freVars) - fremakeYaml = modelYaml.getCompileYaml() - - ## Error checking the targets - for targetName in tlist: - target = targetfre.fretarget(targetName) - - ## Loop through the platforms specified on the command line - ## If the platform is a baremetal platform, write the checkout script and run it once - ## This should be done separately and serially because bare metal platforms should all be using - ## the same source code. 
- for platformName in plist: - if modelYaml.platforms.hasPlatform(platformName): - pass - else: - raise ValueError (platformName + " does not exist in platforms.yaml") - ( compiler, modules, modulesInit, fc, cc, modelRoot, - iscontainer, mkTemplate, containerBuild, ContainerRun, - RUNenv ) = modelYaml.platforms.getPlatformFromName(platformName) - - ## Create the source directory for the platform - if iscontainer is False: - srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" - # if the source directory does not exist, it is created - if not os.path.exists(srcDir): - os.system("mkdir -p " + srcDir) - # if the checkout script does not exist, it is created - if not os.path.exists(srcDir+"/checkout.sh"): - freCheckout = checkout.checkout("checkout.sh",srcDir) - freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) - freCheckout.finish(pc) - # Make checkout script executable - os.chmod(srcDir+"/checkout.sh", 0o744) - print("\nCheckout script created in "+ srcDir + "/checkout.sh \n") - - # Run the checkout script - if run is True: - freCheckout.run() - else: - sys.exit() - else: - print("\nCheckout script PREVIOUSLY created in "+ srcDir + "/checkout.sh \n") - if run == True: - try: - subprocess.run(args=[srcDir+"/checkout.sh"], check=True) - except: - print("\nThere was an error with the checkout script "+srcDir+"/checkout.sh.", - "\nTry removing test folder: " + modelRoot +"\n") - raise - else: - sys.exit() - - else: - image="ecpe4s/noaa-intel-prototype:2023.09.25" - bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" - tmpDir = "tmp/"+platformName - freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir) - freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) - freCheckout.finish(pc) - print("\nCheckout script created at " + tmpDir + "/checkout.sh" + "\n") - -@click.command() -def _checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): - ''' - Decorator for calling checkout_create - allows the decorated version - of the function to be separate from the undecorated version - ''' - return checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose) - -if __name__ == "__main__": - checkout_create() diff --git a/fre/make/create_checkout_script.py b/fre/make/create_checkout_script.py new file mode 100644 index 00000000..6d5bb2aa --- /dev/null +++ b/fre/make/create_checkout_script.py @@ -0,0 +1,119 @@ +''' +checks out a makefile for a given model from the yamls +i think! 
+'''
+
+import os
+import subprocess
+import logging
+import sys
+import click
+import fre.yamltools.combine_yamls as cy
+from .gfdlfremake import varsfre, yamlfre, checkout, targetfre
+
+def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose):
+    # Define variables
+    yml = yamlfile
+    name = yamlfile.split(".")[0]
+    run = execute
+    jobs = str(jobs)
+    pcheck = no_parallel_checkout
+
+    if pcheck:
+        pc = ""
+    else:
+        pc = " &"
+
+    if verbose:
+        logging.basicConfig(level=logging.INFO)
+    else:
+        logging.basicConfig(level=logging.ERROR)
+
+    src_dir="src"
+    checkout_script_name = "checkout.sh"
+    baremetal_run = False # This is needed if there are no bare metal runs
+
+    ## Split and store the platforms and targets in a list
+    plist = platform
+    tlist = target
+
+    # Combine model, compile, and platform yamls
+    # Default behavior - combine yamls / rewrite combined yaml
+    comb = cy.init_compile_yaml(yml,platform,target)
+    full_combined = cy.get_combined_compileyaml(comb)
+
+    ## Get the variables in the model yaml
+    fre_vars = varsfre.frevars(full_combined)
+
+    ## Open the yaml file, validate the yaml, and parse as fremake_yaml
+    model_yaml = yamlfre.freyaml(full_combined,fre_vars)
+    fremake_yaml = model_yaml.getCompileYaml()
+
+    ## Error checking the targets
+    for target_name in tlist:
+        target = targetfre.fretarget(target_name)
+
+    ## Loop through the platforms specified on the command line
+    ## If the platform is a baremetal platform, write the checkout script and run it once
+    ## This should be done separately and serially because bare metal platforms should all be using
+    ## the same source code.
+    for platform_name in plist:
+        if model_yaml.platforms.hasPlatform(platform_name):
+            pass
+        else:
+            raise ValueError (platform_name + " does not exist in platforms.yaml")
+        ( compiler, modules, modules_init, fc, cc, model_root,
+          iscontainer, mk_template, container_build, container_run,
+          RUNenv ) = model_yaml.platforms.getPlatformFromName(platform_name)
+
+        # create the source directory for the platform
+        if not iscontainer:
+            src_dir = model_root + "/" + fremake_yaml["experiment"] + "/src"
+            # if the source directory does not exist, it is created
+            if not os.path.exists(src_dir):
+                os.system("mkdir -p " + src_dir)
+            # if the checkout script does not exist, it is created
+            if not os.path.exists(src_dir+"/checkout.sh"):
+                fre_checkout = checkout.checkout("checkout.sh",src_dir)
+                fre_checkout.writeCheckout(model_yaml.compile.getCompileYaml(),jobs,pc)
+                fre_checkout.finish(pc)
+                # Make checkout script executable
+                os.chmod(src_dir+"/checkout.sh", 0o744)
+                print("\nCheckout script created in "+ src_dir + "/checkout.sh \n")
+
+                # Run the checkout script
+                if run:
+                    fre_checkout.run()
+                else:
+                    sys.exit()
+            else:
+                print("\nCheckout script PREVIOUSLY created in "+ src_dir + "/checkout.sh \n")
+                if run:
+                    try:
+                        subprocess.run(args=[src_dir+"/checkout.sh"], check=True)
+                    except:
+                        print("\nThere was an error with the checkout script "+src_dir+"/checkout.sh.",
+                              "\nTry removing test folder: " + model_root +"\n")
+                        raise
+                else:
+                    sys.exit()
+
+        else:
+            image="ecpe4s/noaa-intel-prototype:2023.09.25"
+            bld_dir = model_root + "/" + fremake_yaml["experiment"] + "/exec"
+            tmp_dir = "tmp/"+platform_name
+            fre_checkout = checkout.checkoutForContainer("checkout.sh", src_dir, tmp_dir)
+            fre_checkout.writeCheckout(model_yaml.compile.getCompileYaml(),jobs,pc)
+            fre_checkout.finish(pc)
+            print("\nCheckout script created at " + tmp_dir + "/checkout.sh" + "\n")
+
+@click.command()
+def 
_checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): + ''' + Decorator for calling checkout_create - allows the decorated version + of the function to be separate from the undecorated version + ''' + return checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose) + +if __name__ == "__main__": + checkout_create() diff --git a/fre/make/createCompile.py b/fre/make/create_compile_script.py similarity index 100% rename from fre/make/createCompile.py rename to fre/make/create_compile_script.py diff --git a/fre/make/createDocker.py b/fre/make/create_docker_script.py similarity index 89% rename from fre/make/createDocker.py rename to fre/make/create_docker_script.py index 59b73ee9..5731991e 100644 --- a/fre/make/createDocker.py +++ b/fre/make/create_docker_script.py @@ -2,6 +2,7 @@ import os import sys +import subprocess from pathlib import Path import click #from .gfdlfremake import varsfre, targetfre, makefilefre, platformfre, yamlfre, buildDocker @@ -72,10 +73,13 @@ def dockerfile_create(yamlfile,platform,target,execute): click.echo("\ntmpDir created in " + currDir + "/tmp") click.echo("Dockerfile created in " + currDir +"\n") - if run: - dockerBuild.build(containerBuild, containerRun) - else: - sys.exit() + # create build script for container + dockerBuild.createBuildScript(containerBuild, containerRun) + print("Container build script created at "+dockerBuild.userScriptPath+"\n\n") + + # run the script if option is given + if run: + subprocess.run(args=[dockerBuild.userScriptPath], check=True) @click.command() def _dockerfile_create(yamlfile,platform,target,execute): diff --git a/fre/make/createMakefile.py b/fre/make/create_makefile_script.py similarity index 100% rename from fre/make/createMakefile.py rename to fre/make/create_makefile_script.py diff --git a/fre/make/fremake.py b/fre/make/fremake.py index f39a6be0..5b013457 100644 --- a/fre/make/fremake.py +++ b/fre/make/fremake.py @@ -1,31 +1,31 @@ import click -from fre.make import createCheckout -from fre.make import createMakefile -from fre.make import createCompile -from fre.make import createDocker -from fre.make import runFremake +from fre.make import create_checkout_script +from fre.make import create_makefile_script +from fre.make import create_compile_script +from fre.make import create_docker_script +from fre.make import run_fremake_script -yamlfile_opt_help = """Experiment yaml compile FILE +YAMLFILE_OPT_HELP = """Experiment yaml compile FILE """ -experiment_opt_help = """Name of experiment""" -platform_opt_help = """Hardware and software FRE platform space separated list of STRING(s). +EXPERIMENT_OPT_HELP = """Name of experiment""" +PLATFORM_OPT_HELP = """Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions """ -target_opt_help = """a space separated list of STRING(s) that defines compilation settings and +TARGET_OPT_HELP = """a space separated list of STRING(s) that defines compilation settings and linkage directives for experiments. Predefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). Any number of targets can be used. 
""" -parallel_opt_help = """Number of concurrent model compiles (default 1) +PARALLEL_OPT_HELP = """Number of concurrent model compiles (default 1) """ -jobs_opt_help = """Number of jobs to run simultaneously. Used for make -jJOBS and git clone +JOBS_OPT_HELP = """Number of jobs to run simultaneously. Used for make -jJOBS and git clone recursive --jobs=JOBS """ -no_parallel_checkout_opt_help = """Use this option if you do not want a parallel checkout. +NO_PARALLEL_CHECKOUT_OPT_HELP = """Use this option if you do not want a parallel checkout. The default is to have parallel checkouts. """ -verbose_opt_help = """Get verbose messages (repeat the option to increase verbosity level) +VERBOSE_OPT_HELP = """Get verbose messages (repeat the option to increase verbosity level) """ @@ -38,71 +38,77 @@ def make_cli(): @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = platform_opt_help, required = True) + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.option("-n", "--parallel", type = int, metavar = '', default = 1, - help = parallel_opt_help) + help = PARALLEL_OPT_HELP) @click.option("-j", "--jobs", type = int, metavar = '', default = 4, - help = jobs_opt_help) + help = JOBS_OPT_HELP) @click.option("-npc", "--no-parallel-checkout", is_flag = True, - help = no_parallel_checkout_opt_help) + help = NO_PARALLEL_CHECKOUT_OPT_HELP) +@click.option("-e", + "--execute", + is_flag = True, + default = False, + help = "Use this to run the created compilation script.") @click.option("-v", "--verbose", is_flag = True, - help = verbose_opt_help) + help = VERBOSE_OPT_HELP) @click.pass_context -def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel_checkout, verbose): +def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel_checkout, execute, verbose): + # pylint: disable=unused-argument """ - Perform all fremake functions to run checkout and compile model""" - context.forward(runFremake._fremake_run) + context.forward(run_fremake_script._fremake_run) #### @make_cli.command() @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = platform_opt_help, + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.option("-j", "--jobs", type = int, metavar = '', default = 4, - help = jobs_opt_help) + help = JOBS_OPT_HELP) @click.option("-npc", "--no-parallel-checkout", is_flag = True, - help = no_parallel_checkout_opt_help) + help = NO_PARALLEL_CHECKOUT_OPT_HELP) @click.option("--execute", is_flag = True, default = False, @@ -110,33 +116,35 @@ def run_fremake(context, yamlfile, platform, target, parallel, jobs, no_parallel @click.option("-v", "--verbose", is_flag = True, - help = verbose_opt_help) + help = VERBOSE_OPT_HELP) @click.pass_context 
def create_checkout(context,yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): + # pylint: disable=unused-argument """ - Write the checkout script """ - context.forward(createCheckout._checkout_create) + context.forward(create_checkout_script._checkout_create) ##### @make_cli.command @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = platform_opt_help, required = True) + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.pass_context def create_makefile(context,yamlfile,platform,target): + # pylint: disable=unused-argument """ - Write the makefile """ - context.forward(createMakefile._makefile_create) + context.forward(create_makefile_script._makefile_create) ##### @@ -144,29 +152,29 @@ def create_makefile(context,yamlfile,platform,target): @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = platform_opt_help, required = True) + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.option("-j", "--jobs", type = int, metavar = '', default = 4, - help = jobs_opt_help) + help = JOBS_OPT_HELP) @click.option("-n", "--parallel", type = int, metavar = '', default = 1, - help = parallel_opt_help) + help = PARALLEL_OPT_HELP) @click.option("--execute", is_flag = True, default = False, @@ -174,35 +182,37 @@ def create_makefile(context,yamlfile,platform,target): @click.option("-v", "--verbose", is_flag = True, - help = verbose_opt_help) + help = VERBOSE_OPT_HELP) @click.pass_context def create_compile(context,yamlfile,platform,target,jobs,parallel,execute,verbose): + # pylint: disable=unused-argument """ - Write the compile script """ - context.forward(createCompile._compile_create) + context.forward(create_compile_script._compile_create) @make_cli.command @click.option("-y", "--yamlfile", type = str, - help = yamlfile_opt_help, + help = YAMLFILE_OPT_HELP, required = True) # use click.option() over click.argument(), we want help statements @click.option("-p", "--platform", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = platform_opt_help, required = True) + help = PLATFORM_OPT_HELP, required = True) @click.option("-t", "--target", multiple = True, # replaces nargs = -1, since click.option() type = str, - help = target_opt_help, + help = TARGET_OPT_HELP, required = True) @click.option("--execute", is_flag = True, help = "Build Dockerfile that has been generated by create-docker.") @click.pass_context def create_dockerfile(context,yamlfile,platform,target,execute): + # pylint: disable=unused-argument """ - Write the dockerfile """ - context.forward(createDocker._dockerfile_create) + context.forward(create_docker_script._dockerfile_create) if __name__ == "__main__": make_cli() diff --git 
a/fre/make/gfdlfremake/buildBaremetal.py b/fre/make/gfdlfremake/buildBaremetal.py index fdb4e2d8..9e742980 100644 --- a/fre/make/gfdlfremake/buildBaremetal.py +++ b/fre/make/gfdlfremake/buildBaremetal.py @@ -10,14 +10,14 @@ def fremake_parallel(fremakeBuildList): """ Brief: Called for parallel execution purposes. Runs the builds. - Param: + Param: - fremakeBuildList : fremakeBuild object list passes by pool.map """ fremakeBuildList.run() class buildBaremetal(): """ - Brief: Creates the build script to compile the model + Brief: Creates the build script to compile the model Param: - self : The buildScript object - exp : The experiment name @@ -40,22 +40,22 @@ def __init__(self,exp,mkTemplatePath,srcDir,bldDir,target,modules,modulesInit,jo self.template = mkTemplatePath self.modules = "" for m in modules: - self.modules = self.modules +" "+ m + self.modules = f"{self.modules} {m}" ## Set up the top portion of the compile script self.setup=[ "#!/bin/sh -fx \n", - "bld_dir="+self.bld+"/ \n", - "src_dir="+self.src+"/ \n", - "mkmf_template="+self.template+" \n"] + f"bld_dir={self.bld}/ \n", + f"src_dir={self.src}/ \n", + f"mkmf_template={self.template} \n"] if self.modules != "": self.setup.extend(modulesInit) #extend - this is a list - self.setup.append("module load "+self.modules+" \n") # Append -this is a single string + self.setup.append(f"module load {self.modules} \n") # Append -this is a single string ## Create the build directory - os.system("mkdir -p "+self.bld) + os.system(f"mkdir -p {self.bld}") ## Create the compile script - self.f=open(self.bld+"/compile.sh","w") + self.f=open(f"{self.bld}/compile.sh","w") self.f.writelines(self.setup) def writeBuildComponents(self, c): @@ -69,7 +69,7 @@ def writeBuildComponents(self, c): comp = c["component"] # Make the component directory - self.f.write("\n mkdir -p $bld_dir/"+comp+"\n") + self.f.write(f"\n mkdir -p $bld_dir/{comp}\n") # Get the paths needed for compiling pstring = "" @@ -77,16 +77,22 @@ def writeBuildComponents(self, c): pstring = pstring+"$src_dir/"+paths+" " # Run list_paths - self.f.write(" list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+"\n") - self.f.write(" cd $bld_dir/"+comp+"\n") + self.f.write(f" list_paths -l -o $bld_dir/{comp}/pathnames_{comp} {pstring}\n") + self.f.write(f" cd $bld_dir/{comp}\n") # Create the mkmf line # If this lib doesnt have any code dependencies and # it requires the preprocessor (no -o and yes --use-cpp) if c["requires"] == [] and c["doF90Cpp"]: - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir " + "-p lib"+comp+".a -t $mkmf_template --use-cpp " + "-c \""+c["cppdefs"]+"\" "+c["otherFlags"] + +" $bld_dir/"+comp+"/pathnames_"+comp+" \n") elif c["requires"] == []: # If this lib doesnt have any code dependencies (no -o) - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir " + "-p lib"+comp+".a -t $mkmf_template -c \"" + +c["cppdefs"]+"\" "+c["otherFlags"] + +" $bld_dir/"+comp+"/pathnames_"+comp+" \n") else: #Has requirements #Set up the requirements as a string to inclue after the -o reqstring = "" @@ -95,9 +101,15 @@ def writeBuildComponents(self, c): #Figure out if we need the preprocessor if c["doF90Cpp"]: - 
self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir " + "-p lib"+comp+".a -t $mkmf_template --use-cpp " + "-c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" " + +c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") else: - self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir " + "-p lib"+comp+".a -t $mkmf_template -c \"" + +c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"] + +" $bld_dir/"+comp+"/pathnames_"+comp+" \n") ##TODO: add targets input def writeScript(self): @@ -106,8 +118,8 @@ def writeScript(self): Param: - self : The buildScript object """ - self.f.write("cd "+self.bld+"\n") - self.f.write(self.make+"\n") + self.f.write(f"cd {self.bld}\n") + self.f.write(f"{self.make}\n") self.f.close() # Make compile script executable @@ -120,10 +132,22 @@ def run(self): Param: - self : The dockerfile object """ -###### TODO make the Makefile - command = [self.bld+"/compile.sh","|","tee",self.bld+"/log.compile"] - try: - subprocess.run(args=command, check=True) - except: - print("There was an error running "+self.bld+"/compile.sh") - raise + command = [self.bld+"/compile.sh"] + + # Run compile script + p1 = subprocess.Popen(command, stdout=subprocess.PIPE,stderr=subprocess.STDOUT) + + # Direct output to log file as well + p2 = subprocess.Popen(["tee",self.bld+"/log.compile"], stdin=p1.stdout) + + # Allow process1 to receive SIGPIPE is process2 exits + p1.stdout.close() + p2.communicate() + + # wait for process1 to finish before checking return code + p1.wait() + if p1.returncode != 0: + print(f"\nThere was an error running {self.bld}/compile.sh") + print(f"Check the log file: {self.bld}/log.compile") + else: + print(f"\nSuccessful run of {self.bld}/compile.sh") diff --git a/fre/make/gfdlfremake/buildDocker.py b/fre/make/gfdlfremake/buildDocker.py index 6d33d0d2..491d5c2c 100644 --- a/fre/make/gfdlfremake/buildDocker.py +++ b/fre/make/gfdlfremake/buildDocker.py @@ -9,12 +9,12 @@ class container(): """ Brief: Opens the Dockerfile for writing - Param: + Param: - self : The dockerfile object - base : The docker base image to start from - libs : Additional libraries defined by user - exp : The experiment name - - RUNenv : The commands that have to be run at + - RUNenv : The commands that have to be run at the beginning of a RUN in the dockerfile to set up the environment """ @@ -58,7 +58,7 @@ def __init__(self,base,exp,libs,RUNenv,target): def writeDockerfileCheckout(self, cScriptName, cOnDisk): """ Brief: writes to the checkout part of the Dockerfile and sets up the compile - Param: + Param: - self : The dockerfile object - cScriptName : The name of the checkout script in the container - cOnDisk : The relative path to the checkout script on disk @@ -74,7 +74,7 @@ def writeDockerfileCheckout(self, cScriptName, cOnDisk): def writeDockerfileMakefile(self, makefileOnDiskPath, linklineonDiskPath): """ Brief: Copies the Makefile into the bldDir in the dockerfile - Param: + Param: - self : The dockerfile object - makefileOnDiskPath : The path to Makefile on the local disk - linklineonDiskPath : The path to the link line script on the local disk @@ -98,8 +98,8 @@ def 
writeDockerfileMakefile(self, makefileOnDiskPath, linklineonDiskPath): def writeDockerfileMkmf(self, c): """ - Brief: Adds components to the build part of the Dockerfile - Param: + Brief: Adds components to the build part of the Dockerfile + Param: - self : The dockerfile object - c : Component from the compile yaml """ @@ -141,14 +141,14 @@ def writeDockerfileMkmf(self, c): def writeRunscript(self,RUNenv,containerRun,runOnDisk): """ Brief: Writes a runscript to set up spack loads/environment - in order to run the executable in the container; + in order to run the executable in the container; runscript copied into container - Param: + Param: - self : The dockerfile object - - RUNEnv : The commands that have to be run at + - RUNEnv : The commands that have to be run at the beginning of a RUN in the dockerfile - - containerRun : The container platform used with `exec` - to run the container; apptainer + - containerRun : The container platform used with `exec` + to run the container; apptainer or singularity used - runOnDisk : The path to the run script on the local disk """ @@ -184,17 +184,25 @@ def writeRunscript(self,RUNenv,containerRun,runOnDisk): self.d.write('ENTRYPOINT ["/bin/bash"]') self.d.close() - def build(self,containerBuild,containerRun): + def createBuildScript(self,containerBuild,containerRun): """ - Brief: Builds the container image for the model - Param: + Brief: Writes out the build commands for the created dockerfile in a script, + which builds the dockerfile and then converts the format to a singularity image file. + Param: - self : The dockerfile object - - containerBuild : The tool used to build the container; + - containerBuild : The tool used to build the container; docker or podman used - - containerRun : The container platform used with `exec` to + - containerRun : The container platform used with `exec` to run the container; apptainer or singularity used """ - os.system(containerBuild+" build -f Dockerfile -t "+self.e+":"+self.target.gettargetName()) - os.system("rm -f "+self.e+".tar "+self.e+".sif") - os.system(containerBuild+" save -o "+self.e+"-"+self.target.gettargetName()+".tar localhost/"+self.e+":"+self.target.gettargetName()) - os.system(containerRun+" build --disable-cache "+self.e+"-"+self.target.gettargetName()+".sif docker-archive://"+self.e+"-"+self.target.gettargetName()+".tar") + self.userScript = ["#!/bin/bash\n"] + self.userScript.append(containerBuild+" build -f Dockerfile -t "+self.e+":"+self.target.gettargetName()+"\n") + self.userScript.append("rm -f "+self.e+".tar "+self.e+".sif\n") + self.userScript.append(containerBuild+" save -o "+self.e+"-"+self.target.gettargetName()+".tar localhost/"+self.e+":"+self.target.gettargetName()+"\n") + self.userScript.append(containerRun+" build --disable-cache "+self.e+"-"+self.target.gettargetName()+".sif docker-archive://"+self.e+"-"+self.target.gettargetName()+".tar\n") + self.userScriptFile = open("createContainer.sh","w") + self.userScriptFile.writelines(self.userScript) + self.userScriptFile.close() + os.chmod("createContainer.sh", 0o744) + self.userScriptPath = os.getcwd()+"/createContainer.sh" + diff --git a/fre/make/runFremake.py b/fre/make/run_fremake_script.py similarity index 88% rename from fre/make/runFremake.py rename to fre/make/run_fremake_script.py index ffe2ec96..9e1c1730 100644 --- a/fre/make/runFremake.py +++ b/fre/make/run_fremake_script.py @@ -10,12 +10,13 @@ from multiprocessing.dummy import Pool from pathlib import Path import click +import subprocess import 
fre.yamltools.combine_yamls as cy from .gfdlfremake import ( targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal ) -def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): +def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,execute,verbose): ''' run fremake via click''' yml = yamlfile name = yamlfile.split(".")[0] @@ -87,6 +88,7 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) freCheckout.finish(pc) os.chmod(srcDir+"/checkout.sh", 0o744) + print("\nCheckout script created at "+ srcDir + "/checkout.sh \n") ## TODO: Options for running on login cluster? freCheckout.run() @@ -126,6 +128,7 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb # Loop through components, send component name/requires/overrides for Makefile for c in fremakeYaml['src']: freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) + print("\nMakefile created at " + bldDir + "/Makefile" + "\n") freMakefile.writeMakefile() ## Create a list of compile scripts to run in parallel @@ -142,8 +145,11 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb fremakeBuild.writeBuildComponents(c) fremakeBuild.writeScript() fremakeBuildList.append(fremakeBuild) - ## Run the build - fremakeBuild.run() + ## Run the build if --execute option given, otherwise print out compile script path + if execute: + fremakeBuild.run() + else: + print("Compile script created at "+ bldDir+"/compile.sh\n\n") else: ###################### container stuff below ####################################### ## Run the checkout script @@ -187,26 +193,32 @@ def fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verb dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") - ## Run the dockerfile; build the container - dockerBuild.build(containerBuild,containerRun) + # Create build script for container + dockerBuild.createBuildScript(containerBuild, containerRun) + print("Container build script created at "+dockerBuild.userScriptPath+"\n\n") + + # Execute if flag is given + if execute: + subprocess.run(args=[dockerBuild.userScriptPath], check=True) #freCheckout.cleanup() #buildDockerfile(fremakeYaml,image) if baremetalRun: if __name__ == '__main__': - # Create a multiprocessing Pool - pool = Pool(processes=nparallel) - # process data_inputs iterable with pool - pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) + if execute: + # Create a multiprocessing Pool + pool = Pool(processes=nparallel) + # process data_inputs iterable with pool + pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) @click.command() -def _fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose): +def _fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,execute,verbose): ''' Decorator for calling _fremake_run - allows the decorated version of the function to be separate from the undecorated version ''' - return fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,verbose) + return fremake_run(yamlfile,platform,target,parallel,jobs,no_parallel_checkout,execute,verbose) if __name__ == "__main__": fremake_run() diff --git a/fre/make/tests/ESM4_example/compile.yaml b/fre/make/tests/ESM4_example/compile.yaml deleted file mode 100644 index ee51658d..00000000 --- a/fre/make/tests/ESM4_example/compile.yaml +++ /dev/null @@ -1,124 
+0,0 @@ -compile: - experiment: "esm4" - container_addlibs: - baremetal_linkerflags: - src: - - component: "FMS" - repo: "https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-Duse_libMPI -Duse_netCDF -DMAXFIELDMETHODS_=500" - branch: *FMS_GIT_TAG - - component: "atmos_phys" - requires: ["FMS"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/atmos_phys.git" - branch: *ATM_PHYS_GIT_TAG - otherFlags: *FMSincludes - - component: "atmos_dyn" - requires: ["FMS", "atmos_phys"] - repo: "https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML" - otherFlags: *FMSincludes - branch: *ATM_FV3_GIT_TAG - paths: [ "atmos_dyn/driver/GFDL", - "atmos_dyn/model", - "atmos_dyn/model_nh_null", - "atmos_dyn/GFDL_tools", - "atmos_dyn/driver/SHiELD/cloud_diagnosis.F90", - "atmos_dyn/driver/SHiELD/gfdl_cloud_microphys.F90", - "atmos_dyn/tools" ] - - component: "atmos_drivers" - requires: ["FMS", "atmos_phys", "atmos_dyn"] - repo: "https://github.com/NOAA-GFDL/atmos_drivers.git" - cppdefs: "-DSPMD -DCLIMATE_NUDGE" - branch: *ATM_DRV_GIT_TAG - otherFlags: *FMSincludes - paths: ["atmos_drivers/coupled"] - - component: "lm4p" - requires: ["FMS"] - repo: "https://gitlab.gfdl.noaa.gov/FMS/lm4p.git" - branch: *LAND_GIT_TAG - cppdefs: "-DINTERNAL_FILE_NML" - otherFlags: *FMSincludes - - component: "mom6" - requires: ["FMS"] - repo: ["https://github.com/NOAA-GFDL/MOM6-examples.git", - "https://github.com/NOAA-GFDL/ocean_BGC.git" ] - branch: [ "dev/gfdl", *OCEAN_BGC_GIT_TAG ] # cant use property for mom6 since its a commit hash instead of a branch - otherFlags: !join [ *FMSincludes, " ", *MOMincludes ] - additionalInstructions: | - pushd mom6/MOM6-examples - git checkout 40e3937 # this is just the value of MOM6_GIT_TAG property, can't seem to use variable - popd - # this step might be covered by initial checkout since the default is recursive - git submodule update --recursive --init mom6/MOM6-examples/src/MOM6 mom6/MOM6-examples/src/SIS2 mom6/MOM6-examples/src/icebergs - # checkout dev/gfdl on the icebergs submodule - pushd mom6/MOM6-examples/src/icebergs - git checkout dev/gfdl - popd - # this is different than the MOM6_GIT_TAG above since its pulling a submodule not the whole repo - if [[ $MOM6_GIT_FIX ]]; then - echo WARNING: Checking out MOM6_GIT_FIX, set to: $MOM6_GIT_FIX - pushd mom6/MOM6-examples/src/MOM6/ - git checkout $MOM6_GIT_FIX - popd - fi - if [[ $SIS2_GIT_FIX ]]; then - echo WARNING: Checking out SIS2_GIT_FIX, set to: $SIS2_GIT_FIX - pushd mom6/MOM6-examples/src/SIS2/ - git checkout $SIS2_GIT_FIX - popd - fi - # link in dataset - pushd mom6/MOM6-examples - # skip the check for orion - #set platform_domain = `perl -T -e "use Net::Domain(hostdomain) ; print hostdomain"` - #if ("${platform_domain}" =~ *"MsState"* ) then - # ln -s /work/noaa/gfdlscr/pdata/gfdl/gfdl_O/datasets/ .datasets - #else - ln -s /gpfs/f5/gfdl_o/world-shared/datasets .datasets - #endif - popd - test -e mom6/.datasets - if [[ $status != 0 ]]; then - echo ""; echo "" ; echo " WARNING: .datasets link in MOM6 examples directory is invalid"; echo ""; echo "" - fi - cppdefs: "-DMAX_FIELDS_=100 -DNOT_SET_AFFINITY -D_USE_MOM6_DIAG -D_USE_GENERIC_TRACER -DUSE_PRECISION=2" - paths: [ "mom6/MOM6-examples/src/MOM6/config_src/infra/FMS2", - "mom6/MOM6-examples/src/MOM6/config_src/memory/dynamic_symmetric", - "mom6/MOM6-examples/src/MOM6/config_src/drivers/FMS_cap", - "mom6/MOM6-examples/src/MOM6/src/ALE", - "mom6/MOM6-examples/src/MOM6/src/core", - "mom6/MOM6-examples/src/MOM6/src/diagnostics", - 
"mom6/MOM6-examples/src/MOM6/src/equation_of_state", - "mom6/MOM6-examples/src/MOM6/src/framework", - "mom6/MOM6-examples/src/MOM6/src/ice_shelf", - "mom6/MOM6-examples/src/MOM6/src/initialization", - "mom6/MOM6-examples/src/MOM6/src/ocean_data_assim", - "mom6/MOM6-examples/src/MOM6/src/parameterizations", - "mom6/MOM6-examples/src/MOM6/src/tracer", - "mom6/MOM6-examples/src/MOM6/src/user", - "mom6/MOM6-examples/src/MOM6/config_src/external/ODA_hooks", - "mom6/MOM6-examples/src/MOM6/config_src/external/database_comms", - "mom6/MOM6-examples/src/MOM6/config_src/external/drifters", - "mom6/MOM6-examples/src/MOM6/config_src/external/stochastic_physics", - "mom6/MOM6-examples/src/MOM6/config_src/external/stochastic_physics", - "mom6/ocean_BGC/generic_tracers", - "mom6/ocean_BGC/mocsy/src" ] - - component: "sis2" - requires: ["FMS", "mom6"] - repo: "https://github.com/NOAA-GFDL/ice_param.git" - branch: "2024.01" - cppdefs: "-DUSE_FMS2_IO" - otherFlags: !join [ *FMSincludes, " ", *MOMincludes ] - paths: [ "mom6/MOM6-examples/src/SIS2/config_src/dynamic_symmetric", - "mom6/MOM6-examples/src/SIS2/config_src/external/Icepack_interfaces", - "mom6/MOM6-examples/src/SIS2/src", - "mom6/MOM6-examples/src/icebergs/src", - "sis2" ] - - component: "coupler" - requires: ["FMS", "atmos_dyn", "atmos_drivers", "atmos_phys", "lm4p", "sis2", "mom6"] - repo: "https://github.com/NOAA-GFDL/FMScoupler" - branch: "2024.01" - otherFlags: !join [ *FMSincludes, " ", *MOMincludes ] - paths: [ "coupler/shared", - "coupler/full" ] - diff --git a/fre/make/tests/ESM4_example/esm4.yaml b/fre/make/tests/ESM4_example/esm4.yaml deleted file mode 100644 index 436672ab..00000000 --- a/fre/make/tests/ESM4_example/esm4.yaml +++ /dev/null @@ -1,86 +0,0 @@ -# esm4.2, based off ESM4p2_piControl_spinup_J_rts.xml -# this needs -npc (non-parallel checkout) flag during checkout script creation for additional checkouts to work properly -fre_properties: - - &RELEASE "2024.01" - - &FMS_GIT_TAG "2024.01" - - &ATM_PHYS_GIT_TAG "2024.01-alpha6" - - &ATM_FV3_GIT_TAG "2023.03" - - &ATM_DRV_GIT_TAG "2023.04" - - &LAND_GIT_TAG "2024.01" - - &ICE_PARAM_GIT_TAG "2023.04" - - &ESM4_VERSION "2024.01" - - &OCEAN_BGC_GIT_TAG "dev4.2_benthic" - - &MOM6_DATE "20231130" - - &MOM6_GIT_TAG "40e3937" - - &MOM6_GIT_FIX "" - - &SIS2_GIT_FIX "" - # compile - - &FRE_STEM !join [fre/FMS, *RELEASE, _mom6_, *MOM6_DATE] - - &INTEL "intel-classic" - - &FMSincludes "-IFMS/include" - - &MOMincludes "-Imom6/MOM6-examples/src/MOM6/src/framework" - # post processing - - &OM4_ANALYSIS "ice_ocean_SIS2/OM4_05" - - &PROD_SIMTIME "5" # simulation length in years - - &PP_CMIP_CHUNK_A "5yr" # smaller chunk length for pp/analysis - - &PP_CMIP_CHUNK_B "10yr" # bigger chunk length for pp/analysis - - &PP_START_YEAR "0001" # starting year - - &PROD_RUNTIME "16:00:00" # Maximum wall clock per simulation - - &PROD_SEGTIME "03:00:00" # Wall clock per segment (usually 1 year) - # MDBI settings - - &EXP_CPLD_START "0001" # simulation start year - - &EXP_CPLD_END "1000" # simulation end year - # this property wasn't actually used in the xml, but seems to be intended for specifying reference files to verify regression tests - - &reference_tag "FMS2022.03_mom6_20220703" - # these properties modify the build - - &BUILD_DATE "" # included as part of the src directory path - - &MODIFIER "" # appended to compile experiment name, can likely remove since compile experiments are no longer a thing - - &PLATINFO "" # used for awg_input - - &LIBS_ROOT "esm4.2_compile$(MODIFIER)" # used below - - 
&SRC_DIR !join [ $root/, *BUILD_DATE, /, *LIBS_ROOT, /src] # this ends up being used for input paths - - &MOM6_EXAMPLES !join [ $root/, *BUILD_DATE, /, *LIBS_ROOT, /src/mom6] # also for input paths - # input paths - - &AWG_INPUT_HOME_GAEA "awg_include" - - &AWG_INPUT_HOME_NESCC "awg_include" - - &AWG_INPUT_HOME_GFDL !join [ /nbhome/$USER/, *FRE_STEM, *PLATINFO, /$(name)/mdt_xml/awg_include] - - &USER_FILES_F2toF5 "/gpfs/f5/gfdl_f/world-shared/Niki.Zadeh/archive/input/f2_user_files_in_xmls" - - &append_to_setup_csh "" # The start year of forcing dataset. FRE hack to set fyear - -build: - compileYaml: "compile.yaml" - platformYaml: "platforms.yaml" - -shared: - # directories shared across tools - # shamelessly stolen from am5 example - directories: &shared_directories - history_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, history] - pp_dir: !join [/archive/$USER/, *FRE_STEM, /, *name, /, *platform, -, *target, /, pp] - analysis_dir: !join [/nbhome/$USER/, *FRE_STEM, /, *name] - ptmp_dir: "/xtmp/$USER/ptmp" - fre_analysis_home: "/home/fms/local/opt/fre-analysis/test" - - # shared pp settings - # also shamelessly stolen from am5 example - postprocess: - settings: &shared_settings - history_segment: "P1Y" - site: "ppan" - switches: &shared_switches - do_statics: True - do_timeavgs: True - clean_work: True - do_refinediag: False - do_atmos_plevel_masking: True - do_preanalysis: False - do_analysis: True - -experiments: - - name: "ESM4p2_piControl_spinup_J" - pp: - - name: "ESM4p2_piControl_spinup_Jb" - pp: - - name: "ESM4p2_piControl_spinup_J_redoyr450_btmdiags" - pp: - - name: "ESM4p2_piControl_spinup_J_redoyr450" - pp: diff --git a/fre/make/tests/ESM4_example/platforms.yaml b/fre/make/tests/ESM4_example/platforms.yaml deleted file mode 100644 index 14d4dfff..00000000 --- a/fre/make/tests/ESM4_example/platforms.yaml +++ /dev/null @@ -1,26 +0,0 @@ -platforms: - - name: ncrc5.intel - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL,".mk"] - modelRoot: ${HOME}/fremake_canopy/test - - name: ncrc5.intel23 - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] - modelRoot: ${HOME}/fremake_canopy/test - - name: hpcme.2023 - compiler: intel - RUNenv: [". 
/spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"] - modelRoot: /apps - fc: mpiifort - cc: mpiicc - container: True - containerBuild: "podman" - containerRun: "apptainer" diff --git a/fre/make/tests/SHiELD_example/SHiELD.yaml b/fre/make/tests/SHiELD_example/SHiELD.yaml deleted file mode 100644 index 9f063d0a..00000000 --- a/fre/make/tests/SHiELD_example/SHiELD.yaml +++ /dev/null @@ -1,10 +0,0 @@ -platformYaml: platforms.yaml -compileYaml: compile.yaml -fv3_release: main -phy_release: main -fms_release: "2023.02" -drivers_release: main -coupler_release: "2023.02" -FMSincludes: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" -momIncludes: "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" -INTEL: intel-classic diff --git a/fre/make/tests/SHiELD_example/compile.yaml b/fre/make/tests/SHiELD_example/compile.yaml deleted file mode 100644 index a83bb1ce..00000000 --- a/fre/make/tests/SHiELD_example/compile.yaml +++ /dev/null @@ -1,38 +0,0 @@ -experiment: shield_nh -compileInclude: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" -container_addlibs: ["bacio","sp","w3emc","w3nco"] -baremetal_addlibs: ["-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/bacio-2.4.1-wrykbu2/lib -lbacio_4", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/bacio-2.4.1-wrykbu2/lib -lbacio_8", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/sp-2.5.0-7bumbmx/lib64 -lsp_d", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/w3emc-2.10.0-zmuykep/lib64 -lw3emc_d", "-L/autofs/ncrc-svm1_proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/intel/2023.1.0/w3nco-2.4.1-76qm6h2/lib -lw3nco_d"] -src: - - component: "FMS" - repo: "https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-Duse_libMPI -Duse_netCDF -Duse_LARGEFILE -DHAVE_SCHED_GETAFFINITY -DINTERNAL_FILE_NML -DGFS_PHYS -DGFS_CONSTANTS -DHAVE_GETTID" - branch: "$(fms_release)" - - component: "SHiELD_physics" - requires: ["FMS"] - repo: "https://github.com/NOAA-GFDL/SHiELD_physics.git" - branch: "$(phy_release)" - paths: [SHiELD_physics/gsmphys, - SHiELD_physics/GFS_layer, - SHiELD_physics/IPD_layer] - cppdefs: "-Duse_libMPI -Duse_netCDF -DHAVE_SCHED_GETAFFINITY -DSPMD -Duse_LARGEFILE -DGFS_PHYS -DUSE_GFSL63 -DNEW_TAUCTMAX -DNEMS_GSM -DINTERNAL_FILE_NML -DMOIST_CAPPA -DUSE_COND" - otherFlags: "$(FMSincludes)" - - component: "fv3" - requires: ["FMS", "SHiELD_physics"] - repo: ["https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git", - "https://github.com/NOAA-GFDL/atmos_drivers.git"] - cppdefs: "-Duse_libMPI -Duse_netCDF -DHAVE_SCHED_GETAFFINITY -DSPMD -Duse_LARGEFILE -DGFS_PHYS -DUSE_GFSL63 -DNEW_TAUCTMAX -DNEMS_GSM -DINTERNAL_FILE_NML -DMOIST_CAPPA -DUSE_COND" - branch: ["$(fv3_release)","$(drivers_release)"] - paths: [SHiELD_physics/FV3GFS/, - fv3/atmos_drivers/SHiELD/atmos_model.F90, - fv3/GFDL_atmos_cubed_sphere/driver/SHiELD/atmosphere.F90, - fv3/GFDL_atmos_cubed_sphere/tools/, - fv3/GFDL_atmos_cubed_sphere/model/, - fv3/GFDL_atmos_cubed_sphere/GFDL_tools/fv_diag_column.F90] - otherFlags: "$(FMSincludes)" - - component: "FMScoupler" - requires: ["FMS", "SHiELD_physics", "fv3"] - repo: "https://github.com/NOAA-GFDL/FMScoupler.git" - cppdefs: "-Duse_libMPI -Duse_netCDF -DHAVE_SCHED_GETAFFINITY -DSPMD -Duse_LARGEFILE -DGFS_PHYS -DUSE_GFSL63 -DNEW_TAUCTMAX -DNEMS_GSM 
-DINTERNAL_FILE_NML -DMOIST_CAPPA -DUSE_COND" - branch: "$(coupler_release)" - paths: ["FMScoupler/SHiELD/coupler_main.F90"] - otherFlags: "$(FMSincludes)" diff --git a/fre/make/tests/SHiELD_example/platforms.yaml b/fre/make/tests/SHiELD_example/platforms.yaml deleted file mode 100644 index 9f72043b..00000000 --- a/fre/make/tests/SHiELD_example/platforms.yaml +++ /dev/null @@ -1,26 +0,0 @@ -platforms: - - name: ncrc5.intel - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2022.2.1","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: ${HOME}/fremake_canopy/SHiELDtest - - name: ncrc5.intel23 - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: ["$(INTEL)/2023.1.0","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" - modelRoot: ${HOME}/fremake_canopy/SHiELDtest - - name: hpcme.2023 - compiler: intel - RUNenv: [". /spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.12.1"] - modelRoot: /apps - fc: mpiifort - cc: mpiicc - container: True - containerBuild: "podman" - containerRun: "apptainer" diff --git a/fre/make/tests/compilation/test_fre_make_run_fremake.py b/fre/make/tests/compilation/test_fre_make_run_fremake.py index be91a547..0b9f94b3 100644 --- a/fre/make/tests/compilation/test_fre_make_run_fremake.py +++ b/fre/make/tests/compilation/test_fre_make_run_fremake.py @@ -1,9 +1,13 @@ ''' test "fre make run-fremake" calls ''' import os -from fre.make import runFremake from pathlib import Path +import pytest + +from fre.make import run_fremake_script + + # command options YAMLFILE = "fre/make/tests/null_example/null_model.yaml" PLATFORM = [ "ci.gnu" ] @@ -14,8 +18,9 @@ # get HOME dir to check output HOME_DIR = os.environ["HOME"] +@pytest.mark.skip(reason='failing: fix in development, see PR 275') def test_fre_make_run_fremake_null_model_serial_compile(): ''' run fre make with run-fremake subcommand and build the null model experiment with gnu''' - runFremake.fremake_run(YAMLFILE, PLATFORM, TARGET, False, 1, False, False) + run_fremake_script.fremake_run(YAMLFILE, PLATFORM, TARGET, False, 1, False, False, False) assert Path(f"{HOME_DIR}/fremake_canopy/test/{EXPERIMENT}/{PLATFORM[0]}-{TARGET[0]}/exec/{EXPERIMENT}.x").exists() diff --git a/fre/make/tests/null_example/compile.yaml b/fre/make/tests/null_example/compile.yaml index ab5052a0..68c151f8 100644 --- a/fre/make/tests/null_example/compile.yaml +++ b/fre/make/tests/null_example/compile.yaml @@ -5,7 +5,7 @@ compile: src: - component: "FMS" repo: "https://github.com/NOAA-GFDL/FMS.git" - cppdefs: "-Duse_netCDF -Duse_libMPI -DMAXFIELDS_=200 -DMAXFIELDMETHODS_=200 -DINTERNAL_FILE_NML -DHAVE_GETTID" # gettid flag is platform specific + cppdefs: "-Duse_netCDF -Duse_libMPI -DMAXFIELDS_=200 -DMAXFIELDMETHODS_=200 -DINTERNAL_FILE_NML -DHAVE_GETTID" otherFlags: "-fallow-argument-mismatch" # only needed for gcc branch: *branch - component: "atmos_null" diff --git a/fre/make/tests/null_example/platforms.yaml b/fre/make/tests/null_example/platforms.yaml index fdfa9d4f..d2a3c97f 100644 --- a/fre/make/tests/null_example/platforms.yaml +++ b/fre/make/tests/null_example/platforms.yaml @@ -1,16 +1,8 @@ 
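
(An aside on the widened ``fremake_run`` signature: the eight positional arguments in the updated test call above are hard to audit at a glance. The keyword form below is a usage sketch only; the argument names come from the ``run_fremake_script.py`` signature earlier in this patch, and the meanings of the booleans are inferred from the corresponding CLI flags.)

.. code-block:: python

   from fre.make import run_fremake_script

   run_fremake_script.fremake_run(
       yamlfile = YAMLFILE,            # "fre/make/tests/null_example/null_model.yaml"
       platform = PLATFORM,            # [ "ci.gnu" ]
       target = TARGET,
       parallel = False,               # parallel build option (False in the test)
       jobs = 1,                       # build job count
       no_parallel_checkout = False,   # parallel checkout remains allowed
       execute = False,                # new flag: create scripts without running them
       verbose = False )
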
platforms: - - name: ncrc5.intel - compiler: intel - modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: [ !join [*INTEL, "/2022.2.1"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] - fc: ftn - cc: cc - mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] - modelRoot: ${HOME}/fremake_canopy/test - name: ncrc5.intel23 compiler: intel modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] - modules: [!join [*INTEL, "/2023.1.0"],"fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + modules: [!join [*INTEL, "/2023.2.0"],"fre/bronx-21",cray-hdf5/1.12.2.11, cray-netcdf/4.9.0.11] fc: ftn cc: cc mkTemplate: !join ["/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/", *INTEL, ".mk"] diff --git a/fre/make/tests/test_create_makefile.py b/fre/make/tests/test_create_makefile.py index 36188b33..dd180262 100644 --- a/fre/make/tests/test_create_makefile.py +++ b/fre/make/tests/test_create_makefile.py @@ -4,10 +4,10 @@ import os import shutil from pathlib import Path -from fre.make import createMakefile +from fre.make import create_makefile_script # SET-UP -test_dir = Path("fre/make/tests") +TEST_DIR = Path("fre/make/tests") NM_EXAMPLE = Path("null_example") YAMLFILE = "null_model.yaml" BM_PLATFORM = ["ncrc5.intel23"] @@ -16,57 +16,60 @@ EXPERIMENT = "null_model_full" # Create output location -out = f"{test_dir}/makefile_out" -if Path(out).exists(): +OUT = f"{TEST_DIR}/makefile_out" +if Path(OUT).exists(): # remove - shutil.rmtree(out) + shutil.rmtree(OUT) # create output directory - Path(out).mkdir(parents=True,exist_ok=True) + Path(OUT).mkdir(parents=True,exist_ok=True) else: - Path(out).mkdir(parents=True,exist_ok=True) + Path(OUT).mkdir(parents=True,exist_ok=True) # Set output directory as home for fre make output -#os.environ["HOME"]=str(Path(out)) +#os.environ["HOME"]=str(Path(OUT)) def test_modelyaml_exists(): """ Check the model yaml exists """ - assert Path(f"{test_dir}/{NM_EXAMPLE}/{YAMLFILE}").exists() + assert Path(f"{TEST_DIR}/{NM_EXAMPLE}/{YAMLFILE}").exists() def test_compileyaml_exists(): """ Check the compile yaml exists """ - assert Path(f"{test_dir}/{NM_EXAMPLE}/compile.yaml").exists() + assert Path(f"{TEST_DIR}/{NM_EXAMPLE}/compile.yaml").exists() def test_platformyaml_exists(): """ Check the platform yaml exists """ - assert Path(f"{test_dir}/{NM_EXAMPLE}/platforms.yaml").exists() + assert Path(f"{TEST_DIR}/{NM_EXAMPLE}/platforms.yaml").exists() def test_bm_makefile_creation(): """ Check the makefile is created when a bare-metal platform is used """ # Set output directory as home for fre make output - os.environ["HOME"]=str(Path(out)) + def_home = str(os.environ["HOME"]) + os.environ["HOME"]=OUT#str(Path(OUT)) bm_plat = BM_PLATFORM[0] targ = TARGET[0] - yamlfile_path = f"{test_dir}/{NM_EXAMPLE}/{YAMLFILE}" + yamlfile_path = f"{TEST_DIR}/{NM_EXAMPLE}/{YAMLFILE}" - createMakefile.makefile_create(yamlfile_path,BM_PLATFORM,TARGET) + create_makefile_script.makefile_create(yamlfile_path,BM_PLATFORM,TARGET) - assert Path(f"{out}/fremake_canopy/test/{EXPERIMENT}/{bm_plat}-{targ}/exec/Makefile").exists() + assert Path(f"{OUT}/fremake_canopy/test/{EXPERIMENT}/{bm_plat}-{targ}/exec/Makefile").exists() + os.environ["HOME"] = def_home + assert os.environ["HOME"] == def_home def test_container_makefile_creation(): """ Check the makefile is created when the container platform is used """ container_plat = CONTAINER_PLATFORM[0] - 
yamlfile_path = f"{test_dir}/{NM_EXAMPLE}/{YAMLFILE}" - createMakefile.makefile_create(yamlfile_path,CONTAINER_PLATFORM,TARGET) + yamlfile_path = f"{TEST_DIR}/{NM_EXAMPLE}/{YAMLFILE}" + create_makefile_script.makefile_create(yamlfile_path,CONTAINER_PLATFORM,TARGET) assert Path(f"tmp/{container_plat}/Makefile").exists() diff --git a/fre/pp/checkoutScript.py b/fre/pp/checkoutScript.py deleted file mode 100644 index 57036634..00000000 --- a/fre/pp/checkoutScript.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python - -# Author: Bennett Chang -# Description: - -import os -import subprocess -from subprocess import PIPE -from subprocess import STDOUT -import re -import click - -############################################# - -package_dir = os.path.dirname(os.path.abspath(__file__)) - -############################################# - -def _checkoutTemplate(experiment, platform, target, branch='main'): - """ - Checkout the workflow template files from the repo - """ - # Create the directory if it doesn't exist - directory = os.path.expanduser("~/cylc-src") - os.makedirs(directory, exist_ok=True) - - # Change the current working directory - os.chdir(directory) - - # Set the name of the directory - name = f"{experiment}__{platform}__{target}" - - # Clone the repository with depth=1; check for errors - click.echo("cloning experiment into directory " + directory + "/" + name) - clonecmd = ( - f"git clone -b {branch} --single-branch --depth=1 --recursive " - f"https://github.com/NOAA-GFDL/fre-workflows.git {name}" ) - preexist_error = f"fatal: destination path '{name}' exists and is not an empty directory." - click.echo(clonecmd) - cloneproc = subprocess.run(clonecmd, shell=True, check=False, stdout=PIPE, stderr=STDOUT) - if not cloneproc.returncode == 0: - if re.search(preexist_error.encode('ASCII'),cloneproc.stdout) is not None: - argstring = f" -e {experiment} -p {platform} -t {target}" - stop_report = ( - "Error in checkoutTemplate: the workflow definition specified by -e/-p/-t already" - f" exists at the location ~/cylc-src/{name}!\n" - f"In the future, we will confirm that ~/cylc-src/{name} is usable and will check " - "whether it is up-to-date.\n" - "But for now, if you wish to proceed, you must delete the workflow definition.\n" - "To start over, try:\n" - f"\t cylc stop {name}\n" - f"\t cylc clean {name}\n" - f"\t rm -r ~/cylc-src/{name}" ) - click.echo(stop_report) - return 1 - else: - #if not identified, just print the error - click.echo(clonecmd) - click.echo(cloneproc.stdout) - return 1 - -############################################# - -@click.command() -def checkoutTemplate(experiment, platform, target, branch="main"): - ''' - Wrapper script for calling checkoutTemplate - allows the decorated version - of the function to be separate from the undecorated version - ''' - return _checkoutTemplate(experiment, platform, target, branch) - - -if __name__ == '__main__': - checkoutTemplate() diff --git a/fre/pp/checkout_script.py b/fre/pp/checkout_script.py new file mode 100644 index 00000000..02dca1f7 --- /dev/null +++ b/fre/pp/checkout_script.py @@ -0,0 +1,95 @@ +''' +Description: Checkout script which accounts for 4 different scenarios: +1. branch not given, folder does not exist, +2. branch given, folder does not exist, +3. branch not given, folder exists, +4. 
branch given and folder exists
+'''
+import os
+import sys
+import subprocess
+
+import click
+
+from fre import fre
+
+FRE_WORKFLOWS_URL = 'https://github.com/NOAA-GFDL/fre-workflows.git'
+
+def checkout_template(experiment = None, platform = None, target = None, branch = None):
+    """
+    Checkout the workflow template files from the repo
+    """
+    ## Chdir back to here before we exit this routine
+    go_back_here = os.getcwd()
+
+    # branch and version parameters
+    default_tag = fre.version
+    git_clone_branch_arg = branch if branch is not None else default_tag
+    if branch is None:
+        print(f"(checkout_script) default tag is '{default_tag}'")
+    else:
+        print(f"(checkout_script) requested branch/tag is '{branch}'")
+
+    # check args + set the name of the directory
+    if None in [experiment, platform, target]:
+        raise ValueError( 'one of these is None: experiment / platform / target = \n'
+                          f'{experiment} / {platform} / {target}' )
+    name = f"{experiment}__{platform}__{target}"
+
+    # Create the directory if it doesn't exist
+    directory = os.path.expanduser("~/cylc-src")
+    try:
+        os.makedirs(directory, exist_ok = True)
+    except Exception as exc:
+        raise OSError(
+            f"(checkout_script) directory {directory} was not able to be created. exit!") from exc
+
+    checkout_exists = os.path.isdir(f'{directory}/{name}')
+
+    if not checkout_exists: # scenarios 1+2, checkout doesn't exist, branch specified (or not)
+        print('(checkout_script) checkout does not yet exist; will create now')
+        clone_output = subprocess.run( ['git', 'clone','--recursive',
+                                        f'--branch={git_clone_branch_arg}',
+                                        FRE_WORKFLOWS_URL, f'{directory}/{name}'],
+                                       capture_output = True, text = True, check = True)
+        print(f'(checkout_script) {clone_output}')
+
+    else: # the repo checkout does exist, scenarios 3 and 4.
+        os.chdir(f'{directory}/{name}')
+
+        # capture the branch and tag
+        # if either match git_clone_branch_arg, then success. otherwise, fail.
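+        # e.g. with the default tag '2024.01': a checkout already at tag '2024.01',
+        # or on a branch named '2024.01', is reused; anything else aborts so that a
+        # mismatched checkout is never silently reused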
+ + current_tag = subprocess.run(["git","describe","--tags"], + capture_output = True, + text = True, check = True).stdout.strip() + current_branch = subprocess.run(["git", "branch", "--show-current"], + capture_output = True, + text = True, check = True).stdout.strip() + + if current_tag == git_clone_branch_arg or current_branch == git_clone_branch_arg: + print(f"(checkout_script) checkout exists ('{directory}/{name}'), and matches '{git_clone_branch_arg}'") + else: + print(f"(checkout_script) ERROR: checkout exists ('{directory}/{name}') and does not match '{git_clone_branch_arg}'") + print(f"(checkout_script) ERROR: current branch is '{current_branch}', current tag-describe is '{current_tag}'") + exit(1) + + # make sure we are back where we should be + if os.getcwd() != go_back_here: + os.chdir(go_back_here) + + return 0 + +############################################# + +@click.command() +def _checkout_template(experiment, platform, target, branch ): + ''' + Wrapper script for calling checkout_template - allows the decorated version + of the function to be separate from the undecorated version + ''' + return checkout_template(experiment, platform, target, branch) + + +if __name__ == '__main__': + checkout_template() diff --git a/fre/pp/configure_script_xml.py b/fre/pp/configure_script_xml.py index 899b4830..38247ce1 100644 --- a/fre/pp/configure_script_xml.py +++ b/fre/pp/configure_script_xml.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 ''' Primary Usage: fre-bronx-to-canopy -x XML -e EXP -p PLATFORM -t TARGET diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index b782e3de..25507c14 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Script creates rose-apps and rose-suite files for the workflow from the pp yaml. 
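
(For orientation: the quoting rule that the ``fre/pp/tests/test_rose_quoting.py`` changes further below pin down for this module's ``quote_rose_values`` helper amounts to roughly the following. A minimal sketch, consistent with those two tests only; the real function may well handle more value types.)

.. code-block:: python

   def quote_rose_values(value):
       ''' render a Python value for a rose-suite.conf entry (sketch) '''
       if isinstance(value, bool):
           return str(value)      # True  -> True, unquoted, per test_boolean
       return f"'{value}'"        # 'foo' -> 'foo' in single quotes, per test_string
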
@@ -147,7 +146,7 @@ def set_rose_apps(yamlfile,rose_regrid,rose_remap): value=f'{interp_split[0]}_{interp_split[1]}.{interp_method}') #################### -def yamlInfo(yamlfile,experiment,platform,target): +def yaml_info(yamlfile,experiment,platform,target): """ Using a valid pp.yaml, the rose-app and rose-suite configuration files are created in the cylc-src @@ -200,13 +199,13 @@ def yamlInfo(yamlfile,experiment,platform,target): print(" " + outfile) @click.command() -def _yamlInfo(yamlfile,experiment,platform,target): +def _yaml_info(yamlfile,experiment,platform,target): ''' - Wrapper script for calling yamlInfo - allows the decorated version + Wrapper script for calling yaml_info - allows the decorated version of the function to be separate from the undecorated version ''' - return yamlInfo(yamlfile,experiment,platform,target) + return yaml_info(yamlfile,experiment,platform,target) # Use parseyaml function to parse created edits.yaml if __name__ == '__main__': - yamlInfo() + yaml_info() diff --git a/fre/pp/frepp.py b/fre/pp/frepp.py index b3456e31..cdd905d4 100644 --- a/fre/pp/frepp.py +++ b/fre/pp/frepp.py @@ -1,14 +1,16 @@ ''' fre pp ''' import click -from .checkoutScript import checkoutTemplate -from .configure_script_yaml import yamlInfo -from .configure_script_xml import convert -from .validate import validate_subtool -from .install import install_subtool -from .run import pp_run_subtool -from .status import status_subtool -from .wrapper import runFre2pp + +from fre.pp import checkout_script +from fre.pp import configure_script_yaml +from fre.pp import configure_script_xml +from fre.pp import validate_script +from fre.pp import install_script +from fre.pp import run_script +from fre.pp import trigger_script +from fre.pp import status_script +from fre.pp import wrapper_script @click.group(help=click.style(" - access fre pp subcommands", fg=(57,139,210))) def pp_cli(): @@ -30,7 +32,7 @@ def pp_cli(): def status(context, experiment, platform, target): # pylint: disable=unused-argument """ - Report status of PP configuration""" - context.forward(status_subtool) + context.forward(status_script.status_subtool) # fre pp run @pp_cli.command() @@ -47,7 +49,7 @@ def status(context, experiment, platform, target): def run(context, experiment, platform, target): # pylint: disable=unused-argument """ - Run PP configuration""" - context.forward(pp_run_subtool) + context.forward(run_script.pp_run_subtool) # fre pp validate @pp_cli.command() @@ -64,7 +66,7 @@ def run(context, experiment, platform, target): def validate(context, experiment, platform, target): # pylint: disable=unused-argument """ - Validate PP configuration""" - context.forward(validate_subtool) + context.forward(validate_script._validate_subtool) # fre pp install @pp_cli.command() @@ -81,7 +83,7 @@ def validate(context, experiment, platform, target): def install(context, experiment, platform, target): # pylint: disable=unused-argument """ - Install PP configuration""" - context.forward(install_subtool) + context.forward(install_script.install_subtool) @pp_cli.command() @click.option("-y", "--yamlfile", type=str, @@ -100,7 +102,7 @@ def install(context, experiment, platform, target): def configure_yaml(context,yamlfile,experiment,platform,target): # pylint: disable=unused-argument """ - Execute fre pp configure """ - context.forward(yamlInfo) + context.forward(configure_script_yaml._yaml_info) @pp_cli.command() @click.option("-e", "--experiment", type=str, @@ -112,17 +114,14 @@ def 
configure_yaml(context,yamlfile,experiment,platform,target): @click.option("-t", "--target", type=str, help="Target name", required=True) -@click.option("-b", "--branch", - show_default=True, - default="main", type=str, - help="Name of fre2/workflows/postproc branch to clone; " \ - "defaults to 'main'. Not intended for production use, " \ - "but needed for branch testing." ) +@click.option("-b", "--branch", type =str, + required=False, default = None, + help="fre-workflows branch/tag to clone; default is $(fre --version)") @click.pass_context -def checkout(context, experiment, platform, target, branch='main'): +def checkout(context, experiment, platform, target, branch=None): # pylint: disable=unused-argument """ - Execute fre pp checkout """ - context.forward(checkoutTemplate) + context.forward(checkout_script._checkout_template) @pp_cli.command() @click.option('-x', '--xml', @@ -154,11 +153,11 @@ def checkout(context, experiment, platform, target, branch='main'): is_flag=True, default=False, help="Optional. Process refineDiag scripts") -@click.option('--pp_start', +@click.option('--pp_start', type=str, default='0000', help="Optional. Starting year of postprocessing. " \ "If not specified, a default value of '0000' " \ "will be set and must be changed in rose-suite.conf") -@click.option('--pp_stop', +@click.option('--pp_stop', type=str, default='0000', help="Optional. Ending year of postprocessing. " \ "If not specified, a default value of '0000' " \ "will be set and must be changed in rose-suite.conf") @@ -180,7 +179,7 @@ def configure_xml(context, xml, platform, target, experiment, do_analysis, histo ppdir, do_refinediag, pp_start, pp_stop, validate, verbose, quiet, dual): # pylint: disable=unused-argument """ - Converts a Bronx XML to a Canopy rose-suite.conf """ - context.forward(convert) + context.forward(configure_script_xml.convert) #fre pp wrapper @pp_cli.command() @@ -190,23 +189,44 @@ def configure_xml(context, xml, platform, target, experiment, do_analysis, histo @click.option("-p", "--platform", type=str, help="Platform name", required=True) -@click.option("-t", "--target", type=str, +@click.option("-T", "--target", type=str, help="Target name", required=True) @click.option("-c", "--config-file", type=str, help="Path to a configuration file in either XML or YAML", required=True) @click.option("-b", "--branch", - show_default=True, - default="main", type=str, - help="Name of fre2/workflows/postproc branch to clone; " \ - "defaults to 'main'. Not intended for production use, " \ - "but needed for branch testing." 
)
+              required=False, default=None,
+              help="fre-workflows branch/tag to clone; default is $(fre --version)")
+@click.option("-t", "--time",
+              required=False, default=None,
+              help="Time whose history files are ready")
 @click.pass_context
-def wrapper(context, experiment, platform, target, config_file, branch='main'):
+def wrapper(context, experiment, platform, target, config_file, branch, time):
     # pylint: disable=unused-argument
     """ - Execute fre pp steps in order """
-    context.forward(runFre2pp)
+    print('(frepp.wrapper) about to forward context to wrapper.run_all_fre_pp_steps via click...')
+    context.forward(wrapper_script._run_all_fre_pp_steps)
+    print('(frepp.wrapper) done forwarding context to wrapper.run_all_fre_pp_steps via click.')
+
+@pp_cli.command()
+@click.option("-e", "--experiment", type=str,
+              help="Experiment name",
+              required=True)
+@click.option("-p", "--platform", type=str,
+              help="Platform name",
+              required=True)
+@click.option("-T", "--target", type=str,
+              help="Target name",
+              required=True)
+@click.option("-t", "--time",
+              required=True,
+              help="Time whose history files are ready")
+@click.pass_context
+def trigger(context, experiment, platform, target, time):
+    # pylint: disable=unused-argument
+    """ - Start postprocessing for a particular time """
+    context.forward(trigger_script._trigger)

 if __name__ == "__main__":
     ''' entry point for click to fre pp commands '''
diff --git a/fre/pp/install.py b/fre/pp/install.py
deleted file mode 100644
index 9ffc00ee..00000000
--- a/fre/pp/install.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-''' fre pp install '''
-
-import subprocess
-import click
-
-def _install_subtool(experiment, platform, target):
-    """
-    Install the Cylc workflow definition located in
-    ~/cylc-src/<experiment>__<platform>__<target>
-    to
-    ~/cylc-run/<experiment>__<platform>__<target>
-    """
-
-    name = experiment + '__' + platform + '__' + target
-    cmd = f"cylc install --no-run-name {name}"
-    subprocess.run(cmd, shell=True, check=True)
-
-@click.command()
-def install_subtool(experiment, platform, target):
-    ''' entry point to install for click '''
-    return _install_subtool(experiment, platform, target)
diff --git a/fre/pp/install_script.py b/fre/pp/install_script.py
new file mode 100644
index 00000000..cb2ed021
--- /dev/null
+++ b/fre/pp/install_script.py
@@ -0,0 +1,41 @@
+''' fre pp install '''
+
+from pathlib import Path
+import os
+import subprocess
+import click
+
+def install_subtool(experiment, platform, target):
+    """
+    Install the Cylc workflow definition located in
+    ~/cylc-src/<experiment>__<platform>__<target>
+    to
+    ~/cylc-run/<experiment>__<platform>__<target>
+    """
+
+    name = experiment + '__' + platform + '__' + target
+    # if the cylc-run directory already exists,
+    # then check whether the cylc expanded definition (cylc config)
+    # is identical. If the same, good. If not, bad.
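+    # i.e. compare `cylc config <name>` (installed copy) with `cylc config .` run
+    # from the ~/cylc-src source copy; identical rendered definitions mean the
+    # reinstall can be skipped safely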
+    source_dir = Path(os.path.expanduser("~/cylc-src"), name)
+    install_dir = Path(os.path.expanduser("~/cylc-run"), name)
+    if os.path.isdir(install_dir):
+        installed_def = subprocess.run(["cylc", "config", name],capture_output=True).stdout
+        go_back_here = os.getcwd()
+        os.chdir(source_dir)
+        source_def = subprocess.run(['cylc', 'config', '.'], capture_output=True).stdout
+        if installed_def == source_def:
+            print(f"NOTE: Workflow '{install_dir}' already installed, and the definition is unchanged")
+        else:
+            print(f"ERROR: Workflow '{install_dir}' already installed, and the definition has changed!")
+            print(f"ERROR: Please remove installed workflow with 'cylc clean {name}' or move the workflow run directory '{install_dir}'")
+            exit(1)
+    else:
+        print(f"NOTE: About to install workflow into ~/cylc-run/{name}")
+        cmd = f"cylc install --no-run-name {name}"
+        subprocess.run(cmd, shell=True, check=True)
+
+@click.command()
+def _install_subtool(experiment, platform, target):
+    ''' entry point to install for click '''
+    return install_subtool(experiment, platform, target)
diff --git a/fre/pp/run.py b/fre/pp/run_script.py
similarity index 81%
rename from fre/pp/run.py
rename to fre/pp/run_script.py
index 57f2c427..b6ca607a 100644
--- a/fre/pp/run.py
+++ b/fre/pp/run_script.py
@@ -1,10 +1,9 @@
-#!/usr/bin/env python
 ''' fre pp run '''
 
 import subprocess
 import click
 
-def _pp_run_subtool(experiment, platform, target):
+def pp_run_subtool(experiment, platform, target):
     """
     Start or restart the Cylc workflow identified by:
     <experiment>__<platform>__<target>
@@ -15,6 +14,10 @@ def _pp_run_subtool(experiment, platform, target):
     subprocess.run(cmd, shell=True, check=True)
 
 @click.command()
-def pp_run_subtool(experiment, platform, target):
+def _pp_run_subtool(experiment, platform, target):
     ''' entry point to run for click '''
-    return _pp_run_subtool(experiment, platform, target)
+    return pp_run_subtool(experiment, platform, target)
+
+
+if __name__ == "__main__":
+    pp_run_subtool()
diff --git a/fre/pp/status.py b/fre/pp/status_script.py
similarity index 68%
rename from fre/pp/status.py
rename to fre/pp/status_script.py
index 6e2c07e1..b4f579c7 100644
--- a/fre/pp/status.py
+++ b/fre/pp/status_script.py
@@ -1,10 +1,11 @@
-#!/usr/bin/env python
 ''' fre pp status '''
 
 import subprocess
 import click
 
-def _status_subtool(experiment, platform, target):
+TIMEOUT_SECS = 120
+
+def status_subtool(experiment, platform, target):
     """
     Report workflow state for the Cylc workflow
     <experiment>__<platform>__<target>
@@ -12,10 +13,13 @@ def _status_subtool(experiment, platform, target):
     name = experiment + '__' + platform + '__' + target
     cmd = f"cylc workflow-state {name}"
-    subprocess.run(cmd, shell=True, check=True, timeout=30)
+    subprocess.run(cmd, shell=True, check=True, timeout=TIMEOUT_SECS)
 
 @click.command()
-def status_subtool(experiment, platform, target):
+def _status_subtool(experiment, platform, target):
     ''' entry point to status for click '''
-    return _status_subtool(experiment, platform, target)
+    return status_subtool(experiment, platform, target)
+
+if __name__ == "__main__":
+    status_subtool()
diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py
index eaf1fc2e..e6391513 100644
--- a/fre/pp/tests/test_configure_script_yaml.py
+++ b/fre/pp/tests/test_configure_script_yaml.py
@@ -37,7 +37,7 @@ def test_configure_script():
     model_yaml = str(Path(f"{test_dir}/{test_yaml}"))
 
     # Invoke configure_yaml_script.py
-    csy.yamlInfo(model_yaml,EXPERIMENT,PLATFORM,TARGET)
+    csy.yaml_info(model_yaml,EXPERIMENT,PLATFORM,TARGET)
 
     # Check for configuration creation and final combined yaml
     assert all([Path(f"{out_dir}/{EXPERIMENT}.yaml").exists(),
diff --git a/fre/pp/tests/test_rose_quoting.py b/fre/pp/tests/test_rose_quoting.py
index bd5353b1..13b3141a 100644
--- a/fre/pp/tests/test_rose_quoting.py
+++ b/fre/pp/tests/test_rose_quoting.py
@@ -1,7 +1,10 @@
+''' quick tests to make sure rose handles certain types of values with quotes correctly '''
 from fre.pp.configure_script_yaml import quote_rose_values
 
 def test_boolean():
+    ''' check that boolean values with quotes are handled correctly by rose'''
     assert quote_rose_values(True) == 'True'
 
 def test_string():
+    ''' check that string values with quotes are handled correctly by rose'''
     assert quote_rose_values('foo') == "'foo'"
diff --git a/fre/pp/trigger_script.py b/fre/pp/trigger_script.py
new file mode 100644
index 00000000..14ce4291
--- /dev/null
+++ b/fre/pp/trigger_script.py
@@ -0,0 +1,22 @@
+''' fre pp trigger '''
+
+import subprocess
+import click
+
+def trigger(experiment, platform, target, time):
+    """
+    Trigger the pp-starter task for the time indicated
+    """
+
+    name = experiment + '__' + platform + '__' + target
+    cmd = f"cylc trigger {name}//{time}/pp-starter"
+    subprocess.run(cmd, shell=True, check=True, timeout=30)
+
+
+@click.command()
+def _trigger(experiment, platform, target, time):
+    ''' entry point to trigger for click '''
+    return trigger(experiment, platform, target, time)
+
+if __name__ == "__main__":
+    trigger()
diff --git a/fre/pp/validate.py b/fre/pp/validate_script.py
similarity index 81%
rename from fre/pp/validate.py
rename to fre/pp/validate_script.py
index 9c07340f..d48f5f47 100644
--- a/fre/pp/validate.py
+++ b/fre/pp/validate_script.py
@@ -1,16 +1,15 @@
-#!/usr/bin/env python
 ''' fre pp validate '''
 
 import os
 import subprocess
 import click
 
-def _validate_subtool(experiment, platform, target):
+def validate_subtool(experiment, platform, target):
     """
     Validate the Cylc workflow definition located in
     ~/cylc-src/<experiment>__<platform>__<target>
     """
-
+    go_back_here = os.getcwd()
     directory = os.path.expanduser('~/cylc-src/' + experiment + '__' + platform + '__' + target)
 
     # Change the current working directory
@@ -23,8 +22,12 @@ def _validate_subtool(experiment, platform, target):
 
     # Validate the Cylc configuration
     cmd = "cylc validate ."
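+    # `cylc validate .` parses the workflow definition in the directory chdir'd
+    # to above, without installing or running anything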
subprocess.run(cmd, shell=True, check=True) + os.chdir(go_back_here) @click.command() -def validate_subtool(experiment, platform, target): +def _validate_subtool(experiment, platform, target): ''' entry point to validate for click ''' - return _validate_subtool(experiment, platform, target) + return validate_subtool(experiment, platform, target) + +if __name__ == "__main__": + validate_subtool() diff --git a/fre/pp/wrapper.py b/fre/pp/wrapper.py deleted file mode 100644 index 0f0aad39..00000000 --- a/fre/pp/wrapper.py +++ /dev/null @@ -1,85 +0,0 @@ -""" -frepp.py, a replacement for the frepp bash script located at: -https://gitlab.gfdl.noaa.gov/fre2/system-settings/-/blob/main/bin/frepp -Author: Carolyn.Whitlock -""" - -#todo: -# add relative path import to rest of pp tools -# add command-line args using same format as fre.py -# include arg for pp start / stop -# test yaml path -# error handling - -import os -import time -import click - -# Import from the local packages -from .checkoutScript import _checkoutTemplate -from .configure_script_xml import _convert -from .configure_script_yaml import _yamlInfo -from .validate import _validate_subtool -from .install import _install_subtool -from .run import _pp_run_subtool -from .status import _status_subtool - -@click.command() -def runFre2pp(experiment, platform, target, config_file, branch): - ''' - Wrapper script for calling a FRE2 pp experiment with the canopy-style - infrastructure and fre-cli - time=0000 - ''' - - config_file = os.path.abspath(config_file) - - #env_setup - #todo: check for experiment existing, call frepp_stop to clean experiment, - try: - print("calling _checkoutTemplate") - _checkoutTemplate(experiment, platform, target, branch) - except Exception as err: - raise - - #dumb xml check;does it need to be smarter? - is_xml = config_file[-3:] == "xml" - if is_xml: - #TODO: should this prompt for pp start/stop years? 
- try: - _convert(config_file, platform, target, experiment, do_analysis=False) - #note: arg list for this function is a looooot longer, but all those - #args can be deduced from the xml when given default vals - except Exception as err: - raise - try: - _validate_subtool(experiment, platform, target) - #See notes in main() function - except Exception as err: - raise - else: - try: - _yamlInfo(config_file, experiment, platform, target) - except Exception as err: - raise - - try: - _install_subtool(experiment, platform, target) - except: - raise - - try: - _pp_run_subtool(experiment, platform, target) - except Exception as err: - raise - - #send off a watcher script that reports on how it's going - for n in range(1,12): - try: - _status_subtool(experiment, platform, target) - except Exception as err: - raise - time.sleep(300) - -if __name__ == '__main__': - runFre2pp() diff --git a/fre/pp/wrapper_script.py b/fre/pp/wrapper_script.py new file mode 100644 index 00000000..15a48dea --- /dev/null +++ b/fre/pp/wrapper_script.py @@ -0,0 +1,64 @@ +""" +frepp.py, a replacement for the frepp bash script located at: +https://gitlab.gfdl.noaa.gov/fre2/system-settings/-/blob/main/bin/frepp +Author: Carolyn.Whitlock +""" + +# add relative path import to rest of pp tools +# add command-line args using same format as fre.py +# include arg for pp start / stop +# test yaml path +# error handling + +import os +#import time +import click + +# Import from the local packages +from fre.pp.checkout_script import checkout_template +from fre.pp.configure_script_yaml import yaml_info +from fre.pp.install_script import install_subtool +from fre.pp.run_script import pp_run_subtool +from fre.pp.trigger_script import trigger +from fre.pp.status_script import status_subtool + +def run_all_fre_pp_steps(experiment, platform, target, config_file, branch=None, time=None): + ''' + Wrapper script for calling a FRE2 pp experiment with the canopy-style + infrastructure and fre-cli + ''' + print('(run_all_fre_pp_steps) config_file path resolving...') + config_file = os.path.abspath(config_file) + print(f' config_file={config_file}') + + print('(run_all_fre_pp_steps) calling checkout_template') + checkout_template(experiment, platform, target, branch) + + print('(run_all_fre_pp_steps) calling yaml_info') + yaml_info(config_file, experiment, platform, target) + + print('(run_all_fre_pp_steps) calling install_subtool') + install_subtool(experiment, platform, target) + + print('(run_all_fre_pp_steps) calling pp_run_subtool') + pp_run_subtool(experiment, platform, target) + + if time is not None: + print('(run_all_fre_pp_steps) calling trigger') + trigger(experiment, platform, target, time) + + print('(run_all_fre_pp_steps) calling status_subtool') + status_subtool(experiment, platform, target) + + print('(run_all_fre_pp_steps) done.') + + +@click.command() +def _run_all_fre_pp_steps(experiment, platform, target, config_file, branch, time): + ''' + click entry point for run_all_fre_pp_steps. 
+ ''' + return run_all_fre_pp_steps(experiment, platform, target, config_file, branch, time) + +if __name__ == '__main__': + run_all_fre_pp_steps() diff --git a/fre/pp/wrapperscript b/fre/pp/wrapperscript deleted file mode 100755 index f98d9cd6..00000000 --- a/fre/pp/wrapperscript +++ /dev/null @@ -1,320 +0,0 @@ -#!/bin/bash -set -euo pipefail -set -x - -# https://stackoverflow.com/questions/402377/using-getopts-to-process-long-and-short-command-line-options -TEMP=$(getopt -o x:p:P:T:t:shvc:D:d: --long xml:,platform:,target:,time:,help,mppnccombine-opts:,mail-list: -n 'frepp' -- "$@") -eval set -- "$TEMP" - -# defaults -xml= -platform= -target= -time= -help= - -# arg parsing -while true; do - case "$1" in - # required - -x | --xml ) xml="$2"; shift 2 ;; - -p | -P | --platform ) platform="$2"; shift 2 ;; - -T | --target ) target="$2"; shift 2 ;; - -t | --time ) time="$2"; shift 2 ;; - - # optional - -h | --help ) help=true; shift ;; - - # ignored - -v ) shift ;; - -c ) shift 2 ;; - -D ) shift 2 ;; - -d ) shift 2 ;; - -s ) shift ;; - --mppnccombine-opts ) shift 2 ;; - --mail-list ) shift 2 ;; - - -- ) shift; break ;; - * ) break ;; - esac -done -if [[ -n ${1-} ]]; then - expname=$1 -else - expname= -fi - -# If $FRE_DUALPP is set, then take two different actions -# 1. Append "_canopy" to pp, analysis, and history_refined directories created through the XML converter -# 2. Submit Bronx frepp as well -set +u -if [[ $FRE_DUALPP ]]; then - dual=true -else - dual=false -fi -set -u - -# Help -usage="Usage: frepp --xml=XML --platform=PLATFORM --target=TARGET --time=YYYY EXP" -if [[ $help ]]; then - echo $usage - cat << EOF -################################################################################ -FRE Canopy frepp wrapper to start Canopy postprocessing workflow with -traditional Bronx frepp usage. - -Cylc implementation current settings used by this wrapper: -1. Workflow name is ____ -e.g. use cylc commands such as: - -cylc workflow-state ____ - -This is somewhat overly verbose and also not verbose enough -(i.e. does not include FRE STEM). -If you have suggestions please let the FRE team know. - -2. Will not use unique run directories. -If the run directory exists you will need to remove it before re-installing. - -################################################################################ -What does this script do? -1. If workflow run-dir was previously installed, - start postprocessing for a history file segment: - -- Check if the workflow is running -- Check the task states -- Start cylc scheduler -- Trigger requested processing (-t YYYY) -- Exit - -2. Otherwise, if workflow src-dir does not exist, - configure the postprocessing: - -- Checkout a fresh PP template -- Run the XML converter - -3. Then, install and start the postprocessing for a history file segment -- Run the validation scripts -- Install the workflow -- Start cylc scheduler -- Trigger requested processing (-t YYYY) - -################################################################################ -Recovery steps and scenarios: -1. Something is terribly wrong with PP and you want to reconfigure and try again -- Stop cylc scheduler with "cylc stop --kill " -- Remove run directory with "cylc clean " -- Edit the configuration files in ~/cylc-src/ -- Run frepp again to reinstall and run the updated PP configuration. - -2. Something is terribly wrong and you want a complete fresh start, - or you want an update from the pp template repo. 
-- Stop cylc scheduler with "cylc stop --kill" -- Remove run directory with "cylc clean " -- Remove src directory with "rm -rf ~/cylc-src/" -- Run frepp again to recheckout pp template, run xml converter, and install/run - -################################################################################ -Specific suggestions to recover from task failures: - -1. refineDiag script failures are likely with a XML-converted configs - for two reasons, so you will probably need to either adjust or remove them. - To disable refineDiag, - - set DO_REFINEDIAG=False, and - - comment out HISTORY_DIR_REFINED - -a. It may use something in the XML, using an xml shell variable that does not - exist now. In these cases, you could rewrite the refineDiag script to - not use the xmlDir shell variable or not use the script. - For "refineDiag_atmos_cmip6.csh", it was included in the postprocessing - template checkout with a small modification. Use this location: - '\$CYLC_WORKFLOW_RUN_DIR/etc/refineDiag/refineDiag_atmos_cmip6.csh'. - - set REFINEDIAG_SCRIPTS to that location - -b. It may be a refineDiag script that does not generate .nc files - as it was expected to do. FRE Bronx allows these side-effect refineDiags, - and instead a new mechanism was invented for these scripts that - do not generate netcdf output: - - set DO_PREANALYSIS=True, and - - PREANALYSIS_SCRIPT="/paath/to/script". - -2. Many PP components in Bronx XMLs are doomed (in terms of failing to - produce output and job failing) caused by using history files that do not - exist, but do not cause problems for the other components. Currently, - the Canopy pp template is not robust in terms of this error mode, - so it's best to not process history files that do not exist. - - In the future, diag manager metadata output will provide a catalog - of history output that the validators will check against. For now, - a simple checker exists, but you must manually generate the - history output list ("history-manifest" file). - - Generate the file with a simple listing of the history tarfile. - You can append a history_refined tarfile as well. Then, the validator - will report on PP components you have specified - (PP_COMPONENTS) but that do not exist in the history-manifest file. - - tar -tf /path/to/history/YYYYMMDD.nc.tar | sort > history-manifest - - To run the configuration validation: - -cd ~/cylc-src/ -rose macro --validate - - It is a good idea to not include pp components (PP_COMPONENTS) that - include history files that do not exist. - - In all cases it is recommended to remove validation errors. - See README.md for general configuration instructions. 
-EOF - exit 0 -fi - -# check for all options -if [[ $xml ]]; then - xml=$(readlink -f $xml) - if [[ -f $xml ]]; then - echo "using $xml" - else - echo "XML '$xml' does not exist" - exit 1 - fi -else - echo $usage - exit 1 -fi - -if [[ $platform ]]; then - echo "using $platform" -else - echo $usage - exit 1 -fi - -if [[ $target ]]; then - echo "using $target" -else - echo $usage - exit 1 -fi - -if [[ $time ]]; then - echo "using $time" -else - echo $usage - exit 1 -fi - -if [[ $expname ]]; then - echo "using $expname" -else - echo $usage - exit 1 -fi - -cylc --version -if cylc cycle-point $time; then - time_iso=$(cylc cycle-point $time --template CCYYMMDDT0000Z) -else - echo "Time '$time' not a valid ISO8601 date" - exit 1 -fi - -# Start bronx dual-pp -if [[ $dual == true ]]; then - $FRE_COMMANDS_HOME/bin/frepp -x $xml -P $platform -T $target -t $time -D '' $expname -v -s -fi - -# Set the cylc workflow name to __ -# use the default workflow source convention -name=${expname}__${platform}__$target -rundir="$HOME/cylc-run/$name" -srcdir="$HOME/cylc-src/$name" -echo Workflow name: $name -echo Run directory: $rundir -echo Src directory: $srcdir - -# Start postprocessing for a history file segment (workflow was previously installed) -if [[ -d $rundir ]]; then - echo "Run directory '$rundir' exists, so will now try to start it" - cylc scan - cylc workflow-state $name - if cylc workflow-state $name | grep failed; then - cat << EOF -################################################################################ -Unfortunately, there are failed tasks, probably caused by refineDiag errors -or try to use a history file that does not exist. - -While Cylc workflows can be configured to handle failure gracefully, -this workflow is not yet set to do this, so currently it's recommended -to reconfigure your postprocessing to remove task errors. - -For some suggestions to recover from the above most common errors, see: - -frepp --help -################################################################################ -EOF - fi - # sometimes this hangs for unknown reasons - # So for now we'll add --debug to try to diagnose it, and - # use /bin/timeout to exit after 10 min - timeout 10m cylc play --debug $name - sleep 20 - cylc trigger $name//$time_iso/pp-starter - exit 0 -fi - -# Checkout postprocessing template and configure -if [[ ! -d $srcdir ]]; then - echo "Workflow source directory '$srcdir' does not exist, so will now try to checkout template" - - # checkout - mkdir -p $HOME/cylc-src - cd $HOME/cylc-src - # try to reduce checkout size with depth=1 - #git clone --depth=1 --recursive git@gitlab.gfdl.noaa.gov:fre2/workflows/postprocessing.git $name - git clone --depth=1 --recursive https://gitlab.gfdl.noaa.gov/fre2/workflows/postprocessing.git $name - - # xml converter - cd $srcdir - if [[ $dual == true ]]; then - bin/fre-bronx-to-canopy.py -x $xml -p $platform -t $target -e $expname -v --dual - else - bin/fre-bronx-to-canopy.py -x $xml -p $platform -t $target -e $expname -v - fi -fi - -# validate configuration -cd $srcdir -if ! rose macro --validate; then - cat << EOF -################################################################################ -Configuration may not be valid. - -In general, Canopy configurations should pass all available validation scripts. -To run them, - -cd $HOME/cylc-src/$name -rose macro --validate - -Most validation errors reflect configurations problems that should be corrected. -The exceptions are: -1. PP_DIR will be created if it does not exist -2. 
HISTORY_DIR_REFINED will be created if it does not exist, - assuming DO_REFINEDIAG is also set - -See README.md for general configuration instructions. -################################################################################ -EOF -fi -cylc validate . - -# Install -cylc install --no-run-name $name - -# Start -cylc play $name -sleep 20 -cylc trigger $name//$time_iso/pp-starter -exit 0 diff --git a/fre/tests/test_fre_cli.py b/fre/tests/test_fre_cli.py index 5c5bdb12..e7bad1d0 100644 --- a/fre/tests/test_fre_cli.py +++ b/fre/tests/test_fre_cli.py @@ -20,3 +20,15 @@ def test_cli_fre_option_dne(): ''' fre optionDNE ''' result = runner.invoke(fre.fre, args='optionDNE') assert result.exit_code == 2 + + +def test_fre_version(): + ''' module import flavor of below cli test ''' + assert '2024.01' == fre.version + +def test_cli_fre_version(): + ''' fre --version ''' + result = runner.invoke(fre.fre, args='--version') + expected_out = 'fre, version 2024.01' + assert all( [ result.exit_code == 0, + expected_out in result.stdout.split('\n') ] ) diff --git a/fre/tests/test_fre_pp_cli.py b/fre/tests/test_fre_pp_cli.py index 399c8e3e..e60d012d 100644 --- a/fre/tests/test_fre_pp_cli.py +++ b/fre/tests/test_fre_pp_cli.py @@ -1,5 +1,9 @@ ''' test "fre pp" calls ''' +import os +import shutil +from pathlib import Path + from click.testing import CliRunner from fre import fre @@ -39,6 +43,18 @@ def test_cli_fre_pp_checkout_opt_dne(): result = runner.invoke(fre.fre, args=["pp", "checkout", "optionDNE"]) assert result.exit_code == 2 +def test_cli_fre_pp_checkout_case(): + ''' fre pp checkout -e FOO -p BAR -t BAZ''' + directory = os.path.expanduser("~/cylc-src")+'/FOO__BAR__BAZ' + if Path(directory).exists(): + shutil.rmtree(directory) + result = runner.invoke(fre.fre, args=["pp", "checkout", + "-e", "FOO", + "-p", "BAR", + "-t", "BAZ"] ) + assert all( [ result.exit_code == 0, + Path(directory).exists()] ) + #-- fre pp configure-xml def test_cli_fre_pp_configure_xml(): ''' fre pp configure-xml ''' @@ -71,6 +87,18 @@ def test_cli_fre_pp_configure_yaml_opt_dne(): result = runner.invoke(fre.fre, args=["pp", "configure-yaml", "optionDNE"]) assert result.exit_code == 2 +def test_cli_fre_pp_configure_yaml_fail1(): + ''' fre pp configure-yaml ''' + result = runner.invoke(fre.fre, args = [ "pp", "configure-yaml", + "-e", "FOO", + "-p", "BAR", + "-t", "BAZ", + "-y", "BOO" ] ) + assert all( [ result.exit_code == 1, + isinstance(result.exception, FileNotFoundError ) + ] ) + + #-- fre pp install def test_cli_fre_pp_install(): ''' fre pp install ''' diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index b2b6540f..6deb9ae2 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -94,13 +94,15 @@ def experiment_check(mainyaml_dir,comb,experiment): ey=Path(os.path.join(mainyaml_dir,e)) ey_path.append(ey) else: - raise ValueError("Incorrect experiment yaml path given; does not exist.") + raise ValueError(f"Experiment yaml path given ({e}) does not exist.") else: raise ValueError("No experiment yaml path given!") if analysisyaml is not None: ay_path=[] for a in analysisyaml: + # prepend the directory containing the yaml + a = Path(mainyaml_dir, a) if Path(a).exists(): ay=Path(os.path.join(mainyaml_dir,a)) ay_path.append(ay) @@ -113,185 +115,192 @@ def experiment_check(mainyaml_dir,comb,experiment): ## COMPILE CLASS ## class init_compile_yaml(): - def __init__(self,yamlfile,platform,target): - """ - Process to combine yamls applicable to compilation - """ - self.yml = 
yamlfile - self.name = yamlfile.split(".")[0] - self.namenopath = self.name.split("/")[-1].split(".")[0] - self.platform = platform - self.target = target + """ class holding routines for initalizing compilation yamls """ + def __init__(self,yamlfile,platform,target): + """ + Process to combine yamls applicable to compilation + """ + self.yml = yamlfile + self.name = yamlfile.split(".")[0] + self.namenopath = self.name.split("/")[-1].split(".")[0] + self.platform = platform + self.target = target + + # Register tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + + # Name of the combined yaml + base_name=f"combined-{self.namenopath}.yaml" + self.combined = base_name if len(self.mainyaml_dir) == 0 else f"{self.mainyaml_dir}/{base_name}" + + print("Combining yaml files: ") + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + """ + # copy model yaml info into combined yaml + with open(self.combined,'w+',encoding='UTF-8') as f1: + f1.write(f'name: &name "{self.name}"\n') + f1.write(f'platform: &platform "{self.platform}"\n') + f1.write(f'target: &target "{self.target}"\n\n') + try: + with open(self.yml,'r',encoding='UTF-8') as f2: + f1.write("### MODEL YAML SETTINGS ###\n") + shutil.copyfileobj(f2,f1) + except Exception as exc: + raise FileNotFoundError(f'{self.yml} not found') from exc + print(f" model yaml: {self.yml}") + + def combine_compile(self): + """ + Combine compile yaml with the defined combined.yaml + """ + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) + + # copy compile yaml info into combined yaml + if cy_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(cy_path,'r',encoding='UTF-8') as f2: + f1.write("\n### COMPILE INFO ###\n") + shutil.copyfileobj(f2,f1) + print(f" compile yaml: {cy_path}") - # Register tag handler - yaml.add_constructor('!join', join_constructor) + def combine_platforms(self): + """ + Combine platforms yaml with the defined combined.yaml + """ + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) - # Path to the main model yaml - self.mainyaml_dir = os.path.dirname(self.yml) + # combine platform yaml + if py_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(py_path,'r',encoding='UTF-8') as f2: + f1.write("\n### PLATFORM INFO ###\n") + shutil.copyfileobj(f2,f1) + print(f" platforms yaml: {py_path}") - # Name of the combined yaml - self.combined= f"combined-{self.namenopath}.yaml" if len(self.mainyaml_dir) == 0 else f"{self.mainyaml_dir}/combined-{self.namenopath}.yaml" + def clean_yaml(self): + """ + Clean the yaml; remove unnecessary sections in + final combined yaml. 
diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py
index b2b6540f..6deb9ae2 100755
--- a/fre/yamltools/combine_yamls.py
+++ b/fre/yamltools/combine_yamls.py
@@ -94,13 +94,15 @@ def experiment_check(mainyaml_dir,comb,experiment):
                 ey=Path(os.path.join(mainyaml_dir,e))
                 ey_path.append(ey)
             else:
-                raise ValueError("Incorrect experiment yaml path given; does not exist.")
+                raise ValueError(f"Experiment yaml path given ({e}) does not exist.")
     else:
         raise ValueError("No experiment yaml path given!")
 
     if analysisyaml is not None:
         ay_path=[]
         for a in analysisyaml:
+            # prepend the directory containing the yaml
+            a = Path(mainyaml_dir, a)
             if Path(a).exists():
                 ay=Path(os.path.join(mainyaml_dir,a))
                 ay_path.append(ay)
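The prepend added above works because ``pathlib.Path`` joins the segments it is given, so a bare filename listed in the model yaml is now checked relative to that yaml's directory rather than the current working directory. A quick illustration (the paths are made up):

.. code-block:: python

    from pathlib import Path

    # Path("dir", "file") joins its arguments with the platform separator
    analysis = Path("am5/yamls", "analysis.yaml")
    print(analysis)           # am5/yamls/analysis.yaml
    print(analysis.exists())  # existence is checked at the joined location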
@@ -113,185 +115,192 @@ def experiment_check(mainyaml_dir,comb,experiment):
 
 ## COMPILE CLASS ##
 class init_compile_yaml():
-    def __init__(self,yamlfile,platform,target):
-        """
-        Process to combine yamls applicable to compilation
-        """
-        self.yml = yamlfile
-        self.name = yamlfile.split(".")[0]
-        self.namenopath = self.name.split("/")[-1].split(".")[0]
-        self.platform = platform
-        self.target = target
+    """ class holding routines for initializing compilation yamls """
+    def __init__(self,yamlfile,platform,target):
+        """
+        Process to combine yamls applicable to compilation
+        """
+        self.yml = yamlfile
+        self.name = yamlfile.split(".")[0]
+        self.namenopath = self.name.split("/")[-1].split(".")[0]
+        self.platform = platform
+        self.target = target
+
+        # Register tag handler
+        yaml.add_constructor('!join', join_constructor)
+
+        # Path to the main model yaml
+        self.mainyaml_dir = os.path.dirname(self.yml)
+
+        # Name of the combined yaml
+        base_name=f"combined-{self.namenopath}.yaml"
+        self.combined = base_name if len(self.mainyaml_dir) == 0 else f"{self.mainyaml_dir}/{base_name}"
+
+        print("Combining yaml files: ")
+
+    def combine_model(self):
+        """
+        Create the combined.yaml and merge it with the model yaml
+        """
+        # copy model yaml info into combined yaml
+        with open(self.combined,'w+',encoding='UTF-8') as f1:
+            f1.write(f'name: &name "{self.name}"\n')
+            f1.write(f'platform: &platform "{self.platform}"\n')
+            f1.write(f'target: &target "{self.target}"\n\n')
+            try:
+                with open(self.yml,'r',encoding='UTF-8') as f2:
+                    f1.write("### MODEL YAML SETTINGS ###\n")
+                    shutil.copyfileobj(f2,f1)
+            except Exception as exc:
+                raise FileNotFoundError(f'{self.yml} not found') from exc
+        print(f"  model yaml: {self.yml}")
+
+    def combine_compile(self):
+        """
+        Combine compile yaml with the defined combined.yaml
+        """
+        # Get compile info
+        (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined)
+
+        # copy compile yaml info into combined yaml
+        if cy_path is not None:
+            with open(self.combined,'a',encoding='UTF-8') as f1:
+                with open(cy_path,'r',encoding='UTF-8') as f2:
+                    f1.write("\n### COMPILE INFO ###\n")
+                    shutil.copyfileobj(f2,f1)
+            print(f"  compile yaml: {cy_path}")
 
-        # Register tag handler
-        yaml.add_constructor('!join', join_constructor)
+    def combine_platforms(self):
+        """
+        Combine platforms yaml with the defined combined.yaml
+        """
+        # Get compile info
+        (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined)
 
-        # Path to the main model yaml
-        self.mainyaml_dir = os.path.dirname(self.yml)
+        # combine platform yaml
+        if py_path is not None:
+            with open(self.combined,'a',encoding='UTF-8') as f1:
+                with open(py_path,'r',encoding='UTF-8') as f2:
+                    f1.write("\n### PLATFORM INFO ###\n")
+                    shutil.copyfileobj(f2,f1)
+            print(f"  platforms yaml: {py_path}")
 
-        # Name of the combined yaml
-        self.combined= f"combined-{self.namenopath}.yaml" if len(self.mainyaml_dir) == 0 else f"{self.mainyaml_dir}/combined-{self.namenopath}.yaml"
+    def clean_yaml(self):
+        """
+        Clean the yaml; remove unnecessary sections in
+        final combined yaml.
+        """
+        # Load the fully combined yaml
+        full_yaml = yaml_load(self.combined)
 
-        print("Combining yaml files: ")
+        # Clean the yaml
+        # If these keys exist, delete them:
+        keys_clean=["fre_properties", "shared", "experiments"]
+        for kc in keys_clean:
+            if kc in full_yaml.keys():
+                del full_yaml[kc]
 
-    def combine_model(self):
-        """
-        Create the combined.yaml and merge it with the model yaml
-        """
-        # copy model yaml info into combined yaml
-        with open(self.combined,'w+',encoding='UTF-8') as f1:
-            f1.write(f'name: &name "{self.name}"\n')
-            f1.write(f'platform: &platform "{self.platform}"\n')
-            f1.write(f'target: &target "{self.target}"\n\n')
-            with open(self.yml,'r',encoding='UTF-8') as f2:
-                f1.write("### MODEL YAML SETTINGS ###\n")
-                shutil.copyfileobj(f2,f1)
-
-        print(f"  model yaml: {self.yml}")
-
-    def combine_compile(self):
-        """
-        Combine compile yaml with the defined combined.yaml
-        """
-        # Get compile info
-        (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined)
-
-        # copy compile yaml info into combined yaml
-        if cy_path is not None:
-            with open(self.combined,'a',encoding='UTF-8') as f1:
-                with open(cy_path,'r',encoding='UTF-8') as f2:
-                    f1.write("\n### COMPILE INFO ###\n")
-                    shutil.copyfileobj(f2,f1)
-            print(f"  compile yaml: {cy_path}")
-
-    def combine_platforms(self):
-        """
-        Combine platforms yaml with the defined combined.yaml
-        """
-        # Get compile info
-        (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined)
-
-        # combine platform yaml
-        if py_path is not None:
-            with open(self.combined,'a',encoding='UTF-8') as f1:
-                with open(py_path,'r',encoding='UTF-8') as f2:
-                    f1.write("\n### PLATFORM INFO ###\n")
-                    shutil.copyfileobj(f2,f1)
-            print(f"  platforms yaml: {py_path}")
-
-    def clean_yaml(self):
-        """
-        Clean the yaml; remove unnecessary sections in
-        final combined yaml.
-        """
-        # Load the fully combined yaml
-        full_yaml = yaml_load(self.combined)
-
-        # Clean the yaml
-        # If keys exists, delete:
-        keys_clean=["fre_properties", "shared", "experiments"]
-        for kc in keys_clean:
-            if kc in full_yaml.keys():
-                del full_yaml[kc]
-
-        with open(self.combined,'w',encoding='UTF-8') as f:
-            yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False)
-
-        print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}")
-        return self.combined
+        with open(self.combined,'w',encoding='UTF-8') as f:
+            yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False)
+
+        print(f"Combined yaml located here: {self.combined}")
+        return self.combined
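Both classes register a ``!join`` tag handler in their constructors; ``join_constructor`` itself is defined earlier in ``combine_yamls.py``, outside this hunk. The body below is a plausible sketch of such a constructor, not the module's actual code, shown so the anchor and alias lines written by ``combine_model`` are easier to follow:

.. code-block:: python

    import yaml

    def join_constructor(loader, node):
        ''' concatenate the elements of a !join sequence into one string (sketch) '''
        return ''.join([str(i) for i in loader.construct_sequence(node)])

    yaml.add_constructor('!join', join_constructor)

    # mirrors the name anchor combine_model writes into the combined yaml
    snippet = 'name: &name "am5"\nworkdir: !join ["/work/", *name]\n'
    print(yaml.load(snippet, Loader=yaml.Loader)["workdir"])  # prints /work/am5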
 
 ## PP CLASS ##
 class init_pp_yaml():
-    def __init__(self,yamlfile,experiment,platform,target):
-        """
-        Process to combine the applicable yamls for post-processing
-        """
-        self.yml = yamlfile
-        self.name = experiment
-        self.platform = platform
-        self.target = target
-
-        # Regsiter tag handler
-        yaml.add_constructor('!join', join_constructor)
-
-        # Path to the main model yaml
-        self.mainyaml_dir = os.path.dirname(self.yml)
-
-        # Name of the combined yaml
-        self.combined=f"combined-{self.name}.yaml"
-
-        print("Combining yaml files: ")
-
-    def combine_model(self):
-        """
-        Create the combined.yaml and merge it with the model yaml
-        """
-        # copy model yaml info into combined yaml
-        with open(self.combined,'w+',encoding='UTF-8') as f1:
-            f1.write(f'name: &name "{self.name}"\n')
-            f1.write(f'platform: &platform "{self.platform}"\n')
-            f1.write(f'target: &target "{self.target}"\n\n')
-            with open(self.yml,'r',encoding='UTF-8') as f2:
-                f1.write("### MODEL YAML SETTINGS ###\n")
-                shutil.copyfileobj(f2,f1)
-
-        print(f"  model yaml: {self.yml}")
-
-    def combine_experiment(self):
-        """
-        Combine experiment yamls with the defined combined.yaml
-        """
-        # Experiment Check
-        (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name)
-
-        ## COMBINE EXPERIMENT YAML INFO
-        if ey_path is not None:
-            for i in ey_path:
-                #expyaml_path = os.path.join(mainyaml_dir, i)
-                with open(self.combined,'a',encoding='UTF-8') as f1:
-                    with open(i,'r',encoding='UTF-8') as f2:
-                        #copy expyaml into combined
-                        shutil.copyfileobj(f2,f1)
-                print(f"  experiment yaml: {i}")
-
-    def combine_analysis(self):
-        """
-        Combine analysis yamls with the defined combined.yaml
-        """
-        # Experiment Check
-        (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name)
-
-        ## COMBINE EXPERIMENT YAML INFO
-        if ay_path is not None:
-            for i in ay_path:
-                #analysisyaml_path = os.path.join(mainyaml_dir, i)
-                with open(self.combined,'a',encoding='UTF-8') as f1:
-                    with open(i,'r',encoding='UTF-8') as f2:
-                        #f1.write(f"\n### {i.upper()} settings ###\n")
-                        #copy expyaml into combined
+    """ class holding routines for initializing post-processing yamls """
+    def __init__(self,yamlfile,experiment,platform,target):
+        """
+        Process to combine the applicable yamls for post-processing
+        """
+        self.yml = yamlfile
+        self.name = experiment
+        self.platform = platform
+        self.target = target
+
+        # Register tag handler
+        yaml.add_constructor('!join', join_constructor)
+
+        # Path to the main model yaml
+        self.mainyaml_dir = os.path.dirname(self.yml)
+
+        # Name of the combined yaml
+        self.combined=f"combined-{self.name}.yaml"
+
+        print("Combining yaml files: ")
+
+    def combine_model(self):
+        """
+        Create the combined.yaml and merge it with the model yaml
+        """
+        # copy model yaml info into combined yaml
+        with open(self.combined,'w+',encoding='UTF-8') as f1:
+            f1.write(f'name: &name "{self.name}"\n')
+            f1.write(f'platform: &platform "{self.platform}"\n')
+            f1.write(f'target: &target "{self.target}"\n\n')
+            try:
+                with open(self.yml,'r',encoding='UTF-8') as f2:
+                    f1.write("### MODEL YAML SETTINGS ###\n")
+                    shutil.copyfileobj(f2,f1)
-                        shutil.copyfileobj(f2,f1)
-                print(f"  analysis yaml: {i}")
-
-    def clean_yaml(self):
-        """
-        Clean the yaml; remove unnecessary sections in
-        final combined yaml.
-        """
-        # Load the fully combined yaml
-        full_yaml = yaml_load(self.combined)
-
-        # Clean the yaml
-        # If keys exists, delete:
-        keys_clean=["fre_properties", "shared", "experiments"]
-        for kc in keys_clean:
-            if kc in full_yaml.keys():
-                del full_yaml[kc]
-
-        with open(self.combined,'w') as f:
-            yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False)
-
-        print(f"Combined yaml located here: {os.path.dirname(self.combined)}/{self.combined}")
-        return self.combined
+            except Exception as exc:
+                raise FileNotFoundError(f'{self.yml} not found') from exc
+            print(f"  model yaml: {self.yml}")
+
+    def combine_experiment(self):
+        """
+        Combine experiment yamls with the defined combined.yaml
+        """
+        # Experiment Check
+        (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name)
+
+        ## COMBINE EXPERIMENT YAML INFO
+        if ey_path is not None:
+            for i in ey_path:
+                #expyaml_path = os.path.join(mainyaml_dir, i)
+                with open(self.combined,'a',encoding='UTF-8') as f1:
+                    with open(i,'r',encoding='UTF-8') as f2:
+                        #copy expyaml into combined
+                        shutil.copyfileobj(f2,f1)
+                print(f"  experiment yaml: {i}")
+
+    def combine_analysis(self):
+        """
+        Combine analysis yamls with the defined combined.yaml
+        """
+        # Experiment Check
+        (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name)
+
+        ## COMBINE EXPERIMENT YAML INFO
+        if ay_path is not None:
+            for i in ay_path:
+                #analysisyaml_path = os.path.join(mainyaml_dir, i)
+                with open(self.combined,'a',encoding='UTF-8') as f1:
+                    with open(i,'r',encoding='UTF-8') as f2:
+                        #f1.write(f"\n### {i.upper()} settings ###\n")
+                        #copy expyaml into combined
+                        shutil.copyfileobj(f2,f1)
+                print(f"  analysis yaml: {i}")
+
+    def clean_yaml(self):
+        """
+        Clean the yaml; remove unnecessary sections in
+        final combined yaml.
+        """
+        # Load the fully combined yaml
+        full_yaml = yaml_load(self.combined)
+
+        # Clean the yaml
+        # If these keys exist, delete them:
+        keys_clean=["fre_properties", "shared", "experiments"]
+        for kc in keys_clean:
+            if kc in full_yaml.keys():
+                del full_yaml[kc]
+
+        with open(self.combined,'w',encoding='UTF-8') as f:
+            yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False)
+
+        print(f"Combined yaml located here: {self.combined}")
+        return self.combined
 
 ## Functions to combine the yaml files ##
 def get_combined_compileyaml(comb):
diff --git a/fre/yamltools/freyamltools.py b/fre/yamltools/freyamltools.py
index 55817472..97a06721 100644
--- a/fre/yamltools/freyamltools.py
+++ b/fre/yamltools/freyamltools.py
@@ -33,6 +33,7 @@ def yamltools_cli():
                 required=True)
 @click.pass_context
 def combine_yamls(context,yamlfile,experiment,platform,target,use):
+    # pylint: disable=unused-argument
     """
     - Combine the model yaml with the compile, platform, experiment, and analysis yamls