diff --git a/.github/workflows/bld_scm_images.yaml b/.github/workflows/bld_scm_images.yaml new file mode 100644 index 000000000..1735b05cc --- /dev/null +++ b/.github/workflows/bld_scm_images.yaml @@ -0,0 +1,81 @@ +name: CCPP SCM Base Images Build +run-name: CI Image Build for CCPP SCM + +on: + pull_request: + workflow_dispatch: +# push: +# branches: +# # Only build containers when pushing to main +# - "main" + +jobs: + docker: + if: github.repository == 'NCAR/ccpp-scm' + strategy: + fail-fast: false # Disable fail-fast + matrix: + toolchain: [oneapi,gnu,nvhpc] + include: + # Set toolchain configuration(s) + - toolchain: oneapi + nfversion: 4.5.4 + pnfversion: 1.12.3 + - toolchain: gnu + nfversion: 4.6.2 + pnfversion: 1.14.1 + - toolchain: nvhpc + nfversion: 4.5.4 + pnfversion: 1.12.3 + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + + - name: Check if secrets are defined + run: | + if [ -z "${{ secrets.DOCKER_USERNAME }}" ]; then echo "Username is MISSING"; else echo "Username is SET"; fi + if [ -z "${{ secrets.DOCKER_TOKEN }}" ]; then echo "Token is MISSING"; else echo "Token is SET"; fi + + - name: Log in to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_TOKEN }} + + - name: Build minimal-toolchain:${{ matrix.toolchain }} + uses: docker/build-push-action@v6 + with: + file: docker/Dockerfile-${{ matrix.toolchain }}-minimal + tags: minimal-toolchain:${{ matrix.toolchain }} + + - name: Build add-netcdf:${{ matrix.toolchain }} + uses: docker/build-push-action@v6 + with: + file: docker/Dockerfile-add-netcdf + tags: add-netcdf:${{ matrix.toolchain }} + build-args: | + TOOLCHAIN=${{ matrix.toolchain }} + NFVERSION=${{ matrix.nfversion }} + + - name: Build add-nceplibs:${{ matrix.toolchain }} + uses: docker/build-push-action@v6 + with: + file: docker/Dockerfile-add-nceplibs + build-args: TOOLCHAIN=${{ matrix.toolchain }} + tags: add-nceplibs:${{ matrix.toolchain }} + + - name: 
Build add-python:${{ matrix.toolchain }} + uses: docker/build-push-action@v6 + with: + file: docker/Dockerfile-add-python + build-args: TOOLCHAIN=${{ matrix.toolchain }} + tags: add-python:${{ matrix.toolchain }} + + - name: Build and push dustinswales/ccpp-scm-ci:${{ matrix.toolchain }} + uses: docker/build-push-action@v6 + with: + file: docker/Dockerfile-finalize + build-args: TOOLCHAIN=${{ matrix.toolchain }} + push: true + tags: | + dustinswales/ccpp-scm-ci:${{ matrix.toolchain }} diff --git a/.github/workflows/build_and_push_docker_latest.yml b/.github/workflows/build_and_push_docker_latest.yml deleted file mode 100644 index 01a427000..000000000 --- a/.github/workflows/build_and_push_docker_latest.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: build_test_and_push_docker - -on: - push: - branches: - # Only build containers when pushing to main - - "main" - -env: - LATEST_TAG: dtcenter/ccpp-scm:latest - -jobs: - docker: - if: github.repository == 'NCAR/ccpp-scm' - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Check if secrets are defined - run: | - if [ -z "${{ secrets.DOCKERHUB_USERNAME }}" ]; then echo "Username is MISSING"; else echo "Username is SET"; fi - if [ -z "${{ secrets.DOCKERHUB_TOKEN }}" ]; then echo "Token is MISSING"; else echo "Token is SET"; fi - - - name: Login to Docker Hub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Build Docker image - run: | - docker build -t ${{ env.LATEST_TAG}} docker/ - - - name: Push Docker image - run: | - docker push ${{ env.LATEST_TAG }} diff --git a/.github/workflows/ci_build_scm_ubuntu.yml b/.github/workflows/ci_build_scm_ubuntu.yml deleted file mode 100644 index 96f6fd9c1..000000000 --- a/.github/workflows/ci_build_scm_ubuntu.yml +++ /dev/null @@ -1,131 +0,0 @@ -name: build the CCPP-SCM on latest Ubuntu runner - -on: [pull_request,workflow_dispatch] - -jobs: - build_scm: - 
runs-on: ubuntu-latest - strategy: - matrix: - fortran-compiler: [13] # currently the only version usable with this configuration - build-type: [Release, Debug] - py-version: [3.11, '3.x'] - - # Environmental variables - env: - NFHOME: /home/runner/netcdf-fortran - NFVERSION: v4.5.3 - bacio_ROOT: /home/runner/bacio - sp_ROOT: /home/runner/NCEPLIBS-sp - w3emc_ROOT: /home/runner/myw3emc - SCM_ROOT: /home/runner/work/ccpp-scm/ccpp-scm - suites: SCM_GFS_v15p2,SCM_GFS_v16,SCM_GFS_v16_RRTMGP,SCM_GFS_v17_p8,SCM_HRRR,SCM_RRFS_v1beta,SCM_RAP,SCM_WoFS_v0,SCM_GFS_v15p2_ps,SCM_GFS_v16_ps,SCM_GFS_v16_RRTMGP_ps,SCM_GFS_v17_p8_ps,SCM_HRRR_ps,SCM_RRFS_v1beta_ps,SCM_RAP_ps,SCM_WoFS_v0_ps,SCM_HRRR_gf,SCM_HRRR_gf_ps - - # Workflow steps - steps: - # Install System Dependencies - - name: Install System Dependencies - run: | - sudo apt-get update -qq && sudo apt-get install -qq -y --no-install-recommends \ - gfortran-${{matrix.fortran-compiler}} \ - libhdf5-dev \ - libnetcdf-pnetcdf-19 \ - libnetcdff7 \ - libnetcdf-dev \ - libnetcdff-dev \ - libxml2 \ - openmpi-bin \ - libopenmpi-dev - - - # Python setup - - name: Setup Python - uses: actions/setup-python@v6 - with: - python-version: ${{matrix.py-version}} - - - name: Install NetCDF Python libraries - run: | - pip install f90nml h5py netCDF4 - - - name: Environment for openmpi compiler - run: | - echo "FC=mpif90" >> $GITHUB_ENV - echo "CC=mpicc" >> $GITHUB_ENV - - - name: Check MPI version - run: | - which mpif90 - mpif90 --version - - - # Install NCEP Lib Dependencies - - name: Cache bacio library v2.4.1 - id: cache-bacio-fortran - uses: actions/cache@v5 - with: - path: /home/runner/bacio - key: cache-bacio-fortran-${{matrix.fortran-compiler}}-${{matrix.build-type}}-${{matrix.py-version}}-key - - - name: Install bacio library v2.4.1 - if: steps.cache-bacio-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.4.1 https://github.com/NOAA-EMC/NCEPLIBS-bacio.git bacio - cd bacio && mkdir build && cd build - cmake 
-DCMAKE_INSTALL_PREFIX=${bacio_ROOT} ../ - make -j2 - make install - echo "bacio_DIR=/home/runner/bacio/lib/cmake/bacio" >> $GITHUB_ENV - - - name: Cache SP-library v2.3.3 - id: cache-sp-fortran - uses: actions/cache@v5 - with: - path: /home/runner/NCEPLIBS-sp - key: cache-sp-fortran-${{matrix.fortran-compiler}}-${{matrix.build-type}}-${{matrix.py-version}}-key - - - name: Install SP-library v2.3.3 - if: steps.cache-sp-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.3.3 https://github.com/NOAA-EMC/NCEPLIBS-sp.git NCEPLIBS-sp - cd NCEPLIBS-sp && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${sp_ROOT} ../ - make -j2 - make install - echo "sp_DIR=/home/runner/NCEPLIBS-sp/lib/cmake/sp" >> $GITHUB_ENV - - - name: Cache w3emc library v2.9.2 - id: cache-w3emc-fortran - uses: actions/cache@v5 - with: - path: /home/runner/myw3emc - key: cache-w3emc-fortran-${{matrix.fortran-compiler}}-${{matrix.build-type}}-${{matrix.py-version}}-key - - - name: Install w3emc library v2.9.2 - if: steps.cache-w3emc-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.9.2 https://github.com/NOAA-EMC/NCEPLIBS-w3emc.git NCEPLIBS-w3emc - cd NCEPLIBS-w3emc && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${w3emc_ROOT} ../ - make -j2 - make install - echo "w3emc_DIR=/home/runner/myw3emc/lib/cmake/w3emc" >> $GITHUB_ENV - - - # Initialize and Build SCM - - name: Checkout SCM code (into /home/runner/work/ccpp-scm/) - uses: actions/checkout@v6 - - - name: Initialize Submodules - run: git submodule update --init --recursive - - - name: Configure Build with CMake - run: | - cd ${SCM_ROOT}/scm - mkdir bin && cd bin - cmake -DCCPP_SUITES=${suites} -DCMAKE_BUILD_TYPE=${{matrix.build-type}} ../src - - - name: Build SCM - run: | - cd ${SCM_ROOT}/scm/bin - make -j4 diff --git a/.github/workflows/ci_build_scm_ubuntu_nvidia.yml b/.github/workflows/ci_build_scm_ubuntu_nvidia.yml deleted file mode 100644 index 4fb85cb64..000000000 --- 
a/.github/workflows/ci_build_scm_ubuntu_nvidia.yml +++ /dev/null @@ -1,332 +0,0 @@ -name: build the CCPP-SCM with Nvidia - -on: workflow_dispatch - -jobs: - - build_scm: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - fortran-compiler: [nvfortran] - build-type: [Release] #, Debug] - enable-gpu-acc: [False] #, True] # GPUs aren't available for testing - py-version: [3.11.7] - - # Environmental variables - env: - NETCDF: /home/runner/netcdf - bacio_ROOT: /home/runner/bacio - sp_ROOT: /home/runner/NCEPLIBS-sp - w3emc_ROOT: /home/runner/myw3emc - SCM_ROOT: /home/runner/work/ccpp-scm/ccpp-scm - HDF5_ROOT: /home/runner/hdf5 - MPI_ROOT: /home/runner/openmpi - suites: SCM_GFS_v15p2,SCM_GFS_v16,SCM_GFS_v17_p8,SCM_HRRR,SCM_RRFS_v1beta,SCM_RAP,SCM_WoFS_v0 - suites_ps: SCM_GFS_v15p2_ps,SCM_GFS_v16_ps,SCM_GFS_v17_p8_ps,SCM_HRRR_ps,SCM_RRFS_v1beta_ps,SCM_RAP_ps,SCM_WoFS_v0_ps - - # Workflow steps - steps: - - ####################################################################################### - # Cleanup space - ####################################################################################### - - name: Check space (pre) - run: | - df -h - - - name: Free Disk Space (Ubuntu) - uses: jlumbroso/free-disk-space@main - with: - # this might remove tools that are actually needed, - # if set to "true" but frees about 6 GB - tool-cache: false - - # all of these default to true, but feel free to set to - # "false" if necessary for your workflow - android: false - dotnet: false - haskell: true - large-packages: true - docker-images: false - swap-storage: false - - - name: Check space (post) - run: | - df -h - - # - name: Add conda to system path - # run: | - # echo $CONDA/bin >> $GITHUB_PATH - - # - name: Install NetCDF Python libraries - # run: | - # conda install --yes -c conda-forge h5py>=3.4 netCDF4 f90nml - - ####################################################################################### - # Install Nvidia. 
- ####################################################################################### - - - name: Nvidia setup compilers. - env: - NVCOMPILERS: /home/runner/hpc_sdk - NVARCH: Linux_x86_64 - NVYEAR: 2025 - NVVERSION: 25.1 - CUDA_VERSION: 12.6 - NVVERSION_PACKED: 251 # Manually take NVVERSION and remove . because funcationality not in actions - NVHPC_SILENT: true - NVHPC_INSTALL_DIR: /home/runner/hpc_sdk - NVHPC_INSTALL_TYPE: network - NVHPC_INSTALL_LOCAL_DIR: /home/runner/hpc_sdk - run: | - mkdir /home/runner/hpc_sdk && cd /home/runner/hpc_sdk - wget -q https://developer.download.nvidia.com/hpc-sdk/${NVVERSION}/nvhpc_${NVYEAR}_${NVVERSION_PACKED}_Linux_x86_64_cuda_${CUDA_VERSION}.tar.gz - tar xpzf nvhpc_${NVYEAR}_${NVVERSION_PACKED}_Linux_x86_64_cuda_${CUDA_VERSION}.tar.gz - ls - nvhpc_${NVYEAR}_${NVVERSION_PACKED}_Linux_x86_64_cuda_${CUDA_VERSION}/install - export PATH=${PATH}:${NVCOMPILERS}/${NVARCH}/${NVVERSION}/compilers/bin - export MANPATH=${MANPATH}:${NVCOMPILERS}/${NVARCH}/${NVVERSION}/compilers/man - echo "The nvfortran installed is:" - nvfortran --version - echo "The path to nvfortran is:" - command -v nvfortran - echo "Removing tarball" - rm nvhpc_${NVYEAR}_${NVVERSION_PACKED}_Linux_x86_64_cuda_${CUDA_VERSION}.tar.gz - echo "CC=/home/runner/hpc_sdk/Linux_x86_64/${NVVERSION}/compilers/bin/nvc" >> $GITHUB_ENV - echo "FC=/home/runner/hpc_sdk/Linux_x86_64/${NVVERSION}/compilers/bin/nvfortran" >> $GITHUB_ENV - echo "CMAKE_C_COMPILER=/home/runner/hpc_sdk/Linux_x86_64/${NVVERSION}/compilers/bin/nvc" >> $GITHUB_ENV - echo "CMAKE_Fortran_COMPILER=/home/runner/hpc_sdk/Linux_x86_64/${NVVERSION}/compilers/bin/nvfortran" >> $GITHUB_ENV - - - name: Check space (pre dependency install) - run: | - df -h - - ####################################################################################### - # Initialize SCM - ####################################################################################### - - name: Checkout SCM code (into /home/runner/work/ccpp-scm/) - 
uses: actions/checkout@v6 - - - name: Initialize submodules - run: git submodule update --init --recursive - - ####################################################################################### - # Python setup - ####################################################################################### - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: ${{matrix.py-version}} - - ####################################################################################### - # Install FORTRAN dependencies - ####################################################################################### - - - name: Install Curl and zlib - run: | - sudo apt-get update - sudo apt-get install curl - sudo apt-get install libssl-dev libcurl4-openssl-dev - sudo apt-get install zlib1g-dev - - - name: Cache HDF5 - id: cache-hdf5 - uses: actions/cache@v5 - with: - path: /home/runner/hdf5 - KEY: cache-hdf5-${{matrix.fortran-compiler}}-key - - - name: Install HDF5 - if: steps.cache-hdf5.outputs.cache-hit != 'true' - run: | - wget -q https://github.com/HDFGroup/hdf5/archive/refs/tags/hdf5-1_14_1-2.tar.gz - tar zxf hdf5-1_14_1-2.tar.gz - cd hdf5-hdf5-1_14_1-2 - ./configure --prefix=${HDF5_ROOT} - make -j - make install - cd .. 
- rm -rf hdf5-hdf5-1_14_1-2 hdf5-1_14_1-2.tar.gz - - - name: Setup HDF5 Paths - run: | - echo "LD_LIBRARY_PATH=$HDF5_ROOT/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV - echo "PATH=$HDF5_ROOT/bin:$PATH" >> $GITHUB_ENV - - - name: Cache OpenMPI - id: cache-openmpi - uses: actions/cache@v5 - with: - path: /home/runner/openmpi - KEY: cache-openmpi-${{matrix.fortran-compiler}}-key - - - name: Configure OpenMPI - if: steps.cache-openmpi.outputs.cache-hit != 'true' - run: | - cd ${HOME} - wget -q https://download.open-mpi.org/release/open-mpi/v4.1/openmpi-4.1.6.tar.gz - tar zxf openmpi-4.1.6.tar.gz - cd openmpi-4.1.6 - CFLAGS=-fPIC CXXFLAGS=-fPIC FCFLAGS=-fPIC ./configure --prefix=${MPI_ROOT} - - - name: Build OpenMPI - if: steps.cache-openmpi.outputs.cache-hit != 'true' - run: | - cd ${HOME}/openmpi-4.1.6 - make -j - - - name: Install OpenMPI - if: steps.cache-openmpi.outputs.cache-hit != 'true' - run: | - cd ${HOME}/openmpi-4.1.6 - sudo make install -j - cd .. - rm -rf openmpi-4.1.6 openmpi-4.1.6.tar.gz - - - name: Setup OpenMPI Paths - run: | - echo "PATH=${MPI_ROOT}/bin:$PATH" >> $GITHUB_ENV - echo "LD_LIBRARY_PATH=${MPI_ROOT}/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV - - - name: Check MPI Version - run: | - mpif90 --version - - - name: Set environment for Nvidia compiler with MPI - run: | - echo "CC=$(which mpicc)" >> $GITHUB_ENV - echo "FC=$(which mpif90)" >> $GITHUB_ENV - echo "CMAKE_C_COMPILER=$(which mpicc)" >> $GITHUB_ENV - echo "CMAKE_Fortran_COMPILER=$(which mpif90)" >> $GITHUB_ENV - - - name: Cache NetCDF library - id: cache-netcdf - uses: actions/cache@v5 - with: - path: /home/runner/netcdf - key: cache-netcdf-${{matrix.fortran-compiler}}-key - - - name: Setup NetCDF Paths - run: | - echo "LD_LIBRARY_PATH=$NETCDF/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV - echo "PATH=$NETCDF/bin:$PATH" >> $GITHUB_ENV - - - name: Install NetCDF C library - if: steps.cache-netcdf.outputs.cache-hit != 'true' - run: | - wget -q https://github.com/Unidata/netcdf-c/archive/refs/tags/v4.7.4.tar.gz - 
tar zxf v4.7.4.tar.gz - cd netcdf-c-4.7.4 - CPPFLAGS="-I/home/runner/hdf5/include" LDFLAGS="-L/home/runner/hdf5/lib" ./configure --prefix=${NETCDF} - make - make install - cd .. - rm -rf netcdf-c-4.7.4 v4.7.4.tar.gz - - - name: Install NetCDF Fortran library - if: steps.cache-netcdf.outputs.cache-hit != 'true' - run: | - wget -q https://github.com/Unidata/netcdf-fortran/archive/refs/tags/v4.6.1.tar.gz - tar zxf v4.6.1.tar.gz - cd netcdf-fortran-4.6.1 - FCFLAGS="-fPIC" FFLAGS="-fPIC" CPPFLAGS="-I/home/runner/hdf5/include -I/home/runner/netcdf/include" LDFLAGS="-L/home/runner/hdf5/lib -L/home/runner/netcdf/lib" ./configure --prefix=${NETCDF} - make - make install - cd .. - rm -rf netcdf-fortran-4.6.1 v4.6.1.tar.gz - - - name: Cache bacio library v2.4.1 - id: cache-bacio-fortran - uses: actions/cache@v5 - with: - path: /home/runner/bacio - key: cache-bacio-fortran-${{matrix.fortran-compiler}}-key - - - name: Install bacio library v2.4.1 - if: steps.cache-bacio-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.4.1 https://github.com/NOAA-EMC/NCEPLIBS-bacio.git bacio - cd bacio && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${bacio_ROOT} ../ - make -j - make install - echo "bacio_DIR=/home/runner/bacio/lib/cmake/bacio" >> $GITHUB_ENV - cd ../../ - rm -rf bacio - - - name: Cache SP-library v2.3.3 - id: cache-sp-fortran - uses: actions/cache@v5 - with: - path: /home/runner/NCEPLIBS-sp - key: cache-sp-fortran-${{matrix.fortran-compiler}}-key - - - name: Install SP-library v2.3.3 - if: steps.cache-sp-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.3.3 https://github.com/NOAA-EMC/NCEPLIBS-sp.git NCEPLIBS-sp - cd NCEPLIBS-sp && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${sp_ROOT} ../ - make -j - make install - echo "sp_DIR=/home/runner/NCEPLIBS-sp/lib/cmake/sp" >> $GITHUB_ENV - cd ../../ - rm -rf NCEPLIBS-sp - - - name: Cache w3emc library v2.9.2 - id: cache-w3emc-fortran - uses: actions/cache@v5 - with: - path: 
/home/runner/myw3emc - key: cache-w3emc-fortran-${{matrix.fortran-compiler}}-key - - - name: Install w3emc library v2.9.2 - if: steps.cache-w3emc-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.9.2 https://github.com/NOAA-EMC/NCEPLIBS-w3emc.git NCEPLIBS-w3emc - cd NCEPLIBS-w3emc && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${w3emc_ROOT} ../ - make -j - make install - echo "w3emc_DIR=/home/runner/myw3emc/lib/cmake/w3emc" >> $GITHUB_ENV - cd ../../ - rm -rf NCEPLIBS-w3emc - - - name: Check space (pre SCM build) - run: | - df -h - - ####################################################################################### - # Build and run SCM regression tests (ccpp-scm/test/rt_test_cases.py) - ####################################################################################### - - - name: Configure build with CMake - run: | - cd ${SCM_ROOT}/scm - mkdir bin && cd bin - cmake -DCCPP_SUITES=${suites},${suites_ps} -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -DENABLE_NVIDIA_OPENACC=${{matrix.enable-gpu-acc}} ../src - - - name: Build SCM. 
- run: | - cd ${SCM_ROOT}/scm/bin - make -j - - - name: Check space (post SCM build) - run: | - df -h - - # - name: Download data for SCM - # if: contains(matrix.enable-gpu-acc, 'False') - # run: | - # cd ${SCM_ROOT} - # ./contrib/get_all_static_data.sh - # ./contrib/get_thompson_tables.sh - # ./contrib/get_aerosol_climo.sh - - # - name: Run SCM RTs (w/o GPU) - # if: contains(matrix.enable-gpu-acc, 'False') - # run: | - # cd ${SCM_ROOT}/scm/bin - # ./run_scm.py --file /home/runner/work/ccpp-scm/ccpp-scm/test/rt_test_cases.py --runtime_mult 0.1 -v diff --git a/.github/workflows/ci_create_scm_rts_artifacts.yml b/.github/workflows/ci_create_scm_rts_artifacts.yml deleted file mode 100644 index f151a0ba6..000000000 --- a/.github/workflows/ci_create_scm_rts_artifacts.yml +++ /dev/null @@ -1,187 +0,0 @@ - -name: create scm rt artifacts - -on: - workflow_dispatch: - -jobs: - run_scm_rts: - - # The type of runner that the job will run on - runs-on: ubuntu-latest - strategy: - matrix: - fortran-compiler: [12] - build-type: [Release, Debug, SinglePrecision] - py-version: [3.11] - - continue-on-error: true - - # Environmental variables - env: - NFHOME: /home/runner/netcdf-fortran - NFVERSION: v4.5.3 - bacio_ROOT: /home/runner/bacio - sp_ROOT: /home/runner/NCEPLIBS-sp - w3emc_ROOT: /home/runner/myw3emc - SCM_ROOT: /home/runner/work/ccpp-scm/ccpp-scm - suites: SCM_GFS_v15p2,SCM_GFS_v15p2_ntiedtke,SCM_GFS_v16,SCM_GFS_v17_p8,SCM_HRRR,SCM_RRFS_v1beta,SCM_RAP,SCM_WoFS_v0,SCM_HRRR_gf,SCM_GFS_v17_p8_ugwpv1,SCM_GFS_v16_RRTMGP,SCM_GFS_v16_debug,SCM_GFS_v16_no_nsst,SCM_GFS_v17_p8_ugwpv1_no_nsst,SCM_RRFS_v1beta_no_nsst,SCM_GFS_v16_gfdlmpv3,SCM_GFS_v17_p8_ugwpv1_tempo - suites_ps: 
SCM_GFS_v15p2_ps,SCM_GFS_v15p2_ntiedtke_ps,SCM_GFS_v16_ps,SCM_GFS_v17_p8_ps,SCM_HRRR_ps,SCM_RRFS_v1beta_ps,SCM_RAP_ps,SCM_WoFS_v0_ps,SCM_HRRR_gf_ps,SCM_GFS_v17_p8_ugwpv1_ps,SCM_GFS_v16_RRTMGP_ps,SCM_GFS_v16_debug_ps,SCM_GFS_v16_no_nsst_ps,SCM_GFS_v17_p8_ugwpv1_no_nsst_ps,SCM_RRFS_v1beta_no_nsst_ps,SCM_GFS_v16_gfdlmpv3_ps,SCM_GFS_v17_p8_ugwpv1_tempo_ps - dir_rt: /home/runner/work/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}} - dir_bl: /home/runner/work/ccpp-scm/ccpp-scm/test/BL-${{matrix.build-type}} - artifact_origin: ${{ github.event_name == 'pull_request' && 'PR' || ('main' == 'main' && 'main' || 'PR') }} - GH_TOKEN: ${{ github.token }} - - # Workflow steps - steps: - # Install System Dependencies - - name: Install System Dependencies - run: | - sudo apt-get update -qq && sudo apt-get install -qq -y --no-install-recommends \ - gfortran-${{matrix.fortran-compiler}} \ - libhdf5-dev \ - libnetcdf-pnetcdf-19 \ - libnetcdff7 \ - libnetcdf-dev \ - libnetcdff-dev \ - libxml2 \ - openmpi-bin \ - libopenmpi-dev - - # Python setup - - name: Setup Python - uses: actions/setup-python@v6 - with: - python-version: ${{matrix.py-version}} - - - name: Install NetCDF Python libraries - run: | - pip install f90nml h5py netCDF4 matplotlib - - - name: Environment for openmpi compiler - run: | - echo "FC=mpif90" >> $GITHUB_ENV - echo "CC=mpicc" >> $GITHUB_ENV - - - name: Check MPI version - run: | - which mpif90 - mpif90 --version - - # Install NCEP libs - - name: Cache bacio library v2.4.1 - id: cache-bacio-fortran - uses: actions/cache@v5 - with: - path: /home/runner/bacio - key: cache-bacio-fortran-${{matrix.fortran-compiler}}-${{matrix.build-type}}-key - - - name: Install bacio library v2.4.1 - if: steps.cache-bacio-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.4.1 https://github.com/NOAA-EMC/NCEPLIBS-bacio.git bacio - cd bacio && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${bacio_ROOT} ../ - make -j2 - make install - echo 
"bacio_DIR=/home/runner/bacio/lib/cmake/bacio" >> $GITHUB_ENV - - - name: Cache SP-library v2.3.3 - id: cache-sp-fortran - uses: actions/cache@v5 - with: - path: /home/runner/NCEPLIBS-sp - key: cache-sp-fortran-${{matrix.fortran-compiler}}-${{matrix.build-type}}-key - - - name: Install SP-library v2.3.3 - if: steps.cache-sp-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.3.3 https://github.com/NOAA-EMC/NCEPLIBS-sp.git NCEPLIBS-sp - cd NCEPLIBS-sp && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${sp_ROOT} ../ - make -j2 - make install - echo "sp_DIR=/home/runner/NCEPLIBS-sp/lib/cmake/sp" >> $GITHUB_ENV - - - name: Cache w3emc library v2.9.2 - id: cache-w3emc-fortran - uses: actions/cache@v5 - with: - path: /home/runner/myw3emc - key: cache-w3emc-fortran-${{matrix.fortran-compiler}}-${{matrix.build-type}}-key - - - name: Install w3emc library v2.9.2 - if: steps.cache-w3emc-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.9.2 https://github.com/NOAA-EMC/NCEPLIBS-w3emc.git NCEPLIBS-w3emc - cd NCEPLIBS-w3emc && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${w3emc_ROOT} ../ - make -j2 - make install - echo "w3emc_DIR=/home/runner/myw3emc/lib/cmake/w3emc" >> $GITHUB_ENV - - - # Initialize and build SCM, run regressions tests - - name: Checkout SCM code (into /home/runner/work/ccpp-scm/) - uses: actions/checkout@v6 - - - name: Initialize submodules - run: git submodule update --init --recursive - - - name: Download data for SCM - run: | - cd ${SCM_ROOT} - ./contrib/get_all_static_data.sh - ./contrib/get_thompson_tables.sh - ./contrib/get_tempo_data.sh - ./contrib/get_aerosol_climo.sh - - - name: Configure build with CMake (Release) - if: contains(matrix.build-type, 'Release') - run: | - cd ${SCM_ROOT}/scm - mkdir bin && cd bin - cmake -DCCPP_SUITES=${suites},${suites_ps} ../src - - - name: Configure build with CMake (Debug) - if: contains(matrix.build-type, 'Debug') - run: | - cd ${SCM_ROOT}/scm - mkdir bin && cd 
bin - cmake -DCCPP_SUITES=${suites},${suites_ps} -DCMAKE_BUILD_TYPE=Debug ../src - - - name: Configure build with CMake (Single Precision) - if: matrix.build-type == 'SinglePrecision' - run: | - cd ${SCM_ROOT}/scm - mkdir bin && cd bin - cmake -DCCPP_SUITES=${suites},${suites_ps} -D32BIT=1 ../src - - - name: Build SCM - run: | - cd ${SCM_ROOT}/scm/bin - make -j4 - - - name: Run SCM RTs - if: matrix.build-type != 'SinglePrecision' - run: | - cd ${SCM_ROOT}/scm/bin - ./run_scm.py --file /home/runner/work/ccpp-scm/ccpp-scm/test/rt_test_cases.py --runtime_mult 0.1 - - - name: Run SCM Single Precision RTs - if: matrix.build-type == 'SinglePrecision' - run: | - cd ${SCM_ROOT}/scm/bin - ./run_scm.py --file /home/runner/work/ccpp-scm/ccpp-scm/test/rt_test_cases_sp.py --runtime_mult 0.1 - - - name: Gather SCM RT output - run: | - cd ${SCM_ROOT}/test - mkdir /home/runner/work/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}} - ./ci_util.py -b ${{matrix.build-type}} - - - name: Upload SCM RTs as GitHub Artifact - uses: actions/upload-artifact@v6 - with: - name: rt-baselines-${{matrix.build-type}}-${{ env.artifact_origin }} - path: /home/runner/work/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}} diff --git a/.github/workflows/ci_run_scm_rts.yml b/.github/workflows/ci_run_scm_rts.yml deleted file mode 100644 index a23fb2dd2..000000000 --- a/.github/workflows/ci_run_scm_rts.yml +++ /dev/null @@ -1,231 +0,0 @@ - -name: build and run SCM regression tests - -on: - push: - branches: [main] # add artifact to main branch - pull_request: - workflow_dispatch: - -jobs: - run_scm_rts: - # Don't run in forks - if: github.repository == 'NCAR/ccpp-scm' - - # The type of runner that the job will run on - runs-on: ubuntu-latest - strategy: - matrix: - fortran-compiler: [12] - build-type: [Release, Debug, SinglePrecision] - py-version: [3.11] - - continue-on-error: true - - # Environmental variables - env: - NFHOME: /home/runner/netcdf-fortran - NFVERSION: v4.5.3 - bacio_ROOT: 
/home/runner/bacio - sp_ROOT: /home/runner/NCEPLIBS-sp - w3emc_ROOT: /home/runner/myw3emc - SCM_ROOT: /home/runner/work/ccpp-scm/ccpp-scm - suites: SCM_GFS_v15p2,SCM_GFS_v15p2_ntiedtke,SCM_GFS_v16,SCM_GFS_v17_p8,SCM_HRRR,SCM_RRFS_v1beta,SCM_RAP,SCM_WoFS_v0,SCM_HRRR_gf,SCM_GFS_v17_p8_ugwpv1,SCM_GFS_v16_RRTMGP,SCM_GFS_v16_debug,SCM_GFS_v16_no_nsst,SCM_GFS_v17_p8_ugwpv1_no_nsst,SCM_RRFS_v1beta_no_nsst,SCM_GFS_v16_gfdlmpv3,SCM_GFS_v17_p8_ugwpv1_tempo - suites_ps: SCM_GFS_v15p2_ps,SCM_GFS_v15p2_ntiedtke_ps,SCM_GFS_v16_ps,SCM_GFS_v17_p8_ps,SCM_HRRR_ps,SCM_RRFS_v1beta_ps,SCM_RAP_ps,SCM_WoFS_v0_ps,SCM_HRRR_gf_ps,SCM_GFS_v17_p8_ugwpv1_ps,SCM_GFS_v16_RRTMGP_ps,SCM_GFS_v16_debug_ps,SCM_GFS_v16_no_nsst_ps,SCM_GFS_v17_p8_ugwpv1_no_nsst_ps,SCM_RRFS_v1beta_no_nsst_ps,SCM_GFS_v16_gfdlmpv3_ps,SCM_GFS_v17_p8_ugwpv1_tempo_ps - dir_rt: /home/runner/work/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}} - dir_bl: /home/runner/work/ccpp-scm/ccpp-scm/test/BL-${{matrix.build-type}} - artifact_origin: ${{ github.event_name == 'pull_request' && 'PR' || ('main' == 'main' && 'main' || 'PR') }} - GH_TOKEN: ${{ github.token }} - - # Workflow steps - steps: - # Install System Dependencies - - name: Install System Dependencies - run: | - sudo apt-get update -qq && sudo apt-get install -qq -y --no-install-recommends \ - gfortran-${{matrix.fortran-compiler}} \ - libhdf5-dev \ - libnetcdf-pnetcdf-19 \ - libnetcdff7 \ - libnetcdf-dev \ - libnetcdff-dev \ - libxml2 \ - openmpi-bin \ - libopenmpi-dev - - # Python setup - - name: Setup Python - uses: actions/setup-python@v6 - with: - python-version: ${{matrix.py-version}} - - - name: Install NetCDF Python libraries - run: | - pip install f90nml h5py netCDF4 matplotlib - - - name: Environment for openmpi compiler - run: | - echo "FC=mpif90" >> $GITHUB_ENV - echo "CC=mpicc" >> $GITHUB_ENV - - - name: Check MPI version - run: | - which mpif90 - mpif90 --version - - # Install NCEP libs - - name: Cache bacio library v2.4.1 - id: cache-bacio-fortran - 
uses: actions/cache@v5 - with: - path: /home/runner/bacio - key: cache-bacio-fortran-${{matrix.fortran-compiler}}-${{matrix.build-type}}-key - - - name: Install bacio library v2.4.1 - if: steps.cache-bacio-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.4.1 https://github.com/NOAA-EMC/NCEPLIBS-bacio.git bacio - cd bacio && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${bacio_ROOT} ../ - make -j2 - make install - echo "bacio_DIR=/home/runner/bacio/lib/cmake/bacio" >> $GITHUB_ENV - - - name: Cache SP-library v2.3.3 - id: cache-sp-fortran - uses: actions/cache@v5 - with: - path: /home/runner/NCEPLIBS-sp - key: cache-sp-fortran-${{matrix.fortran-compiler}}-${{matrix.build-type}}-key - - - name: Install SP-library v2.3.3 - if: steps.cache-sp-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.3.3 https://github.com/NOAA-EMC/NCEPLIBS-sp.git NCEPLIBS-sp - cd NCEPLIBS-sp && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${sp_ROOT} ../ - make -j2 - make install - echo "sp_DIR=/home/runner/NCEPLIBS-sp/lib/cmake/sp" >> $GITHUB_ENV - - - name: Cache w3emc library v2.9.2 - id: cache-w3emc-fortran - uses: actions/cache@v5 - with: - path: /home/runner/myw3emc - key: cache-w3emc-fortran-${{matrix.fortran-compiler}}-${{matrix.build-type}}-key - - - name: Install w3emc library v2.9.2 - if: steps.cache-w3emc-fortran.outputs.cache-hit != 'true' - run: | - git clone --branch v2.9.2 https://github.com/NOAA-EMC/NCEPLIBS-w3emc.git NCEPLIBS-w3emc - cd NCEPLIBS-w3emc && mkdir build && cd build - cmake -DCMAKE_INSTALL_PREFIX=${w3emc_ROOT} ../ - make -j2 - make install - echo "w3emc_DIR=/home/runner/myw3emc/lib/cmake/w3emc" >> $GITHUB_ENV - - - # Initialize and build SCM, run regressions tests - - name: Checkout SCM code (into /home/runner/work/ccpp-scm/) - uses: actions/checkout@v6 - - - name: Initialize submodules - run: git submodule update --init --recursive - - - name: Download data for SCM - run: | - cd ${SCM_ROOT} - 
./contrib/get_all_static_data.sh - ./contrib/get_thompson_tables.sh - ./contrib/get_tempo_data.sh - ./contrib/get_aerosol_climo.sh - ./contrib/get_rrtmgp_data.sh - - - name: Configure build with CMake (Release) - if: contains(matrix.build-type, 'Release') - run: | - cd ${SCM_ROOT}/scm - mkdir bin && cd bin - cmake -DCCPP_SUITES=${suites},${suites_ps} ../src - - - name: Configure build with CMake (Debug) - if: contains(matrix.build-type, 'Debug') - run: | - cd ${SCM_ROOT}/scm - mkdir bin && cd bin - cmake -DCCPP_SUITES=${suites},${suites_ps} -DCMAKE_BUILD_TYPE=Debug ../src - - - name: Configure build with CMake (Single Precision) - if: matrix.build-type == 'SinglePrecision' - run: | - cd ${SCM_ROOT}/scm - mkdir bin && cd bin - cmake -DCCPP_SUITES=${suites},${suites_ps} -D32BIT=1 ../src - - - name: Build SCM - run: | - cd ${SCM_ROOT}/scm/bin - make -j4 - - - name: Run SCM RTs - if: matrix.build-type != 'SinglePrecision' - run: | - cd ${SCM_ROOT}/scm/bin - ./run_scm.py --file /home/runner/work/ccpp-scm/ccpp-scm/test/rt_test_cases.py --runtime_mult 0.1 - - - name: Run SCM Single Precision RTs - if: matrix.build-type == 'SinglePrecision' - run: | - cd ${SCM_ROOT}/scm/bin - ./run_scm.py --file /home/runner/work/ccpp-scm/ccpp-scm/test/rt_test_cases_sp.py --runtime_mult 0.1 - - - name: Gather SCM RT output - run: | - cd ${SCM_ROOT}/test - mkdir /home/runner/work/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}} - ./ci_util.py -b ${{matrix.build-type}} - - - name: Save Artifact Id Numbers and Create Directory for SCM RT baselines - run: | - mkdir -p ${dir_bl} - ARTIFACT_ID=$(gh api --paginate \ - repos/NCAR/ccpp-scm/actions/artifacts | \ - jq -s '[ .[] | .artifacts[] | select(.name == "rt-baselines-${{matrix.build-type}}-main" and (.expired | not))] | sort_by(.created_at) | last | .workflow_run | .id ') - echo "artifact_id=${ARTIFACT_ID}" - echo "artifact_id=${ARTIFACT_ID}" >> "$GITHUB_ENV" - - - name: Download SCM RT baselines - uses: actions/download-artifact@v7 - 
with: - name: rt-baselines-${{matrix.build-type}}-main - path: ${{ env.dir_bl }} - github-token: ${{ secrets.GITHUB_TOKEN }} - run-id: ${{ env.artifact_id }} - - - name: Compare SCM RT output to baselines - run: | - cd ${SCM_ROOT}/test - ./cmp_rt2bl.py --dir_rt ${dir_rt} --dir_bl ${dir_bl} - - - name: Check if SCM RT plots exist - id: check_files - run: | - if [ -n "$(ls -A /home/runner/work/ccpp-scm/ccpp-scm/test/scm_rt_out 2>/dev/null)" ]; then - echo "files_exist=true" >> "$GITHUB_ENV" - else - echo "files_exist=false" >> "$GITHUB_ENV" - fi - - - name: Upload plots of SCM Baselines/RTs as GitHub Artifact. - if: env.files_exist == 'true' - uses: actions/upload-artifact@v6 - with: - name: rt-plots-${{matrix.build-type}}-${{ env.artifact_origin }} - path: /home/runner/work/ccpp-scm/ccpp-scm/test/scm_rt_out - - - name: Upload SCM RTs as GitHub Artifact - uses: actions/upload-artifact@v6 - with: - name: rt-baselines-${{matrix.build-type}}-${{ env.artifact_origin }} - path: /home/runner/work/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}} diff --git a/.github/workflows/ci_test_docker.yml b/.github/workflows/ci_test_docker.yml deleted file mode 100644 index 8651d727d..000000000 --- a/.github/workflows/ci_test_docker.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: build and test docker - -on: - # not listing pull_request closed, since it would run when merged - pull_request: - types: [opened, synchronize, reopened] - workflow_dispatch: - -env: - TEST_TAG: dtcenter/ccpp-scm:test - PR_NUMBER: ${{ github.event.number }} - -jobs: - docker: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v6 - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - name: Build and export test tag - uses: docker/build-push-action@v6 - with: - context: . 
- file: docker/Dockerfile - load: true - tags: ${{ env.TEST_TAG }} - build-args: PR_NUMBER=${{ github.event.number }} - - name: Test - run: | - mkdir $HOME/output - chmod a+rw $HOME/output - docker run --rm -v $HOME/output:/home ${{ env.TEST_TAG }} ./run_scm.py -f ../../test/rt_test_cases.py --runtime_mult 0.1 --mpi_command "mpirun -np 1 --allow-run-as-root" -d diff --git a/.github/workflows/create_rt_baselines.yml b/.github/workflows/create_rt_baselines.yml new file mode 100644 index 000000000..c8cb5a644 --- /dev/null +++ b/.github/workflows/create_rt_baselines.yml @@ -0,0 +1,139 @@ +name: Build/run the CCPP-SCM, create baselines, store as GitHub artifact + +on: + workflow_dispatch: + pull_request: + +jobs: + create_rt_baselines: + runs-on: ubuntu-24.04 + strategy: + fail-fast: false + matrix: + fortran-compiler: [ifx, gfortran] #, nvfortran] + build-type: [Release, Debug] + run_lists: [supported, legacy, dev, sp] #, nvhpc] + exclude: + - build-type: Debug + run_lists: sp + include: + # Set container images for each compiler + - fortran-compiler: ifx + image: dustinswales/ccpp-scm-ci:oneapi + - fortran-compiler: gfortran + image: dustinswales/ccpp-scm-ci:gnu +# - fortran-compiler: nvfortran +# image: dustinswales/ccpp-scm-ci:nvhpc + container: + image: ${{ matrix.image }} + # Environmental variables + env: + SCM_ROOT: /__w/ccpp-scm/ccpp-scm + artifact_origin: ${{ github.event_name == 'pull_request' && 'PR' || ('main' == 'main' && 'main' || 'PR') }} + GH_TOKEN: ${{ github.token }} + + # Workflow steps + steps: + ####################################################################################### + # Initial + ####################################################################################### + - name: Checkout SCM code (/__w/ccpp-scm/ccpp-scm) + uses: actions/checkout@v4 + + - name: Install Required Tools + run: | + apt-get update + + - name: Initialize Submodules + run: | + git config --global --add safe.directory ${SCM_ROOT} + cd ${SCM_ROOT} + git submodule 
update --init --recursive + + - name: Setup MPI (GNU) + if: matrix.fortran-compiler == 'gfortran' + run: | + echo "MPI_COMM=mpirun --allow-run-as-root -np 1" >> $GITHUB_ENV + echo "CC=mpicc" >> $GITHUB_ENV + echo "CXX=mpicxx" >> $GITHUB_ENV + echo "FC=mpif90" >> $GITHUB_ENV + + - name: Setup MPI (Intel OneAPI) + if: matrix.fortran-compiler == 'ifx' + run: | + echo "PATH=/opt/intel/oneapi/mpi/latest/bin:${PATH}" >> $GITHUB_ENV + echo "LD_LIBRARY_PATH=/opt/intel/oneapi/mpi/latest/lib:${LD_LIBRARY_PATH}" >> $GITHUB_ENV + echo "MPI_COMM=mpirun -np 1" >> $GITHUB_ENV + echo "CC=mpiicx" >> $GITHUB_ENV + echo "CXX=mpiicpx" >> $GITHUB_ENV + echo "FC=mpiifx" >> $GITHUB_ENV + + - name: Setup MPI (Nvidia) + if: matrix.fortran-compiler == 'nvfortran' + run: | + echo "MPI_COMM=mpirun --allow-run-as-root -np 1" >> $GITHUB_ENV + echo "CC=mpicc" >> $GITHUB_ENV + echo "CXX=mpic++" >> $GITHUB_ENV + echo "FC=mpifort" >> $GITHUB_ENV + + ####################################################################################### + # Build SCM. 
+ ####################################################################################### + - name: Get SDF names for this run_list + id: set_sdfs + run: | + cd ${SCM_ROOT}/test + suites=$(./get_sdfs_for_run_list.py --sdf_list suites_${{matrix.run_lists}}_${{matrix.fortran-compiler}}) + echo "suites=${suites}" >> $GITHUB_OUTPUT + + - name: Print SDF names + run: | + echo ${{steps.set_sdfs.outputs.suites}} + + - name: Download data for SCM + run: | + cd ${SCM_ROOT} + ./contrib/get_all_static_data.sh + ./contrib/get_thompson_tables.sh + ./contrib/get_tempo_data.sh + ./contrib/get_aerosol_climo.sh + ./contrib/get_rrtmgp_data.sh + + - name: Configure Build with CMake (64-bit) + if: matrix.run_lists != 'sp' + run: | + cd ${SCM_ROOT}/scm + mkdir bin && cd bin + cmake -DCCPP_SUITES=${{steps.set_sdfs.outputs.suites}} -DCMAKE_BUILD_TYPE=${{matrix.build-type}} ../src + + - name: Configure Build with CMake (32-bit) + if: matrix.run_lists == 'sp' + run: | + cd ${SCM_ROOT}/scm + mkdir bin && cd bin + cmake -DCCPP_SUITES=${{steps.set_sdfs.outputs.suites}} -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -D32BIT=1 ../src + + - name: Build SCM + run: | + cd ${SCM_ROOT}/scm/bin + make -j4 + + ####################################################################################### + # Run regression tests. 
+ ####################################################################################### + - name: Run SCM RTs + run: | + cd ${SCM_ROOT}/scm/bin + ./run_scm.py --file /__w/ccpp-scm/ccpp-scm/test/rt_test_cases.py --run_list ${{matrix.run_lists}}_${{matrix.fortran-compiler}} --runtime_mult 0.1 --mpi_command "${MPI_COMM}" + + - name: Gather SCM RT output + run: | + cd ${SCM_ROOT}/test + mkdir /__w/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}} + ./ci_util.py -b ${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}} --run_list run_list_${{matrix.run_lists}}_${{matrix.fortran-compiler}} + + - name: Upload SCM RTs as GitHub Artifact + uses: actions/upload-artifact@v5 + with: + name: rt-baselines-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}}-${{ env.artifact_origin }} + path: /__w/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}} \ No newline at end of file diff --git a/.github/workflows/run_scm_rts.yml b/.github/workflows/run_scm_rts.yml new file mode 100644 index 000000000..111dd0658 --- /dev/null +++ b/.github/workflows/run_scm_rts.yml @@ -0,0 +1,179 @@ +name: Build/run the CCPP-SCM, compare to existing baselines (GitHub artifact) + +on: + push: + pull_request: + workflow_dispatch: + +jobs: + run_scm_rts: + runs-on: ubuntu-24.04 + strategy: + fail-fast: false + matrix: + fortran-compiler: [ifx, gfortran] #, nvfortran] + build-type: [Release, Debug] + run_lists: [supported, legacy, dev, sp] #, nvhpc] + exclude: + - build-type: Debug + run_lists: sp + include: + # Set container images for each compiler + - fortran-compiler: ifx + image: dustinswales/ccpp-scm-ci:oneapi + - fortran-compiler: gfortran + image: dustinswales/ccpp-scm-ci:gnu +# - fortran-compiler: nvfortran +# image: dustinswales/ccpp-scm-ci:nvhpc + container: + image: ${{ matrix.image }} + # Environmental variables + env: + SCM_ROOT: 
/__w/ccpp-scm/ccpp-scm + dir_rt: /__w/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}} + dir_bl: /__w/ccpp-scm/ccpp-scm/test/BL-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}} + artifact_origin: ${{ github.event_name == 'pull_request' && 'PR' || ('main' == 'main' && 'main' || 'PR') }} + GH_TOKEN: ${{ github.token }} + + # Workflow steps + steps: + ####################################################################################### + # Initial + ####################################################################################### + - name: Checkout SCM code (/__w/ccpp-scm/ccpp-scm) + uses: actions/checkout@v4 + + - name: Install Required Tools + run: | + apt-get update + + - name: Initialize Submodules + run: | + git config --global --add safe.directory ${SCM_ROOT} + cd ${SCM_ROOT} + git submodule update --init --recursive + + - name: Setup MPI (GNU) + if: matrix.fortran-compiler == 'gfortran' + run: | + echo "MPI_COMM=mpirun --allow-run-as-root -np 1" >> $GITHUB_ENV + echo "CC=mpicc" >> $GITHUB_ENV + echo "CXX=mpicxx" >> $GITHUB_ENV + echo "FC=mpif90" >> $GITHUB_ENV + + - name: Setup MPI (Intel OneAPI) + if: matrix.fortran-compiler == 'ifx' + run: | + echo "PATH=/opt/intel/oneapi/mpi/latest/bin:${PATH}" >> $GITHUB_ENV + echo "LD_LIBRARY_PATH=/opt/intel/oneapi/mpi/latest/lib:${LD_LIBRARY_PATH}" >> $GITHUB_ENV + echo "MPI_COMM=mpirun -np 1" >> $GITHUB_ENV + echo "CC=mpiicx" >> $GITHUB_ENV + echo "CXX=mpiicpx" >> $GITHUB_ENV + echo "FC=mpiifx" >> $GITHUB_ENV + + - name: Setup MPI (Nvidia) + if: matrix.fortran-compiler == 'nvfortran' + run: | + echo "MPI_COMM=mpirun --allow-run-as-root -np 1" >> $GITHUB_ENV + echo "CC=mpicc" >> $GITHUB_ENV + echo "CXX=mpic++" >> $GITHUB_ENV + echo "FC=mpifort" >> $GITHUB_ENV + + ####################################################################################### + # Build SCM. 
+ ####################################################################################### + - name: Get SDF names for this run_list + id: set_sdfs + run: | + cd ${SCM_ROOT}/test + suites=$(./get_sdfs_for_run_list.py --sdf_list suites_${{matrix.run_lists}}_${{matrix.fortran-compiler}}) + echo "suites=${suites}" >> $GITHUB_OUTPUT + + - name: Print SDF names + run: | + echo ${{steps.set_sdfs.outputs.suites}} + + - name: Download data for SCM + run: | + cd ${SCM_ROOT} + ./contrib/get_all_static_data.sh + ./contrib/get_thompson_tables.sh + ./contrib/get_tempo_data.sh + ./contrib/get_aerosol_climo.sh + ./contrib/get_rrtmgp_data.sh + + - name: Configure Build with CMake (64-bit) + if: matrix.run_lists != 'sp' + run: | + cd ${SCM_ROOT}/scm + mkdir bin && cd bin + cmake -DCCPP_SUITES=${{steps.set_sdfs.outputs.suites}} -DCMAKE_BUILD_TYPE=${{matrix.build-type}} ../src + + - name: Configure Build with CMake (32-bit) + if: matrix.run_lists == 'sp' + run: | + cd ${SCM_ROOT}/scm + mkdir bin && cd bin + cmake -DCCPP_SUITES=${{steps.set_sdfs.outputs.suites}} -DCMAKE_BUILD_TYPE=${{matrix.build-type}} -D32BIT=1 ../src + + - name: Build SCM + run: | + cd ${SCM_ROOT}/scm/bin + make -j4 + + ####################################################################################### + # Run regression tests. 
+ ####################################################################################### + - name: Run SCM RTs + run: | + cd ${SCM_ROOT}/scm/bin + ./run_scm.py --file /__w/ccpp-scm/ccpp-scm/test/rt_test_cases.py --run_list ${{matrix.run_lists}}_${{matrix.fortran-compiler}} --runtime_mult 0.1 --mpi_command "${MPI_COMM}" + + - name: Gather SCM RT output + run: | + cd ${SCM_ROOT}/test + mkdir /__w/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}} + ./ci_util.py -b ${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}} --run_list run_list_${{matrix.run_lists}}_${{matrix.fortran-compiler}} + + - name: Save Artifact Id Numbers and Create Directory for SCM RT baselines + run: | + mkdir -p ${dir_bl} + ARTIFACT_ID=$(gh api --paginate \ + repos/NCAR/ccpp-scm/actions/artifacts | \ + jq -s '[ .[] | .artifacts[] | select(.name == "rt-baselines-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}}-${{ env.artifact_origin }}" and (.expired | not))] | sort_by(.created_at) | last | .workflow_run | .id ') + echo "artifact_id=${ARTIFACT_ID}" + echo "artifact_id=${ARTIFACT_ID}" >> "$GITHUB_ENV" + + - name: Download SCM RT baselines + uses: actions/download-artifact@v6 + with: + name: rt-baselines-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}}-${{ env.artifact_origin }} + path: ${{ env.dir_bl }} + github-token: ${{ secrets.GITHUB_TOKEN }} + run-id: ${{ env.artifact_id }} + + - name: Compare SCM RT output to baselines + run: | + cd ${SCM_ROOT}/test + ./cmp_rt2bl.py --dir_rt ${dir_rt} --dir_bl ${dir_bl} --run_list run_list_${{matrix.run_lists}}_${{matrix.fortran-compiler}} + + - name: Check if SCM RT plots exist + id: check_files + run: | + if [ -n "$(ls -A /__w/ccpp-scm/ccpp-scm/test/scm_rt_out 2>/dev/null)" ]; then + echo "files_exist=true" >> "$GITHUB_ENV" + else + echo "files_exist=false" >> "$GITHUB_ENV" + fi + + - name: Upload plots of SCM Baselines/RTs as 
GitHub Artifact + uses: actions/upload-artifact@v4 + with: + name: rt-plots-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}}-${{ env.artifact_origin }} + path: /__w/ccpp-scm/ccpp-scm/test/scm_rt_out + + - name: Upload SCM RTs as GitHub Artifact + uses: actions/upload-artifact@v5 + with: + name: rt-baselines-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}}-${{ env.artifact_origin }} + path: /__w/ccpp-scm/ccpp-scm/test/artifact-${{matrix.build-type}}-${{matrix.fortran-compiler}}-${{matrix.run_lists}} \ No newline at end of file diff --git a/docker/Dockerfile-add-nceplibs b/docker/Dockerfile-add-nceplibs new file mode 100644 index 000000000..6445833bf --- /dev/null +++ b/docker/Dockerfile-add-nceplibs @@ -0,0 +1,39 @@ +# +# Install NCEPlibs and its dependencies +# + +ARG TOOLCHAIN +FROM add-netcdf:$TOOLCHAIN + +ENV NCEPLIBS_DIR=/opt/nceplibs + +# Install bacio library +ENV BACIO_VERSION=2.4.1 +RUN mkdir -p $NCEPLIBS_DIR/src && cd $NCEPLIBS_DIR/src \ + && git clone -b v${BACIO_VERSION} --recursive https://github.com/NOAA-EMC/NCEPLIBS-bacio \ + && mkdir NCEPLIBS-bacio/build && cd NCEPLIBS-bacio/build \ + && cmake -DCMAKE_INSTALL_PREFIX=$NCEPLIBS_DIR .. \ + && make VERBOSE=1 -j \ + && make install + +# Install SP-library +ENV SP_VERSION=2.3.3 +RUN cd $NCEPLIBS_DIR/src \ + && git clone -b v${SP_VERSION} --recursive https://github.com/NOAA-EMC/NCEPLIBS-sp \ + && mkdir NCEPLIBS-sp/build && cd NCEPLIBS-sp/build \ + && cmake -DCMAKE_INSTALL_PREFIX=$NCEPLIBS_DIR .. \ + && make VERBOSE=1 -j \ + && make install + +# Install w3emc library +ENV W3EMC_VERSION=2.11.0 +RUN cd $NCEPLIBS_DIR/src \ + && git clone -b v${W3EMC_VERSION} --recursive https://github.com/NOAA-EMC/NCEPLIBS-w3emc \ + && mkdir NCEPLIBS-w3emc/build && cd NCEPLIBS-w3emc/build \ + && cmake -DCMAKE_INSTALL_PREFIX=$NCEPLIBS_DIR .. 
\ + && make VERBOSE=1 -j \ + && make install + +ENV bacio_DIR=/opt/nceplibs/lib/cmake/bacio +ENV sp_DIR=/opt/nceplibs/lib/cmake/sp +ENV w3emc_DIR=/opt/nceplibs/lib/cmake/w3emc diff --git a/docker/Dockerfile-add-netcdf b/docker/Dockerfile-add-netcdf new file mode 100644 index 000000000..4e442f3e4 --- /dev/null +++ b/docker/Dockerfile-add-netcdf @@ -0,0 +1,35 @@ +# +# Install NetCDF Fortran and its dependencies +# + +ARG TOOLCHAIN +FROM minimal-toolchain:$TOOLCHAIN + +# Install the dependencies +RUN apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get --yes install --no-install-recommends \ + curl \ + libnetcdf-dev \ +# libopenmpi-dev \ + openmpi-bin \ + && rm -rf /var/lib/apt/lists/* + +# Install NetCDF Fortran +# The version must be compatible with NetCDF C installed above +ARG NFVERSION +ENV NFVERSION=$NFVERSION + +RUN curl https://downloads.unidata.ucar.edu/netcdf-fortran/$NFVERSION/netcdf-fortran-$NFVERSION.tar.gz | tar xz \ + && cd netcdf-fortran-$NFVERSION \ + && { ./configure \ + CFLAGS='-O2' \ + FCFLAGS='-O2 -fPIC' \ + --disable-static \ + --prefix=/usr || \ + { cat ./config.log; exit 1; } } +RUN cd netcdf-fortran-$NFVERSION \ + && make -j \ + && make install \ + && cd .. 
\ + && rm -rf netcdf-fortran-$NFVERSION \ + && ldconfig diff --git a/docker/Dockerfile-add-pnetcdf b/docker/Dockerfile-add-pnetcdf new file mode 100644 index 000000000..1223588f7 --- /dev/null +++ b/docker/Dockerfile-add-pnetcdf @@ -0,0 +1,39 @@ +# +# Install Parallel NetCDF Fortran and its dependencies +# + +ARG TOOLCHAIN +FROM add-netcdf:$TOOLCHAIN + +ARG PNFVERSION +ENV PNFVERSION=$PNFVERSION +ARG FC +ENV FC=$FC +ARG CC +ENV CC=$CC + +# Set environment variables for PnetCDF installation +ENV PNETCDF_PREFIX=/opt/pnetcdf +ENV PATH="${PNETCDF_PREFIX}/bin:${PATH}" +ENV LD_LIBRARY_PATH="${PNETCDF_PREFIX}/lib:${LD_LIBRARY_PATH}" +ENV PKG_CONFIG_PATH="${PNETCDF_PREFIX}/lib/pkgconfig:${PKG_CONFIG_PATH}" + +# Update and install necessary build tools +RUN apt-get update && apt-get install -y --no-install-recommends \ + automake \ + libtool \ + && rm -rf /var/lib/apt/lists/* + +# Download, compile, and install PnetCDF +WORKDIR /tmp +RUN wget https://parallel-netcdf.github.io/Release/pnetcdf-${PNFVERSION}.tar.gz \ + && tar xzf pnetcdf-${PNFVERSION}.tar.gz \ + && cd pnetcdf-${PNFVERSION} \ + && CC=$CC FC=$FC ./configure --prefix=${PNETCDF_PREFIX} \ + && make -j 8 install \ + && cd /tmp \ + && rm -rf pnetcdf-${PNFVERSION} pnetcdf-${PNFVERSION}.tar.gz + +# Add PnetCDF library path to the environment +ENV PATH="${PNETCDF_PREFIX}/bin:${PATH}" +ENV LD_LIBRARY_PATH="${PNETCDF_PREFIX}/lib:${LD_LIBRARY_PATH}" diff --git a/docker/Dockerfile-add-python b/docker/Dockerfile-add-python new file mode 100644 index 000000000..eee9082f4 --- /dev/null +++ b/docker/Dockerfile-add-python @@ -0,0 +1,27 @@ +# +# Install Python3 and a small set of packages +# + +ARG TOOLCHAIN +FROM add-nceplibs:$TOOLCHAIN + +# Install Python +RUN apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get --yes install --no-install-recommends \ + python-is-python3 \ + python3 \ + python3-pip \ + && rm -rf /var/lib/apt/lists/* + +# Install essential packages +RUN pip3 install --break-system-packages \ + f90nml \ + 
h5py \ + netCDF4 \ + numpy \ + xarray + +# Install packages for the validation plot generation +RUN pip3 install --break-system-packages \ + matplotlib + diff --git a/docker/Dockerfile-finalize b/docker/Dockerfile-finalize new file mode 100644 index 000000000..581663cd4 --- /dev/null +++ b/docker/Dockerfile-finalize @@ -0,0 +1,12 @@ +# +# Finalize CI containers +# + +ARG TOOLCHAIN +FROM add-python:$TOOLCHAIN + +# Install additional tools +RUN apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get --yes install --no-install-recommends \ + zstd \ + && rm -rf /var/lib/apt/lists/* diff --git a/docker/Dockerfile-gnu-minimal b/docker/Dockerfile-gnu-minimal new file mode 100644 index 000000000..f8334e19a --- /dev/null +++ b/docker/Dockerfile-gnu-minimal @@ -0,0 +1,30 @@ +FROM ubuntu:24.04 + +# Extend and update the package registry +RUN apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get --yes install --no-install-recommends \ + ca-certificates \ + curl \ + wget \ + gpg \ + binutils \ + g++ \ + gcc \ + libc-dev \ + make \ + git \ + gfortran>=14 \ + cmake \ + libopenmpi-dev \ + openmpi-bin \ + ksh \ + m4 \ + tcsh \ + time \ + vim \ + file \ + libxml2 \ + gh \ + jq + +ENV FC=mpif90 CC=mpicc CXX=mpicxx \ No newline at end of file diff --git a/docker/Dockerfile-nvhpc-minimal b/docker/Dockerfile-nvhpc-minimal new file mode 100644 index 000000000..484c6ecf2 --- /dev/null +++ b/docker/Dockerfile-nvhpc-minimal @@ -0,0 +1,33 @@ +FROM ubuntu:24.04 + +ARG NVHPC_VERSION_MAJOR='25' +ARG NVHPC_VERSION_MINOR='9' + +# Extend and update the package registry +RUN apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get --yes install --no-install-recommends \ + ca-certificates \ + curl \ + time \ + gpg \ + make \ + cmake \ + wget \ + git + RUN curl https://developer.download.nvidia.com/hpc-sdk/ubuntu/DEB-GPG-KEY-NVIDIA-HPC-SDK \ + | gpg --dearmor > /usr/share/keyrings/nvidia-hpcsdk-archive-keyring.gpg \ + && echo "deb 
[signed-by=/usr/share/keyrings/nvidia-hpcsdk-archive-keyring.gpg] https://developer.download.nvidia.com/hpc-sdk/ubuntu/amd64 /" > /etc/apt/sources.list.d/nvhpc.list \ + && apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get --yes install --no-install-recommends \ + nvhpc-${NVHPC_VERSION_MAJOR}-${NVHPC_VERSION_MINOR} \ + && rm -rf /var/lib/apt/lists/* \ + && for dir in comm_libs examples math_libs profilers; do \ + rm -rf "/opt/nvidia/hpc_sdk/Linux_x86_64/${NVHPC_VERSION_MAJOR}.${NVHPC_VERSION_MINOR}/${dir}"; \ + done + +ENV PATH="/opt/nvidia/hpc_sdk/Linux_x86_64/${NVHPC_VERSION_MAJOR}.${NVHPC_VERSION_MINOR}/compilers/bin:${PATH}" + +# Set default compiler executables +ENV FC=nvfortran CC=nvc CXX=nvc++ +ENV MPICC=mpicc +#ENV FC=mpifort CC=mpicc CXX=mpic++ \ No newline at end of file diff --git a/docker/Dockerfile-oneapi-minimal b/docker/Dockerfile-oneapi-minimal new file mode 100644 index 000000000..cf07bc76e --- /dev/null +++ b/docker/Dockerfile-oneapi-minimal @@ -0,0 +1,50 @@ +FROM ubuntu:24.04 + +ARG ONEAPI_VERSION='2025.3' + +# Extend and update the package registry +RUN apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get --yes install --no-install-recommends \ + ca-certificates \ + curl \ + wget \ + gpg \ + binutils \ + g++ \ + gcc \ + gfortran \ + libc-dev \ + make \ + cmake \ + git \ + ksh \ + m4 \ + tcsh \ + time \ + vim \ + gh \ + jq +# See https://www.intel.com/content/www/us/en/docs/oneapi/installation-guide-linux/2025-2/hpc-apt.html + +RUN wget -O - https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \ + | gpg --dearmor \ + | tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null \ + && echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" \ + | tee /etc/apt/sources.list.d/oneAPI.list \ + && apt-get update \ + && apt-get install --yes --no-install-recommends \ + intel-oneapi-common-vars \ + intel-oneapi-compiler-fortran-${ONEAPI_VERSION} \ + 
intel-oneapi-mpi \ + intel-oneapi-mpi-devel\ + intel-oneapi-compiler-dpcpp-cpp-${ONEAPI_VERSION} \ + && rm -rf /var/lib/apt/lists/* + +ENV PATH="/opt/intel/oneapi/compiler/latest/bin:${PATH}" +ENV LD_LIBRARY_PATH="/opt/intel/oneapi/compiler/latest/lib:${LD_LIBRARY_PATH}" + +ENV I_MPI_ROOT=/opt/intel/oneapi/mpi/latest + +# oneAPI environments +RUN . /opt/intel/oneapi/setvars.sh +ENV FC=ifx CC=icx CXX=icpx diff --git a/scm/src/CMakeLists.txt b/scm/src/CMakeLists.txt index b0c68574d..49a592209 100644 --- a/scm/src/CMakeLists.txt +++ b/scm/src/CMakeLists.txt @@ -150,7 +150,11 @@ if (32BIT) add_definitions(-DRTE_USE_SP) endif() +#------------------------------------------------------------------------------ +# GNU +#------------------------------------------------------------------------------ if (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") + message(STATUS "Compile using GNU") set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -ggdb -fbacktrace -cpp -fcray-pointer -ffree-line-length-none -fno-range-check") if(${CMAKE_Fortran_COMPILER_VERSION} VERSION_GREATER_EQUAL 10) @@ -173,7 +177,50 @@ if (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") set(CMAKE_Fortran_FLAGS_RELEASE "-O2 -fPIC" CACHE STRING "" FORCE) set(CMAKE_C_FLAGS_BITFORBIT "-O2 -fPIC" CACHE STRING "" FORCE) set(CMAKE_Fortran_FLAGS_BITFORBIT "-O2 -fPIC" CACHE STRING "" FORCE) -elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "Intel") +#------------------------------------------------------------------------------ +# Intel oneAPI +#------------------------------------------------------------------------------ + elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "IntelLLVM") + message(STATUS "Compile using Intel OneAPI") + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -g -traceback -fpp -fno-alias -auto -safe-cray-ptr -ftz -assume byterecl -nowarn -align array64byte -qno-opt-dynamic-align") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -qno-opt-dynamic-align -sox -fp-model source") + + if(NOT 32BIT) + set(CMAKE_Fortran_FLAGS 
"${CMAKE_Fortran_FLAGS} -real-size 64") + endif() + + if (${CMAKE_BUILD_TYPE} MATCHES "Debug") +# set(CMAKE_Fortran_FLAGS_DEBUG "${CMAKE_Fortran_FLAGS_DEBUG} -O0 -check -check noarg_temp_created -check nopointer -warn -warn noerrors -fstack-protector-all -fpe0 -debug -ftrapuv -init=snan,arrays") + set(CMAKE_Fortran_FLAGS_DEBUG "${CMAKE_Fortran_FLAGS_DEBUG} -O0 -check -check noarg_temp_created -check nopointer -warn -warn noerrors -fpe0 -debug") + set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 -ftrapuv") + else() + if(32BIT) + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -O2 -debug minimal -fp-model source -qoverride-limits -qopt-prefetch=3") + else() + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -O2 -debug minimal -fp-model source -qoverride-limits -qopt-prefetch=3 -no-prec-div") + endif() + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O2 -debug minimal") + set(FAST "-fast-transcendentals") + if(AVX2) + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -march=core-avx2") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=core-avx2") + elseif(SIMDMULTIARCH) + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -axSSE4.2,CORE-AVX2") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -axSSE4.2,CORE-AVX2") + elseif(AVX) + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -march=core-avx-i") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=core-avx-i") + endif() + endif() + set(CMAKE_C_FLAGS_RELEASE "-O2 -fPIC" CACHE STRING "" FORCE) + set(CMAKE_Fortran_FLAGS_RELEASE "-O2 -fPIC" CACHE STRING "" FORCE) + set(CMAKE_C_FLAGS_BITFORBIT "-O2 -fPIC" CACHE STRING "" FORCE) + set(CMAKE_Fortran_FLAGS_BITFORBIT "-O2 -fPIC" CACHE STRING "" FORCE) +#------------------------------------------------------------------------------ +# Intel Classic +#------------------------------------------------------------------------------ + elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "Intel") + message(STATUS "Compile using Intel Classic") set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -g -traceback -fpp -fno-alias -auto 
-safe-cray-ptr -ftz -assume byterecl -nowarn -sox -align array64byte -qno-opt-dynamic-align") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -qno-opt-dynamic-align -sox -fp-model source") @@ -209,7 +256,11 @@ elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "Intel") set(CMAKE_C_FLAGS_BITFORBIT "-O2 -fPIC" CACHE STRING "" FORCE) set(CMAKE_Fortran_FLAGS_BITFORBIT "-O2 -fPIC" CACHE STRING "" FORCE) +#------------------------------------------------------------------------------ +# Nvidia HPC stack +#------------------------------------------------------------------------------ elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "NVHPC") + message(STATUS "Compile using Nvidia HPC Stack") if(ENABLE_NVIDIA_OPENACC MATCHES "true") set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -acc -Minfo=accel") set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -acc -Minfo=accel") diff --git a/scm/src/run_scm.py b/scm/src/run_scm.py index ce628eac6..364dbd91a 100755 --- a/scm/src/run_scm.py +++ b/scm/src/run_scm.py @@ -107,6 +107,7 @@ sgroup = parser.add_argument_group('Single experiment') sgroup.add_argument('-c', '--case', help='name of case to run') sgroup.add_argument('-s', '--suite', help='name of suite to use') +sgroup.add_argument('--run_list', help='name of run_list in rt_test_cases file', required=False) sgroup.add_argument('-n', '--namelist', help='physics namelist to use') sgroup.add_argument('-t', '--tracers', help='tracer configuration to use') parser.add_argument('-g', '--gdb', help='invoke scm through gdb', action='store_true', default=False) @@ -174,6 +175,7 @@ def parse_arguments(): """Parse command line arguments""" args = parser.parse_args() file = args.file + run_list = args.run_list case = args.case sdf = args.suite namelist = args.namelist @@ -201,7 +203,7 @@ def parse_arguments(): if not sdf: sdf = DEFAULT_SUITE - return (file, case, sdf, namelist, tracers, gdb, runtime, runtime_mult, docker, \ + return (file, run_list, case, sdf, namelist, tracers, gdb, runtime, runtime_mult, docker, \ verbose, 
levels, npz_type, vert_coord_file, case_data_dir, n_itt_out, \ n_itt_diag, run_dir, bin_dir, timestep, mpi_command, stop_on_error) @@ -835,7 +837,7 @@ def find_max_str_lengths(run_list): def main(): - (file, case, sdf, namelist, tracers, use_gdb, runtime, runtime_mult, docker, \ + (file, run_list_name, case, sdf, namelist, tracers, use_gdb, runtime, runtime_mult, docker, \ verbose, levels, npz_type, vert_coord_file, case_data_dir, n_itt_out, \ n_itt_diag, run_dir, bin_dir, timestep, mpi_command, stop_on_error \ ) = parse_arguments() @@ -867,6 +869,11 @@ def main(): global EXECUTABLE EXECUTABLE = os.path.join(SCM_RUN, EXECUTABLE_NAME) + list_name = "run_list" + if (run_list_name): + list_name = "run_list_"+run_list_name + # end if + # Debugger if use_gdb: gdb = find_gdb() @@ -879,7 +886,7 @@ def main(): sys.path.append(dirname) module_name = os.path.splitext(basename)[0] scm_runs = importlib.import_module(module_name) - run_list = scm_runs.run_list + run_list = getattr(scm_runs,list_name) sys.path.pop() except ImportError: message = 'There was a problem loading {0}. 
Please check that the path exists.'.format(file) diff --git a/scm/src/suite_info.py b/scm/src/suite_info.py index 6d0aadfbd..cc9251a7b 100755 --- a/scm/src/suite_info.py +++ b/scm/src/suite_info.py @@ -95,13 +95,19 @@ def main(): import rt_test_cases_sp import rt_test_cases_nvidia - for item in rt_test_cases.run_list: + for item in rt_test_cases.run_list_supported: rt_suite_list.append(item.get("suite")) - - for item in rt_test_cases_sp.run_list: + + for item in rt_test_cases.run_list_legacy: + rt_suite_list.append(item.get("suite")) + + for item in rt_test_cases.run_list_dev: + rt_suite_list.append(item.get("suite")) + + for item in rt_test_cases.run_list_sp: rt_suite_list.append(item.get("suite")) - for item in rt_test_cases_nvidia.run_list: + for item in rt_test_cases.run_list_nvhpc: rt_suite_list.append(item.get("suite")) unique_suite_list = list(set(rt_suite_list)) diff --git a/test/ci_util.py b/test/ci_util.py index 07baebeee..04edb42ec 100755 --- a/test/ci_util.py +++ b/test/ci_util.py @@ -8,23 +8,25 @@ ############################################################################## import os import sys -from rt_test_cases import run_list +import rt_test_cases from os.path import exists import argparse # parser = argparse.ArgumentParser() -parser.add_argument('-b', '--build_type', help='SCM build type') +parser.add_argument('-b', '--build_type', help='SCM build type') +parser.add_argument('-r', '--run_list', help='Run list of SCM SDFs and cases') def parse_args(): args = parser.parse_args() build_type = args.build_type - return (build_type) + run_list = args.run_list + return (build_type, run_list) def main(): - (build_type) = parse_args() - + (build_type, run_list_name) = parse_args() + run_list = getattr(rt_test_cases, run_list_name) # errmsgs=[] for run in run_list: diff --git a/test/cmp_rt2bl.py b/test/cmp_rt2bl.py index 7560d1b61..86318c4b0 100755 --- a/test/cmp_rt2bl.py +++ b/test/cmp_rt2bl.py @@ -7,7 +7,7 @@ 
############################################################################## import os import sys -from rt_test_cases import run_list +import rt_test_cases from os.path import exists import argparse from plot_scm_out import plot_results @@ -17,19 +17,22 @@ parser.add_argument('-drt', '--dir_rt', help='Directory containing SCM RT output', required=True) parser.add_argument('-dbl', '--dir_bl', help='Directory containing SCM RT baselines', required=True) parser.add_argument('-np', '--no_plots', help='flag to turn off generation of difference plots', required=False, action='store_true') +parser.add_argument('-r', '--run_list', help='Run list of SCM SDFs and cases') # def parse_args(): - args = parser.parse_args() - dir_rt = args.dir_rt - dir_bl = args.dir_bl - no_plots = args.no_plots - return (dir_rt, dir_bl, no_plots) + args = parser.parse_args() + dir_rt = args.dir_rt + dir_bl = args.dir_bl + no_plots = args.no_plots + run_list = args.run_list + return (dir_rt, dir_bl, no_plots, run_list) # def main(): - # - (dir_rt, dir_bl, no_plots) = parse_args() + + (dir_rt, dir_bl, no_plots, run_list_name) = parse_args() + run_list = getattr(rt_test_cases, run_list_name) # error_count = 0 diff --git a/test/get_sdfs_for_run_list.py b/test/get_sdfs_for_run_list.py new file mode 100755 index 000000000..db62fc5f7 --- /dev/null +++ b/test/get_sdfs_for_run_list.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python + +############################################################################## +# +# This script gets SDFs needed for a run list. 
+# +############################################################################## +import os +import sys +import rt_test_cases +from os.path import exists +import argparse + +# +parser = argparse.ArgumentParser() +parser.add_argument('-r', '--sdf_list', help='List of SCM SDFs') + +def parse_args(): + args = parser.parse_args() + sdf_list = args.sdf_list + return (sdf_list) + +def main(): + + (sdf_list_name) = parse_args() + sdf_list = getattr(rt_test_cases, sdf_list_name) + list_out = '' + for count,sdf in enumerate(sdf_list): + if (count < len(sdf_list)-1): + list_out = list_out+sdf+',' + else: + list_out = list_out+sdf + # endif + # end for + print(list_out) +# end def + +if __name__ == '__main__': + main() diff --git a/test/rt_test_cases.py b/test/rt_test_cases.py index 9d200e832..7b343c981 100644 --- a/test/rt_test_cases.py +++ b/test/rt_test_cases.py @@ -1,7 +1,10 @@ -run_list = [\ - #---------------------------------------------------------------------------------------------------------------------------------------------- - # Supported suites for CCPP Version 7 release - #---------------------------------------------------------------------------------------------------------------------------------------------- +#---------------------------------------------------------------------------------------------------------------------------------------------------------- +# Supported suites for CCPP Version 7 release. 
+#---------------------------------------------------------------------------------------------------------------------------------------------------------- +suites_supported_gfortran = [\ + "SCM_GFS_v17_p8_ugwpv1", "SCM_GFS_v16_RRTMGP", "SCM_GFS_v16", "SCM_WoFS_v0", "SCM_HRRR_gf", \ + "SCM_GFS_v17_p8_ugwpv1_ps","SCM_GFS_v16_RRTMGP_ps","SCM_GFS_v16_ps","SCM_WoFS_v0_ps","SCM_HRRR_gf_ps"] +run_list_supported_gfortran = [\ {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v17_p8_ugwpv1"}, \ {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v16_RRTMGP"}, \ {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v16"}, \ @@ -37,32 +40,108 @@ {"case": "MOSAiC-AMPS", "suite": "SCM_GFS_v16"}, \ {"case": "MOSAiC-AMPS", "suite": "SCM_WoFS_v0"}, \ {"case": "MOSAiC-AMPS", "suite": "SCM_HRRR_gf"}, \ - {"case": "gabls3", "suite": "SCM_GFS_v16"}, \ - #---------------------------------------------------------------------------------------------------------------------------------------------- - # Unsupported suites (w/ supported cases) - #---------------------------------------------------------------------------------------------------------------------------------------------- + {"case": "gabls3", "suite": "SCM_GFS_v16"}] +# +suites_supported_ifx = suites_supported_gfortran +run_list_supported_ifx = [\ + {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v17_p8_ugwpv1"}, \ + {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v16_RRTMGP"}, \ + {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v16"}, \ + {"case": "arm_sgp_summer_1997_A", "suite": "SCM_WoFS_v0"}, \ + {"case": "arm_sgp_summer_1997_A", "suite": "SCM_HRRR_gf"}, \ + {"case": "twpice", "suite": "SCM_GFS_v17_p8_ugwpv1"}, \ + #{"case": "twpice", "suite": "SCM_GFS_v16_RRTMGP"}, \ + {"case": "twpice", "suite": "SCM_GFS_v16"}, \ + {"case": "twpice", "suite": "SCM_WoFS_v0"}, \ + {"case": "twpice", "suite": "SCM_HRRR_gf"}, \ + {"case": "bomex", "suite": "SCM_GFS_v17_p8_ugwpv1"}, \ + {"case": "bomex", "suite": 
"SCM_GFS_v16_RRTMGP"}, \ + {"case": "bomex", "suite": "SCM_GFS_v16"}, \ + {"case": "bomex", "suite": "SCM_WoFS_v0"}, \ + {"case": "bomex", "suite": "SCM_HRRR_gf"}, \ + {"case": "astex", "suite": "SCM_GFS_v17_p8_ugwpv1"}, \ + #{"case": "astex", "suite": "SCM_GFS_v16_RRTMGP"}, \ + {"case": "astex", "suite": "SCM_GFS_v16"}, \ + {"case": "astex", "suite": "SCM_WoFS_v0"}, \ + {"case": "astex", "suite": "SCM_HRRR_gf"}, \ + {"case": "LASSO_2016051812", "suite": "SCM_GFS_v17_p8_ugwpv1"}, \ + #{"case": "LASSO_2016051812", "suite": "SCM_GFS_v16_RRTMGP"}, \ + {"case": "LASSO_2016051812", "suite": "SCM_GFS_v16"}, \ + {"case": "LASSO_2016051812", "suite": "SCM_WoFS_v0"}, \ + {"case": "LASSO_2016051812", "suite": "SCM_HRRR_gf"}, \ + {"case": "COMBLE", "suite": "SCM_GFS_v17_p8_ugwpv1"}, \ + #{"case": "COMBLE", "suite": "SCM_GFS_v16_RRTMGP"}, \ + {"case": "COMBLE", "suite": "SCM_GFS_v16"}, \ + {"case": "COMBLE", "suite": "SCM_WoFS_v0"}, \ + {"case": "COMBLE", "suite": "SCM_HRRR_gf"}, \ + {"case": "MOSAiC-AMPS", "suite": "SCM_GFS_v17_p8_ugwpv1"}, \ + #{"case": "MOSAiC-AMPS", "suite": "SCM_GFS_v16_RRTMGP"}, \ + {"case": "MOSAiC-AMPS", "suite": "SCM_GFS_v16"}, \ + {"case": "MOSAiC-AMPS", "suite": "SCM_WoFS_v0"}, \ + {"case": "MOSAiC-AMPS", "suite": "SCM_HRRR_gf"}, \ + {"case": "gabls3", "suite": "SCM_GFS_v16"}] +# +suites_supported_nvhpc = ["SCM_RAP", "SCM_RAP_ps"] +run_list_supported_nvhpc = [\ + {"case": "arm_sgp_summer_1997_A", "suite": "SCM_RAP"}, \ + {"case": "twpice", "suite": "SCM_RAP"}, \ + {"case": "bomex", "suite": "SCM_RAP"}, \ + {"case": "astex", "suite": "SCM_RAP"}, \ + {"case": "LASSO_2016051812", "suite": "SCM_RAP"}] + +#---------------------------------------------------------------------------------------------------------------------------------------------------------- +# Developmental suites, (w/ supported cases). 
+#----------------------------------------------------------------------------------------------------------------------------------------------------------
+suites_dev_gfortran = [\
+    "SCM_GFS_v16_no_nsst", "SCM_GFS_v17_p8_ugwpv1_no_nsst", "SCM_RRFS_v1beta_no_nsst", "SCM_GFS_v17_p8_ugwpv1_tempo", \
+    "SCM_GFS_v16_no_nsst_ps", "SCM_GFS_v17_p8_ugwpv1_no_nsst_ps", "SCM_RRFS_v1beta_no_nsst_ps", "SCM_GFS_v17_p8_ugwpv1_tempo_ps", \
+    "SCM_GFS_v16_gfdlmpv3", "SCM_GFS_v15p2_ntiedtke", "SCM_GFS_v16_debug", \
+    "SCM_GFS_v16_gfdlmpv3_ps","SCM_GFS_v15p2_ntiedtke_ps", "SCM_GFS_v16_debug_ps"]
+run_list_dev_gfortran = [\
+    {"case": "atomic_Jan16T22Jan18T06", "suite": "SCM_GFS_v16_no_nsst"}, \
+    {"case": "atomic_Jan16T22Jan18T06", "suite": "SCM_GFS_v17_p8_ugwpv1_no_nsst"}, \
+    {"case": "atomic_Jan16T22Jan18T06", "suite": "SCM_RRFS_v1beta_no_nsst"}, \
+    {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v17_p8_ugwpv1_tempo"}, \
+    {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v16_gfdlmpv3"}, \
+    {"case": "twpice", "suite": "SCM_GFS_v15p2_ntiedtke"}, \
+    {"case": "bomex", "suite": "SCM_GFS_v16_debug"}]
+#
+suites_dev_ifx = [\
+    "SCM_GFS_v16_no_nsst", "SCM_GFS_v17_p8_ugwpv1_no_nsst", "SCM_RRFS_v1beta_no_nsst", \
+    "SCM_GFS_v16_no_nsst_ps", "SCM_GFS_v17_p8_ugwpv1_no_nsst_ps", "SCM_RRFS_v1beta_no_nsst_ps", \
+    "SCM_GFS_v16_gfdlmpv3", "SCM_GFS_v15p2_ntiedtke", "SCM_GFS_v16_debug", \
+    "SCM_GFS_v16_gfdlmpv3_ps", "SCM_GFS_v15p2_ntiedtke_ps", "SCM_GFS_v16_debug_ps"]
+run_list_dev_ifx = [\
     {"case": "atomic_Jan16T22Jan18T06", "suite": "SCM_GFS_v16_no_nsst"}, \
     {"case": "atomic_Jan16T22Jan18T06", "suite": "SCM_GFS_v17_p8_ugwpv1_no_nsst"}, \
     {"case": "atomic_Jan16T22Jan18T06", "suite": "SCM_RRFS_v1beta_no_nsst"}, \
+    #{"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v17_p8_ugwpv1_tempo"}, \
+    {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v16_gfdlmpv3"}, \
+    {"case": "twpice", "suite": "SCM_GFS_v15p2_ntiedtke"}, \
+    {"case": "bomex", "suite": "SCM_GFS_v16_debug"}]
+
+#---------------------------------------------------------------------------------------------------------------------------------------------------------- +# Legacy suites, (w/ supported cases). +#---------------------------------------------------------------------------------------------------------------------------------------------------------- +suites_legacy_gfortran = [\ + "SCM_GFS_v17_p8", "SCM_HRRR", "SCM_RRFS_v1beta", "SCM_RAP", "SCM_GFS_v15p2", \ + "SCM_GFS_v17_p8_ps", "SCM_HRRR_ps", "SCM_RRFS_v1beta_ps", "SCM_RAP_ps", "SCM_GFS_v15p2_ps"] +run_list_legacy_gfortran = [\ {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v17_p8"}, \ {"case": "arm_sgp_summer_1997_A", "suite": "SCM_HRRR"}, \ {"case": "arm_sgp_summer_1997_A", "suite": "SCM_RRFS_v1beta"}, \ {"case": "arm_sgp_summer_1997_A", "suite": "SCM_RAP"}, \ {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v15p2"}, \ - {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v17_p8_ugwpv1_tempo"}, \ - {"case": "arm_sgp_summer_1997_A", "suite": "SCM_GFS_v16_gfdlmpv3"}, \ {"case": "twpice", "suite": "SCM_GFS_v17_p8"}, \ {"case": "twpice", "suite": "SCM_HRRR"}, \ {"case": "twpice", "suite": "SCM_RRFS_v1beta"}, \ {"case": "twpice", "suite": "SCM_RAP"}, \ {"case": "twpice", "suite": "SCM_GFS_v15p2"}, \ - {"case": "twpice", "suite": "SCM_GFS_v15p2_ntiedtke"}, \ {"case": "bomex", "suite": "SCM_GFS_v17_p8"}, \ {"case": "bomex", "suite": "SCM_HRRR"}, \ {"case": "bomex", "suite": "SCM_RRFS_v1beta"}, \ {"case": "bomex", "suite": "SCM_RAP"}, \ {"case": "bomex", "suite": "SCM_GFS_v15p2"}, \ - {"case": "bomex", "suite": "SCM_GFS_v16_debug"}, \ {"case": "astex", "suite": "SCM_GFS_v17_p8"}, \ {"case": "astex", "suite": "SCM_HRRR"}, \ {"case": "astex", "suite": "SCM_RRFS_v1beta"}, \ @@ -73,3 +152,20 @@ {"case": "LASSO_2016051812", "suite": "SCM_RRFS_v1beta"}, \ {"case": "LASSO_2016051812", "suite": "SCM_RAP"}, \ {"case": "LASSO_2016051812", "suite": "SCM_GFS_v15p2"}] +# +suites_legacy_ifx = suites_legacy_gfortran 
+run_list_legacy_ifx = run_list_legacy_gfortran + +#---------------------------------------------------------------------------------------------------------------------------------------------------------- +# Single precision supported suites, (w/ supported cases). +#---------------------------------------------------------------------------------------------------------------------------------------------------------- +suites_sp_gfortran = ["SCM_HRRR_gf", "SCM_HRRR_gf_ps"] +run_list_sp_gfortran = [\ + {"case": "arm_sgp_summer_1997_A", "suite": "SCM_HRRR_gf"}, \ + {"case": "twpice", "suite": "SCM_HRRR_gf"}, \ + {"case": "bomex", "suite": "SCM_HRRR_gf"}, \ + {"case": "astex", "suite": "SCM_HRRR_gf"}, \ + {"case": "LASSO_2016051812", "suite": "SCM_HRRR_gf"}] +# +suites_sp_ifx = suites_sp_gfortran +run_list_sp_ifx = run_list_sp_gfortran diff --git a/test/rt_test_cases_nvidia.py b/test/rt_test_cases_nvidia.py deleted file mode 100644 index 14473306c..000000000 --- a/test/rt_test_cases_nvidia.py +++ /dev/null @@ -1,9 +0,0 @@ -run_list = [\ - #---------------------------------------------------------------------------------------------------------------------------------------------- - # CCPP-SCM suites for use with Nvidia compilers - #---------------------------------------------------------------------------------------------------------------------------------------------- - {"case": "arm_sgp_summer_1997_A", "suite": "SCM_RAP"}, \ - {"case": "twpice", "suite": "SCM_RAP"}, \ - {"case": "bomex", "suite": "SCM_RAP"}, \ - {"case": "astex", "suite": "SCM_RAP"}, \ - {"case": "LASSO_2016051812", "suite": "SCM_RAP"}] diff --git a/test/rt_test_cases_sp.py b/test/rt_test_cases_sp.py deleted file mode 100644 index b1fa04ea4..000000000 --- a/test/rt_test_cases_sp.py +++ /dev/null @@ -1,10 +0,0 @@ -run_list = [\ - #---------------------------------------------------------------------------------------------------------------------------------------------- - # CCPP-SCM single 
precision supported suites - #---------------------------------------------------------------------------------------------------------------------------------------------- - {"case": "arm_sgp_summer_1997_A", "suite": "SCM_HRRR_gf"}, \ - {"case": "twpice", "suite": "SCM_HRRR_gf"}, \ - {"case": "bomex", "suite": "SCM_HRRR_gf"}, \ - {"case": "astex", "suite": "SCM_HRRR_gf"}, \ - {"case": "LASSO_2016051812", "suite": "SCM_HRRR_gf"}, \ - ]