diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 5698586e43..7ea285a6bc 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -64,3 +64,6 @@ e8fc526e0d7818d45f171488c78392c4ff63902a cdf40d265cc82775607a1bf25f5f527bacc97405 251e389b361ba673b508e07d04ddcc06b2681989 8ec50135eca1b99c8b903ecdaa1bd436644688bd +3b7a2876933263f8986e4069f5d23bd45635756f +3dd489af7ebe06566e2c6a1c7ade18550f1eb4ba +742cfa606039ab89602fde5fef46458516f56fd4 diff --git a/.github/ISSUE_TEMPLATE/03_documentation.md b/.github/ISSUE_TEMPLATE/03_documentation.md new file mode 100644 index 0000000000..3d423ec463 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/03_documentation.md @@ -0,0 +1,24 @@ +--- +name: Documentation +about: Something should be added to or fixed in the documentation + +--- + + +### What sort(s) of issue is this? +- [ ] Something is missing. +- [ ] Something is (or might be) incorrect. +- [ ] Something is confusing. +- [ ] Something is broken. + +### What part(s) of the documentation does this concern? +- [ ] [Technical Note](https://escomp.github.io/ctsm-docs/versions/master/html/tech_note/index.html) (science and design of the model) +- [ ] [User's Guide](https://escomp.github.io/ctsm-docs/versions/master/html/users_guide/index.html) (using the model and related tools) +- [ ] Somewhere else (e.g., README file, tool help text, or code comment): _Please specify_ +- [ ] I don't know + +### Describe the issue +A clear and concise description of what is missing or wrong. + +### Additional context (optional) +Add any other context or screenshots about the issue here. diff --git a/.github/ISSUE_TEMPLATE/03_other.md b/.github/ISSUE_TEMPLATE/03_other.md deleted file mode 100644 index 61898d3a75..0000000000 --- a/.github/ISSUE_TEMPLATE/03_other.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Other -about: Other issues (enhancement, cleanup, documentation, etc.) 
- ---- - - diff --git a/.github/ISSUE_TEMPLATE/04_other.md b/.github/ISSUE_TEMPLATE/04_other.md new file mode 100644 index 0000000000..f2cfcc7407 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/04_other.md @@ -0,0 +1,7 @@ +--- +name: Other +about: Other issues (enhancement, cleanup, etc.) + +--- + + diff --git a/.github/workflows/docker-image-build-publish.yml b/.github/workflows/docker-image-build-publish.yml index b52be7f031..9096434946 100644 --- a/.github/workflows/docker-image-build-publish.yml +++ b/.github/workflows/docker-image-build-publish.yml @@ -2,12 +2,12 @@ name: Build and publish ctsm-docs Docker image on: - # Run this whenever something gets pushed to master + # Run this whenever a change to certain files gets pushed to master push: branches: ['master'] paths: - - 'doc/ctsm-docs_container/Dockerfile' - - 'doc/ctsm-docs_container/requirements.txt' + - 'doc/ctsm-docs_container/**' + - '!doc/ctsm-docs_container/README.md' # Run this whenever it's manually called workflow_dispatch: @@ -31,7 +31,7 @@ jobs: env: REGISTRY: ${{ needs.build-image-and-test-docs.outputs.REGISTRY }} IMAGE_NAME: ${{ needs.build-image-and-test-docs.outputs.IMAGE_NAME }} - IMAGE_TAG: ${{ needs.build-image-and-test-docs.outputs.image_tag }} + VERSION_TAG: ${{ needs.build-image-and-test-docs.outputs.version_tag }} # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. permissions: @@ -61,6 +61,7 @@ jobs: # This step uses the `docker/build-push-action` action to build the image, based on the ctsm-docs `Dockerfile`. # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see [Usage](https://github.com/docker/build-push-action#usage) in the README of the `docker/build-push-action` repository. # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. 
+ # Note that we should avoid relying on the "latest" tag for anything, but it's good practice to have one. # v6.15.0 - name: Push Docker image id: push @@ -70,7 +71,9 @@ jobs: platforms: linux/amd64,linux/arm64 push: true load: false - tags: ${{ env.IMAGE_TAG }} + tags: | + ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest + ${{ env.VERSION_TAG }} labels: "" # This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see [Using artifact attestations to establish provenance for builds](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds). diff --git a/.github/workflows/docker-image-build.yml b/.github/workflows/docker-image-build.yml index e013b888a7..0ac43426a6 100644 --- a/.github/workflows/docker-image-build.yml +++ b/.github/workflows/docker-image-build.yml @@ -1,19 +1,24 @@ # Modified from https://docs.github.com/en/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions#publishing-a-package-using-an-action (last accessed 2025-05-09) name: Test building ctsm-docs Docker image and using it to build the docs -# Configures this workflow to run every time a change in the Docker container setup is pushed to the master branch +# Configures this workflow to run every time a change in the Docker container setup is pushed or included in a PR on: push: + # Run when a change to these files is pushed to any branch. Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if the listed files aren't changed. 
+ branches: ['*'] paths: - 'doc/ctsm-docs_container/**' + - '!doc/ctsm-docs_container/README.md' - '.github/workflows/docker-image-ctsm-docs-build.yml' - - '.github/workflows/docker-image-build-common.yml' + - '.github/workflows/docker-image-common.yml' pull_request: + # Run on pull requests that change the listed files paths: - 'doc/ctsm-docs_container/**' + - '!doc/ctsm-docs_container/README.md' - '.github/workflows/docker-image-ctsm-docs-build.yml' - - '.github/workflows/docker-image-build-common.yml' + - '.github/workflows/docker-image-common.yml' workflow_dispatch: diff --git a/.github/workflows/docker-image-common.yml b/.github/workflows/docker-image-common.yml index 40cbd90ac3..224e06e2dc 100644 --- a/.github/workflows/docker-image-common.yml +++ b/.github/workflows/docker-image-common.yml @@ -9,14 +9,15 @@ on: IMAGE_NAME: description: "Docker image name" value: ${{ jobs.build-image-and-test-docs.outputs.IMAGE_NAME }} - image_tag: - description: "First image tag" - value: ${{ jobs.build-image-and-test-docs.outputs.image_tag }} + version_tag: + description: "Version tag from Dockerfile" + value: ${{ jobs.check-version.outputs.VERSION_TAG }} -# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds. +# Defines custom environment variables for the workflow. env: REGISTRY: ghcr.io - IMAGE_NAME: ${{ github.repository }}/ctsm-docs + IMAGE_BASENAME: ctsm-docs + REPO: ${{ github.repository }} # There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu. jobs: @@ -25,8 +26,7 @@ jobs: # Variables that might be needed by the calling workflow outputs: REGISTRY: ${{ env.REGISTRY }} - IMAGE_NAME: ${{ env.IMAGE_NAME }} - image_tag: ${{ steps.set-image-tag.outputs.IMAGE_TAG }} + IMAGE_NAME: ${{ steps.set-image-name.outputs.IMAGE_NAME }} # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. 
permissions: contents: read @@ -39,6 +39,14 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 + # Ensure that the repository part of IMAGE_NAME is lowercase. This is needed because Docker requires image names to be entirely lowercase. Note that the *image name* part, set as IMAGE_BASENAME in the env block above, is *not* converted. This will cause the check-version job to fail if the IMAGE_BASENAME contains capitals. We don't want to silently fix that here; rather, we require the user to specify a lowercase IMAGE_BASENAME. + - name: Get image name with lowercase repo + id: set-image-name + run: | + lowercase_repo=$(echo $REPO | tr '[:upper:]' '[:lower:]') + echo "IMAGE_NAME=${lowercase_repo}/${IMAGE_BASENAME}" >> $GITHUB_ENV + echo "IMAGE_NAME=${lowercase_repo}/${IMAGE_BASENAME}" >> $GITHUB_OUTPUT + # Uses the `docker/login-action` action to log in to the Container registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here. - name: Log in to the Container registry uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 @@ -52,7 +60,7 @@ jobs: id: meta uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + images: ${{ env.REGISTRY }}/${{ steps.set-image-name.outputs.IMAGE_NAME }} # This step uses the `docker/build-push-action` action to build the image, based on the ctsm-docs `Dockerfile`. # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see [Usage](https://github.com/docker/build-push-action#usage) in the README of the `docker/build-push-action` repository. 
@@ -75,9 +83,16 @@ jobs: - name: Set image tag for docs build id: set-image-tag run: | - echo "IMAGE_TAG=$(echo '${{ steps.meta.outputs.tags }}' | cut -d',' -f1)" >> $GITHUB_ENV - echo "IMAGE_TAG=$(echo '${{ steps.meta.outputs.tags }}' | cut -d',' -f1)" >> $GITHUB_OUTPUT + echo "IMAGE_TAG=$(echo '${{ steps.meta.outputs.tags }}' | head -n 1 | cut -d',' -f1)" >> $GITHUB_ENV - name: Build docs using container id: build-docs run: | cd doc && ./build_docs -b ${PWD}/_build -c -d -i $IMAGE_TAG + + + check-version: + needs: build-image-and-test-docs + uses: ./.github/workflows/docker-image-get-version.yml + with: + registry: ${{ needs.build-image-and-test-docs.outputs.REGISTRY }} + image_name: ${{ needs.build-image-and-test-docs.outputs.IMAGE_NAME }} diff --git a/.github/workflows/docker-image-get-version.yml b/.github/workflows/docker-image-get-version.yml new file mode 100644 index 0000000000..1a4ceb87b0 --- /dev/null +++ b/.github/workflows/docker-image-get-version.yml @@ -0,0 +1,72 @@ +name: Get and check version specified in a Dockerfile + +on: + workflow_call: + inputs: + registry: + required: true # Require any workflows calling this one to provide input + type: string + default: 'ghcr.io' # Provide default so this workflow works standalone too + image_name: + required: true # Require any workflows calling this one to provide input + type: string + default: 'escomp/ctsm/ctsm-docs' # Provide default so this workflow works standalone too + outputs: + VERSION_TAG: + description: "Tag to be pushed to container registry" + value: ${{ jobs.get-check-version.outputs.VERSION_TAG }} + workflow_dispatch: + inputs: + registry: + description: 'Container registry' + required: false + type: string + default: 'ghcr.io' + image_name: + description: 'Image name' + required: false + type: string + default: 'escomp/ctsm/ctsm-docs' + +# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu. 
+jobs: + get-check-version: + name: Get version number from Dockerfile and check it + runs-on: ubuntu-latest + outputs: + VERSION_TAG: ${{ steps.get-check-version.outputs.version_tag }} + # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. + permissions: + contents: read + packages: read + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Get version number from Dockerfile and check it + id: get-check-version + run: | + set -e + set -o pipefail + set -u + VERSION="$(doc/ctsm-docs_container/get_version.sh)" + VERSION_TAG="${{ inputs.registry }}/${{ inputs.image_name }}:${VERSION}" + + # Store the manifest inspect result and output + set +e + INSPECT_RESULT="$(docker manifest inspect "$VERSION_TAG" 2>&1)" + INSPECT_STATUS=$? + set -e + + if [[ "${INSPECT_RESULT}" == *"schemaVersion"* ]]; then + echo "Tag $VERSION_TAG already exists!" >&2 + exit 123 + elif [[ "${INSPECT_RESULT}" != "manifest unknown" ]]; then + # "manifest unknown" means the tag doesn't exist, which is what we want + echo -e "Error checking manifest for $VERSION_TAG:\n${INSPECT_RESULT}" >&2 + exit $INSPECT_STATUS + fi + + echo "Setting version_tag to $VERSION_TAG" + echo "version_tag=$VERSION_TAG" >> $GITHUB_OUTPUT diff --git a/.github/workflows/docs-ctsm_pylib.yml b/.github/workflows/docs-ctsm_pylib.yml index e4efc973a2..1e3412475f 100644 --- a/.github/workflows/docs-ctsm_pylib.yml +++ b/.github/workflows/docs-ctsm_pylib.yml @@ -2,10 +2,13 @@ name: Test building docs with ctsm_pylib on: push: + # Run when a change to these files is pushed to any branch. Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if the listed files aren't changed. 
+ branches: ['*'] paths: - 'python/conda_env_ctsm_py.txt' pull_request: + # Run on pull requests that change the listed files paths: - 'python/conda_env_ctsm_py.txt' diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 8cc1b7c7c2..068d75da8c 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -3,12 +3,21 @@ name: Test building docs when they're updated on: push: + # Run when a change to these files is pushed to any branch. Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if the listed files aren't changed. + branches: ['*'] paths: - 'doc/**' + - '!doc/*ChangeLog*' + - '!doc/*ChangeSum*' + - '!doc/UpdateChangelog.pl' pull_request: + # Run on pull requests that change the listed files paths: - 'doc/**' + - '!doc/*ChangeLog*' + - '!doc/*ChangeSum*' + - '!doc/UpdateChangelog.pl' workflow_dispatch: diff --git a/.github/workflows/fleximod_test.yaml b/.github/workflows/fleximod_test.yaml index 7f3e5a1404..8680541676 100644 --- a/.github/workflows/fleximod_test.yaml +++ b/.github/workflows/fleximod_test.yaml @@ -3,7 +3,11 @@ name: git-fleximod test # Test git-fleximod update and cleanliness # Based closely on workflow from CESM repo # -on: [push, pull_request] +on: + push: + # Run when a change to any is pushed to any branch. Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if no files are changed. + branches: ['*'] + pull_request: jobs: fleximod-test: diff --git a/.github/workflows/formatting_python.yml b/.github/workflows/formatting_python.yml index 6fffd4261b..131e44a7af 100644 --- a/.github/workflows/formatting_python.yml +++ b/.github/workflows/formatting_python.yml @@ -2,12 +2,15 @@ name: Check Python formatting on: push: + # Run when a change to these files is pushed to any branch. Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if the listed files aren't changed. 
+ branches: ['*'] paths: - 'python/**' - 'cime_config/SystemTests/**' - 'cime_config/buildlib/**' - 'cime_config/buildnml/**' pull_request: + # Run on pull requests that change the listed files paths: - 'python/**' - 'cime_config/SystemTests/**' diff --git a/.gitignore b/.gitignore index 278c0957f0..41ab22b198 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ *.nc # but don't ignore netcdf files here: !/python/ctsm/test/testinputs/*.nc +!/python/ctsm/test/testinputs/**/*.nc # editor files *.swp diff --git a/doc/ChangeLog b/doc/ChangeLog index 3708a3319d..2b89a435f0 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,73 @@ =============================================================== +Tag name: ctsm5.3.047 +Originator(s): samrabin (Sam Rabin, UCAR/TSS) +Date: Mon May 26 16:30:10 MDT 2025 +One-line Summary: Merge b4b-dev to master + +Purpose and description of changes +---------------------------------- + +Merge b4b-dev to master. Includes PRs: +- [Pull Request #3056: QOL improvements in mksurfdata_esmf makefile by samsrabin](https://github.com/ESCOMP/CTSM/pull/3056) +- [Pull Request #3100: Fix Longitude comparison error for regional subset_data by samsrabin](https://github.com/ESCOMP/CTSM/pull/3100) +- [Pull Request #3122: Fix a couple of things in documentation conf.py by samsrabin](https://github.com/ESCOMP/CTSM/pull/3122) +- [Pull Request #3128: Fix ctsm-docs container version tagging by samsrabin](https://github.com/ESCOMP/CTSM/pull/3128) +- [Pull Request #3138: Don't build docs for ChangeLog/Sum by samsrabin](https://github.com/ESCOMP/CTSM/pull/3138) +- [Pull Request #3143: Don't run workflows on plain tag creation by samsrabin](https://github.com/ESCOMP/CTSM/pull/3143) +- [Pull Request #3144: Add docs issue template by samsrabin](https://github.com/ESCOMP/CTSM/pull/3144) + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly 
for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + +[ ] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- + +List of CTSM issues fixed (include CTSM Issue # and description): +- [Issue #3093: subset_data: Longitude comparison error](https://github.com/ESCOMP/CTSM/issues/3093) +- [Issue #3126: Increment docs container version](https://github.com/ESCOMP/CTSM/issues/3126) +- [Issue #3142: Workflows run on new tags even w/o file changes](https://github.com/ESCOMP/CTSM/issues/3142) + + +Testing summary: +---------------- + + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] + + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - PASS + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + derecho - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + + +Other details +------------- + +Pull Requests that document the changes (include PR ids): +- [Pull Request #3154: Merge b4b-dev 2025-05-24 by samsrabin](https://github.com/ESCOMP/CTSM/pull/3154) + +=============================================================== +=============================================================== Tag name: ctsm5.3.046 Originator(s): rgknox (Ryan Knox) Date: Mon 26 May 2025 02:45:52 AM MDT diff --git a/doc/ChangeSum b/doc/ChangeSum index 205abb7e50..c691363c34 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.3.047 samrabin 05/26/2025 Merge b4b-dev to master ctsm5.3.046 rgknox 05/26/2025 For FATES set itype to ispval and a few unused variables to nan to help prevent future problems ctsm5.3.045 glemieux 
05/20/2025 FATES default parameter update for API 40 ctsm5.3.044 slevis 05/14/2025 Introduce time-evolving LEAFCN_TARGET as function of leafcn param diff --git a/doc/ctsm-docs_container/Dockerfile b/doc/ctsm-docs_container/Dockerfile index 90e3a6160e..1a0ae0c9f8 100644 --- a/doc/ctsm-docs_container/Dockerfile +++ b/doc/ctsm-docs_container/Dockerfile @@ -28,4 +28,5 @@ WORKDIR /home/user CMD ["/bin/bash", "-l"] LABEL org.opencontainers.image.title="Container for building CTSM documentation" -LABEL org.opencontainers.image.source=https://github.com/ESCOMP/CTSM \ No newline at end of file +LABEL org.opencontainers.image.source=https://github.com/ESCOMP/CTSM +LABEL org.opencontainers.image.version="v1.0.2" diff --git a/doc/ctsm-docs_container/README.md b/doc/ctsm-docs_container/README.md index bdd4eb3fa7..0af0ae71c0 100644 --- a/doc/ctsm-docs_container/README.md +++ b/doc/ctsm-docs_container/README.md @@ -7,7 +7,7 @@ This Readme tells you how to update the ctsm-docs Docker container if a need to ## Building -If you actually want to build the container, make sure Docker is running. In the Docker Desktop settings, make sure you've enabled the [`continerd` image store](https://docs.docker.com/desktop/features/containerd/), which allows multi-platform builds. Then do: +If you actually want to build the container, make sure Docker is running. In the Docker Desktop settings, make sure you've enabled the [`containerd` image store](https://docs.docker.com/desktop/features/containerd/), which allows multi-platform builds. Then do: ```shell docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/escomp/ctsm/ctsm-docs . ``` @@ -21,7 +21,26 @@ ghcr.io/escomp/ctsm/ctsm-docs latest ab51446519a4 3 seconds ago 233M To test, you can tell `build_docs` to use your new version by adding `--docker-image IMAGE_ID` to your call, where in the example above `IMAGE_ID` is `ab51446519a4`. 
-## Publishing +## Publishing automatically + +The `docker-image-build-publish.yml` workflow makes it so that new versions of the container will be published to the GitHub Container Registry whenever changes to the container setup are merged to CTSM's `master` branch. This will fail (as will a similar, no-publish workflow that happens on PRs) unless you specify exactly one new version number in the Dockerfile. This version number will be used as a tag that can be referenced by, e.g., doc-builder. + +Lots of Docker instructions tell you to use the `latest` tag, and indeed the workflow will add that tag automatically. However, actually _using_ `latest` can lead to support headaches as users think they have the right version but actually don't. Instead, you'll make a new version number incremented from the [previous one](https://github.com/ESCOMP/CTSM/pkgs/container/ctsm%2Fctsm-docs/versions), in the `vX.Y.Z` format. + +Here's where you need to specify the version number in the Dockerfile: +```docker +LABEL org.opencontainers.image.version="vX.Y.Z" +``` +The string there can technically be anything as long as (a) it starts with a lowercase `v` and (b) it hasn't yet been used on a published version of the container. + +You can check the results of the automatic publication on the [container's GitHub page](https://github.com/ESCOMP/CTSM/pkgs/container/ctsm%2Fctsm-docs). + +### Updating doc-builder +After the new version of the container is published, you will probably want to tell [doc-builder](https://github.com/ESMCI/doc-builder) to use the new one. Open a PR where you change the tag (the part after the colon) in the definition of `DEFAULT_DOCKER_IMAGE` in `doc_builder/build_commands.py`. Remember, **use the version number**, not "latest". + +## Publishing manually (NOT recommended) + +It's vastly preferable to let GitHub build and publish the new image using the `docker-image-build-publish.yml` workflow as described above. 
However, if you need to publish manually for some reason, here's how. ### Pushing to GitHub Container Registry If you want to publish the container, you first need a [GitHub Personal Access Token (Classic)](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#personal-access-tokens-classic) with the `write:packages` permissions. You can see your existing PAT(C)s [here](https://github.com/settings/tokens). If you don't have one with the right permissions, [this link](https://github.com/settings/tokens/new?scopes=write:packages) should start the setup process for you. @@ -31,7 +50,7 @@ Once you have a PAT(C), you can authenticate in your shell session like so: ```shell echo YOUR_PERSONAL_ACCESS_TOKEN_CLASSIC | docker login ghcr.io -u YOUR_USERNAME --password-stdin ``` -The leading spaces are intended to prevent this command, which contains your secret PAT(C), from being written to your shell's history file. That at least works in bash... sometimes. To be extra safe, in bash you can do `history -c` and it will clear the session's history entirely. +The leading spaces are intended to prevent this command, which contains your secret PAT(C), from being written to your shell's history file. That at least works in bash... sometimes. To be extra safe, in bash you can do `history -c` and it will clear your entire bash history. That can be pretty disruptive, but fortunately you should only need to authenticate once. ### Tagging You'll next need to tag the image. Lots of Docker instructions tell you to use the `latest` tag, and Docker may actually do that for you. However, `latest` can lead to support headaches as users think they have the right version but actually don't. Instead, you'll make a new version number incremented from the [previous one](https://github.com/ESCOMP/CTSM/pkgs/container/ctsm%2Fctsm-docs/versions), in the `vX.Y.Z` format. 
@@ -49,7 +68,7 @@ docker push ghcr.io/escomp/ctsm/ctsm-docs:vX.Y.Z Then browse to the [container's GitHub page](https://github.com/ESCOMP/CTSM/pkgs/container/ctsm%2Fctsm-docs) to make sure this all worked and the image is public. ### Updating doc-builder -Since you've updated the container, you will probably want to tell [doc-builder](https://github.com/ESMCI/doc-builder) to use the new one. Open a PR where you change the tag (the part after the colon) in the definition of `DEFAULT_DOCKER_IMAGE` in `doc_builder/build_commands.py`. Remember, **use the version number**, not "latest". +See "Updating doc-builder" in the "Publishing automatically" section above. ## See also diff --git a/doc/ctsm-docs_container/get_version.sh b/doc/ctsm-docs_container/get_version.sh new file mode 100755 index 0000000000..485e720eff --- /dev/null +++ b/doc/ctsm-docs_container/get_version.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +set -e +cd doc/ctsm-docs_container + +# Extract version from Dockerfile +version="$(grep "org.opencontainers.image.version" Dockerfile | cut -d'"' -f2 | sort |uniq)" +n_found=$(echo $version | wc -w) + +# Error if anything other than exactly one version tag was found +if [[ ${n_found} -gt 1 ]]; then + echo -e "Multiple version tags found:\n${version}" >&2 + exit 2 +elif [[ ${n_found} -lt 1 ]]; then + echo "Expected 1 but found 0 version tags" >&2 + exit -1 +fi + +# Error if version doesn't start with v +if [[ "${version}" != "v"* ]]; then + echo "Version '${version}' doesn't start with v" >&2 + exit 22 +fi + +echo ${version} + +exit 0 diff --git a/doc/source/conf.py b/doc/source/conf.py index fc88b1e359..9678849187 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -67,7 +67,7 @@ version_label = 'the latest development code' # List of versions to populate version picker dropdown menu -version_list = ["latest", "release-clm5.0"] +version_list = ["master", "release-clm5.0"] # version_label is not a standard sphinx variable, so we need some custom rst to 
allow # pages to use it. We need a separate replacement for the bolded version because it @@ -197,4 +197,5 @@ def setup(app): html_context["versions"] = [] for this_version in version_list: - html_context["versions"].append([this_version, f"../../../versions/{this_version}/html"]) + # Note: with 4 pardir operators, link in version selector will be broken locally but work on the Web 🤷 + html_context["versions"].append([this_version, f"../../../../versions/{this_version}/html"]) diff --git a/python/Makefile b/python/Makefile index 9645242111..3d607cab43 100644 --- a/python/Makefile +++ b/python/Makefile @@ -19,7 +19,7 @@ ifneq ($(verbose), not-set) endif PYLINT=pylint -PYLINT_ARGS=-j 4 --rcfile=ctsm/.pylintrc --fail-under=0 +PYLINT_ARGS=-j 4 --rcfile=ctsm/.pylintrc PYLINT_SRC = \ ctsm # NOTE: These don't pass pylint checking and should be added when we put into effort to get them to pass diff --git a/python/ctsm/longitude.py b/python/ctsm/longitude.py index bb04e2b98f..8afa731131 100644 --- a/python/ctsm/longitude.py +++ b/python/ctsm/longitude.py @@ -3,6 +3,8 @@ """ import logging +from argparse import ArgumentTypeError +import numpy as np logger = logging.getLogger(__name__) @@ -11,16 +13,22 @@ def _check_lon_type_180(lon_in): """ Checks value range of longitude with type 180 """ - if not -180 <= lon_in <= 180: - raise ValueError(f"lon_in needs to be in the range [-180, 180]: {lon_in}") + lon_min = np.min(lon_in) + lon_max = np.max(lon_in) + for lon in [lon_min, lon_max]: + if not -180 <= lon <= 180: + raise ValueError("(All values of) lon_in must be in the range [-180, 180]") def _check_lon_type_360(lon_in): """ Checks value range of longitude with type 360 """ - if not 0 <= lon_in <= 360: - raise ValueError(f"lon_in needs to be in the range [0, 360]: {lon_in}") + lon_min = np.min(lon_in) + lon_max = np.max(lon_in) + for lon in [lon_min, lon_max]: + if not 0 <= lon <= 360: + raise ValueError("(All values of) lon_in must be in the range [0, 360]") def 
_check_lon_value_given_type(lon_in, lon_type_in): @@ -50,6 +58,31 @@ def _convert_lon_type_180_to_360(lon_in): return lon_out +def _detect_lon_type(lon_in): + """ + Detect longitude type of a given numeric. If lon_in contains more than one number (as in a list + or Numpy array), this function will assume all members are of the same type if (a) there is at + least one unambiguous member and (b) all unambiguous members are of the same type. + """ + lon_min = np.min(lon_in) + lon_max = np.max(lon_in) + if lon_min < -180: + raise ValueError(f"(Minimum) longitude < -180: {lon_min}") + if lon_max > 360: + raise ValueError(f"(Maximum) longitude > 360: {lon_max}") + min_type_180 = lon_min < 0 + max_type_360 = lon_max > 180 + if min_type_180 and max_type_360: + raise RuntimeError("Longitude array contains values of both types 180 and 360") + if not min_type_180 and not max_type_360: + raise ArgumentTypeError("Longitude(s) ambiguous; could be type 180 or 360") + if min_type_180: + lon_type = 180 + else: + lon_type = 360 + return lon_type + + def _convert_lon_type_360_to_180(lon_in): """ Convert a longitude from type 360 to type 180 @@ -70,6 +103,22 @@ def _convert_lon_type_360_to_180(lon_in): return lon_out +def check_other_is_lontype(other): + """ + Used in comparison operators to throw an error if the "other" object being compared isn't also + a Longitude object. This makes it so that comparing longitudes requires that both sides of the + comparison must be Longitude objects and thus must have a specified longitude type (180 or 360). + + We could try to coerce non-Longitude `other` to Longitude, but that might result in + situations where tests think everything works but code will fail if `other` is + ambiguous. 
+ """ + if not isinstance(other, Longitude): + raise TypeError( + f"Comparison not supported between instances of 'Longitude' and '{type(other)}'" + ) + + class Longitude: """ A class to keep track of a longitude and its type @@ -93,6 +142,41 @@ def __init__(self, lon, lon_type): self._lon = lon self._lon_type = lon_type + def _check_lons_same_type(self, other): + """ + If you're comparing two Longitudes of different types in different hemispheres, then + `lon1 > lon2` and `lon2 < lon1` will incorrectly give different answers! We could make it so + that this doesn't fail as long as symmetricality isn't violated, but that might lead to + unexpected failures in practice. + """ + if self.lon_type() != other.lon_type(): + raise TypeError("Comparison not supported between Longitudes of different types") + + # __eq__ makes it so that == and != both work. + def __eq__(self, other): + check_other_is_lontype(other) + return self._lon == other.get(self._lon_type) + + def __lt__(self, other): + check_other_is_lontype(other) + self._check_lons_same_type(other) + return self._lon < other._lon + + def __gt__(self, other): + check_other_is_lontype(other) + self._check_lons_same_type(other) + return self._lon > other._lon + + def __le__(self, other): + check_other_is_lontype(other) + self._check_lons_same_type(other) + return self._lon <= other._lon + + def __ge__(self, other): + check_other_is_lontype(other) + self._check_lons_same_type(other) + return self._lon >= other._lon + def get(self, lon_type_out): """ Get the longitude value, converting longitude type if needed @@ -104,3 +188,9 @@ def get(self, lon_type_out): if lon_type_out == 360: return _convert_lon_type_180_to_360(self._lon) raise RuntimeError(f"Add handling for lon_type_out {lon_type_out}") + + def lon_type(self): + """ + Getter method for self._lon_type + """ + return self._lon_type diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 
a0b746d47d..94f6011569 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -19,6 +19,7 @@ from ctsm.utils import add_tag_to_filename from ctsm.utils import abort from ctsm.config_utils import check_lon1_lt_lon2 +from ctsm.longitude import Longitude, _detect_lon_type logger = logging.getLogger(__name__) @@ -132,6 +133,33 @@ def __init__( self.ni = None self.nj = None + def _subset_lon_lat(self, x_dim, y_dim, f_in): + """ + subset longitude and latitude arrays + """ + lat = f_in["lat"] + lon = f_in["lon"] + + # Detect longitude type (180 or 360) of input file, throwing a helpful error if it can't be + # determined. + f_lon_type = _detect_lon_type(lon) + lon1_type = self.lon1.lon_type() + lon2_type = self.lon2.lon_type() + if lon1_type != lon2_type: + raise RuntimeError(f"lon1 type ({lon1_type}) doesn't match lon2 type ({lon2_type})") + if f_lon_type != lon1_type: + # This may be overly strict; we might want to allow conversion to lon1 type. 
+ raise RuntimeError( + f"File lon type ({f_lon_type}) doesn't match boundary lon type ({lon1_type})" + ) + + # Convert input file longitudes to Longitude class, then trim where it's in region bounds + lon = Longitude(lon, lon1_type) + xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] + yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] + f_out = f_in.isel({y_dim: yind, x_dim: xind}) + return f_out + def create_tag(self): """ Create a tag for a region which is either the region name @@ -192,14 +220,12 @@ def create_domain_at_reg(self, indir, file): logger.info("Creating domain file at region: %s", self.tag) # create 1d coordinate variables to enable sel() method - f_in = self.create_1d_coord(fdomain_in, "xc", "yc", "ni", "nj") - lat = f_in["lat"] - lon = f_in["lon"] + x_dim = "ni" + y_dim = "nj" + f_in = self.create_1d_coord(fdomain_in, "xc", "yc", x_dim, y_dim) # subset longitude and latitude arrays - xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f_out = f_in.isel(nj=yind, ni=xind) + f_out = self._subset_lon_lat(x_dim, y_dim, f_in) # update attributes self.update_metadata(f_out) @@ -240,14 +266,12 @@ def create_surfdata_at_reg(self, indir, file, user_mods_dir, specify_fsurf_out): logger.info("fsurf_out: %s", os.path.join(self.out_dir, fsurf_out)) # create 1d coordinate variables to enable sel() method - f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") - lat = f_in["lat"] - lon = f_in["lon"] + x_dim = "lsmlon" + y_dim = "lsmlat" + f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", x_dim, y_dim) # subset longitude and latitude arrays - xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f_out = f_in.isel(lsmlat=yind, lsmlon=xind) + f_out = self._subset_lon_lat(x_dim, y_dim, f_in) # update attributes self.update_metadata(f_out) @@ -304,14 +328,12 @@ def 
create_landuse_at_reg(self, indir, file, user_mods_dir): logger.info("fluse_out: %s", os.path.join(self.out_dir, fluse_out)) # create 1d coordinate variables to enable sel() method - f_in = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") - lat = f_in["lat"] - lon = f_in["lon"] + x_dim = "lsmlon" + y_dim = "lsmlat" + f_in = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", x_dim, y_dim) # subset longitude and latitude arrays - xind = np.where((lon >= self.lon1) & (lon <= self.lon2))[0] - yind = np.where((lat >= self.lat1) & (lat <= self.lat2))[0] - f_out = f_in.isel(lsmlat=yind, lsmlon=xind) + f_out = self._subset_lon_lat(x_dim, y_dim, f_in) # update attributes self.update_metadata(f_out) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index e4a323be53..81f1f703f3 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -69,7 +69,7 @@ from ctsm.path_utils import path_to_ctsm_root from ctsm.utils import abort from ctsm.config_utils import check_lon1_lt_lon2 -from ctsm.longitude import Longitude +from ctsm.longitude import Longitude, _detect_lon_type # -- import ctsm logging flags from ctsm.ctsm_logging import ( @@ -626,46 +626,23 @@ def setup_files(args, defaults, cesmroot): logger.info("clmforcingindir does not exist: %s", clmforcingindir) abort("inputdata directory does not exist") + file_dict = {"main_dir": clmforcingindir} + # DATM data # TODO Issue #2960: Make datm_type a user option at the command # line. 
For reference, this option affects three .cfg files: # tools/site_and_regional/default_data_1850.cfg # tools/site_and_regional/default_data_2000.cfg # python/ctsm/test/testinputs/default_data.cfg - datm_type = "datm_crujra" # also available: datm_type = "datm_gswp3" - dir_output_datm = "datmdata" - dir_input_datm = os.path.join(clmforcingindir, defaults.get(datm_type, "dir")) if args.create_datm: + datm_type = "datm_crujra" # also available: datm_type = "datm_gswp3" + dir_output_datm = "datmdata" + dir_input_datm = os.path.join(clmforcingindir, defaults.get(datm_type, "dir")) if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): os.mkdir(os.path.join(args.out_dir, dir_output_datm)) logger.info("dir_input_datm : %s", dir_input_datm) logger.info("dir_output_datm: %s", os.path.join(args.out_dir, dir_output_datm)) - - # if the crop flag is on - we need to use a different land use and surface data file - num_pft = determine_num_pft(args.crop_flag) - - fsurf_in = defaults.get("surfdat", "surfdat_" + num_pft + "pft") - fluse_in = defaults.get("landuse", "landuse_" + num_pft + "pft") - if args.out_surface: - fsurf_out = args.out_surface - else: - fsurf_out = None - - file_dict = { - "main_dir": clmforcingindir, - "fdomain_in": defaults.get("domain", "file"), - "fsurf_dir": os.path.join( - clmforcingindir, - os.path.join(defaults.get("surfdat", "dir")), - ), - "fluse_dir": os.path.join( - clmforcingindir, - os.path.join(defaults.get("landuse", "dir")), - ), - "fsurf_in": fsurf_in, - "fsurf_out": fsurf_out, - "fluse_in": fluse_in, - "datm_tuple": DatmFiles( + file_dict["datm_tuple"] = DatmFiles( dir_input_datm, dir_output_datm, defaults.get(datm_type, "domain"), @@ -678,8 +655,30 @@ def setup_files(args, defaults, cesmroot): defaults.get(datm_type, "solarname"), defaults.get(datm_type, "precname"), defaults.get(datm_type, "tpqwname"), - ), - } + ) + + # if the crop flag is on - we need to use a different land use and surface data file + num_pft = 
determine_num_pft(args.crop_flag) + + if args.create_domain: + file_dict["fdomain_in"] = defaults.get("domain", "file") + if args.create_surfdata: + file_dict["fsurf_dir"] = os.path.join( + clmforcingindir, + os.path.join(defaults.get("surfdat", "dir")), + ) + file_dict["fsurf_in"] = defaults.get("surfdat", "surfdat_" + num_pft + "pft") + if args.out_surface: + fsurf_out = args.out_surface + else: + fsurf_out = None + file_dict["fsurf_out"] = fsurf_out + if args.create_landuse: + file_dict["fluse_in"] = defaults.get("landuse", "landuse_" + num_pft + "pft") + file_dict["fluse_dir"] = os.path.join( + clmforcingindir, + os.path.join(defaults.get("landuse", "dir")), + ) return file_dict @@ -821,17 +820,6 @@ def subset_region(args, file_dict: dict): logger.info("Successfully ran script for a regional case.") -def _detect_lon_type(lon_in): - if lon_in < 0: - lon_type = 180 - elif lon_in > 180: - lon_type = 360 - else: - msg = "When providing an ambiguous longitude, you must specify --lon-type 180 or 360" - raise argparse.ArgumentTypeError(msg) - return lon_type - - def process_args(args): """ Process arguments after parsing diff --git a/python/ctsm/test/test_sys_subset_data.py b/python/ctsm/test/test_sys_subset_data.py new file mode 100644 index 0000000000..bc73c8c41d --- /dev/null +++ b/python/ctsm/test/test_sys_subset_data.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python3 +""" +System tests for subset_data + +You can run this by: + python -m unittest test_sys_subset_data.py +""" + +import unittest +import os +import sys +import tempfile +import inspect +import xarray as xr + +# -- add python/ctsm to path (needed if we want to run the test stand-alone) +_CTSM_PYTHON = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) +sys.path.insert(1, _CTSM_PYTHON) + +# pylint: disable=wrong-import-position +from ctsm import unit_testing +from ctsm import subset_data +from ctsm.utils import find_one_file_matching_pattern + + +class 
TestSubsetDataSys(unittest.TestCase): + """ + Basic class for testing subset_data.py. + """ + + def setUp(self): + self.temp_dir_out = tempfile.TemporaryDirectory() + self.temp_dir_umd = tempfile.TemporaryDirectory() + self.inputdata_dir = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) + + def tearDown(self): + self.temp_dir_out.cleanup() + self.temp_dir_umd.cleanup() + + def _check_result_file_matches_expected(self, expected_output_files): + """ + Loop through a list of output files, making sure they match what we expect. + """ + all_files_present_and_match = True + for basename in expected_output_files: + result_file = os.path.join(self.temp_dir_out.name, basename) + result_file = find_one_file_matching_pattern(result_file) + expected_file = os.path.join( + os.path.dirname(__file__), + "testinputs", + "expected_result_files", + inspect.stack()[1][3], # Name of calling function (i.e., test name) + basename, + ) + expected_file = find_one_file_matching_pattern(expected_file) + ds_result = xr.open_dataset(result_file) + ds_expected = xr.open_dataset(expected_file) + if not ds_result.equals(ds_expected): + print("Result differs from expected: " + basename) + print(ds_result) + print(ds_expected) + all_files_present_and_match = False + return all_files_present_and_match + + def test_subset_data_reg_amazon(self): + """ + Test subset_data for Amazon region + """ + cfg_file = os.path.join( + self.inputdata_dir, + "ctsm", + "test", + "testinputs", + "subset_data_amazon.cfg", + ) + print(cfg_file) + sys.argv = [ + "subset_data", + "region", + "--lat1", + "-12", + "--lat2", + "-7", + "--lon1", + "291", + "--lon2", + "299", + "--reg", + "TMP", + "--create-mesh", + "--create-domain", + "--create-surface", + "--surf-year", + "2000", + "--create-user-mods", + "--outdir", + self.temp_dir_out.name, + "--user-mods-dir", + self.temp_dir_umd.name, + "--inputdata-dir", + self.inputdata_dir, + "--cfg-file", + cfg_file, + "--overwrite", + ] + subset_data.main() + + # 
Loop through all the output files, making sure they match what we expect. + daystr = "[0-9][0-9][0-9][0-9][0-9][0-9]" # 6-digit day code, yymmdd + expected_output_files = [ + f"domain.lnd.5x5pt-amazon_navy_TMP_c{daystr}_ESMF_UNSTRUCTURED_MESH.nc", + f"domain.lnd.5x5pt-amazon_navy_TMP_c{daystr}.nc", + f"surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c{daystr}.nc", + ] + self.assertTrue(self._check_result_file_matches_expected(expected_output_files)) + + def test_subset_data_reg_infile_detect360(self): + """ + Test subset_data for region with ambiguous longitudes. We specify the longitude type for + lon1 and lon2 but not for the input data files. This should still work as long as the input + data file longitude type is detectable and matches --lon-type. + """ + sys.argv = [ + "subset_data", + "region", + "--lat1", + "-12", + "--lat2", + "-7", + "--lon1", + "15", + "--lon2", + "23", + "--lon-type", + "360", + "--reg", + "TMP", + "--create-mesh", + "--create-domain", + "--create-surface", + "--surf-year", + "2000", + "--create-user-mods", + "--outdir", + self.temp_dir_out.name, + "--user-mods-dir", + self.temp_dir_umd.name, + "--overwrite", + ] + subset_data.main() + + def test_subset_data_reg_infile_detect180_error(self): + """ + Specifying --lon-type 180 but an input file of type 360 should error + """ + sys.argv = [ + "subset_data", + "region", + "--lat1", + "-12", + "--lat2", + "-7", + "--lon1", + "15", + "--lon2", + "23", + "--lon-type", + "180", + "--reg", + "TMP", + "--create-mesh", + "--create-domain", + "--create-surface", + "--surf-year", + "2000", + "--create-user-mods", + "--outdir", + self.temp_dir_out.name, + "--user-mods-dir", + self.temp_dir_umd.name, + "--overwrite", + ] + with self.assertRaisesRegex( + RuntimeError, r"File lon type \(360\) doesn't match boundary lon type \(180\)" + ): + subset_data.main() + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_unit_longitude.py 
b/python/ctsm/test/test_unit_longitude.py index 1810cfb10b..6bf7ec53e2 100644 --- a/python/ctsm/test/test_unit_longitude.py +++ b/python/ctsm/test/test_unit_longitude.py @@ -3,21 +3,92 @@ """Unit tests for config_utils""" import unittest +from argparse import ArgumentTypeError +import numpy as np from ctsm import unit_testing from ctsm.longitude import Longitude from ctsm.longitude import _convert_lon_type_180_to_360, _convert_lon_type_360_to_180 +from ctsm.longitude import _check_lon_type_180, _check_lon_type_360 +from ctsm.longitude import _detect_lon_type # Allow test names that pylint doesn't like; otherwise hard to make them # readable # pylint: disable=invalid-name -# pylint: disable=protected-access +# pylint: disable=protected-access,too-many-public-methods class TestLongitude(unittest.TestCase): """Tests of Longitude class and helper functions""" + # Checking longitude type + + def test_check_lon_type_180(self): + """ + Check that a single value in [-180, 180] passes _check_lon_type_180() + """ + _check_lon_type_180(-180) + _check_lon_type_180(-55) + _check_lon_type_180(55) + _check_lon_type_180(155) + _check_lon_type_180(180) + + def test_check_lon_type_180_list(self): + """ + Check that a list with all values in [-180, 180] passes _check_lon_type_180() + """ + _check_lon_type_180([-180, -55, 55, 155, 180]) + + def test_check_lon_type_180_errors(self): + """ + Check that a single value outside [-180, 180] fails _check_lon_type_180() + """ + msg = r"lon_in must be in the range \[-180, 180\]" + with self.assertRaisesRegex(ValueError, msg): + _check_lon_type_180(-181) + with self.assertRaisesRegex(ValueError, msg): + _check_lon_type_180(181) + + def test_check_lon_type_180_list_errors(self): + """ + Check that a list with one value outside [-180, 180] fails _check_lon_type_180() + """ + with self.assertRaisesRegex(ValueError, r"lon_in must be in the range \[-180, 180\]"): + _check_lon_type_180([-191, -180, -55, 55, 155, 180]) + + def 
test_check_lon_type_360(self): + """ + Check that a single value in [0, 360] passes _check_lon_type_360() + """ + _check_lon_type_360(0) + _check_lon_type_360(55) + _check_lon_type_360(180) + _check_lon_type_360(360) + + def test_check_lon_type_360_list(self): + """ + Check that a list with all values in [0, 360] passes _check_lon_type_360() + """ + _check_lon_type_360([0, 55, 180, 360]) + + def test_check_lon_type_360_errors(self): + """ + Check that a single value outside [0, 360] fails _check_lon_type_360() + """ + msg = r"lon_in must be in the range \[0, 360\]" + with self.assertRaisesRegex(ValueError, msg): + _check_lon_type_360(-1) + with self.assertRaisesRegex(ValueError, msg): + _check_lon_type_360(361) + + def test_check_lon_type_360_list_errors(self): + """ + Check that a list with one value outside [0, 360] fails _check_lon_type_360() + """ + with self.assertRaisesRegex(ValueError, r"lon_in must be in the range \[0, 360\]"): + _check_lon_type_360([-155, 0, 55, 180, 360]) + # Converting between types 180 and 360 def test_convert_lon_type_180_to_360_positive(self): @@ -47,14 +118,14 @@ def test_convert_lon_type_180_to_360_upperbound(self): def test_convert_lon_type_180_to_360_toohigh(self): """Test conversion 180→360 for a value > 180: Should error""" lon = 555 - with self.assertRaisesRegex(ValueError, r"lon_in needs to be in the range \[-180, 180\]"): + with self.assertRaisesRegex(ValueError, r"lon_in must be in the range \[-180, 180\]"): _convert_lon_type_180_to_360(lon) def test_convert_lon_type_180_to_360_toolow(self): """Test conversion 180→360 for a value < -180: Should error""" lon = -555 - with self.assertRaisesRegex(ValueError, r"lon_in needs to be in the range \[-180, 180\]"): + with self.assertRaisesRegex(ValueError, r"lon_in must be in the range \[-180, 180\]"): _convert_lon_type_180_to_360(lon) def test_convert_lon_type_360_to_180_positive_low(self): @@ -84,14 +155,14 @@ def test_convert_lon_type_360_to_180_upperbound(self): def 
test_convert_lon_type_360_to_180_toohigh(self): """Test conversion 360→180 for a value > 180: Should error""" lon = 555 - with self.assertRaisesRegex(ValueError, r"lon_in needs to be in the range \[0, 360\]"): + with self.assertRaisesRegex(ValueError, r"lon_in must be in the range \[0, 360\]"): _convert_lon_type_360_to_180(lon) def test_convert_lon_type_360_to_180_toolow(self): """Test conversion 360→180 for a value < -180: Should error""" lon = -555 - with self.assertRaisesRegex(ValueError, r"lon_in needs to be in the range \[0, 360\]"): + with self.assertRaisesRegex(ValueError, r"lon_in must be in the range \[0, 360\]"): _convert_lon_type_360_to_180(lon) # Initializing new Longitude objects @@ -152,6 +223,229 @@ def test_lon_obj_type360_max(self): lon_obj = Longitude(this_lon, lon_type) self.assertEqual(lon_obj.get(lon_type), this_lon) + def test_lon_eq_both360(self): + """Test that == works for two equal Longitudes both of type 360""" + lon1 = Longitude(275, 360) + lon2 = Longitude(275, 360) + self.assertTrue(lon1 == lon2) + + def test_lon_eq_360num_error(self): + """Test that == fails if RHS isn't Longitude""" + lon1 = Longitude(275, 360) + lon2 = 275 + with self.assertRaisesRegex( + TypeError, "Comparison not supported between instances of 'Longitude' and " + ): + _ = lon1 == lon2 + + def test_lon_eq_num360_error(self): + """Test that == fails if LHS isn't Longitude""" + lon1 = 275 + lon2 = Longitude(275, 360) + with self.assertRaisesRegex( + TypeError, "Comparison not supported between instances of 'Longitude' and " + ): + _ = lon1 == lon2 + + def test_lon_eq_both180(self): + """Test that == works for two equal Longitudes both of type 180""" + lon1 = Longitude(-5, 180) + lon2 = Longitude(-5, 180) + self.assertTrue(lon1 == lon2) + + def test_lon_eq_180360(self): + """Test that == works for two equal Longitudes of different types""" + lon1 = Longitude(-5, 180) + lon2 = Longitude(355, 360) + self.assertTrue(lon1 == lon2) + self.assertTrue(lon2 == lon1) + + def 
test_lon_eqfalse_180360(self): + """Test that == works for two unequal Longitudes of different types""" + lon1 = Longitude(-1, 180) + lon2 = Longitude(355, 360) + self.assertFalse(lon1 == lon2) + self.assertFalse(lon2 == lon1) + + def test_lon_noteqtrue_180360(self): + """Test that != works for two unequal Longitudes of different types""" + lon1 = Longitude(-1, 180) + lon2 = Longitude(355, 360) + self.assertTrue(lon1 != lon2) + self.assertTrue(lon2 != lon1) + + def test_lon_compare_both360(self): + """ + Ensure that comparison operators work if both are type 360 + """ + lon1 = Longitude(155, 360) + lon2 = Longitude(150, 360) + self.assertTrue(lon1 > lon2) + self.assertTrue(lon1 >= lon2) + self.assertFalse(lon1 <= lon2) + self.assertFalse(lon1 < lon2) + + def test_lon_compare_both180(self): + """ + Ensure that comparison operators work if both are type 180 + """ + lon1 = Longitude(155, 180) + lon2 = Longitude(150, 180) + self.assertTrue(lon1 > lon2) + self.assertTrue(lon1 >= lon2) + self.assertFalse(lon1 <= lon2) + self.assertFalse(lon1 < lon2) + + def test_lon_compare_arrays(self): + """ + Ensure that comparison operators work elementwise for arrays + """ + lon1 = Longitude(np.array([155, 156, 157]), 180) + lon2 = Longitude(np.array([155, 157, 156]), 180) + result = lon1 == lon2 + expected = [True, False, False] + for i in np.arange(3): + self.assertTrue(result[i] == expected[i]) + result = lon1 < lon2 + expected = [False, True, False] + for i in np.arange(3): + self.assertTrue(result[i] == expected[i]) + result = lon1 <= lon2 + expected = [True, True, False] + for i in np.arange(3): + self.assertTrue(result[i] == expected[i]) + result = lon1 > lon2 + expected = [False, False, True] + for i in np.arange(3): + self.assertTrue(result[i] == expected[i]) + result = lon1 >= lon2 + expected = [True, False, True] + for i in np.arange(3): + self.assertTrue(result[i] == expected[i]) + + def test_lon_compare_lists(self): + """ + Ensure that comparison operators work on the 
entire object for lists + """ + lon1 = Longitude([155, 156, 157], 360) + lon2 = Longitude([155, 157, 156], 360) + self.assertFalse(lon1 == lon2) + self.assertTrue(lon1 != lon2) + lon2 = lon1 + self.assertTrue(lon1 == lon2) + self.assertFalse(lon1 != lon2) + + def test_lon_compare_difftypes_error(self): + """ + Ensure that comparison operators fail if Longitudes are different types + """ + lon1 = Longitude(155, 360) + lon2 = Longitude(150, 180) + msg = "Comparison not supported between Longitudes of different types" + with self.assertRaisesRegex(TypeError, msg): + _ = lon1 < lon2 + with self.assertRaisesRegex(TypeError, msg): + _ = lon1 > lon2 + with self.assertRaisesRegex(TypeError, msg): + _ = lon1 <= lon2 + with self.assertRaisesRegex(TypeError, msg): + _ = lon1 >= lon2 + + def test_lon_compare_notlon_error(self): + """ + Ensure that comparison operators fail if one isn't a Longitude + """ + lon1 = Longitude(155, 360) + lon2 = 255 + msg = "Comparison not supported between instances of 'Longitude' and" + with self.assertRaisesRegex(TypeError, msg): + _ = lon1 < lon2 + with self.assertRaisesRegex(TypeError, msg): + _ = lon1 > lon2 + with self.assertRaisesRegex(TypeError, msg): + _ = lon1 <= lon2 + with self.assertRaisesRegex(TypeError, msg): + _ = lon1 >= lon2 + + def test_detect_lon_type_mid_180(self): + """test that detect_lon_type works for an unambiguously 180 value""" + self.assertEqual(_detect_lon_type(-150), 180) + + def test_detect_lon_type_min_180(self): + """test that detect_lon_type works at -180""" + self.assertEqual(_detect_lon_type(-180), 180) + + def test_detect_lon_type_mid_360(self): + """test that detect_lon_type works for an unambiguously 360 value""" + self.assertEqual(_detect_lon_type(355), 360) + + def test_detect_lon_type_max_360(self): + """test that detect_lon_type works at 360""" + self.assertEqual(_detect_lon_type(360), 360) + + def test_detect_lon_type_list_180(self): + """test that detect_lon_type works for a list with just one 
unambiguously 180 value""" + self.assertEqual(_detect_lon_type([-150, 150]), 180) + + def test_detect_lon_type_list_360(self): + """test that detect_lon_type works for a list with just one unambiguously 360 value""" + self.assertEqual(_detect_lon_type([256, 150]), 360) + + def test_detect_lon_type_ambig(self): + """test that detect_lon_type fails if ambiguous""" + with self.assertRaisesRegex(ArgumentTypeError, r"Longitude\(s\) ambiguous"): + _detect_lon_type(150) + + def test_detect_lon_type_list_ambig(self): + """test that detect_lon_type fails for an ambiguous list""" + with self.assertRaisesRegex(ArgumentTypeError, r"Longitude\(s\) ambiguous"): + _detect_lon_type([150, 170]) + + def test_detect_lon_type_list_both(self): + """test that detect_lon_type fails for a list with unambiguous members of both types""" + with self.assertRaisesRegex(RuntimeError, r"Longitude array contains values of both types"): + _detect_lon_type([-150, 270]) + + def test_detect_lon_type_ambig0(self): + """test that detect_lon_type fails at 0""" + with self.assertRaisesRegex(ArgumentTypeError, r"Longitude\(s\) ambiguous"): + _detect_lon_type(0) + + def test_detect_lon_type_oob_low(self): + """test that detect_lon_type fails if out of bounds below min""" + with self.assertRaisesRegex(ValueError, r"\(Minimum\) longitude < -180"): + _detect_lon_type(-300) + + def test_detect_lon_type_oob_high(self): + """test that detect_lon_type fails if out of bounds above max""" + with self.assertRaisesRegex(ValueError, r"\(Maximum\) longitude > 360"): + _detect_lon_type(500) + + def test_list_as_lon(self): + """ + Test that converting a List to Longitude works + """ + expected_lon = [1, 5] + expected_type = 360 + result = Longitude(expected_lon, expected_type) + self.assertEqual(expected_lon, result._lon) + self.assertEqual(expected_type, result.lon_type()) + + def test_array_as_lon(self): + """ + Test that converting a Numpy array to Longitude works + """ + expected_lon = np.array([1, 5]) + 
expected_type = 180 + result = Longitude(expected_lon, expected_type) + self.assertTrue(np.array_equal(expected_lon, result._lon)) + self.assertEqual(expected_type, result.lon_type()) + + def test_lon_type_getter(self): + """Test lon_type() getter method""" + lon = Longitude(55, 180) + self.assertEqual(lon.lon_type(), 180) + if __name__ == "__main__": unit_testing.setup_for_tests() diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py index d1b27d12cc..eeb0a9a38a 100755 --- a/python/ctsm/test/test_unit_subset_data.py +++ b/python/ctsm/test/test_unit_subset_data.py @@ -11,6 +11,8 @@ import argparse import os import sys +import numpy as np +import xarray as xr # -- add python/ctsm to path (needed if we want to run the test stand-alone) _CTSM_PYTHON = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) @@ -21,7 +23,50 @@ from ctsm.subset_data import get_parser, setup_files, check_args, _set_up_regional_case from ctsm.path_utils import path_to_ctsm_root -# pylint: disable=invalid-name,too-many-public-methods +# pylint: disable=invalid-name,too-many-public-methods,protected-access + + +def setup_fake_dataset(fake_values, lon_values, lat_values): + """ + Set up a Dataset with some fake data for use in testing subset_data + """ + + # Define longitude dimension/coordinates + x_dimname = "lon_dim" + x_varname = "lon_var" + lon_da = xr.DataArray( + data=lon_values, + name=x_varname, + dims=x_dimname, + coords={x_dimname: lon_values}, + ) + + # Define latitude dimension/coordinates + y_dimname = "lat_dim" + y_varname = "lat_var" + lat_da = xr.DataArray( + data=lat_values, + name=y_varname, + dims=y_dimname, + coords={y_dimname: lat_values}, + ) + + # Make DataArray (lat x lon) with fake data + fake_da = xr.DataArray( + data=fake_values, + dims=[y_dimname, x_dimname], + ) + + # Make Dataset + fake_ds = xr.Dataset( + data_vars={ + "lon": lon_da, + "lat": lat_da, + "fake": fake_da, + } + ) + + return 
x_dimname, y_dimname, fake_ds class TestSubsetData(unittest.TestCase): @@ -328,7 +373,7 @@ def test_region_lon_type_360_toolow(self): args = self.parser.parse_args() with self.assertRaisesRegex( ValueError, - r"lon_in needs to be in the range \[0, 360\]", + r"\(All values of\) lon_in must be in the range \[0, 360\]", ): check_args(args) @@ -499,7 +544,7 @@ def test_point_ambiguous_lon_errors(self): args = self.parser.parse_args() with self.assertRaisesRegex( argparse.ArgumentTypeError, - "When providing an ambiguous longitude, you must specify --lon-type 180 or 360", + r"Longitude\(s\) ambiguous; could be type 180 or 360", ): check_args(args) @@ -564,7 +609,7 @@ def test_region_ambiguous_lon_errors(self): args = self.parser.parse_args() with self.assertRaisesRegex( argparse.ArgumentTypeError, - "When providing an ambiguous longitude, you must specify --lon-type 180 or 360", + r"Longitude\(s\) ambiguous; could be type 180 or 360", ): check_args(args) @@ -591,7 +636,7 @@ def test_region_ambiguous_lons_errors(self): args = self.parser.parse_args() with self.assertRaisesRegex( argparse.ArgumentTypeError, - "When providing an ambiguous longitude, you must specify --lon-type 180 or 360", + r"Longitude\(s\) ambiguous; could be type 180 or 360", ): check_args(args) @@ -673,6 +718,127 @@ def test_region_lon_type_180_ok_at_180(self): self.assertEqual(args.lon2.get(lon_type), lon2) _set_up_regional_case(args) + def test_check_region_bounds_none_error(self): + """ + In region mode, test that error is thrown if any region bound is None + """ + # Define a good region to pass initial setup + sys.argv = [ + "subset_data", + "region", + "--create-domain", + "--verbose", + "--lat1", + "0", + "--lat2", + "40", + "--lon1", + "194", + "--lon2", + "287", + ] + self.parser = get_parser() + args = self.parser.parse_args() + args = check_args(args) + region = _set_up_regional_case(args) + + # Mess up the region + region.lon1 = None + err_msg = "Latitude and longitude bounds must be 
provided and not None" + with self.assertRaisesRegex(argparse.ArgumentTypeError, err_msg): + region.check_region_bounds() + + def test_check_region_bounds_lat_eq_error(self): + """ + In region mode, test that error is thrown if lat1 == lat2 + """ + # Define a good region to pass initial setup + sys.argv = [ + "subset_data", + "region", + "--create-domain", + "--verbose", + "--lat1", + "0", + "--lat2", + "40", + "--lon1", + "194", + "--lon2", + "287", + ] + self.parser = get_parser() + args = self.parser.parse_args() + args = check_args(args) + region = _set_up_regional_case(args) + + # Mess up the region + region.lat2 = region.lat1 + err_msg = "ERROR: lat1 is bigger than lat2" + with self.assertRaisesRegex(argparse.ArgumentTypeError, err_msg): + region.check_region_bounds() + + def test_subset_lon_lat(self): + """ + Test that RegionalCase._subset_lon_lat() works as expected + """ + + # Define lon/lat boundaries of the fake Dataset we'll be making + fakefile_bounds_lon = [-21, -18] + fakefile_bounds_lat = [3, 7] + # Get lon/lat values within those bounds, with 1-deg increments + lon_values = np.arange(fakefile_bounds_lon[0], fakefile_bounds_lon[1] + 1) + lat_values = np.arange(fakefile_bounds_lat[0], fakefile_bounds_lat[1] + 1) + # Define array of data to be in the "fake" variable of our Dataset + fake_values = np.array( + [ + [0, 1, 2, 3], + [4, 5, 6, 7], + [8, 9, 10, 11], + [12, 13, 14, 15], + [16, 17, 18, 19], + ] + ) + # Set up fake input Dataset + x_dimname, y_dimname, fake_ds = setup_fake_dataset(fake_values, lon_values, lat_values) + + # Define lon/lat boundaries of the region from that file we're subsetting + region_bounds_lon = [-21, -19] + region_bounds_lat = [4, 6] + # Set up command-line arguments for subset region boundaries + region_bound_args = [ + "--lat1", + str(region_bounds_lat[0]), + "--lat2", + str(region_bounds_lat[1]), + "--lon1", + str(region_bounds_lon[0]), + "--lon2", + str(region_bounds_lon[1]), + ] + + sys.argv = [ + "subset_data", + 
"region", + "--create-domain", + "--verbose", + ] + region_bound_args + self.parser = get_parser() + args = self.parser.parse_args() + args = check_args(args) + region = _set_up_regional_case(args) + + # Test subsetting + result = region._subset_lon_lat(x_dimname, y_dimname, fake_ds) + expected_fake_values = np.array( + [ + [4, 5, 6], + [8, 9, 10], + [12, 13, 14], + ] + ) + self.assertTrue(np.array_equal(result["fake"].values, expected_fake_values)) + if __name__ == "__main__": unit_testing.setup_for_tests() diff --git a/python/ctsm/test/test_unit_utils.py b/python/ctsm/test/test_unit_utils.py index 75b240b365..77bbbd7e34 100755 --- a/python/ctsm/test/test_unit_utils.py +++ b/python/ctsm/test/test_unit_utils.py @@ -9,6 +9,7 @@ from ctsm import unit_testing from ctsm.utils import fill_template_file, ensure_iterable +from ctsm.utils import find_one_file_matching_pattern from ctsm.config_utils import _handle_config_value # Allow names that pylint doesn't like, because otherwise I find it hard @@ -316,6 +317,54 @@ def test_ensure_iterable_error_wrong_length(self): ensure_iterable([11, 12], 3) +class TestUtilsFindOneFileMatchingPattern(unittest.TestCase): + """Tests of utils: find_one_file_matching_pattern""" + + def setUp(self): + self._testdir = tempfile.mkdtemp() + + def tearDown(self): + shutil.rmtree(self._testdir) + + def test_find_one_file_matching_pattern(self): + """ + Tests that find_one_file_matching_pattern passes if one file matches + """ + # Create empty file + # pylint: disable=consider-using-with,unspecified-encoding + test_file_path = os.path.join(self._testdir, "abc123.txt") + open(test_file_path, "x").close() + + # Look for empty file given a pattern with wildcard + pattern = os.path.join(self._testdir, "abc*") + result = find_one_file_matching_pattern(pattern) + self.assertEqual(result, test_file_path) + + def test_find_one_file_matching_pattern_0found(self): + """ + Tests that find_one_file_matching_pattern errors if no files match + """ + # Look 
for non-existent empty file given a pattern with wildcard + pattern = os.path.join(self._testdir, "abc*") + with self.assertRaisesRegex(FileNotFoundError, "No file found matching pattern"): + find_one_file_matching_pattern(pattern) + + def test_find_one_file_matching_pattern_2found(self): + """ + Tests that find_one_file_matching_pattern errors if multiple files match + """ + # Create empty files + # pylint: disable=consider-using-with,unspecified-encoding + open(os.path.join(self._testdir, "abc123.txt"), "x").close() + open(os.path.join(self._testdir, "abc456.txt"), "x").close() + + # Look for empty file given a pattern with wildcard + pattern = os.path.join(self._testdir, "abc*") + err_msg = "Expected 1 but found 2 files found matching pattern" + with self.assertRaisesRegex(RuntimeError, err_msg): + find_one_file_matching_pattern(pattern) + + if __name__ == "__main__": unit_testing.setup_for_tests() unittest.main() diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon/domain.lnd.5x5pt-amazon_navy_TMP_c250508.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon/domain.lnd.5x5pt-amazon_navy_TMP_c250508.nc new file mode 100644 index 0000000000..ed5b318782 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon/domain.lnd.5x5pt-amazon_navy_TMP_c250508.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1bcaf4ac786ae6b37fb2000fbc79d288072ff41f7e39ca08e55de5bfca58518c +size 3804 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon/domain.lnd.5x5pt-amazon_navy_TMP_c250508_ESMF_UNSTRUCTURED_MESH.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon/domain.lnd.5x5pt-amazon_navy_TMP_c250508_ESMF_UNSTRUCTURED_MESH.nc new file mode 100644 index 0000000000..55f1e3ae36 --- /dev/null +++ 
b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon/domain.lnd.5x5pt-amazon_navy_TMP_c250508_ESMF_UNSTRUCTURED_MESH.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:86ed3c23c471689f97803bc65737f19d2255d8cdbbf4050c388fc5c4aef6a154 +size 14606 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250508.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250508.nc new file mode 100644 index 0000000000..1c0ef655a6 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250508.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:30c6a1f3d1e32ec75b2ef406362cc1da2cd8200fccb88ab1ea0579dd14500b42 +size 105164 diff --git a/python/ctsm/test/testinputs/subset_data_amazon.cfg b/python/ctsm/test/testinputs/subset_data_amazon.cfg new file mode 100644 index 0000000000..f99486c82a --- /dev/null +++ b/python/ctsm/test/testinputs/subset_data_amazon.cfg @@ -0,0 +1,9 @@ +[surfdat] +dir = ctsm/test/testinputs +surfdat_16pft = surfdata_5x5_amazon_hist_16pfts_CMIP6_2000_c231031.nc +surfdat_78pft = surfdata_5x5_amazon_hist_78pfts_CMIP6_2000_c230517.nc +mesh_dir = ctsm/test/testinputs +mesh_surf = ESMF_mesh_5x5pt_amazon_from_domain_c230308.nc + +[domain] +file = ctsm/test/testinputs/domain.lnd.5x5pt-amazon_navy.090715.nc diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 8ae6d9435e..0b529fe282 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -3,6 +3,7 @@ import logging import os import sys +import glob import string import re import pdb @@ -250,3 +251,19 @@ def is_instantaneous(time_var): if "time at exact middle" in long_name: return False raise RuntimeError(f"Does this long_name mean instantaneous or not? 
{long_name}") + + +def find_one_file_matching_pattern(pattern): + """ + Given a file path with wildcards, find all the matching files. Throw an error if there is not + exactly one matching file. If there's just one, return its path. + """ + file_list = glob.glob(pattern) + if not file_list: + raise FileNotFoundError("No file found matching pattern: " + pattern) + n_found = len(file_list) + if n_found > 1: + raise RuntimeError( + f"Expected 1 but found {n_found} files found matching pattern: " + pattern + ) + return file_list[0] diff --git a/tools/mksurfdata_esmf/Makefile b/tools/mksurfdata_esmf/Makefile index 936fb7da78..835f732a02 100644 --- a/tools/mksurfdata_esmf/Makefile +++ b/tools/mksurfdata_esmf/Makefile @@ -1,5 +1,7 @@ # -*- mode:Makefile -*- # +# Running with this Makefile requires that you build the code first. +# # Before running "make urban-alpha" or any target that includes it, # execute "module load nco" first. # @@ -28,14 +30,20 @@ # Set up special characters null := +# Path to the executable +MKSURFDATA_EXE = tool_bld/mksurfdata_esmf + # Set a few things needed for batch handling -PROJECT = $(shell cat $(HOME)/.cesm_proj) +PROJECT := ${PROJECT} +ifeq ($(PROJECT),$(null)) + PROJECT = $(shell cat $(HOME)/.cesm_proj) +endif LOGOUT = $@.stdout.txt PWD = $(shell pwd) BATCHJOBS_ch = qsub ifeq ($(PROJECT),$(null)) - $(error Can NOT find PROJECT number from ~/.cesm_proj file create it and try again) + $(error Can't get project number from either PROJECT environment variable or ~/.cesm_proj file. Try calling make like "PROJECT=P12345678 make ..." 
or put your project number in ~/.cesm_proj) endif BATCHJOBS = $(BATCHJOBS_ch) @@ -104,6 +112,10 @@ CROP = \ crop-global-1850-ne30 \ crop-global-1850-mpasa480 \ +# Build the executable if it doesn't exist and any target depends on it +$(MKSURFDATA_EXE): + ./gen_mksurfdata_build + # Start all with all-subset because user is bound to forget to first run # module load nco # Usually, include global-present-ultra-hi-res temporarily while