diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 7ea285a6bc..518de3672c 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -67,3 +67,5 @@ cdf40d265cc82775607a1bf25f5f527bacc97405 3b7a2876933263f8986e4069f5d23bd45635756f 3dd489af7ebe06566e2c6a1c7ade18550f1eb4ba 742cfa606039ab89602fde5fef46458516f56fd4 +4ad46f46de7dde753b4653c15f05326f55116b73 +75db098206b064b8b7b2a0604d3f0bf8fdb950cc diff --git a/.github/workflows/docker-image-build.yml b/.github/workflows/docker-image-build.yml index 0ac43426a6..6d38e12c8b 100644 --- a/.github/workflows/docker-image-build.yml +++ b/.github/workflows/docker-image-build.yml @@ -1,5 +1,5 @@ # Modified from https://docs.github.com/en/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions#publishing-a-package-using-an-action (last accessed 2025-05-09) -name: Test building ctsm-docs Docker image and using it to build the docs +name: Build and test ctsm-docs container # Configures this workflow to run every time a change in the Docker container setup is pushed or included in a PR on: @@ -9,7 +9,6 @@ on: paths: - 'doc/ctsm-docs_container/**' - '!doc/ctsm-docs_container/README.md' - - '.github/workflows/docker-image-ctsm-docs-build.yml' - '.github/workflows/docker-image-common.yml' pull_request: @@ -17,7 +16,6 @@ on: paths: - 'doc/ctsm-docs_container/**' - '!doc/ctsm-docs_container/README.md' - - '.github/workflows/docker-image-ctsm-docs-build.yml' - '.github/workflows/docker-image-common.yml' workflow_dispatch: diff --git a/.github/workflows/docker-image-common.yml b/.github/workflows/docker-image-common.yml index d44c14c1f8..3522069132 100644 --- a/.github/workflows/docker-image-common.yml +++ b/.github/workflows/docker-image-common.yml @@ -76,14 +76,16 @@ jobs: tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - # Try building our docs using the new container - - name: Checkout doc-builder external + # Check 
out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules run: | - bin/git-fleximod update doc-builder + bin/git-fleximod update -o + - name: Set image tag for docs build id: set-image-tag run: | echo "IMAGE_TAG=$(echo '${{ steps.meta.outputs.tags }}' | head -n 1 | cut -d',' -f1)" >> $GITHUB_ENV + - name: Build docs using Docker (Podman has trouble on GitHub runners) id: build-docs run: | diff --git a/.github/workflows/docs-build-and-deploy.yml b/.github/workflows/docs-build-and-deploy.yml index 2c928e0ccb..1b0c0cb412 100644 --- a/.github/workflows/docs-build-and-deploy.yml +++ b/.github/workflows/docs-build-and-deploy.yml @@ -6,12 +6,14 @@ on: branches: ['master', 'release-clm5.0'] paths: - 'doc/**' + - '!doc/test/*' - '!doc/*ChangeLog*' - '!doc/*ChangeSum*' - '!doc/UpdateChangelog.pl' # Include all include::ed files outside doc/ directory! - 'src/README.unit_testing' - 'tools/README' + - 'doc/test/test_container_eq_ctsm_pylib.sh' # Allows you to run this workflow manually from the Actions tab workflow_dispatch: @@ -46,10 +48,14 @@ jobs: - name: Setup Pages uses: actions/configure-pages@v5 + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules + run: | + bin/git-fleximod update -o + - name: Build docs using container id: build-docs run: | - bin/git-fleximod update -o cd doc ./build_docs_to_publish -d --site-root https://escomp.github.io/CTSM diff --git a/.github/workflows/docs-common.yml b/.github/workflows/docs-common.yml index 6dd8f7d53b..9c9d9f386c 100644 --- a/.github/workflows/docs-common.yml +++ b/.github/workflows/docs-common.yml @@ -26,9 +26,10 @@ jobs: fetch-depth: 0 lfs: true - - name: Checkout doc-builder external + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules run: | - bin/git-fleximod update doc-builder + bin/git-fleximod update -o # Do this if not using conda # Based on 
https://github.com/actions/cache/blob/main/examples.md#python---pip diff --git a/.github/workflows/docs-omnibus.yml b/.github/workflows/docs-omnibus.yml index 1636150fae..1c73eb8224 100644 --- a/.github/workflows/docs-omnibus.yml +++ b/.github/workflows/docs-omnibus.yml @@ -5,29 +5,23 @@ on: # Run when a change to these files is pushed to any branch. Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if the listed files aren't changed. branches: ['*'] paths: - - 'doc/**' - - '!doc/*ChangeLog*' - - '!doc/*ChangeSum*' - - '!doc/UpdateChangelog.pl' - # Include all include::ed files outside doc/ directory! - - 'src/README.unit_testing' - - 'tools/README' + - 'doc/test/*' + - 'doc/Makefile' pull_request: # Run on pull requests that change the listed files paths: - - 'doc/**' - - '!doc/*ChangeLog*' - - '!doc/*ChangeSum*' - - '!doc/UpdateChangelog.pl' - # Include all include::ed files outside doc/ directory! - - 'src/README.unit_testing' - - 'tools/README' + - 'doc/test/*' + - 'doc/Makefile' workflow_dispatch: jobs: build-docs-omnibus-test: + # Don't run on forks, because part(s) of omnibus testing script will look for + # branch(es) that forks may not have. 
+ if: ${{ github.repository == 'ESCOMP/CTSM' }} + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -35,9 +29,10 @@ jobs: fetch-depth: 0 lfs: true - - name: Checkout doc-builder external + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules run: | - bin/git-fleximod update doc-builder + bin/git-fleximod update -o # Set up conda - name: Set up conda environment @@ -48,7 +43,6 @@ jobs: channels: conda-forge auto-activate-base: false - # TODO: Split testing.sh tests into their own steps in this job - name: Text Sphinx builds with omnibus script run: | - cd doc && ./testing.sh + cd doc/test/ && ./testing.sh diff --git a/.github/workflows/docs-ctsm_pylib.yml b/.github/workflows/docs-update-ctsm_pylib.yml similarity index 66% rename from .github/workflows/docs-ctsm_pylib.yml rename to .github/workflows/docs-update-ctsm_pylib.yml index 850f58063f..865f092f92 100644 --- a/.github/workflows/docs-ctsm_pylib.yml +++ b/.github/workflows/docs-update-ctsm_pylib.yml @@ -1,4 +1,4 @@ -name: Test building docs with ctsm_pylib +name: Docs tests to run when ctsm_pylib is updated on: push: @@ -6,13 +6,17 @@ on: branches: ['*'] paths: - 'python/conda_env_ctsm_py.txt' + - 'doc/ctsm-docs_container/requirements.txt' - '.github/workflows/docs-common.yml' + - '.github/workflows/docs-update-dependency-common.yml' pull_request: # Run on pull requests that change the listed files paths: - 'python/conda_env_ctsm_py.txt' + - 'doc/ctsm-docs_container/requirements.txt' - '.github/workflows/docs-common.yml' + - '.github/workflows/docs-update-dependency-common.yml' schedule: # 8 am every Monday UTC @@ -25,14 +29,23 @@ permissions: jobs: test-build-docs-ctsm_pylib: if: ${{ always() }} - name: With ctsm_pylib + name: Build with ctsm_pylib uses: ./.github/workflows/docs-common.yml with: use_conda: true conda_env_file: python/conda_env_ctsm_py.yml conda_env_name: ctsm_pylib - # File an issue if the docs build failed during a 
scheduled run + test-update-dependency: + if: ${{ always() }} + name: Docs dependency update tests + uses: ./.github/workflows/docs-update-dependency-common.yml + + # File an issue if the docs build failed during a scheduled run. + # The main thing we're concerned about in that case is something having + # changed outside the repository that's causing the ctsm_pylib setup to + # fail. Thus, we don't need this job to wait for BOTH the above jobs--- + # if one fails, they both will. file-issue-on-failure: if: | failure() && diff --git a/.github/workflows/docs-update-dependency-common.yml b/.github/workflows/docs-update-dependency-common.yml new file mode 100644 index 0000000000..a64e1a8ad5 --- /dev/null +++ b/.github/workflows/docs-update-dependency-common.yml @@ -0,0 +1,77 @@ +name: Jobs shared by docs workflows that run when a dependency is updated + +on: + workflow_call: + inputs: + # Conda is always needed for both jobs in this workflow. Here, + # we set default values for the variables in case the calling + # workflow doesn't provide them. + conda_env_file: + required: false + type: string + default: "python/conda_env_ctsm_py.yml" + conda_env_name: + required: false + type: string + default: "ctsm_pylib" + secrets: {} + +jobs: + compare-docbuilder-vs-ctsmpylib: + name: Are both methods identical? 
+ + # Don't run on forks, because test_container_eq_ctsm_pylib.sh uses + # build_docs_to_publish, which will look for branch(es) that forks + # may not have + if: ${{ github.repository == 'ESCOMP/CTSM' }} + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + lfs: true + + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules + run: | + bin/git-fleximod update -o + + - name: Set up conda environment + uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: ${{ inputs.conda_env_name }} + environment-file: ${{ inputs.conda_env_file }} + channels: conda-forge + auto-activate-base: false + + - name: Compare docs built with container vs. ctsm_pylib + run: | + cd doc/test/ + ./test_container_eq_ctsm_pylib.sh + + makefile-method: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + lfs: true + + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules + run: | + bin/git-fleximod update -o + + - name: Set up conda environment + uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: ${{ inputs.conda_env_name }} + environment-file: ${{ inputs.conda_env_file }} + channels: conda-forge + auto-activate-base: false + + - name: Check that Makefile method works + run: | + cd doc/test/ + conda run -n ${{ inputs.conda_env_name }} --no-capture-output ./test_makefile_method.sh diff --git a/.github/workflows/docs-update-doc-builder.yml b/.github/workflows/docs-update-doc-builder.yml new file mode 100644 index 0000000000..0756ed94c5 --- /dev/null +++ b/.github/workflows/docs-update-doc-builder.yml @@ -0,0 +1,43 @@ +name: Docs tests to run when doc-builder is updated + +on: + push: + # Run when a change to these files is pushed to any branch. 
Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if the listed files aren't changed. + branches: ['*'] + paths: + - 'doc/doc-builder' + - '.github/workflows/docs-update-dependency-common.yml' + + pull_request: + # Run on pull requests that change the listed files + paths: + - 'doc/doc-builder' + - '.github/workflows/docs-update-dependency-common.yml' + + workflow_dispatch: + +permissions: + contents: read +jobs: + test-update-dependency: + + name: Tests to run when either docs dependency is updated + uses: ./.github/workflows/docs-update-dependency-common.yml + + test-rv-setup: + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + lfs: true + + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules + run: | + bin/git-fleximod update -o + + - name: build_docs rv method + run: | + cd doc/test/ && ./test_build_docs_-r-v.sh docker diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 074a674ffe..362818eb90 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -7,6 +7,7 @@ on: branches: ['*'] paths: - 'doc/**' + - '!doc/test/*' - '!doc/*ChangeLog*' - '!doc/*ChangeSum*' - '!doc/UpdateChangelog.pl' @@ -14,11 +15,13 @@ on: # Include all include::ed files outside doc/ directory! - 'src/README.unit_testing' - 'tools/README' + - 'doc/test/test_container_eq_ctsm_pylib.sh' pull_request: # Run on pull requests that change the listed files paths: - 'doc/**' + - '!doc/test/*' - '!doc/*ChangeLog*' - '!doc/*ChangeSum*' - '!doc/UpdateChangelog.pl' @@ -26,6 +29,7 @@ on: # Include all include::ed files outside doc/ directory! 
- 'src/README.unit_testing' - 'tools/README' + - 'doc/test/test_container_eq_ctsm_pylib.sh' workflow_dispatch: @@ -49,9 +53,10 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 - - name: Checkout doc-builder external + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules run: | - bin/git-fleximod update doc-builder + bin/git-fleximod update -o - name: Build docs using Docker (Podman has trouble on GitHub runners) id: build-docs diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm index 02de98ea6e..06ea82d99b 100755 --- a/bld/CLMBuildNamelist.pm +++ b/bld/CLMBuildNamelist.pm @@ -4478,6 +4478,10 @@ sub setup_logic_cngeneral { "(eg. don't use these options with SP mode)."); } } + if ( &value_is_true($nl->get_value('reseed_dead_plants')) && + &remove_leading_and_trailing_quotes($nl_flags->{'clm_start_type'}) eq "branch") { + $log->fatal_error("reseed_dead_plants MUST be .false. in a branch run"); + } } #------------------------------------------------------------------------------- diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index 6892638a21..effca5ea5c 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -678,6 +678,10 @@ sub cat_and_create_namelistinfile { namelst=>"soil_decomp_method='None'", phys=>"clm5_0", }, + "reseed with branch" =>{ options=>"-clm_start_type branch -envxml_dir .", + namelst=>"reseed_dead_plants=.true.", + phys=>"clm6_0", + }, "reseed without CN" =>{ options=>" -envxml_dir . 
-bgc sp", namelst=>"reseed_dead_plants=.true.", phys=>"clm5_0", diff --git a/doc/ChangeLog b/doc/ChangeLog index 0c1927c239..569347dfce 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,84 @@ =============================================================== +Tag name: ctsm5.3.061 +Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310) +Date: Thu 26 Jun 2025 11:28:43 AM MDT +One-line Summary: Merge b4b-dev to master + +Purpose and description of changes +---------------------------------- +PR #3231 Clean up docs workflows +Resolves #3160 +Resolves #3213 + +PR #3272 Throw error if reseed_dead_plants = .true. in a branch simulation +Resolves #3257 + +PR #3264 Fix plumber2_surf_wrapper +Resolves #3262 + +PR #3259 subset_data point: Fix --create-datm and Longitude TypeErrors +Resolves #3258 +Resolves #3260 +Resolves #3197 +Resolves #2960 + +PR #3227 Docs docs: Update Windows instructions +Resolves #3185 + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- +List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: + Listed along with corresponding PRs in "Purpose and description of changes" above + +Notes of particular relevance for users +--------------------------------------- +Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): + #3272 Throw error if reseed_dead_plants = .true. in a branch simulation + +Changes to documentation: + #3227 Docs docs: Update Windows instructions + +Testing summary: +---------------- + [PASS means all tests PASS; OK means tests PASS other than expected fails.] 
+ + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + +Answer changes +-------------- +Changes answers relative to baseline: No + +Other details +------------- +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/3283 + +=============================================================== +=============================================================== Tag name: ctsm5.3.060 Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310) Date: Tue 24 Jun 2025 02:13:05 PM MDT diff --git a/doc/ChangeSum b/doc/ChangeSum index 522c814f4a..e10850838e 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.3.061 slevis 06/26/2025 Merge b4b-dev to master ctsm5.3.060 slevis 06/24/2025 Preliminary update of ctsm54 defaults (answer changing) ctsm5.3.059 erik 06/23/2025 Bring in various cleanup efforts found in previous testing after the chill changes came in ctsm5.3.058 samrabin 06/16/2025 Fix clm6 compset aliases diff --git a/doc/ctsm-docs_container/Dockerfile b/doc/ctsm-docs_container/Dockerfile index 2ffd7a1702..5c78a0c14f 100644 --- a/doc/ctsm-docs_container/Dockerfile +++ b/doc/ctsm-docs_container/Dockerfile @@ -29,4 +29,4 @@ CMD ["/bin/bash", "-l"] LABEL org.opencontainers.image.title="Container for building CTSM documentation" LABEL org.opencontainers.image.source=https://github.com/ESCOMP/CTSM -LABEL org.opencontainers.image.version="v1.0.2c" +LABEL org.opencontainers.image.version="v1.0.2d" diff --git a/doc/source/users_guide/running-single-points/generic-single-point-regional.rst b/doc/source/users_guide/running-single-points/generic-single-point-regional.rst index 3d418b00fb..7e0b1e72fd 100644 
--- a/doc/source/users_guide/running-single-points/generic-single-point-regional.rst +++ b/doc/source/users_guide/running-single-points/generic-single-point-regional.rst @@ -41,7 +41,7 @@ You can also have the script subset land-use data. See the help (``tools/site_an .. note:: This script defaults to subsetting specific surface data, land-use timeseries, and the CRUJRA2024 DATM data. It can currently only be run as-is on Derecho. If you're not on Derecho, use ``--inputdata-dir`` to specify where the top level of your CESM input data is. - Also, to subset GSWP3 instead of CRUJRA2024 DATM data, you currently need to hardwire ``datm_type = "datm_gswp3"`` (instead of the default ``"datm_crujra"``) in ``python/ctsm/subset_data.py``. + Using ``--create-datm`` with GSWP3 data is no longer supported; see `CTSM issue #3269 `_. diff --git a/doc/source/users_guide/working-with-documentation/building-docs-multiple-versions.rst b/doc/source/users_guide/working-with-documentation/building-docs-multiple-versions.rst index a127103d0e..895dbf2a65 100644 --- a/doc/source/users_guide/working-with-documentation/building-docs-multiple-versions.rst +++ b/doc/source/users_guide/working-with-documentation/building-docs-multiple-versions.rst @@ -7,12 +7,12 @@ There is a menu in the lower left of the webpage that lets readers switch betwee Note that this is not necessary in order for you to contribute an update to the documentation. GitHub will test this automatically when you open a PR. But if you'd like to try, this will generate a local site for you in ``_publish/`` and then open it: -.. literalinclude:: ../../../testing.sh +.. 
literalinclude:: ../../../test/test_container_eq_ctsm_pylib.sh :start-at: ./build_docs_to_publish :end-before: VERSION LINKS WILL NOT RESOLVE - :append: open _publish/index.html + :append: CMD _publish/index.html # where CMD is open for Mac or wslview for Windows (Ubuntu VM) -**Note:** This is not yet supported with Podman on Linux (including Ubuntu VM on Windows). See `doc-builder Issue #27: build_docs_to_publish fails on Linux (maybe just Ubuntu?) with Podman `_. +**Note:** This is not yet supported with Podman on Linux (including Ubuntu VM on Windows). See `doc-builder Issue #27: build_docs_to_publish fails on Linux (maybe just Ubuntu?) with Podman `_. It does work with Docker on Linux, though. How this works diff --git a/doc/source/users_guide/working-with-documentation/building-docs-original-wiki.md b/doc/source/users_guide/working-with-documentation/building-docs-original-wiki.md index 251622b6f0..63acab53a7 100644 --- a/doc/source/users_guide/working-with-documentation/building-docs-original-wiki.md +++ b/doc/source/users_guide/working-with-documentation/building-docs-original-wiki.md @@ -2,7 +2,7 @@ # ⚠️ Original docs documentation from the GitHub Wiki -.. todo:: +.. warning:: ⚠️⚠️⚠️WARNING⚠️⚠️⚠️ The linked page contains documentation that (a) is more complicated than you probably require and (b) has not been fully checked for accuracy with the latest documentation setup. Unless you have a very good reason, you should probably go to :ref:`docs-intro-and-recommended`. 
diff --git a/doc/source/users_guide/working-with-documentation/building-docs-prereqs-windows.md b/doc/source/users_guide/working-with-documentation/building-docs-prereqs-windows.md index ab972cdfc4..ceb701b5cf 100644 --- a/doc/source/users_guide/working-with-documentation/building-docs-prereqs-windows.md +++ b/doc/source/users_guide/working-with-documentation/building-docs-prereqs-windows.md @@ -8,7 +8,17 @@ Note that you may need administrator privileges on your PC (or approval from you ## Install Linux subsystem -We don't support building our documentation in the native Windows command-line environment. Thus, you will need to install a little version of Linux inside a virtual machine (VM) to use instead. +We don't support building our documentation in the native Windows command-line environment. Thus, you will need to install a little version of Linux inside a virtual machine (VM) to use instead. The process for doing this varies depending on how tightly the installation process is controlled on your computer. + +### NCAR computers + +Please follow the [Windows Subsystem for Linux (WSL) setup instructions](https://wiki.ucar.edu/pages/viewpage.action?pageId=514032264&spaceKey=CONFIGMGMT&title=Setup) on the UCAR Wiki. In the step about installing a Linux distribution, choose Ubuntu. + +Feel free to peruse the [overall WSL documentation](https://wiki.ucar.edu/spaces/CONFIGMGMT/pages/514032242/Windows+Subsystem+for+Linux) on and linked from the UCAR Wiki for additional information. + +### Non-NCAR computers + +If your computer is managed by an organization other than NCAR, please check with your IT department or equivalent for instructions on installing Windows Subsystem for Linux (WSL) and Ubuntu. Otherwise, follow these instructions: 1. Download and install Ubuntu from the Microsoft Store. 1. Restart your computer. 
@@ -16,6 +26,8 @@ We don't support building our documentation in the native Windows command-line e If Ubuntu opens in that last step but you see an error, you may need to manually enable Windows Subsystem for Linux (WSL). To do so: Open Control Panel, go to "Programs" > "Programs and Features" > "Turn Windows features on or off". Check the box next to "Windows Subsystem for Linux" and click OK. +Once Ubuntu is working and open, you'll be asked to create a new UNIX username and password. This doesn't have to match your Windows username and password, but do make sure to save this information somewhere secure. + .. _windows-docs-ubuntu-utilities: ## Install utilities @@ -31,9 +43,8 @@ which make || sudo apt-get -y install make which git || sudo apt-get -y install git which git-lfs || sudo apt-get -y install git-lfs -# Chromium: A web browser engine that's the basis for popular browsers like Google -# Chrome and Microsoft Edge -which chromium || sudo apt-get -y install chromium +# WSL utilities, which will give us the wslview command for opening HTML pages in a Windows browser +which wslview || sudo apt-get -y install wslu ``` .. _container-or-conda-windows: @@ -42,42 +53,78 @@ which chromium || sudo apt-get -y install chromium We recommend building the software in what's called a container—basically a tiny little operating system with just some apps and utilities needed by the doc-building process. This is nice because, if we change the doc-building process in ways that require new versions of those apps and utilities, that will be completely invisible to you. You won't need to manually do anything to update your setup to work with the new process; it'll just happen automatically. -We recommend using the container software Podman. +For builds in WSL (Ubuntu), we recommend using the container software Docker. You can install it in Ubuntu like so: -1. Install Podman with `sudo apt-get -y install podman`. -1. 
Set up and start a Podman "virtual machine" with `podman machine init --now`. -1. Test your installation by doing `podman run --rm hello-world`. If it worked, you should see ASCII art of the Podman logo. +```shell +# If needed, download and run the Docker installation script. +# Ignore the message saying "We recommend using Docker Desktop for Windows." +# The script will make you wait 20 seconds to make sure this is what you want, +# and then it should continue automatically. +which docker || curl -fsSL https://get.docker.com -o get-docker.sh +which docker || sudo sh ./get-docker.sh + +# Set up the docker "group," if needed, and add your username to it. +sudo groupadd docker # Create docker group if it doesn't exist +sudo usermod -aG docker $USER # Add your user to the docker group +newgrp docker # Apply the new group membership (avoids needing to log out and back in) + +# Make sure it worked: This should print a "Hello from Docker!" message +docker run hello-world +``` -You may not be able to install Podman or any other containerization software, so there is an alternative method: a Conda environment. +You may not be able to install Docker or any other containerization software, so there is an alternative method: a Conda environment. 1. Check whether you already have Conda installed by doing `which conda`. If that doesn't print anything, [install Miniconda](https://www.anaconda.com/docs/getting-started/miniconda/install#linux). 1. Follow the instructions for setting up the `ctsm_pylib` Conda environment in Sect. :numref:`using-ctsm-pylib`. +.. _editing-text-files-wsl: + +## Editing documentation files +If you prefer using an old-school text editor like `vim`, it's probably already installed in your Ubuntu VM, or can be installed with `sudo apt-get -y install EDITOR_NAME`. If you prefer a more user-friendly interface, there are several options. Note that **all commands in this section are to be run in your Ubuntu VM, not a Windows terminal**. 
-## Set up your permissions -This will make sure that you "own" your home directory in the Ubuntu VM. **In your Ubuntu terminal**, do: +### In a Windows app (recommended) +If you installed `wslview` in the instructions above, you can edit files by doing ```shell -chown -R $USER:$USER $HOME +wslview path/to/file_i_want_to_edit.rst ``` +If not, you can do +```shell +explorer.exe $(wslpath -w path/to/file_i_want_to_edit.rst) +``` +These both do the same thing, but the `wslview` method is simpler. Either way, at least the first time you do this, it will open a window asking which app you'd like to open the file in. Choose whatever you're most comfortable with. At the bottom of the window, you can then choose whether you always want to open HTML files using the selected app or just this once. -.. _editing-text-files-wsl: - -## Editing text files in an Ubuntu VM -If you prefer using an old-school text editor like `vim`, it's probably already installed, or can be installed with `sudo apt-get -y install EDITOR_NAME`. If you prefer a more user-friendly interface, there are several options. - -You may be able to edit files in your Ubuntu VM in the Ubuntu terminal by using the name of the Windows executable. For Notepad, for instance, you would do +You may also be able to open files in Windows apps by using the name of the Windows executable. For Notepad, for instance, you would do ```shell -notepad.exe file_i_want_to_edit.rst +notepad.exe $(wslpath -w path/to/file_i_want_to_edit.rst) ``` -If you use [VS Code](https://code.visualstudio.com/), you can install the [WSL VS Code extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-wsl). Then you can open any file or folder in your Ubuntu VM by doing +If you use [VS Code](https://code.visualstudio.com/), you can install the [WSL VS Code extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-wsl). 
Then (after closing and re-opening Ubuntu) you can open any documentation file **or folder** by doing ```shell code path/to/file-or-folder ``` +### In an Ubuntu app (not recommended) + You can also install a user-friendly text editor in Ubuntu. This may be slower and have unexpected differences in behavior from what you expect from Windows apps, but it does work. For example: - [gedit](https://gedit-text-editor.org/): `sudo apt-get install -y gedit` - [Kate](https://kate-editor.org/): `sudo apt-get install -y kate` - [VS Code](https://code.visualstudio.com/) (if you don't already have it installed on Windows): `sudo snap install code --classic` You can use all of those to open and edit files, but Kate and VS Code let you open entire folders, which can be convenient. In any case, you'd do `EDITOR_NAME path/to/thing/youre/editing` to open it, where `EDITOR_NAME` is `gedit`, `kate`, or `code`, respectively. + +## Troubleshooting + +### "Permission denied" error + +If you get this error, it may be a result of opening Ubuntu as an administrator (e.g., by right-clicking on its icon and choosing "Run as administrator.") Try not doing that, although this will result in you needing to get a new copy of CTSM to work in. + +If that's not feasible or doesn't solve the problem, you may need to remind Linux that you do actually own your files. **In your Ubuntu terminal**, do: +```shell +chown -R $USER:$USER $HOME +``` + +If that also gives a permission error, you may need to put `sudo` at the start of the command. + +### "The host 'wsl$' was not found in the list of allowed hosts" + +You may see this warning in a dialog box after trying to open a file with `wslview`, `explorer.exe`, or something else. Check "Permanently allow host 'wsl$'" and then press "Allow". 
diff --git a/doc/source/users_guide/working-with-documentation/docs-intro-and-recommended.md b/doc/source/users_guide/working-with-documentation/docs-intro-and-recommended.md index 1501f8d48a..bfc537f223 100644 --- a/doc/source/users_guide/working-with-documentation/docs-intro-and-recommended.md +++ b/doc/source/users_guide/working-with-documentation/docs-intro-and-recommended.md @@ -55,8 +55,12 @@ open _build/html/index.html ### Windows (Ubuntu VM) -Assuming you installed Chromium in the :ref:`windows-docs-ubuntu-utilities` setup step, you can open your build of the documentation like so: +Assuming you installed the WSL Utilities in the :ref:`windows-docs-ubuntu-utilities` setup step, you can open your build of the documentation like so: ```shell -chromium _build/html/index.html & +wslview _build/html/index.html ``` -This will generate a lot of warnings in the terminal that seem to be inconsequential to our purpose here. You may need to press Ctrl-C and/or Enter a few times to clear them and return your cursor to the prompt. +If you didn't, you can do +```shell +explorer.exe $(wslpath -w _build/html/index.html) +``` +These both do the same thing, but the `wslview` method is simpler. Either way, at least the first time you do this, it will open a window asking which app you'd like to view the HTML file in. Choose a browser like Microsoft Edge or Chrome. At the bottom of the window, you can then choose whether you always want to open HTML files using the selected app or just this once. 
diff --git a/doc/test/compose_test_cmd.sh b/doc/test/compose_test_cmd.sh new file mode 100755 index 0000000000..2b2fd3cf67 --- /dev/null +++ b/doc/test/compose_test_cmd.sh @@ -0,0 +1,13 @@ +# This should only be run locally within another shell + +if [[ "${cli_tool}" == "" ]]; then + echo "${msg} (no container)" +else + cmd="${cmd} -d" + if [[ "${cli_tool}" != "default" ]]; then + cmd="${cmd} --container-cli-tool ${cli_tool}" + fi + echo "${msg} (container: ${cli_tool})" +fi + +echo "${cmd}" diff --git a/doc/test/test_build_docs_-b.sh b/doc/test/test_build_docs_-b.sh new file mode 100755 index 0000000000..8b49e2f7aa --- /dev/null +++ b/doc/test/test_build_docs_-b.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +cli_tool="$1" + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/.." + +msg="~~~~~ Check that -b works" +cmd="./build_docs -b _build -c" + +. test/compose_test_cmd.sh +set -x +$cmd + +exit 0 diff --git a/doc/test/test_build_docs_-r-v.sh b/doc/test/test_build_docs_-r-v.sh new file mode 100755 index 0000000000..6f9415b563 --- /dev/null +++ b/doc/test/test_build_docs_-r-v.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +cli_tool="$1" + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/.." + +msg="~~~~~ Check that -r -v works" +cmd="./build_docs -r _build -v latest -c --conf-py-path doc-builder/test/conf.py --static-path ../_static --templates-path ../_templates" + +. test/compose_test_cmd.sh +set -x +$cmd + +exit 0 diff --git a/doc/test/test_container_eq_ctsm_pylib.sh b/doc/test/test_container_eq_ctsm_pylib.sh new file mode 100755 index 0000000000..729f1b723e --- /dev/null +++ b/doc/test/test_container_eq_ctsm_pylib.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +# Compare docs built with container vs. 
ctsm_pylib + +cli_tool="$1" + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/.." + +rm -rf _publish* + +# Build all docs using container +echo "~~~~~ Build all docs using container" +# Also do a custom --conf-py-path +rm -rf _build _publish +d1="$PWD/_publish_container" +./build_docs_to_publish -r _build -d --site-root "$PWD/_publish" +# VERSION LINKS WILL NOT RESOLVE IN _publish_container +cp -a _publish "${d1}" + +# Build all docs using ctsm_pylib +echo "~~~~~ Build all docs using ctsm_pylib" +rm -rf _build _publish +d2="$PWD/_publish_nocontainer" +conda run -n ctsm_pylib --no-capture-output ./build_docs_to_publish -r _build --site-root "$PWD/_publish" --conf-py-path doc-builder/test/conf.py --static-path ../_static --templates-path ../_templates +# VERSION LINKS WILL NOT RESOLVE IN _publish_nocontainer +cp -a _publish "${d2}" + +# Make sure container version is identical to no-container version +echo "~~~~~ Make sure container version is identical to no-container version" +diff -qr "${d1}" "${d2}" +echo "Successful: Docs built with container are identical to those built without" + +exit 0 diff --git a/doc/test/test_doc-builder_tests.sh b/doc/test/test_doc-builder_tests.sh new file mode 100755 index 0000000000..07cfa73ea1 --- /dev/null +++ b/doc/test/test_doc-builder_tests.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}" + +echo "~~~~~ Check that doc-builder tests pass" +cd ../doc-builder/test +set -x +conda run --no-capture-output -n ctsm_pylib make test + +exit 0 diff --git a/doc/test/test_makefile_method.sh b/doc/test/test_makefile_method.sh new file mode 100755 index 0000000000..b0fd80984e --- /dev/null +++ b/doc/test/test_makefile_method.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +cli_tool="$1" + +SCRIPT_DIR="$( cd -- "$( 
dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/.." + +echo "~~~~~ Check that Makefile method works" +set -x +make SPHINXOPTS="-W --keep-going" BUILDDIR=${PWD}/_build html + +exit 0 diff --git a/doc/test/testing.sh b/doc/test/testing.sh new file mode 100755 index 0000000000..2e91025e6c --- /dev/null +++ b/doc/test/testing.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/" + +# Compare docs built with container vs. ctsm_pylib +./test_container_eq_ctsm_pylib.sh + +# Check that -r -v works (Docker) +# Also do a custom --conf-py-path and other stuff +cd "${SCRIPT_DIR}/" +rm -rf _build +./test_build_docs_-r-v.sh docker + +# Check that Makefile method works +cd "${SCRIPT_DIR}/" +rm -rf _build +conda run --no-capture-output -n ctsm_pylib ./test_makefile_method.sh + +# Check that -b works +cd "${SCRIPT_DIR}/" +rm -rf _build +./test_build_docs_-b.sh docker + +# Check that doc-builder tests pass +# Don't run if on a GitHub runner; failing 🤷. Trust that doc-builder does this test. 
+if [[ "${GITHUB_ACTIONS}" == "" ]]; then + cd "${SCRIPT_DIR}/" + ./test_doc-builder_tests.sh +fi + +exit 0 diff --git a/doc/testing.sh b/doc/testing.sh deleted file mode 100755 index 9253df848c..0000000000 --- a/doc/testing.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash -set -e -set -x - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -cd "${SCRIPT_DIR}" - -../bin/git-fleximod update -o -rm -rf _publish* - -# Build all docs using container -echo "~~~~~ Build all docs using container" -# Also do a custom --conf-py-path -rm -rf _build _publish -d1="$PWD/_publish_container" -./build_docs_to_publish -r _build -d --site-root "$PWD/_publish" -# VERSION LINKS WILL NOT RESOLVE IN _publish_container -cp -a _publish "${d1}" - -# Build all docs using ctsm_pylib -echo "~~~~~ Build all docs using ctsm_pylib" -rm -rf _build _publish -d2="$PWD/_publish_nocontainer" -conda run -n ctsm_pylib ./build_docs_to_publish -r _build --site-root "$PWD/_publish" --conf-py-path doc-builder/test/conf.py --static-path ../_static --templates-path ../_templates -# VERSION LINKS WILL NOT RESOLVE IN _publish_nocontainer -cp -a _publish "${d2}" - -# Make sure container version is identical to no-container version -echo "~~~~~ Make sure container version is identical to no-container version" -diff -qr "${d1}" "${d2}" - -# Check that -r -v works -echo "~~~~~ Check that -r -v works (Docker)" -# Also do a custom --conf-py-path -rm -rf _build_container -./build_docs -r _build_container -v latest -d -c --conf-py-path doc-builder/test/conf.py --static-path ../_static --templates-path ../_templates --container-cli-tool docker - -# Check that Makefile method works -echo "~~~~~ Check that Makefile method works" -rm -rf _build -conda run -n ctsm_pylib make SPHINXOPTS="-W --keep-going" BUILDDIR=${PWD}/_build html - -# Check that -b works -echo "~~~~~ Check that -b works (Podman)" -rm -rf _build_container -./build_docs -b _build_container -d -c --container-cli-tool docker - -# 
Check that doc-builder tests pass -# Don't run if on a GitHub runner; failing 🤷. Trust that doc-builder does this test. -if [[ "${GITHUB_ACTIONS}" == "" ]]; then - echo "~~~~~ Check that doc-builder tests pass" - cd doc-builder/test - conda run -n ctsm_pylib make test -fi - -exit 0 \ No newline at end of file diff --git a/python/ctsm/longitude.py b/python/ctsm/longitude.py index 8afa731131..fb5998524d 100644 --- a/python/ctsm/longitude.py +++ b/python/ctsm/longitude.py @@ -58,7 +58,7 @@ def _convert_lon_type_180_to_360(lon_in): return lon_out -def _detect_lon_type(lon_in): +def detect_lon_type(lon_in): """ Detect longitude type of a given numeric. If lon_in contains more than one number (as in a list or Numpy array), this function will assume all members are of the same type if (a) there is at diff --git a/python/ctsm/pft_utils.py b/python/ctsm/pft_utils.py new file mode 100644 index 0000000000..40ab8b9f23 --- /dev/null +++ b/python/ctsm/pft_utils.py @@ -0,0 +1,21 @@ +""" +Constants and functions relating to PFTs +""" + +MIN_PFT = 0 # bare ground +MIN_NAT_PFT = 1 # minimum natural pft (not including bare ground) +MAX_NAT_PFT = 14 # maximum natural pft +MAX_PFT_GENERICCROPS = 16 # for runs with generic crops +MAX_PFT_MANAGEDCROPS = 78 # for runs with explicit crops + + +def is_valid_pft(pft_num, managed_crops): + """ + Given a number, check whether it represents a valid PFT (bare ground OK) + """ + if managed_crops: + max_allowed_pft = MAX_PFT_MANAGEDCROPS + else: + max_allowed_pft = MAX_PFT_GENERICCROPS + + return MIN_PFT <= pft_num <= max_allowed_pft diff --git a/python/ctsm/site_and_regional/plumber2_shared.py b/python/ctsm/site_and_regional/plumber2_shared.py new file mode 100644 index 0000000000..d4ab9d00b3 --- /dev/null +++ b/python/ctsm/site_and_regional/plumber2_shared.py @@ -0,0 +1,21 @@ +""" +Things shared between plumber2 scripts +""" + +import os +import pandas as pd +from ctsm.path_utils import path_to_ctsm_root + +PLUMBER2_SITES_CSV = os.path.join( + 
path_to_ctsm_root(), + "tools", + "site_and_regional", + "PLUMBER2_sites.csv", +) + + +def read_plumber2_sites_csv(file=PLUMBER2_SITES_CSV): + """ + Read PLUMBER2_sites.csv using pandas + """ + return pd.read_csv(file, skiprows=5) diff --git a/python/ctsm/site_and_regional/plumber2_surf_wrapper.py b/python/ctsm/site_and_regional/plumber2_surf_wrapper.py index 022914d17e..cedc6b25e0 100755 --- a/python/ctsm/site_and_regional/plumber2_surf_wrapper.py +++ b/python/ctsm/site_and_regional/plumber2_surf_wrapper.py @@ -22,16 +22,18 @@ import argparse import logging -import os -import subprocess +import sys import tqdm -import pandas as pd +# pylint:disable=wrong-import-position +from ctsm.site_and_regional.plumber2_shared import PLUMBER2_SITES_CSV, read_plumber2_sites_csv +from ctsm import subset_data +from ctsm.pft_utils import MAX_PFT_MANAGEDCROPS, is_valid_pft -def get_parser(): +def get_args(): """ - Get parser object for this script. + Get arguments for this script. """ parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter @@ -45,39 +47,44 @@ def get_parser(): help="Verbose mode will print more information. ", action="store_true", dest="verbose", - default=False, ) parser.add_argument( - "--16pft", - help="Create and/or modify 16-PFT surface datasets (e.g. for a FATES run) ", + "--crop", + help=f"Create and/or modify {MAX_PFT_MANAGEDCROPS}-PFT " + "surface datasets (e.g. for a non-FATES run)", action="store_true", - dest="pft_16", - default=True, + dest="use_managed_crops", ) - return parser + parser.add_argument( + "--overwrite", + help="Overwrite any existing files", + action="store_true", + ) + + parser.add_argument( + "--plumber2-sites-csv", + help=f"Comma-separated value (CSV) file with Plumber2 sites. Default: {PLUMBER2_SITES_CSV}", + default=PLUMBER2_SITES_CSV, + ) + + return parser.parse_args() def execute(command): """ - Function for running a command on shell. + Runs subset_data with given arguments. 
Args: - command (str): - command that we want to run. + command (list): + list of args for command that we want to run. Raises: - Error with the return code from shell. + Whatever error subset_data gives, if any. """ print("\n", " >> ", *command, "\n") - try: - subprocess.check_call(command, stdout=open(os.devnull, "w"), stderr=subprocess.STDOUT) - - except subprocess.CalledProcessError as err: - # raise RuntimeError("command '{}' return with error - # (code {}): {}".format(e.cmd, e.returncode, e.output)) - # print (e.ouput) - print(err) + sys.argv = command + subset_data.main() def main(): @@ -85,97 +92,103 @@ def main(): Read plumber2_sites from csv, iterate through sites, and add dominant PFT """ - args = get_parser().parse_args() + args = get_args() if args.verbose: logging.basicConfig(level=logging.DEBUG) - plumber2_sites = pd.read_csv("PLUMBER2_sites.csv", skiprows=4) + plumber2_sites = read_plumber2_sites_csv(args.plumber2_sites_csv) for _, row in tqdm.tqdm(plumber2_sites.iterrows()): lat = row["Lat"] lon = row["Lon"] site = row["Site"] + + clmsite = "1x1_PLUMBER2_" + site + print("Now processing site :", site) + + # Set up part of subset_data command that is shared among all options + subset_command = [ + "./subset_data", + "point", + "--lat", + str(lat), + "--lon", + str(lon), + "--site", + clmsite, + "--create-surface", + "--uniform-snowpack", + "--cap-saturation", + "--lon-type", + "180", + ] + + # Read info for first PFT pft1 = row["pft1"] + if not is_valid_pft(pft1, args.use_managed_crops): + raise RuntimeError(f"pft1 must be a valid PFT; got {pft1}") pctpft1 = row["pft1-%"] cth1 = row["pft1-cth"] cbh1 = row["pft1-cbh"] - pft2 = row["pft2"] - pctpft2 = row["pft2-%"] - cth2 = row["pft2-cth"] - cbh2 = row["pft2-cbh"] - # overwrite missing values from .csv file - if pft1 == -999: - pft1 = 0 - pctpft1 = 0 - cth1 = 0 - cbh1 = 0 - if pft2 == -999: - pft2 = 0 - pctpft2 = 0 - cth2 = 0 - cbh2 = 0 - clmsite = "1x1_PLUMBER2_" + site - print("Now processing site 
:", site) - if args.pft_16: - # use surface dataset with 16 pfts, but overwrite to 100% 1 dominant PFT - # don't set crop flag - # set dominant pft - subset_command = [ - "./subset_data", - "point", - "--lat", - str(lat), - "--lon", - str(lon), - "--site", - clmsite, + # Read info for second PFT, if a valid one is given in the .csv file + pft2 = row["pft2"] + if is_valid_pft(pft2, args.use_managed_crops): + pctpft2 = row["pft2-%"] + cth2 = row["pft2-cth"] + cbh2 = row["pft2-cbh"] + + # Set dominant PFT(s) + if is_valid_pft(pft2, args.use_managed_crops): + subset_command += [ "--dompft", str(pft1), str(pft2), "--pctpft", str(pctpft1), str(pctpft2), - "--cth", - str(cth1), - str(cth2), - "--cbh", - str(cbh1), - str(cbh2), - "--create-surface", - "--uniform-snowpack", - "--cap-saturation", - "--verbose", - "--overwrite", ] else: - # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT - # NOTE: FATES will currently not run with a 78-PFT surface dataset - # set crop flag - # set dominant pft - subset_command = [ - "./subset_data", - "point", - "--lat", - str(lat), - "--lon", - str(lon), - "--site", - clmsite, - "--crop", + subset_command += [ "--dompft", str(pft1), - str(pft2), "--pctpft", str(pctpft1), - str(pctpft2), - "--create-surface", - "--uniform-snowpack", - "--cap-saturation", - "--verbose", - "--overwrite", ] + + if not args.use_managed_crops: + # use surface dataset with 78 pfts, but overwrite to 100% 1 dominant PFT + # don't set crop flag + # set canopy top and bottom heights + if is_valid_pft(pft2, args.use_managed_crops): + subset_command += [ + "--cth", + str(cth1), + str(cth2), + "--cbh", + str(cbh1), + str(cbh2), + ] + else: + subset_command += [ + "--cth", + str(cth1), + "--cbh", + str(cbh1), + ] + else: + # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT + # NOTE: FATES will currently not run with a 78-PFT surface dataset + # set crop flag + subset_command += ["--crop"] + # don't set canopy top and bottom 
heights + + if args.verbose: + subset_command += ["--verbose"] + if args.overwrite: + subset_command += ["--overwrite"] + execute(subset_command) diff --git a/python/ctsm/site_and_regional/plumber2_usermods.py b/python/ctsm/site_and_regional/plumber2_usermods.py index 7b7f294a24..7c8f37b1b5 100644 --- a/python/ctsm/site_and_regional/plumber2_usermods.py +++ b/python/ctsm/site_and_regional/plumber2_usermods.py @@ -13,7 +13,8 @@ import os import tqdm -import pandas as pd +# pylint:disable=wrong-import-position +from ctsm.site_and_regional.plumber2_shared import read_plumber2_sites_csv # Big ugly function to create usermod_dirs for each site @@ -155,7 +156,7 @@ def main(): """ # For now we can just run the 'main' program as a loop - plumber2_sites = pd.read_csv("PLUMBER2_sites.csv", skiprows=4) + plumber2_sites = read_plumber2_sites_csv() for _, row in tqdm.tqdm(plumber2_sites.iterrows()): lat = row["Lat"] diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 94f6011569..ed91f3d474 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -19,7 +19,7 @@ from ctsm.utils import add_tag_to_filename from ctsm.utils import abort from ctsm.config_utils import check_lon1_lt_lon2 -from ctsm.longitude import Longitude, _detect_lon_type +from ctsm.longitude import Longitude, detect_lon_type logger = logging.getLogger(__name__) @@ -142,7 +142,7 @@ def _subset_lon_lat(self, x_dim, y_dim, f_in): # Detect longitude type (180 or 360) of input file, throwing a helpful error if it can't be # determined. 
- f_lon_type = _detect_lon_type(lon) + f_lon_type = detect_lon_type(lon) lon1_type = self.lon1.lon_type() lon2_type = self.lon2.lon_type() if lon1_type != lon2_type: diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index bd16bae226..d71d014f36 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -15,17 +15,11 @@ # -- import local classes for this script from ctsm.site_and_regional.base_case import BaseCase, USRDAT_DIR, DatmFiles from ctsm.utils import add_tag_to_filename, ensure_iterable +from ctsm.longitude import detect_lon_type +from ctsm.pft_utils import MAX_NAT_PFT, MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS logger = logging.getLogger(__name__) -NAT_PFT = 15 # natural pfts -NUM_PFT = 17 # for runs with generic crops -MAX_PFT = 78 # for runs with explicit crops - -# -- constants to represent months of year -FIRST_MONTH = 1 -LAST_MONTH = 12 - class SinglePointCase(BaseCase): """ @@ -151,6 +145,26 @@ def __init__( # self.check_nonveg() self.check_pct_pft() + def convert_plon_to_filetype_if_needed(self, lon_da): + """ + Check that point and input file longitude types are equal. If not, convert point to match + file. + """ + plon_in = self.plon + f_lon_type = detect_lon_type(lon_da) + plon_type = plon_in.lon_type() + if f_lon_type == plon_type: + plon_out = plon_in.get(plon_type) + else: + plon_orig = plon_in.get(plon_type) + plon_out = plon_in.get(f_lon_type) + if plon_orig != plon_out: + print( + f"Converted plon from type {plon_type} (value {plon_orig}) " + f"to type {f_lon_type} (value {plon_out})" + ) + return plon_out + def create_tag(self): """ Create a tag for single point which is the site name @@ -173,20 +187,21 @@ def check_dom_pft(self): same range. e.g. 
If users specified multiple dom_pft, they should be either in : - - 0 - NAT_PFT-1 range + - 0 - MAX_NAT_PFT range or - - NAT_PFT - MAX_PFT range + - MAX_NAT_PFT+1 - MAX_PFT_MANAGEDCROPS range - give an error: mixed land units not possible ------------- Raises: Error (ArgumentTypeError): - If any dom_pft is bigger than MAX_PFT. + If any dom_pft is bigger than MAX_PFT_MANAGEDCROPS. Error (ArgumentTypeError): If any dom_pft is less than 1. Error (ArgumentTypeError): If mixed land units are chosen. - dom_pft values are both in range of (0 - NAT_PFT-1) and (NAT_PFT - MAX_PFT). + dom_pft values are both in range of + (0 - MAX_NAT_PFT) and (MAX_NAT_PFT+1 - MAX_PFT_MANAGEDCROPS). """ @@ -200,27 +215,29 @@ def check_dom_pft(self): min_dom_pft = min(self.dom_pft) max_dom_pft = max(self.dom_pft) - # -- check dom_pft values should be between 0-MAX_PFT - if min_dom_pft < 0 or max_dom_pft > MAX_PFT: - err_msg = "values for --dompft should be between 1 and 78." + # -- check dom_pft values should be between 0-MAX_PFT_MANAGEDCROPS + if min_dom_pft < 0 or max_dom_pft > MAX_PFT_MANAGEDCROPS: + err_msg = f"values for --dompft should be between 1 and {MAX_PFT_MANAGEDCROPS}." raise argparse.ArgumentTypeError(err_msg) # -- check dom_pft vs num_pft if max_dom_pft > self.num_pft: - err_msg = "Please use --crop flag when --dompft is above 16." + err_msg = f"Please use --crop flag when --dompft is above {MAX_PFT_GENERICCROPS}." 
raise argparse.ArgumentTypeError(err_msg) # -- check dom_pft vs MAX_pft - if self.num_pft - 1 < max_dom_pft < NUM_PFT: + if self.num_pft - 1 < max_dom_pft <= MAX_PFT_GENERICCROPS: logger.info( - "WARNING, you trying to run with generic crops (16 PFT surface dataset)" + "WARNING, you are trying to run with generic crops (%s PFT surface dataset)", + MAX_PFT_GENERICCROPS, ) # -- check if all dom_pft are in the same range: - if min_dom_pft < NAT_PFT <= max_dom_pft: + if min_dom_pft <= MAX_NAT_PFT < max_dom_pft: err_msg = ( "You are subsetting using mixed land units that have both " - "natural pfts and crop cfts. Check your surface dataset. " + "natural pfts and crop cfts. Check your surface dataset.\n" + f"{min_dom_pft} <= {MAX_NAT_PFT} < {max_dom_pft}\n" ) raise argparse.ArgumentTypeError(err_msg) @@ -363,8 +380,11 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + # get point longitude, converting to match file type if needed + plon_float = self.convert_plon_to_filetype_if_needed(f_in["lsmlon"]) + # extract gridcell closest to plon/plat - f_out = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") + f_out = f_in.sel(lsmlon=plon_float, lsmlat=self.plat, method="nearest") # expand dimensions f_out = f_out.expand_dims(["lsmlat", "lsmlon"]) @@ -405,7 +425,7 @@ def modify_surfdata_atpoint(self, f_orig): if self.dom_pft is not None: max_dom_pft = max(self.dom_pft) # -- First initialize everything: - if max_dom_pft < NAT_PFT: + if max_dom_pft <= MAX_NAT_PFT: f_mod["PCT_NAT_PFT"][:, :, :] = 0 else: f_mod["PCT_CFT"][:, :, :] = 0 @@ -424,10 +444,10 @@ def modify_surfdata_atpoint(self, f_orig): if cth is not None: f_mod["MONTHLY_HEIGHT_TOP"][:, :, :, dom_pft] = cth f_mod["MONTHLY_HEIGHT_BOT"][:, :, :, dom_pft] = cbh - if dom_pft < NAT_PFT: + if dom_pft <= MAX_NAT_PFT: f_mod["PCT_NAT_PFT"][:, :, dom_pft] = pct_pft else: 
- dom_pft = dom_pft - NAT_PFT + dom_pft = dom_pft - (MAX_NAT_PFT + 1) f_mod["PCT_CFT"][:, :, dom_pft] = pct_pft # ------------------------------- @@ -445,7 +465,7 @@ def modify_surfdata_atpoint(self, f_orig): if self.dom_pft is not None: max_dom_pft = max(self.dom_pft) - if max_dom_pft < NAT_PFT: + if max_dom_pft <= MAX_NAT_PFT: f_mod["PCT_NATVEG"][:, :] = 100 f_mod["PCT_CROP"][:, :] = 0 else: @@ -498,8 +518,11 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir, specify_fsurf_out # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + # get point longitude, converting to match file type if needed + plon_float = self.convert_plon_to_filetype_if_needed(f_in["lsmlon"]) + # extract gridcell closest to plon/plat - f_tmp = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") + f_tmp = f_in.sel(lsmlon=plon_float, lsmlat=self.plat, method="nearest") # expand dimensions f_tmp = f_tmp.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) @@ -525,10 +548,10 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir, specify_fsurf_out # update lsmlat and lsmlon to match site specific instead of the nearest point # we do this so that if we create user_mods the PTS_LON and PTS_LAT in CIME match # the surface data coordinates - which is required - f_out["lsmlon"] = np.atleast_1d(self.plon) + f_out["lsmlon"] = np.atleast_1d(plon_float) f_out["lsmlat"] = np.atleast_1d(self.plat) f_out["LATIXY"][:, :] = self.plat - f_out["LONGXY"][:, :] = self.plon + f_out["LONGXY"][:, :] = plon_float # update attributes self.update_metadata(f_out) @@ -568,8 +591,11 @@ def create_datmdomain_at_point(self, datm_tuple: DatmFiles): # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fdatmdomain_in, "xc", "yc", "ni", "nj") + # get point longitude, converting to match file type if needed + plon_float = self.convert_plon_to_filetype_if_needed(f_in["lon"]) + # extract 
gridcell closest to plon/plat - f_out = f_in.sel(ni=self.plon, nj=self.plat, method="nearest") + f_out = f_in.sel(ni=plon_float, nj=self.plat, method="nearest") # expand dimensions f_out = f_out.expand_dims(["nj", "ni"]) @@ -591,14 +617,17 @@ def extract_datm_at(self, file_in, file_out): # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(file_in, "LONGXY", "LATIXY", "lon", "lat") + # get point longitude, converting to match file type if needed + plon_float = self.convert_plon_to_filetype_if_needed(f_in["lon"]) + # extract gridcell closest to plon/plat - f_out = f_in.sel(lon=self.plon, lat=self.plat, method="nearest") + f_out = f_in.sel(lon=plon_float, lat=self.plat, method="nearest") # expand dimensions f_out = f_out.expand_dims(["lat", "lon"]) # specify dimension order - f_out = f_out.transpose("scalar", "time", "lat", "lon") + f_out = f_out.transpose("time", "lat", "lon") # update attributes self.update_metadata(f_out) @@ -653,46 +682,36 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s tpqwfiles = [] for year in range(datm_syr, datm_eyr + 1): ystr = str(year) - for month in range(FIRST_MONTH, LAST_MONTH + 1): - mstr = str(month) - if month < 10: - mstr = "0" + mstr - - dtag = ystr + "-" + mstr - fsolar = os.path.join( - datm_tuple.indir, - datm_tuple.dir_solar, - "{}{}.nc".format(datm_tuple.tag_solar, dtag), - ) - fsolar2 = "{}{}.{}.nc".format(datm_tuple.tag_solar, self.tag, dtag) - fprecip = os.path.join( - datm_tuple.indir, - datm_tuple.dir_prec, - "{}{}.nc".format(datm_tuple.tag_prec, dtag), - ) - fprecip2 = "{}{}.{}.nc".format(datm_tuple.tag_prec, self.tag, dtag) - ftpqw = os.path.join( - datm_tuple.indir, - datm_tuple.dir_tpqw, - "{}{}.nc".format(datm_tuple.tag_tpqw, dtag), - ) - ftpqw2 = "{}{}.{}.nc".format(datm_tuple.tag_tpqw, self.tag, dtag) - - outdir = os.path.join(self.out_dir, datm_tuple.outdir) - infile += [fsolar, fprecip, ftpqw] - outfile += [ - os.path.join(outdir, fsolar2), - 
os.path.join(outdir, fprecip2), - os.path.join(outdir, ftpqw2), - ] - solarfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fsolar2) - ) - precfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fprecip2) - ) - tpqwfiles.append(os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2)) + fsolar = os.path.join( + datm_tuple.indir, + datm_tuple.dir_solar, + "{}{}.nc".format(datm_tuple.tag_solar, ystr), + ) + fsolar2 = "{}{}.{}.nc".format(datm_tuple.tag_solar, self.tag, ystr) + fprecip = os.path.join( + datm_tuple.indir, + datm_tuple.dir_prec, + "{}{}.nc".format(datm_tuple.tag_prec, ystr), + ) + fprecip2 = "{}{}.{}.nc".format(datm_tuple.tag_prec, self.tag, ystr) + ftpqw = os.path.join( + datm_tuple.indir, + datm_tuple.dir_tpqw, + "{}{}.nc".format(datm_tuple.tag_tpqw, ystr), + ) + ftpqw2 = "{}{}.{}.nc".format(datm_tuple.tag_tpqw, self.tag, ystr) + + outdir = os.path.join(self.out_dir, datm_tuple.outdir) + infile += [fsolar, fprecip, ftpqw] + outfile += [ + os.path.join(outdir, fsolar2), + os.path.join(outdir, fprecip2), + os.path.join(outdir, ftpqw2), + ] + solarfiles.append(os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fsolar2)) + precfiles.append(os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fprecip2)) + tpqwfiles.append(os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2)) for idx, out_f in enumerate(outfile): logger.debug(out_f) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 81f1f703f3..de4e51db9b 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -69,7 +69,8 @@ from ctsm.path_utils import path_to_ctsm_root from ctsm.utils import abort from ctsm.config_utils import check_lon1_lt_lon2 -from ctsm.longitude import Longitude, _detect_lon_type +from ctsm.longitude import Longitude, detect_lon_type +from ctsm.pft_utils import MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS # -- import ctsm logging flags from ctsm.ctsm_logging import 
( @@ -597,9 +598,9 @@ def determine_num_pft(crop): num_pft (int) : number of pfts for surface dataset """ if crop: - num_pft = "78" + num_pft = str(MAX_PFT_MANAGEDCROPS) else: - num_pft = "16" + num_pft = str(MAX_PFT_GENERICCROPS) logger.debug("crop_flag = %s => num_pft = %s", str(crop), num_pft) return num_pft @@ -629,15 +630,24 @@ def setup_files(args, defaults, cesmroot): file_dict = {"main_dir": clmforcingindir} # DATM data - # TODO Issue #2960: Make datm_type a user option at the command - # line. For reference, this option affects three .cfg files: - # tools/site_and_regional/default_data_1850.cfg - # tools/site_and_regional/default_data_2000.cfg - # python/ctsm/test/testinputs/default_data.cfg + # To find the affected files, from the top level of ctsm, do: + # grep "\[datm\]" $(find . -type f -name "*cfg") if args.create_datm: - datm_type = "datm_crujra" # also available: datm_type = "datm_gswp3" + datm_cfg_section = "datm" + + # Issue #3269: Changes in PR #3259 mean that --create-datm won't work with GSWP3 + settings_to_check_for_gswp3 = ["solartag", "prectag", "tpqwtag"] + for setting in settings_to_check_for_gswp3: + value = defaults.get(datm_cfg_section, setting) + if "gswp3" in value.lower(): + msg = ( + "--create-datm is no longer supported for GSWP3 data; " + "see https://github.com/ESCOMP/CTSM/issues/3269" + ) + raise NotImplementedError(msg) + dir_output_datm = "datmdata" - dir_input_datm = os.path.join(clmforcingindir, defaults.get(datm_type, "dir")) + dir_input_datm = os.path.join(clmforcingindir, defaults.get(datm_cfg_section, "dir")) if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): os.mkdir(os.path.join(args.out_dir, dir_output_datm)) logger.info("dir_input_datm : %s", dir_input_datm) @@ -645,16 +655,16 @@ def setup_files(args, defaults, cesmroot): file_dict["datm_tuple"] = DatmFiles( dir_input_datm, dir_output_datm, - defaults.get(datm_type, "domain"), - defaults.get(datm_type, "solardir"), - defaults.get(datm_type, 
"precdir"), - defaults.get(datm_type, "tpqwdir"), - defaults.get(datm_type, "solartag"), - defaults.get(datm_type, "prectag"), - defaults.get(datm_type, "tpqwtag"), - defaults.get(datm_type, "solarname"), - defaults.get(datm_type, "precname"), - defaults.get(datm_type, "tpqwname"), + defaults.get(datm_cfg_section, "domain"), + defaults.get(datm_cfg_section, "solardir"), + defaults.get(datm_cfg_section, "precdir"), + defaults.get(datm_cfg_section, "tpqwdir"), + defaults.get(datm_cfg_section, "solartag"), + defaults.get(datm_cfg_section, "prectag"), + defaults.get(datm_cfg_section, "tpqwtag"), + defaults.get(datm_cfg_section, "solarname"), + defaults.get(datm_cfg_section, "precname"), + defaults.get(datm_cfg_section, "tpqwname"), ) # if the crop flag is on - we need to use a different land use and surface data file @@ -833,10 +843,10 @@ def process_args(args): if any(lon_arg_values): if args.lon_type is None: if hasattr(args, "plon"): - args.lon_type = _detect_lon_type(args.plon) + args.lon_type = detect_lon_type(args.plon) else: - lon1_type = _detect_lon_type(args.lon1) - lon2_type = _detect_lon_type(args.lon2) + lon1_type = detect_lon_type(args.lon1) + lon2_type = detect_lon_type(args.lon2) if lon1_type != lon2_type: raise argparse.ArgumentTypeError( "--lon1 and --lon2 seem to be of different types" diff --git a/python/ctsm/test/test_sys_plumber2_surf_wrapper.py b/python/ctsm/test/test_sys_plumber2_surf_wrapper.py new file mode 100755 index 0000000000..12ca561150 --- /dev/null +++ b/python/ctsm/test/test_sys_plumber2_surf_wrapper.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python3 + +"""System tests for plumber2_surf_wrapper""" + +import os +import unittest +import tempfile +import shutil +import sys + +from ctsm import unit_testing +from ctsm.site_and_regional.plumber2_surf_wrapper import main +from ctsm.site_and_regional.plumber2_shared import read_plumber2_sites_csv +from ctsm.path_utils import path_to_ctsm_root + +# Allow test names that pylint doesn't like; otherwise 
hard to make them +# readable +# pylint: disable=invalid-name + + +class TestSysPlumber2SurfWrapper(unittest.TestCase): + """ + System tests for plumber2_surf_wrapper + """ + + def setUp(self): + """ + Make tempdir for use by these tests. + """ + self._previous_dir = os.getcwd() + self._tempdir = tempfile.mkdtemp() + os.chdir(self._tempdir) # cd to tempdir + + # Path to script + self.tool_path = os.path.join( + path_to_ctsm_root(), + "tools", + "site_and_regional", + "plumber2_surf_wrapper", + ) + + # Path to test inputs directory + self.test_inputs = os.path.join( + os.path.dirname(__file__), "testinputs", "plumber2_surf_wrapper" + ) + + def tearDown(self): + """ + Remove temporary directory + """ + os.chdir(self._previous_dir) + shutil.rmtree(self._tempdir, ignore_errors=True) + + def test_plumber2_surf_wrapper(self): + """ + Run the entire tool with default settings. + CAN ONLY RUN ON SYSTEMS WITH INPUTDATA + """ + + sys.argv = [self.tool_path] + main() + + # How many files do we expect? + plumber2_csv = read_plumber2_sites_csv() + n_files_expected = len(plumber2_csv) + + # How many files did we get? + file_list = os.listdir("subset_data_single_point") + n_files = len(file_list) + + # Check + self.assertEqual(n_files_expected, n_files) + + def test_plumber2_surf_wrapper_78pft(self): + """ + Run the entire tool with --crop. + CAN ONLY RUN ON SYSTEMS WITH INPUTDATA + """ + + sys.argv = [self.tool_path, "--crop"] + main() + + # How many files do we expect? + plumber2_csv = read_plumber2_sites_csv() + n_files_expected = len(plumber2_csv) + + # How many files did we get? 
+ file_list = os.listdir("subset_data_single_point") + n_files = len(file_list) + + # Check + self.assertEqual(n_files_expected, n_files) + + def test_plumber2_surf_wrapper_invalid_pft(self): + """ + plumber2_surf_wrapper should error if invalid PFT is given + """ + + sys.argv = [ + self.tool_path, + "--plumber2-sites-csv", + os.path.join(self.test_inputs, "PLUMBER2_sites_invalid_pft.csv"), + ] + with self.assertRaisesRegex(RuntimeError, "must be a valid PFT"): + main() + + def test_plumber2_surf_wrapper_existing_no_overwrite_fails(self): + """ + plumber2_surf_wrapper should fail if file exists but --overwrite isn't given + """ + + sys_argv_shared = [ + self.tool_path, + "--plumber2-sites-csv", + os.path.join(self.test_inputs, "PLUMBER2_site_valid.csv"), + ] + + # Run twice, expecting second to fail + sys.argv = sys_argv_shared + main() + sys.argv = sys_argv_shared + with self.assertRaisesRegex(SystemExit, "exists"): + main() + + def test_plumber2_surf_wrapper_existing_overwrite_passes(self): + """ + plumber2_surf_wrapper should pass if file exists and --overwrite is given + """ + + sys_argv_shared = [ + self.tool_path, + "--plumber2-sites-csv", + os.path.join(self.test_inputs, "PLUMBER2_site_valid.csv"), + ] + + # Run once to generate the files + sys.argv = sys_argv_shared + main() + + # Run again with --overwrite, expecting pass + sys.argv = sys_argv_shared + ["--overwrite"] + main() + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_sys_subset_data.py b/python/ctsm/test/test_sys_subset_data.py index bc73c8c41d..39d448cccd 100644 --- a/python/ctsm/test/test_sys_subset_data.py +++ b/python/ctsm/test/test_sys_subset_data.py @@ -37,9 +37,13 @@ def tearDown(self): self.temp_dir_out.cleanup() self.temp_dir_umd.cleanup() - def _check_result_file_matches_expected(self, expected_output_files): + def _check_result_file_matches_expected(self, expected_output_files, caller_n): """ Loop through a list of 
output files, making sure they match what we expect. + + caller_n should be an integer giving the number of levels above this function you need to + traverse before you hit the actual test name. If the test is calling this function directly, + caller_n = 1. If the test is calling a function that calls this function, caller_n = 2. Etc. """ all_files_present_and_match = True for basename in expected_output_files: @@ -49,7 +53,7 @@ def _check_result_file_matches_expected(self, expected_output_files): os.path.dirname(__file__), "testinputs", "expected_result_files", - inspect.stack()[1][3], # Name of calling function (i.e., test name) + inspect.stack()[caller_n][3], # Name of calling function (i.e., test name) basename, ) expected_file = find_one_file_matching_pattern(expected_file) @@ -112,7 +116,7 @@ def test_subset_data_reg_amazon(self): f"domain.lnd.5x5pt-amazon_navy_TMP_c{daystr}.nc", f"surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c{daystr}.nc", ] - self.assertTrue(self._check_result_file_matches_expected(expected_output_files)) + self.assertTrue(self._check_result_file_matches_expected(expected_output_files, 1)) def test_subset_data_reg_infile_detect360(self): """ @@ -185,6 +189,180 @@ def test_subset_data_reg_infile_detect180_error(self): ): subset_data.main() + def _do_test_subset_data_pt_surface(self, lon): + """ + Given a longitude, test subset_data point --create-surface + """ + cfg_file = os.path.join( + self.inputdata_dir, + "ctsm", + "test", + "testinputs", + "subset_data_amazon.cfg", + ) + print(cfg_file) + sys.argv = [ + "subset_data", + "point", + "--lat", + "-12", + "--lon", + str(lon), + "--site", + "TMP", + "--create-domain", + "--create-surface", + "--surf-year", + "2000", + "--create-user-mods", + "--outdir", + self.temp_dir_out.name, + "--user-mods-dir", + self.temp_dir_umd.name, + "--inputdata-dir", + self.inputdata_dir, + "--cfg-file", + cfg_file, + "--overwrite", + ] + subset_data.main() + + # Loop through all the output files, making sure they 
match what we expect. + daystr = "[0-9][0-9][0-9][0-9][0-9][0-9]" # 6-digit day code, yymmdd + expected_output_files = [ + f"surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c{daystr}.nc", + ] + self.assertTrue(self._check_result_file_matches_expected(expected_output_files, 2)) + + def test_subset_data_pt_surface_amazon_type360(self): + """ + Test subset_data --create-surface for Amazon point with longitude type 360 + """ + self._do_test_subset_data_pt_surface(291) + + def test_subset_data_pt_surface_amazon_type180(self): + """ + Test subset_data --create-surface for Amazon point with longitude type 180 + """ + self._do_test_subset_data_pt_surface(-69) + + def _do_test_subset_data_pt_landuse(self, lon): + """ + Given a longitude, test subset_data point --create-landuse + """ + cfg_file = os.path.join( + self.inputdata_dir, + "ctsm", + "test", + "testinputs", + "subset_data_amazon_1850.cfg", + ) + print(cfg_file) + sys.argv = [ + "subset_data", + "point", + "--lat", + "-12", + "--lon", + str(lon), + "--site", + "TMP", + "--create-domain", + "--create-surface", + "--surf-year", + "1850", + "--create-landuse", + "--create-user-mods", + "--outdir", + self.temp_dir_out.name, + "--user-mods-dir", + self.temp_dir_umd.name, + "--inputdata-dir", + self.inputdata_dir, + "--cfg-file", + cfg_file, + "--overwrite", + ] + subset_data.main() + + # Loop through all the output files, making sure they match what we expect. 
+ daystr = "[0-9][0-9][0-9][0-9][0-9][0-9]" # 6-digit day code, yymmdd + expected_output_files = [ + f"surfdata_TMP_amazon_hist_1850_78pfts_c{daystr}.nc", + f"landuse.timeseries_TMP_amazon_hist_1850-1853_78pfts_c{daystr}.nc", + ] + self.assertTrue(self._check_result_file_matches_expected(expected_output_files, 2)) + + def test_subset_data_pt_landuse_amazon_type360(self): + """ + Test subset_data --create-landuse for Amazon point with longitude type 360 + """ + self._do_test_subset_data_pt_landuse(291) + + def test_subset_data_pt_landuse_amazon_type180(self): + """ + Test subset_data --create-landuse for Amazon point with longitude type 180 + """ + self._do_test_subset_data_pt_landuse(-69) + + def _do_test_subset_data_pt_datm(self, lon): + """ + Given a longitude, test subset_data point --create-datm + """ + start_year = 1986 + end_year = 1988 + sitename = "TMP" + outdir = self.temp_dir_out.name + sys.argv = [ + "subset_data", + "point", + "--lat", + "-12", + "--lon", + str(lon), + "--site", + sitename, + "--create-datm", + "--datm-syr", + str(start_year), + "--datm-eyr", + str(end_year), + "--create-user-mods", + "--outdir", + outdir, + "--user-mods-dir", + self.temp_dir_umd.name, + "--overwrite", + ] + subset_data.main() + + # Loop through all the output files, making sure they match what we expect. 
+ daystr = "[0-9][0-9][0-9][0-9][0-9][0-9]" # 6-digit day code, yymmdd + expected_output_files = [ + f"domain.crujra_v2.3_0.5x0.5_{sitename}_c{daystr}.nc", + ] + for year in list(range(start_year, end_year + 1)): + for forcing in ["Solr", "Prec", "TPQWL"]: + expected_output_files.append( + f"clmforc.CRUJRAv2.5_0.5x0.5.{forcing}.{sitename}.{year}.nc" + ) + expected_output_files = [os.path.join("datmdata", x) for x in expected_output_files] + self.assertTrue(self._check_result_file_matches_expected(expected_output_files, 2)) + + def test_subset_data_pt_datm_amazon_type360(self): + """ + Test subset_data --create-datm for Amazon point with longitude type 360 + FOR NOW CAN ONLY BE RUN ON DERECHO/CASPER + """ + self._do_test_subset_data_pt_datm(291) + + def test_subset_data_pt_datm_amazon_type180(self): + """ + Test subset_data --create-datm for Amazon point with longitude type 180 + FOR NOW CAN ONLY BE RUN ON DERECHO/CASPER + """ + self._do_test_subset_data_pt_datm(-69) + if __name__ == "__main__": unit_testing.setup_for_tests() diff --git a/python/ctsm/test/test_unit_longitude.py b/python/ctsm/test/test_unit_longitude.py index 6bf7ec53e2..6766f90764 100644 --- a/python/ctsm/test/test_unit_longitude.py +++ b/python/ctsm/test/test_unit_longitude.py @@ -10,7 +10,7 @@ from ctsm.longitude import Longitude from ctsm.longitude import _convert_lon_type_180_to_360, _convert_lon_type_360_to_180 from ctsm.longitude import _check_lon_type_180, _check_lon_type_360 -from ctsm.longitude import _detect_lon_type +from ctsm.longitude import detect_lon_type # Allow test names that pylint doesn't like; otherwise hard to make them # readable @@ -369,57 +369,57 @@ def test_lon_compare_notlon_error(self): def test_detect_lon_type_mid_180(self): """test that detect_lon_type works for an unambiguously 180 value""" - self.assertEqual(_detect_lon_type(-150), 180) + self.assertEqual(detect_lon_type(-150), 180) def test_detect_lon_type_min_180(self): """test that detect_lon_type works at -180""" 
- self.assertEqual(_detect_lon_type(-180), 180) + self.assertEqual(detect_lon_type(-180), 180) def test_detect_lon_type_mid_360(self): """test that detect_lon_type works for an unambiguously 360 value""" - self.assertEqual(_detect_lon_type(355), 360) + self.assertEqual(detect_lon_type(355), 360) def test_detect_lon_type_max_360(self): """test that detect_lon_type works at 360""" - self.assertEqual(_detect_lon_type(360), 360) + self.assertEqual(detect_lon_type(360), 360) def test_detect_lon_type_list_180(self): """test that detect_lon_type works for a list with just one unambiguously 180 value""" - self.assertEqual(_detect_lon_type([-150, 150]), 180) + self.assertEqual(detect_lon_type([-150, 150]), 180) def test_detect_lon_type_list_360(self): """test that detect_lon_type works for a list with just one unambiguously 360 value""" - self.assertEqual(_detect_lon_type([256, 150]), 360) + self.assertEqual(detect_lon_type([256, 150]), 360) def test_detect_lon_type_ambig(self): """test that detect_lon_type fails if ambiguous""" with self.assertRaisesRegex(ArgumentTypeError, r"Longitude\(s\) ambiguous"): - _detect_lon_type(150) + detect_lon_type(150) def test_detect_lon_type_list_ambig(self): """test that detect_lon_type fails for an ambiguous list""" with self.assertRaisesRegex(ArgumentTypeError, r"Longitude\(s\) ambiguous"): - _detect_lon_type([150, 170]) + detect_lon_type([150, 170]) def test_detect_lon_type_list_both(self): """test that detect_lon_type fails for a list with unambiguous members of both types""" with self.assertRaisesRegex(RuntimeError, r"Longitude array contains values of both types"): - _detect_lon_type([-150, 270]) + detect_lon_type([-150, 270]) def test_detect_lon_type_ambig0(self): """test that detect_lon_type fails at 0""" with self.assertRaisesRegex(ArgumentTypeError, r"Longitude\(s\) ambiguous"): - _detect_lon_type(0) + detect_lon_type(0) def test_detect_lon_type_oob_low(self): """test that detect_lon_type fails if out of bounds below min""" with 
self.assertRaisesRegex(ValueError, r"\(Minimum\) longitude < -180"): - _detect_lon_type(-300) + detect_lon_type(-300) def test_detect_lon_type_oob_high(self): """test that detect_lon_type fails if out of bounds above max""" with self.assertRaisesRegex(ValueError, r"\(Maximum\) longitude > 360"): - _detect_lon_type(500) + detect_lon_type(500) def test_list_as_lon(self): """ diff --git a/python/ctsm/test/test_unit_plumber2_surf_wrapper.py b/python/ctsm/test/test_unit_plumber2_surf_wrapper.py index 66f5578caa..4b84752edb 100755 --- a/python/ctsm/test/test_unit_plumber2_surf_wrapper.py +++ b/python/ctsm/test/test_unit_plumber2_surf_wrapper.py @@ -16,7 +16,7 @@ # pylint: disable=wrong-import-position from ctsm import unit_testing -from ctsm.site_and_regional.plumber2_surf_wrapper import get_parser +from ctsm.site_and_regional.plumber2_surf_wrapper import get_args # pylint: disable=invalid-name @@ -26,12 +26,60 @@ class TestPlumber2SurfWrapper(unittest.TestCase): Basic class for testing plumber2_surf_wrapper.py. 
""" - def test_parser(self): + def setUp(self): + sys.argv = ["subset_data"] # Could actually be anything + + def test_parser_default_csv_exists(self): + """ + Test that default PLUMBER2 sites CSV file exists + """ + + args = get_args() + self.assertTrue(os.path.exists(args.plumber2_sites_csv)) + + def test_parser_custom_csv(self): + """ + Test that script accepts custom CSV file path + """ + + custom_path = "path/to/custom.csv" + sys.argv += ["--plumber2-sites-csv", custom_path] + args = get_args() + self.assertEqual(args.plumber2_sites_csv, custom_path) + + def test_parser_verbose_false_default(self): + """ + Test that script is not verbose by default + """ + + args = get_args() + self.assertFalse(args.verbose) + + def test_parser_verbose_true(self): + """ + Test that --verbose sets verbose to True + """ + + sys.argv += ["--verbose"] + args = get_args() + self.assertTrue(args.verbose) + + def test_parser_78pft_false_default(self): + """ + Test that script does not use 78pft mode by default + """ + + args = get_args() + self.assertFalse(args.use_managed_crops) + + def test_parser_78pft_true(self): """ - Test that parser has same defaults as expected + Test that --crop sets use_managed_crops to True """ - self.assertEqual(get_parser().argument_default, None, "Parser not working as expected") + sys.argv += ["--crop"] + args = get_args() + self.assertTrue(args.use_managed_crops) if __name__ == "__main__": diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py index 644af82588..bf29ced331 100755 --- a/python/ctsm/test/test_unit_singlept_data.py +++ b/python/ctsm/test/test_unit_singlept_data.py @@ -18,6 +18,7 @@ # pylint: disable=wrong-import-position from ctsm import unit_testing from ctsm.site_and_regional.single_point_case import SinglePointCase +from ctsm.pft_utils import MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS # pylint: disable=invalid-name @@ -38,7 +39,7 @@ class TestSinglePointCase(unittest.TestCase): dom_pft = 
[8] evenly_split_cropland = False pct_pft = None - num_pft = 16 + num_pft = MAX_PFT_GENERICCROPS cth = [0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9] cbh = [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] include_nonveg = False @@ -131,7 +132,7 @@ def test_check_dom_pft_too_big(self): out_dir=self.out_dir, overwrite=self.overwrite, ) - single_point.dom_pft = [16, 36, 79] + single_point.dom_pft = [MAX_PFT_GENERICCROPS, 36, 79] with self.assertRaisesRegex(argparse.ArgumentTypeError, "values for --dompft should*"): single_point.check_dom_pft() @@ -161,7 +162,7 @@ def test_check_dom_pft_too_small(self): out_dir=self.out_dir, overwrite=self.overwrite, ) - single_point.dom_pft = [16, 36, -1] + single_point.dom_pft = [MAX_PFT_GENERICCROPS, 36, -1] with self.assertRaisesRegex(argparse.ArgumentTypeError, "values for --dompft should*"): single_point.check_dom_pft() @@ -192,7 +193,7 @@ def test_check_dom_pft_numpft(self): overwrite=self.overwrite, ) single_point.dom_pft = [15, 53] - single_point.num_pft = 16 + single_point.num_pft = MAX_PFT_GENERICCROPS with self.assertRaisesRegex(argparse.ArgumentTypeError, "Please use --crop*"): single_point.check_dom_pft() @@ -223,7 +224,7 @@ def test_check_dom_pft_mixed_range(self): overwrite=self.overwrite, ) single_point.dom_pft = [1, 5, 15] - single_point.num_pft = 78 + single_point.num_pft = MAX_PFT_MANAGEDCROPS with self.assertRaisesRegex( argparse.ArgumentTypeError, "You are subsetting using mixed land*" ): diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py index 2106799a4b..d163c29e4f 100755 --- a/python/ctsm/test/test_unit_singlept_data_surfdata.py +++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py @@ -23,6 +23,7 @@ # pylint: disable=wrong-import-position from ctsm import unit_testing from ctsm.site_and_regional.single_point_case import SinglePointCase +from ctsm.pft_utils import MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS # 
pylint: disable=invalid-name # pylint: disable=too-many-lines @@ -46,7 +47,7 @@ class TestSinglePointCaseSurfaceNoCrop(unittest.TestCase): dom_pft = [8] evenly_split_cropland = False pct_pft = None - num_pft = 16 + num_pft = MAX_PFT_GENERICCROPS cth = 0.9 cbh = 0.1 include_nonveg = False @@ -667,7 +668,7 @@ class TestSinglePointCaseSurfaceCrop(unittest.TestCase): dom_pft = [17] evenly_split_cropland = False pct_pft = None - num_pft = 78 + num_pft = MAX_PFT_MANAGEDCROPS cth = 0.9 cbh = 0.1 include_nonveg = False diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py index eeb0a9a38a..c4ce21e959 100755 --- a/python/ctsm/test/test_unit_subset_data.py +++ b/python/ctsm/test/test_unit_subset_data.py @@ -7,6 +7,8 @@ """ import unittest +import tempfile +import shutil import configparser import argparse import os @@ -85,6 +87,18 @@ def setUp(self): self.defaults = configparser.ConfigParser() self.defaults.read(os.path.join(self.cesmroot, "tools/site_and_regional", DEFAULTS_FILE)) + # Work in temporary directory + self._previous_dir = os.getcwd() + self._tempdir = tempfile.mkdtemp() + os.chdir(self._tempdir) # cd to tempdir + + def tearDown(self): + """ + Remove temporary directory + """ + os.chdir(self._previous_dir) + shutil.rmtree(self._tempdir, ignore_errors=True) + def test_inputdata_setup_files_basic(self): """ Test @@ -116,6 +130,23 @@ def test_inputdata_setup_files_inputdata_dne(self): with self.assertRaisesRegex(SystemExit, "inputdata directory does not exist"): setup_files(self.args, self.defaults, self.cesmroot) + def test_inputdata_setup_files_gswp3_error(self): + """ + Test that error is thrown if user tries to --create-datm GSWP3 + """ + cfg_file = os.path.join( + _CTSM_PYTHON, "ctsm", "test", "testinputs", "default_data_gswp3.cfg" + ) + sys.argv = ["subset_data", "point", "--create-datm", "--cfg-file", cfg_file] + self.args = self.parser.parse_args() + self.defaults = configparser.ConfigParser() + 
self.defaults.read(self.args.config_file) + + with self.assertRaisesRegex( + NotImplementedError, "https://github.com/ESCOMP/CTSM/issues/3269" + ): + setup_files(self.args, self.defaults, self.cesmroot) + def test_check_args_nooutput(self): """ Test that check args aborts when no-output is asked for @@ -229,7 +260,7 @@ def test_check_args_outsurfdat_fails_without_overwrite(self): for an existing dataset without the overwrite option """ outfile = os.path.join( - os.getcwd(), + _CTSM_PYTHON, "ctsm/test/testinputs/", "surfdata_1x1_mexicocityMEX_hist_16pfts_CMIP6_2000_c231103.nc", ) diff --git a/python/ctsm/test/testinputs/default_data.cfg b/python/ctsm/test/testinputs/default_data.cfg index a832d810cc..60c012561c 100644 --- a/python/ctsm/test/testinputs/default_data.cfg +++ b/python/ctsm/test/testinputs/default_data.cfg @@ -1,7 +1,7 @@ [main] clmforcingindir = /glade/campaign/cesm/cesmdata/cseg/inputdata -[datm_crujra] +[datm] dir = atm/datm7/atm_forcing.datm7.CRUJRA.0.5d.c20241231/three_stream domain = domain.crujra_v2.3_0.5x0.5.c220801.nc solardir = . @@ -14,19 +14,6 @@ solarname = CLMCRUJRA2024.Solar precname = CLMCRUJRA2024.Precip tpqwname = CLMCRUJRA2024.TPQW -[datm_gswp3] -dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 -domain = domain.lnd.360x720_gswp3.0v1.c170606.nc -solardir = Solar -precdir = Precip -tpqwdir = TPHWL -solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. -prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. -tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. 
-solarname = CLMGSWP3v1.Solar -precname = CLMGSWP3v1.Precip -tpqwname = CLMGSWP3v1.TPQW - [surfdat] dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 surfdat_16pft = surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc diff --git a/python/ctsm/test/testinputs/default_data_gswp3.cfg b/python/ctsm/test/testinputs/default_data_gswp3.cfg new file mode 100644 index 0000000000..09e1463eb2 --- /dev/null +++ b/python/ctsm/test/testinputs/default_data_gswp3.cfg @@ -0,0 +1,30 @@ +[main] +clmforcingindir = /glade/campaign/cesm/cesmdata/cseg/inputdata + +[datm] +dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 +domain = domain.lnd.360x720_gswp3.0v1.c170606.nc +solardir = Solar +precdir = Precip +tpqwdir = TPHWL +solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. +prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. +tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. +solarname = CLMGSWP3v1.Solar +precname = CLMGSWP3v1.Precip +tpqwname = CLMGSWP3v1.TPQW + +[surfdat] +dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 +surfdat_16pft = surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc +surfdat_78pft = surfdata_0.9x1.25_hist_2000_78pfts_c240908.nc +mesh_dir = share/meshes/ +mesh_surf = fv0.9x1.25_141008_ESMFmesh.nc + +[landuse] +dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 +landuse_16pft = landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240908.nc +landuse_78pft = landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240908.nc + +[domain] +file = share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1986.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1986.nc new file mode 100644 index 0000000000..84da04d260 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1986.nc @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:8e1075a199de0d85b974bd9dbd09216e460eda035b3a6652cbfc59b75829e3ee +size 13136 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1987.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1987.nc new file mode 100644 index 0000000000..f05b8eb442 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1987.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21fb1ae2b2e75336e409770988dabd80e9ee69d990e5aa63dc7008c9145a455f +size 13136 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1988.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1988.nc new file mode 100644 index 0000000000..3d521c66f4 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1988.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f8e2624c686c86d5d1071ed618b564e4589731555836083cad0a1e8259b7962e +size 13136 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1986.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1986.nc new file mode 100644 index 0000000000..1d551867f0 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1986.nc @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:8b4164da71cf6bdf351143b936d2ad84da0c943378cb54534ec46f03513e2d17 +size 13144 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1987.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1987.nc new file mode 100644 index 0000000000..b752309969 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1987.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93e9ab5686acc5fb7ddaf775e7f561d572a4fbecab28088b643868432e3d1ed3 +size 13144 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1988.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1988.nc new file mode 100644 index 0000000000..c3c47b61be --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1988.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fbb6d1679040959e540928b7df056a848e9a385441d725f5f84271a07c64889c +size 13144 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1986.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1986.nc new file mode 100644 index 0000000000..9be8249601 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1986.nc @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:7145768c96bdf8b3cbab234b2a09c4506916dbbc8db9fbc73282d643251ed318 +size 37324 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1987.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1987.nc new file mode 100644 index 0000000000..068a7ff28e --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1987.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1e38846646d2514671bd340daa0954bf1981aa328d4923cb42044097bb77f38 +size 37324 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1988.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1988.nc new file mode 100644 index 0000000000..1b7094dbee --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1988.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:395aa495fd3b926521cd355fd2a012cdcd07d19b7a00467fdc49dafbf80751a1 +size 37324 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/domain.crujra_v2.3_0.5x0.5_TMP_c250620.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/domain.crujra_v2.3_0.5x0.5_TMP_c250620.nc new file mode 100644 index 0000000000..c9b19f474b --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/domain.crujra_v2.3_0.5x0.5_TMP_c250620.nc @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:206ba64ca50dbd3b34e93f498eb1f526689e3a6900762f12e30c3af9b75ccb5c +size 2000 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type360 b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type360 new file mode 120000 index 0000000000..88385bbff2 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type360 @@ -0,0 +1 @@ +test_subset_data_pt_datm_amazon_type180 \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/landuse.timeseries_TMP_amazon_hist_1850-1853_78pfts_c250618.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/landuse.timeseries_TMP_amazon_hist_1850-1853_78pfts_c250618.nc new file mode 100644 index 0000000000..d34fdf3acf --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/landuse.timeseries_TMP_amazon_hist_1850-1853_78pfts_c250618.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b063aeb04ed3a0a613608ecf88ac47efb39de7ba74bf6e33a490925540bf47fb +size 18176 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/surfdata_TMP_amazon_hist_1850_78pfts_c250618.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/surfdata_TMP_amazon_hist_1850_78pfts_c250618.nc new file mode 100644 index 0000000000..02999b6b00 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/surfdata_TMP_amazon_hist_1850_78pfts_c250618.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:efbf02729f8741bfdfbd51d748cce31c2d90b0c9ef2f00d841d2940dea5bc144 +size 53256 diff --git 
a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360 b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360 new file mode 120000 index 0000000000..ad4f251586 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360 @@ -0,0 +1 @@ +test_subset_data_pt_landuse_amazon_type180 \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type180/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250617.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type180/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250617.nc new file mode 100644 index 0000000000..6e742560d0 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type180/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250617.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e694ca46925fbe07270b5468fe3899ead98dcc7d41353a6551dcc1ec92a9f9e0 +size 27740 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type360 b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type360 new file mode 120000 index 0000000000..3a7bc5efe3 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type360 @@ -0,0 +1 @@ +test_subset_data_pt_surface_amazon_type180 \ No newline at end of file diff --git a/python/ctsm/test/testinputs/landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc b/python/ctsm/test/testinputs/landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc new file mode 100644 index 0000000000..9e81ad351c --- /dev/null +++ b/python/ctsm/test/testinputs/landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:83b34be6da2047bb9a099346f7f5472b932ead7033fe8ab817540b99ff3117b8 +size 215248 diff --git a/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_site_valid.csv b/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_site_valid.csv new file mode 100644 index 0000000000..2c1580bc03 --- /dev/null +++ b/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_site_valid.csv @@ -0,0 +1,7 @@ +#pftX-cth and pftX-cbh are the site=specific canopy top and bottom heights +#start_year and end_year will be used to define DATM_YR_ALIGH, DATM_YR_START and DATM_YR_END, and STOP_N in units of nyears. +#RUN_STARTDATE and START_TOD are specified because we are starting at GMT corresponding to local midnight. +#ATM_NCPL is specified so that the time step of the model matches the time interval specified by the atm forcing data. +#longitudes must be in the range [-180,180] +,Site,Lat,Lon,pft1,pft1-%,pft1-cth,pft1-cbh,pft2,pft2-%,pft2-cth,pft2-cbh,start_year,end_year,RUN_STARTDATE,START_TOD,ATM_NCPL +27,BE-Lon,50.551590, 4.746130,15,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2005,2014,2004-12-31,82800,48 diff --git a/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_sites_invalid_pft.csv b/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_sites_invalid_pft.csv new file mode 100644 index 0000000000..e8f0eb8fbb --- /dev/null +++ b/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_sites_invalid_pft.csv @@ -0,0 +1,8 @@ +#pftX-cth and pftX-cbh are the site=specific canopy top and bottom heights +#start_year and end_year will be used to define DATM_YR_ALIGH, DATM_YR_START and DATM_YR_END, and STOP_N in units of nyears. +#RUN_STARTDATE and START_TOD are specified because we are starting at GMT corresponding to local midnight. +#ATM_NCPL is specified so that the time step of the model matches the time interval specified by the atm forcing data. 
+#longitudes must be in the range [-180,180] +,Site,Lat,Lon,pft1,pft1-%,pft1-cth,pft1-cbh,pft2,pft2-%,pft2-cth,pft2-cbh,start_year,end_year,RUN_STARTDATE,START_TOD,ATM_NCPL +26,Invalid-Pft,51.309166, 4.520560,-1,19.22,21.00,10.50,7,80.78,21.00,12.08,2004,2014,2003-12-31,82800,48 +27,BE-Lon,50.551590, 4.746130,15,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2005,2014,2004-12-31,82800,48 diff --git a/python/ctsm/test/testinputs/subset_data_amazon_1850.cfg b/python/ctsm/test/testinputs/subset_data_amazon_1850.cfg new file mode 100644 index 0000000000..6b16160f48 --- /dev/null +++ b/python/ctsm/test/testinputs/subset_data_amazon_1850.cfg @@ -0,0 +1,14 @@ +[surfdat] +dir = ctsm/test/testinputs +surfdat_16pft = surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc +surfdat_78pft = surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc +mesh_dir = ctsm/test/testinputs +mesh_surf = ESMF_mesh_5x5pt_amazon_from_domain_c230308.nc + +[landuse] +dir = ctsm/test/testinputs +landuse_16pft = landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc +landuse_78pft = landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc + +[domain] +file = ctsm/test/testinputs/domain.lnd.5x5pt-amazon_navy.090715.nc diff --git a/python/ctsm/test/testinputs/surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc b/python/ctsm/test/testinputs/surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc new file mode 100644 index 0000000000..747c33a2b0 --- /dev/null +++ b/python/ctsm/test/testinputs/surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0795d84b3e07a9437c7e9869810b74002210f7c55349f57983c36db9990db4a +size 893512 diff --git a/python/ctsm/toolchain/gen_mksurfdata_namelist.py b/python/ctsm/toolchain/gen_mksurfdata_namelist.py index 31fcbfe8ff..3a405bf5fa 100755 --- a/python/ctsm/toolchain/gen_mksurfdata_namelist.py +++ b/python/ctsm/toolchain/gen_mksurfdata_namelist.py @@ -15,6 +15,7 @@ from ctsm.path_utils import path_to_ctsm_root, 
path_to_cime from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args +from ctsm.pft_utils import MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS logger = logging.getLogger(__name__) @@ -306,9 +307,9 @@ def main(): # Determine num_pft if nocrop_flag: - num_pft = "16" + num_pft = str(MAX_PFT_GENERICCROPS) else: - num_pft = "78" + num_pft = str(MAX_PFT_MANAGEDCROPS) logger.info("num_pft is %s", num_pft) # Write out if surface dataset will be created diff --git a/tools/site_and_regional/PLUMBER2_sites.csv b/tools/site_and_regional/PLUMBER2_sites.csv index f252fa1d61..1097568051 100644 --- a/tools/site_and_regional/PLUMBER2_sites.csv +++ b/tools/site_and_regional/PLUMBER2_sites.csv @@ -2,6 +2,7 @@ #start_year and end_year will be used to define DATM_YR_ALIGH, DATM_YR_START and DATM_YR_END, and STOP_N in units of nyears. #RUN_STARTDATE and START_TOD are specified because we are starting at GMT corresponding to local midnight. #ATM_NCPL is specified so that the time step of the model matches the time interval specified by the atm forcing data. 
+#longitudes must be in the range [-180,180] ,Site,Lat,Lon,pft1,pft1-%,pft1-cth,pft1-cbh,pft2,pft2-%,pft2-cth,pft2-cbh,start_year,end_year,RUN_STARTDATE,START_TOD,ATM_NCPL 1,AR-SLu,-33.464802,-66.459808,5,50.00, 4.50, 0.13,7,50.00, 4.50, 2.59,2010,2010,2010-01-01,10800,48 2,AT-Neu,47.116669,11.317500,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2002,2012,2001-12-31,82800,48 @@ -73,7 +74,7 @@ 68,DK-Sor,55.485870,11.644640,7,100.00,25.00,14.37,-999,-999.00,-999.00,-999.00,1997,2014,1996-12-31,82800,48 69,DK-ZaH,74.473282,-20.550293,12,100.00, 0.47, 0.01,-999,-999.00,-999.00,-999.00,2000,2013,2000-01-01,0,48 70,ES-ES1,39.345970,-0.318817,1,100.00, 7.50, 3.75,-999,-999.00,-999.00,-999.00,1999,2006,1998-12-31,82800,48 -71,ES-ES2,39.275558,-0.315277,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2005,2006,2004-12-31,82800,48 +71,ES-ES2,39.275558,-0.315277,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2005,2006,2004-12-31,82800,48 72,ES-LgS,37.097935,-2.965820,10,30.00, 0.20, 0.04,13,70.00, 0.50, 0.01,2007,2007,2006-12-31,82800,48 73,ES-LMa,39.941502,-5.773346,7,30.00, 8.00, 4.60,14,70.00, 0.50, 0.01,2004,2006,2003-12-31,82800,48 74,ES-VDA,42.152180, 1.448500,7,30.00, 0.50, 0.29,13,70.00, 0.50, 0.01,2004,2004,2003-12-31,82800,48 @@ -94,7 +95,7 @@ 89,IE-Ca1,52.858791,-6.918152,15,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2004,2006,2004-01-01,0,48 90,IE-Dri,51.986691,-8.751801,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2003,2005,2003-01-01,0,48 91,IT-Amp,41.904099,13.605160,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2003,2006,2002-12-31,82800,48 -92,IT-BCi,40.523800,14.957440,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2005,2010,2004-12-31,82800,48 +92,IT-BCi,40.523800,14.957440,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2005,2010,2004-12-31,82800,48 93,IT-CA1,42.380409,12.026560,7,100.00, 5.50, 3.16,-999,-999.00,-999.00,-999.00,2012,2013,2011-12-31,82800,48 94,IT-CA2,42.377220,12.026040,15,100.00, 0.50, 
0.01,-999,-999.00,-999.00,-999.00,2012,2013,2011-12-31,82800,48 95,IT-CA3,42.380001,12.022200,7,100.00, 3.50, 2.01,-999,-999.00,-999.00,-999.00,2012,2013,2011-12-31,82800,48 @@ -151,8 +152,8 @@ 146,US-MMS,39.323200,-86.413086,7,100.00,27.00,15.52,-999,-999.00,-999.00,-999.00,1999,2014,1999-01-01,18000,24 147,US-MOz,38.744110,-92.200012,7,100.00,24.00,13.80,-999,-999.00,-999.00,-999.00,2005,2006,2005-01-01,21600,48 148,US-Myb,38.049801,-121.765106,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2011,2014,2011-01-01,28800,48 -149,US-Ne1,41.165100,-96.476593,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2002,2012,2002-01-01,21600,24 -150,US-Ne2,41.164902,-96.470093,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2002,2012,2002-01-01,21600,24 +149,US-Ne1,41.165100,-96.476593,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2002,2012,2002-01-01,21600,24 +150,US-Ne2,41.164902,-96.470093,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2002,2012,2002-01-01,21600,24 151,US-Ne3,41.179699,-96.439697,15,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2002,2012,2002-01-01,21600,24 152,US-NR1,40.032902,-105.546402,1,100.00,12.00, 6.00,-999,-999.00,-999.00,-999.00,1999,2014,1999-01-01,25200,48 153,US-PFa,45.945900,-90.272308,1, 8.18,30.00,15.00,7,91.82,30.00,17.25,1995,2014,1995-01-01,21600,24 @@ -165,7 +166,7 @@ 160,US-Syv,46.242001,-89.347717,1, 4.91,27.00,13.50,7,95.09,27.00,15.53,2002,2008,2002-01-01,21600,48 161,US-Ton,38.431599,-120.966003,7,70.00, 7.10, 4.08,14,30.00, 0.50, 0.01,2001,2014,2001-01-01,28800,48 162,US-Tw4,38.103001,-121.641403,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2014,2014,2014-01-01,28800,48 -163,US-Twt,38.108700,-121.653107,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2010,2014,2010-01-01,28800,48 +163,US-Twt,38.108700,-121.653107,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2010,2014,2010-01-01,28800,48 164,US-UMB,45.559799,-84.713806,7,100.00,20.00,11.50,-999,-999.00,-999.00,-999.00,2000,2014,2000-01-01,18000,24 
165,US-Var,38.413300,-120.950729,14,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2001,2014,2001-01-01,28800,48 166,US-WCr,45.805901,-90.079895,7,100.00,24.00,13.80,-999,-999.00,-999.00,-999.00,1999,2006,1999-01-01,21600,48 diff --git a/tools/site_and_regional/default_data_1850.cfg b/tools/site_and_regional/default_data_1850.cfg index 3c9f28c0a2..ce68b1debf 100644 --- a/tools/site_and_regional/default_data_1850.cfg +++ b/tools/site_and_regional/default_data_1850.cfg @@ -1,7 +1,7 @@ [main] clmforcingindir = /glade/campaign/cesm/cesmdata/inputdata -[datm_crujra] +[datm] dir = atm/datm7/atm_forcing.datm7.CRUJRA.0.5d.c20241231/three_stream domain = domain.crujra_v2.3_0.5x0.5.c220801.nc solardir = . @@ -14,19 +14,6 @@ solarname = CLMCRUJRA2024.Solar precname = CLMCRUJRA2024.Precip tpqwname = CLMCRUJRA2024.TPQW -[datm_gswp3] -dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 -domain = domain.lnd.360x720_gswp3.0v1.c170606.nc -solardir = Solar -precdir = Precip -tpqwdir = TPHWL -solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. -prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. -tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. -solarname = CLMGSWP3v1.Solar -precname = CLMGSWP3v1.Precip -tpqwname = CLMGSWP3v1.TPQW - [surfdat] dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 surfdat_78pft = surfdata_0.9x1.25_hist_1850_78pfts_c240908.nc diff --git a/tools/site_and_regional/default_data_2000.cfg b/tools/site_and_regional/default_data_2000.cfg index a832d810cc..60c012561c 100644 --- a/tools/site_and_regional/default_data_2000.cfg +++ b/tools/site_and_regional/default_data_2000.cfg @@ -1,7 +1,7 @@ [main] clmforcingindir = /glade/campaign/cesm/cesmdata/cseg/inputdata -[datm_crujra] +[datm] dir = atm/datm7/atm_forcing.datm7.CRUJRA.0.5d.c20241231/three_stream domain = domain.crujra_v2.3_0.5x0.5.c220801.nc solardir = . 
@@ -14,19 +14,6 @@ solarname = CLMCRUJRA2024.Solar precname = CLMCRUJRA2024.Precip tpqwname = CLMCRUJRA2024.TPQW -[datm_gswp3] -dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 -domain = domain.lnd.360x720_gswp3.0v1.c170606.nc -solardir = Solar -precdir = Precip -tpqwdir = TPHWL -solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. -prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. -tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. -solarname = CLMGSWP3v1.Solar -precname = CLMGSWP3v1.Precip -tpqwname = CLMGSWP3v1.TPQW - [surfdat] dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 surfdat_16pft = surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc