diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 7ea285a6bc..203d7b487a 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -67,3 +67,6 @@ cdf40d265cc82775607a1bf25f5f527bacc97405 3b7a2876933263f8986e4069f5d23bd45635756f 3dd489af7ebe06566e2c6a1c7ade18550f1eb4ba 742cfa606039ab89602fde5fef46458516f56fd4 +4ad46f46de7dde753b4653c15f05326f55116b73 +75db098206b064b8b7b2a0604d3f0bf8fdb950cc +84609494b54ea9732f64add43b2f1dd035632b4c diff --git a/.github/workflows/check-clm6-aliases.sh b/.github/workflows/check-clm6-aliases.sh new file mode 100755 index 0000000000..32778f15d6 --- /dev/null +++ b/.github/workflows/check-clm6-aliases.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +set -e + +# Check that clm6* compset aliases return CLM6* longnames + +# Change to top level of clone +cd "$(git rev-parse --show-toplevel)" + +# Check that query_config can run without error +cime/scripts/query_config --compsets 1>/dev/null + +# Find bad compsets +OLD_IFS=$IFS +IFS='\n' +set +e +# Relies on case sensitivity here: Alias should have Clm6 and longname should have CLM6 +bad_compsets="$(cime/scripts/query_config --compsets | sort | uniq | grep Clm6 | grep -v CLM6)" +set -e +if [[ "${bad_compsets}" != "" ]]; then + echo "One or more compsets with Clm6 alias but not CLM6 longname:" >&2 + echo $bad_compsets >&2 + exit 1 +fi + +exit 0 \ No newline at end of file diff --git a/.github/workflows/check-clm6-aliases.yml b/.github/workflows/check-clm6-aliases.yml new file mode 100644 index 0000000000..46e79d6a55 --- /dev/null +++ b/.github/workflows/check-clm6-aliases.yml @@ -0,0 +1,40 @@ +name: Check that clm6* compset aliases return CLM6* longnames +# Only check files in our repo that AREN'T in submodules +# Use a Python command to check each file because xmllint isn't available on GH runners + +on: + push: + # Run when a change to these files is pushed to any branch. 
Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if the listed files aren't changed. + branches: ['*'] + paths: + - '.github/workflows/check-clm6-aliases.sh' + - 'cime/**' + - 'cime_config/config_compsets.xml' + + pull_request: + # Run on pull requests that change the listed files + paths: + - '.github/workflows/check-clm6-aliases.sh' + - 'cime/**' + - 'cime_config/config_compsets.xml' + + workflow_dispatch: + +jobs: + check-clm6-aliases: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Checkout submodules + run: | + bin/git-fleximod update + + - name: Install xmllint for CIME + run: | + sudo apt-get update && sudo apt-get install --no-install-recommends -y libxml2-utils + + - name: Check aliases + run: | + .github/workflows/check-clm6-aliases.sh diff --git a/.github/workflows/docker-image-build.yml b/.github/workflows/docker-image-build.yml index 0ac43426a6..6d38e12c8b 100644 --- a/.github/workflows/docker-image-build.yml +++ b/.github/workflows/docker-image-build.yml @@ -1,5 +1,5 @@ # Modified from https://docs.github.com/en/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions#publishing-a-package-using-an-action (last accessed 2025-05-09) -name: Test building ctsm-docs Docker image and using it to build the docs +name: Build and test ctsm-docs container # Configures this workflow to run every time a change in the Docker container setup is pushed or included in a PR on: @@ -9,7 +9,6 @@ on: paths: - 'doc/ctsm-docs_container/**' - '!doc/ctsm-docs_container/README.md' - - '.github/workflows/docker-image-ctsm-docs-build.yml' - '.github/workflows/docker-image-common.yml' pull_request: @@ -17,7 +16,6 @@ on: paths: - 'doc/ctsm-docs_container/**' - '!doc/ctsm-docs_container/README.md' - - '.github/workflows/docker-image-ctsm-docs-build.yml' - '.github/workflows/docker-image-common.yml' workflow_dispatch: 
diff --git a/.github/workflows/docker-image-common.yml b/.github/workflows/docker-image-common.yml index d44c14c1f8..3522069132 100644 --- a/.github/workflows/docker-image-common.yml +++ b/.github/workflows/docker-image-common.yml @@ -76,14 +76,16 @@ jobs: tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - # Try building our docs using the new container - - name: Checkout doc-builder external + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules run: | - bin/git-fleximod update doc-builder + bin/git-fleximod update -o + - name: Set image tag for docs build id: set-image-tag run: | echo "IMAGE_TAG=$(echo '${{ steps.meta.outputs.tags }}' | head -n 1 | cut -d',' -f1)" >> $GITHUB_ENV + - name: Build docs using Docker (Podman has trouble on GitHub runners) id: build-docs run: | diff --git a/.github/workflows/docs-build-and-deploy.yml b/.github/workflows/docs-build-and-deploy.yml index 2c928e0ccb..1b0c0cb412 100644 --- a/.github/workflows/docs-build-and-deploy.yml +++ b/.github/workflows/docs-build-and-deploy.yml @@ -6,12 +6,14 @@ on: branches: ['master', 'release-clm5.0'] paths: - 'doc/**' + - '!doc/test/*' - '!doc/*ChangeLog*' - '!doc/*ChangeSum*' - '!doc/UpdateChangelog.pl' # Include all include::ed files outside doc/ directory! 
- 'src/README.unit_testing' - 'tools/README' + - 'doc/test/test_container_eq_ctsm_pylib.sh' # Allows you to run this workflow manually from the Actions tab workflow_dispatch: @@ -46,10 +48,14 @@ jobs: - name: Setup Pages uses: actions/configure-pages@v5 + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules + run: | + bin/git-fleximod update -o + - name: Build docs using container id: build-docs run: | - bin/git-fleximod update -o cd doc ./build_docs_to_publish -d --site-root https://escomp.github.io/CTSM diff --git a/.github/workflows/docs-common.yml b/.github/workflows/docs-common.yml index 6dd8f7d53b..9c9d9f386c 100644 --- a/.github/workflows/docs-common.yml +++ b/.github/workflows/docs-common.yml @@ -26,9 +26,10 @@ jobs: fetch-depth: 0 lfs: true - - name: Checkout doc-builder external + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules run: | - bin/git-fleximod update doc-builder + bin/git-fleximod update -o # Do this if not using conda # Based on https://github.com/actions/cache/blob/main/examples.md#python---pip diff --git a/.github/workflows/docs-omnibus.yml b/.github/workflows/docs-omnibus.yml index 1636150fae..1c73eb8224 100644 --- a/.github/workflows/docs-omnibus.yml +++ b/.github/workflows/docs-omnibus.yml @@ -5,29 +5,23 @@ on: # Run when a change to these files is pushed to any branch. Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if the listed files aren't changed. branches: ['*'] paths: - - 'doc/**' - - '!doc/*ChangeLog*' - - '!doc/*ChangeSum*' - - '!doc/UpdateChangelog.pl' - # Include all include::ed files outside doc/ directory! 
- - 'src/README.unit_testing' - - 'tools/README' + - 'doc/test/*' + - 'doc/Makefile' pull_request: # Run on pull requests that change the listed files paths: - - 'doc/**' - - '!doc/*ChangeLog*' - - '!doc/*ChangeSum*' - - '!doc/UpdateChangelog.pl' - # Include all include::ed files outside doc/ directory! - - 'src/README.unit_testing' - - 'tools/README' + - 'doc/test/*' + - 'doc/Makefile' workflow_dispatch: jobs: build-docs-omnibus-test: + # Don't run on forks, because part(s) of omnibus testing script will look for + # branch(es) that forks may not have. + if: ${{ github.repository == 'ESCOMP/CTSM' }} + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -35,9 +29,10 @@ jobs: fetch-depth: 0 lfs: true - - name: Checkout doc-builder external + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules run: | - bin/git-fleximod update doc-builder + bin/git-fleximod update -o # Set up conda - name: Set up conda environment @@ -48,7 +43,6 @@ jobs: channels: conda-forge auto-activate-base: false - # TODO: Split testing.sh tests into their own steps in this job - name: Text Sphinx builds with omnibus script run: | - cd doc && ./testing.sh + cd doc/test/ && ./testing.sh diff --git a/.github/workflows/docs-ctsm_pylib.yml b/.github/workflows/docs-update-ctsm_pylib.yml similarity index 66% rename from .github/workflows/docs-ctsm_pylib.yml rename to .github/workflows/docs-update-ctsm_pylib.yml index 850f58063f..865f092f92 100644 --- a/.github/workflows/docs-ctsm_pylib.yml +++ b/.github/workflows/docs-update-ctsm_pylib.yml @@ -1,4 +1,4 @@ -name: Test building docs with ctsm_pylib +name: Docs tests to run when ctsm_pylib is updated on: push: @@ -6,13 +6,17 @@ on: branches: ['*'] paths: - 'python/conda_env_ctsm_py.txt' + - 'doc/ctsm-docs_container/requirements.txt' - '.github/workflows/docs-common.yml' + - '.github/workflows/docs-update-dependency-common.yml' pull_request: # Run on pull requests that change the listed 
files paths: - 'python/conda_env_ctsm_py.txt' + - 'doc/ctsm-docs_container/requirements.txt' - '.github/workflows/docs-common.yml' + - '.github/workflows/docs-update-dependency-common.yml' schedule: # 8 am every Monday UTC @@ -25,14 +29,23 @@ permissions: jobs: test-build-docs-ctsm_pylib: if: ${{ always() }} - name: With ctsm_pylib + name: Build with ctsm_pylib uses: ./.github/workflows/docs-common.yml with: use_conda: true conda_env_file: python/conda_env_ctsm_py.yml conda_env_name: ctsm_pylib - # File an issue if the docs build failed during a scheduled run + test-update-dependency: + if: ${{ always() }} + name: Docs dependency update tests + uses: ./.github/workflows/docs-update-dependency-common.yml + + # File an issue if the docs build failed during a scheduled run. + # The main thing we're concerned about in that case is something having + # changed outside the repository that's causing the ctsm_pylib setup to + # fail. Thus, we don't need this job to wait for BOTH the above jobs--- + # if one fails, they both will. file-issue-on-failure: if: | failure() && diff --git a/.github/workflows/docs-update-dependency-common.yml b/.github/workflows/docs-update-dependency-common.yml new file mode 100644 index 0000000000..a64e1a8ad5 --- /dev/null +++ b/.github/workflows/docs-update-dependency-common.yml @@ -0,0 +1,77 @@ +name: Jobs shared by docs workflows that run when a dependency is updated + +on: + workflow_call: + inputs: + # Conda is always needed for both jobs in this workflow. Here, + # we set default values for the variables in case the calling + # workflow doesn't provide them. + conda_env_file: + required: false + type: string + default: "python/conda_env_ctsm_py.yml" + conda_env_name: + required: false + type: string + default: "ctsm_pylib" + secrets: {} + +jobs: + compare-docbuilder-vs-ctsmpylib: + name: Are both methods identical? 
+ + # Don't run on forks, because test_container_eq_ctsm_pylib.sh uses + # build_docs_to_publish, which will look for branch(es) that forks + # may not have + if: ${{ github.repository == 'ESCOMP/CTSM' }} + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + lfs: true + + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules + run: | + bin/git-fleximod update -o + + - name: Set up conda environment + uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: ${{ inputs.conda_env_name }} + environment-file: ${{ inputs.conda_env_file }} + channels: conda-forge + auto-activate-base: false + + - name: Compare docs built with container vs. ctsm_pylib + run: | + cd doc/test/ + ./test_container_eq_ctsm_pylib.sh + + makefile-method: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + lfs: true + + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules + run: | + bin/git-fleximod update -o + + - name: Set up conda environment + uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: ${{ inputs.conda_env_name }} + environment-file: ${{ inputs.conda_env_file }} + channels: conda-forge + auto-activate-base: false + + - name: Check that Makefile method works + run: | + cd doc/test/ + conda run -n ${{ inputs.conda_env_name }} --no-capture-output ./test_makefile_method.sh diff --git a/.github/workflows/docs-update-doc-builder.yml b/.github/workflows/docs-update-doc-builder.yml new file mode 100644 index 0000000000..0756ed94c5 --- /dev/null +++ b/.github/workflows/docs-update-doc-builder.yml @@ -0,0 +1,43 @@ +name: Docs tests to run when doc-builder is updated + +on: + push: + # Run when a change to these files is pushed to any branch. 
Without the "branches:" line, for some reason this will be run whenever a tag is pushed, even if the listed files aren't changed. + branches: ['*'] + paths: + - 'doc/doc-builder' + - '.github/workflows/docs-update-dependency-common.yml' + + pull_request: + # Run on pull requests that change the listed files + paths: + - 'doc/doc-builder' + - '.github/workflows/docs-update-dependency-common.yml' + + workflow_dispatch: + +permissions: + contents: read +jobs: + test-update-dependency: + + name: Tests to run when either docs dependency is updated + uses: ./.github/workflows/docs-update-dependency-common.yml + + test-rv-setup: + + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + lfs: true + + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules + run: | + bin/git-fleximod update -o + + - name: build_docs rv method + run: | + cd doc/test/ && ./test_build_docs_-r-v.sh docker diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 074a674ffe..362818eb90 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -7,6 +7,7 @@ on: branches: ['*'] paths: - 'doc/**' + - '!doc/test/*' - '!doc/*ChangeLog*' - '!doc/*ChangeSum*' - '!doc/UpdateChangelog.pl' @@ -14,11 +15,13 @@ on: # Include all include::ed files outside doc/ directory! - 'src/README.unit_testing' - 'tools/README' + - 'doc/test/test_container_eq_ctsm_pylib.sh' pull_request: # Run on pull requests that change the listed files paths: - 'doc/**' + - '!doc/test/*' - '!doc/*ChangeLog*' - '!doc/*ChangeSum*' - '!doc/UpdateChangelog.pl' @@ -26,6 +29,7 @@ on: # Include all include::ed files outside doc/ directory! 
- 'src/README.unit_testing' - 'tools/README' + - 'doc/test/test_container_eq_ctsm_pylib.sh' workflow_dispatch: @@ -49,9 +53,10 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 - - name: Checkout doc-builder external + # Check out all submodules because we might :literalinclude: something from one + - name: Checkout all submodules run: | - bin/git-fleximod update doc-builder + bin/git-fleximod update -o - name: Build docs using Docker (Podman has trouble on GitHub runners) id: build-docs diff --git a/.github/workflows/formatting_python.yml b/.github/workflows/python-tests.yml similarity index 68% rename from .github/workflows/formatting_python.yml rename to .github/workflows/python-tests.yml index 131e44a7af..a13e594acd 100644 --- a/.github/workflows/formatting_python.yml +++ b/.github/workflows/python-tests.yml @@ -1,4 +1,4 @@ -name: Check Python formatting +name: Run Python tests on: push: @@ -18,7 +18,27 @@ on: - 'cime_config/buildnml/**' jobs: - lint-and-format-check: + python-unit-tests: + runs-on: ubuntu-latest + steps: + # Checkout the code + - uses: actions/checkout@v4 + + # Set up the conda environment + - uses: conda-incubator/setup-miniconda@v3 + with: + activate-environment: ctsm_pylib + environment-file: python/conda_env_ctsm_py.yml + channels: conda-forge + auto-activate-base: false + + # Run Python unit tests check + - name: Run Python unit tests + run: | + cd python + conda run -n ctsm_pylib ./run_ctsm_py_tests -u + + python-lint-and-black: runs-on: ubuntu-latest steps: # Checkout the code diff --git a/.gitmodules b/.gitmodules index 48bc074b92..b6615c41fc 100644 --- a/.gitmodules +++ b/.gitmodules @@ -44,7 +44,7 @@ fxDONOTUSEurl = https://github.com/ESCOMP/CISM-wrapper [submodule "rtm"] path = components/rtm url = https://github.com/ESCOMP/RTM -fxtag = rtm1_0_86 +fxtag = rtm1_0_87 fxrequired = ToplevelRequired # Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed fxDONOTUSEurl = 
https://github.com/ESCOMP/RTM @@ -52,14 +52,14 @@ fxDONOTUSEurl = https://github.com/ESCOMP/RTM [submodule "mosart"] path = components/mosart url = https://github.com/ESCOMP/MOSART -fxtag = mosart1.1.08 +fxtag = mosart1.1.09 fxrequired = ToplevelRequired # Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed fxDONOTUSEurl = https://github.com/ESCOMP/MOSART [submodule "mizuRoute"] -path = components/mizuRoute -url = https://github.com/ESCOMP/mizuRoute + path = components/mizuroute + url = https://github.com/ESCOMP/mizuRoute fxtag = cesm-coupling.n03_v2.2.0 fxrequired = ToplevelRequired # Standard Fork to compare to with "git fleximod test" to ensure personal forks aren't committed diff --git a/README b/README index 99c6d8e9d4..d678aa9771 100644 --- a/README +++ b/README @@ -66,7 +66,7 @@ components/cmeps -------------------- CESM top level driver (for NUOPC driver [w components/cdeps -------------------- CESM top level data model shared code (for NUOPC driver). components/cism --------------------- CESM Community land Ice Sheet Model. components/mosart ------------------- Model for Scale Adaptive River Transport -components/mizuRoute ---------------- Reached based river transport model for water routing +components/mizuroute ---------------- Reached based river transport model for water routing (allows both gridded river and Hydrologic Responce Unit river grids) components/rtm ---------------------- CESM River Transport Model. diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm index e498f275e6..06ea82d99b 100755 --- a/bld/CLMBuildNamelist.pm +++ b/bld/CLMBuildNamelist.pm @@ -4062,7 +4062,16 @@ sub setup_logic_fire_emis { if ( &value_is_true( $nl_flags->{'use_fates'} ) ) { $log->warning("Fire emission option $var can NOT be on when FATES is also on.\n" . " DON'T use the '--fire_emis' option when '--bgc fates' is activated"); - } + } elsif ( ! 
&value_is_true( $nl_flags->{'use_cn'} ) ) { + $log->fatal_error("Fire emission option $var can NOT be on when BGC SP (i.e. Satellite Phenology) is also on.\n" . + " DON'T use the '--fire_emis' option when '--bgc sp' is activated"); + } elsif ( &value_is_true( $nl_flags->{'use_cn'}) ) { + my $fire_method = remove_leading_and_trailing_quotes( $nl->get_value('fire_method') ); + if ( $fire_method eq "nofire" ) { + $log->fatal_error("Fire emission option $var can NOT be on with BGC and fire_method=='nofire'.\n" . + " DON'T use the '--fire_emis' option when fire_method is nofire"); + } + } } } } @@ -4226,7 +4235,7 @@ sub setup_logic_lai_streams { if ( &value_is_true($nl_flags->{'use_crop'}) && &value_is_true($nl->get_value('use_lai_streams')) ) { $log->fatal_error("turning use_lai_streams on is incompatable with use_crop set to true."); } - if ( $nl_flags->{'bgc_mode'} eq "sp" || ($nl_flags->{'bgc_mode'} eq "fates" && &value_is_true($nl->get_value('use_fates_sp')) )) { + if ( $nl_flags->{'bgc_mode'} eq "sp" || ($nl_flags->{'bgc_mode'} eq "fates" && &value_is_true($nl_flags->{'use_fates_sp'}) )) { if ( &value_is_true($nl->get_value('use_lai_streams')) ) { add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_lai_streams'); add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'lai_mapalgo', @@ -4469,6 +4478,10 @@ sub setup_logic_cngeneral { "(eg. don't use these options with SP mode)."); } } + if ( &value_is_true($nl->get_value('reseed_dead_plants')) && + &remove_leading_and_trailing_quotes($nl_flags->{'clm_start_type'}) eq "branch") { + $log->fatal_error("reseed_dead_plants MUST be .false. 
in a branch run"); + } } #------------------------------------------------------------------------------- @@ -4736,29 +4749,26 @@ sub setup_logic_fates { # For FATES SP mode make sure no-competetiion, and fixed-biogeography are also set # And also check for other settings that can't be trigged on as well # - my $var = "use_fates_sp"; - if ( defined($nl->get_value($var)) ) { - if ( &value_is_true($nl->get_value($var)) ) { - my @list = ( "use_fates_nocomp", "use_fates_fixed_biogeog" ); - foreach my $var ( @list ) { - if ( ! &value_is_true($nl->get_value($var)) ) { - $log->fatal_error("$var is required when FATES SP is on (use_fates_sp)" ); - } - } - # spit-fire can't be on with FATES SP mode is active - if ( $nl->get_value('fates_spitfire_mode') > 0 ) { - $log->fatal_error('fates_spitfire_mode can NOT be set to greater than 0 when use_fates_sp is true'); - } + if ( &value_is_true($nl_flags->{'use_fates_sp'}) ) { + my @list = ( "use_fates_nocomp", "use_fates_fixed_biogeog" ); + foreach my $var ( @list ) { + if ( ! 
&value_is_true($nl->get_value($var)) ) { + $log->fatal_error("$var is required when FATES SP is on (use_fates_sp)" ); + } + } + # spit-fire can't be on with FATES SP mode is active + if ( $nl->get_value('fates_spitfire_mode') > 0 ) { + $log->fatal_error('fates_spitfire_mode can NOT be set to greater than 0 when use_fates_sp is true'); + } - # fates landuse can't be on with FATES SP mode is active - if ( &value_is_true($nl->get_value('use_fates_luh')) ) { - $log->fatal_error('use_fates_luh can NOT be true when use_fates_sp is true'); - } + # fates landuse can't be on with FATES SP mode is active + if ( &value_is_true($nl->get_value('use_fates_luh')) ) { + $log->fatal_error('use_fates_luh can NOT be true when use_fates_sp is true'); + } - # hydro isn't currently supported to work when FATES SP mode is active - if (&value_is_true( $nl->get_value('use_fates_planthydro') )) { - $log->fatal_error('fates sp mode is currently not supported to work with fates hydro'); - } + # hydro isn't currently supported to work when FATES SP mode is active + if (&value_is_true( $nl->get_value('use_fates_planthydro') )) { + $log->fatal_error('fates sp mode is currently not supported to work with fates hydro'); } } my $var = "use_fates_inventory_init"; @@ -4783,6 +4793,13 @@ sub setup_logic_fates { } } } + # Check that both FaTES-SP and FATES ST3 aren't both on + my $var = "use_fates_ed_st3"; + if ( defined($nl->get_value($var)) ) { + if ( &value_is_true($nl->get_value($var)) && &value_is_true($nl_flags->{'use_fates_sp'}) ) { + $log->fatal_error("$var can NOT also be true with use_fates_sp true" ); + } + } # check that fates landuse change mode has the necessary luh2 landuse timeseries data # and add the default if not defined. Do not add default if use_fates_potentialveg is true. # If fixed biogeography is on, make sure that flandusepftdat is avilable. 
diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index 59fc9e42dd..50e3cf68ad 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -1128,6 +1128,12 @@ attributes from the config_cache.xml file (with keys converted to upper-case). >hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false. + +hgrid=ne0np4CONUS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. use_excess_ice=.false. + + lnd/clm2/initdata_esmf/ctsm5.4/ctsm53041_54surfdata_snowTherm_100_pSASU.clm2.r.0161-01-01-00000_64bitoffset.nc @@ -1676,6 +1682,8 @@ lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_mpasa480_hist_2000_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_mpasa120_hist_2000_78pfts_c240908.nc + +lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne3np4_hist_2000_78pfts_c240925.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne3np4.pg3_hist_2000_78pfts_c240908.nc @@ -1709,7 +1717,7 @@ lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_4x5_hist_1850_16pfts_c241007.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_360x720cru_hist_1850_78pfts_c240908.nc -lnd/clm2/surfdata_esmf/ctsm5.4.0/surfdata_0.9x1.25_hist_1850_78pfts_c250428.nc +lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_0.9x1.25_hist_1850_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1.9x2.5_hist_1850_78pfts_c240908.nc @@ -1727,9 +1735,11 @@ lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne30np4_hist_1850_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne30np4.pg2_hist_1850_78pfts_c240908.nc -lnd/clm2/surfdata_esmf/ctsm5.4.0/surfdata_ne30np4.pg3_hist_1850_78pfts_c250428.nc +lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne30np4.pg3_hist_1850_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne3np4.pg3_hist_1850_78pfts_c240908.nc + 
+lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne3np4_hist_1850_78pfts_c240925.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_C96_hist_1850_78pfts_c240908.nc @@ -1740,8 +1750,6 @@ lnd/clm2/surfdata_esmf/ctsm5.3.0/synthetic/surfdata_1x1_cidadinhoBR_synth_hist_2 lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_1x1_brazil_hist_1850_78pfts_c240912.nc - -lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne3np4.pg3_hist_1850_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/surfdata_ne16np4.pg3_hist_1850_78pfts_c240908.nc @@ -1787,7 +1795,7 @@ lnd/clm2/surfdata_esmf/NEON/ctsm5.3.0/surfdata_1x1_NEON_TOOL_hist_2000_78pfts_c2 lnd/clm2/surfdata_esmf/ctsm5.4.0/landuse.timeseries_0.9x1.25_hist_1850-2023_78pfts_c250428.nc + >lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_1.9x2.5_SSP2-4.5_1850-2100_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_mpasa120_SSP2-4.5_1850-2100_78pfts_c240908.nc +lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne3np4_SSP2-4.5_1850-2100_78pfts_c240926.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne3np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne16np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.4.0/landuse.timeseries_ne30np4.pg3_hist_1850-2023_78pfts_c250428.nc +>lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne30np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_C96_SSP2-4.5_1850-2100_78pfts_c240908.nc @@ -1858,6 +1868,8 @@ lnd/clm2/surfdata_esmf/NEON/ctsm5.3.0/surfdata_1x1_NEON_TOOL_hist_2000_78pfts_c2 lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_mpasa120_SSP2-4.5_1850-2100_78pfts_c240908.nc +lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne3np4_SSP2-4.5_1850-2100_78pfts_c240926.nc lnd/clm2/surfdata_esmf/ctsm5.3.0/landuse.timeseries_ne3np4.pg3_SSP2-4.5_1850-2100_78pfts_c240908.nc If FALSE (which 
is the default): If an output type cannot be found in the input for initInterp, code aborts -If TRUE: If an output type cannot be found in the input, fill with closest natural veg column +If TRUE: If a non-urban output type cannot be found in the input, fill with closest natural veg column (using bare soil for patch-level variables) NOTE: Natural vegetation and crop landunits always behave as if this were true. e.g., if @@ -3021,6 +3021,14 @@ always fill with the closest natural veg patch / column, regardless of the value flag. So interpolation from non-crop to crop cases can be done without setting this flag. + +If FALSE (which is the default): If an urban output type cannot be found in the input for initInterp, +code aborts +If TRUE: If an urban output type cannot be found in the input, fill with closest urban high density +(HD) landunit + + diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index ab7cb4edf9..effca5ea5c 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -163,10 +163,10 @@ sub cat_and_create_namelistinfile { # # Figure out number of tests that will run # -my $ntests = 3276; +my $ntests = 3285; if ( defined($opts{'compare'}) ) { - $ntests += 1987; + $ntests += 2161; } plan( tests=>$ntests ); @@ -288,7 +288,7 @@ sub cat_and_create_namelistinfile { &make_config_cache($phys); my @mfiles = ( "lnd_in", "drv_flds_in", $tempfile ); my $mfiles = NMLTest::CompFiles->new( $cwd, @mfiles ); -foreach my $options ( "-drydep", "-megan", "-drydep -megan", "-fire_emis", "-drydep -megan -fire_emis" ) { +foreach my $options ( "-drydep --bgc sp", "-megan --bgc sp", "-drydep -megan --bgc bgc", "-fire_emis --bgc bgc", "-drydep -megan -fire_emis --bgc bgc" ) { &make_env_run(); eval{ system( "$bldnml -envxml_dir . 
$options > $tempfile 2>&1 " ); }; is( $@, '', "options: $options" ); @@ -576,8 +576,8 @@ sub cat_and_create_namelistinfile { "--res 1.9x2.5 --bgc bgc --use_case 1850-2100_SSP2-4.5_transient --namelist '&a start_ymd=19101023/'", "-namelist \"&a dust_emis_method='Zender_2003', zender_soil_erod_source='lnd' /'\"", "-bgc bgc -use_case 2000_control -namelist \"&a fire_method='nofire'/\" -crop", - "-res 0.9x1.25 -bgc sp -use_case 1850_noanthro_control -drydep -fire_emis", - "-res 0.9x1.25 -bgc bgc -use_case 1850_noanthro_control -drydep -fire_emis -light_res 360x720", + "-res 0.9x1.25 -bgc sp -use_case 1850_noanthro_control -drydep", + "-res 0.9x1.25 -bgc bgc -use_case 1850_noanthro_control -drydep -fire_emis -megan -light_res 360x720", "--bgc bgc --light_res none --namelist \"&a fire_method='nofire'/\"", "--bgc fates --light_res 360x720 --no-megan --namelist \"&a fates_spitfire_mode=2/\"", "--bgc fates --light_res none --no-megan --namelist \"&a fates_spitfire_mode=1/\"", @@ -678,6 +678,10 @@ sub cat_and_create_namelistinfile { namelst=>"soil_decomp_method='None'", phys=>"clm5_0", }, + "reseed with branch" =>{ options=>"-clm_start_type branch -envxml_dir .", + namelst=>"reseed_dead_plants=.true.", + phys=>"clm6_0", + }, "reseed without CN" =>{ options=>" -envxml_dir . -bgc sp", namelst=>"reseed_dead_plants=.true.", phys=>"clm5_0", @@ -1093,6 +1097,10 @@ sub cat_and_create_namelistinfile { namelst=>"suplnitro='NONE'", phys=>"clm6_0", }, + "FATESwBothSpST3" =>{ options=>"--bgc fates --envxml_dir . --no-megan", + namelst=>"use_fates_sp = TRUE, use_fates_ed_st3 = TRUE", + phys=>"clm6_0", + }, "FireNoneButBGCfireon" =>{ options=>"-bgc bgc -envxml_dir . -light_res none", namelst=>"fire_method='li2021gswpfrc'", phys=>"clm6_0", @@ -1145,6 +1153,14 @@ sub cat_and_create_namelistinfile { namelst=>"", phys=>"clm4_5", }, + "useFIREEMISwithNOFIRE" =>{ options=>"--bgc bgc --envxml_dir . 
--fire_emis", + namelst=>"fire_method='nofire'", + phys=>"clm6_0", + }, + "useFIREEMISwithSP" =>{ options=>"--bgc sp --envxml_dir . --fire_emis", + namelst=>"", + phys=>"clm6_0", + }, "useDRYDEPwithFATES" =>{ options=>"--bgc fates --envxml_dir . --no-megan --drydep", namelst=>"", phys=>"clm4_5", @@ -1494,7 +1510,7 @@ sub cat_and_create_namelistinfile { print "========================================================================\n"; # Check for ALL resolutions with CLM50SP -my @resolutions = ( "360x720cru", "10x15", "4x5", "0.9x1.25", "1.9x2.5", "ne3np4.pg3", "ne16np4.pg3", "ne30np4", "ne30np4.pg2", "ne30np4.pg3", "ne120np4.pg3", "ne0np4CONUS.ne30x8", "ne0np4.ARCTIC.ne30x4", "ne0np4.ARCTICGRIS.ne30x8", "C96", "mpasa480", "mpasa120" ); +my @resolutions = ( "360x720cru", "10x15", "4x5", "0.9x1.25", "1.9x2.5", "ne3np4", "ne3np4.pg3", "ne16np4.pg3", "ne30np4", "ne30np4.pg2", "ne30np4.pg3", "ne120np4.pg3", "ne0np4CONUS.ne30x8", "ne0np4.ARCTIC.ne30x4", "ne0np4.ARCTICGRIS.ne30x8", "C96", "mpasa480", "mpasa120" ); my @only2000_resolutions = ( "1x1_numaIA", "1x1_brazil", "1x1_mexicocityMEX", "1x1_vancouverCAN", "1x1_urbanc_alpha", "5x5_amazon", "0.125nldas2", "mpasa60", "mpasa15", "mpasa3p75" ); my @regional; foreach my $res ( @resolutions ) { @@ -1531,7 +1547,7 @@ sub cat_and_create_namelistinfile { print " Test important resolutions for BGC and historical\n"; print "==================================================\n"; -my @resolutions = ( "4x5", "10x15", "360x720cru", "ne30np4.pg3", "ne3np4.pg3", "1.9x2.5", "0.9x1.25", "C96", "mpasa120" ); +my @resolutions = ( "4x5", "10x15", "360x720cru", "ne30np4.pg3", "ne3np4", "ne3np4.pg3", "1.9x2.5", "0.9x1.25", "C96", "mpasa120" ); my @regional; my $nlbgcmode = "bgc"; my $mode = "$phys-$nlbgcmode"; @@ -1758,7 +1774,7 @@ sub cat_and_create_namelistinfile { &cleanup(); } -my @crop_res = ( "1x1_numaIA", "4x5", "10x15", "0.9x1.25", "1.9x2.5", "ne3np4.pg3", "ne30np4", "ne30np4.pg3", "C96", "mpasa120" ); +my @crop_res = ( 
"1x1_numaIA", "4x5", "10x15", "0.9x1.25", "1.9x2.5", "ne3np4", "ne3np4.pg3", "ne30np4", "ne30np4.pg3", "C96", "mpasa120" ); foreach my $res ( @crop_res ) { $options = "-bgc bgc -crop -res $res -envxml_dir ."; &make_env_run(); @@ -1847,7 +1863,7 @@ sub cat_and_create_namelistinfile { &cleanup(); } # Transient ssp_rcp scenarios that work -my @tran_res = ( "4x5", "0.9x1.25", "1.9x2.5", "10x15", "360x720cru", "ne3np4.pg3", "ne16np4.pg3", "ne30np4.pg3", "C96", "mpasa120" ); +my @tran_res = ( "4x5", "0.9x1.25", "1.9x2.5", "10x15", "360x720cru", "ne3np4", "ne3np4.pg3", "ne16np4.pg3", "ne30np4.pg3", "C96", "mpasa120" ); foreach my $usecase ( "1850-2100_SSP2-4.5_transient" ) { my $startymd = 20150101; foreach my $res ( @tran_res ) { @@ -1884,7 +1900,7 @@ sub cat_and_create_namelistinfile { "-bgc bgc -clm_demand flanduse_timeseries -sim_year 1850-2000 -namelist '&a start_ymd=18500101/'", "-bgc bgc -envxml_dir . -namelist '&a use_c13=.true.,use_c14=.true.,use_c14_bombspike=.true./'" ); foreach my $clmopts ( @clmoptions ) { - my @clmres = ( "10x15", "4x5", "360x720cru", "0.9x1.25", "1.9x2.5", "ne3np4.pg3", "ne16np4.pg3", "ne30np4.pg3", "C96", "mpasa120" ); + my @clmres = ( "10x15", "4x5", "360x720cru", "0.9x1.25", "1.9x2.5", "ne3np4", "ne3np4.pg3", "ne16np4.pg3", "ne30np4.pg3", "C96", "mpasa120" ); foreach my $res ( @clmres ) { $options = "-res $res -envxml_dir . 
"; &make_env_run( ); diff --git a/cime_config/SystemTests/lreprstruct.py b/cime_config/SystemTests/lreprstruct.py index a03fb1815b..baf172fffe 100644 --- a/cime_config/SystemTests/lreprstruct.py +++ b/cime_config/SystemTests/lreprstruct.py @@ -16,6 +16,8 @@ """ +import re + from CIME.SystemTests.system_tests_compare_two import SystemTestsCompareTwo from CIME.XML.standard_module_setup import * from CIME.SystemTests.test_utils.user_nl_utils import append_to_user_nl_files @@ -53,13 +55,16 @@ def _case_one_setup(self): user_nl_clm_path = os.path.join(self._get_caseroot(), "user_nl_clm") with open(user_nl_clm_path) as f: user_nl_clm_text = f.read() - for grain_output in re.findall("GRAIN\w*", user_nl_clm_text): - user_nl_clm_text = user_nl_clm_text.replace( - grain_output, + + def replace_grain(match): + grain_output = match.group() + return ( grain_output.replace("GRAIN", "REPRODUCTIVE1") + "', '" - + grain_output.replace("GRAIN", "REPRODUCTIVE2"), + + grain_output.replace("GRAIN", "REPRODUCTIVE2") ) + + user_nl_clm_text = re.sub(r"GRAIN\w*", replace_grain, user_nl_clm_text) with open(user_nl_clm_path, "w") as f: f.write(user_nl_clm_text) diff --git a/cime_config/SystemTests/rxcropmaturityinst.py b/cime_config/SystemTests/rxcropmaturityinst.py deleted file mode 100644 index bf8bf7750b..0000000000 --- a/cime_config/SystemTests/rxcropmaturityinst.py +++ /dev/null @@ -1,6 +0,0 @@ -from rxcropmaturity import RXCROPMATURITYSHARED - - -class RXCROPMATURITYINST(RXCROPMATURITYSHARED): - def run_phase(self): - self._run_phase(h1_inst=True) diff --git a/cime_config/SystemTests/rxcropmaturityskipgeninst.py b/cime_config/SystemTests/rxcropmaturityskipgeninst.py deleted file mode 100644 index 4cab9bd7c0..0000000000 --- a/cime_config/SystemTests/rxcropmaturityskipgeninst.py +++ /dev/null @@ -1,6 +0,0 @@ -from rxcropmaturity import RXCROPMATURITYSHARED - - -class RXCROPMATURITYSKIPGENINST(RXCROPMATURITYSHARED): - def run_phase(self): - self._run_phase(skip_gen=True, h1_inst=True) 
diff --git a/cime_config/SystemTests/sspmatrixcn.py b/cime_config/SystemTests/sspmatrixcn.py index 17ac8abd74..87c4ab2e80 100644 --- a/cime_config/SystemTests/sspmatrixcn.py +++ b/cime_config/SystemTests/sspmatrixcn.py @@ -14,6 +14,7 @@ """ import shutil, glob, os, sys +from pathlib import Path from datetime import datetime if __name__ == "__main__": @@ -205,9 +206,9 @@ def run_indv(self, nstep, st_archive=True): restdir = os.path.join(rest_r, rundate) os.mkdir(restdir) rpoint = os.path.join(restdir, "rpointer.clm." + rundate) - os.mknod(rpoint) + Path.touch(rpoint) rpoint = os.path.join(restdir, "rpointer.cpl." + rundate) - os.mknod(rpoint) + Path.touch(rpoint) def run_phase(self): "Run phase" diff --git a/cime_config/config_archive.xml b/cime_config/config_archive.xml index c219a1d1ef..3a8272919e 100644 --- a/cime_config/config_archive.xml +++ b/cime_config/config_archive.xml @@ -1,7 +1,8 @@ r - rh\d? + rh\da + rh\di h\d*.*\.nc$ lilac_hi.*\.nc$ lilac_atm_driver_h\d*.*\.nc$ @@ -13,37 +14,19 @@ rpointer.lnd - rpointer.lnd_9999 + rpointer.lnd_9999.1976-01-01-00000 casename.clm2.r.1976-01-01-00000.nc - casename.clm2.rh4.1976-01-01-00000.nc - casename.clm2.h0.1976-01-01-00000.nc + casename.clm2.rh4a.1976-01-01-00000.nc + casename.clm2.rh4i.1976-01-01-00000.nc + casename.clm2.h0a.1976-01-01-00000.nc + casename.clm2.h0i.1976-01-01-00000.nc casename.clm2.lilac_hi.1976-01-01-00000.nc casename.clm2.lilac_atm_driver_h0.0001-01.nc - casename.clm2.h0.1976-01-01-00000.nc.base + casename.clm2.h0a.1976-01-01-00000.nc.base + casename.clm2.h0i.1976-01-01-00000.nc.base casename.clm2_0002.e.postassim.1976-01-01-00000.nc casename.clm2_0002.e.preassim.1976-01-01-00000.nc - anothercasename.clm2.i.1976-01-01-00000.nc - - - - r - rh\d? 
- h\d*.*\.nc$ - e - locfnh - - rpointer.lnd$NINST_STRING - ./$CASE.ctsm$NINST_STRING.r.$DATENAME.nc - - - rpointer.lnd - rpointer.lnd_9999 - casename.ctsm.r.1976-01-01-00000.nc - casename.ctsm.rh4.1976-01-01-00000.nc - casename.ctsm.h0.1976-01-01-00000.nc - casename.ctsm.h0.1976-01-01-00000.nc.base - casename.ctsm_0002.e.postassim.1976-01-01-00000.nc - casename.ctsm_0002.e.preassim.1976-01-01-00000.nc + anothercasename.clm2.r.1976-01-01-00000.nc diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index 689fbcde0d..f869d0e362 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -241,12 +241,24 @@ char - - -bgc sp - -bgc bgc - -bgc bgc -crop - -bgc fates -no-megan - -bgc fates -no-megan + + -bgc sp + -bgc bgc + -bgc bgc -crop + + + --bgc fates --no-megan --no-drydep --no-fire_emis + + + --bgc sp --no-megan --no-drydep --no-fire_emis + --bgc bgc --no-megan --no-drydep --no-fire_emis + --bgc bgc --crop --no-megan --no-drydep --no-fire_emis + -bgc bgc -dynamic_vegetation diff --git a/cime_config/config_compsets.xml b/cime_config/config_compsets.xml index 455c8212c7..56726efd26 100644 --- a/cime_config/config_compsets.xml +++ b/cime_config/config_compsets.xml @@ -461,7 +461,7 @@ ISSP585Clm60BgcCropCrujra - SSP585_DATM%CRUJRA2024_CLM50%BGC-CROP_SICE_SOCN_MOSART_SGLC_SWAV + SSP585_DATM%CRUJRA2024_CLM60%BGC-CROP_SICE_SOCN_MOSART_SGLC_SWAV @@ -480,7 +480,7 @@ ISSP245Clm60BgcCropCrujra - SSP245_DATM%CRUJRA2024_CLM50%BGC-CROP_SICE_SOCN_MOSART_SGLC_SWAV + SSP245_DATM%CRUJRA2024_CLM60%BGC-CROP_SICE_SOCN_MOSART_SGLC_SWAV @@ -489,7 +489,7 @@ ISSP370Clm60BgcCropCrujra - SSP370_DATM%CRUJRA2024_CLM50%BGC-CROP_SICE_SOCN_MOSART_SGLC_SWAV + SSP370_DATM%CRUJRA2024_CLM60%BGC-CROP_SICE_SOCN_MOSART_SGLC_SWAV diff --git a/cime_config/config_pes.xml b/cime_config/config_pes.xml index d39ba06e49..bb10b8019c 100644 --- a/cime_config/config_pes.xml +++ b/cime_config/config_pes.xml @@ -1115,7 +1115,7 @@ - none + default ne120 layout 
for any machine -16 -16 @@ -1148,6 +1148,196 @@ + + + + + + eXtra-Large Derecho ne120 layout + + -1 + -44 + -44 + -44 + -44 + -44 + -44 + -44 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + + Large Derecho ne120 layout + + -1 + -22 + -22 + -22 + -22 + -22 + -22 + -22 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + + Medium Derecho ne120 layout + + -1 + -11 + -11 + -11 + -11 + -11 + -11 + -11 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + + Small Derecho ne120 layout + + -1 + -6 + -6 + -6 + -6 + -6 + -6 + -6 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + + eXtra-Small Derecho ne120 layout + + -1 + -3 + -3 + -3 + -3 + -3 + -3 + -3 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + @@ -1751,7 +1941,7 @@ - none + Derecho mpasa15 layout -1 -36 @@ -1786,6 +1976,123 @@ + + + + + Large Derecho mpasa15 layout + + -1 + -72 + -72 + -72 + -72 + -72 + -72 + -72 + -72 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + + Small Derecho mpasa15 layout + + -1 + -18 + -18 + -18 + -18 + -18 + -18 + -18 + -18 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + + + + + + eXtra-Small Derecho mpasa15 layout + + -1 + -9 + -9 + -9 + -9 + -9 + -9 + -9 + -9 + + + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + + + 0 + -1 + -1 + -1 + -1 + -1 + -1 + -1 + + + + diff --git a/cime_config/config_tests.xml b/cime_config/config_tests.xml index ee80087a08..12859b9131 100644 --- a/cime_config/config_tests.xml +++ b/cime_config/config_tests.xml @@ -145,16 +145,6 @@ This defines various CTSM-specific system tests $STOP_N - - As RXCROPMATURITY but ensure instantaneous h1. Can be removed once instantaneous and other variables are on separate files. 
- 1 - FALSE - FALSE - never - $STOP_OPTION - $STOP_N - - As RXCROPMATURITY but don't actually generate GDDs. Allows short testing with existing GDD inputs. 1 @@ -165,16 +155,6 @@ This defines various CTSM-specific system tests $STOP_N - - As RXCROPMATURITYSKIPGEN but ensure instantaneous h1. Can be removed once instantaneous and other variables are on separate files. - 1 - FALSE - FALSE - never - $STOP_OPTION - $STOP_N - - + + + FAIL + #3311 + Requires finidat with c13/c14 to PASS + + + + + FAIL + #3311 + Requires finidat with c13/c14 to PASS + + + + + FAIL + #3311 + Requires finidat with c13/c14 to PASS + + + + + FAIL + #3311 + Requires finidat with c13/c14 to PASS + + + + + FAIL + #3311 + Requires finidat with c13/c14 to PASS + + + FAIL @@ -76,13 +112,6 @@ Works with finidat = 'ctsm53041_54surfdata_snowTherm_100_pSASU.clm2.r.0161-01-01-00000.nc' and fails with finidat = 'ctsm53041_54surfdata_snowTherm_100_pSASU.clm2.r.0161-01-01-00000_64bitoffset.nc'. - - - FAIL - #3250 - SSP landuse files not available for ctsm54, yet. - - FAIL @@ -278,13 +307,13 @@ - + FAIL - #3097 + FATES#1089 - + FAIL FATES#1089 @@ -332,4 +361,13 @@ + + + + + FAIL + #3316 + + + diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index c67955b16e..41f5d2a4ff 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -13,6 +13,7 @@ rxcropmaturity: Short tests to be run during development related to prescribed crop calendars matrixcn: Tests exercising the matrix-CN capability aux_clm_mpi_serial: aux_clm tests using mpi-serial. Useful for redoing tests that failed due to https://github.com/ESCOMP/CTSM/issues/2916, after having replaced libraries/mpi-serial with a fresh copy. 
+ decomp_init: Initialization tests specifically for examining the PE layout decomposition initialization --> @@ -172,6 +173,16 @@ + + + + + + + + + + @@ -219,7 +230,7 @@ - + @@ -228,7 +239,7 @@ - + @@ -406,7 +417,7 @@ - + @@ -415,8 +426,18 @@ + + + + + + + + + + - + @@ -425,7 +446,7 @@ - + @@ -434,7 +455,7 @@ - + @@ -443,7 +464,7 @@ - + @@ -453,7 +474,7 @@ - + @@ -462,7 +483,7 @@ - + @@ -471,7 +492,7 @@ - + @@ -480,7 +501,7 @@ - + @@ -546,7 +567,7 @@ - + @@ -1261,7 +1282,7 @@ - + @@ -1289,7 +1310,7 @@ - + @@ -1298,7 +1319,7 @@ - + @@ -1307,7 +1328,7 @@ - + @@ -1316,7 +1337,7 @@ - + @@ -1471,6 +1492,8 @@ + + @@ -1758,7 +1781,7 @@ - + @@ -1776,7 +1799,7 @@ - + @@ -1819,6 +1842,7 @@ + @@ -1827,7 +1851,7 @@ - + @@ -1847,7 +1871,7 @@ - + @@ -1856,7 +1880,7 @@ - + @@ -2007,6 +2031,7 @@ + @@ -2318,7 +2343,7 @@ - + @@ -2444,6 +2469,16 @@ + + + + + + + + + + @@ -2451,6 +2486,7 @@ + @@ -2464,6 +2500,17 @@ + + + + + + + + + + + @@ -2494,6 +2541,17 @@ + + + + + + + + + + + @@ -2542,6 +2600,16 @@ + + + + + + + + + + @@ -2648,13 +2716,14 @@ - + + - + @@ -2679,7 +2748,7 @@ - + @@ -2689,6 +2758,7 @@ + @@ -2822,7 +2892,7 @@ - + @@ -2939,6 +3009,16 @@ + + + + + + + + + + @@ -2952,12 +3032,13 @@ + - + @@ -3027,14 +3108,14 @@ - + - + @@ -3073,7 +3154,7 @@ - + @@ -3083,14 +3164,14 @@ - + - + @@ -3102,7 +3183,7 @@ - + @@ -3280,6 +3361,7 @@ + @@ -3553,7 +3635,7 @@ - + @@ -3563,7 +3645,7 @@ - + @@ -3574,7 +3656,7 @@ - + @@ -3947,10 +4029,10 @@ - + - + @@ -3974,6 +4056,7 @@ + @@ -3987,6 +4070,7 @@ + @@ -4079,6 +4163,53 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -4167,17 +4298,6 @@ - - - - - - - - - - - @@ -4190,17 +4310,6 @@ - - - - - - - - - - - @@ -4315,7 +4424,7 @@ - + diff --git a/cime_config/testdefs/testmods_dirs/clm/ExcessIceStartup_output_sp_exice/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/ExcessIceStartup_output_sp_exice/include_user_mods index 142522f5b3..eccf97ff8e 100644 --- 
a/cime_config/testdefs/testmods_dirs/clm/ExcessIceStartup_output_sp_exice/include_user_mods +++ b/cime_config/testdefs/testmods_dirs/clm/ExcessIceStartup_output_sp_exice/include_user_mods @@ -1,2 +1,3 @@ +../nofireemis ../monthly ../../../../usermods_dirs/clm/output_sp_exice diff --git a/cime_config/testdefs/testmods_dirs/clm/ExcessIceStreams/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/ExcessIceStreams/include_user_mods index 1e4ddf5337..bc8c80f140 100644 --- a/cime_config/testdefs/testmods_dirs/clm/ExcessIceStreams/include_user_mods +++ b/cime_config/testdefs/testmods_dirs/clm/ExcessIceStreams/include_user_mods @@ -1,2 +1,2 @@ -../default ../nofireemis +../default \ No newline at end of file diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdSeedDisp/shell_commands b/cime_config/testdefs/testmods_dirs/clm/FatesColdSeedDisp/shell_commands index db5a1f8672..4eb555a0e7 100644 --- a/cime_config/testdefs/testmods_dirs/clm/FatesColdSeedDisp/shell_commands +++ b/cime_config/testdefs/testmods_dirs/clm/FatesColdSeedDisp/shell_commands @@ -9,3 +9,5 @@ $FATESDIR/tools/modify_fates_paramfile.py --O --fin $FATESPARAMFILE --fout $FATE $FATESDIR/tools/modify_fates_paramfile.py --O --fin $FATESPARAMFILE --fout $FATESPARAMFILE --var fates_seed_dispersal_max_dist --val 2500000 --allpfts $FATESDIR/tools/modify_fates_paramfile.py --O --fin $FATESPARAMFILE --fout $FATESPARAMFILE --var fates_seed_dispersal_pdf_scale --val 1e-05 --allpfts $FATESDIR/tools/modify_fates_paramfile.py --O --fin $FATESPARAMFILE --fout $FATESPARAMFILE --var fates_seed_dispersal_pdf_shape --val 0.1 --allpfts + +echo "fates_paramfile = '$FATESPARAMFILE'" >> $CASEDIR/user_nl_clm diff --git a/cime_config/testdefs/testmods_dirs/clm/FatesColdSeedDisp/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/FatesColdSeedDisp/user_nl_clm index ecd1dc8b57..7b736a1511 100644 --- a/cime_config/testdefs/testmods_dirs/clm/FatesColdSeedDisp/user_nl_clm +++ 
b/cime_config/testdefs/testmods_dirs/clm/FatesColdSeedDisp/user_nl_clm @@ -1,3 +1,2 @@ -fates_paramfile = '$CASEROOT/fates_params_seeddisp_4x5.nc' fates_seeddisp_cadence = 1 hist_fincl1 = 'FATES_SEEDS_IN_GRIDCELL_PF', 'FATES_SEEDS_OUT_GRIDCELL_PF' diff --git a/cime_config/testdefs/testmods_dirs/clm/SNICARFRC/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/SNICARFRC/include_user_mods index 1e4ddf5337..bc8c80f140 100644 --- a/cime_config/testdefs/testmods_dirs/clm/SNICARFRC/include_user_mods +++ b/cime_config/testdefs/testmods_dirs/clm/SNICARFRC/include_user_mods @@ -1,2 +1,2 @@ -../default ../nofireemis +../default \ No newline at end of file diff --git a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/include_user_mods index acdaa462fc..821b73c2e0 100644 --- a/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/include_user_mods +++ b/cime_config/testdefs/testmods_dirs/clm/collapse_pfts_78_to_16_decStart_f10/include_user_mods @@ -1 +1,2 @@ +../nofireemis ../decStart diff --git a/cime_config/testdefs/testmods_dirs/clm/crop/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/crop/user_nl_clm index 8ad588381e..56d4696774 100644 --- a/cime_config/testdefs/testmods_dirs/clm/crop/user_nl_clm +++ b/cime_config/testdefs/testmods_dirs/clm/crop/user_nl_clm @@ -17,4 +17,5 @@ hist_fincl3 = 'SDATES', 'SDATES_PERHARV', 'SYEARS_PERHARV', 'HDATES', 'GRAINC_TO hist_nhtfrq = -24,-8,-24 hist_mfilt = 1,1,1 hist_type1d_pertape(3) = 'PFTS' +hist_avgflag_pertape(3) = 'I' hist_dov2xy = .true.,.false.,.false. 
diff --git a/cime_config/testdefs/testmods_dirs/clm/f09_FillMissingW_Urban/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/f09_FillMissingW_Urban/include_user_mods new file mode 100644 index 0000000000..fe0e18cf88 --- /dev/null +++ b/cime_config/testdefs/testmods_dirs/clm/f09_FillMissingW_Urban/include_user_mods @@ -0,0 +1 @@ +../default diff --git a/cime_config/testdefs/testmods_dirs/clm/f09_FillMissingW_Urban/user_nl_clm b/cime_config/testdefs/testmods_dirs/clm/f09_FillMissingW_Urban/user_nl_clm new file mode 100644 index 0000000000..499b6026ea --- /dev/null +++ b/cime_config/testdefs/testmods_dirs/clm/f09_FillMissingW_Urban/user_nl_clm @@ -0,0 +1,6 @@ +! NOTE: Using an initial file that does NOT have TBD on it and 5.4 landuse timeseries dataset that has TBD on it +fsurdat = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.4.0/surfdata_0.9x1.25_hist_1850_78pfts_c250428.nc' +flanduse_timeseries = '$DIN_LOC_ROOT/lnd/clm2/surfdata_esmf/ctsm5.4.0/landuse.timeseries_0.9x1.25_hist_1850-2023_78pfts_c250428.nc' +finidat = '$DIN_LOC_ROOT/lnd/clm2/initdata_esmf/ctsm5.4/ctsm53041_54surfdata_snowTherm_100_pSASU.clm2.r.0161-01-01-00000.nc' +init_interp_fill_missing_urban_with_HD = .true. +use_init_interp = .true. 
diff --git a/cime_config/testdefs/testmods_dirs/clm/o3lombardozzi2015/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/o3lombardozzi2015/include_user_mods index 1e4ddf5337..d3df58a6b3 100644 --- a/cime_config/testdefs/testmods_dirs/clm/o3lombardozzi2015/include_user_mods +++ b/cime_config/testdefs/testmods_dirs/clm/o3lombardozzi2015/include_user_mods @@ -1,2 +1,2 @@ -../default ../nofireemis +../default diff --git a/cime_config/testdefs/testmods_dirs/clm/pauseResume/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/pauseResume/include_user_mods index 1e4ddf5337..d3df58a6b3 100644 --- a/cime_config/testdefs/testmods_dirs/clm/pauseResume/include_user_mods +++ b/cime_config/testdefs/testmods_dirs/clm/pauseResume/include_user_mods @@ -1,2 +1,2 @@ -../default ../nofireemis +../default diff --git a/cime_config/testdefs/testmods_dirs/clm/prescribed/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/prescribed/include_user_mods index 1e4ddf5337..bc8c80f140 100644 --- a/cime_config/testdefs/testmods_dirs/clm/prescribed/include_user_mods +++ b/cime_config/testdefs/testmods_dirs/clm/prescribed/include_user_mods @@ -1,2 +1,2 @@ -../default ../nofireemis +../default \ No newline at end of file diff --git a/cime_config/testdefs/testmods_dirs/clm/pts/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/pts/include_user_mods index 1e4ddf5337..d3df58a6b3 100644 --- a/cime_config/testdefs/testmods_dirs/clm/pts/include_user_mods +++ b/cime_config/testdefs/testmods_dirs/clm/pts/include_user_mods @@ -1,2 +1,2 @@ -../default ../nofireemis +../default diff --git a/cime_config/testdefs/testmods_dirs/clm/run_self_tests/shell_commands b/cime_config/testdefs/testmods_dirs/clm/run_self_tests/shell_commands index d426269206..7762f69e36 100755 --- a/cime_config/testdefs/testmods_dirs/clm/run_self_tests/shell_commands +++ b/cime_config/testdefs/testmods_dirs/clm/run_self_tests/shell_commands @@ -1,5 +1,15 @@ #!/bin/bash ./xmlchange CLM_FORCE_COLDSTART="on" -# We 
use this testmod in a _Ln1 test; this requires forcing the ROF coupling frequency to every time step -./xmlchange ROF_NCPL=48 +# We use this testmod in a _Ln1 test; this requires forcing the ROF coupling frequency to same frequency as DATM +./xmlchange ROF_NCPL='$ATM_NCPL' + +# Turn MEGAN off to run faster +./xmlchange CLM_BLDNML_OPTS='--no-megan' --append + +# Use fast structure and NWP configuration for speed +./xmlchange CLM_STRUCTURE="fast" +./xmlchange CLM_CONFIGURATION="nwp" + +# Turn cpl history off +./xmlchange HIST_OPTION="never" \ No newline at end of file diff --git a/cime_config/testdefs/testmods_dirs/clm/waccmx_offline/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/waccmx_offline/include_user_mods index 1e4ddf5337..d3df58a6b3 100644 --- a/cime_config/testdefs/testmods_dirs/clm/waccmx_offline/include_user_mods +++ b/cime_config/testdefs/testmods_dirs/clm/waccmx_offline/include_user_mods @@ -1,2 +1,2 @@ -../default ../nofireemis +../default diff --git a/components/mizuRoute b/components/mizuroute similarity index 100% rename from components/mizuRoute rename to components/mizuroute diff --git a/components/mosart b/components/mosart index 00a87c9084..c776a802f6 160000 --- a/components/mosart +++ b/components/mosart @@ -1 +1 @@ -Subproject commit 00a87c9084af1af0d2b14d14e3d432f6808681f9 +Subproject commit c776a802f6f3e5ed853d4adfc7a8db6b8fed28ab diff --git a/components/rtm b/components/rtm index 26e96f500b..dd45b884bc 160000 --- a/components/rtm +++ b/components/rtm @@ -1 +1 @@ -Subproject commit 26e96f500b9500b32a870db20eed6b1bd37587ea +Subproject commit dd45b884bc26bf2ec578f2157a808b138f318fb3 diff --git a/doc/ChangeLog b/doc/ChangeLog index 793b324524..d728cf2464 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,783 @@ =============================================================== +Tag name: ctsm5.3.063 +Originator(s): samrabin (Sam Rabin, UCAR/TSS) +Date: Thu Jul 10 12:28:36 MDT 2025 +One-line Summary: Merge b4b-dev to master + 
+Purpose and description of changes +---------------------------------- + +Regular merge of b4b-dev branch to master. See "Bugs fixed" and "Other details" for more information. + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- + +List of CTSM issues fixed: +- [Issue #2985: Fix need/checks for inputdata path in subset_data and Python testing](https://github.com/ESCOMP/CTSM/issues/2985) +- [Issue #2986: Avoid use of os.mknod() in Python testing for portability](https://github.com/ESCOMP/CTSM/issues/2986) +- [Issue #2984: Python unit tests aren't portable](https://github.com/ESCOMP/CTSM/issues/2984) +- [Issue #3279: subset_data still having trouble with Longitude](https://github.com/ESCOMP/CTSM/issues/3279) +- [Issue #2911: Docs: Specify that snow/ice units are liquid water equivalent](https://github.com/ESCOMP/CTSM/issues/2911) +- [Issue #3312: Add some PE layout test sizes for some resolutions to facilitate testing decompInit time testing for different problem sizes / task counts](https://github.com/ESCOMP/CTSM/issues/3312) +- [Issue #3313: Hist fields REPRODUCTIVE1N_TO_FOOD_PERHARV and _ANN lose their suffixes in LREPR* tests](https://github.com/ESCOMP/CTSM/issues/3313) +- [Issue #3110: Initialization of historical using CTSM5.4 surface datasets fails](https://github.com/ESCOMP/CTSM/issues/3110) + + +Notes of particular relevance for users +--------------------------------------- + +Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): +- New init_interp_fill_missing_urban_with_HD option (default `.false.`). 
See [Pull Request #3132: Fix #3110 (Initialization of historical using CTSM5.4 surface datasets fails) by olyson](https://github.com/ESCOMP/CTSM/pull/3132). + +Changes to documentation: +- Tech Note now specifies that snow/ice units are liquid water equivalent. + + +Notes of particular relevance for developers: +--------------------------------------------- + +Changes to tests or testing: +- Adds SMS_D_Ld10.f09_f09_mt232.IHistClm60BgcCrop.derecho_intel.clm-f09_FillMissingW_Urban test to aux_clm +- Adds various tests to new decomp_init test suite ("Initialization tests specifically for examining the PE layout decomposition initialization") +- Adds various Python unit and system tests + + +Testing summary: +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + derecho - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + + fates tests: (give name of baseline if different from CTSM tagname, normally fates baselines are fates--) + * Will run fates suite after aux_clm, just to generate fates-sci.1.84.0_api.40.0.0-ctsm5.3.063 baseline. Will not compare against any baseline or even check for errors. 
+ + +Other details +------------- + +Pull Requests that document the changes: +- [Pull Request #3238: Add GitHub workflow for Python unit tests by samsrabin](https://github.com/ESCOMP/CTSM/pull/3238) +- [Pull Request #3286: subset_data: Fix conversion of Longitude to string by samsrabin](https://github.com/ESCOMP/CTSM/pull/3286) +- [Pull Request #3247: add notes to specify snow/ice units are liquid water equivalent by sy-li](https://github.com/ESCOMP/CTSM/pull/3247) +- [Pull Request #3315: Add decomp_init testlist and some extra PE layouts for some grids by ekluzek](https://github.com/ESCOMP/CTSM/pull/3315) +- [Pull Request #3314: Fix string replacements in lreprstruct test by billsacks](https://github.com/ESCOMP/CTSM/pull/3314) +- [Pull Request #3132: Fix #3110 (Initialization of historical using CTSM5.4 surface datasets fails) by olyson](https://github.com/ESCOMP/CTSM/pull/3132) +- [Pull Request #3240: tips-for-working-with-rst.md: Add common errors, cheatsheet links. by samsrabin](https://github.com/ESCOMP/CTSM/pull/3240) + +=============================================================== +=============================================================== +Tag name: ctsm5.3.062 +Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310) +Date: Wed 09 Jul 2025 09:03:55 AM MDT +One-line Summary: Put instantaneous and non-inst. fields on separate hist files + +Purpose and description of changes +---------------------------------- + Following ctsm5.3.018 "Change history time to be the middle of the time bounds" + the current change intends to prevent confusion associated with the time corresponding to instantaneous history fields by putting them on separate files than non-instantaneous fields. The result is + + 1) two history files per clm, mosart, and rtm history tape: + tape h0 becomes h0a and h0i + tape h1 becomes h1a and h1i + ... 
+ tape hX becomes hXa and hXi + + 2) two history restart files per history restart tape: + rh0 becomes rh0a and rh0i + rh1 becomes rh1a and rh1i + ... + rhX becomes rhXa and rhXi + + The clm handles empty history (and corresponding history restart) files by not generating them, while rtm and mosart give an error. Instead of refactoring rtm and mosart to behave like the clm (considered out of scope), I have introduced one active instantaneous field in mosart and one in rtm to bypass the "empty file" error. + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- +List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: + Resolves #1059 Don't allow inst. fields and averaged fields to be on the same history file + Resolves ESCOMP/RTM#32 + Resolves ESCOMP/MOSART#52 + +Notes of particular relevance for users +--------------------------------------- +Caveats for users (e.g., need to interpolate initial conditions): + History tapes have new extensions, e.g. h0 becomes h0a and h0i + History restart tapes have new extensions, e.g. rh0 becomes rh0a and rh0i + +Changes to documentation: + Not, yet: + - clm documentation + - Adam Phillips' cmip documentation + + +Testing summary: +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] 
+ + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - PASS + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + derecho - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + + fates tests: (-c fates-sci.1.84.0_api.40.0.0-ctsm5.3.061 -g fates-sci.1.84.0_api.40.0.0-ctsm5.3.062) + derecho ----- OK + izumi ------- OK + + any other testing (give details below): + + ctsm_sci (-c ctsm_sci-ctsm5.3.059 -g ctsm_sci-ctsm5.3.062) + derecho ---- FAIL and I will open an issue and mark EXPECTED FAILURE as the problem originates in ctsm5.3.060 + + mosart tests: (-c mosart1.1.08-ctsm5.3.061 -g mosart1.1.09-ctsm5.3.062) + derecho ----- OK + izumi ------- OK + + rtm tests: (-c rtm1_0_86-ctsm5.3.061 -g rtm1_0_87-ctsm5.3.062) + derecho ----- OK + izumi ------- OK + + crop_calendars tests: (tested while in ctsm5.3.058 and again in ctsm5.3.061) + derecho ----- OK + izumi ------- OK + + ssp tests: (tested while in ctsm5.3.058 and again in ctsm5.3.061) + derecho ----- OK + + hillslope tests: (tested while in ctsm5.3.058 and again in ctsm5.3.061) + derecho ----- OK + + fire tests: (tested while in ctsm5.3.058 and again in ctsm5.3.061) + derecho ----- OK + +Answer changes +-------------- + +Changes answers relative to baseline: No, but read caveat: + h0 files become h0a (containing non-instantaneous fields) and h0i (containing instantaneous fields): + - I spot-checked clm, mosart, and rtm files and confirmed no bitwise change in answers. + - I ran Sam Rabin's comparison tool written specifically to compare hX files against hXa + hXi files: + ~samrabin/pr_2445_baseline_compare/pr_2445_baseline_compare.py -1 /glade/campaign/cgd/tss/ctsm_baselines/ctsm5.3.061 tests_0701-173109de + and it returned a single DIFF that appears to be a false positive. 
+ +Other details +------------- +List any git submodules updated (cime, rtm, mosart, cism, fates, etc.): + mosart1.1.08 --> mosart1.1.09 + rtm1_0_86 --> rtm1_0_87 + +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/2445 + https://github.com/ESCOMP/MOSART/pull/117 + https://github.com/ESCOMP/RTM/pull/61 + +=============================================================== +=============================================================== +Tag name: ctsm5.3.061 +Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310) +Date: Thu 26 Jun 2025 11:28:43 AM MDT +One-line Summary: Merge b4b-dev to master + +Purpose and description of changes +---------------------------------- +PR #3231 Clean up docs workflows +Resolves #3160 +Resolves #3213 + +PR #3272 Throw error if reseed_dead_plants = .true. in a branch simulation +Resolves #3257 + +PR #3264 Fix plumber2_surf_wrapper +Resolves #3262 + +PR #3259 subset_data point: Fix --create-datm and Longitude TypeErrors +Resolves #3258 +Resolves #3260 +Resolves #3197 +Resolves #2960 + +PR #3227 Docs docs: Update Windows instructions +Resolves #3185 + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- +List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: + Listed along with corresponding PRs in "Purpose and description of changes" above + +Notes of particular relevance for users +--------------------------------------- +Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): + #3272 Throw error if reseed_dead_plants = .true. 
in a branch simulation + +Changes to documentation: + #3227 Docs docs: Update Windows instructions + +Testing summary: +---------------- + [PASS means all tests PASS; OK means tests PASS other than expected fails.] + + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + +Answer changes +-------------- +Changes answers relative to baseline: No + +Other details +------------- +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/3283 + +=============================================================== +=============================================================== +Tag name: ctsm5.3.060 +Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310) +Date: Tue 24 Jun 2025 02:13:05 PM MDT +One-line Summary: Preliminary update of ctsm54 defaults (answer changing) + +Purpose and description of changes +---------------------------------- + + Brings to master some of the work done in #3206, which I merged to the ctsm5.4 alpha branch recently as tag alpha-ctsm5.4.CMIP7.02.ctsm5.3.055. + + Allows Cecile to run coupled without having to adjust clm things manually: updates namelist defaults and IC files that have been limited to the ctsm5.4 branch so far. + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] 
+ +[x] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- +List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: + Resolves #3116 modify snow thermal conductivity defaults + Resolves #3005 ctsm54 initial condition files + +Notes of particular relevance for users +--------------------------------------- +Changes made to namelist defaults (e.g., changed parameter values): + a3ce6a7 changes the default snow thermal conductivity schemes over lakes and glaciers in clm6 cases + +Changes to the datasets (e.g., parameter, surface or initial files): + a75e488 introduces new paramfile to clm6 cases + 3a8c432 introduces new f09 and ne30 finidat files for 1850 and 2000 clm6 cases + Reverted preexisting changes to the default raw datasets that came in with #3206 (from the ctsm54 branch) + Reverted changes to the f09 and ne30 fsurdat/landuse files that came in with b1890ac + +Changes to documentation: + None, yet + +Notes of particular relevance for developers: +--------------------------------------------- +Changes to tests or testing: + The next 2 tests are now labeled EXPECTED FAILURE in the RUN phase, to be addressed in issue #3252: + LII2FINIDATAREAS_D_P256x2_Ld1.f09_g17.I1850Clm50BgcCrop.derecho_intel.clm-default--clm-matrixcnOn_ignore_warnings + LII2FINIDATAREAS_D_P256x2_Ld1.f09_g17.I1850Clm50BgcCrop.derecho_intel.clm-default + +Testing summary: +---------------- + [PASS means all tests PASS; OK means tests PASS other than expected fails.] 
+ + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK (see Changes to tests above) + izumi ------- OK + + fates tests: (-c fates-sci.1.84.0_api.40.0.0-ctsm5.3.051 -g fates-sci.1.84.0_api.40.0.0-ctsm5.3.060) + derecho ----- OK + izumi ------- OK + +Answer changes +-------------- + +Changes answers relative to baseline: Yes + + Summarize any changes to answers, i.e., + - what code configurations: various + - what platforms/compilers: all + - nature of change: larger than roundoff/same climate + + See above in changes to namelist defaults and datasets for the sources of change. + +Other details +------------- +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/3268 + +=============================================================== +=============================================================== +Tag name: ctsm5.3.059 +Originator(s): erik (Erik Kluzek) +Date: Mon 23 Jun 2025 01:39:37 AM MDT +One-line Summary: Bring in various cleanup efforts found in previous testing after the chill changes came in + +Purpose and description of changes +---------------------------------- + +Various updates for testing and other problems identified in the +cesm3_0_beta04 tag. So fixes and cleanup for usability. +Including the following: + +- Fix SHR_ASSERT so single-point matrix test passes +- Add ne3np4 to namelist_defaults_ctsm.xml and Makefile, adding the ability to use it in PTS mode +- Fixes warm starts in PTS_MODE so that SCAM can use restart files +- f19 + f45 16pft fsurdat/landuse files to namelist_defaults_ctsm + Makefile +- Changes in the FORTRAN code to properly abort when fire-emission is asked for and + it can't be provided. Added unit testing for this.
+ +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- +List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: + + Fixes #2868 -- Custom crop calendar instructions + Fixes #2791 -- f19 16 pft for PPE work + Fixes #2768 -- ne3np4 added in + Fixes #2780 -- CN matrix single point + Fixes #2762 -- Don't allow FATES, SP, or nofire to turn fire-emis on + Fixes #3073 -- Wrong order for testmods with nofireemis + Some of #2810 -- 16pft f45 landuse.timeseries for FATES + CTSM namelist checking for: + https://github.com/NGEET/fates/issues/1356 -- FatesSp and FATES ST3 on at same time + Some work on https://github.com/ESCOMP/CTSM/issues/2643 -- standardize logical settings for FATES + +Notes of particular relevance for users +--------------------------------------- +Caveats for users (e.g., need to interpolate initial conditions): + Fire-emissions can NOT be turned on now for configurations that + don't allow it.
+ - FATES + - Sp + - Bgc but with nofire + + Also now CTSM fire-emiss, drydep and MEGAN are turned off for CTSM when coupled to CAM + This means that CAM will be the one that sets each of these + They can be added and turned on for CTSM I compsets though + +Changes to CTSM's user interface (e.g., new/renamed XML or namelist variables): + FATES-SP and FATES-ST3 can't be both on at the same time + +Changes made to namelist defaults (e.g., changed parameter values): + Changes to some of the finidat file settings for 2013 at ne0np4CONUS.ne30x8 + +Changes to the datasets (e.g., parameter, surface or initial files): + New fsurdat/flanduse_timeseries for ne3np4 needed for SCAM + +Notes of particular relevance for developers: +--------------------------------------------- +Caveats for developers (e.g., code that is duplicated that requires double maintenance): + Path for mizuroute changed to lowercase so that it matches CESM checkouts + +Changes to tests or testing: + Many tests now need to explicitly set nofireemis. Some testmods that assume + Sp include that explicitly. Other tests also include --clm-nofireemis + PTS_MODE testing changed from f45 to ne3np4 for SCAM + Add more tests for CAM VR grids + Add more namelist testing for ne3np4 and for fire-emission options and + FatesSp and FATES ST3 mode fails correctly + + +Testing summary: regular, ctsm_sci +---------------- + [PASS means all tests PASS; OK means tests PASS other than expected fails.] 
+ + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - OK + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + derecho - Fails, but ctsm5.3.058 does as well + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + + any other testing (give details below): + + ctsm_sci + derecho ---- OK + cesm_testing: + SMS_D_Ln9.f19_f19_mg17.FWma2000climo.derecho_intel.cam-outfrq9s_waccm_ma_mam4 + SMS_D_Ln9.ne0ARCTICne30x4_ne0ARCTICne30x4_mt12.FHIST.derecho_intel.cam-outfrq9s + SMS_D_Ln9_P1280x1.ne0CONUSne30x8_ne0CONUSne30x8_mt12.FCHIST.derecho_intel.cam-outfrq9s + SMS_D_Ln9_P1280x1.ne0CONUSne30x8_ne0CONUSne30x8_mt12.FCnudged.derecho_intel.cam-outfrq9s + SMS_D_Ln9_P5120x1.ne0ARCTICne30x4_ne0ARCTICne30x4_mt12.FHIST.derecho_intel.cam-outfrq9s + +Answer changes +-------------- + +Changes answers relative to baseline: no-bit-for-bit + Some tests that allowed fire-emissions to be on, have a different field list now + +Other details +------------- + +List any git submodules updated (cime, rtm, mosart, cism, fates, etc.): + Change name of components/mizuroute directory so it is the same as used in CESM + +Pull Requests that document the changes (include PR ids): +(https://github.com/ESCOMP/ctsm/pull) + #2840 -- Fix single point matrixcn fails + #2835 -- ne3np4 + warm start fixes for PTS_MODE + #2834 -- f19 + f45 16 pft datasets + #2844 -- Fortran code abort when fire-emission asked for and can't be provided + +=============================================================== +=============================================================== +Tag name: ctsm5.3.058 +Originator(s): samrabin (Sam Rabin, UCAR/TSS) +Date: Mon Jun 16 11:43:52 MDT 2025 +One-line Summary: Fix clm6 compset aliases + +Purpose and description of changes +---------------------------------- + +The following clm60 compset aliases were actually returning
long names with CLM50 physics: + +``` + ISSP245Clm60BgcCropCrujra : SSP245_DATM%CRUJRA2024_CLM50%BGC-CROP_SICE_SOCN_MOSART_SGLC_SWAV + ISSP370Clm60BgcCropCrujra : SSP370_DATM%CRUJRA2024_CLM50%BGC-CROP_SICE_SOCN_MOSART_SGLC_SWAV + ISSP585Clm60BgcCropCrujra : SSP585_DATM%CRUJRA2024_CLM50%BGC-CROP_SICE_SOCN_MOSART_SGLC_SWAV +``` + +This tag fixes them. It also adds a GitHub workflow to prevent this from happening again, at least for clm6. + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[X] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- + +List of CTSM issues fixed (include CTSM Issue # and description): +- [Issue #3244: ISSP CLM60 Crujra compset aliases have longnames that use CLM50, not CLM60](https://github.com/ESCOMP/CTSM/issues/3244) +- [Issue #3254: aux_clm has no Clm60 ISSP tests](https://github.com/ESCOMP/CTSM/issues/3254) + + +Notes of particular relevance for developers: +--------------------------------------------- + +Changes to tests or testing: +- Three ISSP tests in aux_clm changed from Clm50 to their Clm60 equivalents. +- Other ISSP Clm50 tests are untouched. +- Adds a GitHub workflow to check that Clm6 compset aliases return CLM6 longnames. + + +Testing summary: +---------------- + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + + +Answer changes +-------------- + +Changes answers relative to baseline: + + [ If a tag changes answers relative to baseline comparison the + following should be filled in (otherwise remove this section). 
+ And always remove these three lines and parts that don't apply. ] + + Summarize any changes to answers, i.e., + - what code configurations: ISSP245, ISSP370, and ISSP585 compsets + - what platforms/compilers: All + - nature of change (roundoff; larger than roundoff/same climate; new climate): + Larger than roundoff/same climate, since it only affects land-only (I) cases. + + Specifically, the following compsets now actually receive Clm60 physics instead of Clm50: + - ISSP245Clm60BgcCropCrujra + - ISSP370Clm60BgcCropCrujra + - ISSP585Clm60BgcCropCrujra + + +Other details +------------- + +Pull Requests that document the changes: +- [Pull Request #3248: ctsm5.3.058: Fix clm6 compset aliases by samsrabin](https://github.com/ESCOMP/CTSM/pull/3248) + +=============================================================== +=============================================================== +Tag name: ctsm5.3.057 +Originator(s): glemieux (Gregory Lemieux, LBNL, glemieux@lbl.gov) +Date: Fri Jun 13 17:00:00 MDT 2025 +One-line Summary: Fix PEM test for on-the-fly parameter file generation + +Purpose and description of changes +---------------------------------- + +This resolves an issue in which PEM tests that are used in conjunction with +testmods that build the FATES parameter file on-the-fly might result in RUN +failure due to the file not being generated for the second case. This +addresses the issue simply by setting the fates_paramfile namelist setting +to the full path for the primary test case directory. + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.]
+ +[ ] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- +List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: + Resolves #3097 Missing on-the-fly built fates parameter file for test types that have a "case2" subdirectory + +Notes of particular relevance for developers: +--------------------------------------------- +Changes to tests or testing: + PEM_D_Ld20.5x5_amazon.I2000Clm50FatesRs.derecho_gnu.clm-FatesColdSeedDisp is now added to the aux_clm test suite + Note that this test fails as expected on the COMPARE_mod_pes step + +Testing summary: +---------------- + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + +Answer changes +-------------- + +Changes answers relative to baseline: No + +Other details +------------- + +Pull Requests that document the changes (include PR ids): +(https://github.com/ESCOMP/ctsm/pull) +[ctsm5.3.057: Fix PEM test for FATES testmod that builds an on-the-fly parameter file](https://github.com/ESCOMP/CTSM/pull/3243) + +=============================================================== +=============================================================== +Tag name: ctsm5.3.056 +Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326) +Date: Thu 12 Jun 2025 01:43:46 PM MDT +One-line Summary: Merge b4b-dev to master + +Purpose and description of changes +---------------------------------- + +Several updates to documentation, from the documentation hackathon. +Also remove /glade from paths for mksurfdata_esmf paths + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) 
+ + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- + +List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: + Resolves Improve headings in single-point docs #3008 + Resolves Docs needed: Supported towers #3175 + Resolves Docs needed: NEON #3167 + Documentation part of run_neon base case must be of same run type as a requested clone #1926 + Resolves run_tower documentation needed #2997 + Resolves Various subset_data and related docs needed #3000 + Resolves Docs docs: Inline code rendered as italics #3164 + Resolves Fix some missing equation references in Snow Hydrology chapter of Tech Note #3202 + Resolves Fix Equation number label 2.5.119 in technical note #3196 + Resolves Remove hardcoded /glade/campaign/cesm/cesmdata/inputdata/ paths to support --rawdata-dir flexibility #3031 + +Notes of particular relevance for users +--------------------------------------- + +Changes to documentation: Yes! + +Notes of particular relevance for developers: None +--------------------------------------------- + +Testing summary: regular +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] 
+ + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - OK + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + derecho - I ran it and there were fails but previous versions had this as well + One problem was with the NEON server, so outside our control + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + +If the tag used for baseline comparisons was NOT the previous tag, note that here: + +Answer changes +-------------- + +Changes answers relative to baseline: No bit-for-bit + +Other details +------------- + +Pull Requests that document the changes (include PR ids): +(https://github.com/ESCOMP/ctsm/pull) + + Merge b4b-dev to master #3242 + Updates to run_tower/single point documentation #3194 + fixed italics to be in-line code #3198 + Fix some equation references in the Snow Hydrology chapter of the Tech Note #3203 + Fix typo in Chapter 5 of technical note #3195 + Remove hardcoded paths in gen_mksurfdata_namelist.xml #3162 + Merge ctsm5.3.050 to b4b-dev #3200 + +=============================================================== +=============================================================== Tag name: ctsm5.3.055 Originator(s): samrabin (Sam Rabin, UCAR/TSS) Date: Thu Jun 5 13:59:20 MDT 2025 diff --git a/doc/ChangeSum b/doc/ChangeSum index 3b5c59acfe..473d47e2b1 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,13 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.3.063 samrabin 07/10/2025 Merge b4b-dev to master + ctsm5.3.062 slevis 07/09/2025 Put inst. and non-inst. 
fields on separate hist files + ctsm5.3.061 slevis 06/26/2025 Merge b4b-dev to master + ctsm5.3.060 slevis 06/24/2025 Preliminary update of ctsm54 defaults (answer changing) + ctsm5.3.059 erik 06/23/2025 Bring in various cleanup efforts found in previous testing after the chill changes came in + ctsm5.3.058 samrabin 06/16/2025 Fix clm6 compset aliases + ctsm5.3.057 glemieux 06/13/2025 Fix PEM test for on-the-fly parameter file generation + ctsm5.3.056 erik 06/12/2025 Merge b4b-dev to master ctsm5.3.055 samrabin 06/05/2025 Remove FTorch ctsm5.3.054 samrabin 06/02/2025 CDEPS: Allow anomaly forcings with any DATM ctsm5.3.053 samrabin 05/30/2025 Fix and improve anomaly forcings for ISSP cases diff --git a/doc/ctsm-docs_container/Dockerfile b/doc/ctsm-docs_container/Dockerfile index 2ffd7a1702..5c78a0c14f 100644 --- a/doc/ctsm-docs_container/Dockerfile +++ b/doc/ctsm-docs_container/Dockerfile @@ -29,4 +29,4 @@ CMD ["/bin/bash", "-l"] LABEL org.opencontainers.image.title="Container for building CTSM documentation" LABEL org.opencontainers.image.source=https://github.com/ESCOMP/CTSM -LABEL org.opencontainers.image.version="v1.0.2c" +LABEL org.opencontainers.image.version="v1.0.2d" diff --git a/doc/source/tech_note/Fluxes/CLM50_Tech_Note_Fluxes.rst b/doc/source/tech_note/Fluxes/CLM50_Tech_Note_Fluxes.rst index 05b35d8b34..2eef99666f 100644 --- a/doc/source/tech_note/Fluxes/CLM50_Tech_Note_Fluxes.rst +++ b/doc/source/tech_note/Fluxes/CLM50_Tech_Note_Fluxes.rst @@ -1215,7 +1215,7 @@ The numerical solution for vegetation temperature and the fluxes of momentum, se #. Leaf boundary layer resistance :math:`r_{b}` (:eq:`5.122` ) -#. Aerodynamic resistances :math:`r_{ah} ^{{'} }` and :math:`r_{aw} ^{{'} }`(:eq:`5.116` ) +#. Aerodynamic resistances :math:`r_{ah} ^{{'} }` and :math:`r_{aw} ^{{'} }` (:eq:`5.116` ) #. 
Sunlit and shaded stomatal resistances :math:`r_{s}^{sun}` and :math:`r_{s}^{sha}` (Chapter :numref:`rst_Stomatal Resistance and Photosynthesis`) diff --git a/doc/source/tech_note/Snow_Hydrology/CLM50_Tech_Note_Snow_Hydrology.rst b/doc/source/tech_note/Snow_Hydrology/CLM50_Tech_Note_Snow_Hydrology.rst index fdc559e1c2..acd0ae3e22 100644 --- a/doc/source/tech_note/Snow_Hydrology/CLM50_Tech_Note_Snow_Hydrology.rst +++ b/doc/source/tech_note/Snow_Hydrology/CLM50_Tech_Note_Snow_Hydrology.rst @@ -15,6 +15,21 @@ Shown are three snow layers, :math:`i=-2`, :math:`i=-1`, and :math:`i=0`. The la The state variables for snow are the mass of water :math:`w_{liq,i}` (kg m\ :sup:`-2`), mass of ice :math:`w_{ice,i}` (kg m\ :sup:`-2`), layer thickness :math:`\Delta z_{i}` (m), and temperature :math:`T_{i}` (Chapter :numref:`rst_Soil and Snow Temperatures`). The water vapor phase is neglected. Snow can also exist in the model without being represented by explicit snow layers. This occurs when the snowpack is less than a specified minimum snow depth (:math:`z_{sno} < 0.01` m). In this case, the state variable is the mass of snow :math:`W_{sno}` (kg m\ :sup:`-2`). +.. note:: + In CLM, all water-related state variables, including snow and ice, are reported in **liquid water equivalent** units. This means that quantities such as snow water equivalent (SWE), soil ice content, and snowmelt are expressed in terms of the depth of liquid water that would result if the frozen material melted completely. + + For example: + + - ``H2OSNO`` represents the total snow water equivalent in mm. + - ``H2OSOI_ICE`` is the soil ice content in mm. + - ``QSNOMELT`` is the snow melt rate in mm/s. + + In contrast, some glaciological or cryosphere models (e.g., PISM, RACMO2, Crocus) may output variables in **ice-equivalent** units, depending on the modeling context. 
When necessary, conversion from ice equivalent to water equivalent should account for the density of ice versus liquid water (:numref:`Table Physical Constants`): + + .. math:: + + \text{Water equivalent} = \text{Ice equivalent} \times \frac{\rho_\text{ice}}{\rho_\text{liq}} + Section :numref:`Snow Covered Area Fraction` describes the calculation of fractional snow covered area, which is used in the surface albedo calculation (Chapter :numref:`rst_Surface Albedos`) and the surface flux calculations (Chapter :numref:`rst_Momentum, Sensible Heat, and Latent Heat Fluxes`). The following two sections (:numref:`Ice Content` and :numref:`Water Content`) describe the ice and water content of the snow pack assuming that at least one snow layer exists. Section :numref:`Black and organic carbon and mineral dust within snow` describes how black and organic carbon and mineral dust particles are represented within snow, including meltwater flushing. See Section :numref:`Initialization of snow layer` for a description of how a snow layer is initialized. .. _Snow Covered Area Fraction: @@ -585,7 +600,7 @@ The maximum snow layer thickness, :math:`\Delta z_{\max }`, depends on the numbe Subdivision ''''''''''''''''''' -The snow layers are subdivided when the layer thickness exceeds the prescribed maximum thickness :math:`\Delta z_{\max }` with lower and upper bounds that depend on the number of snow layers (:numref:`Table snow layer thickness`). For example, if there is only one layer, then the maximum thickness of that layer is 0.03 m, however, if there is more than one layer, then the maximum thickness of the top layer is 0.02 m. Layers are checked sequentially from top to bottom for this limit. If there is only one snow layer and its thickness is greater than 0.03 m (:numref:`Table snow layer thickness`), the layer is subdivided into two layers of equal thickness, liquid water and ice contents, and temperature. 
If there is an existing layer below the layer to be subdivided, the thickness :math:`\Delta z_{i}`, liquid water and ice contents, :math:`w_{liq,\; i}` and :math:`w_{ice,\; i}`, and temperature :math:`T_{i}` of the excess snow are combined with the underlying layer according to equations -. If there is no underlying layer after adjusting the layer for the excess snow, the layer is subdivided into two layers of equal thickness, liquid water and ice contents. The vertical snow temperature profile is maintained by calculating the slope between the layer above the splitting layer (:math:`T_{1}` ) and the splitting layer (:math:`T_{2}` ) and constraining the new temperatures (:math:`T_{2}^{n+1}`, :math:`T_{3}^{n+1}` ) to lie along this slope. The temperature of the lower layer is first evaluated from +The snow layers are subdivided when the layer thickness exceeds the prescribed maximum thickness :math:`\Delta z_{\max }` with lower and upper bounds that depend on the number of snow layers (:numref:`Table snow layer thickness`). For example, if there is only one layer, then the maximum thickness of that layer is 0.03 m, however, if there is more than one layer, then the maximum thickness of the top layer is 0.02 m. Layers are checked sequentially from top to bottom for this limit. If there is only one snow layer and its thickness is greater than 0.03 m (:numref:`Table snow layer thickness`), the layer is subdivided into two layers of equal thickness, liquid water and ice contents, and temperature. If there is an existing layer below the layer to be subdivided, the thickness :math:`\Delta z_{i}`, liquid water and ice contents, :math:`w_{liq,\; i}` and :math:`w_{ice,\; i}`, and temperature :math:`T_{i}` of the excess snow are combined with the underlying layer according to equations :eq:`8.55` - :eq:`8.58`. 
If there is no underlying layer after adjusting the layer for the excess snow, the layer is subdivided into two layers of equal thickness, liquid water and ice contents. The vertical snow temperature profile is maintained by calculating the slope between the layer above the splitting layer (:math:`T_{1}` ) and the splitting layer (:math:`T_{2}` ) and constraining the new temperatures (:math:`T_{2}^{n+1}`, :math:`T_{3}^{n+1}` ) to lie along this slope. The temperature of the lower layer is first evaluated from .. math:: :label: 8.62 @@ -602,5 +617,5 @@ then adjusted as, T_{2}^{n+1} = T_{2}^{n} +\left(\frac{T_{1}^{n} -T_{2}^{n} }{{\left(\Delta z_{1} +\Delta z_{2}^{n} \right)\mathord{\left/ {\vphantom {\left(\Delta z_{1} +\Delta z_{2}^{n} \right) 2}} \right.} 2} } \right)\left(\frac{\Delta z_{2}^{n+1} }{2} \right) & \qquad T'_{3} `_. + + -The ``--create-user-mods`` command tells the script to set up a user mods directory in your specified ``$my_output_dir`` and to specify the required ``PTS_LAT`` and ``PTS_LON`` settings. You can then use this user mods directory to set up your CTSM case, as described below. +The ``--create-user-mods`` command tells the script to set up a user mods directory in your specified ``$my_output_dir`` and to specify the required ``PTS_LAT`` and ``PTS_LON`` settings. You can then use this user mods directory to set up your CTSM case, as described below. ``subset_data`` will default to subsetting surface data and land-use timeseries from the default, nominal one-degree resolution (f09) datasets. +================ Create the case ------------------- +================ You can use the user mods directory set up in the previous subset data step to tell CIME/CTSM where your subset files are located. 
diff --git a/doc/source/users_guide/running-single-points/index.rst b/doc/source/users_guide/running-single-points/index.rst index d5ece00ec9..1b503acc87 100644 --- a/doc/source/users_guide/running-single-points/index.rst +++ b/doc/source/users_guide/running-single-points/index.rst @@ -14,8 +14,8 @@ Running Single Point and Regional Cases .. toctree:: :maxdepth: 2 - single-point-and-regional-grid-configurations.rst - running-single-point-subset-data.rst - running-single-point-configurations.rst - running-pts_mode-configurations.rst + intro-to-single-pt-regional.rst + supported-tower-sites.rst + generic-single-point-regional.rst + predefined-single-point-regional-resolutions.rst diff --git a/doc/source/users_guide/running-single-points/intro-to-single-pt-regional.rst b/doc/source/users_guide/running-single-points/intro-to-single-pt-regional.rst new file mode 100644 index 0000000000..23279aac23 --- /dev/null +++ b/doc/source/users_guide/running-single-points/intro-to-single-pt-regional.rst @@ -0,0 +1,54 @@ +.. include:: ../substitutions.rst + +.. _single-point-regional-configurations: + +***************************************************** +Introduction to Single-Point and Regional Grid Setups +***************************************************** + +CTSM is designed to support a wide range of spatial scales, ranging from global simulations to regional runs to highly resolved single-point cases. Setting up and running single-point and regional simulations is useful for a variety of purposes including: running quick cases for testing, evaluating specific vegetation types, or running with observed data from a specific site to generate and test hypotheses. + +Single-point cases allow users to run CTSM at a specific location such as a flux tower or ecological field site. Single-point runs are especially useful where high-resolution meteorological forcing data and site-specific observations are available and require minimal computational resources. 
+ +Regional configurations support simulations over broader geographic areas defined by a user-specified domain. Regional runs require additional input data such as meteorological forcing for the region. You can either extract regional subsets from global datasets or create custom datasets for your region of interest. + +.. _options-for-single-points: + +========================================= + Choosing the right single point options +========================================= + +There are several different ways to set up single-point and regional cases. + +For supported tower sites: +-------------------------- + +You can run at a supported tower site if one of the supported single-point/regional datasets is your site of interest (see :ref:`supported-tower-sites`). All the datasets are created for you, and you can easily select one and run it out of the box using a supported resolution from the top level of the CESM scripts. You can also use this method for your own datasets, but you have to create the datasets, and add them to the XML database in scripts, CLM and to the DATM. This is worthwhile if you want to run multiple cases for a given point or region. + +Next, using ``subset_data`` is the best way to set up cases quickly where you can use a simple tool to create your own datasets (see :ref:`generic_single_point_runs`). With this method you don't have to change DATM or add files to the XML database. ``subset_data`` will create a usermod directory where you can store your files and the files needed to directly run a case. + +For unsupported tower sites: +---------------------------- + +If you have meteorology data that you want to force your CLM simulations with, you'll need to set up cases as described in :ref:`pre-defined-single-pt-regional-resolutions`. You'll need to create CLM datasets according to ``CLM_USRDAT_NAME``. You may also need to modify DATM to use your forcing data.
And you'll need to change your forcing data to be in a format that DATM can use. + +================ +Spinning up CTSM +================ + +We make steady state assumptions about the initial state of ecosystem properties including temperature, water, snow, ice, carbon & nitrogen. This is the equilibrium state of the model, given the forcing data. Spinning up the model brings internal state variables into equilibrium with environmental forcing conditions so that the results are not influenced by the initial conditions of state variables (such as soil C). In runs with active biogeochemistry, we need to get the ecosystem carbon and nitrogen pools with long turnover times into steady state. + +Specifically, spinning up CTSM consists of 3 parts including: + +1. AD, or accelerated decomposition: The turnover and decomposition of the slow pools of C and N that normally have a long residence time in ecosystems is mathematically accelerated, where we make the slow pools spin up more quickly by increasing their turnover rates. This includes: +- Accelerating turnover of wood, litter and soil pools +- Accelerating advection and diffusion terms +- Calculating this as a function of latitude so that spinup is more accelerated in high latitude regions. + +2. postAD, which occurs after AD spinup: During postAD runs we take away accelerated decomposition and let the ecosystem settle into its equilibrium, or steady-state under 'normal conditions'. Pools are increased by the same degree their turnover was increased (e.g., turnover 10x faster means the pool must be 10x larger). During AD and postAD spinup we cycle over several years of input data and hold other inputs constant (e.g., atmospheric CO2 concentrations, N deposition, etc.). For transient runs these inputs also change over time. + +3. transient: Transient runs are used to compare with observations, and include high frequency output that we can compare with flux tower measurements.
The end of the spinup simulation is used as the initial conditions for a transient simulation, set in the user_nl_clm file. + + + + diff --git a/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst b/doc/source/users_guide/running-single-points/predefined-single-point-regional-resolutions.rst similarity index 91% rename from doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst rename to doc/source/users_guide/running-single-points/predefined-single-point-regional-resolutions.rst index fb61397321..4ad8995511 100644 --- a/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst +++ b/doc/source/users_guide/running-single-points/predefined-single-point-regional-resolutions.rst @@ -1,13 +1,13 @@ .. include:: ../substitutions.rst -.. _pts_mode: +.. _pre-defined-single-pt-regional-resolutions: **************************************************** -Running a single point using global data - PTS_MODE +Pre-defined single-point and regional resolutions **************************************************** .. warning:: - ``PTS_MODE`` has been mostly deprecated in favor of ``subset_data`` (Sect. :numref:`single_point_subset_data`). You should only consider using it if you are using the Single Column Atmospheric Model (SCAM). + ``PTS_MODE`` has been mostly deprecated in favor of ``subset_data`` (Sect. :numref:`generic_single_point_runs`). You should only consider using it if you are using the Single Column Atmospheric Model (SCAM). ``PTS_MODE`` enables you to run the model using global datasets, but just picking a single point from those datasets and operating on it. It can be a very quick way to do fast simulations and get a quick turnaround. 
diff --git a/doc/source/users_guide/running-single-points/running-single-point-configurations.rst b/doc/source/users_guide/running-single-points/running-single-point-configurations.rst deleted file mode 100644 index 56cad6a11e..0000000000 --- a/doc/source/users_guide/running-single-points/running-single-point-configurations.rst +++ /dev/null @@ -1,215 +0,0 @@ -.. include:: ../substitutions.rst - -.. _running-single-point-datasets: - -****************************************** - Running Single Point Configurations -****************************************** - -In addition to running with the outputs of ``subset_data`` (Sect. :numref:`single_point_subset_data`), CLM supports running using single-point or regional datasets that are customized to a particular region. CLM supports a a small number of out-of-the-box single-point and regional datasets. However, users can create their own dataset. - -To get the list of supported dataset resolutions do this: -:: - - > cd $CTSMROOT/doc - > ../bld/build-namelist -res list - -Which results in the following: -:: - - CLM build-namelist - valid values for res (Horizontal resolutions - Note: 0.5x0.5, 5x5min, 10x10min, 3x3min and 0.33x0.33 are only used for CLM tools): - Values: default 512x1024 360x720cru 128x256 64x128 48x96 32x64 8x16 94x192 \ - 0.23x0.31 0.47x0.63 0.9x1.25 1.9x2.5 2.5x3.33 4x5 10x15 5x5_amazon 1x1_tropicAtl \ - 1x1_vancouverCAN 1x1_mexicocityMEX 1x1_asphaltjungleNJ 1x1_brazil 1x1_urbanc_alpha 1x1_numaIA \ - 1x1_smallvilleIA 0.5x0.5 3x3min 5x5min 10x10min 0.33x0.33 ne4np4 ne16np4 ne30np4 ne60np4 \ - ne120np4 ne240np4 wus12 us20 - Default = 1.9x2.5 - (NOTE: resolution and mask and other settings may influence what the default is) - -The resolution names that have an underscore in them ("_") are all single-point or regional resolutions. - -.. note:: When running a single point, the number of processors is automatically set to one, which is the only value allowed. - -.. 
warning:: - Just like running with the outputs from ``subset_data`` (Sect. :numref:`single_point_subset_data`), by default these setups sometimes run with ``MPILIB=mpi-serial`` (in the ``env_build.xml`` file) turned on, which allows you to run the model interactively. On some machines this mode is NOT supported and you may need to change it to FALSE before you are able to build. - -.. _single-point-global-climate: - -Single-point runs with global climate forcings -============================================== - -Example: Use global forcings at a site without its own special forcings ------------------------------------------------------------------------ - -This example uses the single-point site in Brazil. -:: - - > cd cime/scripts - > set SITE=1x1_brazil - > ./create_newcase -case testSPDATASET -res $SITE -compset I2000Clm50SpGs - > cd testSPDATASET - -Then setup, build and run normally. - -Example: Use global forcings at a site WITH its own special forcings --------------------------------------------------------------------- - -The urban Mexico City test site has its own atmosphere forcing data (see Sect. :numref:`single-point-with-own-forcing`). To ignore that and run it with the default global forcing data, but over the period for which its own forcing data is provided, do the following: - -:: - - > cd cime/scripts - # Set a variable to the site you want to use (as it's used several times below) - > set SITE=1x1_mexicocityMEX - > ./create_newcase -case testSPDATASET -res $SITE -compset I1PtClm50SpGs - > cd testSPDATASET - -(Note the use of ``I1Pt`` instead of ``I2000`` as in the example above.) Then setup, build and run normally. - -.. 
_single-point-with-own-forcing: - -Supported single-point runs for sites with their own atmospheric forcing -======================================================================== - -Of the supported single-point datasets we have three that also have atmospheric forcing data that go with them: Mexico City (Mexico), Vancouver, (Canada, British Columbia), and ``urbanc_alpha`` (test data for an Urban inter-comparison project). Mexico city and Vancouver also have namelist options in the source code for them to work with modified urban data parameters that are particular to these locations. To turn on the atmospheric forcing for these datasets, you set the ``env_run.xml DATM_MODE`` variable to ``CLM1PT``, and then the atmospheric forcing datasets will be used for the point picked. If you use one of the compsets that has "I1Pt" in the name that will be set automatically. - -.. todo:: - Update the below, as ``queryDefaultNamelist.pl`` no longer exists. - -When running with datasets that have their own atmospheric forcing you need to be careful to run over the period that data is available. If you have at least one year of forcing it will cycle over the available data over and over again no matter how long of a simulation you run. However, if you have less than a years worth of data (or if the start date doesn't start at the beginning of the year, or the end date doesn't end at the end of the year) then you won't be able to run over anything but the data extent. In this case you will need to carefully set the ``RUN_STARTDATE``, ``START_TOD`` and ``STOP_N/STOP_OPTION`` variables for your case to run over the entire time extent of your data. For the supported data points, these values are in the XML database and you can use the ``queryDefaultNamelist.pl`` script to query the values and set them for your case (they are set for the three urban test cases: Mexicocity, Vancouver, and urbanc_alpha). 
- -Example: Use site-specific atmospheric forcings ------------------------------------------------ -In this example, we show how to use the atmospheric forcings specific to the Vancouver, Canada point. -:: - - > cd cime/scripts - - # Set a variable to the site you want to use (as it's used several times below) - > set SITE=1x1_vancouverCAN - - # Create a case at the single-point resolutions with their forcing - > ./create_newcase -case testSPDATASETnAtmForcing -res $SITE -compset I1PtClm50SpGs - > cd testSPDATASETnAtmForcing - - # Figure out the start and end date for this dataset - # You can do this by examining the datafile. - > set STOP_N=330 - > set START_YEAR=1992 - > set STARTDATE=${START_YEAR}-08-12 - > @ NDAYS = $STOP_N / 24 - > ./xmlchange RUN_STARTDATE=$STARTDATE,STOP_N=$STOP_N,STOP_OPTION=nsteps - - # Set the User namelist to set the output frequencies of the history files - # Setting the stdurbpt use-case option create three history file streams - # The frequencies and number of time-samples needs to be set - > cat << EOF > user_nl_clm - hist_mfilt = $NDAYS,$STOP_N,$STOP_N - hist_nhtfrq = -1,1,1 - EOF - - > ./case.setup - -.. warning:: If you don't set the start-year and run-length carefully as shown above the model will abort with a "dtlimit error" in the atmosphere model. Since, the forcing data for this site (and the MexicoCity site) is less than a year, the model won't be able to run for a full year. The ``1x1_urbanc_alpha`` site has data for more than a full year, but neither year is complete hence, it has the same problem (see the problem for this site above). - -.. _creating-your-own-singlepoint-dataset: - -Creating your own single-point dataset -=================================================== - -The following provides an example of setting up a case using ``CLM_USRDAT_NAME`` where you rename the files according to the ``CLM_USRDAT_NAME`` convention. 
We have an example of such datafiles in the repository for a specific region over Alaska (actually just a sub-set of the global f19 grid). - -Example: Using CLM_USRDAT_NAME to run a simulation using user datasets for a specific region over Alaska ------------------------------------------------------------------------------------------------------------------------ -:: - - > cd cime/scripts - > ./create_newcase -case my_userdataset_test -res CLM_USRDAT -compset I2000Clm50BgcCruGs - > cd my_userdataset_test/ - > set GRIDNAME=13x12pt_f19_alaskaUSA - > set LMASK=gx1v6 - > ./xmlchange CLM_USRDAT_NAME=$GRIDNAME,CLM_BLDNML_OPTS="-mask $LMASK" - > ./xmlchange ATM_DOMAIN_FILE=domain.lnd.${GRIDNAME}_$LMASK.nc - > ./xmlchange LND_DOMAIN_FILE=domain.lnd.${GRIDNAME}_$LMASK.nc - - # Make sure the file exists in your $CSMDATA or else use svn to download it there - > ls $CSMDATA/lnd/clm2/surfdata_map/surfdata_${GRIDNAME}_simyr2000.nc - - # If it doesn't exist, comment out the following... - #> setenv SVN_INP_URL https://svn-ccsm-inputdata.cgd.ucar.edu/trunk/inputdata/ - #> svn export $SVN_INP_URL/lnd/clm2/surfdata_map/surfdata_${GRIDNAME}_simyr2000.nc $CSMDATA/lnd/clm2/surfdata_map/surfdata_${GRIDNAME}_simyr2000.nc - > ./case.setup - -The first step is to create the domain and surface datasets using the process outlined in :ref:`using-clm-tools-section`. Below we show an example of the process. - -Example: Creating a surface dataset for a single point ---------------------------------------------------------------------- -.. todo:: - Update the below, as ``mksurfdata.pl`` no longer exists and domain files aren't needed with nuopc. - -:: - - # set the GRIDNAME and creation date that will be used later - > setenv GRIDNAME 1x1_boulderCO - > setenv CDATE `date +%y%m%d` - # Create the SCRIP grid file for the location and create a unity mapping file for it. 
- > cd $CTSMROOT/tools/mkmapdata - > ./mknoocnmap.pl -p 40,255 -n $GRIDNAME - # Set pointer to MAPFILE just created that will be used later - > setenv MAPFILE `pwd`/map_${GRIDNAME}_noocean_to_${GRIDNAME}_nomask_aave_da_${CDATE}.nc - # create the mapping files needed by mksurfdata_esmf. - > cd ../.././mkmapdata - > setenv GRIDFILE ../mkmapgrids/SCRIPgrid_${GRIDNAME}_nomask_${CDATE}.nc - > ./mkmapdata.sh -r $GRIDNAME -f $GRIDFILE -t regional - # create the domain file - > cd ../../../../tools/mapping/gen_domain_files/src - > ../../../scripts/ccsm_utils/Machines/configure -mach cheyenne -compiler intel - > gmake - > cd .. - > setenv OCNDOM domain.ocn_noocean.nc - > setenv ATMDOM domain.lnd.{$GRIDNAME}_noocean.nc - > ./gen_domain -m $MAPFILE -o $OCNDOM -l $ATMDOM - # Save the location where the domain file was created - > setenv GENDOM_PATH `pwd` - # Finally create the surface dataset - > cd ../../../../lnd/clm/tools/|version|/mksurfdata_esmf/src - > gmake - > cd .. - > ./mksurfdata.pl -r usrspec -usr_gname $GRIDNAME -usr_gdate $CDATE - -The next step is to create a case that points to the files you created above. We will still use the ``CLM_USRDAT_NAME`` option as a way to get a case setup without having to add the grid to scripts. - -Example: Setting up a case from the single-point surface dataset just created --------------------------------------------------------------------------------------------- - -.. todo:: - Change this to provide instructions for a CTSM checkout instead of a CESM one. - -.. todo:: - Update the below, as domain files aren't needed with nuopc. - -:: - - # First setup an environment variable that points to the top of the CESM directory. 
- > setenv CESMROOT - # Next make sure you have a inputdata location that you can write to - # You only need to do this step once, so you won't need to do this in the future - > setenv MYCSMDATA $HOME/inputdata # Set env var for the directory for input data - > ./link_dirtree $CSMDATA $MYCSMDATA - # Copy the file you created above to your new $MYCSMDATA location following the CLMUSRDAT - # naming convention (leave off the creation date) - > cp $CESMROOT/$CTSMROOT/tools/mksurfdata_esmf/surfdata_${GRIDNAME}_simyr1850_$CDATE.nc \ - $MYCSMDATA/lnd/clm2/surfdata_map/surfdata_${GRIDNAME}_simyr1850.nc - > cd $CESMROOT/cime/scripts - > ./create_newcase -case my_usernldatasets_test -res CLM_USRDAT -compset I1850Clm50BgcCropCru \ - -mach cheyenne_intel - > cd my_usernldatasets_test - > ./xmlchange DIN_LOC_ROOT=$MYCSMDATA - # Set the path to the location of gen_domain set in the creation step above - > ./xmlchange ATM_DOMAIN_PATH=$GENDOM_PATH,LND_DOMAIN_PATH=$GENDOM_PATH - > ./xmlchange ATM_DOMAIN_FILE=$ATMDOM,LND_DOMAIN_FILE=$ATMDOM - > ./xmlchange CLM_USRDAT_NAME=$GRIDNAME - > ./case.setup - -.. note:: With this and previous versions of the model we recommended using ``CLM_USRDAT_NAME`` as a way to identify your own datasets without having to enter them into the XML database. This has the down-side that you can't include creation dates in your filenames, which means you can't keep track of different versions by date. It also means you HAVE to rename the files after you created them with ``mksurfdata.pl``. Now, since ``user_nl`` files are supported for ALL model components, and the same domain files are read by both CLM and DATM and set using the envxml variables: ``ATM_DOMAIN_PATH``, ``ATM_DOMAIN_FILE``, ``LND_DOMAIN_PATH``, and ``LND_DOMAIN_FILE`` -- you can use this mechanism (``user_nl_clm`` and ``user_nl_datm`` and those envxml variables) to point to your datasets in any location. 
In the future we will deprecate ``CLM_USRDAT_NAME`` and recommend ``user_nl_clm`` and ``user_nl_datm`` and the ``DOMAIN`` envxml variables. diff --git a/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst b/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst deleted file mode 100644 index d16dfa6f5e..0000000000 --- a/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst +++ /dev/null @@ -1,32 +0,0 @@ -.. include:: ../substitutions.rst - -.. _single-point-configurations: - -***************************************** -Single and Regional Grid Configurations -***************************************** - -CLM allows you to set up and run cases with a single-point or a local region as well as global resolutions. This is often useful for running quick cases for testing, evaluating specific vegetation types, or land-units, or running with observed data for a specific site. - -There are two different ways to do this for normal-supported site - -``subset_data`` - runs for a single point using global datasets. - -``CLM_USRDAT_NAME`` - runs using your own datasets (single-point or regional). - -.. _options-for-single-points: - -========================================= - Choosing the right single point options -========================================= - -Running for a *normal supported site* is a great solution, if one of the supported single-point/regional datasets, is your region of interest (see :ref:`running-single-point-datasets`). All the datasets are created for you, and you can easily select one and run, out of the box with it using a supported resolution from the top level of the CESM scripts. The problem is that there is a very limited set of supported datasets. You can also use this method for your own datasets, but you have to create the datasets, and add them to the XML database in scripts, CLM and to the DATM. 
This is worthwhile if you want to repeat many multiple cases for a given point or region. - -In general :ref:`single_point_subset_data` is the quick and dirty method that gets you started, but it has limitations. It's good for an initial attempt at seeing results for a point of interest, but since you can NOT restart with it, its usage is limited. It is the quickest method as you can create a case for it directly from ``cime/scripts/create_newcase``. Although you can't restart, running a single point is very fast, and you can run for long simulation times even without restarts. - -Next, ``CLM_USRDAT_NAME`` using ``subset_data`` is the best way to setup cases quickly where you have a simple tool to create your own datasets (see :ref:`single_point_subset_data`). With this method you don't have to change DATM or add files to the XML database. ``subset_data`` will create a usermod directory where you can store your files and the files needed to directly run a case. - -Finally, if you also have meteorology data that you want to force your CLM simulations with you'll need to setup cases as described in :ref:`creating-your-own-singlepoint-dataset`. You'll need to create CLM datasets either according to ``CLM_USRDAT_NAME``. You may also need to modify DATM to use your forcing data. And you'll need to change your forcing data to be in a format that DATM can use. - diff --git a/doc/source/users_guide/running-single-points/supported-tower-sites.rst b/doc/source/users_guide/running-single-points/supported-tower-sites.rst new file mode 100644 index 0000000000..3e080a3ee1 --- /dev/null +++ b/doc/source/users_guide/running-single-points/supported-tower-sites.rst @@ -0,0 +1,82 @@ +.. include:: ../substitutions.rst + +.. 
_supported-tower-sites: + +******************************************** +Supported tower sites for single-point runs +******************************************** + +CTSM has functionality within the ``run_tower`` tool for running single-point cases at particular supported tower sites using forcing data from those sites. + +This tool was developed as a collaboration between NCAR's modeling capabilities and NEON's measurement network that could drive scientific discovery at the confluence of geosciences and biological sciences. The tool was then expanded to include PLUMBER sites to support a wider variety of ecological research projects. + +Broadly, this tool can be used to probe questions such as: + + * What biases in NCAR models can current observations address? + * How can NCAR models inform observational data streams? + * What new hypotheses of atmospheric science and macroscale ecology can be tested with observations and NCAR models to increase our understanding of the biosphere-atmosphere system and its response to global environmental change? + * Can Earth system prediction be extended to ecological forecasts? + +==================================================== +General Information on Running Supported Tower Sites +==================================================== + +The ``run_tower`` capability allows users to run Community Land Model (CLM) simulations at NEON and PLUMBER tower sites in a streamlined manner by setting up the appropriate model configurations, datasets, and initial conditions. This script can run for one or more (NEON or PLUMBER) tower sites. It will do the following: + + 1) Create a generic base case for cloning. + 2) Make the case for the specific neon or plumber site(s). + 3) Make changes to the case, for + a. AD spinup + b. post-AD spinup + c. transient + d. SASU or Matrix spinup + 4) Build and submit the case. 
+ +The available options, a description of those options, and details on default values can be shown by running ``run_tower --help``. + +A `tutorial `_ on running and evaluating data from ``run_tower`` is also available. + +.. warning:: Note that the run_tower base case must be of same run type as a requested clone, as described by this `issue ticket `_. + +========================================= +NEON Tower Single Point Simulations +========================================= + +With this tool, CLM uses gap-filled meteorology from NEON tower sites, the dominant plant species is mapped to the appropriate model plant functional type (PFT), and soil characteristics used in the simulations are updated to match observations from NEON's soil megapits. Gap-filled NEON tower flux data are also available for model evaluation. Additionally, all the commands to run the model are combined into a script that you can easily call from a single line of code. + +Currently supported NEON sites can be found by running ``run_tower --help``. + +.. note:: If you choose to run ``all``, single point simulations at all NEON sites will be run. This is a useful feature, but we recommend testing out running just one site first. + +Information on the specific sites can be found on the `NEON webpage `_. + +.. note:: For NEON tower site simulations, the default run type is ``transient``. + +To run CTSM at a NEON site, change directories to where the run_tower tool is located, and then run the ``run_tower`` command. You can also add any additional arguments as described by the ``help`` options. These steps will look something like this:: + + cd CTSM/tools/site_and_regional + run_tower --neon-sites ABBY + +When a simulation completes, the data are stored in the archive directory under ``CTSM/tools/site_and_regional/archive``. In this directory you will find files that include data for every day of the simulation, as well as files that average model variables monthly. 
The output file names are automatically generated and are composed of the simulation name, which includes the site name, type of simulation (eg, ``transient``), and the date of simulated data. +The tower simulations generate two types of files: + +1) ``h0`` Variables that are averaged monthly. One file is available for every month of the simulation. These files include hundreds of variables. + +2) ``h1`` Variables that are recorded every 30 minutes. Values are aggregated into one file for each day of the simulation. Each file includes 48 data points for selected variables. + +========================================= +PLUMBER Tower Single Point Simulations +========================================= + +.. note:: A few important notes regarding the PLUMBER tower site simulations are that the default run type is ``ad``; additionally, PLUMBER cases all start in different years. + +Currently supported PLUMBER Sites can be found by running ``run_tower --help``. + +Information on the specific sites can be found `here `_. + +To run CTSM at a PLUMBER site, change directories to where the run_tower tool is located, and then run the ``run_tower`` command. You can also add any additional arguments as described by the ``help`` options. These steps will look something like this:: + + cd CTSM/tools/site_and_regional + run_tower --plumber-sites AR-SLu + +The output for a PLUMBER case will be set up similarly to the output for a NEON case, as described above. 
diff --git a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst index ec28a0d624..7b1a4d8ad0 100644 --- a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst +++ b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst @@ -352,13 +352,13 @@ When ``-irrig on`` is used ``build-namelist`` will try to find surface datasets Update the below, as ``queryDefaultNamelist.pl`` no longer exists. ``CLM_USRDAT_NAME`` - Provides a way to enter your own datasets into the namelist. The files you create must be named with specific naming conventions outlined in :ref:`creating-your-own-singlepoint-dataset`. To see what the expected names of the files are, use the ``queryDefaultNamelist.pl`` to see what the names will need to be. For example if your ``CLM_USRDAT_NAME`` will be "1x1_boulderCO", with a "navy" land-mask, constant simulation year range, for 1850, the following will list what your filenames should be: + Provides a way to enter your own datasets into the namelist. The files you create must be named with specific naming conventions outlined in :ref:`generic_single_point_runs`. To see what the expected names of the files are, use the ``queryDefaultNamelist.pl`` to see what the names will need to be. For example if your ``CLM_USRDAT_NAME`` will be "1x1_boulderCO", with a "navy" land-mask, constant simulation year range, for 1850, the following will list what your filenames should be: :: > cd $CTSMROOT/bld > queryDefaultNamelist.pl -usrname "1x1_boulderCO" -options mask=navy,sim_year=1850,sim_year_range="constant" -csmdata $CSMDATA - An example of using ``CLM_USRDAT_NAME`` for a simulation is given in Example :numref:`creating-your-own-singlepoint-dataset`. + An example of using ``CLM_USRDAT_NAME`` for a simulation is given in Example :numref:`generic_single_point_runs`. 
``CLM_CO2_TYPE`` sets the type of input CO2 for either "constant", "diagnostic" or prognostic". If "constant" the value from ``CCSM_CO2_PPMV`` will be used. If "diagnostic" or "prognostic" the values MUST be sent from the atmosphere model. diff --git a/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst b/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst index 82169e8238..71f4783b92 100644 --- a/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst +++ b/doc/source/users_guide/using-clm-tools/observational-sites-datasets.rst @@ -9,6 +9,6 @@ Observational Sites Datasets .. todo:: Update this. -There are two ways to customize datasets for a particular observational site. The first is to customize the input to the tools that create the dataset, and the second is to overwrite the default data after you've created a given dataset. Depending on the tool it might be easier to do it one way or the other. In Table :numref:`reqd-files-table` we list the files that are most likely to be customized and the way they might be customized. Of those files, the ones you are most likely to customize are: ``fatmlndfrc``, ``fsurdat``, ``faerdep`` (for DATM), and ``stream_fldfilename_ndep``. Note ``mksurfdata_esmf`` as documented previously has options to overwrite the vegetation and soil types. For more information on this also see :ref:`creating-your-own-singlepoint-dataset`. +There are two ways to customize datasets for a particular observational site. The first is to customize the input to the tools that create the dataset, and the second is to overwrite the default data after you've created a given dataset. Depending on the tool it might be easier to do it one way or the other. In Table :numref:`reqd-files-table` we list the files that are most likely to be customized and the way they might be customized. 
Of those files, the ones you are most likely to customize are: ``fatmlndfrc``, ``fsurdat``, ``faerdep`` (for DATM), and ``stream_fldfilename_ndep``. Note ``mksurfdata_esmf`` as documented previously has options to overwrite the vegetation and soil types. For more information on this also see :ref:`generic_single_point_runs`. -Another aspect of customizing your input datasets is customizing the input atmospheric forcing datasets; see :ref:`creating-your-own-singlepoint-dataset` for more information on this. +Another aspect of customizing your input datasets is customizing the input atmospheric forcing datasets; see :ref:`generic_single_point_runs` for more information on this. diff --git a/doc/source/users_guide/working-with-documentation/building-docs-multiple-versions.rst b/doc/source/users_guide/working-with-documentation/building-docs-multiple-versions.rst index e28029d7f3..895dbf2a65 100644 --- a/doc/source/users_guide/working-with-documentation/building-docs-multiple-versions.rst +++ b/doc/source/users_guide/working-with-documentation/building-docs-multiple-versions.rst @@ -7,12 +7,12 @@ There is a menu in the lower left of the webpage that lets readers switch betwee Note that this is not necessary in order for you to contribute an update to the documentation. GitHub will test this automatically when you open a PR. But if you'd like to try, this will generate a local site for you in ``_publish/`` and then open it: -.. literalinclude:: ../../../testing.sh +.. literalinclude:: ../../../test/test_container_eq_ctsm_pylib.sh :start-at: ./build_docs_to_publish :end-before: VERSION LINKS WILL NOT RESOLVE - :append: open _publish/index.html + :append: CMD _publish/index.html # where CMD is open for Mac or wslview for Windows (Ubuntu VM) -**Note:** This is not yet supported with Podman on Linux (including Ubuntu VM on Windows). See `doc-builder Issue #27: build_docs_to_publish fails on Linux (maybe just Ubuntu?) with Podman `_. 
+**Note:** This is not yet supported with Podman on Linux (including Ubuntu VM on Windows). See `doc-builder Issue #27: build_docs_to_publish fails on Linux (maybe just Ubuntu?) with Podman `_. It does work with Docker on Linux, though. How this works @@ -24,6 +24,6 @@ How this works :start-at: version of certain files we want to preserve :end-before: End version definitions -For each member of `VERSION_LIST`, ``build_docs_to_publish`` checks out its `ref`, then builds the documentation in a build directory. (`LATEST_REF` is set because some files, folders, and submodules are important for how the build works and need to stay the same for each build.) Once the build is complete, ``build_docs_to_publish`` should reset your local repo copy (CTSM clone) to how it was before you called ``build_docs_to_publish``. +For each member of ``VERSION_LIST``, ``build_docs_to_publish`` checks out its ``ref``, then builds the documentation in a build directory. (``LATEST_REF`` is set because some files, folders, and submodules are important for how the build works and need to stay the same for each build.) Once the build is complete, ``build_docs_to_publish`` should reset your local repo copy (CTSM clone) to how it was before you called ``build_docs_to_publish``. Next, ``build_docs_to_publish`` moves the HTML files from the build directory to the publish directory. The publish directory has a structure that matches the paths in the version dropdown menu's links. If a member of ``VERSION_LIST`` has ``landing_version=True``, its HTML will be at the top level. That makes it simple for people to find the default version of the docs at https://escomp.github.io/CTSM, rather than having to drill down further into something like ``https://escomp.github.io/CTSM/versions/latest``. 
diff --git a/doc/source/users_guide/working-with-documentation/building-docs-original-wiki.md b/doc/source/users_guide/working-with-documentation/building-docs-original-wiki.md index 251622b6f0..63acab53a7 100644 --- a/doc/source/users_guide/working-with-documentation/building-docs-original-wiki.md +++ b/doc/source/users_guide/working-with-documentation/building-docs-original-wiki.md @@ -2,7 +2,7 @@ # ⚠️ Original docs documentation from the GitHub Wiki -.. todo:: +.. warning:: ⚠️⚠️⚠️WARNING⚠️⚠️⚠️ The linked page contains documentation that (a) is more complicated than you probably require and (b) has not been fully checked for accuracy with the latest documentation setup. Unless you have a very good reason, you should probably go to :ref:`docs-intro-and-recommended`. diff --git a/doc/source/users_guide/working-with-documentation/building-docs-prereqs-windows.md b/doc/source/users_guide/working-with-documentation/building-docs-prereqs-windows.md index ab972cdfc4..ceb701b5cf 100644 --- a/doc/source/users_guide/working-with-documentation/building-docs-prereqs-windows.md +++ b/doc/source/users_guide/working-with-documentation/building-docs-prereqs-windows.md @@ -8,7 +8,17 @@ Note that you may need administrator privileges on your PC (or approval from you ## Install Linux subsystem -We don't support building our documentation in the native Windows command-line environment. Thus, you will need to install a little version of Linux inside a virtual machine (VM) to use instead. +We don't support building our documentation in the native Windows command-line environment. Thus, you will need to install a little version of Linux inside a virtual machine (VM) to use instead. The process for doing this varies depending on how tightly the installation process is controlled on your computer. 
+ +### NCAR computers + +Please follow the [Windows Subsystem for Linux (WSL) setup instructions](https://wiki.ucar.edu/pages/viewpage.action?pageId=514032264&spaceKey=CONFIGMGMT&title=Setup) on the UCAR Wiki. In the step about installing a Linux distribution, choose Ubuntu. + +Feel free to peruse the [overall WSL documentation](https://wiki.ucar.edu/spaces/CONFIGMGMT/pages/514032242/Windows+Subsystem+for+Linux) on and linked from the UCAR Wiki for additional information. + +### Non-NCAR computers + +If your computer is managed by an organization other than NCAR, please check with your IT department or equivalent for instructions on installing Windows Subsystem for Linux (WSL) and Ubuntu. Otherwise, follow these instructions: 1. Download and install Ubuntu from the Microsoft Store. 1. Restart your computer. @@ -16,6 +26,8 @@ We don't support building our documentation in the native Windows command-line e If Ubuntu opens in that last step but you see an error, you may need to manually enable Windows Subsystem for Linux (WSL). To do so: Open Control Panel, go to "Programs" > "Programs and Features" > "Turn Windows features on or off". Check the box next to "Windows Subsystem for Linux" and click OK. +Once Ubuntu is working and open, you'll be asked to create a new UNIX username and password. This doesn't have to match your Windows username and password, but do make sure to save this information somewhere secure. + .. _windows-docs-ubuntu-utilities: ## Install utilities @@ -31,9 +43,8 @@ which make || sudo apt-get -y install make which git || sudo apt-get -y install git which git-lfs || sudo apt-get -y install git-lfs -# Chromium: A web browser engine that's the basis for popular browsers like Google -# Chrome and Microsoft Edge -which chromium || sudo apt-get -y install chromium +# WSL utilities, which will give us the wslview command for opening HTML pages in a Windows browser +which wslview || sudo apt-get -y install wslu ``` .. 
_container-or-conda-windows: @@ -42,42 +53,78 @@ which chromium || sudo apt-get -y install chromium We recommend building the software in what's called a container—basically a tiny little operating system with just some apps and utilities needed by the doc-building process. This is nice because, if we change the doc-building process in ways that require new versions of those apps and utilities, that will be completely invisible to you. You won't need to manually do anything to update your setup to work with the new process; it'll just happen automatically. -We recommend using the container software Podman. +For builds in WSL (Ubuntu), we recommend using the container software Docker. You can install it in Ubuntu like so: -1. Install Podman with `sudo apt-get -y install podman`. -1. Set up and start a Podman "virtual machine" with `podman machine init --now`. -1. Test your installation by doing `podman run --rm hello-world`. If it worked, you should see ASCII art of the Podman logo. +```shell +# If needed, download and run the Docker installation script. +# Ignore the message saying "We recommend using Docker Desktop for Windows." +# The script will make you wait 20 seconds to make sure this is what you want, +# and then it should continue automatically. +which docker || curl -fsSL https://get.docker.com -o get-docker.sh +which docker || sudo sh ./get-docker.sh + +# Set up the docker "group," if needed, and add your username to it. +sudo groupadd docker # Create docker group if it doesn't exist +sudo usermod -aG docker $USER # Add your user to the docker group +newgrp docker # Apply the new group membership (avoids needing to log out and back in) + +# Make sure it worked: This should print a "Hello from Docker!" message +docker run hello-world +``` -You may not be able to install Podman or any other containerization software, so there is an alternative method: a Conda environment. 
+You may not be able to install Docker or any other containerization software, so there is an alternative method: a Conda environment. 1. Check whether you already have Conda installed by doing `which conda`. If that doesn't print anything, [install Miniconda](https://www.anaconda.com/docs/getting-started/miniconda/install#linux). 1. Follow the instructions for setting up the `ctsm_pylib` Conda environment in Sect. :numref:`using-ctsm-pylib`. +.. _editing-text-files-wsl: + +## Editing documentation files +If you prefer using an old-school text editor like `vim`, it's probably already installed in your Ubuntu VM, or can be installed with `sudo apt-get -y install EDITOR_NAME`. If you prefer a more user-friendly interface, there are several options. Note that **all commands in this section are to be run in your Ubuntu VM, not a Windows terminal**. -## Set up your permissions -This will make sure that you "own" your home directory in the Ubuntu VM. **In your Ubuntu terminal**, do: +### In a Windows app (recommended) +If you installed `wslview` in the instructions above, you can edit files by doing ```shell -chown -R $USER:$USER $HOME +wslview path/to/file_i_want_to_edit.rst ``` +If not, you can do +```shell +explorer.exe $(wslpath -w path/to/file_i_want_to_edit.rst) +``` +These both do the same thing, but the `wslview` method is simpler. Either way, at least the first time you do this, it will open a window asking which app you'd like to open the file in. Choose whatever you're most comfortable with. At the bottom of the window, you can then choose whether you always want to open HTML files using the selected app or just this once. -.. _editing-text-files-wsl: - -## Editing text files in an Ubuntu VM -If you prefer using an old-school text editor like `vim`, it's probably already installed, or can be installed with `sudo apt-get -y install EDITOR_NAME`. If you prefer a more user-friendly interface, there are several options. 
- -You may be able to edit files in your Ubuntu VM in the Ubuntu terminal by using the name of the Windows executable. For Notepad, for instance, you would do +You may also be able to open files in Windows apps by using the name of the Windows executable. For Notepad, for instance, you would do ```shell -notepad.exe file_i_want_to_edit.rst +notepad.exe $(wslpath -w path/to/file_i_want_to_edit.rst) ``` -If you use [VS Code](https://code.visualstudio.com/), you can install the [WSL VS Code extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-wsl). Then you can open any file or folder in your Ubuntu VM by doing +If you use [VS Code](https://code.visualstudio.com/), you can install the [WSL VS Code extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-wsl). Then (after closing and re-opening Ubuntu) you can open any documentation file **or folder** by doing ```shell code path/to/file-or-folder ``` +### In an Ubuntu app (not recommended) + You can also install a user-friendly text editor in Ubuntu. This may be slower and have unexpected differences in behavior from what you expect from Windows apps, but it does work. For example: - [gedit](https://gedit-text-editor.org/): `sudo apt-get install -y gedit` - [Kate](https://kate-editor.org/): `sudo apt-get install -y kate` - [VS Code](https://code.visualstudio.com/) (if you don't already have it installed on Windows): `sudo snap install code --classic` You can use all of those to open and edit files, but Kate and VS Code let you open entire folders, which can be convenient. In any case, you'd do `EDITOR_NAME path/to/thing/youre/editing` to open it, where `EDITOR_NAME` is `gedit`, `kate`, or `code`, respectively. 
+ +## Troubleshooting + +### "Permission denied" error + +If you get this error, it may be a result of opening Ubuntu as an administrator (e.g., by right-clicking on its icon and choosing "Run as administrator.") Try not doing that, although this will result in you needing to get a new copy of CTSM to work in. + +If that's not feasible or doesn't solve the problem, you may need to remind Linux that you do actually own your files. **In your Ubuntu terminal**, do: +```shell +chown -R $USER:$USER $HOME +``` + +If that also gives a permission error, you may need to put `sudo` at the start of the command. + +### "The host 'wsl$' was not found in the list of allowed hosts" + +You may see this warning in a dialog box after trying to open a file with `wslview`, `explorer.exe`, or something else. Check "Permanently allow host 'wsl$'" and then press "Allow". diff --git a/doc/source/users_guide/working-with-documentation/docs-intro-and-recommended.md b/doc/source/users_guide/working-with-documentation/docs-intro-and-recommended.md index 1501f8d48a..bfc537f223 100644 --- a/doc/source/users_guide/working-with-documentation/docs-intro-and-recommended.md +++ b/doc/source/users_guide/working-with-documentation/docs-intro-and-recommended.md @@ -55,8 +55,12 @@ open _build/html/index.html ### Windows (Ubuntu VM) -Assuming you installed Chromium in the :ref:`windows-docs-ubuntu-utilities` setup step, you can open your build of the documentation like so: +Assuming you installed the WSL Utilities in the :ref:`windows-docs-ubuntu-utilities` setup step, you can open your build of the documentation like so: ```shell -chromium _build/html/index.html & +wslview _build/html/index.html ``` -This will generate a lot of warnings in the terminal that seem to be inconsequential to our purpose here. You may need to press Ctrl-C and/or Enter a few times to clear them and return your cursor to the prompt. 
+If you didn't, you can do +```shell +explorer.exe $(wslpath -w _build/html/index.html) +``` +These both do the same thing, but the `wslview` method is simpler. Either way, at least the first time you do this, it will open a window asking which app you'd like to view the HTML file in. Choose a browser like Microsoft Edge or Chrome. At the bottom of the window, you can then choose whether you always want to open HTML files using the selected app or just this once. diff --git a/doc/source/users_guide/working-with-documentation/tips-for-working-with-rst.md b/doc/source/users_guide/working-with-documentation/tips-for-working-with-rst.md index 2dcc7f455e..164f24115b 100644 --- a/doc/source/users_guide/working-with-documentation/tips-for-working-with-rst.md +++ b/doc/source/users_guide/working-with-documentation/tips-for-working-with-rst.md @@ -2,6 +2,22 @@ # Tips for working with reStructuredText +If you've never used reStructuredText before, you should be aware that its syntax is pretty different from anything you've ever used before. We recommend the following resources as references for the syntax: +- [Sphinx's reStructuredText Primer](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html) +- The [Quick reStructuredText](https://docutils.sourceforge.io/docs/user/rst/quickref.html) cheat sheet + +Some especially useful bits: +- [Section headers](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#sections) +- [Hyperlinks](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#hyperlinks) +- [Callout blocks (e.g., warning, tip)](https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#admonitions-messages-and-warnings) + +On this page, we've compiled some supplemental information that might be helpful, including a list of common errors and their causes. + +.. contents:: + :depth: 1 + :backlinks: top + :local: + .. 
_rst-math: ## reStructuredText: Math @@ -22,10 +38,6 @@ Note (a) the leading spaces for each line after `.. math::` and (b) the empty li reStructuredText math largely follows LaTeX syntax. -Common errors: -- 'ERROR: Error in "math" directive: invalid option block': You might have forgotten the empty line after your equation label. -- "WARNING: Explicit markup ends without a blank line; unexpected unindent": You might have forgotten the leading spaces for every line after `.. math::`. You need at least one leading space on each line. - .. _rst-cross-references: ## reStructuredText: Cross-references @@ -56,12 +68,6 @@ You can have any link (except for equations) show custom text by putting the ref Note that this is necessary for labels that aren't immediately followed by a section heading, a table with a caption, or a figure with a caption. For instance, to refer to labels in our bibliography, you could do ``:ref:`(Bonan, 1996)``` → :ref:`(Bonan, 1996)`. -Common errors: -- "WARNING: Failed to create a cross reference. A title or caption not found": This probably means you tried to `:ref:` a label that's not immediately followed by (a) a table/figure with a caption or section or (b) a section (see above). -- "WARNING: undefined label": If you're sure the label you referenced actually exists, this probably means you tried to ``:numref:`` a label that's not immediately followed by a table, figure, or section (see above). Alternatively, you might have tried to ``:ref:`` an :ref:`equation`; in that case, use ``:eq:`` instead. -- "WARNING: malformed hyperlink target": You may have forgotten the trailing `:` on a label line. -- If you forget to surround a label with blank lines, you will get errors like "Explicit markup ends without a blank line; unexpected unindent [docutils]" that often point to lines far away from the actual problem. - .. _rst-comments: ## reStructuredText: Comments @@ -80,3 +86,80 @@ Make sure to include at least one empty line after the comment text. 
Tables defined with the [:table: directive](https://docutils.sourceforge.io/docs/ref/rst/directives.html#table) can be annoying because they're very sensitive to the cells inside them being precisely the right widths, as defined by the first `====` strings. If you don't get the widths right, you'll see "Text in column margin" errors. Instead, define your tables using the [list-table](https://docutils.sourceforge.io/docs/ref/rst/directives.html#list-table) directive. If you already have a table in some other format, like comma-separated values (CSV), you may want to check out the R package [knitr](https://cran.r-project.org/web/packages/knitr/index.html). Its [kable](https://bookdown.org/yihui/rmarkdown-cookbook/kable.html) command allows automatic conversion of R dataframes to tables in reStructuredText and other formats. + + +## reStructuredText: Common error messages and how to handle them + +.. _error-unexpected-unindent: + +### "ERROR: Unexpected indentation" + +Like Python, reStructuredText is very particular about how lines are indented. Indentation is used, for example, to denote [code ("literal") blocks](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks) and [quote blocks](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#lists-and-quote-like-blocks). An error like +``` +/path/to/file.rst:102: ERROR: Unexpected indentation. [docutils] +``` +indicates that line 102 is indented but not in a way that reStructuredText expects. + +### "WARNING: Block quote ends without a blank line; unexpected unindent" + +This is essentially the inverse of :ref:`error-unexpected-unindent`: The above line was indented but this one isn't. reStructuredText tried to interpret the indented line as a [block quote](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#lists-and-quote-like-blocks), but block quotes require a blank line after them. + +.. 
_inline-literal-start-without-end: + +### "WARNING: Inline literal start-string without end-string" + +An "inline literal" is when you want to mix code into a normal line of text (as opposed to in its own code block) ``like this``. This is accomplished with double-backticks: +```reStructuredText +An "inline literal" is when you want to mix code into a normal line of +text (as opposed to in its own code block) ``like this``. +``` +(A backtick is what you get if you press the key to the left of 1 on a standard US English keyboard.) + +If you have a double-backtick on a line, reStructuredText will think, "They want to start an inline literal here," then look for another double-backtick to end the literal. The "WARNING: Inline literal start-string without end-string" means it can't find one on that line. + +This might happen, for example, if you try to put a [Markdown code block](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/creating-and-highlighting-code-blocks) in a .rst file. In that case, use the [reStructuredText code block syntax](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks) instead (optionally with [syntax highlighting](https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-highlight)). + +### "WARNING: Inline interpreted text or phrase reference start-string without end-string" + +Like :ref:`inline-literal-start-without-end`, this is probably related to having one double-backtick without another on the same line. As with that other error, it could be the result of a Markdown code block in a .rst file. + +### "ERROR: Error in "code" directive: maximum 1 argument(s) allowed, 19 supplied" + +This error might show something other than "code," like "highlight" or "sourcecode". It also will probably show a second number that's not 19. 
The problem is that you tried to write a [reStructuredText code block with syntax highlighting](https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-highlight) but didn't include a blank line after the first one: + +```reStructuredText +.. code:: shell + # How to list all the available grids + cd cime/scripts + ./query_config --grids +``` + +Fix this by adding a blank line: +```reStructuredText +.. code:: shell + + # How to list all the available grids + cd cime/scripts + ./query_config --grids +``` + +### 'ERROR: Error in "math" directive: invalid option block' + +You might have forgotten the empty line after an equation label. + +### "WARNING: Explicit markup ends without a blank line; unexpected unindent" + +You might have forgotten the leading spaces for every line after `.. math::`. As a reminder, you need at least one leading space on each line. + +You can also get this error if you forget to surround a :ref:`cross-reference label` with blank lines. In this case, the error message might point to lines far away from the actual problem. + +### "WARNING: Failed to create a cross reference: A title or caption not found" +This probably means you tried to `:ref:` a label that's not immediately followed by (a) a table/figure with a caption or (b) a section. + +### "WARNING: undefined label" + +If you're sure the label you referenced actually exists, this probably means you tried to ``:numref:`` a label that's not immediately followed by a table, figure, or section (see above). Alternatively, you might have tried to ``:ref:`` an :ref:`equation`; in that case, use ``:eq:`` instead. + +### "WARNING: malformed hyperlink target" + +You may have forgotten the trailing `:` on a label line. 
\ No newline at end of file diff --git a/doc/test/compose_test_cmd.sh b/doc/test/compose_test_cmd.sh new file mode 100755 index 0000000000..2b2fd3cf67 --- /dev/null +++ b/doc/test/compose_test_cmd.sh @@ -0,0 +1,13 @@ +# This should only be run locally within another shell + +if [[ "${cli_tool}" == "" ]]; then + echo "${msg} (no container)" +else + cmd="${cmd} -d" + if [[ "${cli_tool}" != "default" ]]; then + cmd="${cmd} --container-cli-tool ${cli_tool}" + fi + echo "${msg} (container: ${cli_tool})" +fi + +echo "${cmd}" diff --git a/doc/test/test_build_docs_-b.sh b/doc/test/test_build_docs_-b.sh new file mode 100755 index 0000000000..8b49e2f7aa --- /dev/null +++ b/doc/test/test_build_docs_-b.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +cli_tool="$1" + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/.." + +msg="~~~~~ Check that -b works" +cmd="./build_docs -b _build -c" + +. test/compose_test_cmd.sh +set -x +$cmd + +exit 0 diff --git a/doc/test/test_build_docs_-r-v.sh b/doc/test/test_build_docs_-r-v.sh new file mode 100755 index 0000000000..6f9415b563 --- /dev/null +++ b/doc/test/test_build_docs_-r-v.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +cli_tool="$1" + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/.." + +msg="~~~~~ Check that -r -v works" +cmd="./build_docs -r _build -v latest -c --conf-py-path doc-builder/test/conf.py --static-path ../_static --templates-path ../_templates" + +. test/compose_test_cmd.sh +set -x
$cmd + +exit 0 diff --git a/doc/test/test_container_eq_ctsm_pylib.sh b/doc/test/test_container_eq_ctsm_pylib.sh new file mode 100755 index 0000000000..729f1b723e --- /dev/null +++ b/doc/test/test_container_eq_ctsm_pylib.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +# Compare docs built with container vs. 
ctsm_pylib + +cli_tool="$1" + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/.." + +rm -rf _publish* + +# Build all docs using container +echo "~~~~~ Build all docs using container" +# Also do a custom --conf-py-path +rm -rf _build _publish +d1="$PWD/_publish_container" +./build_docs_to_publish -r _build -d --site-root "$PWD/_publish" +# VERSION LINKS WILL NOT RESOLVE IN _publish_container +cp -a _publish "${d1}" + +# Build all docs using ctsm_pylib +echo "~~~~~ Build all docs using ctsm_pylib" +rm -rf _build _publish +d2="$PWD/_publish_nocontainer" +conda run -n ctsm_pylib --no-capture-output ./build_docs_to_publish -r _build --site-root "$PWD/_publish" --conf-py-path doc-builder/test/conf.py --static-path ../_static --templates-path ../_templates +# VERSION LINKS WILL NOT RESOLVE IN _publish_nocontainer +cp -a _publish "${d2}" + +# Make sure container version is identical to no-container version +echo "~~~~~ Make sure container version is identical to no-container version" +diff -qr "${d1}" "${d2}" +echo "Successful: Docs built with container are identical to those built without" + +exit 0 diff --git a/doc/test/test_doc-builder_tests.sh b/doc/test/test_doc-builder_tests.sh new file mode 100755 index 0000000000..07cfa73ea1 --- /dev/null +++ b/doc/test/test_doc-builder_tests.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}" + +echo "~~~~~ Check that doc-builder tests pass" +cd ../doc-builder/test +set -x +conda run --no-capture-output -n ctsm_pylib make test + +exit 0 diff --git a/doc/test/test_makefile_method.sh b/doc/test/test_makefile_method.sh new file mode 100755 index 0000000000..b0fd80984e --- /dev/null +++ b/doc/test/test_makefile_method.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +cli_tool="$1" + +SCRIPT_DIR="$( cd -- "$( 
dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/.." + +echo "~~~~~ Check that Makefile method works" +set -x +make SPHINXOPTS="-W --keep-going" BUILDDIR=${PWD}/_build html + +exit 0 diff --git a/doc/test/testing.sh b/doc/test/testing.sh new file mode 100755 index 0000000000..2e91025e6c --- /dev/null +++ b/doc/test/testing.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Fail on any non-zero exit code +set -e + +SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "${SCRIPT_DIR}/" + +# Compare docs built with container vs. ctsm_pylib +./test_container_eq_ctsm_pylib.sh + +# Check that -r -v works (Docker) +# Also do a custom --conf-py-path and other stuff +cd "${SCRIPT_DIR}/" +rm -rf _build +./test_build_docs_-r-v.sh docker + +# Check that Makefile method works +cd "${SCRIPT_DIR}/" +rm -rf _build +conda run --no-capture-output -n ctsm_pylib ./test_makefile_method.sh + +# Check that -b works +cd "${SCRIPT_DIR}/" +rm -rf _build +./test_build_docs_-b.sh docker + +# Check that doc-builder tests pass +# Don't run if on a GitHub runner; failing 🤷. Trust that doc-builder does this test. 
+if [[ "${GITHUB_ACTIONS}" == "" ]]; then + cd "${SCRIPT_DIR}/" + ./test_doc-builder_tests.sh +fi + +exit 0 diff --git a/doc/testing.sh b/doc/testing.sh deleted file mode 100755 index 9253df848c..0000000000 --- a/doc/testing.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash -set -e -set -x - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -cd "${SCRIPT_DIR}" - -../bin/git-fleximod update -o -rm -rf _publish* - -# Build all docs using container -echo "~~~~~ Build all docs using container" -# Also do a custom --conf-py-path -rm -rf _build _publish -d1="$PWD/_publish_container" -./build_docs_to_publish -r _build -d --site-root "$PWD/_publish" -# VERSION LINKS WILL NOT RESOLVE IN _publish_container -cp -a _publish "${d1}" - -# Build all docs using ctsm_pylib -echo "~~~~~ Build all docs using ctsm_pylib" -rm -rf _build _publish -d2="$PWD/_publish_nocontainer" -conda run -n ctsm_pylib ./build_docs_to_publish -r _build --site-root "$PWD/_publish" --conf-py-path doc-builder/test/conf.py --static-path ../_static --templates-path ../_templates -# VERSION LINKS WILL NOT RESOLVE IN _publish_nocontainer -cp -a _publish "${d2}" - -# Make sure container version is identical to no-container version -echo "~~~~~ Make sure container version is identical to no-container version" -diff -qr "${d1}" "${d2}" - -# Check that -r -v works -echo "~~~~~ Check that -r -v works (Docker)" -# Also do a custom --conf-py-path -rm -rf _build_container -./build_docs -r _build_container -v latest -d -c --conf-py-path doc-builder/test/conf.py --static-path ../_static --templates-path ../_templates --container-cli-tool docker - -# Check that Makefile method works -echo "~~~~~ Check that Makefile method works" -rm -rf _build -conda run -n ctsm_pylib make SPHINXOPTS="-W --keep-going" BUILDDIR=${PWD}/_build html - -# Check that -b works -echo "~~~~~ Check that -b works (Podman)" -rm -rf _build_container -./build_docs -b _build_container -d -c --container-cli-tool docker - -# 
Check that doc-builder tests pass -# Don't run if on a GitHub runner; failing 🤷. Trust that doc-builder does this test. -if [[ "${GITHUB_ACTIONS}" == "" ]]; then - echo "~~~~~ Check that doc-builder tests pass" - cd doc-builder/test - conda run -n ctsm_pylib make test -fi - -exit 0 \ No newline at end of file diff --git a/parse_cime.cs.status b/parse_cime.cs.status index daaaef2293..eb5c140df9 100755 --- a/parse_cime.cs.status +++ b/parse_cime.cs.status @@ -328,13 +328,17 @@ sub print_categories { my $scrdir = shift(@_); my %csstatus = @_; - my $expectedfailfile = "$scrdir/components/clm/cime_config/testdefs/ExpectedTestFails.xml"; - if ( ! -f $expectedfailfile ) { - $expectedfailfile = "$scrdir/cime_config/testdefs/ExpectedTestFails.xml"; + my $srcroot = "$scrdir"; + my $expectedfailfile = "$srcroot/cime_config/testdefs/ExpectedTestFails.xml"; + if ( $srcroot =~ m|/components/clm$|) { + $srcroot = absolute_path( "$scrdir/../.." ); + if ( ! -f $expectedfailfile ) { + die "ERROR: CTSM ExpectedTestFails.xml file NOT found in $scrdir\n"; + } } - my @failfiles = ( $expectedfailfile, "$scrdir/components/mizuRoute/cime_config/testdefs/ExpectedTestFails.xml", - #"$scrdir/components/mosart/cime_config/testdefs/ExpectedTestFails.xml", - "$scrdir/components/cmeps/cime_config/ExpectedTestFails.xml" ); + my @failfiles = ( $expectedfailfile, "$srcroot/components/mizuroute/cime_config/testdefs/ExpectedTestFails.xml", + "$srcroot/components/mosart/cime_config/testdefs/ExpectedTestFails.xml", + "$srcroot/components/cmeps/cime_config/ExpectedTestFails.xml" ); my @passes; my @fails; my @pendings; diff --git a/python/ctsm/crop_calendars/check_rxboth_run.py b/python/ctsm/crop_calendars/check_rxboth_run.py index 2bb0872d45..568cb63822 100644 --- a/python/ctsm/crop_calendars/check_rxboth_run.py +++ b/python/ctsm/crop_calendars/check_rxboth_run.py @@ -78,7 +78,7 @@ def main(argv): any_bad = False - annual_outfiles = glob.glob(os.path.join(args.directory, "*.clm2.h1.*.nc")) + 
annual_outfiles = glob.glob(os.path.join(args.directory, "*.clm2.h1i.*.nc")) # These should be constant in a Prescribed Calendars (rxboth) run, as long as the inputs were # static. diff --git a/python/ctsm/crop_calendars/generate_gdds_functions.py b/python/ctsm/crop_calendars/generate_gdds_functions.py index f80f1e55f7..0489f320b7 100644 --- a/python/ctsm/crop_calendars/generate_gdds_functions.py +++ b/python/ctsm/crop_calendars/generate_gdds_functions.py @@ -280,13 +280,13 @@ def import_and_process_1yr( chunks = None # Get h1 file (list) - h1_pattern = os.path.join(indir, "*h1.*.nc") + h1_pattern = os.path.join(indir, "*h1i.*.nc") h1_filelist = glob.glob(h1_pattern) if not h1_filelist: - h1_pattern = os.path.join(indir, "*h1.*.nc.base") + h1_pattern = os.path.join(indir, "*h1i.*.nc.base") h1_filelist = glob.glob(h1_pattern) if not h1_filelist: - error(logger, "No files found matching pattern '*h1.*.nc(.base)'") + error(logger, "No files found matching pattern '*h1i.*.nc(.base)'") # Get list of crops to include if skip_crops is not None: @@ -566,7 +566,7 @@ def import_and_process_1yr( log(logger, " Importing accumulated GDDs...") clm_gdd_var = "GDDACCUM" my_vars = [clm_gdd_var, "GDDHARV"] - patterns = [f"*h2.{this_year-1}-01*.nc", f"*h2.{this_year-1}-01*.nc.base"] + patterns = [f"*h2i.{this_year-1}-01*.nc", f"*h2i.{this_year-1}-01*.nc.base"] for pat in patterns: pattern = os.path.join(indir, pat) h2_files = glob.glob(pattern) diff --git a/python/ctsm/longitude.py b/python/ctsm/longitude.py index 8afa731131..96fd134082 100644 --- a/python/ctsm/longitude.py +++ b/python/ctsm/longitude.py @@ -58,7 +58,7 @@ def _convert_lon_type_180_to_360(lon_in): return lon_out -def _detect_lon_type(lon_in): +def detect_lon_type(lon_in): """ Detect longitude type of a given numeric. 
If lon_in contains more than one number (as in a list or Numpy array), this function will assume all members are of the same type if (a) there is at @@ -177,6 +177,13 @@ def __ge__(self, other): self._check_lons_same_type(other) return self._lon >= other._lon + def __str__(self): + """ + We don't allow implicit string conversion because the user should always specify the + Longitude type they want + """ + raise NotImplementedError("Use Longitude.get_str() instead of implicit string conversion") + def get(self, lon_type_out): """ Get the longitude value, converting longitude type if needed @@ -189,6 +196,14 @@ def get(self, lon_type_out): return _convert_lon_type_180_to_360(self._lon) raise RuntimeError(f"Add handling for lon_type_out {lon_type_out}") + def get_str(self, lon_type_out): + """ + Get the longitude value as a string, converting longitude type if needed + """ + lon_out = self.get(lon_type_out) + # Use float() because the standard in CTSM filenames is to put .0 after whole-number values + return str(float(lon_out)) + def lon_type(self): """ Getter method for self._lon_type diff --git a/python/ctsm/pft_utils.py b/python/ctsm/pft_utils.py new file mode 100644 index 0000000000..40ab8b9f23 --- /dev/null +++ b/python/ctsm/pft_utils.py @@ -0,0 +1,21 @@ +""" +Constants and functions relating to PFTs +""" + +MIN_PFT = 0 # bare ground +MIN_NAT_PFT = 1 # minimum natural pft (not including bare ground) +MAX_NAT_PFT = 14 # maximum natural pft +MAX_PFT_GENERICCROPS = 16 # for runs with generic crops +MAX_PFT_MANAGEDCROPS = 78 # for runs with explicit crops + + +def is_valid_pft(pft_num, managed_crops): + """ + Given a number, check whether it represents a valid PFT (bare ground OK) + """ + if managed_crops: + max_allowed_pft = MAX_PFT_MANAGEDCROPS + else: + max_allowed_pft = MAX_PFT_GENERICCROPS + + return MIN_PFT <= pft_num <= max_allowed_pft diff --git a/python/ctsm/site_and_regional/plumber2_shared.py b/python/ctsm/site_and_regional/plumber2_shared.py new file mode 
100644 index 0000000000..d4ab9d00b3 --- /dev/null +++ b/python/ctsm/site_and_regional/plumber2_shared.py @@ -0,0 +1,21 @@ +""" +Things shared between plumber2 scripts +""" + +import os +import pandas as pd +from ctsm.path_utils import path_to_ctsm_root + +PLUMBER2_SITES_CSV = os.path.join( + path_to_ctsm_root(), + "tools", + "site_and_regional", + "PLUMBER2_sites.csv", +) + + +def read_plumber2_sites_csv(file=PLUMBER2_SITES_CSV): + """ + Read PLUMBER2_sites.csv using pandas + """ + return pd.read_csv(file, skiprows=5) diff --git a/python/ctsm/site_and_regional/plumber2_surf_wrapper.py b/python/ctsm/site_and_regional/plumber2_surf_wrapper.py index 022914d17e..cedc6b25e0 100755 --- a/python/ctsm/site_and_regional/plumber2_surf_wrapper.py +++ b/python/ctsm/site_and_regional/plumber2_surf_wrapper.py @@ -22,16 +22,18 @@ import argparse import logging -import os -import subprocess +import sys import tqdm -import pandas as pd +# pylint:disable=wrong-import-position +from ctsm.site_and_regional.plumber2_shared import PLUMBER2_SITES_CSV, read_plumber2_sites_csv +from ctsm import subset_data +from ctsm.pft_utils import MAX_PFT_MANAGEDCROPS, is_valid_pft -def get_parser(): +def get_args(): """ - Get parser object for this script. + Get arguments for this script. """ parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter @@ -45,39 +47,44 @@ def get_parser(): help="Verbose mode will print more information. ", action="store_true", dest="verbose", - default=False, ) parser.add_argument( - "--16pft", - help="Create and/or modify 16-PFT surface datasets (e.g. for a FATES run) ", + "--crop", + help=f"Create and/or modify {MAX_PFT_MANAGEDCROPS}-PFT " + "surface datasets (e.g. 
for a non-FATES run)", action="store_true", - dest="pft_16", - default=True, + dest="use_managed_crops", ) - return parser + parser.add_argument( + "--overwrite", + help="Overwrite any existing files", + action="store_true", + ) + + parser.add_argument( + "--plumber2-sites-csv", + help=f"Comma-separated value (CSV) file with Plumber2 sites. Default: {PLUMBER2_SITES_CSV}", + default=PLUMBER2_SITES_CSV, + ) + + return parser.parse_args() def execute(command): """ - Function for running a command on shell. + Runs subset_data with given arguments. Args: - command (str): - command that we want to run. + command (list): + list of args for command that we want to run. Raises: - Error with the return code from shell. + Whatever error subset_data gives, if any. """ print("\n", " >> ", *command, "\n") - try: - subprocess.check_call(command, stdout=open(os.devnull, "w"), stderr=subprocess.STDOUT) - - except subprocess.CalledProcessError as err: - # raise RuntimeError("command '{}' return with error - # (code {}): {}".format(e.cmd, e.returncode, e.output)) - # print (e.ouput) - print(err) + sys.argv = command + subset_data.main() def main(): @@ -85,97 +92,103 @@ def main(): Read plumber2_sites from csv, iterate through sites, and add dominant PFT """ - args = get_parser().parse_args() + args = get_args() if args.verbose: logging.basicConfig(level=logging.DEBUG) - plumber2_sites = pd.read_csv("PLUMBER2_sites.csv", skiprows=4) + plumber2_sites = read_plumber2_sites_csv(args.plumber2_sites_csv) for _, row in tqdm.tqdm(plumber2_sites.iterrows()): lat = row["Lat"] lon = row["Lon"] site = row["Site"] + + clmsite = "1x1_PLUMBER2_" + site + print("Now processing site :", site) + + # Set up part of subset_data command that is shared among all options + subset_command = [ + "./subset_data", + "point", + "--lat", + str(lat), + "--lon", + str(lon), + "--site", + clmsite, + "--create-surface", + "--uniform-snowpack", + "--cap-saturation", + "--lon-type", + "180", + ] + + # Read info for 
first PFT pft1 = row["pft1"] + if not is_valid_pft(pft1, args.use_managed_crops): + raise RuntimeError(f"pft1 must be a valid PFT; got {pft1}") pctpft1 = row["pft1-%"] cth1 = row["pft1-cth"] cbh1 = row["pft1-cbh"] - pft2 = row["pft2"] - pctpft2 = row["pft2-%"] - cth2 = row["pft2-cth"] - cbh2 = row["pft2-cbh"] - # overwrite missing values from .csv file - if pft1 == -999: - pft1 = 0 - pctpft1 = 0 - cth1 = 0 - cbh1 = 0 - if pft2 == -999: - pft2 = 0 - pctpft2 = 0 - cth2 = 0 - cbh2 = 0 - clmsite = "1x1_PLUMBER2_" + site - print("Now processing site :", site) - if args.pft_16: - # use surface dataset with 16 pfts, but overwrite to 100% 1 dominant PFT - # don't set crop flag - # set dominant pft - subset_command = [ - "./subset_data", - "point", - "--lat", - str(lat), - "--lon", - str(lon), - "--site", - clmsite, + # Read info for second PFT, if a valid one is given in the .csv file + pft2 = row["pft2"] + if is_valid_pft(pft2, args.use_managed_crops): + pctpft2 = row["pft2-%"] + cth2 = row["pft2-cth"] + cbh2 = row["pft2-cbh"] + + # Set dominant PFT(s) + if is_valid_pft(pft2, args.use_managed_crops): + subset_command += [ "--dompft", str(pft1), str(pft2), "--pctpft", str(pctpft1), str(pctpft2), - "--cth", - str(cth1), - str(cth2), - "--cbh", - str(cbh1), - str(cbh2), - "--create-surface", - "--uniform-snowpack", - "--cap-saturation", - "--verbose", - "--overwrite", ] else: - # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT - # NOTE: FATES will currently not run with a 78-PFT surface dataset - # set crop flag - # set dominant pft - subset_command = [ - "./subset_data", - "point", - "--lat", - str(lat), - "--lon", - str(lon), - "--site", - clmsite, - "--crop", + subset_command += [ "--dompft", str(pft1), - str(pft2), "--pctpft", str(pctpft1), - str(pctpft2), - "--create-surface", - "--uniform-snowpack", - "--cap-saturation", - "--verbose", - "--overwrite", ] + + if not args.use_managed_crops: + # use surface dataset with 78 pfts, but overwrite to 
100% 1 dominant PFT + # don't set crop flag + # set canopy top and bottom heights + if is_valid_pft(pft2, args.use_managed_crops): + subset_command += [ + "--cth", + str(cth1), + str(cth2), + "--cbh", + str(cbh1), + str(cbh2), + ] + else: + subset_command += [ + "--cth", + str(cth1), + "--cbh", + str(cbh1), + ] + else: + # use surface dataset with 78 pfts, and overwrite to 100% 1 dominant PFT + # NOTE: FATES will currently not run with a 78-PFT surface dataset + # set crop flag + subset_command += ["--crop"] + # don't set canopy top and bottom heights + + if args.verbose: + subset_command += ["--verbose"] + if args.overwrite: + subset_command += ["--overwrite"] + execute(subset_command) diff --git a/python/ctsm/site_and_regional/plumber2_usermods.py b/python/ctsm/site_and_regional/plumber2_usermods.py index 7b7f294a24..7c8f37b1b5 100644 --- a/python/ctsm/site_and_regional/plumber2_usermods.py +++ b/python/ctsm/site_and_regional/plumber2_usermods.py @@ -13,7 +13,8 @@ import os import tqdm -import pandas as pd +# pylint:disable=wrong-import-position +from ctsm.site_and_regional.plumber2_shared import read_plumber2_sites_csv # Big ugly function to create usermod_dirs for each site @@ -155,7 +156,7 @@ def main(): """ # For now we can just run the 'main' program as a loop - plumber2_sites = pd.read_csv("PLUMBER2_sites.csv", skiprows=4) + plumber2_sites = read_plumber2_sites_csv() for _, row in tqdm.tqdm(plumber2_sites.iterrows()): lat = row["Lat"] diff --git a/python/ctsm/site_and_regional/regional_case.py b/python/ctsm/site_and_regional/regional_case.py index 94f6011569..1b52e72ab4 100644 --- a/python/ctsm/site_and_regional/regional_case.py +++ b/python/ctsm/site_and_regional/regional_case.py @@ -19,7 +19,7 @@ from ctsm.utils import add_tag_to_filename from ctsm.utils import abort from ctsm.config_utils import check_lon1_lt_lon2 -from ctsm.longitude import Longitude, _detect_lon_type +from ctsm.longitude import Longitude, detect_lon_type logger = 
logging.getLogger(__name__) @@ -142,7 +142,7 @@ def _subset_lon_lat(self, x_dim, y_dim, f_in): # Detect longitude type (180 or 360) of input file, throwing a helpful error if it can't be # determined. - f_lon_type = _detect_lon_type(lon) + f_lon_type = detect_lon_type(lon) lon1_type = self.lon1.lon_type() lon2_type = self.lon2.lon_type() if lon1_type != lon2_type: @@ -160,6 +160,20 @@ def _subset_lon_lat(self, x_dim, y_dim, f_in): f_out = f_in.isel({y_dim: yind, x_dim: xind}) return f_out + def _get_lon_strings(self): + """ + Get the string versions of the region's longitudes + """ + if isinstance(self.lon1, Longitude): + lon1_str = self.lon1.get_str(self.lon1.lon_type()) + else: + lon1_str = str(self.lon1) + if isinstance(self.lon2, Longitude): + lon2_str = self.lon2.get_str(self.lon2.lon_type()) + else: + lon2_str = str(self.lon2) + return lon1_str, lon2_str + def create_tag(self): """ Create a tag for a region which is either the region name @@ -169,9 +183,8 @@ def create_tag(self): if self.reg_name: self.tag = self.reg_name else: - self.tag = "{}-{}_{}-{}".format( - str(self.lon1), str(self.lon2), str(self.lat1), str(self.lat2) - ) + lon1_str, lon2_str = self._get_lon_strings() + self.tag = "{}-{}_{}-{}".format(lon1_str, lon2_str, str(self.lat1), str(self.lat2)) def check_region_bounds(self): """ @@ -179,10 +192,11 @@ def check_region_bounds(self): """ # If you're calling this, lat/lon bounds need to have been provided if any(x is None for x in [self.lon1, self.lon2, self.lat1, self.lat2]): + lon1_str, lon2_str = self._get_lon_strings() raise argparse.ArgumentTypeError( "Latitude and longitude bounds must be provided and not None.\n" - + f" lon1: {self.lon1}\n" - + f" lon2: {self.lon2}\n" + + f" lon1: {lon1_str}\n" + + f" lon2: {lon2_str}\n" + f" lat1: {self.lat1}\n" + f" lat2: {self.lat2}" ) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index bd16bae226..c99a240513 100644 --- 
a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -15,17 +15,11 @@ # -- import local classes for this script from ctsm.site_and_regional.base_case import BaseCase, USRDAT_DIR, DatmFiles from ctsm.utils import add_tag_to_filename, ensure_iterable +from ctsm.longitude import detect_lon_type +from ctsm.pft_utils import MAX_NAT_PFT, MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS logger = logging.getLogger(__name__) -NAT_PFT = 15 # natural pfts -NUM_PFT = 17 # for runs with generic crops -MAX_PFT = 78 # for runs with explicit crops - -# -- constants to represent months of year -FIRST_MONTH = 1 -LAST_MONTH = 12 - class SinglePointCase(BaseCase): """ @@ -151,6 +145,29 @@ def __init__( # self.check_nonveg() self.check_pct_pft() + def convert_plon_to_filetype_if_needed(self, lon_da): + """ + Check that point and input file longitude types are equal. If not, convert point to match + file. + """ + plon_in = self.plon + f_lon_type = detect_lon_type(lon_da) + plon_type = plon_in.lon_type() + if f_lon_type == plon_type: + plon_out = plon_in.get(plon_type) + else: + plon_orig = plon_in.get(plon_type) + plon_out = plon_in.get(f_lon_type) + if plon_orig != plon_out: + logger.info( + "Converted plon from type %s (value %f) to type %s (value %f)", + plon_type, + plon_orig, + f_lon_type, + plon_out, + ) + return plon_out + def create_tag(self): """ Create a tag for single point which is the site name @@ -159,7 +176,7 @@ def create_tag(self): if self.site_name: self.tag = self.site_name else: - self.tag = "{}_{}".format(str(self.plon), str(self.plat)) + self.tag = "{}_{}".format(self.plon.get_str(self.plon.lon_type()), str(self.plat)) def check_dom_pft(self): """ @@ -173,20 +190,21 @@ def check_dom_pft(self): same range. e.g. 
If users specified multiple dom_pft, they should be either in : - - 0 - NAT_PFT-1 range + - 0 - MAX_NAT_PFT range or - - NAT_PFT - MAX_PFT range + - MAX_NAT_PFT+1 - MAX_PFT_MANAGEDCROPS range - give an error: mixed land units not possible ------------- Raises: Error (ArgumentTypeError): - If any dom_pft is bigger than MAX_PFT. + If any dom_pft is bigger than MAX_PFT_MANAGEDCROPS. Error (ArgumentTypeError): If any dom_pft is less than 1. Error (ArgumentTypeError): If mixed land units are chosen. - dom_pft values are both in range of (0 - NAT_PFT-1) and (NAT_PFT - MAX_PFT). + dom_pft values are both in range of + (0 - MAX_NAT_PFT) and (MAX_NAT_PFT+1 - MAX_PFT_MANAGEDCROPS). """ @@ -200,27 +218,29 @@ def check_dom_pft(self): min_dom_pft = min(self.dom_pft) max_dom_pft = max(self.dom_pft) - # -- check dom_pft values should be between 0-MAX_PFT - if min_dom_pft < 0 or max_dom_pft > MAX_PFT: - err_msg = "values for --dompft should be between 1 and 78." + # -- check dom_pft values should be between 0-MAX_PFT_MANAGEDCROPS + if min_dom_pft < 0 or max_dom_pft > MAX_PFT_MANAGEDCROPS: + err_msg = f"values for --dompft should be between 1 and {MAX_PFT_MANAGEDCROPS}." raise argparse.ArgumentTypeError(err_msg) # -- check dom_pft vs num_pft if max_dom_pft > self.num_pft: - err_msg = "Please use --crop flag when --dompft is above 16." + err_msg = f"Please use --crop flag when --dompft is above {MAX_PFT_GENERICCROPS}." 
raise argparse.ArgumentTypeError(err_msg) # -- check dom_pft vs MAX_pft - if self.num_pft - 1 < max_dom_pft < NUM_PFT: + if self.num_pft - 1 < max_dom_pft <= MAX_PFT_GENERICCROPS: logger.info( - "WARNING, you trying to run with generic crops (16 PFT surface dataset)" + "WARNING, you are trying to run with generic crops (%s PFT surface dataset)", + MAX_PFT_GENERICCROPS, ) # -- check if all dom_pft are in the same range: - if min_dom_pft < NAT_PFT <= max_dom_pft: + if min_dom_pft <= MAX_NAT_PFT < max_dom_pft: err_msg = ( "You are subsetting using mixed land units that have both " - "natural pfts and crop cfts. Check your surface dataset. " + "natural pfts and crop cfts. Check your surface dataset.\n" + f"{min_dom_pft} <= {MAX_NAT_PFT} < {max_dom_pft}\n" ) raise argparse.ArgumentTypeError(err_msg) @@ -316,7 +336,11 @@ def create_domain_at_point(self, indir, file): Create domain file for this SinglePointCase class. """ logger.info("----------------------------------------------------------------------") - logger.info("Creating domain file at %s, %s.", str(self.plon), str(self.plat)) + logger.info( + "Creating domain file at %s, %s.", + self.plon.get_str(self.plon.lon_type()), + str(self.plat), + ) # specify files fdomain_in = os.path.join(indir, file) @@ -350,7 +374,7 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): logger.info("----------------------------------------------------------------------") logger.info( "Creating land use file at %s, %s.", - str(self.plon), + self.plon.get_str(self.plon.lon_type()), str(self.plat), ) @@ -363,8 +387,11 @@ def create_landuse_at_point(self, indir, file, user_mods_dir): # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fluse_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + # get point longitude, converting to match file type if needed + plon_float = self.convert_plon_to_filetype_if_needed(f_in["lsmlon"]) + # extract gridcell closest to plon/plat - f_out = f_in.sel(lsmlon=self.plon, 
lsmlat=self.plat, method="nearest") + f_out = f_in.sel(lsmlon=plon_float, lsmlat=self.plat, method="nearest") # expand dimensions f_out = f_out.expand_dims(["lsmlat", "lsmlon"]) @@ -405,7 +432,7 @@ def modify_surfdata_atpoint(self, f_orig): if self.dom_pft is not None: max_dom_pft = max(self.dom_pft) # -- First initialize everything: - if max_dom_pft < NAT_PFT: + if max_dom_pft <= MAX_NAT_PFT: f_mod["PCT_NAT_PFT"][:, :, :] = 0 else: f_mod["PCT_CFT"][:, :, :] = 0 @@ -424,10 +451,10 @@ def modify_surfdata_atpoint(self, f_orig): if cth is not None: f_mod["MONTHLY_HEIGHT_TOP"][:, :, :, dom_pft] = cth f_mod["MONTHLY_HEIGHT_BOT"][:, :, :, dom_pft] = cbh - if dom_pft < NAT_PFT: + if dom_pft <= MAX_NAT_PFT: f_mod["PCT_NAT_PFT"][:, :, dom_pft] = pct_pft else: - dom_pft = dom_pft - NAT_PFT + dom_pft = dom_pft - (MAX_NAT_PFT + 1) f_mod["PCT_CFT"][:, :, dom_pft] = pct_pft # ------------------------------- @@ -445,7 +472,7 @@ def modify_surfdata_atpoint(self, f_orig): if self.dom_pft is not None: max_dom_pft = max(self.dom_pft) - if max_dom_pft < NAT_PFT: + if max_dom_pft <= MAX_NAT_PFT: f_mod["PCT_NATVEG"][:, :] = 100 f_mod["PCT_CROP"][:, :] = 0 else: @@ -482,7 +509,7 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir, specify_fsurf_out logger.info("----------------------------------------------------------------------") logger.info( "Creating surface dataset file at %s, %s", - str(self.plon), + self.plon.get_str(self.plon.lon_type()), str(self.plat), ) @@ -498,8 +525,11 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir, specify_fsurf_out # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fsurf_in, "LONGXY", "LATIXY", "lsmlon", "lsmlat") + # get point longitude, converting to match file type if needed + plon_float = self.convert_plon_to_filetype_if_needed(f_in["lsmlon"]) + # extract gridcell closest to plon/plat - f_tmp = f_in.sel(lsmlon=self.plon, lsmlat=self.plat, method="nearest") + f_tmp = 
f_in.sel(lsmlon=plon_float, lsmlat=self.plat, method="nearest") # expand dimensions f_tmp = f_tmp.expand_dims(["lsmlat", "lsmlon"]).copy(deep=True) @@ -525,10 +555,10 @@ def create_surfdata_at_point(self, indir, file, user_mods_dir, specify_fsurf_out # update lsmlat and lsmlon to match site specific instead of the nearest point # we do this so that if we create user_mods the PTS_LON and PTS_LAT in CIME match # the surface data coordinates - which is required - f_out["lsmlon"] = np.atleast_1d(self.plon) + f_out["lsmlon"] = np.atleast_1d(plon_float) f_out["lsmlat"] = np.atleast_1d(self.plat) f_out["LATIXY"][:, :] = self.plat - f_out["LONGXY"][:, :] = self.plon + f_out["LONGXY"][:, :] = plon_float # update attributes self.update_metadata(f_out) @@ -554,7 +584,7 @@ def create_datmdomain_at_point(self, datm_tuple: DatmFiles): logger.info("----------------------------------------------------------------------") logger.info( "Creating DATM domain file at %s, %s", - str(self.plon), + self.plon.get_str(self.plon.lon_type()), str(self.plat), ) @@ -568,8 +598,11 @@ def create_datmdomain_at_point(self, datm_tuple: DatmFiles): # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(fdatmdomain_in, "xc", "yc", "ni", "nj") + # get point longitude, converting to match file type if needed + plon_float = self.convert_plon_to_filetype_if_needed(f_in["lon"]) + # extract gridcell closest to plon/plat - f_out = f_in.sel(ni=self.plon, nj=self.plat, method="nearest") + f_out = f_in.sel(ni=plon_float, nj=self.plat, method="nearest") # expand dimensions f_out = f_out.expand_dims(["nj", "ni"]) @@ -591,14 +624,17 @@ def extract_datm_at(self, file_in, file_out): # create 1d coordinate variables to enable sel() method f_in = self.create_1d_coord(file_in, "LONGXY", "LATIXY", "lon", "lat") + # get point longitude, converting to match file type if needed + plon_float = self.convert_plon_to_filetype_if_needed(f_in["lon"]) + # extract gridcell closest to plon/plat - f_out 
= f_in.sel(lon=self.plon, lat=self.plat, method="nearest") + f_out = f_in.sel(lon=plon_float, lat=self.plat, method="nearest") # expand dimensions f_out = f_out.expand_dims(["lat", "lon"]) # specify dimension order - f_out = f_out.transpose("scalar", "time", "lat", "lon") + f_out = f_out.transpose("time", "lat", "lon") # update attributes self.update_metadata(f_out) @@ -617,7 +653,9 @@ def write_shell_commands(self, file, datm_syr, datm_eyr): with open(file, "w") as nl_file: self.write_to_file("# Change below line if you move the subset data directory", nl_file) self.write_to_file("./xmlchange {}={}".format(USRDAT_DIR, self.out_dir), nl_file) - self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), nl_file) + self.write_to_file( + "./xmlchange PTS_LON={}".format(self.plon.get_str(self.plon.lon_type())), nl_file + ) self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), nl_file) self.write_to_file("./xmlchange MPILIB=mpi-serial", nl_file) if self.create_datm: @@ -643,7 +681,9 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s Create all of a DATM dataset at a point. 
""" logger.info("----------------------------------------------------------------------") - logger.info("Creating DATM files at %s, %s", str(self.plon), str(self.plat)) + logger.info( + "Creating DATM files at %s, %s", self.plon.get_str(self.plon.lon_type()), str(self.plat) + ) # -- create data files infile = [] @@ -653,46 +693,36 @@ def create_datm_at_point(self, datm_tuple: DatmFiles, datm_syr, datm_eyr, datm_s tpqwfiles = [] for year in range(datm_syr, datm_eyr + 1): ystr = str(year) - for month in range(FIRST_MONTH, LAST_MONTH + 1): - mstr = str(month) - if month < 10: - mstr = "0" + mstr - - dtag = ystr + "-" + mstr - fsolar = os.path.join( - datm_tuple.indir, - datm_tuple.dir_solar, - "{}{}.nc".format(datm_tuple.tag_solar, dtag), - ) - fsolar2 = "{}{}.{}.nc".format(datm_tuple.tag_solar, self.tag, dtag) - fprecip = os.path.join( - datm_tuple.indir, - datm_tuple.dir_prec, - "{}{}.nc".format(datm_tuple.tag_prec, dtag), - ) - fprecip2 = "{}{}.{}.nc".format(datm_tuple.tag_prec, self.tag, dtag) - ftpqw = os.path.join( - datm_tuple.indir, - datm_tuple.dir_tpqw, - "{}{}.nc".format(datm_tuple.tag_tpqw, dtag), - ) - ftpqw2 = "{}{}.{}.nc".format(datm_tuple.tag_tpqw, self.tag, dtag) - - outdir = os.path.join(self.out_dir, datm_tuple.outdir) - infile += [fsolar, fprecip, ftpqw] - outfile += [ - os.path.join(outdir, fsolar2), - os.path.join(outdir, fprecip2), - os.path.join(outdir, ftpqw2), - ] - solarfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fsolar2) - ) - precfiles.append( - os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fprecip2) - ) - tpqwfiles.append(os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2)) + fsolar = os.path.join( + datm_tuple.indir, + datm_tuple.dir_solar, + "{}{}.nc".format(datm_tuple.tag_solar, ystr), + ) + fsolar2 = "{}{}.{}.nc".format(datm_tuple.tag_solar, self.tag, ystr) + fprecip = os.path.join( + datm_tuple.indir, + datm_tuple.dir_prec, + "{}{}.nc".format(datm_tuple.tag_prec, ystr), + ) + 
fprecip2 = "{}{}.{}.nc".format(datm_tuple.tag_prec, self.tag, ystr) + ftpqw = os.path.join( + datm_tuple.indir, + datm_tuple.dir_tpqw, + "{}{}.nc".format(datm_tuple.tag_tpqw, ystr), + ) + ftpqw2 = "{}{}.{}.nc".format(datm_tuple.tag_tpqw, self.tag, ystr) + + outdir = os.path.join(self.out_dir, datm_tuple.outdir) + infile += [fsolar, fprecip, ftpqw] + outfile += [ + os.path.join(outdir, fsolar2), + os.path.join(outdir, fprecip2), + os.path.join(outdir, ftpqw2), + ] + solarfiles.append(os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fsolar2)) + precfiles.append(os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, fprecip2)) + tpqwfiles.append(os.path.join("${}".format(USRDAT_DIR), datm_tuple.outdir, ftpqw2)) for idx, out_f in enumerate(outfile): logger.debug(out_f) diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index 81f1f703f3..ed9282ef46 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -69,7 +69,8 @@ from ctsm.path_utils import path_to_ctsm_root from ctsm.utils import abort from ctsm.config_utils import check_lon1_lt_lon2 -from ctsm.longitude import Longitude, _detect_lon_type +from ctsm.longitude import Longitude, detect_lon_type +from ctsm.pft_utils import MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS # -- import ctsm logging flags from ctsm.ctsm_logging import ( @@ -597,14 +598,14 @@ def determine_num_pft(crop): num_pft (int) : number of pfts for surface dataset """ if crop: - num_pft = "78" + num_pft = str(MAX_PFT_MANAGEDCROPS) else: - num_pft = "16" + num_pft = str(MAX_PFT_GENERICCROPS) logger.debug("crop_flag = %s => num_pft = %s", str(crop), num_pft) return num_pft -def setup_files(args, defaults, cesmroot): +def setup_files(args, defaults, cesmroot, testing=False): """ Sets up the files and folders needed for this program """ @@ -622,22 +623,31 @@ def setup_files(args, defaults, cesmroot): else: clmforcingindir = args.inputdatadir - if not os.path.isdir(clmforcingindir): + if not testing and not 
os.path.isdir(clmforcingindir): logger.info("clmforcingindir does not exist: %s", clmforcingindir) - abort("inputdata directory does not exist") + abort(f"inputdata directory does not exist: {clmforcingindir}") file_dict = {"main_dir": clmforcingindir} # DATM data - # TODO Issue #2960: Make datm_type a user option at the command - # line. For reference, this option affects three .cfg files: - # tools/site_and_regional/default_data_1850.cfg - # tools/site_and_regional/default_data_2000.cfg - # python/ctsm/test/testinputs/default_data.cfg + # To find the affected files, from the top level of ctsm, do: + # grep "\[datm\]" $(find . -type f -name "*cfg") if args.create_datm: - datm_type = "datm_crujra" # also available: datm_type = "datm_gswp3" + datm_cfg_section = "datm" + + # Issue #3269: Changes in PR #3259 mean that --create-datm won't work with GSWP3 + settings_to_check_for_gswp3 = ["solartag", "prectag", "tpqwtag"] + for setting in settings_to_check_for_gswp3: + value = defaults.get(datm_cfg_section, setting) + if "gswp3" in value.lower(): + msg = ( + "--create-datm is no longer supported for GSWP3 data; " + "see https://github.com/ESCOMP/CTSM/issues/3269" + ) + raise NotImplementedError(msg) + dir_output_datm = "datmdata" - dir_input_datm = os.path.join(clmforcingindir, defaults.get(datm_type, "dir")) + dir_input_datm = os.path.join(clmforcingindir, defaults.get(datm_cfg_section, "dir")) if not os.path.isdir(os.path.join(args.out_dir, dir_output_datm)): os.mkdir(os.path.join(args.out_dir, dir_output_datm)) logger.info("dir_input_datm : %s", dir_input_datm) @@ -645,16 +655,16 @@ def setup_files(args, defaults, cesmroot): file_dict["datm_tuple"] = DatmFiles( dir_input_datm, dir_output_datm, - defaults.get(datm_type, "domain"), - defaults.get(datm_type, "solardir"), - defaults.get(datm_type, "precdir"), - defaults.get(datm_type, "tpqwdir"), - defaults.get(datm_type, "solartag"), - defaults.get(datm_type, "prectag"), - defaults.get(datm_type, "tpqwtag"), - 
defaults.get(datm_type, "solarname"), - defaults.get(datm_type, "precname"), - defaults.get(datm_type, "tpqwname"), + defaults.get(datm_cfg_section, "domain"), + defaults.get(datm_cfg_section, "solardir"), + defaults.get(datm_cfg_section, "precdir"), + defaults.get(datm_cfg_section, "tpqwdir"), + defaults.get(datm_cfg_section, "solartag"), + defaults.get(datm_cfg_section, "prectag"), + defaults.get(datm_cfg_section, "tpqwtag"), + defaults.get(datm_cfg_section, "solarname"), + defaults.get(datm_cfg_section, "precname"), + defaults.get(datm_cfg_section, "tpqwname"), ) # if the crop flag is on - we need to use a different land use and surface data file @@ -812,7 +822,7 @@ def subset_region(args, file_dict: dict): print("\nFor running this regional case with the created user_mods : ") print( "./create_newcase --case case --res CLM_USRDAT --compset I2000Clm60BgcCrop", - "--run-unsupported --user-mods-dirs ", + "--run-unsupported --user-mods-dir ", args.user_mods_dir, "\n\n", ) @@ -833,10 +843,10 @@ def process_args(args): if any(lon_arg_values): if args.lon_type is None: if hasattr(args, "plon"): - args.lon_type = _detect_lon_type(args.plon) + args.lon_type = detect_lon_type(args.plon) else: - lon1_type = _detect_lon_type(args.lon1) - lon2_type = _detect_lon_type(args.lon2) + lon1_type = detect_lon_type(args.lon1) + lon2_type = detect_lon_type(args.lon2) if lon1_type != lon2_type: raise argparse.ArgumentTypeError( "--lon1 and --lon2 seem to be of different types" diff --git a/python/ctsm/test/test_sys_gen_mksurfdata_jobscript_single_derecho.py b/python/ctsm/test/test_sys_gen_mksurfdata_jobscript_single_derecho.py new file mode 100755 index 0000000000..627fb1d32b --- /dev/null +++ b/python/ctsm/test/test_sys_gen_mksurfdata_jobscript_single_derecho.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python3 + +""" +System tests for gen_mksurfdata_jobscript_single.py subroutines on Derecho +""" + +import unittest +import os + +from ctsm import unit_testing +from 
ctsm.test_gen_mksurfdata_jobscript_single_parent import TestFGenMkSurfJobscriptSingleParent +from ctsm.path_utils import path_to_cime +from ctsm.os_utils import run_cmd_output_on_error +from ctsm.toolchain.gen_mksurfdata_jobscript_single import get_parser +from ctsm.toolchain.gen_mksurfdata_jobscript_single import get_mpirun +from ctsm.toolchain.gen_mksurfdata_jobscript_single import check_parser_args +from ctsm.toolchain.gen_mksurfdata_jobscript_single import write_runscript_part1 + + +# Allow test names that pylint doesn't like; otherwise hard to make them +# readable +# pylint: disable=invalid-name + + +# pylint: disable=protected-access +# pylint: disable=too-many-instance-attributes +class TestFGenMkSurfJobscriptSingleDerecho(TestFGenMkSurfJobscriptSingleParent): + """Tests the gen_mksurfdata_jobscript_single subroutines on Derecho""" + + def test_derecho_mpirun(self): + """ + test derecho mpirun. This would've helped caught a problem we ran into + It will also be helpful when sumodules are updated to guide to solutions + to problems + """ + machine = "derecho" + nodes = 4 + tasks = 128 + unit_testing.add_machine_node_args(machine, nodes, tasks) + args = get_parser().parse_args() + check_parser_args(args) + self.assertEqual(machine, args.machine) + self.assertEqual(tasks, args.tasks_per_node) + self.assertEqual(nodes, args.number_of_nodes) + self.assertEqual(self._account, args.account) + # Create the env_mach_specific.xml file needed for get_mpirun + # This will catch problems with our usage of CIME objects + # Doing this here will also catch potential issues in the gen_mksurfdata_build script + configure_path = os.path.join(path_to_cime(), "CIME", "scripts", "configure") + self.assertTrue(os.path.exists(configure_path)) + options = " --macros-format CMake --silent --compiler intel --machine " + machine + cmd = configure_path + options + cmd_list = cmd.split() + run_cmd_output_on_error( + cmd=cmd_list, errmsg="Trouble running configure", cwd=self._bld_path + 
) + self.assertTrue(os.path.exists(self._env_mach)) + expected_attribs = {"mpilib": "default"} + with open(self._jobscript_file, "w", encoding="utf-8") as runfile: + attribs = write_runscript_part1( + number_of_nodes=nodes, + tasks_per_node=tasks, + machine=machine, + account=self._account, + walltime=args.walltime, + runfile=runfile, + ) + self.assertEqual(attribs, expected_attribs) + (executable, mksurfdata_path, env_mach_path) = get_mpirun(args, attribs) + expected_exe = "time mpibind " + self.assertEqual(executable, expected_exe) + self.assertEqual(mksurfdata_path, self._mksurf_exe) + self.assertEqual(env_mach_path, self._env_mach) + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_sys_plumber2_surf_wrapper.py b/python/ctsm/test/test_sys_plumber2_surf_wrapper.py new file mode 100755 index 0000000000..12ca561150 --- /dev/null +++ b/python/ctsm/test/test_sys_plumber2_surf_wrapper.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python3 + +"""System tests for plumber2_surf_wrapper""" + +import os +import unittest +import tempfile +import shutil +import sys + +from ctsm import unit_testing +from ctsm.site_and_regional.plumber2_surf_wrapper import main +from ctsm.site_and_regional.plumber2_shared import read_plumber2_sites_csv +from ctsm.path_utils import path_to_ctsm_root + +# Allow test names that pylint doesn't like; otherwise hard to make them +# readable +# pylint: disable=invalid-name + + +class TestSysPlumber2SurfWrapper(unittest.TestCase): + """ + System tests for plumber2_surf_wrapper + """ + + def setUp(self): + """ + Make tempdir for use by these tests. 
+ """ + self._previous_dir = os.getcwd() + self._tempdir = tempfile.mkdtemp() + os.chdir(self._tempdir) # cd to tempdir + + # Path to script + self.tool_path = os.path.join( + path_to_ctsm_root(), + "tools", + "site_and_regional", + "plumber2_surf_wrapper", + ) + + # Path to test inputs directory + self.test_inputs = os.path.join( + os.path.dirname(__file__), "testinputs", "plumber2_surf_wrapper" + ) + + def tearDown(self): + """ + Remove temporary directory + """ + os.chdir(self._previous_dir) + shutil.rmtree(self._tempdir, ignore_errors=True) + + def test_plumber2_surf_wrapper(self): + """ + Run the entire tool with default settings. + CAN ONLY RUN ON SYSTEMS WITH INPUTDATA + """ + + sys.argv = [self.tool_path] + main() + + # How many files do we expect? + plumber2_csv = read_plumber2_sites_csv() + n_files_expected = len(plumber2_csv) + + # How many files did we get? + file_list = os.listdir("subset_data_single_point") + n_files = len(file_list) + + # Check + self.assertEqual(n_files_expected, n_files) + + def test_plumber2_surf_wrapper_78pft(self): + """ + Run the entire tool with --crop. + CAN ONLY RUN ON SYSTEMS WITH INPUTDATA + """ + + sys.argv = [self.tool_path, "--crop"] + main() + + # How many files do we expect? + plumber2_csv = read_plumber2_sites_csv() + n_files_expected = len(plumber2_csv) + + # How many files did we get? 
+ file_list = os.listdir("subset_data_single_point") + n_files = len(file_list) + + # Check + self.assertEqual(n_files_expected, n_files) + + def test_plumber2_surf_wrapper_invalid_pft(self): + """ + plumber2_surf_wrapper should error if invalid PFT is given + """ + + sys.argv = [ + self.tool_path, + "--plumber2-sites-csv", + os.path.join(self.test_inputs, "PLUMBER2_sites_invalid_pft.csv"), + ] + with self.assertRaisesRegex(RuntimeError, "must be a valid PFT"): + main() + + def test_plumber2_surf_wrapper_existing_no_overwrite_fails(self): + """ + plumber2_surf_wrapper should fail if file exists but --overwrite isn't given + """ + + sys_argv_shared = [ + self.tool_path, + "--plumber2-sites-csv", + os.path.join(self.test_inputs, "PLUMBER2_site_valid.csv"), + ] + + # Run twice, expecting second to fail + sys.argv = sys_argv_shared + main() + sys.argv = sys_argv_shared + with self.assertRaisesRegex(SystemExit, "exists"): + main() + + def test_plumber2_surf_wrapper_existing_overwrite_passes(self): + """ + plumber2_surf_wrapper should pass if file exists and --overwrite is given + """ + + sys_argv_shared = [ + self.tool_path, + "--plumber2-sites-csv", + os.path.join(self.test_inputs, "PLUMBER2_site_valid.csv"), + ] + + # Run once to generate the files + sys.argv = sys_argv_shared + main() + + # Run again with --overwrite, expecting pass + sys.argv = sys_argv_shared + ["--overwrite"] + main() + + +if __name__ == "__main__": + unit_testing.setup_for_tests() + unittest.main() diff --git a/python/ctsm/test/test_sys_subset_data.py b/python/ctsm/test/test_sys_subset_data.py index bc73c8c41d..453df7c18e 100644 --- a/python/ctsm/test/test_sys_subset_data.py +++ b/python/ctsm/test/test_sys_subset_data.py @@ -12,6 +12,7 @@ import tempfile import inspect import xarray as xr +from CIME.scripts.create_newcase import _main_func as create_newcase # pylint: disable=import-error # -- add python/ctsm to path (needed if we want to run the test stand-alone) _CTSM_PYTHON = 
os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir) @@ -23,36 +24,102 @@ from ctsm.utils import find_one_file_matching_pattern +def _get_sitename_str_point(include_sitename, sitename, lon, lat): + """ + Given a site, return the string to use in output filenames + """ + if include_sitename: + sitename_str = sitename + else: + sitename_str = f"{float(lon)}_{float(lat)}" + return sitename_str + + class TestSubsetDataSys(unittest.TestCase): """ Basic class for testing subset_data.py. """ def setUp(self): + self.previous_dir = os.getcwd() self.temp_dir_out = tempfile.TemporaryDirectory() self.temp_dir_umd = tempfile.TemporaryDirectory() + self.temp_dir_caseparent = tempfile.TemporaryDirectory() self.inputdata_dir = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) def tearDown(self): self.temp_dir_out.cleanup() self.temp_dir_umd.cleanup() + os.chdir(self.previous_dir) - def _check_result_file_matches_expected(self, expected_output_files): + def _check_create_newcase(self): + """ + Check that you can call create_newcase using the usermods from subset_data + """ + case_dir = os.path.join(self.temp_dir_caseparent.name, "case") + sys.argv = [ + "create_newcase", + "--case", + case_dir, + "--res", + "CLM_USRDAT", + "--compset", + "I2000Clm60Bgc", + "--run-unsupported", + "--user-mods-dir", + self.temp_dir_umd.name, + ] + create_newcase() + + def _check_result_file_matches_expected(self, expected_output_files, caller_n): """ Loop through a list of output files, making sure they match what we expect. + + caller_n should be an integer giving the number of levels above this function you need to + traverse before you hit the actual test name. If the test is calling this function directly, + caller_n = 1. If the test is calling a function that calls this function, caller_n = 2. Etc. 
""" all_files_present_and_match = True + result_file_found = True + expected_file_found = True for basename in expected_output_files: + + # Check whether result (output) file exists. If not, note it but continue. result_file = os.path.join(self.temp_dir_out.name, basename) - result_file = find_one_file_matching_pattern(result_file) + try: + result_file = find_one_file_matching_pattern(result_file) + except FileNotFoundError: + result_file_found = False + + # Check whether expected file exists. If not, note it but continue. expected_file = os.path.join( os.path.dirname(__file__), "testinputs", "expected_result_files", - inspect.stack()[1][3], # Name of calling function (i.e., test name) + inspect.stack()[caller_n][3], # Name of calling function (i.e., test name) basename, ) - expected_file = find_one_file_matching_pattern(expected_file) + try: + expected_file = find_one_file_matching_pattern(expected_file) + except FileNotFoundError: + expected_file_found = False + + # Raise an AssertionError if either file was not found + if not (result_file_found and expected_file_found): + msg = "" + if not result_file_found: + this_dir = os.path.dirname(result_file) + msg += f"\nResult file '{result_file}' not found. " + msg += f"Contents of directory '{this_dir}':\n\t" + msg += "\n\t".join(os.listdir(this_dir)) + if not expected_file_found: + this_dir = os.path.dirname(expected_file) + msg += f"\nExpected file '{expected_file}' not found. 
" + msg += f"Contents of directory '{this_dir}':\n\t" + msg += "\n\t".join(os.listdir(this_dir)) + raise AssertionError(msg) + + # Compare the two files ds_result = xr.open_dataset(result_file) ds_expected = xr.open_dataset(expected_file) if not ds_result.equals(ds_expected): @@ -62,10 +129,15 @@ def _check_result_file_matches_expected(self, expected_output_files): all_files_present_and_match = False return all_files_present_and_match - def test_subset_data_reg_amazon(self): + def _do_test_subset_data_reg_amazon(self, include_regname=True): """ - Test subset_data for Amazon region + Convenience function for multiple tests of subset_data region for the Amazon """ + regname = "TMP" + lat1 = -12 + lat2 = -7 + lon1 = 291 + lon2 = 299 cfg_file = os.path.join( self.inputdata_dir, "ctsm", @@ -78,15 +150,13 @@ def test_subset_data_reg_amazon(self): "subset_data", "region", "--lat1", - "-12", + str(lat1), "--lat2", - "-7", + str(lat2), "--lon1", - "291", + str(lon1), "--lon2", - "299", - "--reg", - "TMP", + str(lon2), "--create-mesh", "--create-domain", "--create-surface", @@ -103,16 +173,40 @@ def test_subset_data_reg_amazon(self): cfg_file, "--overwrite", ] + if include_regname: + sys.argv += ["--reg", regname] subset_data.main() # Loop through all the output files, making sure they match what we expect. 
daystr = "[0-9][0-9][0-9][0-9][0-9][0-9]" # 6-digit day code, yymmdd + if include_regname: + regname_str = regname + else: + regname_str = f"{float(lon1)}-{float(lon2)}_{float(lat1)}-{float(lat2)}" expected_output_files = [ - f"domain.lnd.5x5pt-amazon_navy_TMP_c{daystr}_ESMF_UNSTRUCTURED_MESH.nc", - f"domain.lnd.5x5pt-amazon_navy_TMP_c{daystr}.nc", - f"surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c{daystr}.nc", + f"domain.lnd.5x5pt-amazon_navy_{regname_str}_c{daystr}_ESMF_UNSTRUCTURED_MESH.nc", + f"domain.lnd.5x5pt-amazon_navy_{regname_str}_c{daystr}.nc", + f"surfdata_{regname_str}_amazon_hist_16pfts_CMIP6_2000_c{daystr}.nc", ] - self.assertTrue(self._check_result_file_matches_expected(expected_output_files)) + self.assertTrue(self._check_result_file_matches_expected(expected_output_files, 2)) + + # Check that create_newcase works + # SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + self._check_create_newcase() + + def test_subset_data_reg_amazon(self): + """ + Test subset_data for Amazon region + SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + """ + self._do_test_subset_data_reg_amazon() + + def test_subset_data_reg_amazon_noregname(self): + """ + Test subset_data for Amazon region + SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + """ + self._do_test_subset_data_reg_amazon(include_regname=False) def test_subset_data_reg_infile_detect360(self): """ @@ -185,6 +279,226 @@ def test_subset_data_reg_infile_detect180_error(self): ): subset_data.main() + def _do_test_subset_data_pt_surface(self, lon, include_sitename=True): + """ + Given a longitude, test subset_data point --create-surface + """ + lat = -12 + cfg_file = os.path.join( + self.inputdata_dir, + "ctsm", + "test", + "testinputs", + "subset_data_amazon.cfg", + ) + print(cfg_file) + sys.argv = [ + "subset_data", + "point", + "--lat", + str(lat), + "--lon", + str(lon), + "--create-domain", + "--create-surface", + "--surf-year", + "2000", + "--create-user-mods", + "--outdir", + self.temp_dir_out.name, + "--user-mods-dir", + 
self.temp_dir_umd.name, + "--inputdata-dir", + self.inputdata_dir, + "--cfg-file", + cfg_file, + "--overwrite", + ] + sitename = "TMP" + if include_sitename: + sys.argv += ["--site", sitename] + subset_data.main() + + # Loop through all the output files, making sure they match what we expect. + daystr = "[0-9][0-9][0-9][0-9][0-9][0-9]" # 6-digit day code, yymmdd + sitename_str = _get_sitename_str_point(include_sitename, sitename, lon, lat) + expected_output_files = [ + f"surfdata_{sitename_str}_amazon_hist_16pfts_CMIP6_2000_c{daystr}.nc", + ] + self.assertTrue(self._check_result_file_matches_expected(expected_output_files, 2)) + + # Check that create_newcase works + # SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + self._check_create_newcase() + + def test_subset_data_pt_surface_amazon_type360(self): + """ + Test subset_data --create-surface for Amazon point with longitude type 360 + SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + """ + self._do_test_subset_data_pt_surface(291) + + def test_subset_data_pt_surface_amazon_type180(self): + """ + Test subset_data --create-surface for Amazon point with longitude type 180 + SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + """ + self._do_test_subset_data_pt_surface(-69) + + def test_subset_data_pt_surface_amazon_type180_nositename(self): + """ + Test subset_data --create-surface for Amazon point with longitude type 180 + without specifying a site name + SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + """ + self._do_test_subset_data_pt_surface(-69, include_sitename=False) + + def _do_test_subset_data_pt_landuse(self, lon, include_sitename=True): + """ + Given a longitude, test subset_data point --create-landuse + """ + lat = -12 + sitename = "TMP" + cfg_file = os.path.join( + self.inputdata_dir, + "ctsm", + "test", + "testinputs", + "subset_data_amazon_1850.cfg", + ) + print(cfg_file) + sys.argv = [ + "subset_data", + "point", + "--lat", + str(lat), + "--lon", + str(lon), + "--create-domain", + "--create-surface", + "--surf-year", 
+ "1850", + "--create-landuse", + "--create-user-mods", + "--outdir", + self.temp_dir_out.name, + "--user-mods-dir", + self.temp_dir_umd.name, + "--inputdata-dir", + self.inputdata_dir, + "--cfg-file", + cfg_file, + "--overwrite", + ] + if include_sitename: + sys.argv += ["--site", sitename] + subset_data.main() + + # Loop through all the output files, making sure they match what we expect. + daystr = "[0-9][0-9][0-9][0-9][0-9][0-9]" # 6-digit day code, yymmdd + sitename_str = _get_sitename_str_point(include_sitename, sitename, lon, lat) + expected_output_files = [ + f"surfdata_{sitename_str}_amazon_hist_1850_78pfts_c{daystr}.nc", + f"landuse.timeseries_{sitename_str}_amazon_hist_1850-1853_78pfts_c{daystr}.nc", + ] + self.assertTrue(self._check_result_file_matches_expected(expected_output_files, 2)) + + # Check that create_newcase works + # SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + self._check_create_newcase() + + def test_subset_data_pt_landuse_amazon_type360(self): + """ + Test subset_data --create-landuse for Amazon point with longitude type 360 + SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + """ + self._do_test_subset_data_pt_landuse(291) + + def test_subset_data_pt_landuse_amazon_type360_nositename(self): + """ + Test subset_data --create-landuse for Amazon point with longitude type 360 and no site name + SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + """ + self._do_test_subset_data_pt_landuse(291, include_sitename=False) + + def test_subset_data_pt_landuse_amazon_type180(self): + """ + Test subset_data --create-landuse for Amazon point with longitude type 180 + SHOULD WORK ONLY ON CESM-SUPPORTED MACHINES + """ + self._do_test_subset_data_pt_landuse(-69) + + def _do_test_subset_data_pt_datm(self, lon, include_sitename=True): + """ + Given a longitude, test subset_data point --create-datm + """ + start_year = 1986 + end_year = 1988 + sitename = "TMP" + lat = -12 + outdir = self.temp_dir_out.name + sys.argv = [ + "subset_data", + "point", + "--lat", + 
str(lat), + "--lon", + str(lon), + "--create-datm", + "--datm-syr", + str(start_year), + "--datm-eyr", + str(end_year), + "--create-user-mods", + "--outdir", + outdir, + "--user-mods-dir", + self.temp_dir_umd.name, + "--overwrite", + ] + if include_sitename: + sys.argv += ["--site", sitename] + subset_data.main() + + # Loop through all the output files, making sure they match what we expect. + daystr = "[0-9][0-9][0-9][0-9][0-9][0-9]" # 6-digit day code, yymmdd + sitename_str = _get_sitename_str_point(include_sitename, sitename, lon, lat) + expected_output_files = [ + f"domain.crujra_v2.3_0.5x0.5_{sitename_str}_c{daystr}.nc", + ] + for year in list(range(start_year, end_year + 1)): + for forcing in ["Solr", "Prec", "TPQWL"]: + expected_output_files.append( + f"clmforc.CRUJRAv2.5_0.5x0.5.{forcing}.{sitename_str}.{year}.nc" + ) + expected_output_files = [os.path.join("datmdata", x) for x in expected_output_files] + self.assertTrue(self._check_result_file_matches_expected(expected_output_files, 2)) + + # Check that create_newcase works + self._check_create_newcase() + + def test_subset_data_pt_datm_amazon_type360(self): + """ + Test subset_data --create-datm for Amazon point with longitude type 360 + FOR NOW CAN ONLY BE RUN ON DERECHO/CASPER + """ + self._do_test_subset_data_pt_datm(291) + + def test_subset_data_pt_datm_amazon_type180(self): + """ + Test subset_data --create-datm for Amazon point with longitude type 180 + FOR NOW CAN ONLY BE RUN ON DERECHO/CASPER + """ + self._do_test_subset_data_pt_datm(-69) + + def test_subset_data_pt_datm_amazon_type180_nositename(self): + """ + Test subset_data --create-datm for Amazon point with longitude type 180 without providing + site name. 
+ FOR NOW CAN ONLY BE RUN ON DERECHO/CASPER + """ + self._do_test_subset_data_pt_datm(-69, include_sitename=False) + if __name__ == "__main__": unit_testing.setup_for_tests() diff --git a/python/ctsm/test/test_unit_gen_mksurfdata_jobscript_single.py b/python/ctsm/test/test_unit_gen_mksurfdata_jobscript_single.py index bee1aac715..3980b3bd49 100755 --- a/python/ctsm/test/test_unit_gen_mksurfdata_jobscript_single.py +++ b/python/ctsm/test/test_unit_gen_mksurfdata_jobscript_single.py @@ -6,40 +6,15 @@ import unittest import os -import sys import shutil -import tempfile - from ctsm import unit_testing -from ctsm.path_utils import path_to_ctsm_root -from ctsm.path_utils import path_to_cime -from ctsm.os_utils import run_cmd_output_on_error +from ctsm.test_gen_mksurfdata_jobscript_single_parent import TestFGenMkSurfJobscriptSingleParent from ctsm.toolchain.gen_mksurfdata_jobscript_single import get_parser -from ctsm.toolchain.gen_mksurfdata_jobscript_single import get_mpirun from ctsm.toolchain.gen_mksurfdata_jobscript_single import check_parser_args from ctsm.toolchain.gen_mksurfdata_jobscript_single import write_runscript_part1 -def add_args(machine, nodes, tasks): - """add arguments to sys.argv""" - args_to_add = [ - "--machine", - machine, - "--number-of-nodes", - str(nodes), - "--tasks-per-node", - str(tasks), - ] - for item in args_to_add: - sys.argv.append(item) - - -def create_empty_file(filename): - """create an empty file""" - os.system("touch " + filename) - - # Allow test names that pylint doesn't like; otherwise hard to make them # readable # pylint: disable=invalid-name @@ -47,65 +22,9 @@ def create_empty_file(filename): # pylint: disable=protected-access # pylint: disable=too-many-instance-attributes -class TestFGenMkSurfJobscriptSingle(unittest.TestCase): +class TestFGenMkSurfJobscriptSingle(TestFGenMkSurfJobscriptSingleParent): """Tests the gen_mksurfdata_jobscript_single subroutines""" - def setUp(self): - """Setup for trying out the methods""" - 
testinputs_path = os.path.join(path_to_ctsm_root(), "python/ctsm/test/testinputs") - self._testinputs_path = testinputs_path - self._previous_dir = os.getcwd() - self._tempdir = tempfile.mkdtemp() - os.chdir(self._tempdir) - self._account = "ACCOUNT_NUMBER" - self._jobscript_file = "output_jobscript" - self._output_compare = """#!/bin/bash -# Edit the batch directives for your batch system -# Below are default batch directives for derecho -#PBS -N mksurfdata -#PBS -j oe -#PBS -k eod -#PBS -S /bin/bash -#PBS -l walltime=12:00:00 -#PBS -A ACCOUNT_NUMBER -#PBS -q main -#PBS -l select=1:ncpus=128:mpiprocs=64:mem=218GB - -# This is a batch script to run a set of resolutions for mksurfdata_esmf input namelist -# NOTE: THIS SCRIPT IS AUTOMATICALLY GENERATED SO IN GENERAL YOU SHOULD NOT EDIT it!! - -""" - self._bld_path = os.path.join(self._tempdir, "tools_bld") - os.makedirs(self._bld_path) - self.assertTrue(os.path.isdir(self._bld_path)) - self._nlfile = os.path.join(self._tempdir, "namelist_file") - create_empty_file(self._nlfile) - self.assertTrue(os.path.exists(self._nlfile)) - self._mksurf_exe = os.path.join(self._bld_path, "mksurfdata") - create_empty_file(self._mksurf_exe) - self.assertTrue(os.path.exists(self._mksurf_exe)) - self._env_mach = os.path.join(self._bld_path, ".env_mach_specific.sh") - create_empty_file(self._env_mach) - self.assertTrue(os.path.exists(self._env_mach)) - sys.argv = [ - "gen_mksurfdata_jobscript_single", - "--bld-path", - self._bld_path, - "--namelist-file", - self._nlfile, - "--jobscript-file", - self._jobscript_file, - "--account", - self._account, - ] - - def tearDown(self): - """ - Remove temporary directory - """ - os.chdir(self._previous_dir) - shutil.rmtree(self._tempdir, ignore_errors=True) - def assertFileContentsEqual(self, expected, filepath, msg=None): """Asserts that the contents of the file given by 'filepath' are equal to the string given by 'expected'. 
'msg' gives an optional message to be @@ -123,7 +42,7 @@ def test_simple_derecho_args(self): machine = "derecho" nodes = 1 tasks = 64 - add_args(machine, nodes, tasks) + unit_testing.add_machine_node_args(machine, nodes, tasks) args = get_parser().parse_args() check_parser_args(args) with open(self._jobscript_file, "w", encoding="utf-8") as runfile: @@ -139,57 +58,12 @@ def test_simple_derecho_args(self): self.assertFileContentsEqual(self._output_compare, self._jobscript_file) - def test_derecho_mpirun(self): - """ - test derecho mpirun. This would've helped caught a problem we ran into - It will also be helpful when sumodules are updated to guide to solutions - to problems - """ - machine = "derecho" - nodes = 4 - tasks = 128 - add_args(machine, nodes, tasks) - args = get_parser().parse_args() - check_parser_args(args) - self.assertEqual(machine, args.machine) - self.assertEqual(tasks, args.tasks_per_node) - self.assertEqual(nodes, args.number_of_nodes) - self.assertEqual(self._account, args.account) - # Create the env_mach_specific.xml file needed for get_mpirun - # This will catch problems with our usage of CIME objects - # Doing this here will also catch potential issues in the gen_mksurfdata_build script - configure_path = os.path.join(path_to_cime(), "CIME", "scripts", "configure") - self.assertTrue(os.path.exists(configure_path)) - options = " --macros-format CMake --silent --compiler intel --machine " + machine - cmd = configure_path + options - cmd_list = cmd.split() - run_cmd_output_on_error( - cmd=cmd_list, errmsg="Trouble running configure", cwd=self._bld_path - ) - self.assertTrue(os.path.exists(self._env_mach)) - expected_attribs = {"mpilib": "default"} - with open(self._jobscript_file, "w", encoding="utf-8") as runfile: - attribs = write_runscript_part1( - number_of_nodes=nodes, - tasks_per_node=tasks, - machine=machine, - account=self._account, - walltime=args.walltime, - runfile=runfile, - ) - self.assertEqual(attribs, expected_attribs) - 
(executable, mksurfdata_path, env_mach_path) = get_mpirun(args, attribs) - expected_exe = "time mpibind " - self.assertEqual(executable, expected_exe) - self.assertEqual(mksurfdata_path, self._mksurf_exe) - self.assertEqual(env_mach_path, self._env_mach) - def test_too_many_tasks(self): """test trying to use too many tasks""" machine = "derecho" nodes = 1 tasks = 129 - add_args(machine, nodes, tasks) + unit_testing.add_machine_node_args(machine, nodes, tasks) args = get_parser().parse_args() check_parser_args(args) with open(self._jobscript_file, "w", encoding="utf-8") as runfile: @@ -212,7 +86,7 @@ def test_zero_tasks(self): machine = "derecho" nodes = 5 tasks = 0 - add_args(machine, nodes, tasks) + unit_testing.add_machine_node_args(machine, nodes, tasks) args = get_parser().parse_args() with self.assertRaisesRegex( SystemExit, @@ -225,7 +99,7 @@ def test_bld_build_path(self): machine = "derecho" nodes = 10 tasks = 64 - add_args(machine, nodes, tasks) + unit_testing.add_machine_node_args(machine, nodes, tasks) # Remove the build path directory shutil.rmtree(self._bld_path, ignore_errors=True) args = get_parser().parse_args() @@ -237,7 +111,7 @@ def test_mksurfdata_exist(self): machine = "derecho" nodes = 10 tasks = 64 - add_args(machine, nodes, tasks) + unit_testing.add_machine_node_args(machine, nodes, tasks) args = get_parser().parse_args() os.remove(self._mksurf_exe) with self.assertRaisesRegex(SystemExit, "mksurfdata_esmf executable "): @@ -248,7 +122,7 @@ def test_env_mach_specific_exist(self): machine = "derecho" nodes = 10 tasks = 64 - add_args(machine, nodes, tasks) + unit_testing.add_machine_node_args(machine, nodes, tasks) args = get_parser().parse_args() os.remove(self._env_mach) with self.assertRaisesRegex(SystemExit, "Environment machine specific file"): @@ -259,7 +133,7 @@ def test_bad_machine(self): machine = "zztop" nodes = 1 tasks = 64 - add_args(machine, nodes, tasks) + unit_testing.add_machine_node_args(machine, nodes, tasks) with 
self.assertRaises(SystemExit): get_parser().parse_args() diff --git a/python/ctsm/test/test_unit_longitude.py b/python/ctsm/test/test_unit_longitude.py index 6bf7ec53e2..2382b2c303 100644 --- a/python/ctsm/test/test_unit_longitude.py +++ b/python/ctsm/test/test_unit_longitude.py @@ -10,7 +10,7 @@ from ctsm.longitude import Longitude from ctsm.longitude import _convert_lon_type_180_to_360, _convert_lon_type_360_to_180 from ctsm.longitude import _check_lon_type_180, _check_lon_type_360 -from ctsm.longitude import _detect_lon_type +from ctsm.longitude import detect_lon_type # Allow test names that pylint doesn't like; otherwise hard to make them # readable @@ -369,57 +369,57 @@ def test_lon_compare_notlon_error(self): def test_detect_lon_type_mid_180(self): """test that detect_lon_type works for an unambiguously 180 value""" - self.assertEqual(_detect_lon_type(-150), 180) + self.assertEqual(detect_lon_type(-150), 180) def test_detect_lon_type_min_180(self): """test that detect_lon_type works at -180""" - self.assertEqual(_detect_lon_type(-180), 180) + self.assertEqual(detect_lon_type(-180), 180) def test_detect_lon_type_mid_360(self): """test that detect_lon_type works for an unambiguously 360 value""" - self.assertEqual(_detect_lon_type(355), 360) + self.assertEqual(detect_lon_type(355), 360) def test_detect_lon_type_max_360(self): """test that detect_lon_type works at 360""" - self.assertEqual(_detect_lon_type(360), 360) + self.assertEqual(detect_lon_type(360), 360) def test_detect_lon_type_list_180(self): """test that detect_lon_type works for a list with just one unambiguously 180 value""" - self.assertEqual(_detect_lon_type([-150, 150]), 180) + self.assertEqual(detect_lon_type([-150, 150]), 180) def test_detect_lon_type_list_360(self): """test that detect_lon_type works for a list with just one unambiguously 360 value""" - self.assertEqual(_detect_lon_type([256, 150]), 360) + self.assertEqual(detect_lon_type([256, 150]), 360) def test_detect_lon_type_ambig(self): 
"""test that detect_lon_type fails if ambiguous""" with self.assertRaisesRegex(ArgumentTypeError, r"Longitude\(s\) ambiguous"): - _detect_lon_type(150) + detect_lon_type(150) def test_detect_lon_type_list_ambig(self): """test that detect_lon_type fails for an ambiguous list""" with self.assertRaisesRegex(ArgumentTypeError, r"Longitude\(s\) ambiguous"): - _detect_lon_type([150, 170]) + detect_lon_type([150, 170]) def test_detect_lon_type_list_both(self): """test that detect_lon_type fails for a list with unambiguous members of both types""" with self.assertRaisesRegex(RuntimeError, r"Longitude array contains values of both types"): - _detect_lon_type([-150, 270]) + detect_lon_type([-150, 270]) def test_detect_lon_type_ambig0(self): """test that detect_lon_type fails at 0""" with self.assertRaisesRegex(ArgumentTypeError, r"Longitude\(s\) ambiguous"): - _detect_lon_type(0) + detect_lon_type(0) def test_detect_lon_type_oob_low(self): """test that detect_lon_type fails if out of bounds below min""" with self.assertRaisesRegex(ValueError, r"\(Minimum\) longitude < -180"): - _detect_lon_type(-300) + detect_lon_type(-300) def test_detect_lon_type_oob_high(self): """test that detect_lon_type fails if out of bounds above max""" with self.assertRaisesRegex(ValueError, r"\(Maximum\) longitude > 360"): - _detect_lon_type(500) + detect_lon_type(500) def test_list_as_lon(self): """ @@ -446,6 +446,27 @@ def test_lon_type_getter(self): lon = Longitude(55, 180) self.assertEqual(lon.lon_type(), 180) + def test_no_implicit_string_conversion(self): + """Ensure that implicit string conversion is disallowed""" + lon = Longitude(55, 180) + with self.assertRaisesRegex( + NotImplementedError, r"Use Longitude\.get_str\(\) instead of implicit string conversion" + ): + _ = f"{lon}" + with self.assertRaisesRegex( + NotImplementedError, r"Use Longitude\.get_str\(\) instead of implicit string conversion" + ): + _ = str(lon) + + def test_get_str(self): + """Ensure that explicit string conversion 
works as expected""" + lon = Longitude(55, 180) + self.assertEqual(lon.get_str(180), "55.0") + self.assertEqual(lon.get_str(360), "55.0") + lon = Longitude(-55, 180) + self.assertEqual(lon.get_str(180), "-55.0") + self.assertEqual(lon.get_str(360), "305.0") + if __name__ == "__main__": unit_testing.setup_for_tests() diff --git a/python/ctsm/test/test_unit_plumber2_surf_wrapper.py b/python/ctsm/test/test_unit_plumber2_surf_wrapper.py index 66f5578caa..4b84752edb 100755 --- a/python/ctsm/test/test_unit_plumber2_surf_wrapper.py +++ b/python/ctsm/test/test_unit_plumber2_surf_wrapper.py @@ -16,7 +16,7 @@ # pylint: disable=wrong-import-position from ctsm import unit_testing -from ctsm.site_and_regional.plumber2_surf_wrapper import get_parser +from ctsm.site_and_regional.plumber2_surf_wrapper import get_args # pylint: disable=invalid-name @@ -26,12 +26,60 @@ class TestPlumber2SurfWrapper(unittest.TestCase): Basic class for testing plumber2_surf_wrapper.py. """ - def test_parser(self): + def setUp(self): + sys.argv = ["subset_data"] # Could actually be anything + + def test_parser_default_csv_exists(self): + """ + Test that default PLUMBER2 sites CSV file exists + """ + + args = get_args() + self.assertTrue(os.path.exists(args.plumber2_sites_csv)) + + def test_parser_custom_csv(self): + """ + Test that script accepts custom CSV file path + """ + + custom_path = "path/to/custom.csv" + sys.argv += ["--plumber2-sites-csv", custom_path] + args = get_args() + self.assertEqual(args.plumber2_sites_csv, custom_path) + + def test_parser_verbose_false_default(self): + """ + Test that script is not verbose by default + """ + + args = get_args() + self.assertFalse(args.verbose) + + def test_parser_verbose_true(self): + """ + Test that --verbose sets verbose to True + """ + + sys.argv += ["--verbose"] + args = get_args() + self.assertTrue(args.verbose) + + def test_parser_78pft_false_default(self): + """ + Test that script does not use 78pft mode by default + """ + + args = get_args() 
+ self.assertFalse(args.use_managed_crops) + + def test_parser_78pft_true(self): """ - Test that parser has same defaults as expected + Test that --crop sets use_managed_crops to True """ - self.assertEqual(get_parser().argument_default, None, "Parser not working as expected") + sys.argv += ["--crop"] + args = get_args() + self.assertTrue(args.use_managed_crops) if __name__ == "__main__": diff --git a/python/ctsm/test/test_unit_singlept_data.py b/python/ctsm/test/test_unit_singlept_data.py index 644af82588..489763282d 100755 --- a/python/ctsm/test/test_unit_singlept_data.py +++ b/python/ctsm/test/test_unit_singlept_data.py @@ -18,6 +18,8 @@ # pylint: disable=wrong-import-position from ctsm import unit_testing from ctsm.site_and_regional.single_point_case import SinglePointCase +from ctsm.pft_utils import MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS +from ctsm.longitude import Longitude # pylint: disable=invalid-name @@ -28,7 +30,7 @@ class TestSinglePointCase(unittest.TestCase): """ plat = 20.1 - plon = 50.5 + plon = Longitude(50.5, lon_type=180) site_name = None create_domain = True create_surfdata = True @@ -38,7 +40,7 @@ class TestSinglePointCase(unittest.TestCase): dom_pft = [8] evenly_split_cropland = False pct_pft = None - num_pft = 16 + num_pft = MAX_PFT_GENERICCROPS cth = [0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9] cbh = [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] include_nonveg = False @@ -131,7 +133,7 @@ def test_check_dom_pft_too_big(self): out_dir=self.out_dir, overwrite=self.overwrite, ) - single_point.dom_pft = [16, 36, 79] + single_point.dom_pft = [MAX_PFT_GENERICCROPS, 36, 79] with self.assertRaisesRegex(argparse.ArgumentTypeError, "values for --dompft should*"): single_point.check_dom_pft() @@ -161,7 +163,7 @@ def test_check_dom_pft_too_small(self): out_dir=self.out_dir, overwrite=self.overwrite, ) - single_point.dom_pft = [16, 36, -1] + single_point.dom_pft = [MAX_PFT_GENERICCROPS, 36, -1] with 
self.assertRaisesRegex(argparse.ArgumentTypeError, "values for --dompft should*"): single_point.check_dom_pft() @@ -192,7 +194,7 @@ def test_check_dom_pft_numpft(self): overwrite=self.overwrite, ) single_point.dom_pft = [15, 53] - single_point.num_pft = 16 + single_point.num_pft = MAX_PFT_GENERICCROPS with self.assertRaisesRegex(argparse.ArgumentTypeError, "Please use --crop*"): single_point.check_dom_pft() @@ -223,7 +225,7 @@ def test_check_dom_pft_mixed_range(self): overwrite=self.overwrite, ) single_point.dom_pft = [1, 5, 15] - single_point.num_pft = 78 + single_point.num_pft = MAX_PFT_MANAGEDCROPS with self.assertRaisesRegex( argparse.ArgumentTypeError, "You are subsetting using mixed land*" ): diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py index 2106799a4b..11ee416d4a 100755 --- a/python/ctsm/test/test_unit_singlept_data_surfdata.py +++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py @@ -23,6 +23,8 @@ # pylint: disable=wrong-import-position from ctsm import unit_testing from ctsm.site_and_regional.single_point_case import SinglePointCase +from ctsm.pft_utils import MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS +from ctsm.longitude import Longitude # pylint: disable=invalid-name # pylint: disable=too-many-lines @@ -36,7 +38,7 @@ class TestSinglePointCaseSurfaceNoCrop(unittest.TestCase): """ plat = 20.1 - plon = 50.5 + plon = Longitude(50.5, lon_type=180) site_name = None create_domain = True create_surfdata = True @@ -46,7 +48,7 @@ class TestSinglePointCaseSurfaceNoCrop(unittest.TestCase): dom_pft = [8] evenly_split_cropland = False pct_pft = None - num_pft = 16 + num_pft = MAX_PFT_GENERICCROPS cth = 0.9 cbh = 0.1 include_nonveg = False @@ -657,7 +659,7 @@ class TestSinglePointCaseSurfaceCrop(unittest.TestCase): """ plat = 20.1 - plon = 50.5 + plon = Longitude(50.5, lon_type=180) site_name = None create_domain = True create_surfdata = True @@ -667,7 +669,7 @@ class 
TestSinglePointCaseSurfaceCrop(unittest.TestCase): dom_pft = [17] evenly_split_cropland = False pct_pft = None - num_pft = 78 + num_pft = MAX_PFT_MANAGEDCROPS cth = 0.9 cbh = 0.1 include_nonveg = False diff --git a/python/ctsm/test/test_unit_sspmatrix.py b/python/ctsm/test/test_unit_sspmatrix.py index 1b1bc60185..dd81a7df4f 100755 --- a/python/ctsm/test/test_unit_sspmatrix.py +++ b/python/ctsm/test/test_unit_sspmatrix.py @@ -53,7 +53,7 @@ def create_clone( Extend to handle creation of user_nl_clm file """ clone = super().create_clone(newcase, keepexe=keepexe) - os.mknod(os.path.join(newcase, "user_nl_clm")) + Path.touch(os.path.join(newcase, "user_nl_clm")) # Also make the needed case directories clone.make_case_dirs(self._tempdir) return clone @@ -165,7 +165,7 @@ def test_append_user_nl_step2(self): if os.path.exists(ufile): os.remove(ufile) - os.mknod(ufile) + Path.touch(ufile) expect = "\nhist_nhtfrq = -8760, hist_mfilt = 2\n" self.ssp.append_user_nl(caseroot=".", n=2) diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py index eeb0a9a38a..a127a282e0 100755 --- a/python/ctsm/test/test_unit_subset_data.py +++ b/python/ctsm/test/test_unit_subset_data.py @@ -7,6 +7,8 @@ """ import unittest +import tempfile +import shutil import configparser import argparse import os @@ -85,12 +87,24 @@ def setUp(self): self.defaults = configparser.ConfigParser() self.defaults.read(os.path.join(self.cesmroot, "tools/site_and_regional", DEFAULTS_FILE)) + # Work in temporary directory + self._previous_dir = os.getcwd() + self._tempdir = tempfile.mkdtemp() + os.chdir(self._tempdir) # cd to tempdir + + def tearDown(self): + """ + Remove temporary directory + """ + os.chdir(self._previous_dir) + shutil.rmtree(self._tempdir, ignore_errors=True) + def test_inputdata_setup_files_basic(self): """ Test """ self.args = check_args(self.args) - files = setup_files(self.args, self.defaults, self.cesmroot) + files = setup_files(self.args, self.defaults, 
self.cesmroot, testing=True) self.assertEqual( files["fsurf_in"], "surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc", @@ -116,6 +130,23 @@ def test_inputdata_setup_files_inputdata_dne(self): with self.assertRaisesRegex(SystemExit, "inputdata directory does not exist"): setup_files(self.args, self.defaults, self.cesmroot) + def test_inputdata_setup_files_gswp3_error(self): + """ + Test that error is thrown if user tries to --create-datm GSWP3 + """ + cfg_file = os.path.join( + _CTSM_PYTHON, "ctsm", "test", "testinputs", "default_data_gswp3.cfg" + ) + sys.argv = ["subset_data", "point", "--create-datm", "--cfg-file", cfg_file] + self.args = self.parser.parse_args() + self.defaults = configparser.ConfigParser() + self.defaults.read(self.args.config_file) + + with self.assertRaisesRegex( + NotImplementedError, "https://github.com/ESCOMP/CTSM/issues/3269" + ): + setup_files(self.args, self.defaults, self.cesmroot) + def test_check_args_nooutput(self): """ Test that check args aborts when no-output is asked for @@ -153,7 +184,7 @@ def test_check_args_outsurfdat_provided(self): sys.argv = ["subset_data", "point", "--create-surface", "--out-surface", "outputsurface.nc"] self.args = self.parser.parse_args() self.args = check_args(self.args) - files = setup_files(self.args, self.defaults, self.cesmroot) + files = setup_files(self.args, self.defaults, self.cesmroot, testing=True) self.assertEqual( files["fsurf_out"], "outputsurface.nc", @@ -229,7 +260,7 @@ def test_check_args_outsurfdat_fails_without_overwrite(self): for an existing dataset without the overwrite option """ outfile = os.path.join( - os.getcwd(), + _CTSM_PYTHON, "ctsm/test/testinputs/", "surfdata_1x1_mexicocityMEX_hist_16pfts_CMIP6_2000_c231103.nc", ) diff --git a/python/ctsm/test/testinputs/default_data.cfg b/python/ctsm/test/testinputs/default_data.cfg index a832d810cc..60c012561c 100644 --- a/python/ctsm/test/testinputs/default_data.cfg +++ b/python/ctsm/test/testinputs/default_data.cfg @@ -1,7 +1,7 @@ [main] 
clmforcingindir = /glade/campaign/cesm/cesmdata/cseg/inputdata -[datm_crujra] +[datm] dir = atm/datm7/atm_forcing.datm7.CRUJRA.0.5d.c20241231/three_stream domain = domain.crujra_v2.3_0.5x0.5.c220801.nc solardir = . @@ -14,19 +14,6 @@ solarname = CLMCRUJRA2024.Solar precname = CLMCRUJRA2024.Precip tpqwname = CLMCRUJRA2024.TPQW -[datm_gswp3] -dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 -domain = domain.lnd.360x720_gswp3.0v1.c170606.nc -solardir = Solar -precdir = Precip -tpqwdir = TPHWL -solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. -prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. -tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. -solarname = CLMGSWP3v1.Solar -precname = CLMGSWP3v1.Precip -tpqwname = CLMGSWP3v1.TPQW - [surfdat] dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 surfdat_16pft = surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc diff --git a/python/ctsm/test/testinputs/default_data_gswp3.cfg b/python/ctsm/test/testinputs/default_data_gswp3.cfg new file mode 100644 index 0000000000..09e1463eb2 --- /dev/null +++ b/python/ctsm/test/testinputs/default_data_gswp3.cfg @@ -0,0 +1,30 @@ +[main] +clmforcingindir = /glade/campaign/cesm/cesmdata/cseg/inputdata + +[datm] +dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 +domain = domain.lnd.360x720_gswp3.0v1.c170606.nc +solardir = Solar +precdir = Precip +tpqwdir = TPHWL +solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. +prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. +tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. 
+solarname = CLMGSWP3v1.Solar +precname = CLMGSWP3v1.Precip +tpqwname = CLMGSWP3v1.TPQW + +[surfdat] +dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 +surfdat_16pft = surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc +surfdat_78pft = surfdata_0.9x1.25_hist_2000_78pfts_c240908.nc +mesh_dir = share/meshes/ +mesh_surf = fv0.9x1.25_141008_ESMFmesh.nc + +[landuse] +dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 +landuse_16pft = landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240908.nc +landuse_78pft = landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240908.nc + +[domain] +file = share/domains/domain.lnd.fv0.9x1.25_gx1v7.151020.nc diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1986.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1986.nc new file mode 100644 index 0000000000..84da04d260 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1986.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e1075a199de0d85b974bd9dbd09216e460eda035b3a6652cbfc59b75829e3ee +size 13136 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1987.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1987.nc new file mode 100644 index 0000000000..f05b8eb442 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1987.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21fb1ae2b2e75336e409770988dabd80e9ee69d990e5aa63dc7008c9145a455f +size 13136 diff --git 
a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1988.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1988.nc new file mode 100644 index 0000000000..3d521c66f4 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1988.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f8e2624c686c86d5d1071ed618b564e4589731555836083cad0a1e8259b7962e +size 13136 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1986.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1986.nc new file mode 100644 index 0000000000..1d551867f0 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1986.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b4164da71cf6bdf351143b936d2ad84da0c943378cb54534ec46f03513e2d17 +size 13144 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1987.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1987.nc new file mode 100644 index 0000000000..b752309969 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1987.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93e9ab5686acc5fb7ddaf775e7f561d572a4fbecab28088b643868432e3d1ed3 +size 13144 diff --git 
a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1988.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1988.nc new file mode 100644 index 0000000000..c3c47b61be --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1988.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fbb6d1679040959e540928b7df056a848e9a385441d725f5f84271a07c64889c +size 13144 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1986.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1986.nc new file mode 100644 index 0000000000..9be8249601 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1986.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7145768c96bdf8b3cbab234b2a09c4506916dbbc8db9fbc73282d643251ed318 +size 37324 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1987.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1987.nc new file mode 100644 index 0000000000..068a7ff28e --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1987.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1e38846646d2514671bd340daa0954bf1981aa328d4923cb42044097bb77f38 +size 37324 diff --git 
a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1988.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1988.nc new file mode 100644 index 0000000000..1b7094dbee --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1988.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:395aa495fd3b926521cd355fd2a012cdcd07d19b7a00467fdc49dafbf80751a1 +size 37324 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/domain.crujra_v2.3_0.5x0.5_TMP_c250620.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/domain.crujra_v2.3_0.5x0.5_TMP_c250620.nc new file mode 100644 index 0000000000..c9b19f474b --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180/datmdata/domain.crujra_v2.3_0.5x0.5_TMP_c250620.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:206ba64ca50dbd3b34e93f498eb1f526689e3a6900762f12e30c3af9b75ccb5c +size 2000 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.-69.0_-12.0.1986.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.-69.0_-12.0.1986.nc new file mode 120000 index 0000000000..0e14bd986a --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.-69.0_-12.0.1986.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1986.nc \ 
No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.-69.0_-12.0.1987.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.-69.0_-12.0.1987.nc new file mode 120000 index 0000000000..28b7abf80d --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.-69.0_-12.0.1987.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1987.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.-69.0_-12.0.1988.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.-69.0_-12.0.1988.nc new file mode 120000 index 0000000000..a238ab07c2 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Prec.-69.0_-12.0.1988.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//clmforc.CRUJRAv2.5_0.5x0.5.Prec.TMP.1988.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.-69.0_-12.0.1986.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.-69.0_-12.0.1986.nc new file mode 120000 index 0000000000..a2045e914c --- /dev/null +++ 
b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.-69.0_-12.0.1986.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1986.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.-69.0_-12.0.1987.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.-69.0_-12.0.1987.nc new file mode 120000 index 0000000000..24cc171353 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.-69.0_-12.0.1987.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1987.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.-69.0_-12.0.1988.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.-69.0_-12.0.1988.nc new file mode 120000 index 0000000000..00eacece43 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.Solr.-69.0_-12.0.1988.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//clmforc.CRUJRAv2.5_0.5x0.5.Solr.TMP.1988.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.-69.0_-12.0.1986.nc 
b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.-69.0_-12.0.1986.nc new file mode 120000 index 0000000000..3806e36151 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.-69.0_-12.0.1986.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1986.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.-69.0_-12.0.1987.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.-69.0_-12.0.1987.nc new file mode 120000 index 0000000000..44ce035a2a --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.-69.0_-12.0.1987.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1987.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.-69.0_-12.0.1988.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.-69.0_-12.0.1988.nc new file mode 120000 index 0000000000..cd8cdfb7c9 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.-69.0_-12.0.1988.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//clmforc.CRUJRAv2.5_0.5x0.5.TPQWL.TMP.1988.nc \ No newline at end of file diff --git 
a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/domain.crujra_v2.3_0.5x0.5_-69.0_-12.0_c250620.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/domain.crujra_v2.3_0.5x0.5_-69.0_-12.0_c250620.nc new file mode 120000 index 0000000000..1dc0d0d5f2 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type180_nositename/datmdata/domain.crujra_v2.3_0.5x0.5_-69.0_-12.0_c250620.nc @@ -0,0 +1 @@ +../../test_subset_data_pt_datm_amazon_type180/datmdata//domain.crujra_v2.3_0.5x0.5_TMP_c250620.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type360 b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type360 new file mode 120000 index 0000000000..88385bbff2 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_datm_amazon_type360 @@ -0,0 +1 @@ +test_subset_data_pt_datm_amazon_type180 \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/landuse.timeseries_TMP_amazon_hist_1850-1853_78pfts_c250618.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/landuse.timeseries_TMP_amazon_hist_1850-1853_78pfts_c250618.nc new file mode 100644 index 0000000000..d34fdf3acf --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/landuse.timeseries_TMP_amazon_hist_1850-1853_78pfts_c250618.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b063aeb04ed3a0a613608ecf88ac47efb39de7ba74bf6e33a490925540bf47fb +size 18176 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/surfdata_TMP_amazon_hist_1850_78pfts_c250618.nc 
b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/surfdata_TMP_amazon_hist_1850_78pfts_c250618.nc new file mode 100644 index 0000000000..02999b6b00 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type180/surfdata_TMP_amazon_hist_1850_78pfts_c250618.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:efbf02729f8741bfdfbd51d748cce31c2d90b0c9ef2f00d841d2940dea5bc144 +size 53256 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360 b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360 new file mode 120000 index 0000000000..ad4f251586 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360 @@ -0,0 +1 @@ +test_subset_data_pt_landuse_amazon_type180 \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360_nositename/landuse.timeseries_291.0_-12.0_amazon_hist_1850-1853_78pfts_c250618.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360_nositename/landuse.timeseries_291.0_-12.0_amazon_hist_1850-1853_78pfts_c250618.nc new file mode 120000 index 0000000000..8678639d98 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360_nositename/landuse.timeseries_291.0_-12.0_amazon_hist_1850-1853_78pfts_c250618.nc @@ -0,0 +1 @@ +../test_subset_data_pt_landuse_amazon_type360/landuse.timeseries_TMP_amazon_hist_1850-1853_78pfts_c250618.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360_nositename/surfdata_291.0_-12.0_amazon_hist_1850_78pfts_c250618.nc 
b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360_nositename/surfdata_291.0_-12.0_amazon_hist_1850_78pfts_c250618.nc new file mode 120000 index 0000000000..2efbb745f4 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_landuse_amazon_type360_nositename/surfdata_291.0_-12.0_amazon_hist_1850_78pfts_c250618.nc @@ -0,0 +1 @@ +../test_subset_data_pt_landuse_amazon_type360/surfdata_TMP_amazon_hist_1850_78pfts_c250618.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type180/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250617.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type180/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250617.nc new file mode 100644 index 0000000000..6e742560d0 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type180/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250617.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e694ca46925fbe07270b5468fe3899ead98dcc7d41353a6551dcc1ec92a9f9e0 +size 27740 diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type180_nositename/surfdata_-69.0_-12.0_amazon_hist_16pfts_CMIP6_2000_c250617.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type180_nositename/surfdata_-69.0_-12.0_amazon_hist_16pfts_CMIP6_2000_c250617.nc new file mode 120000 index 0000000000..9e811ca9c3 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type180_nositename/surfdata_-69.0_-12.0_amazon_hist_16pfts_CMIP6_2000_c250617.nc @@ -0,0 +1 @@ +../test_subset_data_pt_surface_amazon_type180/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250617.nc \ No newline at end of file diff --git 
a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type360 b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type360 new file mode 120000 index 0000000000..3a7bc5efe3 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_pt_surface_amazon_type360 @@ -0,0 +1 @@ +test_subset_data_pt_surface_amazon_type180 \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon_noregname/domain.lnd.5x5pt-amazon_navy_291.0-299.0_-12.0--7.0_c250508.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon_noregname/domain.lnd.5x5pt-amazon_navy_291.0-299.0_-12.0--7.0_c250508.nc new file mode 120000 index 0000000000..99d8401b46 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon_noregname/domain.lnd.5x5pt-amazon_navy_291.0-299.0_-12.0--7.0_c250508.nc @@ -0,0 +1 @@ +../test_subset_data_reg_amazon/domain.lnd.5x5pt-amazon_navy_TMP_c250508.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon_noregname/domain.lnd.5x5pt-amazon_navy_291.0-299.0_-12.0--7.0_c250508_ESMF_UNSTRUCTURED_MESH.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon_noregname/domain.lnd.5x5pt-amazon_navy_291.0-299.0_-12.0--7.0_c250508_ESMF_UNSTRUCTURED_MESH.nc new file mode 120000 index 0000000000..6f8ca4e665 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon_noregname/domain.lnd.5x5pt-amazon_navy_291.0-299.0_-12.0--7.0_c250508_ESMF_UNSTRUCTURED_MESH.nc @@ -0,0 +1 @@ +../test_subset_data_reg_amazon/domain.lnd.5x5pt-amazon_navy_TMP_c250508_ESMF_UNSTRUCTURED_MESH.nc \ No newline at end of file diff --git 
a/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon_noregname/surfdata_291.0-299.0_-12.0--7.0_amazon_hist_16pfts_CMIP6_2000_c250508.nc b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon_noregname/surfdata_291.0-299.0_-12.0--7.0_amazon_hist_16pfts_CMIP6_2000_c250508.nc new file mode 120000 index 0000000000..73bde404b0 --- /dev/null +++ b/python/ctsm/test/testinputs/expected_result_files/test_subset_data_reg_amazon_noregname/surfdata_291.0-299.0_-12.0--7.0_amazon_hist_16pfts_CMIP6_2000_c250508.nc @@ -0,0 +1 @@ +../test_subset_data_reg_amazon/surfdata_TMP_amazon_hist_16pfts_CMIP6_2000_c250508.nc \ No newline at end of file diff --git a/python/ctsm/test/testinputs/landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc b/python/ctsm/test/testinputs/landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc new file mode 100644 index 0000000000..9e81ad351c --- /dev/null +++ b/python/ctsm/test/testinputs/landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:83b34be6da2047bb9a099346f7f5472b932ead7033fe8ab817540b99ff3117b8 +size 215248 diff --git a/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_site_valid.csv b/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_site_valid.csv new file mode 100644 index 0000000000..2c1580bc03 --- /dev/null +++ b/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_site_valid.csv @@ -0,0 +1,7 @@ +#pftX-cth and pftX-cbh are the site-specific canopy top and bottom heights +#start_year and end_year will be used to define DATM_YR_ALIGN, DATM_YR_START and DATM_YR_END, and STOP_N in units of nyears. +#RUN_STARTDATE and START_TOD are specified because we are starting at GMT corresponding to local midnight. +#ATM_NCPL is specified so that the time step of the model matches the time interval specified by the atm forcing data.
+#longitudes must be in the range [-180,180] +,Site,Lat,Lon,pft1,pft1-%,pft1-cth,pft1-cbh,pft2,pft2-%,pft2-cth,pft2-cbh,start_year,end_year,RUN_STARTDATE,START_TOD,ATM_NCPL +27,BE-Lon,50.551590, 4.746130,15,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2005,2014,2004-12-31,82800,48 diff --git a/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_sites_invalid_pft.csv b/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_sites_invalid_pft.csv new file mode 100644 index 0000000000..e8f0eb8fbb --- /dev/null +++ b/python/ctsm/test/testinputs/plumber2_surf_wrapper/PLUMBER2_sites_invalid_pft.csv @@ -0,0 +1,8 @@ +#pftX-cth and pftX-cbh are the site-specific canopy top and bottom heights +#start_year and end_year will be used to define DATM_YR_ALIGN, DATM_YR_START and DATM_YR_END, and STOP_N in units of nyears. +#RUN_STARTDATE and START_TOD are specified because we are starting at GMT corresponding to local midnight. +#ATM_NCPL is specified so that the time step of the model matches the time interval specified by the atm forcing data.
+#longitudes must be in the range [-180,180] +,Site,Lat,Lon,pft1,pft1-%,pft1-cth,pft1-cbh,pft2,pft2-%,pft2-cth,pft2-cbh,start_year,end_year,RUN_STARTDATE,START_TOD,ATM_NCPL +26,Invalid-Pft,51.309166, 4.520560,-1,19.22,21.00,10.50,7,80.78,21.00,12.08,2004,2014,2003-12-31,82800,48 +27,BE-Lon,50.551590, 4.746130,15,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2005,2014,2004-12-31,82800,48 diff --git a/python/ctsm/test/testinputs/subset_data_amazon_1850.cfg b/python/ctsm/test/testinputs/subset_data_amazon_1850.cfg new file mode 100644 index 0000000000..6b16160f48 --- /dev/null +++ b/python/ctsm/test/testinputs/subset_data_amazon_1850.cfg @@ -0,0 +1,14 @@ +[surfdat] +dir = ctsm/test/testinputs +surfdat_16pft = surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc +surfdat_78pft = surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc +mesh_dir = ctsm/test/testinputs +mesh_surf = ESMF_mesh_5x5pt_amazon_from_domain_c230308.nc + +[landuse] +dir = ctsm/test/testinputs +landuse_16pft = landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc +landuse_78pft = landuse.timeseries_5x5_amazon_hist_1850-1853_78pfts_c250617.nc + +[domain] +file = ctsm/test/testinputs/domain.lnd.5x5pt-amazon_navy.090715.nc diff --git a/python/ctsm/test/testinputs/surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc b/python/ctsm/test/testinputs/surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc new file mode 100644 index 0000000000..747c33a2b0 --- /dev/null +++ b/python/ctsm/test/testinputs/surfdata_5x5_amazon_hist_1850_78pfts_c250617.nc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0795d84b3e07a9437c7e9869810b74002210f7c55349f57983c36db9990db4a +size 893512 diff --git a/python/ctsm/test_gen_mksurfdata_jobscript_single_parent.py b/python/ctsm/test_gen_mksurfdata_jobscript_single_parent.py new file mode 100755 index 0000000000..b6a3741444 --- /dev/null +++ b/python/ctsm/test_gen_mksurfdata_jobscript_single_parent.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python3 + +""" +Parent class for some 
unittest modules relating to gen_mksurfdata_jobscript_single.py +""" + +import unittest +import os +import sys +import shutil +from pathlib import Path + +import tempfile + +from ctsm.path_utils import path_to_ctsm_root + + +# pylint: disable=too-many-instance-attributes +class TestFGenMkSurfJobscriptSingleParent(unittest.TestCase): + """Parent class for some unittest modules relating to gen_mksurfdata_jobscript_single.py""" + + def setUp(self): + """Setup for trying out the methods""" + testinputs_path = os.path.join(path_to_ctsm_root(), "python/ctsm/test/testinputs") + self._testinputs_path = testinputs_path + self._previous_dir = os.getcwd() + self._tempdir = tempfile.mkdtemp() + os.chdir(self._tempdir) + self._account = "ACCOUNT_NUMBER" + self._jobscript_file = "output_jobscript" + self._output_compare = """#!/bin/bash +# Edit the batch directives for your batch system +# Below are default batch directives for derecho +#PBS -N mksurfdata +#PBS -j oe +#PBS -k eod +#PBS -S /bin/bash +#PBS -l walltime=12:00:00 +#PBS -A ACCOUNT_NUMBER +#PBS -q main +#PBS -l select=1:ncpus=128:mpiprocs=64:mem=218GB + +# This is a batch script to run a set of resolutions for mksurfdata_esmf input namelist +# NOTE: THIS SCRIPT IS AUTOMATICALLY GENERATED SO IN GENERAL YOU SHOULD NOT EDIT it!! 
+ +""" + self._bld_path = os.path.join(self._tempdir, "tools_bld") + os.makedirs(self._bld_path) + self.assertTrue(os.path.isdir(self._bld_path)) + self._nlfile = os.path.join(self._tempdir, "namelist_file") + Path.touch(self._nlfile) + self.assertTrue(os.path.exists(self._nlfile)) + self._mksurf_exe = os.path.join(self._bld_path, "mksurfdata") + Path.touch(self._mksurf_exe) + self.assertTrue(os.path.exists(self._mksurf_exe)) + self._env_mach = os.path.join(self._bld_path, ".env_mach_specific.sh") + Path.touch(self._env_mach) + self.assertTrue(os.path.exists(self._env_mach)) + sys.argv = [ + "gen_mksurfdata_jobscript_single", + "--bld-path", + self._bld_path, + "--namelist-file", + self._nlfile, + "--jobscript-file", + self._jobscript_file, + "--account", + self._account, + ] + + def tearDown(self): + """ + Remove temporary directory + """ + os.chdir(self._previous_dir) + shutil.rmtree(self._tempdir, ignore_errors=True) diff --git a/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py b/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py index 88ffa7f7b1..8154167609 100755 --- a/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py +++ b/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py @@ -205,11 +205,11 @@ def main(): "ultra_hi_res_no_crop": ["mpasa15", "mpasa3p75"], "standard_res": ["360x720cru", "0.9x1.25", "1.9x2.5", "C96", "mpasa120"], "standard_res_no_f09": ["360x720cru", "1.9x2.5", "C96", "mpasa120"], - "low_res": ["4x5", "10x15", "ne3np4.pg3"], + "low_res": ["4x5", "10x15", "ne3np4.pg3", "ne3np4"], "mpasa480": ["mpasa480"], "nldas_res": ["0.125nldas2"], "5x5_amazon": ["5x5_amazon"], - "ne3": ["ne3np4.pg3"], + "ne3": ["ne3np4", "ne3np4.pg3"], "ne16": ["ne16np4.pg3"], "ne30": ["ne30np4.pg3", "ne30np4.pg2", "ne30np4"], "ne0np4": [ diff --git a/python/ctsm/toolchain/gen_mksurfdata_namelist.py b/python/ctsm/toolchain/gen_mksurfdata_namelist.py index 31fcbfe8ff..3a405bf5fa 100755 --- a/python/ctsm/toolchain/gen_mksurfdata_namelist.py +++ 
b/python/ctsm/toolchain/gen_mksurfdata_namelist.py @@ -15,6 +15,7 @@ from ctsm.path_utils import path_to_ctsm_root, path_to_cime from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args +from ctsm.pft_utils import MAX_PFT_GENERICCROPS, MAX_PFT_MANAGEDCROPS logger = logging.getLogger(__name__) @@ -306,9 +307,9 @@ def main(): # Determine num_pft if nocrop_flag: - num_pft = "16" + num_pft = str(MAX_PFT_GENERICCROPS) else: - num_pft = "78" + num_pft = str(MAX_PFT_MANAGEDCROPS) logger.info("num_pft is %s", num_pft) # Write out if surface dataset will be created diff --git a/python/ctsm/unit_testing.py b/python/ctsm/unit_testing.py index d3a308c796..8370830b4d 100644 --- a/python/ctsm/unit_testing.py +++ b/python/ctsm/unit_testing.py @@ -1,8 +1,22 @@ """Functions to aid unit tests""" +import sys from ctsm.ctsm_logging import setup_logging_for_tests +def add_machine_node_args(machine, nodes, tasks): + """add arguments to sys.argv""" + args_to_add = [ + "--machine", + machine, + "--number-of-nodes", + str(nodes), + "--tasks-per-node", + str(tasks), + ] + sys.argv += args_to_add + + def setup_for_tests(enable_critical_logs=False): """Call this at the beginning of unit testing diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 9707af4f0b..2682775ca5 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -45,6 +45,8 @@ add_subdirectory(${CLM_ROOT}/share/unit_test_stubs/util csm_share_stubs) list ( APPEND drv_sources_needed ${CLM_ROOT}/components/cmeps/cesm/nuopc_cap_share/glc_elevclass_mod.F90 ${CLM_ROOT}/components/cmeps/cesm/nuopc_cap_share/shr_dust_emis_mod.F90 + ${CLM_ROOT}/components/cmeps/cesm/nuopc_cap_share/shr_expr_parser_mod.F90 + ${CLM_ROOT}/components/cmeps/cesm/nuopc_cap_share/shr_fire_emis_mod.F90 ) # Add CLM source directories diff --git a/src/biogeochem/CMakeLists.txt b/src/biogeochem/CMakeLists.txt index 270e85838b..3da0a2eab6 100644 --- a/src/biogeochem/CMakeLists.txt +++ b/src/biogeochem/CMakeLists.txt @@ 
-2,6 +2,7 @@ # source files that are currently used in unit tests list(APPEND clm_sources + ch4varcon.F90 CNSharedParamsMod.F90 CNPhenologyMod.F90 CNSpeciesMod.F90 @@ -12,6 +13,14 @@ list(APPEND clm_sources DustEmisFactory.F90 CropReprPoolsMod.F90 CropType.F90 + CNFireBaseMod.F90 + CNFireNoFireMod.F90 + CNFireFactoryMod.F90 + CNFireLi2014Mod.F90 + CNFireLi2016Mod.F90 + CNFireLi2021Mod.F90 + CNFireLi2024Mod.F90 + CNVegMatrixMod.F90 CNVegStateType.F90 CNVegCarbonStateType.F90 CNVegCarbonFluxType.F90 @@ -20,6 +29,11 @@ list(APPEND clm_sources CNVegNitrogenFluxType.F90 CNCIsoAtmTimeSeriesReadMod.F90 CNVegComputeSeedMod.F90 + FATESFireBase.F90 + FATESFireDataMod.F90 + FATESFireFactoryMod.F90 + FATESFireNoDataMod.F90 + SatellitePhenologyMod.F90 SpeciesBaseType.F90 SpeciesIsotopeType.F90 SpeciesNonIsotopeType.F90 diff --git a/src/biogeochem/CNDriverMod.F90 b/src/biogeochem/CNDriverMod.F90 index b407e07ad5..9a51d9f616 100644 --- a/src/biogeochem/CNDriverMod.F90 +++ b/src/biogeochem/CNDriverMod.F90 @@ -60,7 +60,7 @@ module CNDriverMod contains !----------------------------------------------------------------------- - subroutine CNDriverInit(bounds, NLFilename, cnfire_method) + subroutine CNDriverInit(bounds, NLFilename) ! ! !DESCRIPTION: ! Initialzation of the CN Ecosystem dynamics. @@ -68,18 +68,15 @@ subroutine CNDriverInit(bounds, NLFilename, cnfire_method) ! !USES: use CNSharedParamsMod , only : use_fun use CNPhenologyMod , only : CNPhenologyInit - use FireMethodType , only : fire_method_type use SoilBiogeochemCompetitionMod, only : SoilBiogeochemCompetitionInit ! ! !ARGUMENTS: type(bounds_type) , intent(in) :: bounds character(len=*) , intent(in) :: NLFilename ! 
Namelist filename - class(fire_method_type) , intent(inout) :: cnfire_method !----------------------------------------------------------------------- call SoilBiogeochemCompetitionInit(bounds) if(use_cn)then call CNPhenologyInit(bounds) - call cnfire_method%FireInit(bounds, NLFilename) end if end subroutine CNDriverInit diff --git a/src/biogeochem/CNFireBaseMod.F90 b/src/biogeochem/CNFireBaseMod.F90 index 2f9e99ea44..42a054b44c 100644 --- a/src/biogeochem/CNFireBaseMod.F90 +++ b/src/biogeochem/CNFireBaseMod.F90 @@ -85,13 +85,17 @@ module CNFireBaseMod private ! !PRIVATE MEMBER DATA: ! !PUBLIC MEMBER DATA (used by extensions of the base class): - real(r8), public, pointer :: btran2_patch (:) ! patch root zone soil wetness factor (0 to 1) + real(r8), public, pointer :: btran2_patch (:) => NULL() ! patch root zone soil wetness factor (0 to 1) contains ! ! !PUBLIC MEMBER FUNCTIONS: + procedure, public :: CNFireInit ! Initialization of Fire procedure, public :: FireInit => CNFireInit ! Initialization of Fire - procedure, public :: FireReadNML ! Read in namelist for CNFire + procedure, public :: CNFireCleanBase ! Deallocate fire data + procedure, public :: FireClean => CNFireCleanBase ! Deallocate fire data + procedure, public :: CNFireReadNML ! Read in namelist for CNFire + procedure, public :: FireReadNML => CNFireReadNML ! Read in namelist for CNFire procedure, public :: CNFireReadParams ! Read in constant parameters from the paramsfile procedure, public :: CNFireFluxes ! Calculate fire fluxes procedure, public :: CNFire_calc_fire_root_wetness_Li2014 ! Calculate CN-fire specific root wetness: original version @@ -129,17 +133,16 @@ end function need_lightning_and_popdens_interface contains !----------------------------------------------------------------------- - subroutine CNFireInit( this, bounds, NLFilename ) + subroutine CNFireInit( this, bounds ) ! ! !DESCRIPTION: ! Initialize CN Fire module ! 
!ARGUMENTS: class(cnfire_base_type) :: this type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename !----------------------------------------------------------------------- ! Call the base-class Initialization method - call this%BaseFireInit( bounds, NLFilename ) + call this%BaseFireInit( bounds ) ! Allocate memory call this%InitAllocate( bounds ) @@ -185,6 +188,24 @@ subroutine InitHistory( this, bounds ) ptr_patch=this%btran2_patch, l2g_scale_type='veg') end subroutine InitHistory + !---------------------------------------------------------------------- + + subroutine CNFireCleanBase( this ) + ! + ! Deallocate data + ! + ! !ARGUMENTS: + class(cnfire_base_type) :: this + !----------------------------------------------------------------------- + ! Call the base class clean method + !call this%BaseFireClean() + + if ( associated(this%btran2_patch) )then + deallocate(this%btran2_patch) + end if + this%btran2_patch => NULL() + end subroutine CNFireCleanBase + !---------------------------------------------------------------------- subroutine CNFire_calc_fire_root_wetness_Li2014( this, bounds, & num_exposedvegp, filter_exposedvegp, num_noexposedvegp, filter_noexposedvegp, & @@ -321,7 +342,7 @@ end subroutine CNFire_calc_fire_root_wetness_Li2021 !---------------------------------------------------------------------- !---------------------------------------------------------------------- - subroutine FireReadNML( this, NLFilename ) + subroutine CNFireReadNML( this, bounds, NLFilename ) ! ! !DESCRIPTION: ! Read the namelist for CNFire @@ -331,17 +352,17 @@ subroutine FireReadNML( this, NLFilename ) use shr_nl_mod , only : shr_nl_find_group_name use spmdMod , only : masterproc, mpicom use shr_mpi_mod , only : shr_mpi_bcast - use clm_varctl , only : iulog ! ! !ARGUMENTS: class(cnfire_base_type) :: this + type(bounds_type), intent(in):: bounds !bounds character(len=*), intent(in) :: NLFilename ! Namelist filename ! ! 
!LOCAL VARIABLES: integer :: ierr ! error code integer :: unitn ! unit for namelist file - character(len=*), parameter :: subname = 'FireReadNML' + character(len=*), parameter :: subname = 'CNFireReadNML' character(len=*), parameter :: nmlname = 'lifire_inparm' !----------------------------------------------------------------------- real(r8) :: cli_scale, boreal_peatfire_c, pot_hmn_ign_counts_alpha @@ -361,6 +382,9 @@ subroutine FireReadNML( this, NLFilename ) borpeat_fire_soilmoist_denom, nonborpeat_fire_precip_denom if ( this%need_lightning_and_popdens() ) then + ! Read the base namelist + call this%BaseFireReadNML( bounds, NLFilename ) + cli_scale = cnfire_const%cli_scale boreal_peatfire_c = cnfire_const%boreal_peatfire_c non_boreal_peatfire_c = cnfire_const%non_boreal_peatfire_c @@ -392,9 +416,11 @@ subroutine FireReadNML( this, NLFilename ) read(unitn, nml=lifire_inparm, iostat=ierr) if (ierr /= 0) then call endrun(msg="ERROR reading "//nmlname//"namelist"//errmsg(sourcefile, __LINE__)) + return end if else call endrun(msg="ERROR could NOT find "//nmlname//"namelist"//errmsg(sourcefile, __LINE__)) + return end if call relavu( unitn ) end if @@ -447,7 +473,7 @@ subroutine FireReadNML( this, NLFilename ) end if end if - end subroutine FireReadNML + end subroutine CNFireReadNML !----------------------------------------------------------------------- subroutine CNFireFluxes (this, bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, & diff --git a/src/biogeochem/CNFireFactoryMod.F90 b/src/biogeochem/CNFireFactoryMod.F90 index 44407da927..9d1962d4ff 100644 --- a/src/biogeochem/CNFireFactoryMod.F90 +++ b/src/biogeochem/CNFireFactoryMod.F90 @@ -9,17 +9,20 @@ module CNFireFactoryMod use abortutils , only : endrun use shr_log_mod , only : errMsg => shr_log_errMsg use clm_varctl , only : iulog + use shr_kind_mod , only : CS => SHR_KIND_CS implicit none save private ! ! !PUBLIC ROUTINES: - public :: CNFireReadNML ! read the fire namelist + public :: CNFireReadNML ! 
read the fire factory namelist to get the CN fire_method to use public :: create_cnfire_method ! create an object of class fire_method_type + ! For Unit Testing: + public :: CNFireSetFireMethod ! Set the fire_method ! !PRIVATE DATA MEMBERS: - character(len=80), private :: fire_method = "li2014qianfrc" + character(len=CS), private :: fire_method = "UNSET" character(len=*), parameter, private :: sourcefile = & __FILE__ @@ -63,9 +66,11 @@ subroutine CNFireReadNML( NLFilename ) read(unitn, nml=cnfire_inparm, iostat=ierr) if (ierr /= 0) then call endrun(msg="ERROR reading "//nmlname//"namelist"//errmsg(sourcefile, __LINE__)) + return end if else call endrun(msg="ERROR finding "//nmlname//"namelist"//errmsg(sourcefile, __LINE__)) + return end if call relavu( unitn ) end if @@ -82,7 +87,7 @@ end subroutine CNFireReadNML !----------------------------------------------------------------------- !----------------------------------------------------------------------- - subroutine create_cnfire_method( NLFilename, cnfire_method ) + subroutine create_cnfire_method( cnfire_method ) ! ! !DESCRIPTION: ! Create and return an object of fire_method_type. The particular type @@ -98,11 +103,9 @@ subroutine create_cnfire_method( NLFilename, cnfire_method ) use decompMod , only : bounds_type ! ! !ARGUMENTS: - character(len=*), intent(in) :: NLFilename ! Namelist filename class(fire_method_type), allocatable, intent(inout) :: cnfire_method ! ! 
!LOCAL VARIABLES: - character(len=*), parameter :: subname = 'create_cnfire_method' !----------------------------------------------------------------------- select case (trim(fire_method)) @@ -119,13 +122,29 @@ subroutine create_cnfire_method( NLFilename, cnfire_method ) allocate(cnfire_li2024_type :: cnfire_method) case default - write(iulog,*) subname//' ERROR: unknown method: ', fire_method - call endrun(msg=errMsg(sourcefile, __LINE__)) + write(iulog,*) 'Unrecognized fire_method ' // errMsg(sourcefile, __LINE__) + call endrun( msg='Unknown option for namelist item fire_method: ' // trim(fire_method) ) + ! For unit-testing, make sure a valid cnfire_method is set and return, otherwise it fails with a seg-fault + allocate(cnfire_nofire_type :: cnfire_method) + return end select - call cnfire_method%FireReadNML( NLFilename ) end subroutine create_cnfire_method !----------------------------------------------------------------------- + subroutine CNFireSetFireMethod( fire_method_in ) + ! + ! !DESCRIPTION: + ! Set the fire_method (to be used in unit testing) + ! + ! !USES: + ! !ARGUMENTS: + character(len=*), intent(IN) :: fire_method_in + + fire_method = trim(fire_method_in) + + end subroutine CNFireSetFireMethod + !----------------------------------------------------------------------- + end module CNFireFactoryMod diff --git a/src/biogeochem/CNFireNoFireMod.F90 b/src/biogeochem/CNFireNoFireMod.F90 index e0605585e9..da6f28cd0d 100644 --- a/src/biogeochem/CNFireNoFireMod.F90 +++ b/src/biogeochem/CNFireNoFireMod.F90 @@ -8,6 +8,8 @@ module CNFireNoFireMod ! ! !USES: use shr_kind_mod , only : r8 => shr_kind_r8 + use abortutils , only : endrun + use clm_varctl , only : iulog use decompMod , only : bounds_type use atm2lndType , only : atm2lnd_type use CNVegStateType , only : cnveg_state_type @@ -36,10 +38,15 @@ module CNFireNoFireMod contains ! ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: need_lightning_and_popdens - procedure, public :: CNFireArea ! 
Calculate fire area + procedure, public :: need_lightning_and_popdens ! If need lightning and/or population density (always .false. here) + procedure, public :: NoFireInit ! Initiialization + procedure, public :: FireInit => NoFireInit ! Initiialization + procedure, public :: CNFireArea ! Calculate fire area end type cnfire_nofire_type + character(len=*), parameter, private :: sourcefile = & + __FILE__ + contains !----------------------------------------------------------------------- @@ -56,6 +63,28 @@ function need_lightning_and_popdens(this) need_lightning_and_popdens = .false. end function need_lightning_and_popdens + !----------------------------------------------------------------------- + subroutine NoFireInit( this, bounds ) + ! + ! !DESCRIPTION: + ! Initialize No Fire module + use shr_fire_emis_mod, only : shr_fire_emis_mechcomps_n + use shr_log_mod , only : errMsg => shr_log_errMsg + ! !ARGUMENTS: + class(cnfire_nofire_type) :: this + type(bounds_type), intent(in) :: bounds + + if ( shr_fire_emis_mechcomps_n > 0) then + write(iulog,*) "Fire emissions can NOT be active for fire_method=nofire" // & + errMsg(sourcefile, __LINE__) + call endrun(msg="Having fire emissions on requires fire_method to be something besides nofire" ) + return + end if + call this%CNFireInit( bounds ) + + end subroutine NoFireInit + !----------------------------------------------------------------------- + !----------------------------------------------------------------------- subroutine CNFireArea (this, bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, & num_exposedvegp, filter_exposedvegp, num_noexposedvegp, filter_noexposedvegp, & diff --git a/src/biogeochem/CNVegetationFacade.F90 b/src/biogeochem/CNVegetationFacade.F90 index 47099708f4..b47237690d 100644 --- a/src/biogeochem/CNVegetationFacade.F90 +++ b/src/biogeochem/CNVegetationFacade.F90 @@ -204,10 +204,12 @@ subroutine Init(this, bounds, NLFilename, nskip_steps, params_ncid) ! ! 
!USES: use CNFireFactoryMod , only : create_cnfire_method + use CNFireNoFireMod , only : cnfire_nofire_type use clm_varcon , only : c13ratio, c14ratio use ncdio_pio , only : file_desc_t use filterMod , only : filter use decompMod , only : get_proc_clumps + ! ! !ARGUMENTS: class(cn_vegetation_type), intent(inout) :: this @@ -302,10 +304,21 @@ subroutine Init(this, bounds, NLFilename, nskip_steps, params_ncid) ! use_cndv is true so that it can be used in associate statements (nag compiler ! complains otherwise) call this%dgvs_inst%Init(bounds) - end if - call create_cnfire_method(NLFilename, this%cnfire_method) - call this%cnfire_method%CNFireReadParams( params_ncid ) + call create_cnfire_method( this%cnfire_method ) + call this%cnfire_method%FireInit( bounds ) + call this%cnfire_method%FireReadNML( bounds, NLFilename ) + call this%cnfire_method%CNFireReadParams( params_ncid ) + end if + + ! + ! For FATES we HAVE to allocate a cnfire_method even through it won't be used + ! cnfire_method is passed down to CN routines that are used for FATES + ! so there has to be something allocated that is passed down + ! + if ( use_fates_bgc )then + allocate(cnfire_nofire_type :: this%cnfire_method) + end if end subroutine Init @@ -584,7 +597,7 @@ subroutine Init2(this, bounds, NLFilename) character(len=*), parameter :: subname = 'Init2' !----------------------------------------------------------------------- - call CNDriverInit(bounds, NLFilename, this%cnfire_method) + call CNDriverInit(bounds, NLFilename) if (use_cndv) then call dynCNDV_init(bounds, this%dgvs_inst) diff --git a/src/biogeochem/FATESFireFactoryMod.F90 b/src/biogeochem/FATESFireFactoryMod.F90 index 0352994e5f..94e3eee4c3 100644 --- a/src/biogeochem/FATESFireFactoryMod.F90 +++ b/src/biogeochem/FATESFireFactoryMod.F90 @@ -42,7 +42,8 @@ subroutine create_fates_fire_data_method( fates_fire_data_method ) ! The particular type is determined based on a namelist parameter. ! ! 
!USES: - use clm_varctl, only: fates_spitfire_mode + use clm_varctl, only: fates_spitfire_mode, use_fates_sp, use_fates_ed_st3 + use shr_fire_emis_mod, only : shr_fire_emis_mechcomps_n use FATESFireBase, only: fates_fire_base_type use FATESFireNoDataMod, only: fates_fire_no_data_type use FATESFireDataMod, only: fates_fire_data_type @@ -51,25 +52,69 @@ subroutine create_fates_fire_data_method( fates_fire_data_method ) class(fates_fire_base_type), allocatable, intent(inout) :: fates_fire_data_method ! function result ! ! !LOCAL VARIABLES: - integer :: current_case - - character(len=*), parameter :: subname = 'create_fates_fire_data_method' !----------------------------------------------------------------------- - current_case = fates_spitfire_mode - - select case (current_case) + ! + ! For FATES options that bypass fire... + ! + if ( use_fates_sp .or. use_fates_ed_st3 )then + ! + ! Make sure fire-emissions is NOT on + ! + if ( shr_fire_emis_mechcomps_n > 0 )then + if ( use_fates_sp )then + write(iulog,*) "Fire emissions can NOT be on with FATES-SP mode: ", & + errMsg(sourcefile, __LINE__) + call endrun(msg="Fire emission with FATES requires FATES to NOT be in Satellite Phenology (SP) mode" ) + else if ( use_fates_ed_st3 )then + write(iulog,*) "Fire emissions can NOT be on with FATES ST3 mode: ", & + errMsg(sourcefile, __LINE__) + call endrun(msg="Fire emission with FATES requires FATES to NOT be in Static Stand Structure mode" ) + end if + ! For unit-testing return with a FATESFireData type, so there isn't a run-time error + ! Also do the FATESFireData type, as using FATESFireNoData type will fail with an error + allocate(fates_fire_data_type :: fates_fire_data_method) + return + end if + allocate(fates_fire_no_data_type :: fates_fire_data_method) + else + ! + ! For regular FATES options that include fire + ! 
+ select case (fates_spitfire_mode) - case (no_fire:scalar_lightning) - allocate(fates_fire_no_data_type :: fates_fire_data_method) - case (lightning_from_data:anthro_suppression) - allocate(fates_fire_data_type :: fates_fire_data_method) + ! No-fire, scalar-lightning and successful_ignitions ALL do NOT need input data from the base class + case (no_fire:scalar_lightning) + allocate(fates_fire_no_data_type :: fates_fire_data_method) + case (successful_ignitions) + allocate(fates_fire_no_data_type :: fates_fire_data_method) + ! Lightning from data, and the anthro types (ignition and suppression) need lightning data from the base class + case (lightning_from_data) + allocate(fates_fire_data_type :: fates_fire_data_method) + case (anthro_ignitions:anthro_suppression) + allocate(fates_fire_data_type :: fates_fire_data_method) - case default - write(iulog,*) subname//' ERROR: unknown method: ', fates_spitfire_mode - call endrun(msg=errMsg(sourcefile, __LINE__)) + case default + write(iulog,*) 'Unrecognized fates_spitfire_mode option = ', fates_spitfire_mode, ' in: ', & + errMsg(sourcefile, __LINE__) + call endrun(msg="Unknown option for namelist item fates_spitfire_mode:") + ! For unit-testing, make sure a valid fates_fire_data_method is set and return, otherwise it fails with a seg-fault + allocate(fates_fire_no_data_type :: fates_fire_data_method) - end select + end select + ! ------------------------------------------------------------------------------------------------------- + ! For now we die with a error whenever fire-emissions are turned on -- because this isn't setup in FATES + ! + if ( fates_spitfire_mode /= no_fire ) then + if ( shr_fire_emis_mechcomps_n > 0 )then + write(iulog,*) "Fire emissions can NOT be on with FATES currently: ", & + errMsg(sourcefile, __LINE__) + call endrun(msg="Fire emission with FATES can NOT currently be turned on (see issue #1045)" ) + return + end if + end if + ! 
------------------------------------------------------------------------------------------------------- + end if end subroutine create_fates_fire_data_method diff --git a/src/biogeochem/FATESFireNoDataMod.F90 b/src/biogeochem/FATESFireNoDataMod.F90 index 4034b68e97..65b7bae5af 100644 --- a/src/biogeochem/FATESFireNoDataMod.F90 +++ b/src/biogeochem/FATESFireNoDataMod.F90 @@ -27,6 +27,8 @@ module FATESFireNoDataMod contains ! !PUBLIC MEMBER FUNCTIONS: + procedure, public :: FATESNoFireInit! Initialization + procedure, public :: FireInit => FATESNoFireInit procedure, public :: need_lightning_and_popdens procedure, public :: GetLight24 ! Return the 24-hour averaged lightning data procedure, public :: GetGDP ! Return the global gdp data @@ -40,6 +42,28 @@ module FATESFireNoDataMod contains + !----------------------------------------------------------------------- + subroutine FATESNoFireInit( this, bounds ) + ! + ! !DESCRIPTION: + ! Initialize No Fire data module for FATES + use shr_fire_emis_mod, only : shr_fire_emis_mechcomps_n + use shr_log_mod , only : errMsg => shr_log_errMsg + use clm_varctl , only : fates_spitfire_mode + ! !ARGUMENTS: + class(fates_fire_no_data_type) :: this + type(bounds_type), intent(in) :: bounds + + if ( (shr_fire_emis_mechcomps_n > 0) .and. (fates_spitfire_mode == 0) ) then + write(iulog,*) "Fire emissions can NOT be active for fates_spitfire_mode=0 (no_fire)", & + errMsg(sourcefile, __LINE__) + call endrun(msg="Having fire emissions on requires fates_spitfire_mode to be something besides no_fire (0)" ) + return + end if + call this%CNFireInit( bounds ) + + end subroutine FATESNoFireInit + !------------------------------------------------------------------------ function need_lightning_and_popdens(this) ! 
!ARGUMENTS: diff --git a/src/biogeochem/SatellitePhenologyMod.F90 b/src/biogeochem/SatellitePhenologyMod.F90 index a6d5fde22d..7747aad5fa 100644 --- a/src/biogeochem/SatellitePhenologyMod.F90 +++ b/src/biogeochem/SatellitePhenologyMod.F90 @@ -13,12 +13,13 @@ module SatellitePhenologyMod use shr_log_mod , only : errMsg => shr_log_errMsg use decompMod , only : bounds_type use abortutils , only : endrun - use clm_varctl , only : iulog, use_lai_streams + use clm_varctl , only : iulog, use_lai_streams, single_column use perf_mod , only : t_startf, t_stopf use spmdMod , only : masterproc, mpicom, iam use laiStreamMod , only : lai_init, lai_advance, lai_interp use clm_varctl , only : use_fates - use ncdio_pio + use ncdio_pio , only : ncd_pio_openfile, ncd_inqfdims, check_dim_size, ncd_io + use ncdio_pio , only : ncd_pio_closefile, file_desc_t ! ! !PUBLIC TYPES: implicit none @@ -56,6 +57,9 @@ subroutine SatellitePhenologyInit (bounds) ! ! !USES: use shr_infnan_mod, only : nan => shr_infnan_nan, assignment(=) + use shr_fire_emis_mod, only : shr_fire_emis_mechcomps_n + use shr_log_mod, only : errMsg => shr_log_errMsg + use clm_varctl, only : use_cn ! ! !ARGUMENTS: type(bounds_type), intent(in) :: bounds @@ -63,6 +67,12 @@ subroutine SatellitePhenologyInit (bounds) ! !LOCAL VARIABLES: integer :: ier ! error code !----------------------------------------------------------------------- + if ( (shr_fire_emis_mechcomps_n > 0) .and. (.not. use_cn) ) then + write(iulog,*) "Fire emissions can NOT be active for Satellite Phenology mode (SP)" // & + errMsg(sourcefile, __LINE__) + call endrun(msg="Fire emission requires BGC to be on rather than a Satelitte Pheonology (SP) case") + return + end if InterpMonths1 = -999 ! saved month index @@ -363,7 +373,7 @@ subroutine readAnnualVegetation (bounds, canopystate_inst) call ncd_pio_openfile (ncid, trim(locfn), 0) call ncd_inqfdims (ncid, isgrid2d, ni, nj, ns) - if (ldomain%ns /= ns .or. ldomain%ni /= ni .or. 
ldomain%nj /= nj) then + if (.not. single_column .and. (ldomain%ns /= ns .or. ldomain%ni /= ni .or. ldomain%nj /= nj)) then write(iulog,*)trim(subname), 'ldomain and input file do not match dims ' write(iulog,*)trim(subname), 'ldomain%ni,ni,= ',ldomain%ni,ni write(iulog,*)trim(subname), 'ldomain%nj,nj,= ',ldomain%nj,nj diff --git a/src/biogeochem/test/CMakeLists.txt b/src/biogeochem/test/CMakeLists.txt index e22a720523..2ebe27c76f 100644 --- a/src/biogeochem/test/CMakeLists.txt +++ b/src/biogeochem/test/CMakeLists.txt @@ -3,3 +3,5 @@ add_subdirectory(CNVegComputeSeed_test) add_subdirectory(CNPhenology_test) add_subdirectory(Latbaset_test) add_subdirectory(DustEmis_test) +add_subdirectory(CNFireFactory_test) +add_subdirectory(FATESFireFactory_test) diff --git a/src/biogeochem/test/CNFireFactory_test/CMakeLists.txt b/src/biogeochem/test/CNFireFactory_test/CMakeLists.txt new file mode 100644 index 0000000000..032e0fa953 --- /dev/null +++ b/src/biogeochem/test/CNFireFactory_test/CMakeLists.txt @@ -0,0 +1,7 @@ +set (pfunit_sources + test_CNFireFactory.pf +) + +add_pfunit_ctest(CNFireFActory + TEST_SOURCES "${pfunit_sources}" + LINK_LIBRARIES clm csm_share esmf) diff --git a/src/biogeochem/test/CNFireFactory_test/test_CNFireFactory.pf b/src/biogeochem/test/CNFireFactory_test/test_CNFireFactory.pf new file mode 100644 index 0000000000..5b0f52c8d4 --- /dev/null +++ b/src/biogeochem/test/CNFireFactory_test/test_CNFireFactory.pf @@ -0,0 +1,240 @@ +module test_CNFireFactory + + ! Tests of CNFireFactory + + use funit + use unittestSubgridMod, only : bounds + use FireMethodType , only : fire_method_type + use CNFireFactoryMod + use ESMF, only : ESMF_SUCCESS + use shr_kind_mod , only : r8 => shr_kind_r8 + use clm_varctl, only : use_cn, iulog + + implicit none + + @TestCase + type, extends(TestCase) :: TestCNFireFactory + logical :: initialized = .false. 
+ class(fire_method_type), allocatable :: cnfire_method + contains + procedure :: setUp + procedure :: tearDown + procedure :: FireFactInit + procedure :: turn_fire_emis_on + end type TestCNFireFactory + + contains + + !----------------------------------------------------------------------- + + subroutine setUp(this) + use shr_log_mod, only : shr_log_setLogUnit + use ESMF, only : ESMF_Initialize, ESMF_IsInitialized + use shr_sys_mod, only : shr_sys_system + class(TestCNFireFactory), intent(inout) :: this + + integer :: rc + logical :: esmf_initialized + + esmf_initialized = ESMF_IsInitialized( rc=rc ) + if (rc /= ESMF_SUCCESS) then + stop 'Error in ESMF_IsInitialized' + end if + if ( .not. esmf_initialized )then + call ESMF_Initialize( rc=rc ) + if (rc /= ESMF_SUCCESS) then + stop 'Error in ESMF_Initialize' + end if + end if + use_cn = .true. + iulog = 6 + call shr_log_setLogUnit(iulog) + this%initialized = .false. + + end subroutine setUp + !----------------------------------------------------------------------- + + subroutine tearDown(this) + use shr_sys_mod, only : shr_sys_system + use shr_log_mod, only : shr_log_setLogUnit + class(TestCNFireFactory), intent(inout) :: this + + integer :: rc + + ! A clean method should be added to the fire method class structures + if ( this%initialized )then + call this%cnfire_method%FireClean() + deallocate( this%cnfire_method ) + end if + ! IMPORTANT NOTE: DO NOT CALL ESMF_Finalize HERE! + ! Calling ESMF_Finalize here, with full ESMF, means you couldn't call ESMF_Initialize again + this%initialized = .false. 
+ + end subroutine tearDown + + !----------------------------------------------------------------------- + + subroutine FireFactInit(this, fire_method) + class(TestCNFireFactory), intent(inout) :: this + character(len=*), intent(in) :: fire_method + + if ( trim(fire_method) /= "DO_NOT_SET") then + call CNFireSetFireMethod( fire_method_in=fire_method ) + end if + call create_cnfire_method(this%cnfire_method) + call this%cnfire_method%FireInit(bounds) + this%initialized = .true. + + end subroutine FireFactInit + + !----------------------------------------------------------------------- + + subroutine turn_fire_emis_on(this) + use shr_fire_emis_mod, only : shr_fire_emis_readnl, shr_fire_emis_mechcomps_n + use shr_sys_mod, only : shr_sys_system + class(TestCNFireFactory), intent(inout) :: this + + ! NOTE!: This is bad that this can be done directly without having it done through a namelist, or setter! + shr_fire_emis_mechcomps_n = 2 + end subroutine turn_fire_emis_on + + !----------------------------------------------------------------------- + + @Test + subroutine fire_method_not_set_fails(this) + class(TestCNFireFactory), intent(inout) :: this + character(100) :: expected_msg + + call this%FireFactInit( fire_method = "DO_NOT_SET") + expected_msg = "ABORTED: Unknown option for namelist item fire_method: UNSET" + @assertExceptionRaised(expected_msg) + + end subroutine fire_method_not_set_fails + + !----------------------------------------------------------------------- + + @Test + subroutine fire_method_bad_fails(this) + class(TestCNFireFactory), intent(inout) :: this + character(100) :: expected_msg + + call this%FireFactInit( fire_method = "ZZTOP") ! 
Set to an invalid option + expected_msg = "ABORTED: Unknown option for namelist item fire_method: ZZTOP" + @assertExceptionRaised(expected_msg) + + end subroutine fire_method_bad_fails + + !----------------------------------------------------------------------- + + @Test + subroutine nofire_with_fire_emis_fails(this) + class(TestCNFireFactory), intent(inout) :: this + character(100) :: expected_msg + + call this%turn_fire_emis_on() + call this%FireFactInit( fire_method = "nofire") + expected_msg = "ABORTED: Having fire emissions on requires fire_method to be something besides nofire" + @assertExceptionRaised(expected_msg) + + end subroutine nofire_with_fire_emis_fails + + !----------------------------------------------------------------------- + + @Test + subroutine spcase_with_fire_emis_fails(this) + use SatellitePhenologyMod, only : SatellitePhenologyInit + class(TestCNFireFactory), intent(inout) :: this + character(100) :: expected_msg + + use_cn = .false. + call this%turn_fire_emis_on() + call SatellitePhenologyInit( bounds ) + expected_msg = "ABORTED: Fire emission requires BGC to be on rather than a Satelitte Pheonology (SP) case" + @assertExceptionRaised(expected_msg) + + end subroutine spcase_with_fire_emis_fails + + !----------------------------------------------------------------------- + + @Test + subroutine li2014_works(this) + class(TestCNFireFactory), intent(inout) :: this + + call this%FireFactInit( fire_method = "li2014qianfrc") + + end subroutine li2014_works + + !----------------------------------------------------------------------- + + ! + ! Test that default settings with ALL of the Li Fire options work by default + ! (These tests are done one by one which makes them dead simple, but take up more code + ! see the looping option below) + ! 
+ @Test + subroutine li2016_works(this) + class(TestCNFireFactory), intent(inout) :: this + + call this%FireFactInit( fire_method = "li2016crufrc") + + end subroutine li2016_works + + !----------------------------------------------------------------------- + + @Test + subroutine li2021_works(this) + class(TestCNFireFactory), intent(inout) :: this + + call this%FireFactInit( fire_method = "li2021gswpfrc") + + end subroutine li2021_works + + !----------------------------------------------------------------------- + + @Test + subroutine li2024_works(this) + class(TestCNFireFactory), intent(inout) :: this + + call this%FireFactInit( fire_method = "li2024gswpfrc") + + end subroutine li2024_works + + !----------------------------------------------------------------------- + + @Test + subroutine li2024crujra_works(this) + class(TestCNFireFactory), intent(inout) :: this + + call this%FireFactInit( fire_method = "li2024crujra") + + end subroutine li2024crujra_works + + !----------------------------------------------------------------------- + + ! + ! Test that default settings with ALL of the Li Fire options work when fire emissions + ! are turned on. This test is done with a loop rather than one by one as above. + ! This cuts down on the total test code, but also means that setUp and tearDown have + ! to be explicitly called for example. Setup is always called before a test, and tearDown + ! after each test) + ! + + @Test + subroutine all_li_options_with_fire_emis_works(this) + class(TestCNFireFactory), intent(inout) :: this + integer, parameter :: noptions = 5 + integer :: i + character(len=*), parameter :: fire_method_options(noptions) = (/ 'li2014qianfrc', 'li2016crufrc ', 'li2021gswpfrc', 'li2024gswpfrc', 'li2024crujra '/) + + do i = 1, noptions + call this%setUp() ! This is needed because of the loop over all options + call this%turn_fire_emis_on() + call this%FireFactInit( fire_method = fire_method_options(i) ) + call this%tearDown() ! 
This is needed because of the loop over all options + end do + + end subroutine all_li_options_with_fire_emis_works + + !----------------------------------------------------------------------- + +end module test_CNFireFactory diff --git a/src/biogeochem/test/FATESFireFactory_test/CMakeLists.txt b/src/biogeochem/test/FATESFireFactory_test/CMakeLists.txt new file mode 100644 index 0000000000..80ac4114e7 --- /dev/null +++ b/src/biogeochem/test/FATESFireFactory_test/CMakeLists.txt @@ -0,0 +1,7 @@ +set (pfunit_sources + test_FATESFireFactory.pf +) + +add_pfunit_ctest(FATESFireFActory + TEST_SOURCES "${pfunit_sources}" + LINK_LIBRARIES clm csm_share esmf) diff --git a/src/biogeochem/test/FATESFireFactory_test/test_FATESFireFactory.pf b/src/biogeochem/test/FATESFireFactory_test/test_FATESFireFactory.pf new file mode 100644 index 0000000000..fba39098a8 --- /dev/null +++ b/src/biogeochem/test/FATESFireFactory_test/test_FATESFireFactory.pf @@ -0,0 +1,167 @@ +module test_FATESFireFactory + + ! Tests of FATESFireFactory + + use funit + use unittestSubgridMod, only : bounds + use FATESFireBase, only : fates_fire_base_type + use FATESFireFactoryMod + use shr_kind_mod , only : r8 => shr_kind_r8, CS => shr_kind_CS + use clm_varctl, only : iulog, fates_spitfire_mode, use_fates, use_fates_sp, use_fates_ed_st3 + + implicit none + + @TestCase + type, extends(TestCase) :: TestFATESFireFactory + logical :: initialized = .false. + class(fates_fire_base_type), allocatable :: fates_fire_method + contains + procedure :: setUp + procedure :: tearDown + procedure :: FireFactInit + procedure :: turn_fire_emis_on + end type TestFATESFireFactory + + contains + + !----------------------------------------------------------------------- + + subroutine setUp(this) + use shr_log_mod, only : shr_log_setLogUnit + use ESMF, only : ESMF_Initialize + use shr_sys_mod, only : shr_sys_system + class(TestFATESFireFactory), intent(inout) :: this + + call ESMF_Initialize() + use_fates = .true. 
+ use_fates_sp = .false. + use_fates_ed_st3 = .false. + fates_spitfire_mode = no_fire + iulog = 6 + call shr_log_setLogUnit(iulog) + this%initialized = .false. + + end subroutine setUp + !----------------------------------------------------------------------- + + subroutine tearDown(this) + use shr_sys_mod, only : shr_sys_system + use shr_log_mod, only : shr_log_setLogUnit + class(TestFATESFireFactory), intent(inout) :: this + + if ( this%initialized )then + call this%fates_fire_method%FireClean() + deallocate( this%fates_fire_method ) + end if + this%initialized = .false. + + end subroutine tearDown + + !----------------------------------------------------------------------- + + subroutine FireFactInit(this) + class(TestFATESFireFactory), intent(inout) :: this + + call create_fates_fire_data_method(this%fates_fire_method) + call this%fates_fire_method%FireInit(bounds) + this%initialized = .true. + + end subroutine FireFactInit + + !----------------------------------------------------------------------- + + subroutine turn_fire_emis_on(this) + use shr_fire_emis_mod, only : shr_fire_emis_readnl, shr_fire_emis_mechcomps_n + use shr_sys_mod, only : shr_sys_system + class(TestFATESFireFactory), intent(inout) :: this + + ! NOTE!: This is bad that this can be done directly without having it done through a namelist, or setter! 
+ shr_fire_emis_mechcomps_n = 2 + end subroutine turn_fire_emis_on + + !----------------------------------------------------------------------- + + @Test + subroutine fates_spitfire_mode_bad_fails(this) + class(TestFATESFireFactory), intent(inout) :: this + character(100) :: expected_msg + + fates_spitfire_mode = -1 + call this%FireFactInit( ) + expected_msg = "ABORTED: Unknown option for namelist item fates_spitfire_mode:" + @assertExceptionRaised(expected_msg) + + end subroutine fates_spitfire_mode_bad_fails + + !----------------------------------------------------------------------- + + @Test + subroutine fates_sp_case_with_fire_emis_fails(this) + use clm_varctl, only : use_fates_sp + class(TestFATESFireFactory), intent(inout) :: this + character(100) :: expected_msg + + use_fates_sp = .true. + call this%turn_fire_emis_on() + call this%FireFactInit( ) + expected_msg = "ABORTED: Fire emission with FATES requires FATES to NOT be in Satellite Phenology (SP) mode" + @assertExceptionRaised(expected_msg) + + end subroutine fates_sp_case_with_fire_emis_fails + + !----------------------------------------------------------------------- + + @Test + subroutine fates_st3_case_with_fire_emis_fails(this) + use clm_varctl, only : use_fates_ed_st3 + class(TestFATESFireFactory), intent(inout) :: this + character(100) :: expected_msg + + use_fates_ed_st3 = .true. 
+ call this%turn_fire_emis_on() + call this%FireFactInit( ) + expected_msg = "ABORTED: Fire emission with FATES requires FATES to NOT be in Static Stand Structure mode" + @assertExceptionRaised(expected_msg) + + end subroutine fates_st3_case_with_fire_emis_fails + + !----------------------------------------------------------------------- + + @Test + subroutine fates_no_spitfire_case_with_fire_emis_fails(this) + class(TestFATESFireFactory), intent(inout) :: this + character(100) :: expected_msg + + call this%turn_fire_emis_on() + fates_spitfire_mode = no_fire + call this%FireFactInit( ) + expected_msg = "ABORTED: Having fire emissions on requires fates_spitfire_mode to be something besides no_fire (0)" + @assertExceptionRaised(expected_msg) + + end subroutine fates_no_spitfire_case_with_fire_emis_fails + + !----------------------------------------------------------------------- + + @Test + subroutine all_fates_spitfire_options_with_fire_emis_fails(this) + class(TestFATESFireFactory), intent(inout) :: this + integer, parameter :: noptions = anthro_suppression + integer :: i + character(100) :: expected_msg + + do i = scalar_lightning, noptions + call this%setUp() + call this%turn_fire_emis_on() + fates_spitfire_mode = i + use_fates_sp = .false. 
+ call this%FireFactInit( ) + expected_msg = "ABORTED: Fire emission with FATES can NOT currently be turned on (see issue #1045)" + @assertExceptionRaised(expected_msg) + call this%tearDown() + end do + + end subroutine all_fates_spitfire_options_with_fire_emis_fails + + !----------------------------------------------------------------------- + +end module test_FATESFireFactory diff --git a/src/biogeophys/UrbanParamsType.F90 b/src/biogeophys/UrbanParamsType.F90 index 4b7b80e4fe..c6443897fe 100644 --- a/src/biogeophys/UrbanParamsType.F90 +++ b/src/biogeophys/UrbanParamsType.F90 @@ -9,9 +9,9 @@ module UrbanParamsType use shr_log_mod , only : errMsg => shr_log_errMsg use abortutils , only : endrun use decompMod , only : bounds_type, subgrid_level_gridcell, subgrid_level_landunit - use clm_varctl , only : iulog, fsurdat + use clm_varctl , only : iulog, fsurdat, single_column use clm_varcon , only : grlnd, spval - use LandunitType , only : lun + use LandunitType , only : lun ! implicit none save @@ -26,21 +26,21 @@ module UrbanParamsType ! ! 
!PRIVATE TYPE type urbinp_type - real(r8), pointer :: canyon_hwr (:,:) - real(r8), pointer :: wtlunit_roof (:,:) - real(r8), pointer :: wtroad_perv (:,:) - real(r8), pointer :: em_roof (:,:) - real(r8), pointer :: em_improad (:,:) - real(r8), pointer :: em_perroad (:,:) - real(r8), pointer :: em_wall (:,:) - real(r8), pointer :: alb_roof_dir (:,:,:) - real(r8), pointer :: alb_roof_dif (:,:,:) - real(r8), pointer :: alb_improad_dir (:,:,:) - real(r8), pointer :: alb_improad_dif (:,:,:) - real(r8), pointer :: alb_perroad_dir (:,:,:) - real(r8), pointer :: alb_perroad_dif (:,:,:) - real(r8), pointer :: alb_wall_dir (:,:,:) - real(r8), pointer :: alb_wall_dif (:,:,:) + real(r8), pointer :: canyon_hwr (:,:) + real(r8), pointer :: wtlunit_roof (:,:) + real(r8), pointer :: wtroad_perv (:,:) + real(r8), pointer :: em_roof (:,:) + real(r8), pointer :: em_improad (:,:) + real(r8), pointer :: em_perroad (:,:) + real(r8), pointer :: em_wall (:,:) + real(r8), pointer :: alb_roof_dir (:,:,:) + real(r8), pointer :: alb_roof_dif (:,:,:) + real(r8), pointer :: alb_improad_dir (:,:,:) + real(r8), pointer :: alb_improad_dif (:,:,:) + real(r8), pointer :: alb_perroad_dir (:,:,:) + real(r8), pointer :: alb_perroad_dif (:,:,:) + real(r8), pointer :: alb_wall_dir (:,:,:) + real(r8), pointer :: alb_wall_dif (:,:,:) real(r8), pointer :: ht_roof (:,:) real(r8), pointer :: wind_hgt_canyon (:,:) real(r8), pointer :: tk_wall (:,:,:) @@ -92,14 +92,14 @@ module UrbanParamsType real(r8), pointer :: eflx_traffic_factor (:) ! lun multiplicative traffic factor for sensible heat flux from urban traffic (-) contains - procedure, public :: Init - + procedure, public :: Init + end type urbanparams_type ! ! 
!Urban control variables - character(len= *), parameter, public :: urban_hac_off = 'OFF' - character(len= *), parameter, public :: urban_hac_on = 'ON' - character(len= *), parameter, public :: urban_wasteheat_on = 'ON_WASTEHEAT' + character(len= *), parameter, public :: urban_hac_off = 'OFF' + character(len= *), parameter, public :: urban_hac_on = 'ON' + character(len= *), parameter, public :: urban_wasteheat_on = 'ON_WASTEHEAT' character(len= 16), public :: urban_hac = urban_hac_off logical, public :: urban_explicit_ac = .true. ! whether to use explicit, time-varying AC adoption rate logical, public :: urban_traffic = .false. ! urban traffic fluxes @@ -112,7 +112,7 @@ module UrbanParamsType character(len=*), parameter, private :: sourcefile = & __FILE__ - !----------------------------------------------------------------------- + !----------------------------------------------------------------------- contains @@ -132,11 +132,11 @@ subroutine Init(this, bounds) ! ! !ARGUMENTS: class(urbanparams_type) :: this - type(bounds_type) , intent(in) :: bounds + type(bounds_type) , intent(in) :: bounds ! ! !LOCAL VARIABLES: integer :: j,l,c,p,g ! indices - integer :: nc,fl,ib ! indices + integer :: nc,fl,ib ! indices integer :: dindx ! urban density type index integer :: ier ! error status real(r8) :: sumvf ! 
sum of view factors for wall or road @@ -182,12 +182,12 @@ subroutine Init(this, bounds) allocate(this%em_perroad (begl:endl)) ; this%em_perroad (:) = nan allocate(this%em_wall (begl:endl)) ; this%em_wall (:) = nan allocate(this%alb_roof_dir (begl:endl,numrad)) ; this%alb_roof_dir (:,:) = nan - allocate(this%alb_roof_dif (begl:endl,numrad)) ; this%alb_roof_dif (:,:) = nan - allocate(this%alb_improad_dir (begl:endl,numrad)) ; this%alb_improad_dir (:,:) = nan - allocate(this%alb_perroad_dir (begl:endl,numrad)) ; this%alb_perroad_dir (:,:) = nan - allocate(this%alb_improad_dif (begl:endl,numrad)) ; this%alb_improad_dif (:,:) = nan - allocate(this%alb_perroad_dif (begl:endl,numrad)) ; this%alb_perroad_dif (:,:) = nan - allocate(this%alb_wall_dir (begl:endl,numrad)) ; this%alb_wall_dir (:,:) = nan + allocate(this%alb_roof_dif (begl:endl,numrad)) ; this%alb_roof_dif (:,:) = nan + allocate(this%alb_improad_dir (begl:endl,numrad)) ; this%alb_improad_dir (:,:) = nan + allocate(this%alb_perroad_dir (begl:endl,numrad)) ; this%alb_perroad_dir (:,:) = nan + allocate(this%alb_improad_dif (begl:endl,numrad)) ; this%alb_improad_dif (:,:) = nan + allocate(this%alb_perroad_dif (begl:endl,numrad)) ; this%alb_perroad_dif (:,:) = nan + allocate(this%alb_wall_dir (begl:endl,numrad)) ; this%alb_wall_dir (:,:) = nan allocate(this%alb_wall_dif (begl:endl,numrad)) ; this%alb_wall_dif (:,:) = nan allocate(this%eflx_traffic_factor (begl:endl)) ; this%eflx_traffic_factor (:) = nan @@ -261,7 +261,7 @@ subroutine Init(this, bounds) ! | \ vsr / | | r | | \ vww / s ! | \ / | h o w | \ / k ! wall | \ / | wall | a | | \ / y - ! |vwr \ / vwr| | d | |vrw \ / vsw + ! |vwr \ / vwr| | d | |vrw \ / vsw ! ------\/------ - - |-----\/----- ! road wall | ! <----- w ----> | @@ -272,20 +272,20 @@ subroutine Init(this, bounds) ! vsw = view factor of sky for wall ! vsr + vwr + vwr = 1 vrw + vww + vsw = 1 ! - ! Source: Masson, V. (2000) A physically-based scheme for the urban energy budget in + ! 
Source: Masson, V. (2000) A physically-based scheme for the urban energy budget in ! atmospheric models. Boundary-Layer Meteorology 94:357-397 ! ! - Calculate urban land unit aerodynamic constants using Macdonald (1998) as used in ! Grimmond and Oke (1999) ! --------------------------------------------------------------------------------------- - - ! road -- sky view factor -> 1 as building height -> 0 + + ! road -- sky view factor -> 1 as building height -> 0 ! and -> 0 as building height -> infinity this%vf_sr(l) = sqrt(lun%canyon_hwr(l)**2 + 1._r8) - lun%canyon_hwr(l) this%vf_wr(l) = 0.5_r8 * (1._r8 - this%vf_sr(l)) - ! one wall -- sky view factor -> 0.5 as building height -> 0 + ! one wall -- sky view factor -> 0.5 as building height -> 0 ! and -> 0 as building height -> infinity this%vf_sw(l) = 0.5_r8 * (lun%canyon_hwr(l) + 1._r8 - sqrt(lun%canyon_hwr(l)**2+1._r8)) / lun%canyon_hwr(l) @@ -311,7 +311,7 @@ subroutine Init(this, bounds) ! Grimmond and Oke (1999) !---------------------------------------------------------------------------------- - ! Calculate plan area index + ! Calculate plan area index plan_ai = lun%canyon_hwr(l)/(lun%canyon_hwr(l) + 1._r8) ! Building shape shortside/longside ratio (e.g. 1 = square ) @@ -344,7 +344,7 @@ subroutine Init(this, bounds) (1 - lun%z_d_town(l) / lun%ht_roof(l)) * frontal_ai)**(-0.5_r8)) end if - else ! Not urban point + else ! Not urban point this%eflx_traffic_factor(l) = spval this%t_building_min(l) = spval @@ -366,7 +366,7 @@ end subroutine Init !----------------------------------------------------------------------- subroutine UrbanInput(begg, endg, mode) ! - ! !DESCRIPTION: + ! !DESCRIPTION: ! Allocate memory and read in urban input data ! ! 
!USES: @@ -375,7 +375,7 @@ subroutine UrbanInput(begg, endg, mode) use fileutils , only : getavu, relavu, getfil, opnfil use spmdMod , only : masterproc use domainMod , only : ldomain - use ncdio_pio , only : file_desc_t, ncd_io, ncd_inqvdlen, ncd_inqfdims + use ncdio_pio , only : file_desc_t, ncd_io, ncd_inqvdlen, ncd_inqfdims use ncdio_pio , only : ncd_pio_openfile, ncd_pio_closefile, ncd_inqdid, ncd_inqdlen ! ! !ARGUMENTS: @@ -392,7 +392,7 @@ subroutine UrbanInput(begg, endg, mode) integer :: numrad_i ! input grid: number of solar bands (VIS/NIR) integer :: numurbl_i ! input grid: number of urban landunits integer :: ier,ret ! error status - logical :: isgrid2d ! true => file is 2d + logical :: isgrid2d ! true => file is 2d logical :: readvar ! true => variable is on dataset logical :: has_numurbl ! true => numurbl dimension is on dataset character(len=32) :: subname = 'UrbanInput' ! subroutine name @@ -403,11 +403,11 @@ subroutine UrbanInput(begg, endg, mode) if (mode == 'initialize') then ! Read urban data - + if (masterproc) then write(iulog,*)' Reading in urban input data from fsurdat file ...' end if - + call getfil (fsurdat, locfn, 0) call ncd_pio_openfile (ncid, locfn, 0) @@ -428,20 +428,20 @@ subroutine UrbanInput(begg, endg, mode) if ( nlevurb == 0 ) return ! 
Allocate dynamic memory - allocate(urbinp%canyon_hwr(begg:endg, numurbl), & - urbinp%wtlunit_roof(begg:endg, numurbl), & + allocate(urbinp%canyon_hwr(begg:endg, numurbl), & + urbinp%wtlunit_roof(begg:endg, numurbl), & urbinp%wtroad_perv(begg:endg, numurbl), & - urbinp%em_roof(begg:endg, numurbl), & - urbinp%em_improad(begg:endg, numurbl), & - urbinp%em_perroad(begg:endg, numurbl), & - urbinp%em_wall(begg:endg, numurbl), & - urbinp%alb_roof_dir(begg:endg, numurbl, numrad), & - urbinp%alb_roof_dif(begg:endg, numurbl, numrad), & - urbinp%alb_improad_dir(begg:endg, numurbl, numrad), & - urbinp%alb_perroad_dir(begg:endg, numurbl, numrad), & - urbinp%alb_improad_dif(begg:endg, numurbl, numrad), & - urbinp%alb_perroad_dif(begg:endg, numurbl, numrad), & - urbinp%alb_wall_dir(begg:endg, numurbl, numrad), & + urbinp%em_roof(begg:endg, numurbl), & + urbinp%em_improad(begg:endg, numurbl), & + urbinp%em_perroad(begg:endg, numurbl), & + urbinp%em_wall(begg:endg, numurbl), & + urbinp%alb_roof_dir(begg:endg, numurbl, numrad), & + urbinp%alb_roof_dif(begg:endg, numurbl, numrad), & + urbinp%alb_improad_dir(begg:endg, numurbl, numrad), & + urbinp%alb_perroad_dir(begg:endg, numurbl, numrad), & + urbinp%alb_improad_dif(begg:endg, numurbl, numrad), & + urbinp%alb_perroad_dif(begg:endg, numurbl, numrad), & + urbinp%alb_wall_dir(begg:endg, numurbl, numrad), & urbinp%alb_wall_dif(begg:endg, numurbl, numrad), & urbinp%ht_roof(begg:endg, numurbl), & urbinp%wind_hgt_canyon(begg:endg, numurbl), & @@ -461,7 +461,7 @@ subroutine UrbanInput(begg, endg, mode) endif call ncd_inqfdims (ncid, isgrid2d, ni, nj, ns) - if (ldomain%ns /= ns .or. ldomain%ni /= ni .or. ldomain%nj /= nj) then + if (.not. single_column .and. (ldomain%ns /= ns .or. ldomain%ni /= ni .or. 
ldomain%nj /= nj)) then write(iulog,*)trim(subname), 'ldomain and input file do not match dims ' write(iulog,*)trim(subname), 'ldomain%ni,ni,= ',ldomain%ni,ni write(iulog,*)trim(subname), 'ldomain%nj,nj,= ',ldomain%nj,nj @@ -655,7 +655,7 @@ subroutine UrbanInput(begg, endg, mode) call ncd_pio_closefile(ncid) if (masterproc) then - write(iulog,*)' Sucessfully read urban input data' + write(iulog,*)' Sucessfully read urban input data' write(iulog,*) end if @@ -955,7 +955,3 @@ end function IsProgBuildTemp !----------------------------------------------------------------------- end module UrbanParamsType - - - - diff --git a/src/cpl/nuopc/lnd_comp_nuopc.F90 b/src/cpl/nuopc/lnd_comp_nuopc.F90 index b7ef7216d9..af1426bf9b 100644 --- a/src/cpl/nuopc/lnd_comp_nuopc.F90 +++ b/src/cpl/nuopc/lnd_comp_nuopc.F90 @@ -642,8 +642,10 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return call ESMF_GridCompGet(gcomp, vm=vm, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + call t_startf ('lc_lnd_set_decomp_and_domain_from_readmesh') call lnd_set_decomp_and_domain_from_readmesh(driver='cmeps', vm=vm, & meshfile_lnd=model_meshfile, meshfile_mask=meshfile_mask, mesh_ctsm=mesh, ni=ni, nj=nj, rc=rc) + call t_stopf ('lc_lnd_set_decomp_and_domain_from_readmesh') if (ChkErr(rc,__LINE__,u_FILE_u)) return end if diff --git a/src/cpl/share_esmf/FireDataBaseType.F90 b/src/cpl/share_esmf/FireDataBaseType.F90 index b84e3bfa33..13323cd924 100644 --- a/src/cpl/share_esmf/FireDataBaseType.F90 +++ b/src/cpl/share_esmf/FireDataBaseType.F90 @@ -26,23 +26,25 @@ module FireDataBaseType type, abstract, extends(fire_method_type) :: fire_base_type private ! !PRIVATE MEMBER DATA: - real(r8), public, pointer :: forc_hdm(:) ! Human population density - type(shr_strdata_type) :: sdat_hdm ! Human population density input data stream - real(r8), public, pointer :: forc_lnfm(:) ! Lightning frequency - type(shr_strdata_type) :: sdat_lnfm ! 
Lightning frequency input data stream + real(r8), public, pointer :: forc_hdm(:) => NULL() ! Human population density + type(shr_strdata_type) :: sdat_hdm ! Human population density input data stream + real(r8), public, pointer :: forc_lnfm(:) => NULL() ! Lightning frequency + type(shr_strdata_type) :: sdat_lnfm ! Lightning frequency input data stream - real(r8), public, pointer :: gdp_lf_col(:) ! col global real gdp data (k US$/capita) - real(r8), public, pointer :: peatf_lf_col(:) ! col global peatland fraction data (0-1) - integer , public, pointer :: abm_lf_col(:) ! col global peak month of crop fire emissions + real(r8), public, pointer :: gdp_lf_col(:) => NULL() ! col global real gdp data (k US$/capita) + real(r8), public, pointer :: peatf_lf_col(:) => NULL() ! col global peatland fraction data (0-1) + integer , public, pointer :: abm_lf_col(:) => NULL() ! col global peak month of crop fire emissions contains ! ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: FireInit => BaseFireInit ! Initialization of Fire procedure, public :: BaseFireInit ! Initialization of Fire + procedure, public :: FireInit => BaseFireInit ! Initialization of Fire + procedure, public :: BaseFireClean ! Clean up data and deallocate data + procedure, public :: FireClean => BaseFireClean ! Clean up data and deallocate data procedure, public :: FireInterp ! Interpolate fire data - procedure(FireReadNML_interface), public, deferred :: & - FireReadNML ! Read in namelist for Fire + procedure, public :: BaseFireReadNML ! Read in the namelist for fire + procedure, public :: ReadFireNML => BaseFireReadNML ! Read in the namelist for fire procedure(need_lightning_and_popdens_interface), public, deferred :: & need_lightning_and_popdens ! Returns true if need lightning & popdens ! 
@@ -78,7 +80,7 @@ end function need_lightning_and_popdens_interface contains !============================================================================== - subroutine FireReadNML_interface( this, NLFilename ) + subroutine BaseFireReadNML( this, bounds, NLFilename ) ! ! !DESCRIPTION: ! Read the namelist for Fire @@ -87,11 +89,21 @@ subroutine FireReadNML_interface( this, NLFilename ) ! ! !ARGUMENTS: class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds character(len=*), intent(in) :: NLFilename ! Namelist filename - end subroutine FireReadNML_interface + + ! Read the namelists for the fire data and do the preparation needed on them + if ( this%need_lightning_and_popdens() ) then + call this%hdm_init(bounds, NLFilename) + call this%hdm_interp(bounds) + call this%lnfm_init(bounds, NLFilename) + call this%lnfm_interp(bounds) + call this%surfdataread(bounds) + end if + end subroutine BaseFireReadNML !================================================================ - subroutine BaseFireInit( this, bounds, NLFilename ) + subroutine BaseFireInit( this, bounds ) ! ! !DESCRIPTION: ! Initialize CN Fire module @@ -101,9 +113,7 @@ subroutine BaseFireInit( this, bounds, NLFilename ) ! !ARGUMENTS: class(fire_base_type) :: this type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename !----------------------------------------------------------------------- - if ( this%need_lightning_and_popdens() ) then ! Allocate lightning forcing data allocate( this%forc_lnfm(bounds%begg:bounds%endg) ) @@ -118,16 +128,36 @@ subroutine BaseFireInit( this, bounds, NLFilename ) allocate(this%peatf_lf_col(bounds%begc:bounds%endc)) ! 
Allocates peak month of crop fire emissions allocate(this%abm_lf_col(bounds%begc:bounds%endc)) - - call this%hdm_init(bounds, NLFilename) - call this%hdm_interp(bounds) - call this%lnfm_init(bounds, NLFilename) - call this%lnfm_interp(bounds) - call this%surfdataread(bounds) end if end subroutine BaseFireInit + !================================================================ + subroutine BaseFireClean( this ) + ! + ! !DESCRIPTION: + ! Clean fire data + ! !USES: + ! + ! !ARGUMENTS: + class(fire_base_type) :: this + !----------------------------------------------------------------------- + + if ( this%need_lightning_and_popdens() ) then + deallocate( this%forc_lnfm ) + deallocate( this%forc_hdm ) + deallocate( this%gdp_lf_col ) + deallocate( this%peatf_lf_col ) + deallocate( this%abm_lf_col ) + this%forc_lnfm => NULL() + this%forc_hdm => NULL() + this%gdp_lf_col => NULL() + this%peatf_lf_col => NULL() + this%abm_lf_col => NULL() + end if + + end subroutine BaseFireClean + !================================================================ subroutine FireInterp(this,bounds) ! diff --git a/src/init_interp/initInterp.F90 b/src/init_interp/initInterp.F90 index e0d56aed62..3ccdcd9b58 100644 --- a/src/init_interp/initInterp.F90 +++ b/src/init_interp/initInterp.F90 @@ -75,6 +75,9 @@ module initInterpMod ! patch-level variables) logical :: init_interp_fill_missing_with_natveg + ! If true, fill missing urban landunit type with closest urban high density (HD) landunit + logical :: init_interp_fill_missing_urban_with_HD + character(len=*), parameter, private :: sourcefile = & __FILE__ @@ -106,11 +109,13 @@ subroutine initInterp_readnl(NLFilename) !----------------------------------------------------------------------- namelist /clm_initinterp_inparm/ & - init_interp_method, init_interp_fill_missing_with_natveg + init_interp_method, init_interp_fill_missing_with_natveg, & + init_interp_fill_missing_urban_with_HD ! 
Initialize options to default values, in case they are not specified in the namelist init_interp_method = ' ' init_interp_fill_missing_with_natveg = .false. + init_interp_fill_missing_urban_with_HD = .false. if (masterproc) then unitn = getavu() @@ -130,6 +135,7 @@ subroutine initInterp_readnl(NLFilename) call shr_mpi_bcast (init_interp_method, mpicom) call shr_mpi_bcast (init_interp_fill_missing_with_natveg, mpicom) + call shr_mpi_bcast (init_interp_fill_missing_urban_with_HD, mpicom) if (masterproc) then write(iulog,*) ' ' @@ -287,12 +293,36 @@ subroutine initInterp (filei, fileo, bounds, glc_behavior) status = pio_get_att(ncidi, pio_global, & 'icol_vegetated_or_bare_soil', & subgrid_special_indices%icol_vegetated_or_bare_soil) + status = pio_get_att(ncidi, pio_global, & + 'icol_urban_roof', & + subgrid_special_indices%icol_urban_roof) + status = pio_get_att(ncidi, pio_global, & + 'icol_urban_sunwall', & + subgrid_special_indices%icol_urban_sunwall) + status = pio_get_att(ncidi, pio_global, & + 'icol_urban_shadewall', & + subgrid_special_indices%icol_urban_shadewall) + status = pio_get_att(ncidi, pio_global, & + 'icol_urban_impervious_road', & + subgrid_special_indices%icol_urban_impervious_road) + status = pio_get_att(ncidi, pio_global, & + 'icol_urban_pervious_road', & + subgrid_special_indices%icol_urban_pervious_road) status = pio_get_att(ncidi, pio_global, & 'ilun_vegetated_or_bare_soil', & subgrid_special_indices%ilun_vegetated_or_bare_soil) status = pio_get_att(ncidi, pio_global, & 'ilun_crop', & subgrid_special_indices%ilun_crop) + status = pio_get_att(ncidi, pio_global, & + 'ilun_urban_tbd', & + subgrid_special_indices%ilun_urban_TBD) + status = pio_get_att(ncidi, pio_global, & + 'ilun_urban_hd', & + subgrid_special_indices%ilun_urban_HD) + status = pio_get_att(ncidi, pio_global, & + 'ilun_urban_md', & + subgrid_special_indices%ilun_urban_MD) ! BACKWARDS_COMPATIBILITY(wjs, 2021-04-16) ilun_landice_multiple_elevation_classes has ! 
been renamed to ilun_landice. For now we need to handle both possibilities for the @@ -321,10 +351,26 @@ subroutine initInterp (filei, fileo, bounds, glc_behavior) subgrid_special_indices%ipft_not_vegetated write(iulog,*)'icol_vegetated_or_bare_soil = ' , & subgrid_special_indices%icol_vegetated_or_bare_soil + write(iulog,*)'icol_urban_roof = ' , & + subgrid_special_indices%icol_urban_roof + write(iulog,*)'icol_urban_sunwall = ' , & + subgrid_special_indices%icol_urban_sunwall + write(iulog,*)'icol_urban_shadewall = ' , & + subgrid_special_indices%icol_urban_shadewall + write(iulog,*)'icol_urban_impervious_road = ' , & + subgrid_special_indices%icol_urban_impervious_road + write(iulog,*)'icol_urban_pervious_road = ' , & + subgrid_special_indices%icol_urban_pervious_road write(iulog,*)'ilun_vegetated_or_bare_soil = ' , & subgrid_special_indices%ilun_vegetated_or_bare_soil write(iulog,*)'ilun_crop = ' , & subgrid_special_indices%ilun_crop + write(iulog,*)'ilun_urban_tbd = ' , & + subgrid_special_indices%ilun_urban_TBD + write(iulog,*)'ilun_urban_hd = ' , & + subgrid_special_indices%ilun_urban_HD + write(iulog,*)'ilun_urban_md = ' , & + subgrid_special_indices%ilun_urban_MD write(iulog,*)'ilun_landice = ' , & subgrid_special_indices%ilun_landice write(iulog,*)'create_glacier_mec_landunits = ', & @@ -820,13 +866,13 @@ subroutine findMinDist( dimname, begi, endi, bego, endo, ncidi, ncido, & write(iulog,*)'calling set_subgrid_info for ',trim(dimname), ' for input' end if call set_subgrid_info(beg=begi, end=endi, dimname=dimname, use_glob=.true., & - ncid=ncidi, active=activei, subgrid=subgridi) + ncid=ncidi, active=activei, subgrid=subgridi, allow_scm=.false.) if (masterproc) then write(iulog,*)'calling set_subgrid_info for ',trim(dimname), ' for output' end if call set_subgrid_info(beg=bego, end=endo, dimname=dimname, use_glob=.false., & - ncid=ncido, active=activeo, subgrid=subgrido) + ncid=ncido, active=activeo, subgrid=subgrido, allow_scm=.true.) 
select case (interp_method) case (interp_method_general) @@ -839,6 +885,7 @@ subroutine findMinDist( dimname, begi, endi, bego, endo, ncidi, ncido, & glc_behavior=glc_behavior, & glc_elevclasses_same=glc_elevclasses_same, & fill_missing_with_natveg=init_interp_fill_missing_with_natveg, & + fill_missing_urban_with_HD=init_interp_fill_missing_urban_with_HD, & mindist_index=minindx) case (interp_method_finidat_areas) if (masterproc) then @@ -859,7 +906,7 @@ end subroutine findMinDist !======================================================================= - subroutine set_subgrid_info(beg, end, dimname, use_glob, ncid, active, subgrid) + subroutine set_subgrid_info(beg, end, dimname, use_glob, ncid, active, subgrid, allow_scm) ! -------------------------------------------------------------------- ! arguments @@ -869,6 +916,7 @@ subroutine set_subgrid_info(beg, end, dimname, use_glob, ncid, active, subgrid) logical , intent(in) :: use_glob ! if .true., use the 'glob' form of ncd_io logical , intent(out) :: active(beg:end) type(subgrid_type) , intent(inout) :: subgrid + logical , intent(in) :: allow_scm ! if .true., allow single column model subset of data ! ! 
local variables integer :: n @@ -896,32 +944,32 @@ subroutine set_subgrid_info(beg, end, dimname, use_glob, ncid, active, subgrid) end if if (dimname == 'pft') then - call read_var_double(ncid=ncid, varname='pfts1d_lon' , data=subgrid%lon , dim1name='pft', use_glob=use_glob) - call read_var_double(ncid=ncid, varname='pfts1d_lat' , data=subgrid%lat , dim1name='pft', use_glob=use_glob) - call read_var_int(ncid=ncid, varname='pfts1d_itypveg', data=subgrid%ptype, dim1name='pft', use_glob=use_glob) - call read_var_int(ncid=ncid, varname='pfts1d_itypcol', data=subgrid%ctype, dim1name='pft', use_glob=use_glob) - call read_var_int(ncid=ncid, varname='pfts1d_ityplun', data=subgrid%ltype, dim1name='pft', use_glob=use_glob) - call read_var_int(ncid=ncid, varname='pfts1d_active' , data=itemp , dim1name='pft', use_glob=use_glob) + call read_var_double(ncid=ncid, varname='pfts1d_lon' , data=subgrid%lon , dim1name='pft', use_glob=use_glob, allow_scm=allow_scm) + call read_var_double(ncid=ncid, varname='pfts1d_lat' , data=subgrid%lat , dim1name='pft', use_glob=use_glob, allow_scm=allow_scm) + call read_var_int(ncid=ncid, varname='pfts1d_itypveg', data=subgrid%ptype, dim1name='pft', use_glob=use_glob, allow_scm=allow_scm) + call read_var_int(ncid=ncid, varname='pfts1d_itypcol', data=subgrid%ctype, dim1name='pft', use_glob=use_glob, allow_scm=allow_scm) + call read_var_int(ncid=ncid, varname='pfts1d_ityplun', data=subgrid%ltype, dim1name='pft', use_glob=use_glob, allow_scm=allow_scm) + call read_var_int(ncid=ncid, varname='pfts1d_active' , data=itemp , dim1name='pft', use_glob=use_glob, allow_scm=allow_scm) if (associated(subgrid%topoglc)) then - call read_var_double(ncid=ncid, varname='pfts1d_topoglc', data=subgrid%topoglc, dim1name='pft', use_glob=use_glob) + call read_var_double(ncid=ncid, varname='pfts1d_topoglc', data=subgrid%topoglc, dim1name='pft', use_glob=use_glob, allow_scm=allow_scm) end if else if (dimname == 'column') then - call read_var_double(ncid=ncid, 
varname='cols1d_lon' , data=subgrid%lon , dim1name='column', use_glob=use_glob) - call read_var_double(ncid=ncid, varname='cols1d_lat' , data=subgrid%lat , dim1name='column', use_glob=use_glob) - call read_var_int(ncid=ncid, varname='cols1d_ityp' , data=subgrid%ctype, dim1name='column', use_glob=use_glob) - call read_var_int(ncid=ncid, varname='cols1d_ityplun', data=subgrid%ltype, dim1name='column', use_glob=use_glob) - call read_var_int(ncid=ncid, varname='cols1d_active' , data=itemp , dim1name='column', use_glob=use_glob) + call read_var_double(ncid=ncid, varname='cols1d_lon' , data=subgrid%lon , dim1name='column', use_glob=use_glob, allow_scm=allow_scm) + call read_var_double(ncid=ncid, varname='cols1d_lat' , data=subgrid%lat , dim1name='column', use_glob=use_glob, allow_scm=allow_scm) + call read_var_int(ncid=ncid, varname='cols1d_ityp' , data=subgrid%ctype, dim1name='column', use_glob=use_glob, allow_scm=allow_scm) + call read_var_int(ncid=ncid, varname='cols1d_ityplun', data=subgrid%ltype, dim1name='column', use_glob=use_glob, allow_scm=allow_scm) + call read_var_int(ncid=ncid, varname='cols1d_active' , data=itemp , dim1name='column', use_glob=use_glob, allow_scm=allow_scm) if (associated(subgrid%topoglc)) then - call read_var_double(ncid=ncid, varname='cols1d_topoglc', data=subgrid%topoglc, dim1name='column', use_glob=use_glob) + call read_var_double(ncid=ncid, varname='cols1d_topoglc', data=subgrid%topoglc, dim1name='column', use_glob=use_glob, allow_scm=allow_scm) end if else if (dimname == 'landunit') then - call read_var_double(ncid=ncid, varname='land1d_lon' , data=subgrid%lon , dim1name='landunit', use_glob=use_glob) - call read_var_double(ncid=ncid, varname='land1d_lat' , data=subgrid%lat , dim1name='landunit', use_glob=use_glob) - call read_var_int(ncid=ncid, varname='land1d_ityplun', data=subgrid%ltype, dim1name='landunit', use_glob=use_glob) - call read_var_int(ncid=ncid, varname='land1d_active' , data=itemp , dim1name='landunit', 
use_glob=use_glob) + call read_var_double(ncid=ncid, varname='land1d_lon' , data=subgrid%lon , dim1name='landunit', use_glob=use_glob, allow_scm=allow_scm) + call read_var_double(ncid=ncid, varname='land1d_lat' , data=subgrid%lat , dim1name='landunit', use_glob=use_glob, allow_scm=allow_scm) + call read_var_int(ncid=ncid, varname='land1d_ityplun', data=subgrid%ltype, dim1name='landunit', use_glob=use_glob, allow_scm=allow_scm) + call read_var_int(ncid=ncid, varname='land1d_active' , data=itemp , dim1name='landunit', use_glob=use_glob, allow_scm=allow_scm) else if (dimname == 'gridcell') then - call read_var_double(ncid=ncid, varname='grid1d_lon' , data=subgrid%lon , dim1name='gridcell', use_glob=use_glob) - call read_var_double(ncid=ncid, varname='grid1d_lat' , data=subgrid%lat , dim1name='gridcell', use_glob=use_glob) + call read_var_double(ncid=ncid, varname='grid1d_lon' , data=subgrid%lon , dim1name='gridcell', use_glob=use_glob, allow_scm=allow_scm) + call read_var_double(ncid=ncid, varname='grid1d_lat' , data=subgrid%lat , dim1name='gridcell', use_glob=use_glob, allow_scm=allow_scm) ! All gridcells in the restart file are active itemp(beg:end) = 1 @@ -942,7 +990,7 @@ subroutine set_subgrid_info(beg, end, dimname, use_glob, ncid, active, subgrid) contains - subroutine read_var_double(ncid, varname, data, dim1name, use_glob) + subroutine read_var_double(ncid, varname, data, dim1name, use_glob, allow_scm) ! Wraps the ncd_io call, providing logic related to whether we're using the 'glob' ! form of ncd_io type(file_desc_t) , intent(inout) :: ncid @@ -950,15 +998,29 @@ subroutine read_var_double(ncid, varname, data, dim1name, use_glob) real(r8), pointer , intent(inout) :: data(:) character(len=*) , intent(in) :: dim1name logical , intent(in) :: use_glob ! if .true., use the 'glob' form of ncd_io + logical , intent(in) :: allow_scm ! if .true., allow single column model subset of data + + ! 
local + character(16) :: readflag + + if (allow_scm) then + readflag='read' + else + ! Flag to distinguish the times during IC interpolation when running in single column mode but + ! need to read the full data grid. Normally single_column means + ! "read the data grid and extract the closest column" but + ! during IC interpolation you need to read in the full grid to be interpolated regardless of the single_column flag. + readflag='read_noscm' + endif if (use_glob) then - call ncd_io(ncid=ncid, varname=varname, flag='read', data=data) + call ncd_io(ncid=ncid, varname=varname, flag=trim(readflag), data=data) else - call ncd_io(ncid=ncid, varname=varname, flag='read', data=data, dim1name=dim1name) + call ncd_io(ncid=ncid, varname=varname, flag=trim(readflag), data=data, dim1name=dim1name) end if end subroutine read_var_double - subroutine read_var_int(ncid, varname, data, dim1name, use_glob) + subroutine read_var_int(ncid, varname, data, dim1name, use_glob, allow_scm) ! Wraps the ncd_io call, providing logic related to whether we're using the 'glob' ! form of ncd_io type(file_desc_t) , intent(inout) :: ncid @@ -966,11 +1028,25 @@ subroutine read_var_int(ncid, varname, data, dim1name, use_glob) integer, pointer , intent(inout) :: data(:) character(len=*) , intent(in) :: dim1name logical , intent(in) :: use_glob ! if .true., use the 'glob' form of ncd_io + logical , intent(in) :: allow_scm ! if .true., allow single column model subset of data + + ! local + character(16) :: readflag + + if (allow_scm) then + readflag='read' + else + ! Flag to distinguish the times during IC interpolation when running in single column mode but + ! need to read the full data grid. Normally single_column means + ! "read the data grid and extract the closest column" but + ! during IC interpolation you need to read in the full grid to be interpolated regardless of the single_column flag. 
+ readflag='read_noscm' + endif if (use_glob) then - call ncd_io(ncid=ncid, varname=varname, flag='read', data=data) + call ncd_io(ncid=ncid, varname=varname, flag=trim(readflag), data=data) else - call ncd_io(ncid=ncid, varname=varname, flag='read', data=data, dim1name=dim1name) + call ncd_io(ncid=ncid, varname=varname, flag=trim(readflag), data=data, dim1name=dim1name) end if end subroutine read_var_int @@ -1038,7 +1114,7 @@ subroutine interp_1d_double (varname, varname_i, dimname, begi, endi, bego, endo end if allocate (rbufsli(begi:endi), rbufslo(bego:endo)) - call ncd_io(ncid=ncidi, varname=trim(varname_i), flag='read', data=rbufsli) + call ncd_io(ncid=ncidi, varname=trim(varname_i), flag='read_noscm', data=rbufsli) call ncd_io(ncid=ncido, varname=trim(varname), flag='read', data=rbufslo, & dim1name=dimname) @@ -1080,7 +1156,7 @@ subroutine interp_1d_int (varname, varname_i, dimname, begi, endi, bego, endo, n allocate (ibufsli(begi:endi), ibufslo(bego:endo)) - call ncd_io(ncid=ncidi, varname=trim(varname_i), flag='read', & + call ncd_io(ncid=ncidi, varname=trim(varname_i), flag='read_noscm', & data=ibufsli) call ncd_io(ncid=ncido, varname=trim(varname), flag='read', & data=ibufslo, dim1name=dimname) diff --git a/src/init_interp/initInterpMindist.F90 b/src/init_interp/initInterpMindist.F90 index f6853b1cd3..9fdc9f81dd 100644 --- a/src/init_interp/initInterpMindist.F90 +++ b/src/init_interp/initInterpMindist.F90 @@ -32,11 +32,20 @@ module initInterpMindist type, public :: subgrid_special_indices_type integer :: ipft_not_vegetated integer :: icol_vegetated_or_bare_soil + integer :: icol_urban_roof + integer :: icol_urban_sunwall + integer :: icol_urban_shadewall + integer :: icol_urban_impervious_road + integer :: icol_urban_pervious_road integer :: ilun_vegetated_or_bare_soil integer :: ilun_crop integer :: ilun_landice + integer :: ilun_urban_TBD + integer :: ilun_urban_HD + integer :: ilun_urban_MD contains procedure :: is_vegetated_landunit ! 
returns true if the given landunit type is natural veg or crop + procedure :: is_urban_landunit ! returns true if the given landunit type is urban end type subgrid_special_indices_type type, public :: subgrid_type @@ -58,8 +67,10 @@ module initInterpMindist private :: set_glc_must_be_same_type private :: set_ice_adjustable_type private :: do_fill_missing_with_natveg + private :: do_fill_missing_urban_with_HD private :: is_sametype private :: is_baresoil + private :: is_urban_HD character(len=*), parameter, private :: sourcefile = & __FILE__ @@ -147,7 +158,7 @@ end subroutine destroy_subgrid_type subroutine set_mindist(begi, endi, bego, endo, activei, activeo, subgridi, subgrido, & subgrid_special_indices, glc_behavior, glc_elevclasses_same, & - fill_missing_with_natveg, mindist_index) + fill_missing_with_natveg, fill_missing_urban_with_HD, mindist_index) ! -------------------------------------------------------------------- ! arguments @@ -165,7 +176,7 @@ subroutine set_mindist(begi, endi, bego, endo, activei, activeo, subgridi, subgr logical , intent(in) :: glc_elevclasses_same ! If false: if an output type cannot be found in the input, code aborts - ! If true: if an output type cannot be found in the input, fill with closest natural + ! If true: if a non-urban output type cannot be found in the input, fill with closest natural ! veg column (using bare soil for patch-level variables) ! ! NOTE: always treated as true for natural veg and crop landunits/columns/patches in @@ -173,6 +184,11 @@ subroutine set_mindist(begi, endi, bego, endo, activei, activeo, subgridi, subgr ! use the closest natural veg column, regardless of the value of this flag. logical , intent(in) :: fill_missing_with_natveg + + ! If false: if an urban output type cannot be found in the input, code aborts + ! 
If true: if an urban output type cannot be found in the input, fill with closest urban HD + logical , intent(in) :: fill_missing_urban_with_HD + integer , intent(out) :: mindist_index(bego:endo) ! ! local variables @@ -187,6 +203,8 @@ subroutine set_mindist(begi, endi, bego, endo, activei, activeo, subgridi, subgr ! considered the same type. This is only valid for glc points, and is only valid ! for subgrid name = 'pft' or 'column'. logical :: glc_must_be_same_type_o(bego:endo) + + character(len=*), parameter :: subname = 'set_mindist' ! -------------------------------------------------------------------- if (associated(subgridi%topoglc) .and. associated(subgrido%topoglc)) then @@ -221,7 +239,8 @@ subroutine set_mindist(begi, endi, bego, endo, activei, activeo, subgridi, subgr subgridi = subgridi, subgrido = subgrido, & subgrid_special_indices = subgrid_special_indices, & glc_must_be_same_type = glc_must_be_same_type_o(no), & - veg_patch_just_considers_ptype = .true.)) then + veg_patch_just_considers_ptype = .true., & + do_fill_missing_urban_with_HD = .false.)) then dy = abs(subgrido%lat(no)-subgridi%lat(ni))*re dx = abs(subgrido%lon(no)-subgridi%lon(ni))*re * & 0.5_r8*(subgrido%coslat(no)+subgridi%coslat(ni)) @@ -260,7 +279,11 @@ subroutine set_mindist(begi, endi, bego, endo, activei, activeo, subgridi, subgr end if end do - ! If output type is not contained in input dataset, then use closest bare soil, + ! Note that do_fill_missing_with_natveg below will return .false. for pfts and columnns associated + ! with urban landunits so that the fill missing with bare soil will be implemented only for + ! non-urban types (pfts, columns, landunits, gridcells). + + ! If non-urban output type is not contained in input dataset, then use closest bare soil, ! if this point is one for which we fill missing with natveg. if ( distmin == spval .and. 
& do_fill_missing_with_natveg( & @@ -279,6 +302,50 @@ subroutine set_mindist(begi, endi, bego, endo, activei, activeo, subgridi, subgr end if end if end do + + ! If urban output type is not contained in input dataset, then use closest urban HD, + ! if this point is one for which we fill missing urban with urban HD. + else if (distmin == spval & + .and. do_fill_missing_urban_with_HD( & + fill_missing_urban_with_HD, no, subgrido, subgrid_special_indices)) then + do ni = begi, endi + if (activei(ni)) then + ! We need to call is_sametype for pfts and columns here to make sure that each + ! urban input pft and column type matches the output pft and column type. We don't + ! want to call it for landunits because they intentionally won't be the same type + ! (since we are filling missing urban landunits with HD) + if (subgrido%name .eq. 'landunit') then + if ( is_urban_HD(ni, subgridi, subgrid_special_indices)) then + dy = abs(subgrido%lat(no)-subgridi%lat(ni))*re + dx = abs(subgrido%lon(no)-subgridi%lon(ni))*re * & + 0.5_r8*(subgrido%coslat(no)+subgridi%coslat(ni)) + dist = dx*dx + dy*dy + if ( dist < distmin )then + distmin = dist + nmin = ni + end if + end if + else + if (is_sametype(ni = ni, no = no, & + subgridi = subgridi, subgrido = subgrido, & + subgrid_special_indices = subgrid_special_indices, & + glc_must_be_same_type = glc_must_be_same_type_o(no), & + veg_patch_just_considers_ptype = .false., & + do_fill_missing_urban_with_HD = .true.)) then + if ( is_urban_HD(ni, subgridi, subgrid_special_indices)) then + dy = abs(subgrido%lat(no)-subgridi%lat(ni))*re + dx = abs(subgrido%lon(no)-subgridi%lon(ni))*re * & + 0.5_r8*(subgrido%coslat(no)+subgridi%coslat(ni)) + dist = dx*dx + dy*dy + if ( dist < distmin )then + distmin = dist + nmin = ni + end if + end if + end if + end if + end if + end do end if ! 
Error conditions @@ -287,13 +354,29 @@ subroutine set_mindist(begi, endi, bego, endo, activei, activeo, subgridi, subgr &Cannot find any input points matching output point:' call subgrido%print_point(no, iulog) write(iulog,*) ' ' - write(iulog,*) 'Consider rerunning with the following in user_nl_clm:' + write(iulog,*) 'If this is an urban type' + write(iulog,*) '(ltype = ', subgrid_special_indices%ilun_urban_TBD, & + ',', subgrid_special_indices%ilun_urban_HD, & + ', or', subgrid_special_indices%ilun_urban_MD, ')' + write(iulog,*) 'then consider rerunning with the following in user_nl_clm:' + write(iulog,*) 'init_interp_fill_missing_urban_with_HD = .true.' + write(iulog,*) 'However, note that this will fill all urban missing types in the output' + write(iulog,*) 'with the closest urban high density (HD) type in the input' + write(iulog,*) 'So, you should consider whether that is what you want.' + write(iulog,*) ' ' + write(iulog,*) 'If this is a non-urban type' + write(iulog,*) '(ltype \= ',subgrid_special_indices%ilun_urban_TBD, & + ',', subgrid_special_indices%ilun_urban_HD, & + ', or', subgrid_special_indices%ilun_urban_MD, ')' + write(iulog,*) 'consider rerunning with the following in user_nl_clm:' write(iulog,*) 'init_interp_fill_missing_with_natveg = .true.' - write(iulog,*) 'However, note that this will fill all missing types in the output' + write(iulog,*) 'However, note that this will fill all non-urban missing types in the output' write(iulog,*) 'with the closest natural veg column in the input' write(iulog,*) '(using bare soil for patch-level variables).' write(iulog,*) 'So, you should consider whether that is what you want.' 
- call endrun(msg=errMsg(sourcefile, __LINE__)) + write(iulog,*) errMsg(sourcefile, __LINE__) + call endrun(msg=subname// & + ' ERROR: Cannot find any input points matching output point') end if mindist_index(no) = nmin @@ -378,7 +461,8 @@ subroutine set_single_match(begi, endi, bego, endo, activeo, subgridi, subgrido, subgridi = subgridi, subgrido = subgrido, & subgrid_special_indices = subgrid_special_indices, & glc_must_be_same_type = glc_must_be_same_type_o(no), & - veg_patch_just_considers_ptype = .false.) + veg_patch_just_considers_ptype = .false., & + do_fill_missing_urban_with_HD = .false.) if (ni_sametype) then if (found) then write(iulog,*) subname// & @@ -555,7 +639,7 @@ function do_fill_missing_with_natveg(fill_missing_with_natveg, & no, subgrido, subgrid_special_indices) ! ! !DESCRIPTION: - ! Returns true if the given output point, if missing, should be filled with the + ! Returns true if the given non-urban output point, if missing, should be filled with the ! closest natural veg point. ! ! !ARGUMENTS: @@ -576,8 +660,8 @@ function do_fill_missing_with_natveg(fill_missing_with_natveg, & if (subgrido%name == 'gridcell') then ! It makes no sense to try to fill missing with natveg for gridcell-level values do_fill_missing_with_natveg = .false. - else if (fill_missing_with_natveg) then - ! User has asked for all missing points to be filled with natveg + else if (fill_missing_with_natveg .and. .not. subgrid_special_indices%is_urban_landunit(subgrido%ltype(no))) then + ! User has asked for all non-urban missing points to be filled with natveg do_fill_missing_with_natveg = .true. else if (subgrid_special_indices%is_vegetated_landunit(subgrido%ltype(no))) then ! 
Even if user hasn't asked for it, we fill missing vegetated points (natural veg @@ -591,11 +675,46 @@ function do_fill_missing_with_natveg(fill_missing_with_natveg, & end function do_fill_missing_with_natveg + !----------------------------------------------------------------------- + function do_fill_missing_urban_with_HD(fill_missing_urban_with_HD, & + no, subgrido, subgrid_special_indices) + ! + ! !DESCRIPTION: + ! Returns true if the given urban output point, if missing, should be filled with the + ! closest urban HD point. + ! + ! !ARGUMENTS: + logical :: do_fill_missing_urban_with_HD ! function result + + ! whether we should fill ALL missing points with urban HD + logical, intent(in) :: fill_missing_urban_with_HD + + integer , intent(in) :: no + type(subgrid_type), intent(in) :: subgrido + type(subgrid_special_indices_type), intent(in) :: subgrid_special_indices + ! + ! !LOCAL VARIABLES: + + character(len=*), parameter :: subname = 'do_fill_missing_urban_with_HD' + !----------------------------------------------------------------------- + + if (subgrido%name == 'gridcell') then + ! It makes no sense to try to fill missing with urban HD for gridcell-level values + do_fill_missing_urban_with_HD = .false. + else if (fill_missing_urban_with_HD) then + ! User has asked for all missing urban points to be filled with urban HD + do_fill_missing_urban_with_HD = .true. + else + do_fill_missing_urban_with_HD = .false. + end if + + end function do_fill_missing_urban_with_HD !======================================================================= logical function is_sametype (ni, no, subgridi, subgrido, subgrid_special_indices, & - glc_must_be_same_type, veg_patch_just_considers_ptype) + glc_must_be_same_type, veg_patch_just_considers_ptype, & + do_fill_missing_urban_with_HD) ! -------------------------------------------------------------------- ! arguments @@ -620,6 +739,12 @@ logical function is_sametype (ni, no, subgridi, subgrido, subgrid_special_indice ! 
If false, then they need to have the same column and landunit types, too (as is the ! general case). logical, intent(in) :: veg_patch_just_considers_ptype + + ! If True, we allow for landunits to be different when checking if pft and column are + ! the same type, to allow for HD fill of missing urban output points. + logical, intent(in) :: do_fill_missing_urban_with_HD + + ! For urban columns/patches ! -------------------------------------------------------------------- is_sametype = .false. @@ -644,6 +769,10 @@ logical function is_sametype (ni, no, subgridi, subgrido, subgrid_special_indice subgridi%ptype(ni) == subgrido%ptype(no)) then is_sametype = .true. end if + else if (subgridi%ptype(ni) == subgrido%ptype(no) .and. & + subgridi%ctype(ni) == subgrido%ctype(no) .and. & + do_fill_missing_urban_with_HD) then + is_sametype = .true. else if (subgridi%ptype(ni) == subgrido%ptype(no) .and. & subgridi%ctype(ni) == subgrido%ctype(no) .and. & subgridi%ltype(ni) == subgrido%ltype(no)) then @@ -654,6 +783,9 @@ logical function is_sametype (ni, no, subgridi, subgrido, subgrid_special_indice subgridi%ltype(ni) == subgrid_special_indices%ilun_landice .and. & subgrido%ltype(no) == subgrid_special_indices%ilun_landice ) then is_sametype = .true. + else if (subgridi%ctype(ni) == subgrido%ctype(no) .and. & + do_fill_missing_urban_with_HD) then + is_sametype = .true. else if (subgridi%ctype(ni) == subgrido%ctype(no) .and. & subgridi%ltype(ni) == subgrido%ltype(no)) then is_sametype = .true. @@ -712,6 +844,31 @@ logical function is_baresoil (n, subgrid, subgrid_special_indices) end function is_baresoil + !----------------------------------------------------------------------- + logical function is_urban_HD (n, subgrid, subgrid_special_indices) + + ! -------------------------------------------------------------------- + ! arguments + integer , intent(in) :: n + type(subgrid_type), intent(in) :: subgrid + type(subgrid_special_indices_type), intent(in) :: subgrid_special_indices + ! 
-------------------------------------------------------------------- + + is_urban_HD = .false. + + if (subgrid%name == 'pft' .or. subgrid%name == 'column' .or. subgrid%name == 'landunit') then + if (subgrid%ltype(n) == subgrid_special_indices%ilun_urban_HD) then + is_urban_HD = .true. + end if + else + if (masterproc) then + write(iulog,*)'ERROR interpinic: is_urban_HD subgrid type ',subgrid%name,' not supported' + end if + call endrun(msg=errMsg(sourcefile, __LINE__)) + end if + + end function is_urban_HD + !----------------------------------------------------------------------- function is_vegetated_landunit(this, ltype) ! @@ -739,5 +896,30 @@ function is_vegetated_landunit(this, ltype) end function is_vegetated_landunit + function is_urban_landunit(this, ltype) + ! + ! !DESCRIPTION: + ! Returns true if the given landunit type is urban + ! + ! !USES: + ! + ! !ARGUMENTS: + logical :: is_urban_landunit ! function result + class(subgrid_special_indices_type), intent(in) :: this + integer, intent(in) :: ltype ! landunit type of interest + ! + ! !LOCAL VARIABLES: + + character(len=*), parameter :: subname = 'is_urban_landunit' + !----------------------------------------------------------------------- + + if (ltype == this%ilun_urban_TBD .or. ltype == this%ilun_urban_HD & + .or. ltype == this%ilun_urban_MD) then + is_urban_landunit = .true. + else + is_urban_landunit = .false. + end if + + end function is_urban_landunit end module initInterpMindist diff --git a/src/init_interp/initInterpMultilevelContainer.F90 b/src/init_interp/initInterpMultilevelContainer.F90 index 5a7b14832a..d26e51c71c 100644 --- a/src/init_interp/initInterpMultilevelContainer.F90 +++ b/src/init_interp/initInterpMultilevelContainer.F90 @@ -732,7 +732,7 @@ subroutine create_snow_interpolators(interp_multilevel_levsno, interp_multilevel ! 
Read snlsno_source_sgrid allocate(snlsno_source_sgrid(bounds_source%get_begc() : bounds_source%get_endc())) - call ncd_io(ncid=ncid_source, varname='SNLSNO', flag='read', & + call ncd_io(ncid=ncid_source, varname='SNLSNO', flag='read_noscm', & data=snlsno_source_sgrid) snlsno_source_sgrid(:) = abs(snlsno_source_sgrid(:)) diff --git a/src/init_interp/test/initInterpMindist_test/initInterpMindistTestUtils.pf b/src/init_interp/test/initInterpMindist_test/initInterpMindistTestUtils.pf index 04f09cb55d..7d277566af 100644 --- a/src/init_interp/test/initInterpMindist_test/initInterpMindistTestUtils.pf +++ b/src/init_interp/test/initInterpMindist_test/initInterpMindistTestUtils.pf @@ -19,12 +19,20 @@ module initInterpMindistTestUtils subgrid_special_indices_type( & ipft_not_vegetated = 0, & icol_vegetated_or_bare_soil = 10, & + icol_urban_roof = 71, & + icol_urban_sunwall = 72, & + icol_urban_shadewall = 73, & + icol_urban_impervious_road = 74, & + icol_urban_pervious_road = 75, & ilun_vegetated_or_bare_soil = 3, & ilun_crop = 4, & - ilun_landice = 5) + ilun_landice = 5, & + ilun_urban_TBD = 7, & + ilun_urban_HD = 8, & + ilun_urban_MD = 9) - ! value we can use for a special landunit; note that this just needs to differ from - ! ilun_vegetated_or_bare_soil and from ilun_crop + ! value we can use for a special landunit; note that this needs to differ from + ! 
ilun_vegetated_or_bare_soil, ilun_crop, ilun_urban_TBD, ilun_urban_HD, ilun_urban_MD integer, parameter, public :: ilun_special = 6 contains diff --git a/src/init_interp/test/initInterpMindist_test/test_set_mindist.pf b/src/init_interp/test/initInterpMindist_test/test_set_mindist.pf index 06ce20d7de..7a2c51456d 100644 --- a/src/init_interp/test/initInterpMindist_test/test_set_mindist.pf +++ b/src/init_interp/test/initInterpMindist_test/test_set_mindist.pf @@ -10,6 +10,7 @@ module test_set_mindist use clm_varcon , only: spval use unittestSimpleSubgridSetupsMod use unittestSubgridMod + use unittestUtils, only : endrun_msg use glcBehaviorMod, only: glc_behavior_type implicit none @@ -41,7 +42,7 @@ contains end subroutine tearDown subroutine wrap_set_mindist(subgridi, subgrido, mindist_index, activei, activeo, & - glc_behavior, glc_elevclasses_same, fill_missing_with_natveg) + glc_behavior, glc_elevclasses_same, fill_missing_with_natveg, fill_missing_urban_with_HD) ! Wrap the call to set_mindist. ! ! If activei / activeo are not provided, they are assumed to be .true. for all points. @@ -52,6 +53,7 @@ contains ! If glc_elevclasses_same is not present, it is assumed to be true. ! ! If fill_missing_with_natveg is not provided, it is assumed to be false + ! If fill_missing_urban_with_HD is not provided, it is assumed to be false ! Arguments: type(subgrid_type), intent(in) :: subgridi @@ -62,6 +64,7 @@ contains type(glc_behavior_type), intent(in), optional :: glc_behavior logical, intent(in), optional :: glc_elevclasses_same logical, intent(in), optional :: fill_missing_with_natveg + logical, intent(in), optional :: fill_missing_urban_with_HD ! 
Local variables: integer :: npts_i, npts_o @@ -71,6 +74,7 @@ contains type(glc_behavior_type) :: l_glc_behavior logical :: l_glc_elevclasses_same logical :: l_fill_missing_with_natveg + logical :: l_fill_missing_urban_with_HD !----------------------------------------------------------------------- @@ -115,12 +119,19 @@ contains l_fill_missing_with_natveg = .false. end if + if (present(fill_missing_urban_with_HD)) then + l_fill_missing_urban_with_HD = fill_missing_urban_with_HD + else + l_fill_missing_urban_with_HD = .false. + end if + call set_mindist(begi = 1, endi = npts_i, bego = bego, endo = endo, & activei = l_activei, activeo = l_activeo, subgridi = subgridi, subgrido = subgrido, & subgrid_special_indices = subgrid_special_indices, & glc_behavior = l_glc_behavior, & glc_elevclasses_same = l_glc_elevclasses_same, & fill_missing_with_natveg = l_fill_missing_with_natveg, & + fill_missing_urban_with_HD = l_fill_missing_urban_with_HD, & mindist_index = mindist_index) end subroutine wrap_set_mindist @@ -724,6 +735,186 @@ contains end associate end subroutine newveg_usesBaresoil + @Test + subroutine TBDurban_usesHDurban(this) + ! If there's a new urban TBD type, this should take inputs from the closest + ! HD type, if fill_missing_urban_with_HD = .true and fill_missing_with_natveg = .false. + ! 
+ class(TestSetMindist), intent(inout) :: this + type(subgrid_type) :: subgridi, subgrido + real(r8), parameter :: my_lat = 31._r8 + real(r8), parameter :: my_lon = 41._r8 + integer :: i + integer :: mindist_index(1) + + associate( & + icol_urban_roof => subgrid_special_indices%icol_urban_roof, & + icol_urban_sunwall => subgrid_special_indices%icol_urban_sunwall, & + icol_urban_shadewall => subgrid_special_indices%icol_urban_shadewall, & + icol_urban_impervious_road => subgrid_special_indices%icol_urban_impervious_road, & + icol_urban_pervious_road => subgrid_special_indices%icol_urban_pervious_road, & + ilun_urban_TBD => subgrid_special_indices%ilun_urban_TBD, & + ilun_urban_HD => subgrid_special_indices%ilun_urban_HD, & + ilun_urban_MD => subgrid_special_indices%ilun_urban_MD & + ) + + call setup_landunit_ncols(ltype=ilun_urban_TBD, & + ctypes=[icol_urban_roof,icol_urban_sunwall,icol_urban_shadewall, & + icol_urban_impervious_road,icol_urban_pervious_road], & + cweights=[0.6_r8,0.1_r8,0.1_r8,0.1_r8,0.1_r8], & + ptype=0) + + call create_subgrid_info( & + subgrid_info = subgrido, & + npts = 1, & + beg = 1, & + name = 'landunit', & + ltype = [ilun_urban_TBD], & + lat = [my_lat], & + lon = [my_lon]) + + ! Input points differ in landunit type + call create_subgrid_info( & + subgrid_info = subgridi, & + npts = 2, & + name = 'landunit', & + ltype = [ilun_urban_MD, ilun_urban_HD], & + lat = [(my_lat, i=1,2)], & + lon = [(my_lon, i=1,2)]) + + call wrap_set_mindist(subgridi, subgrido, mindist_index, & + fill_missing_urban_with_HD = .true., & + fill_missing_with_natveg = .false.) + + ! Note that the mindist_index should return the second index of the + ! ltype array (2), not the actual value of ilun_urban_HD + @assertEqual(2, mindist_index(1)) + + end associate + end subroutine TBDurban_usesHDurban + + @Test + subroutine TBDurban_usesHDurban_aborts(this) + ! If there's a new urban TBD type, this should take inputs from the closest + ! HD type. 
This test will abort correctly if fill_missing_urban_with_HD = .false. + ! + class(TestSetMindist), intent(inout) :: this + type(subgrid_type) :: subgridi, subgrido + real(r8), parameter :: my_lat = 31._r8 + real(r8), parameter :: my_lon = 41._r8 + integer :: i + integer :: mindist_index(1) + character(len=:), allocatable :: expected_msg + + associate( & + icol_urban_roof => subgrid_special_indices%icol_urban_roof, & + icol_urban_sunwall => subgrid_special_indices%icol_urban_sunwall, & + icol_urban_shadewall => subgrid_special_indices%icol_urban_shadewall, & + icol_urban_impervious_road => subgrid_special_indices%icol_urban_impervious_road, & + icol_urban_pervious_road => subgrid_special_indices%icol_urban_pervious_road, & + ilun_urban_TBD => subgrid_special_indices%ilun_urban_TBD, & + ilun_urban_HD => subgrid_special_indices%ilun_urban_HD, & + ilun_urban_MD => subgrid_special_indices%ilun_urban_MD & + ) + + call setup_landunit_ncols(ltype=ilun_urban_TBD, & + ctypes=[icol_urban_roof,icol_urban_sunwall,icol_urban_shadewall, & + icol_urban_impervious_road,icol_urban_pervious_road], & + cweights=[0.6_r8,0.1_r8,0.1_r8,0.1_r8,0.1_r8], & + ptype=0) + + call create_subgrid_info( & + subgrid_info = subgrido, & + npts = 1, & + beg = 1, & + name = 'landunit', & + ltype = [ilun_urban_TBD], & + lat = [my_lat], & + lon = [my_lon]) + + ! Input points differ in landunit type + call create_subgrid_info( & + subgrid_info = subgridi, & + npts = 2, & + name = 'landunit', & + ltype = [ilun_urban_MD, ilun_urban_HD], & + lat = [(my_lat, i=1,2)], & + lon = [(my_lon, i=1,2)]) + + call wrap_set_mindist(subgridi, subgrido, mindist_index, & + fill_missing_urban_with_HD = .false.) + + expected_msg = endrun_msg( & + 'set_mindist ERROR: Cannot find any input points matching output point') + @assertExceptionRaised(expected_msg) + + end associate + end subroutine TBDurban_usesHDurban_aborts + + @Test + subroutine urbanlandunits_NotFilled_with_natveg_aborts(this) + ! 
With fill_missing_urban_with_HD = .false. and fill_missing_with_natveg = .true., + ! urban landunit should not be filled with natveg, and an error in set_mindist will be + ! thrown, and this test should pass. + ! + class(TestSetMindist), intent(inout) :: this + type(subgrid_type) :: subgridi, subgrido + real(r8), parameter :: my_lat = 31._r8 + real(r8), parameter :: my_lon = 41._r8 + integer :: i + integer :: mindist_index(1) + character(len=:), allocatable :: expected_msg + + associate( & + ipft_bare => subgrid_special_indices%ipft_not_vegetated, & + icol_urban_roof => subgrid_special_indices%icol_urban_roof, & + icol_urban_sunwall => subgrid_special_indices%icol_urban_sunwall, & + icol_urban_shadewall => subgrid_special_indices%icol_urban_shadewall, & + icol_urban_impervious_road => subgrid_special_indices%icol_urban_impervious_road, & + icol_urban_pervious_road => subgrid_special_indices%icol_urban_pervious_road, & + icol_natveg => subgrid_special_indices%icol_vegetated_or_bare_soil, & + ilun_natveg => subgrid_special_indices%ilun_vegetated_or_bare_soil, & + ilun_urban_TBD => subgrid_special_indices%ilun_urban_TBD & + ) + + call setup_landunit_ncols(ltype=ilun_urban_TBD, & + ctypes=[icol_urban_roof,icol_urban_sunwall,icol_urban_shadewall, & + icol_urban_impervious_road,icol_urban_pervious_road], & + cweights=[0.6_r8,0.1_r8,0.1_r8,0.1_r8,0.1_r8], & + ptype=0) + + call create_subgrid_info( & + subgrid_info = subgrido, & + npts = 1, & + beg = 1, & + name = 'pft', & + ptype = [0], & + ctype = [icol_urban_roof], & + ltype = [ilun_urban_TBD], & + lat = [my_lat], & + lon = [my_lon]) + + call create_subgrid_info( & + subgrid_info = subgridi, & + npts = 1, & + name = 'pft', & + ptype = [ipft_bare], & + ctype = [icol_natveg], & + ltype = [ilun_natveg], & + lat = [my_lat], & + lon = [my_lon]) + + call wrap_set_mindist(subgridi, subgrido, mindist_index, & + fill_missing_urban_with_HD = .false., & + fill_missing_with_natveg = .true.) 
+ + expected_msg = endrun_msg( & + 'set_mindist ERROR: Cannot find any input points matching output point') + @assertExceptionRaised(expected_msg) + + end associate + end subroutine urbanlandunits_NotFilled_with_natveg_aborts + @Test subroutine baresoil_ignoresSpecialLandunits(this) ! This test ensures that, when finding a match for a bare soil patch, we ignore diff --git a/src/main/CMakeLists.txt b/src/main/CMakeLists.txt index 53a6edb8a5..fc324efeb9 100644 --- a/src/main/CMakeLists.txt +++ b/src/main/CMakeLists.txt @@ -20,6 +20,7 @@ list(APPEND clm_sources column_varcon.F90 decompMod.F90 filterColMod.F90 + FireMethodType.F90 glc2lndMod.F90 glcBehaviorMod.F90 initSubgridMod.F90 diff --git a/src/main/FireMethodType.F90 b/src/main/FireMethodType.F90 index 978450e65f..5f90dea893 100644 --- a/src/main/FireMethodType.F90 +++ b/src/main/FireMethodType.F90 @@ -34,6 +34,9 @@ module FireMethodType ! Figure out the fire fluxes procedure(CNFireFluxes_interface) , public, deferred :: CNFireFluxes + ! Deallocate the fire datasets + procedure(FireClean_interface) , public, deferred :: FireClean + end type fire_method_type abstract interface @@ -52,7 +55,7 @@ module FireMethodType ! consistent between different implementations. ! !--------------------------------------------------------------------------- - subroutine FireInit_interface(this, bounds, NLFilename ) + subroutine FireInit_interface(this, bounds ) ! ! !DESCRIPTION: ! Initialize Fire datasets @@ -63,20 +66,21 @@ subroutine FireInit_interface(this, bounds, NLFilename ) ! !ARGUMENTS: class(fire_method_type) :: this type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename !----------------------------------------------------------------------- end subroutine FireInit_interface - subroutine FireReadNML_interface(this, NLFilename ) + subroutine FireReadNML_interface(this, bounds, NLFilename ) ! ! !DESCRIPTION: ! Read general fire namelist ! ! 
USES + use decompMod , only : bounds_type import :: fire_method_type ! !ARGUMENTS: class(fire_method_type) :: this + type(bounds_type), intent(in) :: bounds character(len=*), intent(in) :: NLFilename !----------------------------------------------------------------------- @@ -97,6 +101,20 @@ subroutine FireInterp_interface(this, bounds) end subroutine FireInterp_interface + !----------------------------------------------------------------------- + subroutine FireClean_interface(this) + ! + ! !DESCRIPTION: + ! Deallocate Fire datasets + ! + ! USES + import :: fire_method_type + ! !ARGUMENTS: + class(fire_method_type) :: this + !----------------------------------------------------------------------- + + end subroutine FireClean_interface + !----------------------------------------------------------------------- subroutine CNFireReadParams_interface( this, ncid ) ! diff --git a/src/main/clm_initializeMod.F90 b/src/main/clm_initializeMod.F90 index 8c0b50230b..da8185be31 100644 --- a/src/main/clm_initializeMod.F90 +++ b/src/main/clm_initializeMod.F90 @@ -279,7 +279,9 @@ subroutine initialize2(ni,nj, currtime) end if ! Determine decomposition of subgrid scale landunits, columns, patches + call t_startf('clm_decompInit_clumps') call decompInit_clumps(ni, nj, glc_behavior) + call t_stopf('clm_decompInit_clumps') ! *** Get ALL processor bounds - for gridcells, landunit, columns and patches *** call get_proc_bounds(bounds_proc) @@ -304,7 +306,9 @@ subroutine initialize2(ni,nj, currtime) !$OMP END PARALLEL DO ! Set global seg maps for gridcells, landlunits, columns and patches + call t_startf('clm_decompInit_glcp') call decompInit_glcp(ni, nj, glc_behavior) + call t_stopf('clm_decompInit_glcp') if (use_hillslope) then ! 
Initialize hillslope properties diff --git a/src/main/histFileMod.F90 b/src/main/histFileMod.F90 index f5147559d9..74d4185a56 100644 --- a/src/main/histFileMod.F90 +++ b/src/main/histFileMod.F90 @@ -46,9 +46,12 @@ module histFileMod integer , public, parameter :: max_tapes = 10 ! max number of history tapes integer , public, parameter :: max_flds = 2500 ! max number of history fields integer , public, parameter :: max_namlen = 64 ! maximum number of characters for field name - integer , public, parameter :: scale_type_strlen = 32 ! maximum number of characters for scale types + integer , private, parameter :: scale_type_strlen = 32 ! maximum number of characters for scale types integer , private, parameter :: avgflag_strlen = 10 ! maximum number of characters for avgflag integer , private, parameter :: hist_dim_name_length = 16 ! lenngth of character strings in dimension names + integer , private, parameter :: max_split_files = 2 ! max number of files per tape + integer , private, parameter :: accumulated_file_index = 1 ! non-instantaneous file identifier + integer , private, parameter :: instantaneous_file_index = 2 ! instantaneous file identifier ! Possible ways to treat multi-layer snow fields at times when no snow is present in a ! given layer. Note that the public parameters are the only ones that can be used by @@ -141,7 +144,7 @@ module histFileMod fexcl(max_flds,max_tapes) ! copy of hist_fexcl* fields in 2-D format. Note Fortran ! used to have a bug in 2-D namelists, thus this workaround. - logical, private :: if_disphist(max_tapes) ! restart, true => save history file + logical, private :: if_disphist(max_tapes, max_split_files) ! restart, true => save history file ! ! !PUBLIC MEMBER FUNCTIONS: (in rough call order) public :: hist_addfld1d ! Add a 1d single-level field to the list of all history fields @@ -258,7 +261,7 @@ end subroutine copy_entry_interface ! practice are all disabled. Fields for those tapes have to be specified ! 
explicitly and manually via hist_fincl2 et al. type, extends(entry_base) :: allhistfldlist_entry - logical :: actflag(max_tapes) ! which history tapes to write to. + logical :: actflag(max_tapes,max_split_files) ! which history tapes to write to character(len=avgflag_strlen) :: avgflag(max_tapes) ! type of time averaging contains procedure :: copy => copy_allhistfldlist_entry @@ -280,16 +283,15 @@ end subroutine copy_entry_interface ! tapes is assembled in the 'allhistfldlist' variable. Note that the first history tape is index 1 in ! the code but contains 'h0' in its output filenames (see set_hist_filename method). type history_tape - integer :: nflds ! number of active fields on tape - integer :: ntimes ! current number of time samples on tape + integer :: nflds(max_split_files) ! number of active fields on file + integer :: ntimes(max_split_files) ! current number of time samples on tape; although ntimes is an array, all its values are the same integer :: mfilt ! maximum number of time samples per tape integer :: nhtfrq ! number of time samples per tape integer :: ncprec ! netcdf output precision logical :: dov2xy ! true => do xy average for all fields logical :: is_endhist ! true => current time step is end of history interval real(r8) :: begtime ! time at beginning of history averaging interval - type (history_entry) :: hlist(max_flds) ! array of active history tape entries. - ! The ordering matches the allhistfldlist's. + type (history_entry) :: hlist(max_flds, max_split_files) ! array of active history tape and file entries listed in the same order as in allhistfldlist, but hlist contains the active subset of all the fields end type history_tape type clmpoint_rs ! Pointer to real scalar data (1D) @@ -312,10 +314,10 @@ end subroutine copy_entry_interface ! type (allhistfldlist_entry) :: allhistfldlist(max_flds) ! list of all history fields ! - ! Whether each history tape is in use in this run. If history_tape_in_use(i) is false, - ! 
then data in tape(i) is undefined and should not be referenced. + ! Whether each history tape is in use in this run. If history_tape_in_use(i,j) is false, + ! then data in [tape(i), file(j)] is undefined and should not be referenced. ! - logical :: history_tape_in_use(max_tapes) ! whether each history tape is in use in this run + logical :: history_tape_in_use(max_tapes, max_split_files) ! history tape is/isn't in use in this run ! ! The actual (accumulated) history data for all active fields in each in-use tape. See ! 'history_tape_in_use' for in-use tapes, and 'allhistfldlist' for active fields. See also @@ -331,14 +333,14 @@ end subroutine copy_entry_interface ! ! Other variables ! - character(len=max_length_filename) :: locfnh(max_tapes) ! local history file names - character(len=max_length_filename) :: locfnhr(max_tapes) ! local history restart file names + character(len=max_length_filename) :: locfnh(max_tapes, max_split_files) ! local history file names + character(len=max_length_filename) :: locfnhr(max_tapes, max_split_files) ! local history restart file names logical :: htapes_defined = .false. ! flag indicates history output fields have been defined ! ! NetCDF Id's ! - type(file_desc_t), target :: nfid(max_tapes) ! file ids - type(file_desc_t), target :: ncid_hist(max_tapes) ! file ids for history restart files + type(file_desc_t), target :: nfid(max_tapes, max_split_files) ! file ids + type(file_desc_t), target :: ncid_hist(max_tapes, max_split_files) ! file ids for history restart files integer :: time_dimid ! time dimension id integer :: nbnd_dimid ! time bounds dimension id integer :: strlen_dimid ! string dimension id @@ -372,7 +374,7 @@ subroutine hist_printflds() ! !ARGUMENTS: ! ! !LOCAL VARIABLES: - integer, parameter :: ncol = 5 ! number of table columns + integer, parameter :: ncol = 6 ! number of table columns integer nf, i, j ! do-loop counters integer hist_fields_file ! file unit number integer width_col(ncol) ! 
widths of table columns @@ -401,7 +403,7 @@ subroutine hist_printflds() ! the CTSM's web-based documentation. ! First sort the list to be in alphabetical order - call sort_hist_list(1, nallhistflds, allhistfldlist) + call sort_hist_list(nallhistflds, allhistfldlist) if (masterproc .and. hist_fields_list_file) then ! Hardwired table column widths to fit the table on a computer @@ -413,7 +415,8 @@ subroutine hist_printflds() width_col(2) = hist_dim_name_length ! level dimension column width_col(3) = 94 ! long description column width_col(4) = 65 ! units column - width_col(5) = 7 ! active (T or F) column + width_col(5) = 10 ! active (T or F) column + width_col(6) = 12 ! active (T or F) column width_col_sum = sum(width_col) + ncol - 1 ! sum of widths & blank spaces ! Convert integer widths to strings for use in format statements @@ -467,9 +470,9 @@ subroutine hist_printflds() fmt_txt = '('//str_w_col_sum//'a)' write(hist_fields_file,fmt_txt) ('-', i=1, width_col_sum) ! Concatenate strings needed in format statement - fmt_txt = '(a'//str_width_col(1)//',x,a'//str_width_col(2)//',x,a'//str_width_col(3)//',x,a'//str_width_col(4)//',x,a'//str_width_col(5)//')' + fmt_txt = '(a'//str_width_col(1)//',x,a'//str_width_col(2)//',x,a'//str_width_col(3)//',x,a'//str_width_col(4)//',x,a'//str_width_col(5)//',x,a'//str_width_col(6)//')' write(hist_fields_file,fmt_txt) 'Variable Name', & - 'Level Dim.', 'Long Description', 'Units', 'Active?' + 'Level Dim.', 'Long Description', 'Units', "Active 'I'", "Act. not 'I'" ! End header, same as header ! Concatenate strings needed in format statement @@ -481,14 +484,14 @@ subroutine hist_printflds() ! Main table ! 
Concatenate strings needed in format statement - fmt_txt = '(a'//str_width_col(1)//',x,a'//str_width_col(2)//',x,a'//str_width_col(3)//',x,a'//str_width_col(4)//',l'//str_width_col(5)//')' + fmt_txt = '(a'//str_width_col(1)//',x,a'//str_width_col(2)//',x,a'//str_width_col(3)//',x,a'//str_width_col(4)//',l'//str_width_col(5)//',l'//str_width_col(6)//')' do nf = 1,nallhistflds write(hist_fields_file,fmt_txt) & allhistfldlist(nf)%field%name, & allhistfldlist(nf)%field%type2d, & allhistfldlist(nf)%field%long_name, & allhistfldlist(nf)%field%units, & - allhistfldlist(nf)%actflag(1) + allhistfldlist(nf)%actflag(1,:) end do ! Table footer, same as header @@ -538,7 +541,7 @@ subroutine allhistfldlist_addfld (fname, numdims, type1d, type1d_out, & ! ! !LOCAL VARIABLES: integer :: n ! loop index - integer :: f ! allhistfldlist index + integer :: fld ! allhistfldlist index integer :: numa ! total number of atm cells across all processors integer :: numg ! total number of gridcells across all processors integer :: numl ! total number of landunits across all processors @@ -583,7 +586,7 @@ subroutine allhistfldlist_addfld (fname, numdims, type1d, type1d_out, & ! Increase number of fields on list of all history fields nallhistflds = nallhistflds + 1 - f = nallhistflds + fld = nallhistflds ! Check number of fields in list against maximum number @@ -595,49 +598,49 @@ subroutine allhistfldlist_addfld (fname, numdims, type1d, type1d_out, & ! 
Add field to list of all history fields - allhistfldlist(f)%field%name = fname - allhistfldlist(f)%field%long_name = long_name - allhistfldlist(f)%field%units = units - allhistfldlist(f)%field%type1d = type1d - allhistfldlist(f)%field%type1d_out = type1d_out - allhistfldlist(f)%field%type2d = type2d - allhistfldlist(f)%field%numdims = numdims - allhistfldlist(f)%field%num2d = num2d - allhistfldlist(f)%field%hpindex = hpindex - allhistfldlist(f)%field%p2c_scale_type = p2c_scale_type - allhistfldlist(f)%field%c2l_scale_type = c2l_scale_type - allhistfldlist(f)%field%l2g_scale_type = l2g_scale_type + allhistfldlist(fld)%field%name = fname + allhistfldlist(fld)%field%long_name = long_name + allhistfldlist(fld)%field%units = units + allhistfldlist(fld)%field%type1d = type1d + allhistfldlist(fld)%field%type1d_out = type1d_out + allhistfldlist(fld)%field%type2d = type2d + allhistfldlist(fld)%field%numdims = numdims + allhistfldlist(fld)%field%num2d = num2d + allhistfldlist(fld)%field%hpindex = hpindex + allhistfldlist(fld)%field%p2c_scale_type = p2c_scale_type + allhistfldlist(fld)%field%c2l_scale_type = c2l_scale_type + allhistfldlist(fld)%field%l2g_scale_type = l2g_scale_type select case (type1d) case (grlnd) - allhistfldlist(f)%field%beg1d = bounds%begg - allhistfldlist(f)%field%end1d = bounds%endg - allhistfldlist(f)%field%num1d = numg + allhistfldlist(fld)%field%beg1d = bounds%begg + allhistfldlist(fld)%field%end1d = bounds%endg + allhistfldlist(fld)%field%num1d = numg case (nameg) - allhistfldlist(f)%field%beg1d = bounds%begg - allhistfldlist(f)%field%end1d = bounds%endg - allhistfldlist(f)%field%num1d = numg + allhistfldlist(fld)%field%beg1d = bounds%begg + allhistfldlist(fld)%field%end1d = bounds%endg + allhistfldlist(fld)%field%num1d = numg case (namel) - allhistfldlist(f)%field%beg1d = bounds%begl - allhistfldlist(f)%field%end1d = bounds%endl - allhistfldlist(f)%field%num1d = numl + allhistfldlist(fld)%field%beg1d = bounds%begl + allhistfldlist(fld)%field%end1d 
= bounds%endl + allhistfldlist(fld)%field%num1d = numl case (namec) - allhistfldlist(f)%field%beg1d = bounds%begc - allhistfldlist(f)%field%end1d = bounds%endc - allhistfldlist(f)%field%num1d = numc + allhistfldlist(fld)%field%beg1d = bounds%begc + allhistfldlist(fld)%field%end1d = bounds%endc + allhistfldlist(fld)%field%num1d = numc case (namep) - allhistfldlist(f)%field%beg1d = bounds%begp - allhistfldlist(f)%field%end1d = bounds%endp - allhistfldlist(f)%field%num1d = nump + allhistfldlist(fld)%field%beg1d = bounds%begp + allhistfldlist(fld)%field%end1d = bounds%endp + allhistfldlist(fld)%field%num1d = nump case default write(iulog,*) trim(subname),' ERROR: unknown 1d output type= ',type1d call endrun(msg=errMsg(sourcefile, __LINE__)) end select if (present(no_snow_behavior)) then - allhistfldlist(f)%field%no_snow_behavior = no_snow_behavior + allhistfldlist(fld)%field%no_snow_behavior = no_snow_behavior else - allhistfldlist(f)%field%no_snow_behavior = no_snow_unset + allhistfldlist(fld)%field%no_snow_behavior = no_snow_unset end if ! The following two fields are used only in list of all history fields, @@ -645,8 +648,8 @@ subroutine allhistfldlist_addfld (fname, numdims, type1d, type1d_out, & ! ALL FIELDS IN THE FORMER ARE INITIALIZED WITH THE ACTIVE ! FLAG SET TO FALSE - allhistfldlist(f)%avgflag(:) = avgflag - allhistfldlist(f)%actflag(:) = .false. + allhistfldlist(fld)%avgflag(:) = avgflag + allhistfldlist(fld)%actflag(:,:) = .false. end subroutine allhistfldlist_addfld @@ -704,7 +707,7 @@ subroutine hist_htapes_build () ! Note - with netcdf, only 1 (ncd_double) and 2 (ncd_float) are allowed do t=1,ntapes - tape(t)%ntimes = 0 + tape(t)%ntimes(:) = 0 tape(t)%dov2xy = hist_dov2xy(t) tape(t)%nhtfrq = hist_nhtfrq(t) tape(t)%mfilt = hist_mfilt(t) @@ -744,7 +747,7 @@ subroutine allhistfldlist_make_active (name, tape_index, avgflag) character(len=*), intent(in), optional :: avgflag ! time averaging flag ! ! !LOCAL VARIABLES: - integer :: f ! 
field index + integer :: fld ! field index logical :: found ! flag indicates field found in allhistfldlist character(len=*),parameter :: subname = 'allhistfldlist_make_active' !----------------------------------------------------------------------- @@ -768,11 +771,15 @@ subroutine allhistfldlist_make_active (name, tape_index, avgflag) ! Also reset averaging flag if told to use other than default. found = .false. - do f = 1,nallhistflds - if (trim(name) == trim(allhistfldlist(f)%field%name)) then - allhistfldlist(f)%actflag(tape_index) = .true. + do fld = 1, nallhistflds + if (trim(name) == trim(allhistfldlist(fld)%field%name)) then if (present(avgflag)) then - if (avgflag/= ' ') allhistfldlist(f)%avgflag(tape_index) = avgflag + if (avgflag /= ' ') allhistfldlist(fld)%avgflag(tape_index) = avgflag + end if + if (allhistfldlist(fld)%avgflag(tape_index) == 'I') then + allhistfldlist(fld)%actflag(tape_index,instantaneous_file_index) = .true. + else + allhistfldlist(fld)%actflag(tape_index,accumulated_file_index) = .true. end if found = .true. exit @@ -796,7 +803,7 @@ subroutine allhistfldlist_change_timeavg (t) integer, intent(in) :: t ! history tape index ! ! !LOCAL VARIABLES: - integer :: f ! field index + integer :: fld ! field index character(len=avgflag_strlen) :: avgflag ! local equiv of hist_avgflag_pertape(t) character(len=*),parameter :: subname = 'allhistfldlist_change_timeavg' !----------------------------------------------------------------------- @@ -807,8 +814,8 @@ subroutine allhistfldlist_change_timeavg (t) call endrun(msg=errMsg(sourcefile, __LINE__)) end if - do f = 1,nallhistflds - allhistfldlist(f)%avgflag(t) = avgflag + do fld = 1, nallhistflds + allhistfldlist(fld)%avgflag(t) = avgflag end do end subroutine allhistfldlist_change_timeavg @@ -828,7 +835,8 @@ subroutine htapes_fieldlist() ! !ARGUMENTS: ! ! !LOCAL VARIABLES: - integer :: t, f ! tape, field indices + class(entry_base), pointer :: tmp_hlist(:) ! 
temporary subset of hlist to pass as call argument + integer :: t, f, fld ! tape, file, field indices integer :: ff ! index into include, exclude and fprec list character(len=max_namlen) :: name ! field name portion of fincl (i.e. no avgflag separator) character(len=max_namlen) :: allhistfldname ! name from allhistfldlist field @@ -872,40 +880,40 @@ subroutine htapes_fieldlist() ! First ensure contents of fincl and fexcl are valid names - do t = 1,max_tapes - f = 1 - do while (f < max_flds .and. fincl(f,t) /= ' ') - name = getname (fincl(f,t)) + tape_loop1: do t = 1, max_tapes + fld = 1 + do while (fld < max_flds .and. fincl(fld,t) /= ' ') + name = getname (fincl(fld,t)) do ff = 1,nallhistflds allhistfldname = allhistfldlist(ff)%field%name if (name == allhistfldname) exit end do if (name /= allhistfldname) then - write(iulog,*) trim(subname),' ERROR: ', trim(name), ' in fincl(', f, ') ',& + write(iulog,*) trim(subname),' ERROR: ', trim(name), ' in fincl(', fld, ') ',& 'for history tape ',t,' not found' call endrun(msg=errMsg(sourcefile, __LINE__)) end if - f = f + 1 + fld = fld + 1 end do - f = 1 - do while (f < max_flds .and. fexcl(f,t) /= ' ') + fld = 1 + do while (fld < max_flds .and. fexcl(fld,t) /= ' ') do ff = 1,nallhistflds allhistfldname = allhistfldlist(ff)%field%name - if (fexcl(f,t) == allhistfldname) exit + if (fexcl(fld,t) == allhistfldname) exit end do - if (fexcl(f,t) /= allhistfldname) then - write(iulog,*) trim(subname),' ERROR: ', fexcl(f,t), ' in fexcl(', f, ') ', & + if (fexcl(fld,t) /= allhistfldname) then + write(iulog,*) trim(subname),' ERROR: ', fexcl(fld,t), ' in fexcl(', fld, ') ', & 'for history tape ',t,' not found' call endrun(msg=errMsg(sourcefile, __LINE__)) end if - f = f + 1 + fld = fld + 1 end do - end do + history_tape_in_use(t,:) = .false. + tape(t)%nflds(:) = 0 + end do tape_loop1 - history_tape_in_use(:) = .false. - tape(:)%nflds = 0 - do t = 1,max_tapes + tape_loop2: do t = 1, max_tapes ! 
Loop through the allhistfldlist set of field names and determine if any of those ! are in the FINCL or FEXCL arrays @@ -914,68 +922,101 @@ subroutine htapes_fieldlist() ! Add the field to the tape if specified via namelist (FINCL[1-max_tapes]), ! or if it is on by default and was not excluded via namelist (FEXCL[1-max_tapes]). - do f = 1,nallhistflds - allhistfldname = allhistfldlist(f)%field%name - call list_index (fincl(1,t), allhistfldname, ff) + file_loop1: do f = 1, max_split_files + fld_loop: do fld = 1, nallhistflds + allhistfldname = allhistfldlist(fld)%field%name + call list_index (fincl(1,t), allhistfldname, ff) - if (ff > 0) then + ff_gt_0: if (ff > 0) then - ! if field is in include list, ff > 0 and htape_addfld - ! will be called for field + ! if field is in include list, ff > 0 and htape_addfld + ! will be called for field - avgflag = getflag (fincl(ff,t)) - call htape_addfld (t, f, avgflag) + avgflag = getflag (fincl(ff,t)) - else if (.not. hist_empty_htapes) then + ! Set time averaging flag based on allhistfldlist setting or + ! override the default averaging flag with namelist setting - ! find index of field in exclude list + if (.not. avgflag_valid(avgflag, blank_valid=.true.)) then + write(iulog,*) trim(subname),' ERROR: unknown avgflag=', avgflag + call endrun(msg=errMsg(sourcefile, __LINE__)) + end if - call list_index (fexcl(1,t), allhistfldname, ff) + if (avgflag == ' ') then + avgflag = allhistfldlist(fld)%avgflag(t) + end if - ! if field is in exclude list, ff > 0 and htape_addfld - ! will not be called for field - ! if field is not in exclude list, ff =0 and htape_addfld - ! will be called for field (note that htape_addfld will be - ! called below only if field is not in exclude list OR in - ! include list + ! This if-statement is in a loop of f (instantaneous_ or + ! accumulated_file_index) so it matters whether f is one + ! or the other when going through here. Otherwise all fields + ! would end up on all files, which is not the intent. 
+ if (f == instantaneous_file_index .and. avgflag == 'I') then + call htape_addfld (t, f, fld, avgflag) + else if (f == accumulated_file_index .and. avgflag /= 'I') then + call htape_addfld (t, f, fld, avgflag) + else if (f /= instantaneous_file_index .and. f /= accumulated_file_index) then + write(iulog,*) trim(subname),' ERROR: invalid f =', f, ' should be one of these values:', accumulated_file_index, instantaneous_file_index + call endrun(msg=errMsg(sourcefile, __LINE__)) + end if - if (ff == 0 .and. allhistfldlist(f)%actflag(t)) then - call htape_addfld (t, f, ' ') - end if + else if (.not. hist_empty_htapes) then - end if - end do + ! find index of field in exclude list - ! Specification of tape contents now complete. - ! Sort each list of active entries - call sort_hist_list(t, tape(t)%nflds, tape(t)%hlist) + call list_index (fexcl(1,t), allhistfldname, ff) - if (masterproc) then - if (tape(t)%nflds > 0) then - write(iulog,*) trim(subname),' : Included fields tape ',t,'=',tape(t)%nflds + ! if field is in exclude list, ff > 0 and htape_addfld + ! will not be called for field + ! if field is not in exclude list, ff =0 and htape_addfld + ! will be called for field (note that htape_addfld will be + ! called below only if field is not in exclude list OR in + ! include list + + if (ff == 0 .and. allhistfldlist(fld)%actflag(t,f)) then + call htape_addfld (t, f, fld, ' ') + end if + + end if ff_gt_0 + end do fld_loop + + ! Specification of tape contents now complete. + ! 
Sort each list of active entries + associate(tmp_hlist => tape(t)%hlist(:,f)) + call sort_hist_list(tape(t)%nflds(f), tmp_hlist(:)) + end associate + + if (masterproc) then + if (tape(t)%nflds(f) > 0) then + write(iulog,*) trim(subname),' : Included fields tape ', t, '=',tape(t)%nflds(f) + end if + do fld = 1, tape(t)%nflds(f) + write(iulog,*) fld, ' ', tape(t)%hlist(fld,f)%field%name, & + tape(t)%hlist(fld,f)%field%num2d, ' ', tape(t)%hlist(fld,f)%avgflag + end do + call shr_sys_flush(iulog) end if - do f = 1,tape(t)%nflds - write(iulog,*) f,' ',tape(t)%hlist(f)%field%name, & - tape(t)%hlist(f)%field%num2d,' ',tape(t)%hlist(f)%avgflag - end do - call shr_sys_flush(iulog) - end if - end do + end do file_loop1 + end do tape_loop2 ! Determine index of max active history tape, and whether each tape is in use ntapes = 0 do t = max_tapes,1,-1 - if (tape(t)%nflds > 0) then - ntapes = t - exit - end if + do f = 1, max_split_files + if (tape(t)%nflds(f) > 0) then + ntapes = t + exit + end if + end do + if (ntapes > 0) exit end do do t = 1, ntapes - if (tape(t)%nflds > 0) then - history_tape_in_use(t) = .true. - end if + do f = 1, max_split_files + if (tape(t)%nflds(f) > 0) then + history_tape_in_use(t,f) = .true. + end if + end do end do ! Change 1d output per tape output flag if requested - only for history @@ -996,7 +1037,7 @@ subroutine htapes_fieldlist() if (masterproc) then write(iulog,*) 'There will be a total of ',ntapes,' history tapes' - do t=1,ntapes + tape_loop3: do t = 1, ntapes write(iulog,*) if (hist_nhtfrq(t) == 0) then write(iulog,*)'History tape ',t,' write frequency is MONTHLY' @@ -1010,12 +1051,14 @@ subroutine htapes_fieldlist() end if write(iulog,*)'Number of time samples on history tape ',t,' is ',hist_mfilt(t) write(iulog,*)'Output precision on history tape ',t,'=',hist_ndens(t) - if (.not. history_tape_in_use(t)) then - write(iulog,*) 'History tape ',t,' does not have any fields,' - write(iulog,*) 'so it will not be written!' 
- end if + file_loop2: do f = 1, max_split_files + if (.not. history_tape_in_use(t,f)) then + write(iulog,*) 'History tape ', t,' and file ', f, ' has no fields,' + write(iulog,*) 'so it will not be written!' + end if + end do file_loop2 write(iulog,*) - end do + end do tape_loop3 call shr_sys_flush(iulog) end if @@ -1065,19 +1108,18 @@ subroutine copy_history_entry(this, other) end subroutine copy_history_entry !----------------------------------------------------------------------- - subroutine sort_hist_list(t, n_fields, hist_list) + subroutine sort_hist_list(n_fields, hist_list) ! !DESCRIPTION: ! Sort list of history variable names hist_list in alphabetical ! order. ! !ARGUMENTS: - integer, intent(in) :: t ! tape index integer, intent(in) :: n_fields ! number of fields class(entry_base), intent(inout) :: hist_list(:) ! !LOCAL VARIABLES: - integer :: f, ff ! field indices + integer :: fld, ff ! field indices class(entry_base), allocatable :: tmp character(len=*), parameter :: subname = 'sort_hist_list' @@ -1091,8 +1133,8 @@ subroutine sort_hist_list(t, n_fields, hist_list) allocate(tmp, source = hist_list(1)) - do f = n_fields-1, 1, -1 - do ff = 1, f + do fld = n_fields-1, 1, -1 + do ff = 1, fld ! First sort by the name of the level dimension; then, within the list of ! fields with the same level dimension, sort by field name. Sorting first by ! the level dimension gives a significant performance improvement especially @@ -1147,14 +1189,15 @@ logical function is_mapping_upto_subgrid( type1d, type1d_out ) result ( mapping) end function is_mapping_upto_subgrid !----------------------------------------------------------------------- - subroutine htape_addfld (t, f, avgflag) + subroutine htape_addfld (t, f, fld, avgflag) ! ! !DESCRIPTION: ! Add a field to a history tape, copying metadata from the list of all history fields ! ! !ARGUMENTS: integer, intent(in) :: t ! history tape index - integer, intent(in) :: f ! 
field index from list of all history fields + integer, intent(in) :: f ! history file index + integer, intent(in) :: fld ! field index from list of all history fields character(len=*), intent(in) :: avgflag ! time averaging flag ! ! !LOCAL VARIABLES: @@ -1179,16 +1222,16 @@ subroutine htape_addfld (t, f, avgflag) if (htapes_defined) then write(iulog,*) trim(subname),' ERROR: attempt to add field ', & - allhistfldlist(f)%field%name, ' after history files are set' + allhistfldlist(fld)%field%name, ' after history files are set' call endrun(msg=errMsg(sourcefile, __LINE__)) end if - tape(t)%nflds = tape(t)%nflds + 1 - n = tape(t)%nflds + tape(t)%nflds(f) = tape(t)%nflds(f) + 1 + n = tape(t)%nflds(f) ! Copy field information - tape(t)%hlist(n)%field = allhistfldlist(f)%field + tape(t)%hlist(n,f)%field = allhistfldlist(fld)%field ! Determine bounds @@ -1203,16 +1246,16 @@ subroutine htape_addfld (t, f, avgflag) ! ***NOTE- the following logic is what permits non lat/lon grids to ! be written to clm history file - type1d = tape(t)%hlist(n)%field%type1d + type1d = tape(t)%hlist(n,f)%field%type1d if (type1d == nameg .or. & type1d == namel .or. & type1d == namec .or. & type1d == namep) then - tape(t)%hlist(n)%field%type1d_out = grlnd + tape(t)%hlist(n,f)%field%type1d_out = grlnd end if if (type1d == grlnd) then - tape(t)%hlist(n)%field%type1d_out = grlnd + tape(t)%hlist(n,f)%field%type1d_out = grlnd end if else if (hist_type1d_pertape(t) /= ' ') then @@ -1220,17 +1263,17 @@ subroutine htape_addfld (t, f, avgflag) ! Set output 1d type based on namelist setting of hist_type1d_pertape ! 
Only applies to tapes when xy output is not required - type1d = tape(t)%hlist(n)%field%type1d + type1d = tape(t)%hlist(n,f)%field%type1d select case (trim(hist_type1d_pertape(t))) case('GRID') - tape(t)%hlist(n)%field%type1d_out = nameg + tape(t)%hlist(n,f)%field%type1d_out = nameg case('LAND') - tape(t)%hlist(n)%field%type1d_out = namel + tape(t)%hlist(n,f)%field%type1d_out = namel case('COLS') - tape(t)%hlist(n)%field%type1d_out = namec + tape(t)%hlist(n,f)%field%type1d_out = namec case ('PFTS') - tape(t)%hlist(n)%field%type1d_out = namep + tape(t)%hlist(n,f)%field%type1d_out = namep case default write(iulog,*) trim(subname),' ERROR: unknown input hist_type1d_pertape= ', hist_type1d_pertape(t) call endrun(msg=errMsg(sourcefile, __LINE__)) @@ -1240,7 +1283,7 @@ subroutine htape_addfld (t, f, avgflag) ! Determine output 1d dimensions - type1d_out = tape(t)%hlist(n)%field%type1d_out + type1d_out = tape(t)%hlist(n,f)%field%type1d_out if (type1d_out == grlnd) then beg1d_out = bounds%begg end1d_out = bounds%endg @@ -1267,26 +1310,26 @@ subroutine htape_addfld (t, f, avgflag) end if ! Output bounds for the field - tape(t)%hlist(n)%field%beg1d_out = beg1d_out - tape(t)%hlist(n)%field%end1d_out = end1d_out - tape(t)%hlist(n)%field%num1d_out = num1d_out + tape(t)%hlist(n,f)%field%beg1d_out = beg1d_out + tape(t)%hlist(n,f)%field%end1d_out = end1d_out + tape(t)%hlist(n,f)%field%num1d_out = num1d_out ! Fields native bounds - beg1d = allhistfldlist(f)%field%beg1d - end1d = allhistfldlist(f)%field%end1d + beg1d = allhistfldlist(fld)%field%beg1d + end1d = allhistfldlist(fld)%field%end1d - ! Alloccate and initialize history buffer and related info + ! 
Allocate and initialize history buffer and related info - num2d = tape(t)%hlist(n)%field%num2d + num2d = tape(t)%hlist(n,f)%field%num2d if ( is_mapping_upto_subgrid( type1d, type1d_out ) ) then - allocate (tape(t)%hlist(n)%hbuf(beg1d_out:end1d_out,num2d)) - allocate (tape(t)%hlist(n)%nacs(beg1d_out:end1d_out,num2d)) + allocate (tape(t)%hlist(n,f)%hbuf(beg1d_out:end1d_out,num2d)) + allocate (tape(t)%hlist(n,f)%nacs(beg1d_out:end1d_out,num2d)) else - allocate (tape(t)%hlist(n)%hbuf(beg1d:end1d,num2d)) - allocate (tape(t)%hlist(n)%nacs(beg1d:end1d,num2d)) + allocate (tape(t)%hlist(n,f)%hbuf(beg1d:end1d,num2d)) + allocate (tape(t)%hlist(n,f)%nacs(beg1d:end1d,num2d)) end if - tape(t)%hlist(n)%hbuf(:,:) = 0._r8 - tape(t)%hlist(n)%nacs(:,:) = 0 + tape(t)%hlist(n,f)%hbuf(:,:) = 0._r8 + tape(t)%hlist(n,f)%nacs(:,:) = 0 ! Set time averaging flag based on allhistfldlist setting or ! override the default averaging flag with namelist setting @@ -1297,9 +1340,9 @@ subroutine htape_addfld (t, f, avgflag) end if if (avgflag == ' ') then - tape(t)%hlist(n)%avgflag = allhistfldlist(f)%avgflag(t) + tape(t)%hlist(n,f)%avgflag = allhistfldlist(fld)%avgflag(t) else - tape(t)%hlist(n)%avgflag = avgflag + tape(t)%hlist(n,f)%avgflag = avgflag end if ! Override this tape's avgflag if nhtfrq == 1 @@ -1312,7 +1355,7 @@ subroutine htape_addfld (t, f, avgflag) ! - local time (L) avgflag_temp = hist_avgflag_pertape(t) if (avgflag_temp == 'I' .or. avgflag_temp(1:1) == 'L') then - tape(t)%hlist(n)%avgflag = avgflag_temp + tape(t)%hlist(n,f)%avgflag = avgflag_temp end if end subroutine htape_addfld @@ -1329,33 +1372,36 @@ subroutine hist_update_hbuf(bounds) ! ! !LOCAL VARIABLES: integer :: t ! tape index - integer :: f ! field index + integer :: f ! file index + integer :: fld ! field index integer :: num2d ! size of second dimension (e.g. number of vertical levels) integer :: numdims ! 
number of dimensions character(len=*),parameter :: subname = 'hist_update_hbuf' character(len=hist_dim_name_length) :: type2d ! hbuf second dimension type ["levgrnd","levlak","numrad","ltype","natpft","cft","glc_nec","elevclas","subname(n)","mxsowings","mxharvests"] !----------------------------------------------------------------------- - do t = 1,ntapes -!$OMP PARALLEL DO PRIVATE (f, num2d, numdims) - do f = 1,tape(t)%nflds + tape_loop: do t = 1, ntapes + file_loop: do f = 1, max_split_files +!$OMP PARALLEL DO PRIVATE (fld, num2d, numdims) + do fld = 1, tape(t)%nflds(f) - numdims = tape(t)%hlist(f)%field%numdims + numdims = tape(t)%hlist(fld,f)%field%numdims - if ( numdims == 1) then - call hist_update_hbuf_field_1d (t, f, bounds) - else - num2d = tape(t)%hlist(f)%field%num2d - call hist_update_hbuf_field_2d (t, f, bounds, num2d) - end if - end do + if ( numdims == 1) then + call hist_update_hbuf_field_1d (t, f, fld, bounds) + else + num2d = tape(t)%hlist(fld,f)%field%num2d + call hist_update_hbuf_field_2d (t, f, fld, bounds, num2d) + end if + end do !$OMP END PARALLEL DO - end do + end do file_loop + end do tape_loop end subroutine hist_update_hbuf !----------------------------------------------------------------------- - subroutine hist_update_hbuf_field_1d (t, f, bounds) + subroutine hist_update_hbuf_field_1d (t, f, fld, bounds) ! ! !DESCRIPTION: ! Accumulate (or take min, max, etc. as appropriate) input field @@ -1372,7 +1418,8 @@ subroutine hist_update_hbuf_field_1d (t, f, bounds) ! ! !ARGUMENTS: integer, intent(in) :: t ! tape index - integer, intent(in) :: f ! field index + integer, intent(in) :: f ! file index + integer, intent(in) :: fld ! field index type(bounds_type), intent(in) :: bounds ! ! 
!LOCAL VARIABLES: @@ -1412,19 +1459,19 @@ subroutine hist_update_hbuf_field_1d (t, f, bounds) SHR_ASSERT_FL(bounds%level == bounds_level_proc, sourcefile, __LINE__) - avgflag = tape(t)%hlist(f)%avgflag - nacs => tape(t)%hlist(f)%nacs - hbuf => tape(t)%hlist(f)%hbuf - beg1d = tape(t)%hlist(f)%field%beg1d - end1d = tape(t)%hlist(f)%field%end1d - beg1d_out = tape(t)%hlist(f)%field%beg1d_out - end1d_out = tape(t)%hlist(f)%field%end1d_out - type1d = tape(t)%hlist(f)%field%type1d - type1d_out = tape(t)%hlist(f)%field%type1d_out - p2c_scale_type = tape(t)%hlist(f)%field%p2c_scale_type - c2l_scale_type = tape(t)%hlist(f)%field%c2l_scale_type - l2g_scale_type = tape(t)%hlist(f)%field%l2g_scale_type - hpindex = tape(t)%hlist(f)%field%hpindex + avgflag = tape(t)%hlist(fld,f)%avgflag + nacs => tape(t)%hlist(fld,f)%nacs + hbuf => tape(t)%hlist(fld,f)%hbuf + beg1d = tape(t)%hlist(fld,f)%field%beg1d + end1d = tape(t)%hlist(fld,f)%field%end1d + beg1d_out = tape(t)%hlist(fld,f)%field%beg1d_out + end1d_out = tape(t)%hlist(fld,f)%field%end1d_out + type1d = tape(t)%hlist(fld,f)%field%type1d + type1d_out = tape(t)%hlist(fld,f)%field%type1d_out + p2c_scale_type = tape(t)%hlist(fld,f)%field%p2c_scale_type + c2l_scale_type = tape(t)%hlist(fld,f)%field%c2l_scale_type + l2g_scale_type = tape(t)%hlist(fld,f)%field%l2g_scale_type + hpindex = tape(t)%hlist(fld,f)%field%hpindex field => clmptr_rs(hpindex)%ptr call get_curr_date (year, month, day, secs) @@ -1718,7 +1765,7 @@ subroutine hist_update_hbuf_field_1d (t, f, bounds) end subroutine hist_update_hbuf_field_1d !----------------------------------------------------------------------- - subroutine hist_update_hbuf_field_2d (t, f, bounds, num2d) + subroutine hist_update_hbuf_field_2d (t, f, fld, bounds, num2d) ! ! !DESCRIPTION: ! Accumulate (or take min, max, etc. as appropriate) input field @@ -1736,7 +1783,8 @@ subroutine hist_update_hbuf_field_2d (t, f, bounds, num2d) ! ! !ARGUMENTS: integer, intent(in) :: t ! 
tape index - integer, intent(in) :: f ! field index + integer, intent(in) :: f ! file index + integer, intent(in) :: fld ! field index type(bounds_type), intent(in) :: bounds integer, intent(in) :: num2d ! size of second dimension ! @@ -1779,20 +1827,20 @@ subroutine hist_update_hbuf_field_2d (t, f, bounds, num2d) SHR_ASSERT_FL(bounds%level == bounds_level_proc, sourcefile, __LINE__) - avgflag = tape(t)%hlist(f)%avgflag - nacs => tape(t)%hlist(f)%nacs - hbuf => tape(t)%hlist(f)%hbuf - beg1d = tape(t)%hlist(f)%field%beg1d - end1d = tape(t)%hlist(f)%field%end1d - beg1d_out = tape(t)%hlist(f)%field%beg1d_out - end1d_out = tape(t)%hlist(f)%field%end1d_out - type1d = tape(t)%hlist(f)%field%type1d - type1d_out = tape(t)%hlist(f)%field%type1d_out - p2c_scale_type = tape(t)%hlist(f)%field%p2c_scale_type - c2l_scale_type = tape(t)%hlist(f)%field%c2l_scale_type - l2g_scale_type = tape(t)%hlist(f)%field%l2g_scale_type - no_snow_behavior = tape(t)%hlist(f)%field%no_snow_behavior - hpindex = tape(t)%hlist(f)%field%hpindex + avgflag = tape(t)%hlist(fld,f)%avgflag + nacs => tape(t)%hlist(fld,f)%nacs + hbuf => tape(t)%hlist(fld,f)%hbuf + beg1d = tape(t)%hlist(fld,f)%field%beg1d + end1d = tape(t)%hlist(fld,f)%field%end1d + beg1d_out = tape(t)%hlist(fld,f)%field%beg1d_out + end1d_out = tape(t)%hlist(fld,f)%field%end1d_out + type1d = tape(t)%hlist(fld,f)%field%type1d + type1d_out = tape(t)%hlist(fld,f)%field%type1d_out + p2c_scale_type = tape(t)%hlist(fld,f)%field%p2c_scale_type + c2l_scale_type = tape(t)%hlist(fld,f)%field%c2l_scale_type + l2g_scale_type = tape(t)%hlist(fld,f)%field%l2g_scale_type + no_snow_behavior = tape(t)%hlist(fld,f)%field%no_snow_behavior + hpindex = tape(t)%hlist(fld,f)%field%hpindex call get_curr_date (year, month, day, secs) @@ -2253,7 +2301,7 @@ end subroutine hist_set_snow_field_2d !----------------------------------------------------------------------- - subroutine hfields_normalize (t) + subroutine hfields_normalize (t, f) ! ! !DESCRIPTION: ! 
Normalize fields on a history file by the number of accumulations. @@ -2262,9 +2310,10 @@ subroutine hfields_normalize (t) ! ! !ARGUMENTS: integer, intent(in) :: t ! tape index + integer, intent(in) :: f ! file index ! ! !LOCAL VARIABLES: - integer :: f ! field index + integer :: fld ! field index integer :: k ! 1d index integer :: j ! 2d index logical :: aflag ! averaging flag @@ -2278,18 +2327,18 @@ subroutine hfields_normalize (t) ! Normalize by number of accumulations for time averaged case - do f = 1,tape(t)%nflds - avgflag = tape(t)%hlist(f)%avgflag - if ( is_mapping_upto_subgrid(tape(t)%hlist(f)%field%type1d, tape(t)%hlist(f)%field%type1d_out) )then - beg1d = tape(t)%hlist(f)%field%beg1d_out - end1d = tape(t)%hlist(f)%field%end1d_out + do fld = 1, tape(t)%nflds(f) + avgflag = tape(t)%hlist(fld,f)%avgflag + if ( is_mapping_upto_subgrid(tape(t)%hlist(fld,f)%field%type1d, tape(t)%hlist(fld,f)%field%type1d_out) )then + beg1d = tape(t)%hlist(fld,f)%field%beg1d_out + end1d = tape(t)%hlist(fld,f)%field%end1d_out else - beg1d = tape(t)%hlist(f)%field%beg1d - end1d = tape(t)%hlist(f)%field%end1d + beg1d = tape(t)%hlist(fld,f)%field%beg1d + end1d = tape(t)%hlist(fld,f)%field%end1d end if - num2d = tape(t)%hlist(f)%field%num2d - nacs => tape(t)%hlist(f)%nacs - hbuf => tape(t)%hlist(f)%hbuf + num2d = tape(t)%hlist(fld,f)%field%num2d + nacs => tape(t)%hlist(fld,f)%nacs + hbuf => tape(t)%hlist(fld,f)%hbuf if (avgflag == 'A' .or. avgflag(1:1) == 'L') then aflag = .true. @@ -2311,7 +2360,7 @@ subroutine hfields_normalize (t) end subroutine hfields_normalize !----------------------------------------------------------------------- - subroutine hfields_zero (t) + subroutine hfields_zero (t, f) ! ! !DESCRIPTION: ! Zero out accumulation and history buffers for a given history tape. @@ -2319,21 +2368,22 @@ subroutine hfields_zero (t) ! ! !ARGUMENTS: integer, intent(in) :: t ! tape index + integer, intent(in) :: f ! file index ! ! !LOCAL VARIABLES: - integer :: f ! 
field index + integer :: fld ! field index character(len=*),parameter :: subname = 'hfields_zero' !----------------------------------------------------------------------- - do f = 1,tape(t)%nflds - tape(t)%hlist(f)%hbuf(:,:) = 0._r8 - tape(t)%hlist(f)%nacs(:,:) = 0 + do fld = 1,tape(t)%nflds(f) + tape(t)%hlist(fld,f)%hbuf(:,:) = 0._r8 + tape(t)%hlist(fld,f)%nacs(:,:) = 0 end do end subroutine hfields_zero !----------------------------------------------------------------------- - subroutine htape_create (t, histrest) + subroutine htape_create (t, f, histrest) ! ! !DESCRIPTION: ! Define netcdf metadata of history file t. @@ -2351,10 +2401,10 @@ subroutine htape_create (t, histrest) ! ! !ARGUMENTS: integer, intent(in) :: t ! tape index + integer, intent(in) :: f ! file index logical, intent(in), optional :: histrest ! if creating the history restart file ! ! !LOCAL VARIABLES: - integer :: f ! field index integer :: p,c,l,n ! indices integer :: ier ! error code integer :: num2d ! size of second dimension (e.g. number of vertical levels) @@ -2394,9 +2444,9 @@ subroutine htape_create (t, histrest) ncprec = tape(t)%ncprec if (lhistrest) then - lnfid => ncid_hist(t) + lnfid => ncid_hist(t,f) else - lnfid => nfid(t) + lnfid => nfid(t,f) endif ! Create new netCDF file. It will be in define mode @@ -2404,20 +2454,20 @@ subroutine htape_create (t, histrest) if ( .not. lhistrest )then if (masterproc) then write(iulog,*) trim(subname),' : Opening netcdf htape ', & - trim(locfnh(t)) + trim(locfnh(t,f)) call shr_sys_flush(iulog) end if - call ncd_pio_createfile(lnfid, trim(locfnh(t))) + call ncd_pio_createfile(lnfid, trim(locfnh(t,f))) call ncd_putatt(lnfid, ncd_global, 'title', 'CLM History file information' ) call ncd_putatt(lnfid, ncd_global, 'comment', & "NOTE: None of the variables are weighted by land fraction!" 
) else if (masterproc) then write(iulog,*) trim(subname),' : Opening netcdf rhtape ', & - trim(locfnhr(t)) + trim(locfnhr(t,f)) call shr_sys_flush(iulog) end if - call ncd_pio_createfile(lnfid, trim(locfnhr(t))) + call ncd_pio_createfile(lnfid, trim(locfnhr(t,f))) call ncd_putatt(lnfid, ncd_global, 'title', & 'CLM Restart History information, required to continue a simulation' ) call ncd_putatt(lnfid, ncd_global, 'comment', & @@ -2542,7 +2592,7 @@ subroutine htape_create (t, histrest) call ncd_defdim(lnfid, 'time', ncd_unlimited, time_dimid) if (masterproc)then write(iulog,*) trim(subname), & - ' : Successfully defined netcdf history file ',t + ' : Successfully defined netcdf history file ', t, f call shr_sys_flush(iulog) end if else @@ -2665,7 +2715,7 @@ subroutine htape_add_cft_metadata(lnfid) end subroutine htape_add_cft_metadata !----------------------------------------------------------------------- - subroutine htape_timeconst3D(t, & + subroutine htape_timeconst3D(t, f, & bounds, watsat_col, sucsat_col, bsw_col, hksat_col, & cellsand_col, cellclay_col, mode) ! @@ -2684,6 +2734,7 @@ subroutine htape_timeconst3D(t, & ! ! !ARGUMENTS: integer , intent(in) :: t ! tape index + integer , intent(in) :: f ! 
file index type(bounds_type) , intent(in) :: bounds real(r8) , intent(in) :: watsat_col( bounds%begc:,1: ) real(r8) , intent(in) :: sucsat_col( bounds%begc:,1: ) @@ -2786,20 +2837,20 @@ subroutine htape_timeconst3D(t, & end if if (tape(t)%dov2xy) then if (ldomain%isgrid2d) then - call ncd_defvar(ncid=nfid(t), varname=trim(varnames(ifld)), xtype=tape(t)%ncprec,& + call ncd_defvar(ncid=nfid(t,f), varname=trim(varnames(ifld)), xtype=tape(t)%ncprec,& dim1name='lon', dim2name='lat', dim3name='levgrnd', & long_name=long_name, units=units, missing_value=spval, fill_value=spval, & varid=varid) else - call ncd_defvar(ncid=nfid(t), varname=trim(varnames(ifld)), xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=trim(varnames(ifld)), xtype=tape(t)%ncprec, & dim1name=grlnd, dim2name='levgrnd', & long_name=long_name, units=units, missing_value=spval, fill_value=spval, & varid=varid) end if - call add_landunit_mask_metadata(nfid(t), varid, l2g_scale_type(ifld)) + call add_landunit_mask_metadata(nfid(t,f), varid, l2g_scale_type(ifld)) else - call ncd_defvar(ncid=nfid(t), varname=trim(varnames(ifld)), xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=trim(varnames(ifld)), xtype=tape(t)%ncprec, & dim1name=namec, dim2name='levgrnd', & long_name=long_name, units=units, missing_value=spval, fill_value=spval) end if @@ -2849,14 +2900,14 @@ subroutine htape_timeconst3D(t, & if (ldomain%isgrid2d) then call ncd_io(varname=trim(varnames(ifld)), dim1name=grlnd, & - data=histo, ncid=nfid(t), flag='write') + data=histo, ncid=nfid(t,f), flag='write') else call ncd_io(varname=trim(varnames(ifld)), dim1name=grlnd, & - data=histo, ncid=nfid(t), flag='write') + data=histo, ncid=nfid(t,f), flag='write') end if else call ncd_io(varname=trim(varnames(ifld)), dim1name=namec, & - data=histi, ncid=nfid(t), flag='write') + data=histi, ncid=nfid(t,f), flag='write') end if end do @@ -2877,20 +2928,20 @@ subroutine htape_timeconst3D(t, & end if if (tape(t)%dov2xy) then if 
(ldomain%isgrid2d) then - call ncd_defvar(ncid=nfid(t), varname=trim(varnamesl(ifld)), xtype=tape(t)%ncprec,& + call ncd_defvar(ncid=nfid(t,f), varname=trim(varnamesl(ifld)), xtype=tape(t)%ncprec,& dim1name='lon', dim2name='lat', dim3name='levlak', & long_name=long_name, units=units, missing_value=spval, fill_value=spval, & varid=varid) else - call ncd_defvar(ncid=nfid(t), varname=trim(varnamesl(ifld)), xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=trim(varnamesl(ifld)), xtype=tape(t)%ncprec, & dim1name=grlnd, dim2name='levlak', & long_name=long_name, units=units, missing_value=spval, fill_value=spval, & varid=varid) end if - call add_landunit_mask_metadata(nfid(t), varid, l2g_scale_typel(ifld)) + call add_landunit_mask_metadata(nfid(t,f), varid, l2g_scale_typel(ifld)) else - call ncd_defvar(ncid=nfid(t), varname=trim(varnamesl(ifld)), xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=trim(varnamesl(ifld)), xtype=tape(t)%ncprec, & dim1name=namec, dim2name='levlak', & long_name=long_name, units=units, missing_value=spval, fill_value=spval) end if @@ -2935,14 +2986,14 @@ subroutine htape_timeconst3D(t, & c2l_scale_type='unity', l2g_scale_type=l2g_scale_typel(ifld)) if (ldomain%isgrid2d) then call ncd_io(varname=trim(varnamesl(ifld)), dim1name=grlnd, & - data=histol, ncid=nfid(t), flag='write') + data=histol, ncid=nfid(t,f), flag='write') else call ncd_io(varname=trim(varnamesl(ifld)), dim1name=grlnd, & - data=histol, ncid=nfid(t), flag='write') + data=histol, ncid=nfid(t,f), flag='write') end if else call ncd_io(varname=trim(varnamesl(ifld)), dim1name=namec, & - data=histil, ncid=nfid(t), flag='write') + data=histil, ncid=nfid(t,f), flag='write') end if end do @@ -2963,16 +3014,16 @@ subroutine htape_timeconst3D(t, & end if if (tape(t)%dov2xy) then if (ldomain%isgrid2d) then - call ncd_defvar(ncid=nfid(t), varname=trim(varnamest(ifld)), xtype=tape(t)%ncprec,& + call ncd_defvar(ncid=nfid(t,f), varname=trim(varnamest(ifld)), 
xtype=tape(t)%ncprec,& dim1name='lon', dim2name='lat', dim3name='levsoi', & long_name=long_name, units=units, missing_value=spval, fill_value=spval) else - call ncd_defvar(ncid=nfid(t), varname=trim(varnamest(ifld)), xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=trim(varnamest(ifld)), xtype=tape(t)%ncprec, & dim1name=grlnd, dim2name='levsoi', & long_name=long_name, units=units, missing_value=spval, fill_value=spval) end if else - call ncd_defvar(ncid=nfid(t), varname=trim(varnamest(ifld)), xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=trim(varnamest(ifld)), xtype=tape(t)%ncprec, & dim1name=namec, dim2name='levsoi', & long_name=long_name, units=units, missing_value=spval, fill_value=spval) end if @@ -3014,14 +3065,14 @@ subroutine htape_timeconst3D(t, & c2l_scale_type='unity', l2g_scale_type='veg') if (ldomain%isgrid2d) then call ncd_io(varname=trim(varnamest(ifld)), dim1name=grlnd, & - data=histot, ncid=nfid(t), flag='write') + data=histot, ncid=nfid(t,f), flag='write') else call ncd_io(varname=trim(varnamest(ifld)), dim1name=grlnd, & - data=histot, ncid=nfid(t), flag='write') + data=histot, ncid=nfid(t,f), flag='write') end if else call ncd_io(varname=trim(varnamest(ifld)), dim1name=namec, & - data=histit, ncid=nfid(t), flag='write') + data=histit, ncid=nfid(t,f), flag='write') end if end do @@ -3033,7 +3084,7 @@ subroutine htape_timeconst3D(t, & end subroutine htape_timeconst3D !----------------------------------------------------------------------- - subroutine htape_timeconst(t, mode) + subroutine htape_timeconst(t, f, mode) ! ! !DESCRIPTION: ! Write time constant values to primary history tape. @@ -3095,6 +3146,7 @@ subroutine htape_timeconst(t, mode) ! ! !ARGUMENTS: integer, intent(in) :: t ! tape index + integer, intent(in) :: f ! file index integer :: dtime ! timestep size character(len=*), intent(in) :: mode ! 'define' or 'write' ! 
@@ -3140,147 +3192,147 @@ subroutine htape_timeconst(t, mode) call get_proc_bounds(bounds) - if (tape(t)%ntimes == 1) then + if (tape(t)%ntimes(f) == 1) then if (mode == 'define') then call ncd_defvar(varname='levgrnd', xtype=tape(t)%ncprec, & dim1name='levgrnd', & - long_name='coordinate ground levels', units='m', ncid=nfid(t)) + long_name='coordinate ground levels', units='m', ncid=nfid(t,f)) call ncd_defvar(varname='levsoi', xtype=tape(t)%ncprec, & dim1name='levsoi', & - long_name='coordinate soil levels (equivalent to top nlevsoi levels of levgrnd)', units='m', ncid=nfid(t)) + long_name='coordinate soil levels (equivalent to top nlevsoi levels of levgrnd)', units='m', ncid=nfid(t,f)) call ncd_defvar(varname='levlak', xtype=tape(t)%ncprec, & dim1name='levlak', & - long_name='coordinate lake levels', units='m', ncid=nfid(t)) + long_name='coordinate lake levels', units='m', ncid=nfid(t,f)) call ncd_defvar(varname='levdcmp', xtype=tape(t)%ncprec, dim1name='levdcmp', & - long_name='coordinate levels for soil decomposition variables', units='m', ncid=nfid(t)) + long_name='coordinate levels for soil decomposition variables', units='m', ncid=nfid(t,f)) if (use_hillslope .and. 
.not.tape(t)%dov2xy)then call ncd_defvar(varname='hillslope_distance', xtype=ncd_double, & dim1name=namec, long_name='hillslope column distance', & - units='m', ncid=nfid(t)) + units='m', ncid=nfid(t,f)) call ncd_defvar(varname='hillslope_width', xtype=ncd_double, & dim1name=namec, long_name='hillslope column width', & - units='m', ncid=nfid(t)) + units='m', ncid=nfid(t,f)) call ncd_defvar(varname='hillslope_area', xtype=ncd_double, & dim1name=namec, long_name='hillslope column area', & - units='m2', ncid=nfid(t)) + units='m2', ncid=nfid(t,f)) call ncd_defvar(varname='hillslope_elev', xtype=ncd_double, & dim1name=namec, long_name='hillslope column elevation', & - units='m', ncid=nfid(t)) + units='m', ncid=nfid(t,f)) call ncd_defvar(varname='hillslope_slope', xtype=ncd_double, & dim1name=namec, long_name='hillslope column slope', & - units='m/m', ncid=nfid(t)) + units='m/m', ncid=nfid(t,f)) call ncd_defvar(varname='hillslope_aspect', xtype=ncd_double, & dim1name=namec, long_name='hillslope column aspect', & - units='radians', ncid=nfid(t)) + units='radians', ncid=nfid(t,f)) call ncd_defvar(varname='hillslope_index', xtype=ncd_int, & dim1name=namec, long_name='hillslope index', & - ncid=nfid(t)) + ncid=nfid(t,f)) call ncd_defvar(varname='hillslope_cold', xtype=ncd_int, & dim1name=namec, long_name='hillslope downhill column index', & - ncid=nfid(t)) + ncid=nfid(t,f)) call ncd_defvar(varname='hillslope_colu', xtype=ncd_int, & dim1name=namec, long_name='hillslope uphill column index', & - ncid=nfid(t)) + ncid=nfid(t,f)) end if if(use_fates)then call ncd_defvar(varname='fates_levscls', xtype=tape(t)%ncprec, dim1name='fates_levscls', & - long_name='FATES diameter size class lower bound', units='cm', ncid=nfid(t)) + long_name='FATES diameter size class lower bound', units='cm', ncid=nfid(t,f)) call ncd_defvar(varname='fates_scmap_levscag', xtype=ncd_int, dim1name='fates_levscag', & - long_name='FATES size-class map into size x patch age', units='-', ncid=nfid(t)) + long_name='FATES 
size-class map into size x patch age', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_agmap_levscag', xtype=ncd_int, dim1name='fates_levscag', & - long_name='FATES age-class map into size x patch age', units='-', ncid=nfid(t)) + long_name='FATES age-class map into size x patch age', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_pftmap_levscpf',xtype=ncd_int, dim1name='fates_levscpf', & - long_name='FATES pft index of the combined pft-size class dimension', units='-', ncid=nfid(t)) + long_name='FATES pft index of the combined pft-size class dimension', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_scmap_levscpf',xtype=ncd_int, dim1name='fates_levscpf', & - long_name='FATES size index of the combined pft-size class dimension', units='-', ncid=nfid(t)) + long_name='FATES size index of the combined pft-size class dimension', units='-', ncid=nfid(t,f)) ! Units are dash here with units of yr added to the long name so ! that postprocessors (like ferret) won't get confused with what ! the time coordinate is. 
EBK Nov/3/2021 (see #1540) call ncd_defvar(varname='fates_levcacls', xtype=tape(t)%ncprec, dim1name='fates_levcacls', & - long_name='FATES cohort age class lower bound (yr)', units='-', ncid=nfid(t)) + long_name='FATES cohort age class lower bound (yr)', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_pftmap_levcapf',xtype=ncd_int, dim1name='fates_levcapf', & - long_name='FATES pft index of the combined pft-cohort age class dimension', units='-', ncid=nfid(t)) + long_name='FATES pft index of the combined pft-cohort age class dimension', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_camap_levcapf',xtype=ncd_int, dim1name='fates_levcapf', & - long_name='FATES cohort age index of the combined pft-cohort age dimension', units='-', ncid=nfid(t)) + long_name='FATES cohort age index of the combined pft-cohort age dimension', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_levage',xtype=tape(t)%ncprec, dim1name='fates_levage', & - long_name='FATES patch age (yr)', ncid=nfid(t)) + long_name='FATES patch age (yr)', ncid=nfid(t,f)) call ncd_defvar(varname='fates_levheight',xtype=tape(t)%ncprec, dim1name='fates_levheight', & - long_name='FATES height (m)', ncid=nfid(t)) + long_name='FATES height (m)', ncid=nfid(t,f)) call ncd_defvar(varname='fates_levpft',xtype=ncd_int, dim1name='fates_levpft', & - long_name='FATES pft number', ncid=nfid(t)) + long_name='FATES pft number', ncid=nfid(t,f)) call ncd_defvar(varname='fates_levfuel',xtype=ncd_int, dim1name='fates_levfuel', & - long_name='FATES fuel index', ncid=nfid(t)) + long_name='FATES fuel index', ncid=nfid(t,f)) call ncd_defvar(varname='fates_levcwdsc',xtype=ncd_int, dim1name='fates_levcwdsc', & - long_name='FATES cwd size class', ncid=nfid(t)) + long_name='FATES cwd size class', ncid=nfid(t,f)) call ncd_defvar(varname='fates_levcan',xtype=ncd_int, dim1name='fates_levcan', & - long_name='FATES canopy level', ncid=nfid(t)) + long_name='FATES canopy level', ncid=nfid(t,f)) call 
ncd_defvar(varname='fates_levleaf',xtype=ncd_int, dim1name='fates_levleaf', & - long_name='FATES leaf+stem level', units='VAI', ncid=nfid(t)) + long_name='FATES leaf+stem level', units='VAI', ncid=nfid(t,f)) call ncd_defvar(varname='fates_canmap_levcnlf',xtype=ncd_int, dim1name='fates_levcnlf', & - long_name='FATES canopy level of combined canopy-leaf dimension', ncid=nfid(t)) + long_name='FATES canopy level of combined canopy-leaf dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_lfmap_levcnlf',xtype=ncd_int, dim1name='fates_levcnlf', & - long_name='FATES leaf level of combined canopy-leaf dimension', ncid=nfid(t)) + long_name='FATES leaf level of combined canopy-leaf dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_canmap_levcnlfpf',xtype=ncd_int, dim1name='fates_levcnlfpf', & - long_name='FATES canopy level of combined canopy x leaf x pft dimension', ncid=nfid(t)) + long_name='FATES canopy level of combined canopy x leaf x pft dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_lfmap_levcnlfpf',xtype=ncd_int, dim1name='fates_levcnlfpf', & - long_name='FATES leaf level of combined canopy x leaf x pft dimension', ncid=nfid(t)) + long_name='FATES leaf level of combined canopy x leaf x pft dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_pftmap_levcnlfpf',xtype=ncd_int, dim1name='fates_levcnlfpf', & - long_name='FATES PFT level of combined canopy x leaf x pft dimension', ncid=nfid(t)) + long_name='FATES PFT level of combined canopy x leaf x pft dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_scmap_levscagpft', xtype=ncd_int, dim1name='fates_levscagpf', & - long_name='FATES size-class map into size x patch age x pft', units='-', ncid=nfid(t)) + long_name='FATES size-class map into size x patch age x pft', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_agmap_levscagpft', xtype=ncd_int, dim1name='fates_levscagpf', & - long_name='FATES age-class map into size x patch age x pft', units='-', ncid=nfid(t)) + 
long_name='FATES age-class map into size x patch age x pft', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_pftmap_levscagpft', xtype=ncd_int, dim1name='fates_levscagpf', & - long_name='FATES pft map into size x patch age x pft', units='-', ncid=nfid(t)) + long_name='FATES pft map into size x patch age x pft', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_pftmap_levagepft', xtype=ncd_int, dim1name='fates_levagepft', & - long_name='FATES pft map into patch age x pft', units='-', ncid=nfid(t)) + long_name='FATES pft map into patch age x pft', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_agmap_levagepft', xtype=ncd_int, dim1name='fates_levagepft', & - long_name='FATES age-class map into patch age x pft', units='-', ncid=nfid(t)) + long_name='FATES age-class map into patch age x pft', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_agmap_levagefuel', xtype=ncd_int, dim1name='fates_levagefuel', & - long_name='FATES age-class map into patch age x fuel size', units='-', ncid=nfid(t)) + long_name='FATES age-class map into patch age x fuel size', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_fscmap_levagefuel', xtype=ncd_int, dim1name='fates_levagefuel', & - long_name='FATES fuel size-class map into patch age x fuel size', units='-', ncid=nfid(t)) + long_name='FATES fuel size-class map into patch age x fuel size', units='-', ncid=nfid(t,f)) call ncd_defvar(varname='fates_cdmap_levcdsc',xtype=ncd_int, dim1name='fates_levcdsc', & - long_name='FATES damage index of the combined damage-size dimension', ncid=nfid(t)) + long_name='FATES damage index of the combined damage-size dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_scmap_levcdsc',xtype=ncd_int, dim1name='fates_levcdsc', & - long_name='FATES size index of the combined damage-size dimension', ncid=nfid(t)) + long_name='FATES size index of the combined damage-size dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_cdmap_levcdpf',xtype=ncd_int, 
dim1name='fates_levcdpf', & - long_name='FATES damage index of the combined damage-size-PFT dimension', ncid=nfid(t)) + long_name='FATES damage index of the combined damage-size-PFT dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_scmap_levcdpf',xtype=ncd_int, dim1name='fates_levcdpf', & - long_name='FATES size index of the combined damage-size-PFT dimension', ncid=nfid(t)) + long_name='FATES size index of the combined damage-size-PFT dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_pftmap_levcdpf',xtype=ncd_int, dim1name='fates_levcdpf', & - long_name='FATES pft index of the combined damage-size-PFT dimension', ncid=nfid(t)) + long_name='FATES pft index of the combined damage-size-PFT dimension', ncid=nfid(t,f)) call ncd_defvar(varname='fates_levcdam', xtype=tape(t)%ncprec, dim1name='fates_levcdam', & - long_name='FATES damage class lower bound', units='unitless', ncid=nfid(t)) + long_name='FATES damage class lower bound', units='unitless', ncid=nfid(t,f)) call ncd_defvar(varname='fates_levlanduse',xtype=ncd_int, dim1name='fates_levlanduse', & - long_name='FATES land use label', ncid=nfid(t)) + long_name='FATES land use label', ncid=nfid(t,f)) end if elseif (mode == 'write') then if ( masterproc ) write(iulog, *) ' zsoi:',zsoi - call ncd_io(varname='levgrnd', data=zsoi, ncid=nfid(t), flag='write') - call ncd_io(varname='levsoi', data=zsoi(1:nlevsoi), ncid=nfid(t), flag='write') - call ncd_io(varname='levlak' , data=zlak, ncid=nfid(t), flag='write') + call ncd_io(varname='levgrnd', data=zsoi, ncid=nfid(t,f), flag='write') + call ncd_io(varname='levsoi', data=zsoi(1:nlevsoi), ncid=nfid(t,f), flag='write') + call ncd_io(varname='levlak' , data=zlak, ncid=nfid(t,f), flag='write') if ( decomp_method /= no_soil_decomp )then - call ncd_io(varname='levdcmp', data=zsoi, ncid=nfid(t), flag='write') + call ncd_io(varname='levdcmp', data=zsoi, ncid=nfid(t,f), flag='write') else zsoi_1d(1) = 1._r8 - call ncd_io(varname='levdcmp', data=zsoi_1d, ncid=nfid(t), 
flag='write') + call ncd_io(varname='levdcmp', data=zsoi_1d, ncid=nfid(t,f), flag='write') end if if (use_hillslope .and. .not.tape(t)%dov2xy) then - call ncd_io(varname='hillslope_distance' , data=col%hill_distance, dim1name=namec, ncid=nfid(t), flag='write') - call ncd_io(varname='hillslope_width' , data=col%hill_width, dim1name=namec, ncid=nfid(t), flag='write') - call ncd_io(varname='hillslope_area' , data=col%hill_area, dim1name=namec, ncid=nfid(t), flag='write') - call ncd_io(varname='hillslope_elev' , data=col%hill_elev, dim1name=namec, ncid=nfid(t), flag='write') - call ncd_io(varname='hillslope_slope' , data=col%hill_slope, dim1name=namec, ncid=nfid(t), flag='write') - call ncd_io(varname='hillslope_aspect' , data=col%hill_aspect, dim1name=namec, ncid=nfid(t), flag='write') - call ncd_io(varname='hillslope_index' , data=col%hillslope_ndx, dim1name=namec, ncid=nfid(t), flag='write') + call ncd_io(varname='hillslope_distance' , data=col%hill_distance, dim1name=namec, ncid=nfid(t,f), flag='write') + call ncd_io(varname='hillslope_width' , data=col%hill_width, dim1name=namec, ncid=nfid(t,f), flag='write') + call ncd_io(varname='hillslope_area' , data=col%hill_area, dim1name=namec, ncid=nfid(t,f), flag='write') + call ncd_io(varname='hillslope_elev' , data=col%hill_elev, dim1name=namec, ncid=nfid(t,f), flag='write') + call ncd_io(varname='hillslope_slope' , data=col%hill_slope, dim1name=namec, ncid=nfid(t,f), flag='write') + call ncd_io(varname='hillslope_aspect' , data=col%hill_aspect, dim1name=namec, ncid=nfid(t,f), flag='write') + call ncd_io(varname='hillslope_index' , data=col%hillslope_ndx, dim1name=namec, ncid=nfid(t,f), flag='write') ! 
write global indices rather than local indices allocate(icarr(bounds%begc:bounds%endc),stat=ier) @@ -3296,7 +3348,7 @@ subroutine htape_timeconst(t, mode) endif enddo - call ncd_io(varname='hillslope_cold' , data=icarr, dim1name=namec, ncid=nfid(t), flag='write') + call ncd_io(varname='hillslope_cold' , data=icarr, dim1name=namec, ncid=nfid(t,f), flag='write') do c = bounds%begc,bounds%endc if (col%colu(c) /= ispval) then @@ -3306,45 +3358,45 @@ subroutine htape_timeconst(t, mode) endif enddo - call ncd_io(varname='hillslope_colu' , data=icarr, dim1name=namec, ncid=nfid(t), flag='write') + call ncd_io(varname='hillslope_colu' , data=icarr, dim1name=namec, ncid=nfid(t,f), flag='write') deallocate(icarr) endif if(use_fates)then - call ncd_io(varname='fates_scmap_levscag',data=fates_hdim_scmap_levscag, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_agmap_levscag',data=fates_hdim_agmap_levscag, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levscls',data=fates_hdim_levsclass, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levcacls',data=fates_hdim_levcoage, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_pftmap_levscpf',data=fates_hdim_pfmap_levscpf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_scmap_levscpf',data=fates_hdim_scmap_levscpf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_pftmap_levcapf',data=fates_hdim_pfmap_levcapf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_camap_levcapf',data=fates_hdim_camap_levcapf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levage',data=fates_hdim_levage, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levheight',data=fates_hdim_levheight, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levpft',data=fates_hdim_levpft, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levfuel',data=fates_hdim_levfuel, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levcdam',data=fates_hdim_levdamage, ncid=nfid(t), flag='write') 
- call ncd_io(varname='fates_levcwdsc',data=fates_hdim_levcwdsc, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levcan',data=fates_hdim_levcan, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levleaf',data=fates_hdim_levleaf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_canmap_levcnlf',data=fates_hdim_canmap_levcnlf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_lfmap_levcnlf',data=fates_hdim_lfmap_levcnlf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_canmap_levcnlfpf',data=fates_hdim_canmap_levcnlfpf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_lfmap_levcnlfpf',data=fates_hdim_lfmap_levcnlfpf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_pftmap_levcnlfpf',data=fates_hdim_pftmap_levcnlfpf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_scmap_levscagpft',data=fates_hdim_scmap_levscagpft, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_agmap_levscagpft',data=fates_hdim_agmap_levscagpft, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_pftmap_levscagpft',data=fates_hdim_pftmap_levscagpft, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_pftmap_levagepft',data=fates_hdim_pftmap_levagepft, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_agmap_levagepft',data=fates_hdim_agmap_levagepft, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_agmap_levagefuel',data=fates_hdim_agmap_levagefuel, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_fscmap_levagefuel',data=fates_hdim_fscmap_levagefuel, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_scmap_levcdsc',data=fates_hdim_scmap_levcdsc, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_cdmap_levcdsc',data=fates_hdim_cdmap_levcdsc, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_scmap_levcdpf',data=fates_hdim_scmap_levcdpf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_cdmap_levcdpf',data=fates_hdim_cdmap_levcdpf, ncid=nfid(t), flag='write') - call 
ncd_io(varname='fates_pftmap_levcdpf',data=fates_hdim_pftmap_levcdpf, ncid=nfid(t), flag='write') - call ncd_io(varname='fates_levlanduse',data=fates_hdim_levlanduse, ncid=nfid(t), flag='write') + call ncd_io(varname='fates_scmap_levscag',data=fates_hdim_scmap_levscag, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_agmap_levscag',data=fates_hdim_agmap_levscag, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levscls',data=fates_hdim_levsclass, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levcacls',data=fates_hdim_levcoage, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_pftmap_levscpf',data=fates_hdim_pfmap_levscpf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_scmap_levscpf',data=fates_hdim_scmap_levscpf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_pftmap_levcapf',data=fates_hdim_pfmap_levcapf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_camap_levcapf',data=fates_hdim_camap_levcapf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levage',data=fates_hdim_levage, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levheight',data=fates_hdim_levheight, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levpft',data=fates_hdim_levpft, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levfuel',data=fates_hdim_levfuel, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levcdam',data=fates_hdim_levdamage, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levcwdsc',data=fates_hdim_levcwdsc, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levcan',data=fates_hdim_levcan, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levleaf',data=fates_hdim_levleaf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_canmap_levcnlf',data=fates_hdim_canmap_levcnlf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_lfmap_levcnlf',data=fates_hdim_lfmap_levcnlf, ncid=nfid(t,f), flag='write') + call 
ncd_io(varname='fates_canmap_levcnlfpf',data=fates_hdim_canmap_levcnlfpf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_lfmap_levcnlfpf',data=fates_hdim_lfmap_levcnlfpf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_pftmap_levcnlfpf',data=fates_hdim_pftmap_levcnlfpf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_scmap_levscagpft',data=fates_hdim_scmap_levscagpft, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_agmap_levscagpft',data=fates_hdim_agmap_levscagpft, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_pftmap_levscagpft',data=fates_hdim_pftmap_levscagpft, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_pftmap_levagepft',data=fates_hdim_pftmap_levagepft, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_agmap_levagepft',data=fates_hdim_agmap_levagepft, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_agmap_levagefuel',data=fates_hdim_agmap_levagefuel, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_fscmap_levagefuel',data=fates_hdim_fscmap_levagefuel, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_scmap_levcdsc',data=fates_hdim_scmap_levcdsc, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_cdmap_levcdsc',data=fates_hdim_cdmap_levcdsc, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_scmap_levcdpf',data=fates_hdim_scmap_levcdpf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_cdmap_levcdpf',data=fates_hdim_cdmap_levcdpf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_pftmap_levcdpf',data=fates_hdim_pftmap_levcdpf, ncid=nfid(t,f), flag='write') + call ncd_io(varname='fates_levlanduse',data=fates_hdim_levlanduse, ncid=nfid(t,f), flag='write') end if endif @@ -3355,7 +3407,7 @@ subroutine htape_timeconst(t, mode) !------------------------------------------------------------------------------- ! For define mode -- only do this for first time-sample - if (mode == 'define' .and. 
tape(t)%ntimes == 1) then + if (mode == 'define' .and. tape(t)%ntimes(f) == 1) then call get_ref_date(yr, mon, day, nbsec) nstep = get_nstep() hours = nbsec / 3600 @@ -3368,16 +3420,16 @@ subroutine htape_timeconst(t, mode) dim1id(1) = time_dimid str = 'days since ' // basedate // " " // basesec - if (hist_avgflag_pertape(t) /= 'I') then ! NOT instantaneous fields tape + if (f == accumulated_file_index) then step_or_bounds = 'time_bounds' long_name = 'time at exact middle of ' // step_or_bounds - call ncd_defvar(nfid(t), 'time', tape(t)%ncprec, 1, dim1id, varid, & + call ncd_defvar(nfid(t,f), 'time', tape(t)%ncprec, 1, dim1id, varid, & long_name=long_name, units=str) - call ncd_putatt(nfid(t), varid, 'bounds', 'time_bounds') - else ! instantaneous fields tape + call ncd_putatt(nfid(t,f), varid, 'bounds', 'time_bounds') + else ! instantaneous file step_or_bounds = 'time step' long_name = 'time at end of ' // step_or_bounds - call ncd_defvar(nfid(t), 'time', tape(t)%ncprec, 1, dim1id, varid, & + call ncd_defvar(nfid(t,f), 'time', tape(t)%ncprec, 1, dim1id, varid, & long_name=long_name, units=str) end if cal = get_calendar() @@ -3386,13 +3438,13 @@ subroutine htape_timeconst(t, mode) else if ( trim(cal) == GREGORIAN_C )then caldesc = "gregorian" end if - call ncd_putatt(nfid(t), varid, 'calendar', caldesc) + call ncd_putatt(nfid(t,f), varid, 'calendar', caldesc) dim1id(1) = time_dimid long_name = 'current date (YYYYMMDD) at end of ' // step_or_bounds - call ncd_defvar(nfid(t) , 'mcdate', ncd_int, 1, dim1id , varid, & + call ncd_defvar(nfid(t,f) , 'mcdate', ncd_int, 1, dim1id , varid, & long_name = long_name) - call ncd_putatt(nfid(t), varid, 'calendar', caldesc) + call ncd_putatt(nfid(t,f), varid, 'calendar', caldesc) ! ! add global attribute time_period_freq ! 
@@ -3416,42 +3468,42 @@ subroutine htape_timeconst(t, mode) end if 999 format(a,i0) - call ncd_putatt(nfid(t), ncd_global, 'time_period_freq', & + call ncd_putatt(nfid(t,f), ncd_global, 'time_period_freq', & trim(time_period_freq)) long_name = 'current seconds of current date at end of ' // step_or_bounds - call ncd_defvar(nfid(t) , 'mcsec' , ncd_int, 1, dim1id , varid, & + call ncd_defvar(nfid(t,f) , 'mcsec' , ncd_int, 1, dim1id , varid, & long_name = long_name, units='s') - call ncd_putatt(nfid(t), varid, 'calendar', caldesc) + call ncd_putatt(nfid(t,f), varid, 'calendar', caldesc) long_name = 'current day (from base day) at end of ' // step_or_bounds - call ncd_defvar(nfid(t) , 'mdcur' , ncd_int, 1, dim1id , varid, & + call ncd_defvar(nfid(t,f) , 'mdcur' , ncd_int, 1, dim1id , varid, & long_name = long_name) - call ncd_putatt(nfid(t), varid, 'calendar', caldesc) + call ncd_putatt(nfid(t,f), varid, 'calendar', caldesc) long_name = 'current seconds of current day at end of ' // step_or_bounds - call ncd_defvar(nfid(t) , 'mscur' , ncd_int, 1, dim1id , varid, & + call ncd_defvar(nfid(t,f) , 'mscur' , ncd_int, 1, dim1id , varid, & long_name = long_name) - call ncd_putatt(nfid(t), varid, 'calendar', caldesc) - call ncd_defvar(nfid(t) , 'nstep' , ncd_int, 1, dim1id , varid, & + call ncd_putatt(nfid(t,f), varid, 'calendar', caldesc) + call ncd_defvar(nfid(t,f) , 'nstep' , ncd_int, 1, dim1id , varid, & long_name = 'time step') dim2id(1) = nbnd_dimid; dim2id(2) = time_dimid - if (hist_avgflag_pertape(t) /= 'I') then ! 
NOT instantaneous fields tape - call ncd_defvar(nfid(t), 'time_bounds', ncd_double, 2, dim2id, varid, & + if (f == accumulated_file_index) then + call ncd_defvar(nfid(t,f), 'time_bounds', ncd_double, 2, dim2id, varid, & long_name = 'time interval endpoints', & units = str) - call ncd_putatt(nfid(t), varid, 'calendar', caldesc) + call ncd_putatt(nfid(t,f), varid, 'calendar', caldesc) end if dim2id(1) = strlen_dimid; dim2id(2) = time_dimid - call ncd_defvar(nfid(t), 'date_written', ncd_char, 2, dim2id, varid) - call ncd_defvar(nfid(t), 'time_written', ncd_char, 2, dim2id, varid) + call ncd_defvar(nfid(t,f), 'date_written', ncd_char, 2, dim2id, varid) + call ncd_defvar(nfid(t,f), 'time_written', ncd_char, 2, dim2id, varid) if ( len_trim(TimeConst3DVars_Filename) > 0 )then - call ncd_putatt(nfid(t), ncd_global, 'Time_constant_3Dvars_filename', & + call ncd_putatt(nfid(t,f), ncd_global, 'Time_constant_3Dvars_filename', & trim(TimeConst3DVars_Filename)) end if if ( len_trim(TimeConst3DVars) > 0 )then - call ncd_putatt(nfid(t), ncd_global, 'Time_constant_3Dvars', & + call ncd_putatt(nfid(t,f), ncd_global, 'Time_constant_3Dvars', & trim(TimeConst3DVars)) end if @@ -3462,26 +3514,26 @@ subroutine htape_timeconst(t, mode) mcdate = yr*10000 + mon*100 + day nstep = get_nstep() - call ncd_io('mcdate', mcdate, 'write', nfid(t), nt=tape(t)%ntimes) - call ncd_io('mcsec' , mcsec , 'write', nfid(t), nt=tape(t)%ntimes) - call ncd_io('mdcur' , mdcur , 'write', nfid(t), nt=tape(t)%ntimes) - call ncd_io('mscur' , mscur , 'write', nfid(t), nt=tape(t)%ntimes) - call ncd_io('nstep' , nstep , 'write', nfid(t), nt=tape(t)%ntimes) + call ncd_io('mcdate', mcdate, 'write', nfid(t,f), nt=tape(t)%ntimes(f)) + call ncd_io('mcsec' , mcsec , 'write', nfid(t,f), nt=tape(t)%ntimes(f)) + call ncd_io('mdcur' , mdcur , 'write', nfid(t,f), nt=tape(t)%ntimes(f)) + call ncd_io('mscur' , mscur , 'write', nfid(t,f), nt=tape(t)%ntimes(f)) + call ncd_io('nstep' , nstep , 'write', nfid(t,f), 
nt=tape(t)%ntimes(f)) timedata(1) = tape(t)%begtime ! beginning time timedata(2) = mdcur + mscur/secspday ! end time - if (hist_avgflag_pertape(t) /= 'I') then ! NOT instantaneous fields tape + if (f == accumulated_file_index) then time = (timedata(1) + timedata(2)) * 0.5_r8 - call ncd_io('time_bounds', timedata, 'write', nfid(t), nt=tape(t)%ntimes) - else + call ncd_io('time_bounds', timedata, 'write', nfid(t,f), nt=tape(t)%ntimes(f)) + else ! instantaneous file time = timedata(2) end if - call ncd_io('time' , time , 'write', nfid(t), nt=tape(t)%ntimes) + call ncd_io('time' , time , 'write', nfid(t,f), nt=tape(t)%ntimes(f)) call getdatetime (cdate, ctime) - call ncd_io('date_written', cdate, 'write', nfid(t), nt=tape(t)%ntimes) + call ncd_io('date_written', cdate, 'write', nfid(t,f), nt=tape(t)%ntimes(f)) - call ncd_io('time_written', ctime, 'write', nfid(t), nt=tape(t)%ntimes) + call ncd_io('time_written', ctime, 'write', nfid(t,f), nt=tape(t)%ntimes(f)) endif @@ -3489,96 +3541,96 @@ subroutine htape_timeconst(t, mode) !*** Grid definition variables *** !------------------------------------------------------------------------------- ! For define mode -- only do this for first time-sample - if (mode == 'define' .and. tape(t)%ntimes == 1) then + if (mode == 'define' .and. 
tape(t)%ntimes(f) == 1) then if (ldomain%isgrid2d) then call ncd_defvar(varname='lon', xtype=tape(t)%ncprec, dim1name='lon', & long_name='coordinate longitude', units='degrees_east', & - ncid=nfid(t), missing_value=spval, fill_value=spval) + ncid=nfid(t,f), missing_value=spval, fill_value=spval) else call ncd_defvar(varname='lon', xtype=tape(t)%ncprec, & dim1name=grlnd, & - long_name='coordinate longitude', units='degrees_east', ncid=nfid(t), & + long_name='coordinate longitude', units='degrees_east', ncid=nfid(t,f), & missing_value=spval, fill_value=spval) end if if (ldomain%isgrid2d) then call ncd_defvar(varname='lat', xtype=tape(t)%ncprec, dim1name='lat', & long_name='coordinate latitude', units='degrees_north', & - ncid=nfid(t), missing_value=spval, fill_value=spval) + ncid=nfid(t,f), missing_value=spval, fill_value=spval) else call ncd_defvar(varname='lat', xtype=tape(t)%ncprec, & dim1name=grlnd, & - long_name='coordinate latitude', units='degrees_north', ncid=nfid(t), & + long_name='coordinate latitude', units='degrees_north', ncid=nfid(t,f), & missing_value=spval, fill_value=spval) end if if (ldomain%isgrid2d) then call ncd_defvar(varname='area', xtype=tape(t)%ncprec, & dim1name='lon', dim2name='lat',& - long_name='grid cell areas', units='km^2', ncid=nfid(t), & + long_name='grid cell areas', units='km^2', ncid=nfid(t,f), & missing_value=spval, fill_value=spval) else call ncd_defvar(varname='area', xtype=tape(t)%ncprec, & dim1name=grlnd, & - long_name='grid cell areas', units='km^2', ncid=nfid(t), & + long_name='grid cell areas', units='km^2', ncid=nfid(t,f), & missing_value=spval, fill_value=spval) end if if (ldomain%isgrid2d) then call ncd_defvar(varname='landfrac', xtype=tape(t)%ncprec, & dim1name='lon', dim2name='lat', & - long_name='land fraction', ncid=nfid(t), & + long_name='land fraction', ncid=nfid(t,f), & missing_value=spval, fill_value=spval) else call ncd_defvar(varname='landfrac', xtype=tape(t)%ncprec, & dim1name=grlnd, & - long_name='land 
fraction', ncid=nfid(t), & + long_name='land fraction', ncid=nfid(t,f), & missing_value=spval, fill_value=spval) end if if (ldomain%isgrid2d) then call ncd_defvar(varname='landmask', xtype=ncd_int, & dim1name='lon', dim2name='lat', & - long_name='land/ocean mask (0.=ocean and 1.=land)', ncid=nfid(t), & + long_name='land/ocean mask (0.=ocean and 1.=land)', ncid=nfid(t,f), & imissing_value=ispval, ifill_value=ispval) else call ncd_defvar(varname='landmask', xtype=ncd_int, & dim1name=grlnd, & - long_name='land/ocean mask (0.=ocean and 1.=land)', ncid=nfid(t), & + long_name='land/ocean mask (0.=ocean and 1.=land)', ncid=nfid(t,f), & imissing_value=ispval, ifill_value=ispval) end if if (ldomain%isgrid2d) then call ncd_defvar(varname='nbedrock' , xtype=ncd_int, & dim1name='lon', dim2name='lat', & - long_name='index of shallowest bedrock layer', ncid=nfid(t), & + long_name='index of shallowest bedrock layer', ncid=nfid(t,f), & imissing_value=ispval, ifill_value=ispval) else call ncd_defvar(varname='nbedrock' , xtype=ncd_int, & dim1name=grlnd, & - long_name='index of shallowest bedrock layer', ncid=nfid(t), & + long_name='index of shallowest bedrock layer', ncid=nfid(t,f), & imissing_value=ispval, ifill_value=ispval) end if else if (mode == 'write') then - ! Most of this is constant and only needs to be done on tape(t)%ntimes=1 + ! Most of this is constant and only needs to be done on tape(t)%ntimes(f)=1 ! 
But, some may change for dynamic PATCH mode for example if (ldomain%isgrid2d) then - call ncd_io(varname='lon', data=lon1d, ncid=nfid(t), flag='write') - call ncd_io(varname='lat', data=lat1d, ncid=nfid(t), flag='write') + call ncd_io(varname='lon', data=lon1d, ncid=nfid(t,f), flag='write') + call ncd_io(varname='lat', data=lat1d, ncid=nfid(t,f), flag='write') else - call ncd_io(varname='lon', data=ldomain%lonc, dim1name=grlnd, ncid=nfid(t), flag='write') - call ncd_io(varname='lat', data=ldomain%latc, dim1name=grlnd, ncid=nfid(t), flag='write') + call ncd_io(varname='lon', data=ldomain%lonc, dim1name=grlnd, ncid=nfid(t,f), flag='write') + call ncd_io(varname='lat', data=ldomain%latc, dim1name=grlnd, ncid=nfid(t,f), flag='write') end if - call ncd_io(varname='area' , data=ldomain%area, dim1name=grlnd, ncid=nfid(t), flag='write') - call ncd_io(varname='landfrac', data=ldomain%frac, dim1name=grlnd, ncid=nfid(t), flag='write') - call ncd_io(varname='landmask', data=ldomain%mask, dim1name=grlnd, ncid=nfid(t), flag='write') - call ncd_io(varname='nbedrock' , data=grc%nbedrock, dim1name=grlnd, ncid=nfid(t), flag='write') + call ncd_io(varname='area' , data=ldomain%area, dim1name=grlnd, ncid=nfid(t,f), flag='write') + call ncd_io(varname='landfrac', data=ldomain%frac, dim1name=grlnd, ncid=nfid(t,f), flag='write') + call ncd_io(varname='landmask', data=ldomain%mask, dim1name=grlnd, ncid=nfid(t,f), flag='write') + call ncd_io(varname='nbedrock' , data=grc%nbedrock, dim1name=grlnd, ncid=nfid(t,f), flag='write') end if ! (define/write mode end subroutine htape_timeconst !----------------------------------------------------------------------- - subroutine hfields_write(t, mode) + subroutine hfields_write(t, f, mode) ! ! !DESCRIPTION: ! Write history tape. Issue the call to write the variable. @@ -3588,10 +3640,11 @@ subroutine hfields_write(t, mode) ! ! !ARGUMENTS: integer, intent(in) :: t ! tape index + integer, intent(in) :: f ! 
file index character(len=*), intent(in) :: mode ! 'define' or 'write' ! ! !LOCAL VARIABLES: - integer :: f ! field index + integer :: fld ! field index integer :: k ! 1d index integer :: c,l,p ! indices integer :: beg1d ! on-node 1d field pointer start index @@ -3624,34 +3677,34 @@ subroutine hfields_write(t, mode) if (.not. tape(t)%dov2xy) then if (mode == 'define') then - call hfields_1dinfo(t, mode='define') + call hfields_1dinfo(t, f, mode='define') else if (mode == 'write') then - call hfields_1dinfo(t, mode='write') + call hfields_1dinfo(t, f, mode='write') end if end if ! Define time-dependent variables create variables and attributes for field list - do f = 1,tape(t)%nflds + fld_loop: do fld = 1, tape(t)%nflds(f) ! Set history field variables - varname = tape(t)%hlist(f)%field%name - long_name = tape(t)%hlist(f)%field%long_name - units = tape(t)%hlist(f)%field%units - avgflag = tape(t)%hlist(f)%avgflag - type1d = tape(t)%hlist(f)%field%type1d - type1d_out = tape(t)%hlist(f)%field%type1d_out - beg1d = tape(t)%hlist(f)%field%beg1d - end1d = tape(t)%hlist(f)%field%end1d - beg1d_out = tape(t)%hlist(f)%field%beg1d_out - end1d_out = tape(t)%hlist(f)%field%end1d_out - num1d_out = tape(t)%hlist(f)%field%num1d_out - type2d = tape(t)%hlist(f)%field%type2d - numdims = tape(t)%hlist(f)%field%numdims - num2d = tape(t)%hlist(f)%field%num2d - l2g_scale_type = tape(t)%hlist(f)%field%l2g_scale_type - nt = tape(t)%ntimes + varname = tape(t)%hlist(fld,f)%field%name + long_name = tape(t)%hlist(fld,f)%field%long_name + units = tape(t)%hlist(fld,f)%field%units + avgflag = tape(t)%hlist(fld,f)%avgflag + type1d = tape(t)%hlist(fld,f)%field%type1d + type1d_out = tape(t)%hlist(fld,f)%field%type1d_out + beg1d = tape(t)%hlist(fld,f)%field%beg1d + end1d = tape(t)%hlist(fld,f)%field%end1d + beg1d_out = tape(t)%hlist(fld,f)%field%beg1d_out + end1d_out = tape(t)%hlist(fld,f)%field%end1d_out + num1d_out = tape(t)%hlist(fld,f)%field%num1d_out + type2d = tape(t)%hlist(fld,f)%field%type2d + 
numdims = tape(t)%hlist(fld,f)%field%numdims + num2d = tape(t)%hlist(fld,f)%field%num2d + l2g_scale_type = tape(t)%hlist(fld,f)%field%l2g_scale_type + nt = tape(t)%ntimes(f) if (mode == 'define') then @@ -3685,13 +3738,13 @@ subroutine hfields_write(t, mode) if (dim2name == 'undefined') then if (numdims == 1) then - call ncd_defvar(ncid=nfid(t), varname=varname, xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=varname, xtype=tape(t)%ncprec, & dim1name=dim1name, dim2name='time', & long_name=long_name, units=units, cell_method=avgstr, & missing_value=spval, fill_value=spval, & varid=varid) else - call ncd_defvar(ncid=nfid(t), varname=varname, xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=varname, xtype=tape(t)%ncprec, & dim1name=dim1name, dim2name=type2d, dim3name='time', & long_name=long_name, units=units, cell_method=avgstr, & missing_value=spval, fill_value=spval, & @@ -3699,13 +3752,13 @@ subroutine hfields_write(t, mode) end if else if (numdims == 1) then - call ncd_defvar(ncid=nfid(t), varname=varname, xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=varname, xtype=tape(t)%ncprec, & dim1name=dim1name, dim2name=dim2name, dim3name='time', & long_name=long_name, units=units, cell_method=avgstr, & missing_value=spval, fill_value=spval, & varid=varid) else - call ncd_defvar(ncid=nfid(t), varname=varname, xtype=tape(t)%ncprec, & + call ncd_defvar(ncid=nfid(t,f), varname=varname, xtype=tape(t)%ncprec, & dim1name=dim1name, dim2name=dim2name, dim3name=type2d, dim4name='time', & long_name=long_name, units=units, cell_method=avgstr, & missing_value=spval, fill_value=spval, & @@ -3714,14 +3767,14 @@ subroutine hfields_write(t, mode) endif if (type1d_out == nameg .or. type1d_out == grlnd) then - call add_landunit_mask_metadata(nfid(t), varid, l2g_scale_type) + call add_landunit_mask_metadata(nfid(t,f), varid, l2g_scale_type) end if else if (mode == 'write') then ! 
Determine output buffer - histo => tape(t)%hlist(f)%hbuf + histo => tape(t)%hlist(fld,f)%hbuf ! Allocate dynamic memory @@ -3738,10 +3791,10 @@ subroutine hfields_write(t, mode) if (numdims == 1) then call ncd_io(flag='write', varname=varname, & - dim1name=type1d_out, data=hist1do, ncid=nfid(t), nt=nt) + dim1name=type1d_out, data=hist1do, ncid=nfid(t,f), nt=nt) else call ncd_io(flag='write', varname=varname, & - dim1name=type1d_out, data=histo, ncid=nfid(t), nt=nt) + dim1name=type1d_out, data=histo, ncid=nfid(t,f), nt=nt) end if @@ -3753,12 +3806,12 @@ subroutine hfields_write(t, mode) end if - end do + end do fld_loop end subroutine hfields_write !----------------------------------------------------------------------- - subroutine hfields_1dinfo(t, mode) + subroutine hfields_1dinfo(t, f, mode) ! ! !DESCRIPTION: ! Write/define 1d info for history tape. @@ -3769,10 +3822,10 @@ subroutine hfields_1dinfo(t, mode) ! ! !ARGUMENTS: integer, intent(in) :: t ! tape index + integer, intent(in) :: f ! file index character(len=*), intent(in) :: mode ! 'define' or 'write' ! ! !LOCAL VARIABLES: - integer :: f ! field index integer :: k ! 1d index integer :: g,c,l,p ! indices integer :: ier ! errir status @@ -3792,7 +3845,7 @@ subroutine hfields_1dinfo(t, mode) call get_proc_bounds(bounds) - ncid => nfid(t) + ncid => nfid(t,f) if (mode == 'define') then @@ -4123,7 +4176,8 @@ subroutine hist_htapes_wrapup( rstwr, nlend, bounds, & ! ! !LOCAL VARIABLES: integer :: t ! tape index - integer :: f ! field index + integer :: f ! file index + integer :: fld ! field index integer :: ier ! error code integer :: nstep ! current step integer :: day ! current day (1 -> 31) @@ -4166,151 +4220,157 @@ subroutine hist_htapes_wrapup( rstwr, nlend, bounds, & ! Loop over active history tapes, create new history files if necessary ! and write data to history files if end of history interval. - do t = 1, ntapes - - if (.not. history_tape_in_use(t)) then - cycle - end if - - ! 
Determine if end of history interval - tape(t)%is_endhist = .false. - if (tape(t)%nhtfrq==0) then !monthly average - if (mon /= monm1) tape(t)%is_endhist = .true. - else - if (mod(nstep,tape(t)%nhtfrq) == 0) tape(t)%is_endhist = .true. - end if + tape_loop1: do t = 1, ntapes + file_loop1: do f = 1, max_split_files - ! If end of history interval + if (.not. history_tape_in_use(t,f)) then + cycle + end if - if (tape(t)%is_endhist) then + ! Determine if end of history interval + tape(t)%is_endhist = .false. + if (tape(t)%nhtfrq==0) then !monthly average + if (mon /= monm1) tape(t)%is_endhist = .true. + else + if (mod(nstep,tape(t)%nhtfrq) == 0) tape(t)%is_endhist = .true. + end if - ! Normalize history buffer if time averaged + ! If end of history interval - call hfields_normalize(t) + if (tape(t)%is_endhist) then - ! Increment current time sample counter. + ! Normalize history buffer if time averaged - tape(t)%ntimes = tape(t)%ntimes + 1 + call hfields_normalize(t, f) - ! Create history file if appropriate and build time comment + ! Increment current time sample counter. - ! If first time sample, generate unique history file name, open file, - ! define dims, vars, etc. + tape(t)%ntimes(f) = tape(t)%ntimes(f) + 1 + ! Create history file if appropriate and build time comment - if (tape(t)%ntimes == 1) then - call t_startf('hist_htapes_wrapup_define') - locfnh(t) = set_hist_filename (hist_freq=tape(t)%nhtfrq, & - hist_mfilt=tape(t)%mfilt, hist_file=t) - if (masterproc) then - write(iulog,*) trim(subname),' : Creating history file ', trim(locfnh(t)), & - ' at nstep = ',get_nstep() - write(iulog,*)'calling htape_create for file t = ',t - endif - call htape_create (t) + ! If first time sample, generate unique history file name, open file, + ! define dims, vars, etc. - ! 
Define time-constant field variables - call htape_timeconst(t, mode='define') + if (tape(t)%ntimes(f) == 1) then + call t_startf('hist_htapes_wrapup_define') + locfnh(t,f) = set_hist_filename (hist_freq=tape(t)%nhtfrq, & + hist_mfilt=tape(t)%mfilt, hist_file=t, f_index=f) + if (masterproc) then + write(iulog,*) trim(subname),' : Creating history file ', trim(locfnh(t,f)), & + ' at nstep = ',get_nstep() + write(iulog,*)'calling htape_create for tape t and file f = ', t, f + endif + call htape_create (t, f) - ! Define 3D time-constant field variables on first history tapes - if ( do_3Dtconst .and. t == 1) then - call htape_timeconst3D(t, & - bounds, watsat_col, sucsat_col, bsw_col, hksat_col, & - cellsand_col, cellclay_col, mode='define') - TimeConst3DVars_Filename = trim(locfnh(t)) - end if + ! Define time-constant field variables + call htape_timeconst(t, f, mode='define') - ! Define model field variables - call hfields_write(t, mode='define') + ! Define 3D time-constant field variables on first history tapes + if ( do_3Dtconst .and. t == 1) then + call htape_timeconst3D(t, f, & + bounds, watsat_col, sucsat_col, bsw_col, hksat_col, & + cellsand_col, cellclay_col, mode='define') + TimeConst3DVars_Filename = trim(locfnh(t,f)) + end if - ! Exit define model - call ncd_enddef(nfid(t)) - call t_stopf('hist_htapes_wrapup_define') - endif + ! Define model field variables + call hfields_write(t, f, mode='define') - call t_startf('hist_htapes_wrapup_tconst') - ! Write time constant history variables - call htape_timeconst(t, mode='write') + ! Exit define model + call ncd_enddef(nfid(t,f)) + call t_stopf('hist_htapes_wrapup_define') + endif - ! Write 3D time constant history variables to first history tapes - if ( do_3Dtconst .and. t == 1 .and. tape(t)%ntimes == 1 )then - call htape_timeconst3D(t, & - bounds, watsat_col, sucsat_col, bsw_col, hksat_col, & - cellsand_col, cellclay_col, mode='write') - do_3Dtconst = .false. 
- end if + call t_startf('hist_htapes_wrapup_tconst') + ! Write time constant history variables + call htape_timeconst(t, f, mode='write') - if (masterproc) then - write(iulog,*) - write(iulog,*) trim(subname),' : Writing current time sample to local history file ', & - trim(locfnh(t)),' at nstep = ',get_nstep(), & - ' for history time interval beginning at ', tape(t)%begtime, & - ' and ending at ',time - write(iulog,*) - call shr_sys_flush(iulog) - endif + ! Write 3D time constant history variables to first history tapes + if ( do_3Dtconst .and. t == 1 .and. tape(t)%ntimes(f) == 1 )then + call htape_timeconst3D(t, f, & + bounds, watsat_col, sucsat_col, bsw_col, hksat_col, & + cellsand_col, cellclay_col, mode='write') + do_3Dtconst = .false. + end if - ! Update beginning time of next interval - tape(t)%begtime = time - call t_stopf('hist_htapes_wrapup_tconst') + if (masterproc) then + write(iulog,*) + write(iulog,*) trim(subname),' : Writing current time sample to local history file ', & + trim(locfnh(t,f)),' at nstep = ',get_nstep(), & + ' for history time interval beginning at ', tape(t)%begtime, & + ' and ending at ',time + write(iulog,*) + call shr_sys_flush(iulog) + endif - ! Write history time samples - call t_startf('hist_htapes_wrapup_write') - call hfields_write(t, mode='write') - call t_stopf('hist_htapes_wrapup_write') + ! Update beginning time of next interval + tape(t)%begtime = time + call t_stopf('hist_htapes_wrapup_tconst') - ! Zero necessary history buffers - call hfields_zero(t) + ! Write history time samples + call t_startf('hist_htapes_wrapup_write') + call hfields_write(t, f, mode='write') + call t_stopf('hist_htapes_wrapup_write') - end if + ! Zero necessary history buffers + call hfields_zero(t, f) - end do ! end loop over history tapes + end if + end do file_loop1 + end do tape_loop1 ! 
Determine if file needs to be closed - call hist_do_disp (ntapes, tape(:)%ntimes, tape(:)%mfilt, if_stop, if_disphist, rstwr, nlend) + file_loop1b: do f = 1, max_split_files + call hist_do_disp (ntapes, tape(:)%ntimes(f), tape(:)%mfilt, if_stop, if_disphist(:,f), rstwr, nlend) + end do file_loop1b ! Close open history file ! Auxilary files may have been closed and saved off without being full, ! must reopen the files - do t = 1, ntapes - if (.not. history_tape_in_use(t)) then - cycle - end if + tape_loop2: do t = 1, ntapes + file_loop2: do f = 1, max_split_files + if (.not. history_tape_in_use(t,f)) then + cycle + end if - if (if_disphist(t)) then - if (tape(t)%ntimes /= 0) then - if (masterproc) then - write(iulog,*) - write(iulog,*) trim(subname),' : Closing local history file ',& - trim(locfnh(t)),' at nstep = ', get_nstep() - write(iulog,*) - endif + if (if_disphist(t,f)) then + if (tape(t)%ntimes(f) /= 0) then + if (masterproc) then + write(iulog,*) + write(iulog,*) trim(subname),' : Closing local history file ',& + trim(locfnh(t,f)),' at nstep = ', get_nstep() + write(iulog,*) + end if - call ncd_pio_closefile(nfid(t)) + call ncd_pio_closefile(nfid(t,f)) - if (.not.if_stop .and. (tape(t)%ntimes/=tape(t)%mfilt)) then - call ncd_pio_openfile (nfid(t), trim(locfnh(t)), ncd_write) - end if - else - if (masterproc) then - write(iulog,*) trim(subname),' : history tape ',t,': no open file to close' - end if + if (.not.if_stop .and. (tape(t)%ntimes(f)/=tape(t)%mfilt)) then + call ncd_pio_openfile (nfid(t,f), trim(locfnh(t,f)), ncd_write) + end if + else + if (masterproc) then + write(iulog,*) trim(subname),' : history tape ',t,': no open file to close' + end if + endif endif - endif - end do + end do file_loop2 + end do tape_loop2 ! Reset number of time samples to zero if file is full do t = 1, ntapes - if (.not. history_tape_in_use(t)) then - cycle - end if + do f = 1, max_split_files + if (.not. 
history_tape_in_use(t,f)) then + cycle + end if - if (if_disphist(t) .and. tape(t)%ntimes==tape(t)%mfilt) then - tape(t)%ntimes = 0 - end if + if (if_disphist(t,f) .and. tape(t)%ntimes(f)==tape(t)%mfilt) then + tape(t)%ntimes(f) = 0 + end if + end do end do end subroutine hist_htapes_wrapup @@ -4349,6 +4409,7 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) integer :: numl ! total number of landunits across all processors integer :: numc ! total number of columns across all processors integer :: nump ! total number of pfts across all processors + integer :: counter ! loop counter character(len=max_namlen) :: name ! variable name character(len=max_namlen) :: name_acc ! accumulator variable name character(len=max_namlen) :: long_name ! long name of variable @@ -4356,7 +4417,9 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) character(len=max_chars) :: units ! units of variable character(len=max_chars) :: units_acc ! accumulator units character(len=max_chars) :: fname ! full name of history file - character(len=max_chars) :: locrest(max_tapes) ! local history restart file names + character(len=max_chars) :: locrest(max_tapes, max_split_files) ! local history restart file names + character(len=max_chars) :: locrest_onfile(max_split_files, max_tapes) ! history restart file names on file, dims flipped + character(len=max_chars) :: locfnh_onfile(max_split_files, max_tapes) ! history file names on file, dims flipped character(len=max_length_filename) :: my_locfnh ! temporary version of locfnh character(len=max_length_filename) :: my_locfnhr ! temporary version of locfnhr @@ -4369,6 +4432,7 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) character(len=avgflag_strlen), allocatable :: tavgflag(:) integer :: start(2) + character(len=1) :: file_index ! instantaneous or accumulated_file_index character(len=1) :: hnum ! history file index character(len=hist_dim_name_length) :: type1d ! 
clm pointer 1d type character(len=hist_dim_name_length) :: type1d_out ! history buffer 1d type @@ -4389,11 +4453,12 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) integer :: dimid ! dimension ID integer :: k ! 1d index integer :: ntapes_onfile ! number of history tapes on the restart file - logical, allocatable :: history_tape_in_use_onfile(:) ! whether a given history tape is in use, according to the restart file + logical, allocatable :: history_tape_in_use_onfile(:) ! history tape is/isn't in use according to the restart file integer :: nflds_onfile ! number of history fields on the restart file logical :: readvar ! whether a variable was read successfully integer :: t ! tape index - integer :: f ! field index + integer :: f ! file index + integer :: fld ! field index integer :: varid ! variable id integer, allocatable :: itemp(:) ! temporary real(r8), pointer :: hbuf(:,:) ! history buffer @@ -4412,7 +4477,7 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) if (flag == 'read') then if (nsrest == nsrBranch) then do t = 1,ntapes - tape(t)%ntimes = 0 + tape(t)%ntimes(:) = 0 end do return end if @@ -4428,7 +4493,7 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) ! First when writing out and in define mode, create files and define all variables ! !================================================ - if (flag == 'define') then + define_read_write: if (flag == 'define') then !================================================ if (.not. present(rdate)) then @@ -4441,25 +4506,27 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) ! and then add the history and history restart filenames ! 
call ncd_defdim( ncid, 'ntapes' , ntapes , dimid) + call ncd_defdim( ncid, 'max_split_files', max_split_files, dimid) + call ncd_defdim( ncid, 'ntapes_multiply_by_max_split_files', ntapes * max_split_files, dimid) call ncd_defdim( ncid, 'max_chars' , max_chars , dimid) call ncd_defvar(ncid=ncid, varname='history_tape_in_use', xtype=ncd_log, & - long_name="Whether this history tape is in use", & - dim1name="ntapes") + long_name="Whether this history tape is/isn't in use", & + dim1name="ntapes_multiply_by_max_split_files") ier = PIO_inq_varid(ncid, 'history_tape_in_use', vardesc) ier = PIO_put_att(ncid, vardesc%varid, 'interpinic_flag', iflag_skip) call ncd_defvar(ncid=ncid, varname='locfnh', xtype=ncd_char, & long_name="History filename", & comment="This variable NOT needed for startup or branch simulations", & - dim1name='max_chars', dim2name="ntapes" ) + dim1name='max_chars', dim2name="ntapes_multiply_by_max_split_files" ) ier = PIO_inq_varid(ncid, 'locfnh', vardesc) ier = PIO_put_att(ncid, vardesc%varid, 'interpinic_flag', iflag_skip) call ncd_defvar(ncid=ncid, varname='locfnhr', xtype=ncd_char, & long_name="Restart history filename", & comment="This variable NOT needed for startup or branch simulations", & - dim1name='max_chars', dim2name="ntapes" ) + dim1name='max_chars', dim2name="ntapes_multiply_by_max_split_files" ) ier = PIO_inq_varid(ncid, 'locfnhr', vardesc) ier = PIO_put_att(ncid, vardesc%varid, 'interpinic_flag', iflag_skip) @@ -4471,172 +4538,183 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) ! Loop over tapes - write out namelist information to each restart-history tape ! only read/write accumulators and counters if needed - do t = 1,ntapes - if (.not. history_tape_in_use(t)) then - cycle - end if - - ! 
Create the restart history filename and open it - write(hnum,'(i1.1)') t-1 - locfnhr(t) = "./" // trim(caseid) //"."// trim(compname) // trim(inst_suffix) & - // ".rh" // hnum //"."// trim(rdate) //".nc" - - call htape_create( t, histrest=.true. ) - - ! Add read/write accumultators and counters if needed - if (.not. tape(t)%is_endhist) then - do f = 1,tape(t)%nflds - name = tape(t)%hlist(f)%field%name - long_name = tape(t)%hlist(f)%field%long_name - units = tape(t)%hlist(f)%field%units - name_acc = trim(name) // "_acc" - units_acc = "unitless positive integer" - long_name_acc = trim(long_name) // " accumulator number of samples" - type1d_out = tape(t)%hlist(f)%field%type1d_out - type2d = tape(t)%hlist(f)%field%type2d - num2d = tape(t)%hlist(f)%field%num2d - nacs => tape(t)%hlist(f)%nacs - hbuf => tape(t)%hlist(f)%hbuf - - if (type1d_out == grlnd) then - if (ldomain%isgrid2d) then - dim1name = 'lon' ; dim2name = 'lat' - else - dim1name = trim(grlnd); dim2name = 'undefined' - end if - else - dim1name = type1d_out ; dim2name = 'undefined' - endif + tape_loop1: do t = 1, ntapes + file_loop1: do f = 1, max_split_files + if (.not. history_tape_in_use(t,f)) then + cycle + end if - if (dim2name == 'undefined') then - if (num2d == 1) then - call ncd_defvar(ncid=ncid_hist(t), varname=trim(name), xtype=ncd_double, & - dim1name=dim1name, & - long_name=trim(long_name), units=trim(units)) - call ncd_defvar(ncid=ncid_hist(t), varname=trim(name_acc), xtype=ncd_int, & - dim1name=dim1name, & - long_name=trim(long_name_acc), units=trim(units_acc)) + ! Create the restart history filename and open it + write(hnum,'(i1.1)') t-1 + if (f == instantaneous_file_index) then + file_index = 'i' ! instantaneous file_index + else if (f == accumulated_file_index) then + file_index = 'a' ! 
accumulated file_index + else + write(iulog,*) trim(subname),' ERROR: f index =', f, ' but model expected f = ', instantaneous_file_index, ' (instantaneous file index) or ', accumulated_file_index, ' (accumulated file index)' + write(iulog,*) errMsg(sourcefile, __LINE__) + call endrun(msg="ERROR: file index not in range") + end if + locfnhr(t,f) = "./" // trim(caseid) //"."// trim(compname) // trim(inst_suffix) & + // ".rh" // hnum // file_index //"."// trim(rdate) //".nc" + + call htape_create( t, f, histrest=.true. ) + + ! Add read/write accumultators and counters if needed + not_endhist: if (.not. tape(t)%is_endhist) then + fld_loop1: do fld = 1, tape(t)%nflds(f) + name = tape(t)%hlist(fld,f)%field%name + long_name = tape(t)%hlist(fld,f)%field%long_name + units = tape(t)%hlist(fld,f)%field%units + name_acc = trim(name) // "_acc" + units_acc = "unitless positive integer" + long_name_acc = trim(long_name) // " accumulator number of samples" + type1d_out = tape(t)%hlist(fld,f)%field%type1d_out + type2d = tape(t)%hlist(fld,f)%field%type2d + num2d = tape(t)%hlist(fld,f)%field%num2d + nacs => tape(t)%hlist(fld,f)%nacs + hbuf => tape(t)%hlist(fld,f)%hbuf + + if (type1d_out == grlnd) then + if (ldomain%isgrid2d) then + dim1name = 'lon' ; dim2name = 'lat' + else + dim1name = trim(grlnd); dim2name = 'undefined' + end if else - call ncd_defvar(ncid=ncid_hist(t), varname=trim(name), xtype=ncd_double, & - dim1name=dim1name, dim2name=type2d, & - long_name=trim(long_name), units=trim(units)) - call ncd_defvar(ncid=ncid_hist(t), varname=trim(name_acc), xtype=ncd_int, & - dim1name=dim1name, dim2name=type2d, & - long_name=trim(long_name_acc), units=trim(units_acc)) - end if - else - if (num2d == 1) then - call ncd_defvar(ncid=ncid_hist(t), varname=trim(name), xtype=ncd_double, & - dim1name=dim1name, dim2name=dim2name, & - long_name=trim(long_name), units=trim(units)) - call ncd_defvar(ncid=ncid_hist(t), varname=trim(name_acc), xtype=ncd_int, & - dim1name=dim1name, 
dim2name=dim2name, & - long_name=trim(long_name_acc), units=trim(units_acc)) + dim1name = type1d_out ; dim2name = 'undefined' + endif + + if (dim2name == 'undefined') then + if (num2d == 1) then + call ncd_defvar(ncid=ncid_hist(t,f), varname=trim(name), xtype=ncd_double, & + dim1name=dim1name, & + long_name=trim(long_name), units=trim(units)) + call ncd_defvar(ncid=ncid_hist(t,f), varname=trim(name_acc), xtype=ncd_int, & + dim1name=dim1name, & + long_name=trim(long_name_acc), units=trim(units_acc)) + else + call ncd_defvar(ncid=ncid_hist(t,f), varname=trim(name), xtype=ncd_double, & + dim1name=dim1name, dim2name=type2d, & + long_name=trim(long_name), units=trim(units)) + call ncd_defvar(ncid=ncid_hist(t,f), varname=trim(name_acc), xtype=ncd_int, & + dim1name=dim1name, dim2name=type2d, & + long_name=trim(long_name_acc), units=trim(units_acc)) + end if else - call ncd_defvar(ncid=ncid_hist(t), varname=trim(name), xtype=ncd_double, & - dim1name=dim1name, dim2name=dim2name, dim3name=type2d, & - long_name=trim(long_name), units=trim(units)) - call ncd_defvar(ncid=ncid_hist(t), varname=trim(name_acc), xtype=ncd_int, & - dim1name=dim1name, dim2name=dim2name, dim3name=type2d, & - long_name=trim(long_name_acc), units=trim(units_acc)) - end if - endif - end do - endif - - ! - ! Add namelist information to each restart history tape - ! 
- call ncd_defdim( ncid_hist(t), 'fname_lenp2' , max_namlen+2, dimid) - call ncd_defdim( ncid_hist(t), 'fname_len' , max_namlen , dimid) - call ncd_defdim( ncid_hist(t), 'avgflag_len' , avgflag_strlen, dimid) - call ncd_defdim( ncid_hist(t), 'scalar' , 1 , dimid) - call ncd_defdim( ncid_hist(t), 'max_chars' , max_chars , dimid) - call ncd_defdim( ncid_hist(t), 'max_nflds' , max_nflds , dimid) - call ncd_defdim( ncid_hist(t), 'max_flds' , max_flds , dimid) - - call ncd_defvar(ncid=ncid_hist(t), varname='nhtfrq', xtype=ncd_int, & - long_name="Frequency of history writes", & - comment="Namelist item", & - units="absolute value of negative is in hours, 0=monthly, positive is time-steps", & - dim1name='scalar') - call ncd_defvar(ncid=ncid_hist(t), varname='mfilt', xtype=ncd_int, & - long_name="Number of history time samples on a file", units="unitless", & - comment="Namelist item", & - dim1name='scalar') - call ncd_defvar(ncid=ncid_hist(t), varname='ncprec', xtype=ncd_int, & - long_name="Flag for data precision", flag_values=(/1,2/), & - comment="Namelist item", & - nvalid_range=(/1,2/), & - flag_meanings=(/"single-precision", "double-precision"/), & - dim1name='scalar') - call ncd_defvar(ncid=ncid_hist(t), varname='dov2xy', xtype=ncd_log, & - long_name="Output on 2D grid format (TRUE) or vector format (FALSE)", & - comment="Namelist item", & - dim1name='scalar') - call ncd_defvar(ncid=ncid_hist(t), varname='fincl', xtype=ncd_char, & - comment="Namelist item", & - long_name="Fieldnames to include", & - dim1name='fname_lenp2', dim2name='max_flds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='fexcl', xtype=ncd_char, & - comment="Namelist item", & - long_name="Fieldnames to exclude", & - dim1name='fname_lenp2', dim2name='max_flds' ) - - call ncd_defvar(ncid=ncid_hist(t), varname='nflds', xtype=ncd_int, & - long_name="Number of fields on file", units="unitless", & - dim1name='scalar') - call ncd_defvar(ncid=ncid_hist(t), varname='ntimes', xtype=ncd_int, & - 
long_name="Number of time steps on file", units="time-step", & - dim1name='scalar') - call ncd_defvar(ncid=ncid_hist(t), varname='is_endhist', xtype=ncd_log, & - long_name="End of history file", dim1name='scalar') - call ncd_defvar(ncid=ncid_hist(t), varname='begtime', xtype=ncd_double, & - long_name="Beginning time", units="time units", & - dim1name='scalar') - - call ncd_defvar(ncid=ncid_hist(t), varname='num2d', xtype=ncd_int, & - long_name="Size of second dimension", units="unitless", & - dim1name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='hpindex', xtype=ncd_int, & - long_name="History pointer index", units="unitless", & - dim1name='max_nflds' ) - - call ncd_defvar(ncid=ncid_hist(t), varname='avgflag', xtype=ncd_char, & - long_name="Averaging flag", & - units="A=Average, X=Maximum, M=Minimum, I=Instantaneous, SUM=Sum", & - dim1name='avgflag_len', dim2name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='name', xtype=ncd_char, & - long_name="Fieldnames", & - dim1name='fname_len', dim2name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='long_name', xtype=ncd_char, & - long_name="Long descriptive names for fields", & - dim1name='max_chars', dim2name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='units', xtype=ncd_char, & - long_name="Units for each history field output", & - dim1name='max_chars', dim2name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='type1d', xtype=ncd_char, & - long_name="1st dimension type", & - dim1name='string_length', dim2name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='type1d_out', xtype=ncd_char, & - long_name="1st output dimension type", & - dim1name='string_length', dim2name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='type2d', xtype=ncd_char, & - long_name="2nd dimension type", & - dim1name='string_length', dim2name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='p2c_scale_type', xtype=ncd_char, & - long_name="PFT to column 
scale type", & - dim1name='scale_type_string_length', dim2name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='c2l_scale_type', xtype=ncd_char, & - long_name="column to landunit scale type", & - dim1name='scale_type_string_length', dim2name='max_nflds' ) - call ncd_defvar(ncid=ncid_hist(t), varname='l2g_scale_type', xtype=ncd_char, & - long_name="landunit to gridpoint scale type", & - dim1name='scale_type_string_length', dim2name='max_nflds' ) - - call ncd_enddef(ncid_hist(t)) - - end do ! end of ntapes loop + if (num2d == 1) then + call ncd_defvar(ncid=ncid_hist(t,f), varname=trim(name), xtype=ncd_double, & + dim1name=dim1name, dim2name=dim2name, & + long_name=trim(long_name), units=trim(units)) + call ncd_defvar(ncid=ncid_hist(t,f), varname=trim(name_acc), xtype=ncd_int, & + dim1name=dim1name, dim2name=dim2name, & + long_name=trim(long_name_acc), units=trim(units_acc)) + else + call ncd_defvar(ncid=ncid_hist(t,f), varname=trim(name), xtype=ncd_double, & + dim1name=dim1name, dim2name=dim2name, dim3name=type2d, & + long_name=trim(long_name), units=trim(units)) + call ncd_defvar(ncid=ncid_hist(t,f), varname=trim(name_acc), xtype=ncd_int, & + dim1name=dim1name, dim2name=dim2name, dim3name=type2d, & + long_name=trim(long_name_acc), units=trim(units_acc)) + end if + endif + end do fld_loop1 + end if not_endhist + + ! + ! Add namelist information to each restart history tape + ! 
+ call ncd_defdim( ncid_hist(t,f), 'fname_lenp2' , max_namlen+2, dimid) + call ncd_defdim( ncid_hist(t,f), 'fname_len' , max_namlen , dimid) + call ncd_defdim( ncid_hist(t,f), 'avgflag_len' , avgflag_strlen, dimid) + call ncd_defdim( ncid_hist(t,f), 'scalar' , 1 , dimid) + call ncd_defdim( ncid_hist(t,f), 'max_chars' , max_chars , dimid) + call ncd_defdim( ncid_hist(t,f), 'max_nflds' , max_nflds , dimid) + call ncd_defdim( ncid_hist(t,f), 'max_flds' , max_flds , dimid) + + call ncd_defvar(ncid=ncid_hist(t,f), varname='nhtfrq', xtype=ncd_int, & + long_name="Frequency of history writes", & + comment="Namelist item", & + units="absolute value of negative is in hours, 0=monthly, positive is time-steps", & + dim1name='scalar') + call ncd_defvar(ncid=ncid_hist(t,f), varname='mfilt', xtype=ncd_int, & + long_name="Number of history time samples on a file", units="unitless", & + comment="Namelist item", & + dim1name='scalar') + call ncd_defvar(ncid=ncid_hist(t,f), varname='ncprec', xtype=ncd_int, & + long_name="Flag for data precision", flag_values=(/1,2/), & + comment="Namelist item", & + nvalid_range=(/1,2/), & + flag_meanings=(/"single-precision", "double-precision"/), & + dim1name='scalar') + call ncd_defvar(ncid=ncid_hist(t,f), varname='dov2xy', xtype=ncd_log, & + long_name="Output on 2D grid format (TRUE) or vector format (FALSE)", & + comment="Namelist item", & + dim1name='scalar') + call ncd_defvar(ncid=ncid_hist(t,f), varname='fincl', xtype=ncd_char, & + comment="Namelist item", & + long_name="Fieldnames to include", & + dim1name='fname_lenp2', dim2name='max_flds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='fexcl', xtype=ncd_char, & + comment="Namelist item", & + long_name="Fieldnames to exclude", & + dim1name='fname_lenp2', dim2name='max_flds' ) + + call ncd_defvar(ncid=ncid_hist(t,f), varname='nflds', xtype=ncd_int, & + long_name="Number of fields on file", units="unitless", & + dim1name='scalar') + call ncd_defvar(ncid=ncid_hist(t,f), varname='ntimes', 
xtype=ncd_int, & + long_name="Number of time steps on file", units="time-step", & + dim1name='scalar') + call ncd_defvar(ncid=ncid_hist(t,f), varname='is_endhist', xtype=ncd_log, & + long_name="End of history file", dim1name='scalar') + call ncd_defvar(ncid=ncid_hist(t,f), varname='begtime', xtype=ncd_double, & + long_name="Beginning time", units="time units", & + dim1name='scalar') + + call ncd_defvar(ncid=ncid_hist(t,f), varname='num2d', xtype=ncd_int, & + long_name="Size of second dimension", units="unitless", & + dim1name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='hpindex', xtype=ncd_int, & + long_name="History pointer index", units="unitless", & + dim1name='max_nflds' ) + + call ncd_defvar(ncid=ncid_hist(t,f), varname='avgflag', xtype=ncd_char, & + long_name="Averaging flag", & + units="A=Average, X=Maximum, M=Minimum, I=Instantaneous, SUM=Sum", & + dim1name='avgflag_len', dim2name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='name', xtype=ncd_char, & + long_name="Fieldnames", & + dim1name='fname_len', dim2name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='long_name', xtype=ncd_char, & + long_name="Long descriptive names for fields", & + dim1name='max_chars', dim2name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='units', xtype=ncd_char, & + long_name="Units for each history field output", & + dim1name='max_chars', dim2name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='type1d', xtype=ncd_char, & + long_name="1st dimension type", & + dim1name='string_length', dim2name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='type1d_out', xtype=ncd_char, & + long_name="1st output dimension type", & + dim1name='string_length', dim2name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='type2d', xtype=ncd_char, & + long_name="2nd dimension type", & + dim1name='string_length', dim2name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='p2c_scale_type', 
xtype=ncd_char, & + long_name="PFT to column scale type", & + dim1name='scale_type_string_length', dim2name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='c2l_scale_type', xtype=ncd_char, & + long_name="column to landunit scale type", & + dim1name='scale_type_string_length', dim2name='max_nflds' ) + call ncd_defvar(ncid=ncid_hist(t,f), varname='l2g_scale_type', xtype=ncd_char, & + long_name="landunit to gridpoint scale type", & + dim1name='scale_type_string_length', dim2name='max_nflds' ) + + call ncd_enddef(ncid_hist(t,f)) + + end do file_loop1 + end do tape_loop1 RETURN @@ -4648,18 +4726,21 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) !================================================ ! Add history filenames to master restart file - do t = 1,ntapes - call ncd_io('history_tape_in_use', history_tape_in_use(t), 'write', ncid, nt=t) - if (history_tape_in_use(t)) then - my_locfnh = locfnh(t) - my_locfnhr = locfnhr(t) - else - my_locfnh = 'non_existent_file' - my_locfnhr = 'non_existent_file' - end if - call ncd_io('locfnh', my_locfnh, 'write', ncid, nt=t) - call ncd_io('locfnhr', my_locfnhr, 'write', ncid, nt=t) - end do + counter = 0 + tape_loop2: do t = 1, ntapes + file_loop2: do f = 1, max_split_files + counter = counter + 1 + if (.not. history_tape_in_use(t,f)) then + locfnh(t,f) = 'non_existent_file' + locfnhr(t,f) = 'non_existent_file' + end if + my_locfnh = locfnh(t,f) + my_locfnhr = locfnhr(t,f) + call ncd_io('locfnh', my_locfnh, 'write', ncid, nt=counter) + call ncd_io('locfnhr', my_locfnhr, 'write', ncid, nt=counter) + call ncd_io('history_tape_in_use', history_tape_in_use(t,f), 'write', ncid, nt=counter) + end do file_loop2 + end do tape_loop2 fincl(:,1) = hist_fincl1(:) fincl(:,2) = hist_fincl2(:) @@ -4692,66 +4773,68 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) ! allocate(itemp(max_nflds)) - do t = 1,ntapes - if (.not. 
history_tape_in_use(t)) then - cycle - end if + tape_loop3: do t = 1, ntapes + file_loop3: do f = 1, max_split_files + if (.not. history_tape_in_use(t,f)) then + cycle + end if - call ncd_io(varname='fincl', data=fincl(:,t), ncid=ncid_hist(t), flag='write') + call ncd_io(varname='fincl', data=fincl(:,t), ncid=ncid_hist(t,f), flag='write') - call ncd_io(varname='fexcl', data=fexcl(:,t), ncid=ncid_hist(t), flag='write') + call ncd_io(varname='fexcl', data=fexcl(:,t), ncid=ncid_hist(t,f), flag='write') - call ncd_io(varname='is_endhist', data=tape(t)%is_endhist, ncid=ncid_hist(t), flag='write') + call ncd_io(varname='is_endhist', data=tape(t)%is_endhist, ncid=ncid_hist(t,f), flag='write') - call ncd_io(varname='dov2xy', data=tape(t)%dov2xy, ncid=ncid_hist(t), flag='write') + call ncd_io(varname='dov2xy', data=tape(t)%dov2xy, ncid=ncid_hist(t,f), flag='write') - itemp(:) = 0 - do f=1,tape(t)%nflds - itemp(f) = tape(t)%hlist(f)%field%num2d - end do - call ncd_io(varname='num2d', data=itemp(:), ncid=ncid_hist(t), flag='write') + itemp(:) = 0 + do fld = 1, tape(t)%nflds(f) + itemp(fld) = tape(t)%hlist(fld,f)%field%num2d + end do + call ncd_io(varname='num2d', data=itemp(:), ncid=ncid_hist(t,f), flag='write') - itemp(:) = 0 - do f=1,tape(t)%nflds - itemp(f) = tape(t)%hlist(f)%field%hpindex - end do - call ncd_io(varname='hpindex', data=itemp(:), ncid=ncid_hist(t), flag='write') - - call ncd_io('nflds', tape(t)%nflds, 'write', ncid_hist(t) ) - call ncd_io('ntimes', tape(t)%ntimes, 'write', ncid_hist(t) ) - call ncd_io('nhtfrq', tape(t)%nhtfrq, 'write', ncid_hist(t) ) - call ncd_io('mfilt', tape(t)%mfilt, 'write', ncid_hist(t) ) - call ncd_io('ncprec', tape(t)%ncprec, 'write', ncid_hist(t) ) - call ncd_io('begtime', tape(t)%begtime, 'write', ncid_hist(t) ) - allocate(tmpstr(tape(t)%nflds,3 ),tname(tape(t)%nflds), & - tavgflag(tape(t)%nflds),tunits(tape(t)%nflds),tlongname(tape(t)%nflds), & - p2c_scale_type(tape(t)%nflds), c2l_scale_type(tape(t)%nflds), & - 
l2g_scale_type(tape(t)%nflds)) - do f=1,tape(t)%nflds - tname(f) = tape(t)%hlist(f)%field%name - tunits(f) = tape(t)%hlist(f)%field%units - tlongname(f) = tape(t)%hlist(f)%field%long_name - tmpstr(f,1) = tape(t)%hlist(f)%field%type1d - tmpstr(f,2) = tape(t)%hlist(f)%field%type1d_out - tmpstr(f,3) = tape(t)%hlist(f)%field%type2d - tavgflag(f) = tape(t)%hlist(f)%avgflag - p2c_scale_type(f) = tape(t)%hlist(f)%field%p2c_scale_type - c2l_scale_type(f) = tape(t)%hlist(f)%field%c2l_scale_type - l2g_scale_type(f) = tape(t)%hlist(f)%field%l2g_scale_type - end do - call ncd_io( 'name', tname, 'write',ncid_hist(t)) - call ncd_io('long_name', tlongname, 'write', ncid_hist(t)) - call ncd_io('units', tunits, 'write',ncid_hist(t)) - call ncd_io('type1d', tmpstr(:,1), 'write', ncid_hist(t)) - call ncd_io('type1d_out', tmpstr(:,2), 'write', ncid_hist(t)) - call ncd_io('type2d', tmpstr(:,3), 'write', ncid_hist(t)) - call ncd_io('avgflag',tavgflag , 'write', ncid_hist(t)) - call ncd_io('p2c_scale_type', p2c_scale_type, 'write', ncid_hist(t)) - call ncd_io('c2l_scale_type', c2l_scale_type, 'write', ncid_hist(t)) - call ncd_io('l2g_scale_type', l2g_scale_type, 'write', ncid_hist(t)) - deallocate(tname,tlongname,tunits,tmpstr,tavgflag) - deallocate(p2c_scale_type, c2l_scale_type, l2g_scale_type) - enddo + itemp(:) = 0 + do fld = 1, tape(t)%nflds(f) + itemp(fld) = tape(t)%hlist(fld,f)%field%hpindex + end do + call ncd_io(varname='hpindex', data=itemp(:), ncid=ncid_hist(t,f), flag='write') + + call ncd_io('nflds', tape(t)%nflds(f), 'write', ncid_hist(t,f) ) + call ncd_io('ntimes', tape(t)%ntimes(f), 'write', ncid_hist(t,f) ) + call ncd_io('nhtfrq', tape(t)%nhtfrq, 'write', ncid_hist(t,f) ) + call ncd_io('mfilt', tape(t)%mfilt, 'write', ncid_hist(t,f) ) + call ncd_io('ncprec', tape(t)%ncprec, 'write', ncid_hist(t,f) ) + call ncd_io('begtime', tape(t)%begtime, 'write', ncid_hist(t,f) ) + allocate(tmpstr(tape(t)%nflds(f), 3), tname(tape(t)%nflds(f)), & + tavgflag(tape(t)%nflds(f)), 
tunits(tape(t)%nflds(f)), tlongname(tape(t)%nflds(f)), & + p2c_scale_type(tape(t)%nflds(f)), c2l_scale_type(tape(t)%nflds(f)), & + l2g_scale_type(tape(t)%nflds(f))) + do fld = 1, tape(t)%nflds(f) + tname(fld) = tape(t)%hlist(fld,f)%field%name + tunits(fld) = tape(t)%hlist(fld,f)%field%units + tlongname(fld) = tape(t)%hlist(fld,f)%field%long_name + tmpstr(fld,1) = tape(t)%hlist(fld,f)%field%type1d + tmpstr(fld,2) = tape(t)%hlist(fld,f)%field%type1d_out + tmpstr(fld,3) = tape(t)%hlist(fld,f)%field%type2d + tavgflag(fld) = tape(t)%hlist(fld,f)%avgflag + p2c_scale_type(fld) = tape(t)%hlist(fld,f)%field%p2c_scale_type + c2l_scale_type(fld) = tape(t)%hlist(fld,f)%field%c2l_scale_type + l2g_scale_type(fld) = tape(t)%hlist(fld,f)%field%l2g_scale_type + end do + call ncd_io( 'name', tname, 'write',ncid_hist(t,f)) + call ncd_io('long_name', tlongname, 'write', ncid_hist(t,f)) + call ncd_io('units', tunits, 'write',ncid_hist(t,f)) + call ncd_io('type1d', tmpstr(:,1), 'write', ncid_hist(t,f)) + call ncd_io('type1d_out', tmpstr(:,2), 'write', ncid_hist(t,f)) + call ncd_io('type2d', tmpstr(:,3), 'write', ncid_hist(t,f)) + call ncd_io('avgflag',tavgflag , 'write', ncid_hist(t,f)) + call ncd_io('p2c_scale_type', p2c_scale_type, 'write', ncid_hist(t,f)) + call ncd_io('c2l_scale_type', c2l_scale_type, 'write', ncid_hist(t,f)) + call ncd_io('l2g_scale_type', l2g_scale_type, 'write', ncid_hist(t,f)) + deallocate(tname,tlongname,tunits,tmpstr,tavgflag) + deallocate(p2c_scale_type, c2l_scale_type, l2g_scale_type) + end do file_loop3 + end do tape_loop3 deallocate(itemp) ! @@ -4762,7 +4845,7 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) !================================================ call ncd_inqdlen(ncid,dimid,ntapes_onfile, name='ntapes') - if (is_restart()) then + if_restart1: if (is_restart()) then if (ntapes_onfile /= ntapes) then write(iulog,*) 'ntapes = ', ntapes, ' ntapes_onfile = ', ntapes_onfile call endrun(msg=' ERROR: number of ntapes differs from restart file. 
'// & @@ -4770,8 +4853,8 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) additional_msg=errMsg(sourcefile, __LINE__)) end if - if (ntapes > 0) then - allocate(history_tape_in_use_onfile(ntapes)) + ntapes_gt_0: if (ntapes > 0) then + allocate(history_tape_in_use_onfile(max_split_files*ntapes)) call ncd_io('history_tape_in_use', history_tape_in_use_onfile, 'read', ncid, & readvar=readvar) if (.not. readvar) then @@ -4780,205 +4863,216 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) ! true for all tapes <= ntapes. history_tape_in_use_onfile(:) = .true. end if - do t = 1, ntapes - if (history_tape_in_use_onfile(t) .neqv. history_tape_in_use(t)) then - write(iulog,*) subname//' ERROR: history_tape_in_use on restart file' - write(iulog,*) 'disagrees with current run: For tape ', t - write(iulog,*) 'On restart file: ', history_tape_in_use_onfile(t) - write(iulog,*) 'In current run : ', history_tape_in_use(t) - write(iulog,*) 'This suggests that this tape was empty in one case,' - write(iulog,*) 'but non-empty in the other. (history_tape_in_use .false.' - write(iulog,*) 'means that history tape is empty.)' - call endrun(msg=' ERROR: history_tape_in_use differs from restart file. '// & - 'You can NOT change history options on restart.', & - additional_msg=errMsg(sourcefile, __LINE__)) - end if - end do - - call ncd_io('locfnh', locfnh(1:ntapes), 'read', ncid ) - call ncd_io('locfnhr', locrest(1:ntapes), 'read', ncid ) - do t = 1,ntapes - call strip_null(locrest(t)) - call strip_null(locfnh(t)) - end do - end if - end if + counter = 0 + tape_loop4: do t = 1, ntapes + file_loop4: do f = 1, max_split_files + counter = counter + 1 + if (history_tape_in_use_onfile(counter) .neqv. 
history_tape_in_use(t,f)) then + write(iulog,*) subname//' ERROR: history_tape_in_use on restart file' + write(iulog,*) 'disagrees with current run: For tape and file ', t, f + write(iulog,*) 'On restart file: ', history_tape_in_use_onfile(counter) + write(iulog,*) 'In current run : ', history_tape_in_use(t,f) + write(iulog,*) 'This suggests that this tape was empty in one case,' + write(iulog,*) 'but non-empty in the other. (history_tape_in_use .false.' + write(iulog,*) 'means that history tape is empty.)' + call endrun(msg=' ERROR: history_tape_in_use differs from restart file. '// & + 'You can NOT change history options on restart.', & + additional_msg=errMsg(sourcefile, __LINE__)) + end if + end do file_loop4 + end do tape_loop4 + call ncd_io('locfnh', locfnh_onfile, 'read', ncid ) + call ncd_io('locfnhr', locrest_onfile, 'read', ncid ) + tape_loop5: do t = 1, ntapes + file_loop5: do f = 1, max_split_files + call strip_null(locrest_onfile(f,t)) + call strip_null(locfnh_onfile(f,t)) + ! These character variables get read with their dimensions backwards + ! so flip them before using them + locrest(t,f) = locrest_onfile(f,t) + locfnh(t,f) = locfnh_onfile(f,t) + end do file_loop5 + end do tape_loop5 + end if ntapes_gt_0 + end if if_restart1 ! Determine necessary indices - the following is needed if model decomposition is different on restart start(1)=1 - if ( is_restart() )then - do t = 1,ntapes - if (.not. history_tape_in_use(t)) then - cycle - end if + if_restart2: if ( is_restart() ) then + tape_loop6: do t = 1, ntapes + file_loop6: do f = 1, max_split_files + if (.not. history_tape_in_use(t,f)) then + cycle + end if - call getfil( locrest(t), locfnhr(t), 0 ) - call ncd_pio_openfile (ncid_hist(t), trim(locfnhr(t)), ncd_nowrite) + call getfil( locrest(t,f), locfnhr(t,f), 0 ) + call ncd_pio_openfile (ncid_hist(t,f), trim(locfnhr(t,f)), ncd_nowrite) - if ( t == 1 )then + if ( t == 1 .and. 
f == 1 )then - call ncd_inqdlen(ncid_hist(1),dimid,max_nflds,name='max_nflds') + call ncd_inqdlen(ncid_hist(1,f),dimid,max_nflds,name='max_nflds') - allocate(itemp(max_nflds)) - end if + allocate(itemp(max_nflds)) + end if - call ncd_inqvid(ncid_hist(t), 'name', varid, name_desc) - call ncd_inqvid(ncid_hist(t), 'long_name', varid, longname_desc) - call ncd_inqvid(ncid_hist(t), 'units', varid, units_desc) - call ncd_inqvid(ncid_hist(t), 'type1d', varid, type1d_desc) - call ncd_inqvid(ncid_hist(t), 'type1d_out', varid, type1d_out_desc) - call ncd_inqvid(ncid_hist(t), 'type2d', varid, type2d_desc) - call ncd_inqvid(ncid_hist(t), 'avgflag', varid, avgflag_desc) - call ncd_inqvid(ncid_hist(t), 'p2c_scale_type', varid, p2c_scale_type_desc) - call ncd_inqvid(ncid_hist(t), 'c2l_scale_type', varid, c2l_scale_type_desc) - call ncd_inqvid(ncid_hist(t), 'l2g_scale_type', varid, l2g_scale_type_desc) - - call ncd_io(varname='fincl', data=fincl(:,t), ncid=ncid_hist(t), flag='read') - - call ncd_io(varname='fexcl', data=fexcl(:,t), ncid=ncid_hist(t), flag='read') - - call ncd_io('nflds', nflds_onfile, 'read', ncid_hist(t) ) - if ( nflds_onfile /= tape(t)%nflds )then - write(iulog,*) 'nflds = ', tape(t)%nflds, ' nflds_onfile = ', nflds_onfile - call endrun(msg=' ERROR: number of fields different than on restart file!,'// & - ' you can NOT change history options on restart!' 
//& + call ncd_inqvid(ncid_hist(t,f), 'name', varid, name_desc) + call ncd_inqvid(ncid_hist(t,f), 'long_name', varid, longname_desc) + call ncd_inqvid(ncid_hist(t,f), 'units', varid, units_desc) + call ncd_inqvid(ncid_hist(t,f), 'type1d', varid, type1d_desc) + call ncd_inqvid(ncid_hist(t,f), 'type1d_out', varid, type1d_out_desc) + call ncd_inqvid(ncid_hist(t,f), 'type2d', varid, type2d_desc) + call ncd_inqvid(ncid_hist(t,f), 'avgflag', varid, avgflag_desc) + call ncd_inqvid(ncid_hist(t,f), 'p2c_scale_type', varid, p2c_scale_type_desc) + call ncd_inqvid(ncid_hist(t,f), 'c2l_scale_type', varid, c2l_scale_type_desc) + call ncd_inqvid(ncid_hist(t,f), 'l2g_scale_type', varid, l2g_scale_type_desc) + + call ncd_io(varname='fincl', data=fincl(:,t), ncid=ncid_hist(t,f), flag='read') + + call ncd_io(varname='fexcl', data=fexcl(:,t), ncid=ncid_hist(t,f), flag='read') + + call ncd_io('nflds', nflds_onfile, 'read', ncid_hist(t,f) ) + if ( nflds_onfile /= tape(t)%nflds(f) ) then + write(iulog,*) 'nflds = ', tape(t)%nflds(f), ' nflds_onfile = ', nflds_onfile + call endrun(msg=' ERROR: number of fields different than on restart file!,'// & + ' you can NOT change history options on restart!' 
//& errMsg(sourcefile, __LINE__)) - end if - call ncd_io('ntimes', tape(t)%ntimes, 'read', ncid_hist(t) ) - call ncd_io('nhtfrq', tape(t)%nhtfrq, 'read', ncid_hist(t) ) - call ncd_io('mfilt', tape(t)%mfilt, 'read', ncid_hist(t) ) - call ncd_io('ncprec', tape(t)%ncprec, 'read', ncid_hist(t) ) - call ncd_io('begtime', tape(t)%begtime, 'read', ncid_hist(t) ) - - call ncd_io(varname='is_endhist', data=tape(t)%is_endhist, ncid=ncid_hist(t), flag='read') - call ncd_io(varname='dov2xy', data=tape(t)%dov2xy, ncid=ncid_hist(t), flag='read') - call ncd_io(varname='num2d', data=itemp(:), ncid=ncid_hist(t), flag='read') - do f=1,tape(t)%nflds - tape(t)%hlist(f)%field%num2d = itemp(f) - end do + end if + call ncd_io('ntimes', tape(t)%ntimes(f), 'read', ncid_hist(t,f) ) + call ncd_io('nhtfrq', tape(t)%nhtfrq, 'read', ncid_hist(t,f) ) + call ncd_io('mfilt', tape(t)%mfilt, 'read', ncid_hist(t,f) ) + call ncd_io('ncprec', tape(t)%ncprec, 'read', ncid_hist(t,f) ) + call ncd_io('begtime', tape(t)%begtime, 'read', ncid_hist(t,f) ) + + call ncd_io(varname='is_endhist', data=tape(t)%is_endhist, ncid=ncid_hist(t,f), flag='read') + call ncd_io(varname='dov2xy', data=tape(t)%dov2xy, ncid=ncid_hist(t,f), flag='read') + call ncd_io(varname='num2d', data=itemp(:), ncid=ncid_hist(t,f), flag='read') + do fld = 1, tape(t)%nflds(f) + tape(t)%hlist(fld,f)%field%num2d = itemp(fld) + end do + + call ncd_io(varname='hpindex', data=itemp(:), ncid=ncid_hist(t,f), flag='read') + do fld = 1, tape(t)%nflds(f) + tape(t)%hlist(fld,f)%field%hpindex = itemp(fld) + end do + + fld_loop2: do fld = 1, tape(t)%nflds(f) + start(2) = fld + call ncd_io( name_desc, tape(t)%hlist(fld,f)%field%name, & + 'read', ncid_hist(t,f), start ) + call ncd_io( longname_desc, tape(t)%hlist(fld,f)%field%long_name, & + 'read', ncid_hist(t,f), start ) + call ncd_io( units_desc, tape(t)%hlist(fld,f)%field%units, & + 'read', ncid_hist(t,f), start ) + call ncd_io( type1d_desc, tape(t)%hlist(fld,f)%field%type1d, & + 'read', 
ncid_hist(t,f), start ) + call ncd_io( type1d_out_desc, tape(t)%hlist(fld,f)%field%type1d_out, & + 'read', ncid_hist(t,f), start ) + call ncd_io( type2d_desc, tape(t)%hlist(fld,f)%field%type2d, & + 'read', ncid_hist(t,f), start ) + call ncd_io( avgflag_desc, tape(t)%hlist(fld,f)%avgflag, & + 'read', ncid_hist(t,f), start ) + call ncd_io( p2c_scale_type_desc, tape(t)%hlist(fld,f)%field%p2c_scale_type, & + 'read', ncid_hist(t,f), start ) + call ncd_io( c2l_scale_type_desc, tape(t)%hlist(fld,f)%field%c2l_scale_type, & + 'read', ncid_hist(t,f), start ) + call ncd_io( l2g_scale_type_desc, tape(t)%hlist(fld,f)%field%l2g_scale_type, & + 'read', ncid_hist(t,f), start ) + call strip_null(tape(t)%hlist(fld,f)%field%name) + call strip_null(tape(t)%hlist(fld,f)%field%long_name) + call strip_null(tape(t)%hlist(fld,f)%field%units) + call strip_null(tape(t)%hlist(fld,f)%field%type1d) + call strip_null(tape(t)%hlist(fld,f)%field%type1d_out) + call strip_null(tape(t)%hlist(fld,f)%field%type2d) + call strip_null(tape(t)%hlist(fld,f)%field%p2c_scale_type) + call strip_null(tape(t)%hlist(fld,f)%field%c2l_scale_type) + call strip_null(tape(t)%hlist(fld,f)%field%l2g_scale_type) + call strip_null(tape(t)%hlist(fld,f)%avgflag) + + type1d_out = trim(tape(t)%hlist(fld,f)%field%type1d_out) + select case (trim(type1d_out)) + case (grlnd) + num1d_out = numg + beg1d_out = bounds%begg + end1d_out = bounds%endg + case (nameg) + num1d_out = numg + beg1d_out = bounds%begg + end1d_out = bounds%endg + case (namel) + num1d_out = numl + beg1d_out = bounds%begl + end1d_out = bounds%endl + case (namec) + num1d_out = numc + beg1d_out = bounds%begc + end1d_out = bounds%endc + case (namep) + num1d_out = nump + beg1d_out = bounds%begp + end1d_out = bounds%endp + case default + write(iulog,*) trim(subname),' ERROR: read unknown 1d output type=',trim(type1d_out) + call endrun(msg=errMsg(sourcefile, __LINE__)) + end select - call ncd_io(varname='hpindex', data=itemp(:), ncid=ncid_hist(t), flag='read') - do 
f=1,tape(t)%nflds - tape(t)%hlist(f)%field%hpindex = itemp(f) - end do + tape(t)%hlist(fld,f)%field%num1d_out = num1d_out + tape(t)%hlist(fld,f)%field%beg1d_out = beg1d_out + tape(t)%hlist(fld,f)%field%end1d_out = end1d_out - do f=1,tape(t)%nflds - start(2) = f - call ncd_io( name_desc, tape(t)%hlist(f)%field%name, & - 'read', ncid_hist(t), start ) - call ncd_io( longname_desc, tape(t)%hlist(f)%field%long_name, & - 'read', ncid_hist(t), start ) - call ncd_io( units_desc, tape(t)%hlist(f)%field%units, & - 'read', ncid_hist(t), start ) - call ncd_io( type1d_desc, tape(t)%hlist(f)%field%type1d, & - 'read', ncid_hist(t), start ) - call ncd_io( type1d_out_desc, tape(t)%hlist(f)%field%type1d_out, & - 'read', ncid_hist(t), start ) - call ncd_io( type2d_desc, tape(t)%hlist(f)%field%type2d, & - 'read', ncid_hist(t), start ) - call ncd_io( avgflag_desc, tape(t)%hlist(f)%avgflag, & - 'read', ncid_hist(t), start ) - call ncd_io( p2c_scale_type_desc, tape(t)%hlist(f)%field%p2c_scale_type, & - 'read', ncid_hist(t), start ) - call ncd_io( c2l_scale_type_desc, tape(t)%hlist(f)%field%c2l_scale_type, & - 'read', ncid_hist(t), start ) - call ncd_io( l2g_scale_type_desc, tape(t)%hlist(f)%field%l2g_scale_type, & - 'read', ncid_hist(t), start ) - call strip_null(tape(t)%hlist(f)%field%name) - call strip_null(tape(t)%hlist(f)%field%long_name) - call strip_null(tape(t)%hlist(f)%field%units) - call strip_null(tape(t)%hlist(f)%field%type1d) - call strip_null(tape(t)%hlist(f)%field%type1d_out) - call strip_null(tape(t)%hlist(f)%field%type2d) - call strip_null(tape(t)%hlist(f)%field%p2c_scale_type) - call strip_null(tape(t)%hlist(f)%field%c2l_scale_type) - call strip_null(tape(t)%hlist(f)%field%l2g_scale_type) - call strip_null(tape(t)%hlist(f)%avgflag) - - type1d_out = trim(tape(t)%hlist(f)%field%type1d_out) - select case (trim(type1d_out)) - case (grlnd) - num1d_out = numg - beg1d_out = bounds%begg - end1d_out = bounds%endg - case (nameg) - num1d_out = numg - beg1d_out = bounds%begg - 
end1d_out = bounds%endg - case (namel) - num1d_out = numl - beg1d_out = bounds%begl - end1d_out = bounds%endl - case (namec) - num1d_out = numc - beg1d_out = bounds%begc - end1d_out = bounds%endc - case (namep) - num1d_out = nump - beg1d_out = bounds%begp - end1d_out = bounds%endp - case default - write(iulog,*) trim(subname),' ERROR: read unknown 1d output type=',trim(type1d_out) - call endrun(msg=errMsg(sourcefile, __LINE__)) - end select - - tape(t)%hlist(f)%field%num1d_out = num1d_out - tape(t)%hlist(f)%field%beg1d_out = beg1d_out - tape(t)%hlist(f)%field%end1d_out = end1d_out - - num2d = tape(t)%hlist(f)%field%num2d - allocate (tape(t)%hlist(f)%hbuf(beg1d_out:end1d_out,num2d), & - tape(t)%hlist(f)%nacs(beg1d_out:end1d_out,num2d), & - stat=status) - if (status /= 0) then - write(iulog,*) trim(subname),' ERROR: allocation error for hbuf,nacs at t,f=',t,f - call endrun(msg=errMsg(sourcefile, __LINE__)) - endif - tape(t)%hlist(f)%hbuf(:,:) = 0._r8 - tape(t)%hlist(f)%nacs(:,:) = 0 - - type1d = tape(t)%hlist(f)%field%type1d - select case (type1d) - case (grlnd) - num1d = numg - beg1d = bounds%begg - end1d = bounds%endg - case (nameg) - num1d = numg - beg1d = bounds%begg - end1d = bounds%endg - case (namel) - num1d = numl - beg1d = bounds%begl - end1d = bounds%endl - case (namec) - num1d = numc - beg1d = bounds%begc - end1d = bounds%endc - case (namep) - num1d = nump - beg1d = bounds%begp - end1d = bounds%endp - case default - write(iulog,*) trim(subname),' ERROR: read unknown 1d type=',type1d - call endrun(msg=errMsg(sourcefile, __LINE__)) - end select + num2d = tape(t)%hlist(fld,f)%field%num2d + allocate (tape(t)%hlist(fld,f)%hbuf(beg1d_out:end1d_out,num2d), & + tape(t)%hlist(fld,f)%nacs(beg1d_out:end1d_out,num2d), & + stat=status) + if (status /= 0) then + write(iulog,*) trim(subname),' ERROR: allocation error for hbuf,nacs at t,f,fld=',t,f,fld + call endrun(msg=errMsg(sourcefile, __LINE__)) + endif + tape(t)%hlist(fld,f)%hbuf(:,:) = 0._r8 + 
tape(t)%hlist(fld,f)%nacs(:,:) = 0 + + type1d = tape(t)%hlist(fld,f)%field%type1d + select case (type1d) + case (grlnd) + num1d = numg + beg1d = bounds%begg + end1d = bounds%endg + case (nameg) + num1d = numg + beg1d = bounds%begg + end1d = bounds%endg + case (namel) + num1d = numl + beg1d = bounds%begl + end1d = bounds%endl + case (namec) + num1d = numc + beg1d = bounds%begc + end1d = bounds%endc + case (namep) + num1d = nump + beg1d = bounds%begp + end1d = bounds%endp + case default + write(iulog,*) trim(subname),' ERROR: read unknown 1d type=',type1d + call endrun(msg=errMsg(sourcefile, __LINE__)) + end select - tape(t)%hlist(f)%field%num1d = num1d - tape(t)%hlist(f)%field%beg1d = beg1d - tape(t)%hlist(f)%field%end1d = end1d + tape(t)%hlist(fld,f)%field%num1d = num1d + tape(t)%hlist(fld,f)%field%beg1d = beg1d + tape(t)%hlist(fld,f)%field%end1d = end1d - end do ! end of flds loop + end do fld_loop2 - ! If history file is not full, open it + ! If history file is not full, open it - if (tape(t)%ntimes /= 0) then - call ncd_pio_openfile (nfid(t), trim(locfnh(t)), ncd_write) - end if + if (tape(t)%ntimes(f) /= 0) then + call ncd_pio_openfile (nfid(t,f), trim(locfnh(t,f)), ncd_write) + end if - end do ! end of tapes loop + end do file_loop6 + end do tape_loop6 hist_fincl1(:) = fincl(:,1) hist_fincl2(:) = fincl(:,2) @@ -5002,11 +5096,11 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) hist_fexcl9(:) = fexcl(:,9) hist_fexcl10(:) = fexcl(:,10) - end if + end if if_restart2 if ( allocated(itemp) ) deallocate(itemp) - end if + end if define_read_write !====================================================================== ! Read/write history file restart data. @@ -5015,114 +5109,118 @@ subroutine hist_restart_ncd (bounds, ncid, flag, rdate) ! A new history file is used on a branch run. !====================================================================== - if (flag == 'write') then - - do t = 1,ntapes - if (.not. 
history_tape_in_use(t)) then - cycle - end if + read_write: if (flag == 'write') then - if (.not. tape(t)%is_endhist) then - - do f = 1,tape(t)%nflds - name = tape(t)%hlist(f)%field%name - name_acc = trim(name) // "_acc" - type1d_out = tape(t)%hlist(f)%field%type1d_out - type2d = tape(t)%hlist(f)%field%type2d - num2d = tape(t)%hlist(f)%field%num2d - beg1d_out = tape(t)%hlist(f)%field%beg1d_out - end1d_out = tape(t)%hlist(f)%field%end1d_out - nacs => tape(t)%hlist(f)%nacs - hbuf => tape(t)%hlist(f)%hbuf - - if (num2d == 1) then - allocate(hbuf1d(beg1d_out:end1d_out), & - nacs1d(beg1d_out:end1d_out), stat=status) - if (status /= 0) then - write(iulog,*) trim(subname),' ERROR: allocation' - call endrun(msg=errMsg(sourcefile, __LINE__)) - end if + tape_loop7: do t = 1, ntapes + file_loop7: do f = 1, max_split_files + if (.not. history_tape_in_use(t,f)) then + cycle + end if - hbuf1d(beg1d_out:end1d_out) = hbuf(beg1d_out:end1d_out,1) - nacs1d(beg1d_out:end1d_out) = nacs(beg1d_out:end1d_out,1) + if (.not. 
tape(t)%is_endhist) then - call ncd_io(ncid=ncid_hist(t), flag='write', varname=trim(name), & - dim1name=type1d_out, data=hbuf1d) - call ncd_io(ncid=ncid_hist(t), flag='write', varname=trim(name_acc), & - dim1name=type1d_out, data=nacs1d) + fld_loop3: do fld = 1, tape(t)%nflds(f) + name = tape(t)%hlist(fld,f)%field%name + name_acc = trim(name) // "_acc" + type1d_out = tape(t)%hlist(fld,f)%field%type1d_out + type2d = tape(t)%hlist(fld,f)%field%type2d + num2d = tape(t)%hlist(fld,f)%field%num2d + beg1d_out = tape(t)%hlist(fld,f)%field%beg1d_out + end1d_out = tape(t)%hlist(fld,f)%field%end1d_out + nacs => tape(t)%hlist(fld,f)%nacs + hbuf => tape(t)%hlist(fld,f)%hbuf - deallocate(hbuf1d) - deallocate(nacs1d) - else - call ncd_io(ncid=ncid_hist(t), flag='write', varname=trim(name), & - dim1name=type1d_out, data=hbuf) - call ncd_io(ncid=ncid_hist(t), flag='write', varname=trim(name_acc), & - dim1name=type1d_out, data=nacs) - end if + if (num2d == 1) then + allocate(hbuf1d(beg1d_out:end1d_out), & + nacs1d(beg1d_out:end1d_out), stat=status) + if (status /= 0) then + write(iulog,*) trim(subname),' ERROR: allocation' + call endrun(msg=errMsg(sourcefile, __LINE__)) + end if + + hbuf1d(beg1d_out:end1d_out) = hbuf(beg1d_out:end1d_out,1) + nacs1d(beg1d_out:end1d_out) = nacs(beg1d_out:end1d_out,1) + + call ncd_io(ncid=ncid_hist(t,f), flag='write', varname=trim(name), & + dim1name=type1d_out, data=hbuf1d) + call ncd_io(ncid=ncid_hist(t,f), flag='write', varname=trim(name_acc), & + dim1name=type1d_out, data=nacs1d) + + deallocate(hbuf1d) + deallocate(nacs1d) + else + call ncd_io(ncid=ncid_hist(t,f), flag='write', varname=trim(name), & + dim1name=type1d_out, data=hbuf) + call ncd_io(ncid=ncid_hist(t,f), flag='write', varname=trim(name_acc), & + dim1name=type1d_out, data=nacs) + end if - end do + end do fld_loop3 - end if ! end of is_endhist block + end if ! end of is_endhist block - call ncd_pio_closefile(ncid_hist(t)) + call ncd_pio_closefile(ncid_hist(t,f)) - end do ! 
end of ntapes loop + end do file_loop7 + end do tape_loop7 else if (flag == 'read') then ! Read history restart information if history files are not full - do t = 1,ntapes - if (.not. history_tape_in_use(t)) then - cycle - end if - - if (.not. tape(t)%is_endhist) then - - do f = 1,tape(t)%nflds - name = tape(t)%hlist(f)%field%name - name_acc = trim(name) // "_acc" - type1d_out = tape(t)%hlist(f)%field%type1d_out - type2d = tape(t)%hlist(f)%field%type2d - num2d = tape(t)%hlist(f)%field%num2d - beg1d_out = tape(t)%hlist(f)%field%beg1d_out - end1d_out = tape(t)%hlist(f)%field%end1d_out - nacs => tape(t)%hlist(f)%nacs - hbuf => tape(t)%hlist(f)%hbuf - - if (num2d == 1) then - allocate(hbuf1d(beg1d_out:end1d_out), & - nacs1d(beg1d_out:end1d_out), stat=status) - if (status /= 0) then - write(iulog,*) trim(subname),' ERROR: allocation' - call endrun(msg=errMsg(sourcefile, __LINE__)) - end if + tape_loop8: do t = 1, ntapes + file_loop8: do f = 1, max_split_files + if (.not. history_tape_in_use(t,f)) then + cycle + end if - call ncd_io(ncid=ncid_hist(t), flag='read', varname=trim(name), & - dim1name=type1d_out, data=hbuf1d) - call ncd_io(ncid=ncid_hist(t), flag='read', varname=trim(name_acc), & - dim1name=type1d_out, data=nacs1d) + if (.not. 
tape(t)%is_endhist) then - hbuf(beg1d_out:end1d_out,1) = hbuf1d(beg1d_out:end1d_out) - nacs(beg1d_out:end1d_out,1) = nacs1d(beg1d_out:end1d_out) + fld_loop4: do fld = 1, tape(t)%nflds(f) + name = tape(t)%hlist(fld,f)%field%name + name_acc = trim(name) // "_acc" + type1d_out = tape(t)%hlist(fld,f)%field%type1d_out + type2d = tape(t)%hlist(fld,f)%field%type2d + num2d = tape(t)%hlist(fld,f)%field%num2d + beg1d_out = tape(t)%hlist(fld,f)%field%beg1d_out + end1d_out = tape(t)%hlist(fld,f)%field%end1d_out + nacs => tape(t)%hlist(fld,f)%nacs + hbuf => tape(t)%hlist(fld,f)%hbuf - deallocate(hbuf1d) - deallocate(nacs1d) - else - call ncd_io(ncid=ncid_hist(t), flag='read', varname=trim(name), & - dim1name=type1d_out, data=hbuf) - call ncd_io(ncid=ncid_hist(t), flag='read', varname=trim(name_acc), & - dim1name=type1d_out, data=nacs) - end if - end do + if (num2d == 1) then + allocate(hbuf1d(beg1d_out:end1d_out), & + nacs1d(beg1d_out:end1d_out), stat=status) + if (status /= 0) then + write(iulog,*) trim(subname),' ERROR: allocation' + call endrun(msg=errMsg(sourcefile, __LINE__)) + end if + + call ncd_io(ncid=ncid_hist(t,f), flag='read', varname=trim(name), & + dim1name=type1d_out, data=hbuf1d, posNOTonfile=.true.) + call ncd_io(ncid=ncid_hist(t,f), flag='read', varname=trim(name_acc), & + dim1name=type1d_out, data=nacs1d, posNOTonfile=.true.) + + hbuf(beg1d_out:end1d_out,1) = hbuf1d(beg1d_out:end1d_out) + nacs(beg1d_out:end1d_out,1) = nacs1d(beg1d_out:end1d_out) + + deallocate(hbuf1d) + deallocate(nacs1d) + else + call ncd_io(ncid=ncid_hist(t,f), flag='read', varname=trim(name), & + dim1name=type1d_out, data=hbuf, posNOTonfile=.true.) + call ncd_io(ncid=ncid_hist(t,f), flag='read', varname=trim(name_acc), & + dim1name=type1d_out, data=nacs, posNOTonfile=.true.) 
+ end if + end do fld_loop4 - end if + end if - call ncd_pio_closefile(ncid_hist(t)) + call ncd_pio_closefile(ncid_hist(t,f)) - end do + end do file_loop8 + end do tape_loop8 - end if + end if read_write end subroutine hist_restart_ncd @@ -5135,13 +5233,15 @@ integer function max_nFields() ! !ARGUMENTS: ! ! !LOCAL VARIABLES: - integer :: t ! index + integer :: t, f ! indices character(len=*),parameter :: subname = 'max_nFields' !----------------------------------------------------------------------- max_nFields = 0 do t = 1,ntapes - max_nFields = max(max_nFields, tape(t)%nflds) + do f = 1, max_split_files + max_nFields = max(max_nFields, tape(t)%nflds(f)) + end do end do return end function max_nFields @@ -5221,18 +5321,18 @@ subroutine list_index (list, name, index) ! !LOCAL VARIABLES: !EOP character(len=max_namlen) :: listname ! input name with ":" stripped off. - integer f ! field index + integer fld ! field index character(len=*),parameter :: subname = 'list_index' !----------------------------------------------------------------------- ! Only list items index = 0 - do f=1,max_flds - listname = getname (list(f)) + do fld = 1, max_flds + listname = getname (list(fld)) if (listname == ' ') exit if (listname == name) then - index = f + index = fld exit end if end do @@ -5240,7 +5340,7 @@ subroutine list_index (list, name, index) end subroutine list_index !----------------------------------------------------------------------- - character(len=max_length_filename) function set_hist_filename (hist_freq, hist_mfilt, hist_file) + character(len=max_length_filename) function set_hist_filename (hist_freq, hist_mfilt, hist_file, f_index) ! ! !DESCRIPTION: ! Determine history dataset filenames. 
@@ -5255,11 +5355,13 @@ character(len=max_length_filename) function set_hist_filename (hist_freq, hist_m integer, intent(in) :: hist_freq !history file frequency integer, intent(in) :: hist_mfilt !history file number of time-samples integer, intent(in) :: hist_file !history file index + integer, intent(in) :: f_index ! instantaneous or accumulated_file_index ! ! !LOCAL VARIABLES: !EOP character(len=max_chars) :: cdate !date char string character(len= 1) :: hist_index !p,1 or 2 (currently) + character(len = 1) :: file_index ! instantaneous or accumulated_file_index integer :: day !day (1 -> 31) integer :: mon !month (1 -> 12) integer :: yr !year (0 -> ...) @@ -5276,8 +5378,13 @@ character(len=max_length_filename) function set_hist_filename (hist_freq, hist_m write(cdate,'(i4.4,"-",i2.2,"-",i2.2,"-",i5.5)') yr,mon,day,sec endif write(hist_index,'(i1.1)') hist_file - 1 + if (f_index == instantaneous_file_index) then + file_index = 'i' ! instantaneous file_index + else if (f_index == accumulated_file_index) then + file_index = 'a' ! accumulated file_index + end if set_hist_filename = "./"//trim(caseid)//"."//trim(compname)//trim(inst_suffix)//& - ".h"//hist_index//"."//trim(cdate)//".nc" + ".h"//hist_index//file_index//"."//trim(cdate)//".nc" ! check to see if the concatenated filename exceeded the ! length. Simplest way to do this is ensure that the file diff --git a/src/main/ncdio_pio.F90.in b/src/main/ncdio_pio.F90.in index 86f3e0cb43..991823fb67 100644 --- a/src/main/ncdio_pio.F90.in +++ b/src/main/ncdio_pio.F90.in @@ -541,14 +541,6 @@ contains character(len=32) :: subname = 'ncd_inqfdims' ! subroutine name !----------------------------------------------------------------------- - if (single_column) then - ni = 1 - nj = 1 - ns = 1 - isgrid2d = .true. - RETURN - end if - ni = 0 nj = 0 @@ -1353,7 +1345,7 @@ contains start(:) = 0 count(:) = 0 - if (flag == 'read') then + if (flag == 'read' .or. 
flag == 'read_noscm') then call ncd_inqvid(ncid, varname, varid, vardesc, readvar=varpresent) @@ -1382,7 +1374,7 @@ contains #else if (varpresent) then allocate(idata1d(size(data))) - if (single_column) then + if (single_column .and. flag == 'read') then call scam_field_offsets(ncid,'undefined', vardesc,& start, count, found=found, posNOTonfile=posNOTonfile) if ( found )then @@ -1478,7 +1470,7 @@ contains start(:) = 0 count(:) = 0 - if (flag == 'read') then + if (flag == 'read' .or. flag == 'read_noscm') then call ncd_inqvid(ncid, varname, varid, vardesc, readvar=varpresent) @@ -1499,7 +1491,7 @@ contains end if #else if (varpresent) then - if (single_column) then + if (single_column .and. flag == 'read') then call scam_field_offsets(ncid,'undefined', vardesc,& start, count, found=found, posNOTonfile=posNOTonfile) if ( found )then @@ -1648,7 +1640,7 @@ contains !----------------------------------------------------------------------- !TYPE int,double,logical - subroutine ncd_io_1d_{TYPE}(varname, data, dim1name, flag, ncid, nt, readvar, cnvrtnan2fill) + subroutine ncd_io_1d_{TYPE}(varname, data, dim1name, flag, ncid, nt, readvar, cnvrtnan2fill, posNOTonfile) ! ! !DESCRIPTION: ! netcdf I/O for 1d @@ -1662,6 +1654,7 @@ contains integer , optional, intent(in) :: nt ! time sample index logical , optional, intent(out) :: readvar ! true => variable is on initial dataset (read only) logical , optional, intent(in) :: cnvrtnan2fill ! true => convert any NaN's to _FillValue (spval) + logical , optional, intent(in) :: posNOTonfile ! Position is NOT on this file ! ! Local Variables character(len=8) :: subgrid_level_name ! nameg, namel, etc. @@ -1676,12 +1669,15 @@ contains integer :: start(3) ! netcdf start index integer :: count(3) ! netcdf count index integer :: status ! error code + logical :: found ! if true, found lat/lon dims on file logical :: varpresent ! if true, variable is on tape integer :: xtype ! type of var in file integer , pointer :: idata(:) ! 
Temporary integer data to send to file type(iodesc_plus_type) , pointer :: iodesc_plus type(var_desc_t) :: vardesc integer :: oldhandle ! previous value of pio_error_handle + integer :: ni,nj,ns ! lat/lon indicies + logical :: isgrid2d ! if true, latlon grid character(len=*),parameter :: subname='ncd_io_1d_{TYPE}' ! subroutine name !----------------------------------------------------------------------- @@ -1703,14 +1699,15 @@ contains end if #endif - if (flag == 'read') then + if (flag == 'read' .or. flag == 'read_noscm') then call ncd_inqvid(ncid, varname, varid, vardesc, readvar=varpresent) if (varpresent) then if (single_column) then start(:) = 1 ; count(:) = 1 - call scam_field_offsets(ncid,subgrid_level_name,vardesc,start,count) - if (trim(subgrid_level_name) == grlnd) then + call scam_field_offsets(ncid,subgrid_level_name,vardesc,start,count,found=found,posNOTonfile=posNOTonfile) + call ncd_inqfdims(ncid, isgrid2d, ni, nj, ns) + if (isgrid2d) then n=2 if (present(nt)) then start(3) = nt ; count(3) = 1 @@ -1823,7 +1820,7 @@ contains !TYPE int,double subroutine ncd_io_2d_{TYPE}(varname, data, dim1name, lowerb2, upperb2, & - flag, ncid, nt, readvar, switchdim, cnvrtnan2fill) + flag, ncid, nt, readvar, switchdim, cnvrtnan2fill, posNOTonfile ) ! ! !DESCRIPTION: ! Netcdf i/o of 2d @@ -1839,7 +1836,7 @@ contains logical, optional, intent(out) :: readvar ! true => variable is on initial dataset (read only) logical, optional, intent(in) :: switchdim ! true=> permute dim1 and dim2 for output logical, optional, intent(in) :: cnvrtnan2fill ! true => convert any NaN's to _FillValue (spval) - ! + logical, optional, intent(in) :: posNOTonfile ! Position is NOT on this file ! ! !LOCAL VARIABLES: #if ({ITYPE}==TYPEINT) integer , pointer :: temp(:,:) @@ -1862,7 +1859,10 @@ contains logical :: varpresent ! if true, variable is on tape integer :: lb1,lb2 integer :: ub1,ub2 + integer :: ni,nj,ns + logical :: isgrid2d ! if true, latlon grid integer :: xtype ! 
netcdf type of variable on file + logical :: found ! if true, found lat/lon dims on file type(iodesc_plus_type) , pointer :: iodesc_plus type(var_desc_t) :: vardesc @@ -1898,14 +1898,16 @@ contains allocate(temp(lb2:ub2,lb1:ub1)) end if - if (flag == 'read') then + if (flag == 'read' .or. flag == 'read_noscm') then call ncd_inqvid(ncid, varname, varid, vardesc, readvar=varpresent) if (varpresent) then - if (single_column) then + if (single_column .and. flag == 'read') then start(:) = 1 ; count(:) = 1 - call scam_field_offsets(ncid, subgrid_level_name, vardesc, start, count) - if (trim(subgrid_level_name) == grlnd) then + call scam_field_offsets(ncid, subgrid_level_name, vardesc, start, count,found=found,posNOTonfile=posNOTonfile) + call ncd_inqfdims(ncid, isgrid2d, ni, nj, ns) + call ncd_inqvdims(ncid, ndims, vardesc) + if (isgrid2d) then count(3) = size(data,dim=2) n=3 if (present(nt)) then @@ -1913,11 +1915,9 @@ contains n=4 end if else - count(2) = size(data,dim=2) - n=2 + n=ndims if (present(nt)) then - start(3) = nt ; count(3) = 1 - n=3 + start(n) = nt ; count(n) = 1 end if end if if (present(switchdim)) then @@ -2067,7 +2067,7 @@ contains !----------------------------------------------------------------------- !TYPE int,double - subroutine ncd_io_3d_{TYPE}(varname, data, dim1name, flag, ncid, nt, readvar) + subroutine ncd_io_3d_{TYPE}(varname, data, dim1name, flag, ncid, nt, readvar, posNOTonfile) ! ! !DESCRIPTION: ! Netcdf i/o of 3d @@ -2080,7 +2080,7 @@ contains character(len=*) , intent(in) :: dim1name ! dimension 1 name integer, optional, intent(in) :: nt ! time sample index logical, optional, intent(out) :: readvar ! true => variable is on initial dataset (read only) - ! + logical, optional, intent(in) :: posNOTonfile ! Position is NOT on this file ! ! !LOCAL VARIABLES: integer :: ndim1,ndim2 character(len=8) :: subgrid_level_name ! nameg, namel, etc. @@ -2099,6 +2099,9 @@ contains logical :: varpresent ! 
if true, variable is on tape type(iodesc_plus_type) , pointer :: iodesc_plus type(var_desc_t) :: vardesc + integer :: ni,nj,ns + logical :: isgrid2d ! if true, latlon grid + logical :: found ! if true, found lat/lon dims on file character(len=*),parameter :: subname='ncd_io_3d_{TYPE}' ! subroutine name !----------------------------------------------------------------------- @@ -2108,15 +2111,16 @@ contains write(iulog,*) trim(subname),' ',trim(flag),' ',trim(varname),' ',trim(subgrid_level_name) end if - if (flag == 'read') then + if (flag == 'read' .or. flag == 'read_noscm') then call ncd_inqvid(ncid, varname, varid, vardesc, readvar=varpresent) if (varpresent) then - if (single_column) then + if (single_column .and. flag == 'read') then start(:) = 1 count(:) = 1 - call scam_field_offsets(ncid, subgrid_level_name, vardesc, start, count) - if (trim(subgrid_level_name) == grlnd) then + call scam_field_offsets(ncid, subgrid_level_name, vardesc, start, count,found=found,posNOTonfile=posNOTonfile) + call ncd_inqfdims(ncid, isgrid2d, ni, nj, ns) + if (isgrid2d) then count(3) = size(data,dim=2); count(4) = size(data,dim=3) n=4 @@ -2435,7 +2439,10 @@ contains if ( trim(dimname)=='nj'.or. trim(dimname)=='lat'.or. trim(dimname)=='lsmlat') then start(i)=latidx count(i)=1 - else if ( trim(dimname)=='ni'.or. trim(dimname)=='lon'.or. trim(dimname)=='lsmlon') then + else if ( trim(dimname)=='ni'.or. trim(dimname)=='lon'.or. trim(dimname)=='lsmlon'.or. trim(dimname)=='gridcell') then + start(i)=lonidx + count(i)=1 + else if ( trim(dimname)=='gridcell') then start(i)=lonidx count(i)=1 else if ( trim(dimname)=='column') then diff --git a/src/main/organicFileMod.F90 b/src/main/organicFileMod.F90 index 3adbd5b6f1..5b61a8c0db 100644 --- a/src/main/organicFileMod.F90 +++ b/src/main/organicFileMod.F90 @@ -6,8 +6,8 @@ module organicFileMod ! !MODULE: organicFileMod ! ! !DESCRIPTION: -! Contains methods for reading in organic matter data file which has -! 
organic matter density for each grid point and soil level +! Contains methods for reading in organic matter data file which has +! organic matter density for each grid point and soil level ! ! !USES use abortutils , only : endrun @@ -30,7 +30,7 @@ module organicFileMod ! !EOP ! -!----------------------------------------------------------------------- +!----------------------------------------------------------------------- contains @@ -42,7 +42,7 @@ module organicFileMod ! !INTERFACE: subroutine organicrd(organic) ! -! !DESCRIPTION: +! !DESCRIPTION: ! Read the organic matter dataset. ! ! !USES: @@ -68,7 +68,7 @@ subroutine organicrd(organic) !EOP character(len=256) :: locfn ! local file name type(file_desc_t) :: ncid ! netcdf id - integer :: ni,nj,ns ! dimension sizes + integer :: ni,nj,ns ! dimension sizes logical :: isgrid2d ! true => file is 2d logical :: readvar ! true => variable is on dataset character(len=32) :: subname = 'organicrd' ! subroutine name @@ -77,9 +77,9 @@ subroutine organicrd(organic) ! Initialize data to zero - no organic matter dataset organic(:,:) = 0._r8 - + ! Read data if file was specified in namelist - + if (fsurdat /= ' ') then if (masterproc) then write(iulog,*) 'Attempting to read organic matter data .....' @@ -90,14 +90,14 @@ subroutine organicrd(organic) call ncd_pio_openfile (ncid, locfn, 0) call ncd_inqfdims (ncid, isgrid2d, ni, nj, ns) - if (ldomain%ns /= ns .or. ldomain%ni /= ni .or. ldomain%nj /= nj) then + if (.not. single_column .and. (ldomain%ns /= ns .or. ldomain%ni /= ni .or. ldomain%nj /= nj)) then write(iulog,*)trim(subname), 'ldomain and input file do not match dims ' write(iulog,*)trim(subname), 'ldomain%ni,ni,= ',ldomain%ni,ni write(iulog,*)trim(subname), 'ldomain%nj,nj,= ',ldomain%nj,nj write(iulog,*)trim(subname), 'ldomain%ns,ns,= ',ldomain%ns,ns call endrun() end if - + call ncd_io(ncid=ncid, varname='ORGANIC', flag='read', data=organic, & dim1name=grlnd, readvar=readvar) if (.not. 
readvar) call endrun('organicrd: errror reading ORGANIC') diff --git a/src/main/surfrdMod.F90 b/src/main/surfrdMod.F90 index 4005ec7845..c70ec28fa0 100644 --- a/src/main/surfrdMod.F90 +++ b/src/main/surfrdMod.F90 @@ -363,8 +363,10 @@ subroutine surfrd_get_num_patches (lfsurdat, actual_maxsoil_patches, actual_nump type(file_desc_t) :: ncid ! netcdf file id integer :: dimid ! netCDF dimension id logical :: cft_dim_exists ! dimension exists on dataset + logical :: natpft_dim_exists ! dimension exists on dataset integer :: check_numpft ! Surface dataset count of numpft, should ! match maxsoil_patches - actual_numcft + integer :: actual_numnatpft ! natpft value from sfc dataset character(len=32) :: subname = 'surfrd_get_num_patches' ! subroutine name !----------------------------------------------------------------------- @@ -396,9 +398,17 @@ subroutine surfrd_get_num_patches (lfsurdat, actual_maxsoil_patches, actual_nump call ncd_inqdlen(ncid, dimid, actual_maxsoil_patches, 'lsmpft') actual_numpft = actual_maxsoil_patches - actual_numcft - call ncd_inqdlen(ncid, dimid, check_numpft, 'natpft') + ! 
Read numpft + call ncd_inqdid(ncid, 'natpft', dimid, natpft_dim_exists) + if ( natpft_dim_exists ) then + call ncd_inqdlen(ncid, dimid, actual_numnatpft, 'natpft') + call ncd_inqdlen(ncid, dimid, check_numpft, 'natpft') + else + actual_numnatpft = 0 + end if - if(check_numpft.ne.actual_numpft)then +!jt if(check_numpft.ne.actual_numpft)then + if(actual_numcft+actual_numnatpft.ne.actual_maxsoil_patches)then write(iulog,*)'the sum of the cftdim and the natpft dim should match the lsmpft dim in the surface file' write(iulog,*)'natpft: ',check_numpft write(iulog,*)'lsmpft: ',actual_maxsoil_patches diff --git a/src/soilbiogeochem/CMakeLists.txt b/src/soilbiogeochem/CMakeLists.txt index e2baa2d1b2..ac467c3e5f 100644 --- a/src/soilbiogeochem/CMakeLists.txt +++ b/src/soilbiogeochem/CMakeLists.txt @@ -2,6 +2,7 @@ # source files that are currently used in unit tests list(APPEND clm_sources + SoilBiogeochemCarbonFluxType.F90 SoilBiogeochemStateType.F90 SoilBiogeochemDecompCascadeConType.F90 SoilBiogeochemStateType.F90 diff --git a/src/unit_test_stubs/main/ncdio_pio_fake.F90.in b/src/unit_test_stubs/main/ncdio_pio_fake.F90.in index e8ef14e457..7f38565e90 100644 --- a/src/unit_test_stubs/main/ncdio_pio_fake.F90.in +++ b/src/unit_test_stubs/main/ncdio_pio_fake.F90.in @@ -48,6 +48,7 @@ module ncdio_pio public :: check_var ! determine if variable is on netcdf file public :: check_dim ! determine if dimension is on netcdf file public :: check_var_or_dim ! determine if variable or dimension is on netcdf file + public :: check_dim_size ! validity check on dimension public :: ncd_io ! do fake i/o (currently only set up to read) public :: ncd_inqvid ! inquire on a variable id public :: ncd_set_var ! set data on "file" for one variable @@ -340,6 +341,25 @@ contains end subroutine check_var_or_dim + !----------------------------------------------------------------------- + subroutine check_dim_size(ncid, dimname, value, msg) + ! + ! !DESCRIPTION: + ! Validity check on dimension + ! + ! 
!ARGUMENTS: + class(file_desc_t),intent(in) :: ncid ! PIO file handle + character(len=*) , intent(in) :: dimname ! Dimension name + integer, intent(in) :: value ! Expected dimension size + + character(len=*), intent(in), optional :: msg ! Optional additional message printed upon error + ! + ! !LOCAL VARIABLES: + !----------------------------------------------------------------------- + + ! Does nothing assumes the dim size is as expected + + end subroutine check_dim_size !----------------------------------------------------------------------- subroutine ncd_inqdid(ncid, name, dimid, dimexist) diff --git a/src/unit_test_stubs/share_esmf/CMakeLists.txt b/src/unit_test_stubs/share_esmf/CMakeLists.txt index 1d767543ea..368601dcc8 100644 --- a/src/unit_test_stubs/share_esmf/CMakeLists.txt +++ b/src/unit_test_stubs/share_esmf/CMakeLists.txt @@ -1,5 +1,7 @@ list(APPEND clm_sources ExcessIceStreamType.F90 + FireDataBaseType.F90 + laiStreamMod.F90 PrigentRoughnessStreamType.F90 ZenderSoilErodStreamType.F90 ) diff --git a/src/unit_test_stubs/share_esmf/FireDataBaseType.F90 b/src/unit_test_stubs/share_esmf/FireDataBaseType.F90 new file mode 100644 index 0000000000..63046188a3 --- /dev/null +++ b/src/unit_test_stubs/share_esmf/FireDataBaseType.F90 @@ -0,0 +1,123 @@ +module FireDataBaseType + +#include "shr_assert.h" + + !----------------------------------------------------------------------- + ! !DESCRIPTION: + ! module for handling of fire data + ! UNIT-TEST STUB for fire data Streams + ! This just allows the fire code to be tested without + ! reading in the streams data, by faking it and setting it to a + ! constant value. + ! + ! !USES: + use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL + use shr_log_mod , only : errMsg => shr_log_errMsg + use clm_varctl , only : iulog + use spmdMod , only : masterproc, mpicom, iam + use abortutils , only : endrun + use decompMod , only : bounds_type + use FireMethodType , only : fire_method_type + ! 
+ implicit none + private + ! + ! !PUBLIC TYPES: + public :: fire_base_type + ! + type, abstract, extends(fire_method_type) :: fire_base_type + private + ! !PRIVATE MEMBER DATA: + real(r8), public, pointer :: forc_hdm(:) ! Human population density + real(r8), public, pointer :: forc_lnfm(:) ! Lightning frequency + real(r8), public, pointer :: gdp_lf_col(:) ! col global real gdp data (k US$/capita) + real(r8), public, pointer :: peatf_lf_col(:) ! col global peatland fraction data (0-1) + integer , public, pointer :: abm_lf_col(:) ! col global peak month of crop fire emissions + + contains + ! + ! !PUBLIC MEMBER FUNCTIONS: + procedure, public :: BaseFireInit ! Initialization of Fire + procedure, public :: FireInit => BaseFireInit ! Initialization of Fire + procedure, public :: FireInterp ! Interpolate fire data + procedure, public :: BaseFireReadNML ! Read in the namelist + procedure, public :: FireReadNML => BaseFireReadNML ! Read in the namelist + procedure(need_lightning_and_popdens_interface), public, deferred :: & + need_lightning_and_popdens ! Returns true if need lightning & popdens + + end type fire_base_type + + abstract interface + !----------------------------------------------------------------------- + function need_lightning_and_popdens_interface(this) result(need_lightning_and_popdens) + ! + ! !DESCRIPTION: + ! Returns true if need lightning and popdens, false otherwise + ! + ! USES + import :: fire_base_type + ! + ! !ARGUMENTS: + class(fire_base_type), intent(in) :: this + logical :: need_lightning_and_popdens ! 
function result + !----------------------------------------------------------------------- + end function need_lightning_and_popdens_interface + end interface + + character(len=*), parameter, private :: sourcefile = & + __FILE__ + +!============================================================================== +contains +!============================================================================== + + subroutine BaseFireReadNML( this, bounds, NLFilename ) + ! + ! !DESCRIPTION: + ! Read the namelist for Fire + ! + ! !USES: + ! + ! !ARGUMENTS: + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + character(len=*), intent(in) :: NLFilename ! Namelist filename + end subroutine BaseFireReadNML + + !================================================================ + subroutine BaseFireInit( this, bounds ) + ! + ! !DESCRIPTION: + ! Initialize CN Fire module + ! !USES: + use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) + ! + ! !ARGUMENTS: + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + !----------------------------------------------------------------------- + + if ( this%need_lightning_and_popdens() ) then + + end if + + end subroutine BaseFireInit + + !================================================================ + subroutine FireInterp(this,bounds) + ! + ! !DESCRIPTION: + ! Interpolate CN Fire datasets + ! + ! 
!ARGUMENTS: + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + !----------------------------------------------------------------------- + + if ( this%need_lightning_and_popdens() ) then + + end if + + end subroutine FireInterp + +end module FireDataBaseType diff --git a/src/unit_test_stubs/share_esmf/laiStreamMod.F90 b/src/unit_test_stubs/share_esmf/laiStreamMod.F90 new file mode 100644 index 0000000000..a39a3eb053 --- /dev/null +++ b/src/unit_test_stubs/share_esmf/laiStreamMod.F90 @@ -0,0 +1,74 @@ +module laiStreamMod + + !----------------------------------------------------------------------- + ! !DESCRIPTION: + ! Read LAI from stream + ! + ! !USES: + use decompMod , only : bounds_type + use abortutils , only : endrun + use clm_varctl , only : iulog + ! + ! !PUBLIC TYPES: + implicit none + private + + ! !PUBLIC MEMBER FUNCTIONS: + public :: lai_init ! position datasets for LAI + public :: lai_advance ! Advance the LAI streams (outside of a Open-MP threading loop) + public :: lai_interp ! interpolates between two years of LAI data (when LAI streams + + character(len=*), parameter :: sourcefile = & + __FILE__ + +!============================================================================== +contains +!============================================================================== + + subroutine lai_init(bounds) + ! + ! Initialize data stream information for LAI. + ! + ! !USES: + ! + ! !ARGUMENTS: + type(bounds_type), intent(in) :: bounds ! bounds + ! + ! !LOCAL VARIABLES: + !----------------------------------------------------------------------- + + end subroutine lai_init + + !================================================================ + subroutine lai_advance( bounds ) + ! + ! Advance LAI streams + ! + ! !USES: + ! + ! !ARGUMENTS: + type(bounds_type), intent(in) :: bounds + ! + ! 
!LOCAL VARIABLES: + !----------------------------------------------------------------------- + + end subroutine lai_advance + + !================================================================ + subroutine lai_interp(bounds, canopystate_inst) + ! + ! Interpolate data stream information for Lai. + ! + ! !USES: + use CanopyStateType , only : canopystate_type + ! + ! !ARGUMENTS: + type(bounds_type) , intent(in) :: bounds + type(canopystate_type) , intent(inout) :: canopystate_inst + ! + ! !LOCAL VARIABLES: + !----------------------------------------------------------------------- + + end subroutine lai_interp + +end module LaiStreamMod diff --git a/src/utils/clmfates_interfaceMod.F90 b/src/utils/clmfates_interfaceMod.F90 index 2effb561dd..289244ae89 100644 --- a/src/utils/clmfates_interfaceMod.F90 +++ b/src/utils/clmfates_interfaceMod.F90 @@ -3231,7 +3231,8 @@ subroutine Init2(this, bounds, NLFilename) call t_startf('fates_init2') - call this%fates_fire_data_method%FireInit(bounds, NLFilename) + call this%fates_fire_data_method%FireInit(bounds) + call this%fates_fire_data_method%FireReadNML(bounds, NLFilename) call t_stopf('fates_init2') diff --git a/tools/contrib/SpinupStability_BGC_v10.ncl b/tools/contrib/SpinupStability_BGC_v10.ncl index 5ed7516455..f0ebcbd7be 100644 --- a/tools/contrib/SpinupStability_BGC_v10.ncl +++ b/tools/contrib/SpinupStability_BGC_v10.ncl @@ -85,9 +85,9 @@ begin end if if (annual_hist) then - fls = systemfunc("ls " + data_dir + caseid+".clm2.h0.*-*-*-*"+".nc") + fls = systemfunc("ls " + data_dir + caseid+".clm2.h0a.*-*-*-*"+".nc") else - fls = systemfunc("ls " + data_dir + caseid+".clm2.h0.*-*"+".nc") + fls = systemfunc("ls " + data_dir + caseid+".clm2.h0a.*-*"+".nc") end if flsdims = dimsizes(fls) diff --git a/tools/contrib/SpinupStability_BGC_v11_SE.ncl b/tools/contrib/SpinupStability_BGC_v11_SE.ncl index db00c0b484..5666016a87 100644 --- a/tools/contrib/SpinupStability_BGC_v11_SE.ncl +++ b/tools/contrib/SpinupStability_BGC_v11_SE.ncl @@ 
-81,9 +81,9 @@ begin totecosysc_thresh = 1.0 ; disequilibrium threshold for individual gridcells (gC/m2/yr) if (annual_hist) then - fls = systemfunc("ls " + data_dir + caseid+".clm2.h0.*-*-*-*"+".nc") + fls = systemfunc("ls " + data_dir + caseid+".clm2.h0a.*-*-*-*"+".nc") else - fls = systemfunc("ls " + data_dir + caseid+".clm2.h0.*-*"+".nc") + fls = systemfunc("ls " + data_dir + caseid+".clm2.h0a.*-*"+".nc") end if flsdims = dimsizes(fls) diff --git a/tools/contrib/SpinupStability_SP_v9.ncl b/tools/contrib/SpinupStability_SP_v9.ncl index 58a769a910..b0bc7ff839 100644 --- a/tools/contrib/SpinupStability_SP_v9.ncl +++ b/tools/contrib/SpinupStability_SP_v9.ncl @@ -54,9 +54,9 @@ begin tws_thresh = 0.001 ; disequilibrium threshold for individual gridcells (m) if (annual_hist .eq. "True") then - fls = systemfunc("ls " + data_dir + caseid+".clm2.h0.*-*-*-*"+".nc") + fls = systemfunc("ls " + data_dir + caseid+".clm2.h0a.*-*-*-*"+".nc") else - fls = systemfunc("ls " + data_dir + caseid+".clm2.h0.*-*"+".nc") + fls = systemfunc("ls " + data_dir + caseid+".clm2.h0a.*-*"+".nc") end if flsdims = dimsizes(fls) diff --git a/tools/contrib/run_clm_historical b/tools/contrib/run_clm_historical index 8dc9269d3b..775d1aab1d 100755 --- a/tools/contrib/run_clm_historical +++ b/tools/contrib/run_clm_historical @@ -125,7 +125,7 @@ while ($DONE_RUNA == 0) set DONE_RUNA = 1 echo '1850-1870 run is complete' while ($DONE_ARCHIVE == 0) - set nh0 = `ls -l $WDIR/*clm?.h0.* | egrep -c '^-'` + set nh0 = `ls -l $WDIR/*clm?.h0a.* | egrep -c '^-'` echo $nh0 if ($nh0 == 1) then set DONE_ARCHIVE = 1 @@ -177,7 +177,7 @@ while ($DONE_RUNA == 0) set DONE_RUNA = 1 echo '1850-1900 run is complete' while ($DONE_ARCHIVE == 0) - set nh0 = `ls -l $WDIR/*clm?.h0.* | egrep -c '^-'` + set nh0 = `ls -l $WDIR/*clm?.h0a.* | egrep -c '^-'` echo $nh0 if ($nh0 == 1) then set DONE_ARCHIVE = 1 @@ -242,7 +242,7 @@ while ($DONE_RUNA == 0) set DONE_RUNA = 1 echo '1901-1989 run is complete' while ($DONE_ARCHIVE == 0) - set 
nh0 = `ls -l $WDIR/*clm?.h0.* | egrep -c '^-'` + set nh0 = `ls -l $WDIR/*clm?.h0a.* | egrep -c '^-'` echo $nh0 if ($nh0 == 1) then set DONE_ARCHIVE = 1 @@ -295,7 +295,7 @@ while ($DONE_RUNA == 0) set DONE_RUNA = 1 echo '1989-2004 run is complete' while ($DONE_ARCHIVE == 0) - set nh0 = `ls -l $WDIR/*clm?.h0.* | egrep -c '^-'` + set nh0 = `ls -l $WDIR/*clm?.h0a.* | egrep -c '^-'` echo $nh0 if ($nh0 == 1) then set DONE_ARCHIVE = 1 @@ -349,7 +349,7 @@ while ($DONE_RUNA == 0) set DONE_RUNA = 1 echo '2005-2014 run is complete' while ($DONE_ARCHIVE == 0) - set nh0 = `ls -l $WDIR/*clm?.h0.* | egrep -c '^-'` + set nh0 = `ls -l $WDIR/*clm?.h0a.* | egrep -c '^-'` echo $nh0 if ($nh0 == 1) then set DONE_ARCHIVE = 1 diff --git a/tools/mksurfdata_esmf/gen_mksurfdata_namelist.xml b/tools/mksurfdata_esmf/gen_mksurfdata_namelist.xml index 007be2e8bb..a2266bf0a0 100644 --- a/tools/mksurfdata_esmf/gen_mksurfdata_namelist.xml +++ b/tools/mksurfdata_esmf/gen_mksurfdata_namelist.xml @@ -10,7 +10,7 @@ - /glade/campaign/cesm/development/lmwg/landuse_source_data/CTSM53CMIP7RawData/globalctsm53histMKSRFDeg025_250417/mksrf_landuse_ctsm53_pftlai_CLIM.c250417.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53histMKSRFDeg025_240709/mksrf_landuse_ctsm53_pftlai_CLIM.c240709.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc @@ -21,7 +21,7 @@ - /glade/campaign/cesm/development/lmwg/landuse_source_data/CTSM53CMIP7RawData/globalctsm53histMKSRFDeg025_250417/mksrf_landuse_ctsm53_soilcolor_CLIM.c250417.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53histMKSRFDeg025_240709/mksrf_landuse_ctsm53_soilcolor_CLIM.c240709.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc @@ -93,7 +93,7 @@ - lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc + lnd/clm2/mappingdata/grids/UNSTRUCTgrid_3x3min_nomask_cdf5_c200129.nc @@ -213,31 +213,31 @@ version of the raw dataset will probably go away. 
- /glade/campaign/cesm/development/lmwg/landuse_source_data/CTSM53CMIP7RawData/globalctsm53histCMIP7Deg025_250417/mksrf_landuse_ctsm53_histCMIP7_1700.c250417.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53histTRENDY2024Deg025_240728/mksrf_landuse_ctsm53_histTRENDY2024_1700.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_1850.cdf5.c20220325.nc - /glade/campaign/cgd/tss/people/oleson/Dynamic_Urban_Data/CMIP7/urban_properties_CMIP7_ThreeClass_1700_c250423.nc + lnd/clm2/rawdata/gao_oneill_urban/historical/urban_properties_GaoOneil_05deg_ThreeClass_1850_cdf5_c20220910.nc - /glade/campaign/cesm/development/lmwg/landuse_source_data/CTSM53CMIP7RawData/globalctsm53histCMIP7Deg025_250417/mksrf_landuse_ctsm53_histCMIP7_1850.c250417.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53histTRENDY2024Deg025_240728/mksrf_landuse_ctsm53_histTRENDY2024_1850.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_1850.cdf5.c20220325.nc - /glade/campaign/cgd/tss/people/oleson/Dynamic_Urban_Data/CMIP7/urban_properties_CMIP7_ThreeClass_1850_c250423.nc + lnd/clm2/rawdata/gao_oneill_urban/historical/urban_properties_GaoOneil_05deg_ThreeClass_1850_cdf5_c20220910.nc - /glade/campaign/cesm/development/lmwg/landuse_source_data/CTSM53CMIP7RawData/globalctsm53histCMIP7Deg025_250417/mksrf_landuse_ctsm53_histCMIP7_2000.c250417.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53histTRENDY2024Deg025_240728/mksrf_landuse_ctsm53_histTRENDY2024_2000.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_2000.cdf5.c20220325.nc - /glade/campaign/cgd/tss/people/oleson/Dynamic_Urban_Data/CMIP7/urban_properties_CMIP7_ThreeClass_2000_c250423.nc + 
lnd/clm2/rawdata/gao_oneill_urban/historical/urban_properties_GaoOneil_05deg_ThreeClass_2000_cdf5_c20220910.nc - /glade/campaign/cesm/development/lmwg/landuse_source_data/CTSM53CMIP7RawData/globalctsm53histCMIP7Deg025_250417/mksrf_landuse_ctsm53_histCMIP7_2005.c250417.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53histTRENDY2024Deg025_240728/mksrf_landuse_ctsm53_histTRENDY2024_2005.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_2005.cdf5.c20220325.nc - /glade/campaign/cgd/tss/people/oleson/Dynamic_Urban_Data/CMIP7/urban_properties_CMIP7_ThreeClass_2005_c250423.nc + lnd/clm2/rawdata/gao_oneill_urban/historical/urban_properties_GaoOneil_05deg_ThreeClass_2005_cdf5_c20220910.nc @@ -251,11 +251,12 @@ version of the raw dataset will probably go away. + - /glade/campaign/cesm/development/lmwg/landuse_source_data/CTSM53CMIP7RawData/globalctsm53histCMIP7Deg025_250417/mksrf_landuse_ctsm53_histCMIP7_%y.c250417.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53histTRENDY2024Deg025_240728/mksrf_landuse_ctsm53_histTRENDY2024_%y.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_%y.cdf5.c20220325.nc - /glade/campaign/cgd/tss/people/oleson/Dynamic_Urban_Data/CMIP7/urban_properties_CMIP7_ThreeClass_%y_c250423.nc + lnd/clm2/rawdata/gao_oneill_urban/historical/urban_properties_GaoOneil_05deg_ThreeClass_%y_cdf5_c20220910.nc @@ -267,7 +268,7 @@ version of the raw dataset will probably go away. 
- /glade/campaign/cesm/cesmdata/inputdata/lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP126Deg025_240728/mksrf_landuse_ctsm53_TRSSP126_%y.c240728.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP126Deg025_240728/mksrf_landuse_ctsm53_TRSSP126_%y.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_%y.cdf5.c20220325.nc lnd/clm2/rawdata/gao_oneill_urban/ssp1/urban_properties_GaoOneil_05deg_ThreeClass_ssp1_%y_cdf5_c20220910.nc @@ -280,21 +281,21 @@ version of the raw dataset will probably go away. - /glade/campaign/cesm/cesmdata/inputdata/lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP119Deg025_240728/mksrf_landuse_ctsm53_TRSSP119_%y.c240728.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP119Deg025_240728/mksrf_landuse_ctsm53_TRSSP119_%y.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_%y.cdf5.c20220325.nc lnd/clm2/rawdata/gao_oneill_urban/ssp1/urban_properties_GaoOneil_05deg_ThreeClass_ssp1_%y_cdf5_c20220910.nc - /glade/campaign/cesm/cesmdata/inputdata/lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP245Deg025_240728/mksrf_landuse_ctsm53_TRSSP245_%y.c240728.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP245Deg025_240728/mksrf_landuse_ctsm53_TRSSP245_%y.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_%y.cdf5.c20220325.nc lnd/clm2/rawdata/gao_oneill_urban/ssp2/urban_properties_GaoOneil_05deg_ThreeClass_ssp2_%y_cdf5_c20220910.nc - /glade/campaign/cesm/cesmdata/inputdata/lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP370Deg025_240728/mksrf_landuse_ctsm53_TRSSP370_%y.c240728.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP370Deg025_240728/mksrf_landuse_ctsm53_TRSSP370_%y.c240728.nc 
lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_%y.cdf5.c20220325.nc lnd/clm2/rawdata/gao_oneill_urban/ssp3/urban_properties_GaoOneil_05deg_ThreeClass_ssp3_%y_cdf5_c20220910.nc @@ -308,14 +309,14 @@ version of the raw dataset will probably go away. - /glade/campaign/cesm/cesmdata/inputdata/lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP460Deg025_240728/mksrf_landuse_ctsm53_TRSSP460_%y.c240728.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP460Deg025_240728/mksrf_landuse_ctsm53_TRSSP460_%y.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_%y.cdf5.c20220325.nc lnd/clm2/rawdata/gao_oneill_urban/ssp4/urban_properties_GaoOneil_05deg_ThreeClass_ssp4_%y_cdf5_c20220910.nc - /glade/campaign/cesm/cesmdata/inputdata/lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP585Deg025_240728/mksrf_landuse_ctsm53_TRSSP585_%y.c240728.nc + lnd/clm2/rawdata/CTSM53RawData/globalctsm53TRSSP585Deg025_240728/mksrf_landuse_ctsm53_TRSSP585_%y.c240728.nc lnd/clm2/mappingdata/grids/UNSTRUCTgrid_0.25x0.25_nomask_cdf5_c200129.nc lnd/clm2/rawdata/lake_area/mksurf_lake_0.05x0.05_hist_clm5_hydrolakes_%y.cdf5.c20220325.nc lnd/clm2/rawdata/gao_oneill_urban/ssp5/urban_properties_GaoOneil_05deg_ThreeClass_ssp5_%y_cdf5_c20220910.nc diff --git a/tools/site_and_regional/PLUMBER2_sites.csv b/tools/site_and_regional/PLUMBER2_sites.csv index f252fa1d61..1097568051 100644 --- a/tools/site_and_regional/PLUMBER2_sites.csv +++ b/tools/site_and_regional/PLUMBER2_sites.csv @@ -2,6 +2,7 @@ #start_year and end_year will be used to define DATM_YR_ALIGH, DATM_YR_START and DATM_YR_END, and STOP_N in units of nyears. #RUN_STARTDATE and START_TOD are specified because we are starting at GMT corresponding to local midnight. 
#ATM_NCPL is specified so that the time step of the model matches the time interval specified by the atm forcing data. +#longitudes must be in the range [-180,180] ,Site,Lat,Lon,pft1,pft1-%,pft1-cth,pft1-cbh,pft2,pft2-%,pft2-cth,pft2-cbh,start_year,end_year,RUN_STARTDATE,START_TOD,ATM_NCPL 1,AR-SLu,-33.464802,-66.459808,5,50.00, 4.50, 0.13,7,50.00, 4.50, 2.59,2010,2010,2010-01-01,10800,48 2,AT-Neu,47.116669,11.317500,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2002,2012,2001-12-31,82800,48 @@ -73,7 +74,7 @@ 68,DK-Sor,55.485870,11.644640,7,100.00,25.00,14.37,-999,-999.00,-999.00,-999.00,1997,2014,1996-12-31,82800,48 69,DK-ZaH,74.473282,-20.550293,12,100.00, 0.47, 0.01,-999,-999.00,-999.00,-999.00,2000,2013,2000-01-01,0,48 70,ES-ES1,39.345970,-0.318817,1,100.00, 7.50, 3.75,-999,-999.00,-999.00,-999.00,1999,2006,1998-12-31,82800,48 -71,ES-ES2,39.275558,-0.315277,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2005,2006,2004-12-31,82800,48 +71,ES-ES2,39.275558,-0.315277,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2005,2006,2004-12-31,82800,48 72,ES-LgS,37.097935,-2.965820,10,30.00, 0.20, 0.04,13,70.00, 0.50, 0.01,2007,2007,2006-12-31,82800,48 73,ES-LMa,39.941502,-5.773346,7,30.00, 8.00, 4.60,14,70.00, 0.50, 0.01,2004,2006,2003-12-31,82800,48 74,ES-VDA,42.152180, 1.448500,7,30.00, 0.50, 0.29,13,70.00, 0.50, 0.01,2004,2004,2003-12-31,82800,48 @@ -94,7 +95,7 @@ 89,IE-Ca1,52.858791,-6.918152,15,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2004,2006,2004-01-01,0,48 90,IE-Dri,51.986691,-8.751801,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2003,2005,2003-01-01,0,48 91,IT-Amp,41.904099,13.605160,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2003,2006,2002-12-31,82800,48 -92,IT-BCi,40.523800,14.957440,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2005,2010,2004-12-31,82800,48 +92,IT-BCi,40.523800,14.957440,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2005,2010,2004-12-31,82800,48 93,IT-CA1,42.380409,12.026560,7,100.00, 5.50, 
3.16,-999,-999.00,-999.00,-999.00,2012,2013,2011-12-31,82800,48 94,IT-CA2,42.377220,12.026040,15,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2012,2013,2011-12-31,82800,48 95,IT-CA3,42.380001,12.022200,7,100.00, 3.50, 2.01,-999,-999.00,-999.00,-999.00,2012,2013,2011-12-31,82800,48 @@ -151,8 +152,8 @@ 146,US-MMS,39.323200,-86.413086,7,100.00,27.00,15.52,-999,-999.00,-999.00,-999.00,1999,2014,1999-01-01,18000,24 147,US-MOz,38.744110,-92.200012,7,100.00,24.00,13.80,-999,-999.00,-999.00,-999.00,2005,2006,2005-01-01,21600,48 148,US-Myb,38.049801,-121.765106,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2011,2014,2011-01-01,28800,48 -149,US-Ne1,41.165100,-96.476593,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2002,2012,2002-01-01,21600,24 -150,US-Ne2,41.164902,-96.470093,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2002,2012,2002-01-01,21600,24 +149,US-Ne1,41.165100,-96.476593,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2002,2012,2002-01-01,21600,24 +150,US-Ne2,41.164902,-96.470093,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2002,2012,2002-01-01,21600,24 151,US-Ne3,41.179699,-96.439697,15,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2002,2012,2002-01-01,21600,24 152,US-NR1,40.032902,-105.546402,1,100.00,12.00, 6.00,-999,-999.00,-999.00,-999.00,1999,2014,1999-01-01,25200,48 153,US-PFa,45.945900,-90.272308,1, 8.18,30.00,15.00,7,91.82,30.00,17.25,1995,2014,1995-01-01,21600,24 @@ -165,7 +166,7 @@ 160,US-Syv,46.242001,-89.347717,1, 4.91,27.00,13.50,7,95.09,27.00,15.53,2002,2008,2002-01-01,21600,48 161,US-Ton,38.431599,-120.966003,7,70.00, 7.10, 4.08,14,30.00, 0.50, 0.01,2001,2014,2001-01-01,28800,48 162,US-Tw4,38.103001,-121.641403,13,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2014,2014,2014-01-01,28800,48 -163,US-Twt,38.108700,-121.653107,-999,-999.00,-999.00,-999.00,16,100.00, 0.50, 0.01,2010,2014,2010-01-01,28800,48 +163,US-Twt,38.108700,-121.653107,16,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2010,2014,2010-01-01,28800,48 
164,US-UMB,45.559799,-84.713806,7,100.00,20.00,11.50,-999,-999.00,-999.00,-999.00,2000,2014,2000-01-01,18000,24 165,US-Var,38.413300,-120.950729,14,100.00, 0.50, 0.01,-999,-999.00,-999.00,-999.00,2001,2014,2001-01-01,28800,48 166,US-WCr,45.805901,-90.079895,7,100.00,24.00,13.80,-999,-999.00,-999.00,-999.00,1999,2006,1999-01-01,21600,48 diff --git a/tools/site_and_regional/default_data_1850.cfg b/tools/site_and_regional/default_data_1850.cfg index 3c9f28c0a2..ce68b1debf 100644 --- a/tools/site_and_regional/default_data_1850.cfg +++ b/tools/site_and_regional/default_data_1850.cfg @@ -1,7 +1,7 @@ [main] clmforcingindir = /glade/campaign/cesm/cesmdata/inputdata -[datm_crujra] +[datm] dir = atm/datm7/atm_forcing.datm7.CRUJRA.0.5d.c20241231/three_stream domain = domain.crujra_v2.3_0.5x0.5.c220801.nc solardir = . @@ -14,19 +14,6 @@ solarname = CLMCRUJRA2024.Solar precname = CLMCRUJRA2024.Precip tpqwname = CLMCRUJRA2024.TPQW -[datm_gswp3] -dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 -domain = domain.lnd.360x720_gswp3.0v1.c170606.nc -solardir = Solar -precdir = Precip -tpqwdir = TPHWL -solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. -prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. -tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. -solarname = CLMGSWP3v1.Solar -precname = CLMGSWP3v1.Precip -tpqwname = CLMGSWP3v1.TPQW - [surfdat] dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 surfdat_78pft = surfdata_0.9x1.25_hist_1850_78pfts_c240908.nc diff --git a/tools/site_and_regional/default_data_2000.cfg b/tools/site_and_regional/default_data_2000.cfg index a832d810cc..60c012561c 100644 --- a/tools/site_and_regional/default_data_2000.cfg +++ b/tools/site_and_regional/default_data_2000.cfg @@ -1,7 +1,7 @@ [main] clmforcingindir = /glade/campaign/cesm/cesmdata/cseg/inputdata -[datm_crujra] +[datm] dir = atm/datm7/atm_forcing.datm7.CRUJRA.0.5d.c20241231/three_stream domain = domain.crujra_v2.3_0.5x0.5.c220801.nc solardir = . 
@@ -14,19 +14,6 @@ solarname = CLMCRUJRA2024.Solar precname = CLMCRUJRA2024.Precip tpqwname = CLMCRUJRA2024.TPQW -[datm_gswp3] -dir = atm/datm7/atm_forcing.datm7.GSWP3.0.5d.v1.c170516 -domain = domain.lnd.360x720_gswp3.0v1.c170606.nc -solardir = Solar -precdir = Precip -tpqwdir = TPHWL -solartag = clmforc.GSWP3.c2011.0.5x0.5.Solr. -prectag = clmforc.GSWP3.c2011.0.5x0.5.Prec. -tpqwtag = clmforc.GSWP3.c2011.0.5x0.5.TPQWL. -solarname = CLMGSWP3v1.Solar -precname = CLMGSWP3v1.Precip -tpqwname = CLMGSWP3v1.TPQW - [surfdat] dir = lnd/clm2/surfdata_esmf/ctsm5.3.0 surfdat_16pft = surfdata_0.9x1.25_hist_2000_16pfts_c240908.nc diff --git a/tools/site_and_regional/neon_gcs_upload b/tools/site_and_regional/neon_gcs_upload index 1c931e3b8d..5c673a1963 100755 --- a/tools/site_and_regional/neon_gcs_upload +++ b/tools/site_and_regional/neon_gcs_upload @@ -154,7 +154,7 @@ def main(description): continue with Case(case_path) as case: archive_dir = os.path.join(case.get_value("DOUT_S_ROOT"),"lnd","hist") - for histfile in glob.iglob(archive_dir + "/*.h1.*"): + for histfile in glob.iglob(archive_dir + "/*.h1a.*"): newfile = os.path.basename(histfile) upload_blob("neon-ncar-artifacts", histfile, os.path.join("NEON","archive",site,"lnd","hist",newfile))