diff --git a/.gitignore b/.gitignore
index bd7bee5be3..48a511250b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,6 +11,7 @@ src/physics/pumas
src/dynamics/fv3/atmos_cubed_sphere
libraries/FMS
src/atmos_phys
+src/dynamics/mpas/dycore
# Ignore compiled python
buildnmlc
diff --git a/Externals.cfg b/Externals.cfg
index 0cf53c4e12..511f55d36c 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -9,7 +9,7 @@ local_path = components/cice
required = True
[cime]
-tag = cime5.8.32
+tag = cime5.8.34
protocol = git
repo_url = https://github.com/ESMCI/cime
local_path = cime
diff --git a/Externals_CAM.cfg b/Externals_CAM.cfg
index 02b08996d1..27074c8781 100644
--- a/Externals_CAM.cfg
+++ b/Externals_CAM.cfg
@@ -54,6 +54,14 @@ repo_url = https://github.com/ESCOMP/FV3_CESM.git
local_path = src/dynamics/fv3/atmos_cubed_sphere
required = True
+[mpas]
+local_path = src/dynamics/mpas/dycore
+protocol = git
+repo_url = https://github.com/MPAS-Dev/MPAS-Model.git
+sparse = ../.mpas_sparse_checkout
+hash = d059bdf
+required = True
+
[externals_description]
schema_version = 1.0.0
diff --git a/bld/build-namelist b/bld/build-namelist
index dba5f5472a..7602802326 100755
--- a/bld/build-namelist
+++ b/bld/build-namelist
@@ -592,7 +592,7 @@ if ( ($chem ne 'none') or ( $prog_species ) ){
if ($chem) {
# drydep_srf_file is only needed for prognostic MAM when the grid is unstructured.
# structured grids can do interpolation on the fly.
- if ($chem =~ /_mam/ and ($dyn =~ /se/ or $dyn =~ /fv3/)) {
+ if ($chem =~ /_mam/ and ($dyn =~ /se|fv3|mpas/)) {
add_default($nl, 'drydep_srf_file');
}
}
@@ -3637,6 +3637,78 @@ if ($dyn =~ /se/) {
}
}
+# MPAS-Atmosphere dycore
+# ======================
+
+if ($dyn =~ /mpas/) {
+
+ ####################################################
+ # namelist group: dyn_mpas #
+ ####################################################
+
+ add_default($nl, 'mpas_time_integration');
+ add_default($nl, 'mpas_time_integration_order');
+ add_default($nl, 'mpas_dt');
+ add_default($nl, 'mpas_split_dynamics_transport');
+ add_default($nl, 'mpas_number_of_sub_steps');
+ add_default($nl, 'mpas_dynamics_split_steps');
+ add_default($nl, 'mpas_h_mom_eddy_visc2');
+ add_default($nl, 'mpas_h_mom_eddy_visc4');
+ add_default($nl, 'mpas_v_mom_eddy_visc2');
+ add_default($nl, 'mpas_h_theta_eddy_visc2');
+ add_default($nl, 'mpas_h_theta_eddy_visc4');
+ add_default($nl, 'mpas_v_theta_eddy_visc2');
+ add_default($nl, 'mpas_horiz_mixing');
+ add_default($nl, 'mpas_len_disp');
+ add_default($nl, 'mpas_visc4_2dsmag');
+ add_default($nl, 'mpas_del4u_div_factor');
+ add_default($nl, 'mpas_w_adv_order');
+ add_default($nl, 'mpas_theta_adv_order');
+ add_default($nl, 'mpas_scalar_adv_order');
+ add_default($nl, 'mpas_u_vadv_order');
+ add_default($nl, 'mpas_w_vadv_order');
+ add_default($nl, 'mpas_theta_vadv_order');
+ add_default($nl, 'mpas_scalar_vadv_order');
+ add_default($nl, 'mpas_scalar_advection');
+ add_default($nl, 'mpas_positive_definite');
+ add_default($nl, 'mpas_monotonic');
+ add_default($nl, 'mpas_coef_3rd_order');
+ add_default($nl, 'mpas_smagorinsky_coef');
+ add_default($nl, 'mpas_mix_full');
+ add_default($nl, 'mpas_epssm');
+ add_default($nl, 'mpas_smdiv');
+ add_default($nl, 'mpas_apvm_upwinding');
+ add_default($nl, 'mpas_h_ScaleWithMesh');
+ add_default($nl, 'mpas_zd');
+ add_default($nl, 'mpas_xnutr');
+ add_default($nl, 'mpas_do_restart');
+ add_default($nl, 'mpas_print_global_minmax_vel');
+ add_default($nl, 'mpas_print_detailed_minmax_vel');
+ add_default($nl, 'mpas_print_global_minmax_sca');
+
+ # mpas_block_decomp_file_prefix should only be set when more than one task is used.
+ # Otherwise the file is not used and should not be added to the input dataset file.
+ if ($opts{'ntasks'} > 1) {
+ add_default($nl, 'mpas_block_decomp_file_prefix');
+ }
+
+ # invoke MPAS utility to generate streams files
+ my $mpas_libdir = $cfg->get('mpas_libdir');
+
+ # Check that the executable file streams_gen is present. streams_gen is built
+ # at the same time as the MPAS library. This allows build-namelist
+ # to be called before the MPAS library is built (via CESM scripts).
+ if ( -x "$mpas_libdir/streams_gen") {
+
+ my $cmnd = "$mpas_libdir/streams_gen $mpas_libdir/Registry_processed.xml ".
+ "streams.atmosphere stream_list.atmosphere. listed ";
+
+ system($cmnd) == 0 or die
+ "$ProgName - ERROR: generating MPAS streams files via command:\n".
+ "$cmnd";
+ }
+}
+
# Defaults for history output
add_default($nl, 'history_amwg');
add_default($nl, 'history_vdiag');
@@ -4183,6 +4255,14 @@ sub check_input_files {
if ($input_pathname_type eq 'abs') {
if ($inputdata_rootdir) {
+
+ # Special cases:
+ # The file referenced by mpas_block_decomp_file_prefix needs to have the
+ # task count appended before being added to the inputdata file.
+ if ($var eq 'mpas_block_decomp_file_prefix') {
+ $pathname = "$pathname$opts{'ntasks'}";
+ }
+
print $fh "$var = $pathname\n";
}
else {
diff --git a/bld/config_files/definition.xml b/bld/config_files/definition.xml
index ecb1fb5712..ba0550dd09 100644
--- a/bld/config_files/definition.xml
+++ b/bld/config_files/definition.xml
@@ -26,8 +26,8 @@ Switch specifies whether CAM is being built by the CCSM sequential scripts. 0 =
Coupling framework: mct or nuopc. Default: mct.
-
-Dynamics package: eul, fv, fv3, or se.
+
+Dynamics package: eul, fv, fv3, se, or mpas.
Switch to turn on waccm physics: 0 => no, 1 => yes.
@@ -292,6 +292,9 @@ Directory containing COSP library.
Directory containing FV3CORE library.
+
+Directory containing MPAS library.
+
OS for which CAM is being built. The default value is the name contained
in Perl's $OSNAME variable. This parameter allows the user to override
diff --git a/bld/configure b/bld/configure
index 80832b1f36..5dbe6152a1 100755
--- a/bld/configure
+++ b/bld/configure
@@ -78,7 +78,7 @@ OPTIONS
-cppdefs A string of user specified CPP defines. Appended to
Makefile defaults. E.g. -cppdefs '-DVAR1 -DVAR2'
-cpl Coupling framework [mct | nuopc]. Default: mct.
- -dyn Dynamical core option: [eul | fv | se | fv3]. Default: fv.
+ -dyn Dynamical core option: [eul | fv | se | fv3 | mpas]. Default: fv.
-edit_chem_mech Invokes CAMCHEM_EDITOR to allow the user to edit the chemistry mechanism file
-hgrid Specify horizontal grid. Use nlatxnlon for spectral grids;
dlatxdlon for fv grids (dlat and dlon are the grid cell size
@@ -175,6 +175,7 @@ OPTIONS
-ldflags A string of user specified load options. Appended to
Makefile defaults.
-linker User specified linker. Overrides Makefile default of \$(FC).
+ -mpas_libdir Directory containing MPAS library.
-mct_libdir Directory containing MCT library. Default: build the library from source
in a subdirectory of \$cam_bld.
-mpi_inc Directory containing MPI include files.
@@ -274,6 +275,7 @@ GetOptions(
"max_n_rad_cnst=s" => \$opts{'max_n_rad_cnst'},
"mct_libdir=s" => \$opts{'mct_libdir'},
"microphys=s" => \$opts{'microphys'},
+ "mpas_libdir=s" => \$opts{'mpas_libdir'},
"mpi_inc=s" => \$opts{'mpi_inc'},
"mpi_lib=s" => \$opts{'mpi_lib'},
"nadv=s" => \$opts{'nadv'},
@@ -694,9 +696,9 @@ if ($print>=2) { print "WACCM physics: $waccm_phys$eol"; }
# WACCM physics only runs with FV, SE and FV3 dycores
-if ( ($waccm_phys) and ($dyn_pkg ne 'fv') and ($dyn_pkg ne 'se') and ($dyn_pkg ne 'fv3') ) {
+if ( ($waccm_phys) and ($dyn_pkg eq 'eul') ) {
die <<"EOF";
-** ERROR: WACCM physics only runs with FV, Spectral Element and FV3 as the dycore.
+** ERROR: WACCM physics does not run with the Eulerian spectral dycore.
EOF
}
@@ -1211,6 +1213,9 @@ elsif ($dyn_pkg eq 'eul') {
elsif ($dyn_pkg eq 'se') {
$hgrid = 'ne16np4';
}
+elsif ($dyn_pkg eq 'mpas') {
+ $hgrid = 'mpasa480';
+}
$cfg_ref->set('hgrid', $hgrid);
# User override.
@@ -1979,6 +1984,37 @@ if ($dyn_pkg eq 'fv3') {
write_fv3_makefile("$cam_dir/src/dynamics/fv3/Makefile.in.fv3", "$fv3core_libdir/Makefile");
}
+#-----------------------------------------------------------------------------------------------
+# MPAS library.
+if ($dyn_pkg eq 'mpas') {
+
+ # Set the directory used to build MPAS. Add location and library name
+ # to the user specified load flags.
+ my $mpas_libdir = '';
+ if (defined $opts{'mpas_libdir'}) {
+ $mpas_libdir = $opts{'mpas_libdir'};
+ } else
+ {
+ $mpas_libdir = "$cam_bld/mpas";
+ }
+
+ $cfg_ref->set('mpas_libdir', "$mpas_libdir");
+
+ my $ldflags = $cfg_ref->get('ldflags');
+ $ldflags .= " -L$mpas_libdir -lmpas ";
+ $cfg_ref->set('ldflags', $ldflags);
+
+ # create the build directory for mpas
+ my $bld_dir = $mpas_libdir;
+ unless (-d $bld_dir or mkdirp($bld_dir)) {
+ die "** Could not create the mpas build directory: $bld_dir\n";
+ }
+
+ # Create the MPAS Makefile from a template and copy it into the mpas bld directory
+ write_mpas_makefile("$cfgdir/../src/dynamics/mpas/Makefile", "$mpas_libdir/Makefile");
+ if ($print) { print "creating $mpas_libdir/Makefile\n"; }
+}
+
#-----------------------------------------------------------------------------------------------
# Write configuration files ####################################################################
#-----------------------------------------------------------------------------------------------
@@ -2259,6 +2295,34 @@ EOF
}
+#-------------------------------------------------------------------------------
+
+sub write_mpas_makefile
+{
+
+ my ($file_in, $file_out) = @_;
+ my $fh_in = new IO::File;
+ my $fh_out = new IO::File;
+
+ $fh_out->open(">$file_out") or die "** can't open file: $file_out\n";
+
+ print $fh_out <<"EOF";
+
+MPAS_SRC_ROOT := $cam_root/src/dynamics/mpas
+
+EOF
+
+ # Copy the "template" makefile to the new makefile.
+ $fh_in->open("<$file_in") or die "** can't open file: $file_in\n";
+ while (<$fh_in>) {
+ print $fh_out $_;
+ }
+
+ $fh_out->close;
+ $fh_in->close;
+}
+
+
#-------------------------------------------------------------------------------
sub write_config_h
@@ -2291,7 +2355,7 @@ sub set_horiz_grid
my ($hgrid_file, $cfg_ref) = @_;
- # Get dycore/grid from the package's configuration
+ # Get dycore/grid from the configuration object
my $dyn_pkg = $cfg_ref->get('dyn');
my $hgrid = $cfg_ref->get('hgrid');
@@ -2321,9 +2385,9 @@ sub set_horiz_grid
$hgrid =~ m/C(\d+)/;
$cfg_ref->set('hgrid', $hgrid);
}
- else {
+ elsif ($dyn_pkg =~ m/^eul$|^fv/) {
- # For non-SE dycores the parameters are read from an input file,
+ # For EUL and FV dycores the parameters are read from an input file,
# and if no dycore/grid matches are found then issue error message.
my $xml = XML::Lite->new( $hgrid_file );
diff --git a/bld/namelist_files/namelist_defaults_cam.xml b/bld/namelist_files/namelist_defaults_cam.xml
index 1e48df8615..ad65debaa6 100644
--- a/bld/namelist_files/namelist_defaults_cam.xml
+++ b/bld/namelist_files/namelist_defaults_cam.xml
@@ -33,12 +33,20 @@
450
225
-
-
+1800
+
+
+
+
atm/cam/inic/cam_vcoords_L26_c180105.nc
atm/cam/inic/cam_vcoords_L30_c180105.nc
atm/cam/inic/cam_vcoords_L32_c180105.nc
+
+atm/cam/inic/mpas/mpasa480_L32_v6.1.grid_c190924.nc
+atm/cam/inic/mpas/mpasa120_L32_topo_grid_c201022.nc
+
+
atm/cam/inic/fv/cami_0000-01-01_0.23x0.31_L26_c100513.nc
atm/cam/inic/fv/cami_0000-09-01_0.23x0.31_L26_c061106.nc
atm/cam/inic/fv/cami_1980-01-01_0.47x0.63_L26_c071226.nc
@@ -227,6 +235,8 @@
atm/waccm/ic/FW2000_CONUS_30x8_L70_01-01-0001_c200602.nc
+atm/cam/inic/mpas/mpasa120_L32_init.umjs.dry_c201021.nc
+
atm/cam/topo/topo-from-cami_0000-01-01_256x512_L26_c030918.nc
atm/cam/topo/USGS-gtopo30_128x256_c050520.nc
@@ -280,6 +290,8 @@
atm/cam/topo/se/ne30x4_ARCTIC_nc3000_Co060_Fi001_MulG_PF_RR_Nsw042_c200428.nc
atm/cam/topo/se/ne30x8_ARCTICGRIS_nc3000_Co060_Fi001_MulG_PF_RR_Nsw042_c200428.nc
+atm/cam/topo/mpas/mpas_120_nc3000_Co060_Fi001_MulG_PF_Nsw042_c200921.nc
+
atm/cam/topo/fv_0.9x1.25_nc3000_Nsw006_Nrs002_Co008_Fi001_ZR_c160505.nc
atm/cam/topo/fv_1.9x2.5_nc3000_Nsw084_Nrs016_Co120_Fi001_ZR_061116.nc
@@ -2657,6 +2669,65 @@
0
0
+
+
+
+
+
+ SRK3
+ 2
+ 1800.0D0
+ 600.0D0
+
+.true.
+ 2
+ 3
+ 0.0D0
+ 0.0D0
+ 0.0D0
+ 0.0D0
+ 0.0D0
+ 0.0D0
+ 2d_smagorinsky
+
+480000.0D0
+120000.0D0
+
+ 0.05D0
+ 10.0D0
+ 3
+ 3
+ 3
+ 3
+ 3
+ 3
+ 3
+ .true.
+ .false.
+ .true.
+ 0.25D0
+ 0.125D0
+ .true.
+ 0.1D0
+ 0.1D0
+ 0.5D0
+ .true.
+ 22000.0D0
+ 0.2D0
+
+atm/cam/inic/mpas/mpasa480.graph.info.part.
+atm/cam/inic/mpas/mpasa120.graph.info.part.
+atm/cam/inic/mpas/mpasa60.graph.info.part.
+atm/cam/inic/mpas/mpasa30.graph.info.part.
+atm/cam/inic/mpas/mpasa15.graph.info.part.
+atm/cam/inic/mpas/mpasa12.graph.info.part.
+atm/cam/inic/mpas/mpasa15-3.graph.info.part.
+
+ .false.
+ .true.
+ .false.
+ .false.
+
diff --git a/bld/namelist_files/namelist_definition.xml b/bld/namelist_files/namelist_definition.xml
index 403d9ebd4d..62eb4e26c6 100644
--- a/bld/namelist_files/namelist_definition.xml
+++ b/bld/namelist_files/namelist_definition.xml
@@ -7351,6 +7351,265 @@ Normally equal to se_vert_num_threads.
Default: Set by build-namelist.
+
+
+
+
+Time integration scheme in MPAS dycore.
+Default: SRK3
+
+
+
+Order for RK time integration in MPAS dycore.
+Default: 2
+
+
+
+Time step (seconds) in MPAS dycore.
+Default: Set by build-namelist.
+
+
+
+Whether to super-cycle scalar transport in MPAS dycore.
+Default: TRUE
+
+
+
+Number of acoustic steps per full RK step in MPAS dycore.
+Default: 2
+
+
+
+When {{ hilight }}mpas_split_dynamics_transport{{ closehilight }} = .true., the
+number of RK steps per transport step in MPAS dycore.
+Default: 3
+
+
+
+Del^2 eddy viscosity for horizontal diffusion of momentum in MPAS dycore.
+Default: 0.0
+
+
+
+Del^4 eddy hyper-viscosity for horizontal diffusion of momentum in MPAS dycore.
+Default: 0.0
+
+
+
+Del^2 eddy viscosity for vertical diffusion of momentum in MPAS dycore.
+Default: 0.0
+
+
+
+Del^2 eddy viscosity for horizontal diffusion of theta in MPAS dycore.
+Default: 0.0
+
+
+
+Del^4 eddy hyper-viscosity for horizontal diffusion of theta in MPAS dycore.
+Default: 0.0
+
+
+
+Del^2 eddy viscosity for vertical diffusion of theta in MPAS dycore.
+Default: 0.0
+
+
+
+Formulation of horizontal mixing in MPAS dycore.
+Default: 2d_smagorinsky
+
+
+
+Horizontal length scale, used by the Smagorinsky formulation of horizontal
+diffusion and by 3-d divergence damping in MPAS dycore.
+Default: 120000.0
+
+
+
+Scaling coefficient of delta_x^3 to obtain Del^4 diffusion coefficient in
+MPAS dycore.
+Default: 0.05
+
+
+
+Scaling factor for the divergent component of Del^4 u calculation in MPAS
+dycore.
+Default: 10.0
+
+
+
+Horizontal advection order for w in MPAS dycore.
+Default: 3
+
+
+
+Horizontal advection order for theta in MPAS dycore.
+Default: 3
+
+
+
+Horizontal advection order for scalars in MPAS dycore.
+Default: 3
+
+
+
+Vertical advection order for normal velocities (u) in MPAS dycore.
+Default: 3
+
+
+
+Vertical advection order for w in MPAS dycore.
+Default: 3
+
+
+
+Vertical advection order for theta in MPAS dycore.
+Default: 3
+
+
+
+Vertical advection order for scalars in MPAS dycore.
+Default: 3
+
+
+
+Whether to advect scalar fields in MPAS dycore.
+Default: TRUE
+
+
+
+Whether to enable positive-definite advection of scalars in MPAS dycore.
+Default: FALSE
+
+
+
+Whether to enable monotonic limiter in scalar advection in MPAS dycore.
+Default: TRUE
+
+
+
+Upwinding coefficient in the 3rd order advection scheme in MPAS dycore.
+Default: 0.25
+
+
+
+Dimensionless empirical parameter relating the strain tensor to the eddy
+viscosity in the Smagorinsky turbulence model in MPAS dycore.
+Default: 0.125
+
+
+
+Mix full theta and u fields, or mix perturbation from initial state in
+MPAS dycore.
+Default: TRUE
+
+
+
+Off-centering parameter for the vertically implicit acoustic integration in
+MPAS dycore
+Default: 0.1
+
+
+
+3-d divergence damping coefficient in MPAS dycore.
+Default: 0.1
+
+
+
+Amount of upwinding in APVM in MPAS dycore.
+Default: 0.5
+
+
+
+Scale eddy viscosities with mesh-density function for horizontal diffusion
+in MPAS dycore.
+Default: TRUE
+
+
+
+Height in meters above MSL to begin w-damping profile in MPAS dycore.
+Default: 22000.0
+
+
+
+Maximum w-damping coefficient at model top in MPAS dycore.
+Default: 0.2
+
+
+
+Prefix of the MPAS graph decomposition file, to be suffixed with the MPI
+task count. The prefix includes the full pathname of the directory
+containing the file.
+Default: Set by build-namelist.
+
+
+
+Whether this run with the MPAS dycore is to restart from a previous restart
+file or not.
+Default: Set by build-namelist.
+
+
+
+Whether to print the global min/max of horizontal normal velocity and
+vertical velocity each timestep in MPAS dycore.
+Default: TRUE
+
+
+
+Whether to print the global min/max of horizontal normal velocity and
+vertical velocity each timestep, along with the location in the domain
+where those extrema occurred in MPAS dycore.
+Default: FALSE
+
+
+
+Whether to print the global min/max of scalar fields each timestep in MPAS
+dycore.
+Default: FALSE
+
+
+
char
- eul,fv,fv3,se
+ eul,fv,fv3,se,mpas
fv
eul
se
fv3
+ mpas
build_component_cam
env_build.xml
diff --git a/cime_config/testdefs/testlist_cam.xml b/cime_config/testdefs/testlist_cam.xml
index 2771b2e45d..ef72769bd1 100644
--- a/cime_config/testdefs/testlist_cam.xml
+++ b/cime_config/testdefs/testlist_cam.xml
@@ -926,6 +926,24 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/doc/ChangeLog b/doc/ChangeLog
index a1ed9f651c..dfa20e5009 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,5 +1,354 @@
===============================================================
+Tag name: cam6_3_004
+Originator(s): mgduda, pel, eaton
+Date: Thu Nov 19 11:53:10 MST 2020
+One-line Summary: Add MPAS dycore w/ functional support for simple physics.
+Github PR URL: https://github.com/ESCOMP/CAM/pull/246
+
+Purpose of changes (include the issue number and title text for each relevant GitHub issue):
+
+. fix #220 Add MPAS-A dycore.
+
+. General notes:
+ - MPAS uses height as the vertical coordinate rather than the hybrid
+ pressure coordinate used by all the other dycores in CAM. The height
+ coordinate is a 3D field, and is read from the initial file along with
+ all the horizontal grid coordinate info. The initial file is created
+ using MPAS tools. CAM reads the same initial file that is used by the
+ MPAS standalone model. The names of the dimensions and coordinate
+ variables are different than the "standard" names used by CAM, e.g.,
+ nCells, latCell, and lonCell rather than ncol, lat, and lon. The
+ dimension order for 3D fields is different: i.e., (nlev,nCells) rather
+ than (ncol,nlev). And the index order in the vertical is bottom to top
+ of atmosphere rather than the top to bottom order used by CAM.
+
+ - Because the surface height comes from the height coordinate in the
+ initial file, the PHIS field from the topo file is not used. The topo
+ file is still needed for the subgrid properties used by the physics
+ package (e.g., SGH and ridge data). In order to ensure that the topo
+ file is consistent with the surface height from the initial file, PHIS
+ is read and the implied surface height is compared to the surface
+ height from the coordinate variable. An endrun is issued if the values
+ aren't the same to within a specified tolerance.
+
+ - MPAS uses 3 horizontal grids: cell centers (used by T, P, and
+ constituents), cell edges (used by the normal velocity component), and
+ cell vertices (used for vorticity). Grid objects are created for each
+ of these grids. Dynamics data from the MPAS internal data structures,
+ which is accessed via pointers in the dynamics import/export objects,
+ may be output to CAM's history file. There is currently not an option
+ for online interpolation to a rectangular mesh. There is an offline
+ utility (convert_mpas) to do this.
+
+ - When using analytic ICs MPAS must at a minimum read the 3D height
+ coordinate and data for the 3 horizontal grids from the initial file.
+ This "grid only" file can be produced by the MPAS utility for producing
+ initial files (init_atmosphere_model). That utility also generates
+ full initial files which additionally contain a complete atmospheric
+ state.
+
+ - The physical constants used by standalone MPAS are hardcoded parameters
+ in the mpas_constants module (part of the MPAS external code). To
+ allow CAM to have control over the value of constants the
+ mpas_constants module has a CAM specific modification (controlled by
+ the CPP token MPAS_CAM_DYCORE) which allows it to directly access the
+ data in CAM's physconst module. Before implementing this functionality
+ (i.e., when standalone MPAS and CAM/MPAS used the same values for the
+ constants) some simple model tests were run which demonstrated
+ agreement to within double precision roundoff level between standalone
+ MPAS and CAM/MPAS results.
+
+ - MPAS writes complete grid info along with its state variables to the
+ restart file. This data is in the same form as the initial file, hence
+ we can use MPAS restart files as initial files. They will be larger
+ than needed for initial runs since they'll also contain the physics
+ buffer data, but could easily be trimmed down in a post-processing
+ step. This will be a convenient way to create initial files for MPAS
+ simulations.
+
+ - MPAS uses precomputed partition files for each task count of any given
+ grid. There is an MPAS utility (gpmetis) used to generate the
+ partition files.
+
+
+Describe any changes made to build system:
+. MPAS is built in its own library, like COSP and FV3.
+
+Describe any changes made to the namelist:
+. Add namelist groups nhyd_model, damping, decomposition, restart,
+ printout.
+
+List any changes to the defaults for the boundary datasets: none
+
+Describe any substantial timing or memory changes: N/A
+
+Code reviewed by: cacraig, fvitt, nusbaume, jet, goldy
+
+List all files eliminated: none
+
+List all files added and what they do:
+
+src/dynamics/mpas/Makefile
+. template file used by configure to generate the MPAS Makefile
+
+src/dynamics/mpas/.mpas_sparse_checkout
+. list of the needed directories from the MPAS-Dev/MPAS-Model
+ repository when pulling dycore external with manage_externals
+
+src/dynamics/mpas/dp_coupling.F90
+. d_p_coupling
+ - The pressure in MPAS is non-hydrostatic and so the hydrostatic
+ component of the pressure is computed in dry_hydrostatic_pressure.
+ This routine is from MPAS standalone code.
+ - The conversion of potential temperature to temperature makes use of the
+ exner function computed in MPAS. This is needed for consistency. The
+ exner function computed for CAM's physics is based on the pressure
+ profiles given to the physics package, and is not the same as the
+ pressures (non-hydrostatic) use by MPAS to compute the exner function.
+. p_d_coupling
+ - The tendencies from the physics package are transposed to the dynamics
+ blocks, then the routine derived_tend computes the tendencies that are
+ used by the dycore's next time step.
+
+src/dynamics/mpas/driver/cam_mpas_subdriver.F90
+. The routines in this module are based on the standalone MPAS subdriver
+ code. Only MPAS modules are referenced with the exception of the
+ constituents module which provides the names of the constituents and
+ identifies which ones are water species.
+
+src/dynamics/mpas/dycore.F90
+. logical function dycore_is identifies that the MPAS dycore is running and
+ that the grid is unstructured.
+
+src/dynamics/mpas/dyn_comp.F90
+. The dyn_in/dyn_out objects contain pointers into the MPAS internal data
+ structures. The dimension order for MPAS arrays is vertical levels, then
+ columns. The index order in the vertical is bottom to top of atm.
+. dyn_readnl
+ - Reading the mpas namelist is delegated to cam_mpas_namelist_read which
+ is an auto-generated routine to read the namelist groups and store the
+ data in MPAS internal data structures.
+. dyn_init
+ - initialize the dyn_in/dyn_out objects.
+ - call read_inidat to initialize the dynamics state
+. dyn_run
+ - Calls the subdriver cam_mpas_run which implements a time substepping
+ loop for the case when the mpas timestep is shorter than the CAM
+ dynamics/physics coupling timestep. The subdriver also manages the
+ calculation of fields in the dynamics export state that are derived
+ from the mpas prognostic state.
+. read_inidat
+ - for analytic ICs:
+ . The analytic IC code uses CAM dimension order and vertical indexing
+ conventions. The data is reordered and stored directly in MPAS data
+ structures.
+ . The cell centered zonal and meridional velocity components are
+ converted to the normal velocity components on the cell edges by the
+ call to cam_mpas_cell_to_edge_winds.
+ . There is code to convert temperature to potential temperature. This
+ depends on the pressure profile and the current code which got things
+ running assuming a dry atmosphere will change as the scientific
+ validation is done.
+ - reading the initial file:
+ . The code is designed to read the same initial file that is used by
+ MPAS standalone. Implementing the infld calls led to the realization
+ that the grid objects, which are used to set up the pio calls, need
+ to use the same dimension names that are in the file. This is why
+ the cell centered grid object uses the MPAS dimension name nCells
+ rather than the CAM convention of ncol.
+ - The tracer initialization is set up to allow overwriting the default
+ initializations, used by the analytic IC code, by fields from the
+ initial file.
+
+src/dynamics/mpas/dyn_grid.F90
+. dyn_grid_init
+ - the call to setup_time_invariant is leveraging MPAS code to read all
+ the grid info from the initial file.
+ - the reference pressures used by the CAM physics package are computed
+ from the MPAS reference height array using a US standard atm approx.
+ - The construction of the global arrays for the cell center (physics)
+ grid are done by MPAS code.
+. All the routines used by physgrid to query the cell center grid on the
+ dynamics decomp have been implemented. Note that MPAS uses a single
+ block per task.
+. define_cam_grids
+ - Two cell center grids are defined. One uses the MPAS dimension and
+ coordinate name conventions and is used to read the initial file. The
+ other uses CAM dimension and coordinate name conventions and is used to
+ read PHIS from the topo file.
+ - Grids are defined for cell edges (used for i/o of normal velocity
+ components) and for cell vertices (used for output of vorticity)
+
+src/dynamics/mpas/interp_mod.F90
+. This is a stub module. Online interpolation to a rectangular mesh is not
+ implemented. There is an offline utility for doing this.
+
+src/dynamics/mpas/pmgrid.F90
+. container for PLEV
+
+src/dynamics/mpas/restart_dynamics.F90
+. Writing and reading grid and dynamics state information to/from restart files is
+ delegated to MPAS utility code. This module provides the CAM interfaces for the
+ restart functionality. CAM just provides MPAS with the PIO filehandle to the
+ restart file.
+
+src/dynamics/mpas/spmd_dyn.F90
+. container for info shared between the dp_coupling and phys_grid modules
+
+src/dynamics/mpas/stepon.F90
+. stepon_init
+ - add fields for output on dynamics grids
+. stepon_run1
+ - output of fields from the dycore, then couple to physics grid
+. stepon_run2
+ - couple to dynamics grid, then write the physics forcings on dynamics
+ grids
+. stepon_run3
+ - call the MPAS run method
+
+List all existing files that have been modified, and describe the changes:
+
+.gitignore
+. ignore MPAS external source
+
+Externals_CAM.cfg
+. add MPAS external
+
+Externals.cfg
+. update to cime5.8.34 to get mods for mpas grids and build
+
+bld/build-namelist
+. add defaults for MPAS namelist groups
+
+bld/config_files/definition.xml
+. add mpas as valid value for 'dyn'
+. add mpas_libdir
+
+bld/configure
+. Mods to generate a Makefile used to build the MPAS in its own library.
+
+bld/namelist_files/namelist_defaults_cam.xml
+. add MPAS grid files for use with analytic ICs
+. defaults for MPAS dycore
+
+bld/namelist_files/namelist_definition.xml
+. add MPAS namelist variables
+
+cime_config/buildcpp
+. add -mpas_libdir option to config_opts
+
+cime_config/config_component.xml
+. add regexp to match MPAS grid in CAM_DYCORE
+
+cime_config/testdefs/testlist_cam.xml
+. add 1 aux_cam and 1 prealpha test
+
+src/control/cam_history.F90
+. date2yyyymmdd and sec2hms now accessed from module string_utils
+
+src/control/cam_history_support.F90
+. move utility functions date2yyyymmdd and sec2hms to string_utils
+
+src/control/ncdio_atm.F90
+. mods to allow CAM to read the MPAS initial file. Assumption that column
+ dimension is first is not valid. Assumption that time dimension is
+ named 'time' is not correct.
+
+src/dynamics/fv3/dyn_grid.F90
+. define public module variable ini_grid_name for communicating the grid
+ object name to the infld calls in dyn_comp.
+. define a grid object named 'INI' for the FFSL grid that uses the
+ dimension name ncol and coordinate names lat,lon, for reading the initial
+ file.
+
+src/dynamics/fv3/dyn_comp.F90
+. change the name of the dimension used in the initial file from ncol_d to
+ ncol, and use the ini_grid_name grid object to read it.
+
+src/dynamics/se/dp_coupling.F90
+. remove unused FIX_TOTE conditional
+
+src/dynamics/se/dyn_grid.F90
+. define a public module variable containing the grid name needed for reading
+ the initial file.
+. add routine get_hdim_name to determine whether the initial file uses
+ the horizontal dimension 'ncol', or 'ncol_d'.
+. define_cam_grids
+ - If using CSLAM and the initial file uses 'ncol' then create an
+ alternate GLL grid object using ncol/lat/lon.
+
+src/dynamics/se/dyn_comp.F90
+. Make use of public ini_grid_name variable to identify the grid object used
+ to read the initial file. Previous code assumes the 'GLL' grid object.
+
+src/dynamics/tests/inic_analytic.F90
+. add optional height arg
+. make dyn_set_inic_col public. This is the better interface for calls
+ from the MPAS version of read_inidat. The addition of optional args is
+ awkward using the dyn_set_inic_cblock interface.
+
+src/dynamics/tests/initial_conditions/ic_baroclinic.F90
+. refactor the optional height coordinate input to fully implement the
+ 'vc_height' option. Pass interface rather than mid-point heights so the
+ top of atmosphere height is available.
+
+src/dynamics/tests/initial_conditions/ic_us_standard_atm.F90
+. add optional input arg zint. Remove restriction that PHIS_IN be supplied
+ since that can be gotten from zint.
+. enable the coding to allow constituent initialization to depend on
+ height.
+
+src/physics/cam/const_init.F90
+src/physics/cam/tracers.F90
+. Test for z being present is unnecessary when whole array is passed as
+ actual arg.
+
+src/physics/simple/physpkg.F90
+. Turn off energy fixer for adiabatic mode with MPAS. This change was part
+ of the required mods to get exact agreement with standalone MPAS for an
+ adiabatic test.
+
+src/utils/cam_map_utils.F90
+. cam_filemap_mapVal
+ - fix array assignment that gnu bounds checker complains about
+. cam_filemap_get_array_bounds
+ - treat case this%src(i)==0 and set dims(i,:)=0. This data is used to
+ set the variable fieldbounds in subroutine cam_grid_get_gcid.
+
+src/utils/string_utils.F90
+. add utility functions date2yyyymmdd and sec2hms (moved here from cam_history_support)
+. add utility function int2str
+
+src/utils/time_manager.F90
+. add functions get_stop_date and get_run_duration
+
+If there were any failures reported from running test_driver.sh on any test
+platform, and checkin with these failures has been OK'd by the gatekeeper,
+then copy the lines from the td.*.status files for the failed tests to the
+appropriate machine below. All failed tests must be justified.
+
+cheyenne/intel/aux_cam:
+
+ FAIL ERC_D_Ln9.mpasa120z32_mpasa120.FKESSLER.cheyenne_intel.cam-outfrq3s_usecase NLCOMP
+ FAIL ERC_D_Ln9.mpasa120z32_mpasa120.FKESSLER.cheyenne_intel.cam-outfrq3s_usecase BASELINE /glade/p/cesm/amwg/cesm_baselines/cam6_3_003: ERROR BFAIL baseline directory '/glade/p/cesm/amwg/cesm_baselines/cam6_3_003/ERC_D_Ln9.mpasa120z32_mpasa120.FKESSLER.cheyenne_intel.cam-outfrq3s_usecase' does not exist
+- expected failures for test with no baseline
+
+izumi/nag/aux_cam:
+
+ FAIL DAE.f45_f45_mg37.FHS94.izumi_nag.cam-dae COMPARE_base_da
+- pre-existing failure
+
+izumi/pgi/aux_cam: PASS
+
+CAM tag used for the baseline comparison tests if different than previous
+tag:
+
+Summarize any changes to answers: BFB
+
+===============================================================
+===============================================================
+
Tag name: cam6_3_003
Originator(s): eaton
Date: Mon 09 Nov 2020
@@ -5786,6 +6135,7 @@ Summarize any changes to answers: WACCM and WACCMX answer changing, otherwise BF
===============================================================
===============================================================
+
Tag name: cam6_2_001
Originator(s): cacraig, brianpm, kevin.reed@stonybrook.edu
Date: October 17, 2019
diff --git a/src/control/cam_history.F90 b/src/control/cam_history.F90
index ba5676aeee..dc38e3f2e2 100644
--- a/src/control/cam_history.F90
+++ b/src/control/cam_history.F90
@@ -45,12 +45,12 @@ module cam_history
use cam_logfile, only: iulog
use cam_history_support, only: max_fieldname_len, fieldname_suffix_len, &
max_chars, ptapes, fieldname_len, &
- max_string_len, date2yyyymmdd, pflds, &
- fieldname_lenp2, sec2hms, &
+ max_string_len, pflds, fieldname_lenp2, &
field_info, active_entry, hentry, &
horiz_only, write_hist_coord_attrs, &
write_hist_coord_vars, interp_info_t, &
lookup_hist_coord_indices, get_hist_coord_index
+ use string_utils, only: date2yyyymmdd, sec2hms
use sat_hist, only: is_satfile
use solar_parms_data, only: solar_parms_on, kp=>solar_parms_kp, ap=>solar_parms_ap
use solar_parms_data, only: f107=>solar_parms_f107, f107a=>solar_parms_f107a, f107p=>solar_parms_f107p
diff --git a/src/control/cam_history_support.F90 b/src/control/cam_history_support.F90
index bd35afd41c..dba255727f 100644
--- a/src/control/cam_history_support.F90
+++ b/src/control/cam_history_support.F90
@@ -307,7 +307,6 @@ module cam_history_support
public :: write_hist_coord_attrs, write_hist_coord_vars
public :: lookup_hist_coord_indices, hist_coord_find_levels
public :: get_hist_coord_index, hist_coord_name, hist_coord_size
- public :: sec2hms, date2yyyymmdd
public :: hist_dimension_name
interface add_hist_coord
@@ -1930,68 +1929,6 @@ end function hist_coord_find_levels
!#######################################################################
- character(len=8) function sec2hms (seconds)
-
- ! Input arguments
-
- integer, intent(in) :: seconds
-
- ! Local workspace
-
- integer :: hours ! hours of hh:mm:ss
- integer :: minutes ! minutes of hh:mm:ss
- integer :: secs ! seconds of hh:mm:ss
-
- if (seconds < 0 .or. seconds > 86400) then
- write(iulog,*)'SEC2HRS: bad input seconds:', seconds
- call endrun ()
- end if
-
- hours = seconds / 3600
- minutes = (seconds - hours*3600) / 60
- secs = (seconds - hours*3600 - minutes*60)
-
- if (minutes < 0 .or. minutes > 60) then
- write(iulog,*)'SEC2HRS: bad minutes = ',minutes
- call endrun ()
- end if
-
- if (secs < 0 .or. secs > 60) then
- write(iulog,*)'SEC2HRS: bad secs = ',secs
- call endrun ()
- end if
-
- write(sec2hms,80) hours, minutes, secs
-80 format(i2.2,':',i2.2,':',i2.2)
- return
- end function sec2hms
- character(len=10) function date2yyyymmdd (date)
-
- ! Input arguments
-
- integer, intent(in) :: date
-
- ! Local workspace
-
- integer :: year ! year of yyyy-mm-dd
- integer :: month ! month of yyyy-mm-dd
- integer :: day ! day of yyyy-mm-dd
-
- if (date < 0) then
- call endrun ('DATE2YYYYMMDD: negative date not allowed')
- end if
-
- year = date / 10000
- month = (date - year*10000) / 100
- day = date - year*10000 - month*100
-
- write(date2yyyymmdd,80) year, month, day
-80 format(i4.4,'-',i2.2,'-',i2.2)
- return
- end function date2yyyymmdd
-
- !#######################################################################
-
character(len=max_hcoordname_len) function hist_dimension_name (size)
! Given a specific size value, return the first registered dimension name which matches the size, if it exists
! Otherwise the name returned is blank
diff --git a/src/control/ncdio_atm.F90 b/src/control/ncdio_atm.F90
index 619d9b5b61..a01305c5cd 100644
--- a/src/control/ncdio_atm.F90
+++ b/src/control/ncdio_atm.F90
@@ -19,6 +19,7 @@ module ncdio_atm
use cam_abortutils, only: endrun
use scamMod, only: scmlat,scmlon,single_column
use cam_logfile, only: iulog
+ use string_utils, only: to_lower
!
! !PUBLIC TYPES:
implicit none
@@ -525,6 +526,7 @@ subroutine infld_real_2d_3d(varname, ncid, dimname1, dimname2, &
character(len=*), parameter :: subname='INFLD_REAL_2D_3D' ! subroutine name
character(len=128) :: errormsg
character(len=PIO_MAX_NAME) :: field_dnames(2)
+ character(len=PIO_MAX_NAME) :: file_dnames(3)
! For SCAM
real(r8) :: closelat, closelon
@@ -574,7 +576,8 @@ subroutine infld_real_2d_3d(varname, ncid, dimname1, dimname2, &
!
! Check if field is on file; get netCDF variable id
!
- call cam_pio_check_var(ncid, varname, varid, ndims, dimids, dimlens, readvar_tmp)
+ call cam_pio_check_var(ncid, varname, varid, ndims, dimids, dimlens, &
+ readvar_tmp, dimnames=file_dnames)
! If field is on file:
!
@@ -606,7 +609,7 @@ subroutine infld_real_2d_3d(varname, ncid, dimname1, dimname2, &
else
! Check that the number of columns in the file matches the number of
! columns in the grid object.
- if (dimlens(1) /= grid_dimlens(1)) then
+ if (dimlens(1) /= grid_dimlens(1) .and. dimlens(2) /= grid_dimlens(1)) then
readvar = .false.
return
end if
@@ -614,7 +617,7 @@ subroutine infld_real_2d_3d(varname, ncid, dimname1, dimname2, &
! Check to make sure that the 3rd dimension is time
if (ndims == 3) then
ierr = pio_inq_dimname(ncid, dimids(3), tmpname)
- if (trim(tmpname) /= 'time') then
+ if (to_lower(trim(tmpname)) /= 'time') then
call endrun(trim(subname)//': dimension mismatch for '//trim(varname))
end if
end if
@@ -640,7 +643,7 @@ subroutine infld_real_2d_3d(varname, ncid, dimname1, dimname2, &
else
! All distributed array processing
call cam_grid_get_decomp(grid_id, arraydimsize, dimlens(1:2), &
- pio_double, iodesc, field_dnames=field_dnames)
+ pio_double, iodesc, field_dnames=field_dnames, file_dnames=file_dnames(1:2))
call pio_read_darray(ncid, varid, iodesc, field, ierr)
end if
diff --git a/src/dynamics/fv3/dyn_comp.F90 b/src/dynamics/fv3/dyn_comp.F90
index c67c07c411..6703fd0d8b 100644
--- a/src/dynamics/fv3/dyn_comp.F90
+++ b/src/dynamics/fv3/dyn_comp.F90
@@ -46,7 +46,7 @@ module dyn_comp
use dimensions_mod, only: npx, npy, nlev, &
cnst_name_ffsl,cnst_longname_ffsl, &
fv3_lcp_moist,fv3_lcv_moist,qsize_tracer_idx_cam2dyn,fv3_scale_ttend
- use dyn_grid, only: mytile
+ use dyn_grid, only: mytile, ini_grid_name
use field_manager_mod, only: MODEL_ATMOS
use fms_io_mod, only: set_domain, nullify_domain
use fv_arrays_mod, only: fv_atmos_type, fv_grid_bounds_type
@@ -1078,7 +1078,7 @@ subroutine read_inidat(dyn_in)
! T
if (dyn_field_exists(fh_ini, 'T')) then
- call read_dyn_var('T', fh_ini, 'ncol_d', var3d)
+ call read_dyn_var('T', fh_ini, 'ncol', var3d)
atm(mytile)%pt(is:ie,js:je,1:nlev)=var3d(is:ie,js:je,1:nlev)
else
call endrun(trim(subname)//': T not found')
@@ -1110,14 +1110,14 @@ subroutine read_inidat(dyn_in)
! V
if (dyn_field_exists(fh_ini, 'V')) then
- call read_dyn_var('V', fh_ini, 'ncol_d', var3d)
+ call read_dyn_var('V', fh_ini, 'ncol', var3d)
atm(mytile)%va(is:ie,js:je,1:nlev)=var3d(is:ie,js:je,1:nlev)
else
call endrun(trim(subname)//': V not found')
end if
if (dyn_field_exists(fh_ini, 'U')) then
- call read_dyn_var('U', fh_ini, 'ncol_d', var3d)
+ call read_dyn_var('U', fh_ini, 'ncol', var3d)
atm(mytile)%ua(is:ie,js:je,1:nlev) =var3d(is:ie,js:je,1:nlev)
else
call endrun(trim(subname)//': U not found')
@@ -1125,7 +1125,7 @@ subroutine read_inidat(dyn_in)
m_cnst=1
if (dyn_field_exists(fh_ini, 'Q')) then
- call read_dyn_var('Q', fh_ini, 'ncol_d', var3d)
+ call read_dyn_var('Q', fh_ini, 'ncol', var3d)
atm(mytile)%q(is:ie,js:je,1:nlev,m_cnst) = var3d(is:ie,js:je,1:nlev)
else
call endrun(trim(subname)//': Q not found')
@@ -1145,7 +1145,7 @@ subroutine read_inidat(dyn_in)
end if
if(found) then
- call read_dyn_var(trim(cnst_name(m_cnst)), fh_ini, 'ncol_d', var3d)
+ call read_dyn_var(trim(cnst_name(m_cnst)), fh_ini, 'ncol', var3d)
atm(mytile)%q(is:ie,js:je,1:nlev,m_cnst_ffsl) = var3d(is:ie,js:je,1:nlev)
else
dbuf3=0._r8
@@ -1202,7 +1202,7 @@ subroutine read_inidat(dyn_in)
end if
if(found) then
- call read_dyn_var(trim(cnst_name(m_cnst)), fh_ini, 'ncol_d', var3d)
+ call read_dyn_var(trim(cnst_name(m_cnst)), fh_ini, 'ncol', var3d)
atm(mytile)%q(is:ie,js:je,1:nlev,m_cnst_ffsl) = var3d(is:ie,js:je,1:nlev)
end if
end do
@@ -1835,13 +1835,13 @@ subroutine read_dyn_field_2d(fieldname, fh, dimname, buffer)
buffer = 0.0_r8
call infld(trim(fieldname), fh, dimname, 1, ldof_size, 1, 1, buffer, &
- found, gridname='FFSL')
+ found, gridname=ini_grid_name)
if(.not. found) then
call endrun('READ_DYN_FIELD_2D: Could not find '//trim(fieldname)//' field on input datafile')
end if
! This code allows use of compiler option to set uninitialized values
- ! to NaN. In that case infld can return NaNs where the element FFSL points
+ ! to NaN. In that case infld can return NaNs where the element ini_grid_name points
! are not "unique columns"
where (isnan(buffer)) buffer = 0.0_r8
@@ -1865,13 +1865,13 @@ subroutine read_dyn_field_3d(fieldname, fh, dimname, buffer)
buffer = 0.0_r8
call infld(fieldname, fh,dimname, 'lev', 1, ldof_size, 1, pver, &
- 1, 1, buffer, found, gridname='FFSL')
+ 1, 1, buffer, found, gridname=ini_grid_name)
if(.not. found) then
call endrun('READ_DYN_FIELD_3D: Could not find '//trim(fieldname)//' field on input datafile')
end if
! This code allows use of compiler option to set uninitialized values
- ! to NaN. In that case infld can return NaNs where the element FFSL points
+ ! to NaN. In that case infld can return NaNs where the element ini_grid_name points
! are not "unique columns"
where (isnan(buffer)) buffer = 0.0_r8
diff --git a/src/dynamics/fv3/dyn_grid.F90 b/src/dynamics/fv3/dyn_grid.F90
index 9fa1442d6e..263c04ac3b 100644
--- a/src/dynamics/fv3/dyn_grid.F90
+++ b/src/dynamics/fv3/dyn_grid.F90
@@ -46,13 +46,16 @@ module dyn_grid
private
save
- ! The FV3 dynamics grids
+ ! The FV3 dynamics grids and initial file ncol grid
integer, parameter :: dyn_decomp = 101
integer, parameter :: dyn_decomp_ew = 102
integer, parameter :: dyn_decomp_ns = 103
integer, parameter :: dyn_decomp_hist = 104
integer, parameter :: dyn_decomp_hist_ew = 105
integer, parameter :: dyn_decomp_hist_ns = 106
+ integer, parameter :: ini_decomp = 107
+
+ character(len=3), protected :: ini_grid_name = 'INI'
integer, parameter :: ptimelevels = 2 ! number of time levels in the dycore
@@ -68,6 +71,7 @@ module dyn_grid
public :: &
dyn_decomp, &
+ ini_grid_name, &
p_split, &
grids_on_this_pe, &
ptimelevels
@@ -777,12 +781,30 @@ subroutine define_cam_grids(Atm)
! output local and global uniq points
uniqpts_glob=(npx-1)*(npy-1)*6
+ ! with FV3 if the initial file uses the horizontal dimension 'ncol' rather than
+ ! 'ncol_d' then we need a grid object with the names ncol,lat,lon to read it.
+ ! Create that grid object here.
+
+ lat_coord => horiz_coord_create('lat', 'ncol', uniqpts_glob, 'latitude', &
+ 'degrees_north', 1, size(pelat_deg), pelat_deg, map=pemap)
+ lon_coord => horiz_coord_create('lon', 'ncol', uniqpts_glob, 'longitude', &
+ 'degrees_east', 1, size(pelon_deg), pelon_deg, map=pemap)
+
+ ! register physics cell-center/A-grid
+ call cam_grid_register(ini_grid_name, ini_decomp, lat_coord, lon_coord, &
+ grid_map, block_indexed=.false., unstruct=.true.)
+ call cam_grid_attribute_register(ini_grid_name, 'cell', '', 1)
+ call cam_grid_attribute_register(ini_grid_name, 'area', 'cam cell center areas', &
+ 'ncol', area_ffsl, map=pemap)
+ nullify(lat_coord)
+ nullify(lon_coord)
+
+ ! create and register dynamic A-grid, src_in(/1,2/) allows ilev,jlev,nlev ordering for restart IO
lat_coord => horiz_coord_create('lat_d', 'ncol_d', uniqpts_glob, 'latitude', &
'degrees_north', 1, size(pelat_deg), pelat_deg, map=pemap)
lon_coord => horiz_coord_create('lon_d', 'ncol_d', uniqpts_glob, 'longitude', &
'degrees_east', 1, size(pelon_deg), pelon_deg, map=pemap)
- ! register dynamic A-grid, src_in(/1,2/) allows ilev,jlev,nlev ordering for restart IO
call cam_grid_register('FFSL', dyn_decomp, lat_coord, lon_coord, &
grid_map, block_indexed=.false., unstruct=.true.,src_in=(/1,2/))
call cam_grid_attribute_register('FFSL', 'cell', '', 1)
diff --git a/src/dynamics/mpas/.mpas_sparse_checkout b/src/dynamics/mpas/.mpas_sparse_checkout
new file mode 100644
index 0000000000..5dd1d34ecb
--- /dev/null
+++ b/src/dynamics/mpas/.mpas_sparse_checkout
@@ -0,0 +1,5 @@
+src/external
+src/operators
+src/tools
+src/core_atmosphere
+src/framework
diff --git a/src/dynamics/mpas/Makefile b/src/dynamics/mpas/Makefile
new file mode 100644
index 0000000000..9c1103cc74
--- /dev/null
+++ b/src/dynamics/mpas/Makefile
@@ -0,0 +1,266 @@
+CPPFLAGS := -D_MPI -DMPAS_NATIVE_TIMERS -DMPAS_GIT_VERSION=unknown -DMPAS_NAMELIST_SUFFIX=atmosphere -DMPAS_CAM_DYCORE
+ifdef PIODEF
+ CPPFLAGS += $(PIODEF)
+endif
+
+REGISTRY_FILE := $(MPAS_SRC_ROOT)/dycore/src/core_atmosphere/Registry.xml
+
+VPATH := $(MPAS_SRC_ROOT)/dycore/src/core_atmosphere/diagnostics:$(MPAS_SRC_ROOT)/dycore/src/core_atmosphere/dynamics:
+VPATH += $(MPAS_SRC_ROOT)/dycore/src/core_atmosphere:$(MPAS_SRC_ROOT)/dycore/src/tools/registry:$(MPAS_SRC_ROOT)/dycore/src/tools/input_gen:
+VPATH += $(MPAS_SRC_ROOT)/dycore/src/operators:$(MPAS_SRC_ROOT)/dycore/src/framework:$(MPAS_SRC_ROOT)/dycore/src/external/ezxml:
+VPATH += $(MPAS_SRC_ROOT)/driver
+
+INTERFACE_OBJS = \
+ mpas_atm_core.o \
+ mpas_atm_core_interface.o \
+ mpas_atm_dimensions.o \
+ mpas_atm_threading.o \
+ cam_mpas_subdriver.o
+
+DYN_OBJS = \
+ mpas_atm_time_integration.o \
+ mpas_atm_iau.o
+
+DIAG_OBJS = \
+ mpas_atm_diagnostics_manager.o \
+ mpas_atm_diagnostics_utils.o
+
+DIAGNOSTICS = \
+ mpas_atm_diagnostic_template.o \
+ isobaric_diagnostics.o \
+ convective_diagnostics.o \
+ pv_diagnostics.o \
+ soundings.o
+
+REG_OBJS = \
+ parse.o \
+ dictionary.o \
+ gen_inc.o \
+ fortprintf.o \
+ utility.o
+
+STREAMS_GEN_OBJS = \
+ streams_gen.o \
+ test_functions.o
+
+OPERATOR_OBJS = \
+ mpas_vector_operations.o \
+ mpas_matrix_operations.o \
+ mpas_tensor_operations.o \
+ mpas_rbf_interpolation.o \
+ mpas_vector_reconstruction.o \
+ mpas_spline_interpolation.o \
+ mpas_tracer_advection_helpers.o \
+ mpas_tracer_advection_mono.o \
+ mpas_tracer_advection_std.o \
+ mpas_geometry_utils.o
+
+FRAME_OBJS = \
+ mpas_kind_types.o \
+ mpas_framework.o \
+ mpas_timer.o \
+ mpas_timekeeping.o \
+ mpas_constants.o \
+ mpas_attlist.o \
+ mpas_hash.o \
+ mpas_sort.o \
+ mpas_block_decomp.o \
+ mpas_block_creator.o \
+ mpas_dmpar.o \
+ mpas_abort.o \
+ mpas_decomp.o \
+ mpas_threading.o \
+ mpas_io.o \
+ mpas_io_streams.o \
+ mpas_bootstrapping.o \
+ mpas_io_units.o \
+ mpas_stream_manager.o \
+ mpas_stream_list.o \
+ mpas_forcing.o \
+ mpas_c_interfacing.o \
+ random_id.o \
+ pool_hash.o \
+ mpas_derived_types.o \
+ mpas_domain_routines.o \
+ mpas_field_routines.o \
+ mpas_pool_routines.o \
+ xml_stream_parser.o \
+ regex_matching.o \
+ mpas_field_accessor.o \
+ mpas_log.o
+
+UTIL_OBJS = \
+ ezxml.o
+
+
+all:
+ ( $(MAKE) xml )
+ ( $(MAKE) framework )
+ ( $(MAKE) operators )
+ ( $(MAKE) registry )
+ ( $(MAKE) streams_gen )
+ ( $(MAKE) incs )
+ ( $(MAKE) dycore )
+ ( ar -ru libmpas.a $(INTERFACE_OBJS) $(DYN_OBJS) $(DIAG_OBJS) $(DIAGNOSTICS) $(OPERATOR_OBJS) $(FRAME_OBJS) $(UTIL_OBJS) )
+
+
+#
+# EZXML
+#
+xml: ezxml.o
+
+
+#
+# Framework dependencies
+#
+framework: $(FRAME_OBJS)
+
+mpas_framework.o: mpas_dmpar.o \
+ mpas_io.o \
+ mpas_derived_types.o \
+ mpas_domain_routines.o \
+ mpas_field_routines.o \
+ mpas_pool_routines.o \
+ mpas_timer.o \
+ mpas_sort.o \
+ mpas_io_units.o \
+ mpas_block_decomp.o \
+ mpas_stream_manager.o \
+ mpas_c_interfacing.o
+
+mpas_abort.o: mpas_kind_types.o mpas_io_units.o mpas_threading.o
+
+mpas_constants.o: mpas_kind_types.o
+
+mpas_log.o: mpas_derived_types.o mpas_io_units.o mpas_abort.o mpas_threading.o mpas_c_interfacing.o
+
+mpas_attlist.o: mpas_kind_types.o mpas_io_units.o mpas_derived_types.o
+
+mpas_derived_types.o: mpas_kind_types.o mpas_constants.o $(TYPE_DEPS)
+
+mpas_domain_routines.o: mpas_derived_types.o mpas_pool_routines.o mpas_dmpar.o
+
+mpas_field_routines.o: mpas_derived_types.o duplicate_field_array.inc duplicate_field_scalar.inc mpas_threading.o mpas_attlist.o
+
+mpas_pool_routines.o: mpas_derived_types.o mpas_field_routines.o mpas_threading.o mpas_log.o
+
+mpas_decomp.o: mpas_derived_types.o mpas_stream_manager.o mpas_log.o
+
+mpas_hash.o : mpas_derived_types.o
+
+mpas_dmpar.o: mpas_sort.o mpas_kind_types.o mpas_derived_types.o mpas_hash.o mpas_threading.o mpas_pool_routines.o mpas_log.o
+
+mpas_sort.o: mpas_kind_types.o mpas_log.o
+
+mpas_timekeeping.o: mpas_kind_types.o mpas_derived_types.o mpas_dmpar.o mpas_threading.o mpas_log.o
+
+mpas_timer.o: mpas_kind_types.o mpas_dmpar.o mpas_threading.o mpas_log.o
+
+mpas_block_decomp.o: mpas_derived_types.o mpas_hash.o mpas_io_units.o mpas_dmpar.o
+
+mpas_block_creator.o: mpas_dmpar.o mpas_hash.o mpas_sort.o mpas_io_units.o mpas_block_decomp.o mpas_stream_manager.o mpas_decomp.o mpas_abort.o $(DEPS)
+
+mpas_io.o: mpas_dmpar.o mpas_attlist.o mpas_log.o
+
+mpas_io_streams.o: mpas_attlist.o mpas_derived_types.o mpas_timekeeping.o mpas_io.o mpas_pool_routines.o add_field_indices.inc mpas_log.o $(DEPS)
+
+mpas_bootstrapping.o: mpas_derived_types.o mpas_dmpar.o mpas_block_decomp.o mpas_block_creator.o mpas_sort.o mpas_timekeeping.o mpas_io_streams.o mpas_stream_manager.o random_id.o mpas_log.o $(DEPS)
+
+mpas_io_units.o: mpas_kind_types.o
+
+mpas_threading.o: mpas_kind_types.o
+
+mpas_stream_list.o: mpas_derived_types.o mpas_kind_types.o mpas_io_streams.o mpas_timekeeping.o regex_matching.o mpas_log.o
+
+mpas_stream_manager.o: mpas_io_streams.o mpas_timekeeping.o mpas_derived_types.o mpas_kind_types.o mpas_c_interfacing.o mpas_stream_list.o mpas_dmpar.o mpas_io.o mpas_threading.o mpas_log.o
+
+mpas_forcing.o: mpas_derived_types.o mpas_timekeeping.o mpas_stream_manager.o mpas_log.o mpas_io_units.o
+
+mpas_field_accessor.o: mpas_derived_types.o mpas_kind_types.o mpas_pool_routines.o mpas_log.o
+
+
+#
+# Operator dependencies
+#
+operators: $(OPERATOR_OBJS)
+
+mpas_tensor_operations.o: mpas_vector_operations.o mpas_matrix_operations.o
+
+mpas_rbf_interpolation.o: mpas_vector_operations.o
+
+mpas_vector_reconstruction.o: mpas_rbf_interpolation.o
+
+mpas_tracer_advection_helpers.o: mpas_geometry_utils.o
+
+mpas_tracer_advection_mono.o: mpas_tracer_advection_helpers.o
+
+mpas_tracer_advection_std.o: mpas_tracer_advection_helpers.o
+
+mpas_geometry_utils.o: mpas_vector_operations.o mpas_matrix_operations.o
+
+
+#
+# Registry
+#
+registry: $(REG_OBJS) ezxml.o
+ $(CC) $(CPPFLAGS) $(CFLAGS) -o $@ $(REG_OBJS) ezxml.o
+
+
+#
+# Default stream file generator
+#
+streams_gen: $(STREAMS_GEN_OBJS) ezxml.o
+ $(CC) $(CPPFLAGS) $(CFLAGS) -o $@ $(STREAMS_GEN_OBJS) ezxml.o
+
+
+#
+# Include files manufactured by the registry code generator
+#
+incs: $(REGISTRY_FILE)
+ ( cpp -P -traditional $(CPPFLAGS) -I$(MPAS_SRC_ROOT)/dycore/src/core_atmosphere/diagnostics $(REGISTRY_FILE) > Registry_processed.xml )
+ ( ./registry Registry_processed.xml )
+
+#
+# Dycore
+#
+dycore: $(DYN_OBJS) $(DIAGNOSTICS) $(DIAG_OBJS) $(INTERFACE_OBJS)
+
+mpas_atm_time_integration.o: mpas_atm_iau.o mpas_atm_dimensions.o
+
+
+#
+# Diagnostics
+#
+mpas_atm_diagnostics_manager.o: mpas_atm_diagnostics_utils.o $(DIAGNOSTICS)
+
+convective_diagnostics.o: mpas_atm_diagnostics_utils.o
+isobaric_diagnostics.o: mpas_atm_diagnostics_utils.o
+pv_diagnostics.o: mpas_atm_diagnostics_utils.o
+
+
+#
+# Core interface objects
+#
+mpas_atm_core_interface.o: mpas_atm_core.o incs
+
+mpas_atm_core.o: mpas_atm_threading.o mpas_atm_time_integration.o mpas_atm_diagnostics_manager.o
+
+cam_mpas_subdriver.o: mpas_atm_core_interface.o mpas_derived_types.o mpas_framework.o mpas_domain_routines.o mpas_pool_routines.o
+
+
+clean:
+ rm registry Registry_processed.xml *.inc *.o *.mod lib*.a
+
+# Cancel the built-in implicit rule for Modula-2 files (.mod) to avoid having 'make'
+# try to create .o files from Fortran .mod files
+%.o : %.mod
+
+%.o : %.c
+ $(CC) -c $(CFLAGS) -DUNDERSCORE $(CPPFLAGS) $(CPPINCLUDES) -I$(MPAS_SRC_ROOT)/dycore/src/external/ezxml $<
+
+%.o : %.F
+ $(FC) -c $(CPPFLAGS) $(FFLAGS) $(CPPINCLUDES) $(FCINCLUDES) $<
+
+%.o : %.F90
+ $(FC) -c $(CPPFLAGS) $(FFLAGS) $(CPPINCLUDES) $(FCINCLUDES) $<
+
diff --git a/src/dynamics/mpas/dp_coupling.F90 b/src/dynamics/mpas/dp_coupling.F90
new file mode 100644
index 0000000000..54e5a3880e
--- /dev/null
+++ b/src/dynamics/mpas/dp_coupling.F90
@@ -0,0 +1,770 @@
+module dp_coupling
+
+!-------------------------------------------------------------------------------
+! dynamics - physics coupling module
+!-------------------------------------------------------------------------------
+
+use shr_kind_mod, only: r8=>shr_kind_r8
+use pmgrid, only: plev
+use ppgrid, only: begchunk, endchunk, pcols, pver, pverp
+use constituents, only: pcnst, cnst_type
+use physconst, only: gravit, cpairv, cappa, rairv, rh2o, zvir
+
+use spmd_dyn, only: local_dp_map, block_buf_nrecs, chunk_buf_nrecs
+use spmd_utils, only: mpicom, iam, masterproc
+
+use dyn_grid, only: get_gcol_block_d
+use dyn_comp, only: dyn_export_t, dyn_import_t
+
+use physics_types, only: physics_state, physics_tend
+use phys_grid, only: get_ncols_p, get_gcol_all_p, block_to_chunk_send_pters, &
+ transpose_block_to_chunk, block_to_chunk_recv_pters, &
+ chunk_to_block_send_pters, transpose_chunk_to_block, &
+ chunk_to_block_recv_pters
+
+use physics_buffer, only: physics_buffer_desc, pbuf_get_chunk, pbuf_get_field
+
+use cam_logfile, only: iulog
+use perf_mod, only: t_startf, t_stopf, t_barrierf
+use cam_abortutils, only: endrun
+
+implicit none
+private
+save
+
+public :: &
+ d_p_coupling, &
+ p_d_coupling
+
+!=========================================================================================
+contains
+!=========================================================================================
+
+subroutine d_p_coupling(phys_state, phys_tend, pbuf2d, dyn_out)
+
+ ! Convert the dynamics output state into the physics input state.
+ ! Note that all pressures and tracer mixing ratios coming from the dycore are based on
+ ! dry air mass.
+
+ use mpas_constants, only : Rv_over_Rd => rvord
+
+ ! arguments
+ type(physics_state), intent(inout) :: phys_state(begchunk:endchunk)
+ type(physics_tend ), intent(inout) :: phys_tend(begchunk:endchunk)
+ type(dyn_export_t), intent(inout) :: dyn_out
+ type(physics_buffer_desc), pointer :: pbuf2d(:,:)
+
+ ! local variables
+
+ ! Variables from dynamics export container
+ integer :: nCellsSolve
+ integer :: index_qv
+ integer, dimension(:), pointer :: cam_from_mpas_cnst
+
+ real(r8), pointer :: pmiddry(:,:)
+ real(r8), pointer :: pintdry(:,:)
+ real(r8), pointer :: zint(:,:)
+ real(r8), pointer :: zz(:,:)
+ real(r8), pointer :: rho_zz(:,:)
+ real(r8), pointer :: ux(:,:)
+ real(r8), pointer :: uy(:,:)
+ real(r8), pointer :: w(:,:)
+ real(r8), pointer :: theta_m(:,:)
+ real(r8), pointer :: exner(:,:)
+ real(r8), pointer :: tracers(:,:,:)
+
+
+ integer :: lchnk, icol, k, kk ! indices over chunks, columns, layers
+ integer :: i, m, ncols, blockid
+ integer :: blk(1), bcid(1)
+
+ integer :: pgcols(pcols)
+ integer :: tsize ! amount of data per grid point passed to physics
+ integer, allocatable :: bpter(:,:) ! offsets into block buffer for packing data
+ integer, allocatable :: cpter(:,:) ! offsets into chunk buffer for unpacking data
+
+ real(r8), allocatable, dimension(:) :: bbuffer, cbuffer ! transpose buffers
+
+ character(len=*), parameter :: subname = 'd_p_coupling'
+ !----------------------------------------------------------------------------
+
+ nCellsSolve = dyn_out % nCellsSolve
+ index_qv = dyn_out % index_qv
+ cam_from_mpas_cnst => dyn_out % cam_from_mpas_cnst
+
+ pmiddry => dyn_out % pmiddry
+ pintdry => dyn_out % pintdry
+ zint => dyn_out % zint
+ zz => dyn_out % zz
+ rho_zz => dyn_out % rho_zz
+ ux => dyn_out % ux
+ uy => dyn_out % uy
+ w => dyn_out % w
+ theta_m => dyn_out % theta_m
+ exner => dyn_out % exner
+ tracers => dyn_out % tracers
+
+ ! diagnose pintdry, pmiddry
+ call dry_hydrostatic_pressure( &
+ nCellsSolve, plev, zz, zint, rho_zz, theta_m, pmiddry, pintdry)
+
+ call t_startf('dpcopy')
+
+ if (local_dp_map) then
+
+ !$omp parallel do private (lchnk, ncols, icol, i, k, kk, m, pgcols, blk, bcid)
+ do lchnk = begchunk, endchunk
+
+ ncols = get_ncols_p(lchnk) ! number of columns in this chunk
+ call get_gcol_all_p(lchnk, pcols, pgcols) ! global column indices in chunk
+
+ do icol = 1, ncols ! column index in physics chunk
+ call get_gcol_block_d(pgcols(icol), 1, blk, bcid) ! column index in dynamics block
+ i = bcid(1)
+
+ phys_state(lchnk)%psdry(icol) = pintdry(1,i)
+ phys_state(lchnk)%phis(icol) = zint(1,i) * gravit
+
+ do k = 1, pver ! vertical index in physics chunk
+ kk = pver - k + 1 ! vertical index in dynamics block
+
+ phys_state(lchnk)%t(icol,k) = theta_m(kk,i) / (1.0_r8 + &
+ Rv_over_Rd * tracers(index_qv,kk,i)) * exner(kk,i)
+ phys_state(lchnk)%u(icol,k) = ux(kk,i)
+ phys_state(lchnk)%v(icol,k) = uy(kk,i)
+ phys_state(lchnk)%omega(icol,k) = -rho_zz(kk,i)*zz(kk,i)*gravit*0.5_r8*(w(kk,i)+w(kk+1,i)) ! omega
+ phys_state(lchnk)%pmiddry(icol,k) = pmiddry(kk,i)
+ end do
+
+ do k = 1, pverp
+ kk = pverp - k + 1
+ phys_state(lchnk)%pintdry(icol,k) = pintdry(kk,i)
+ end do
+
+ do m = 1, pcnst
+ do k = 1, pver
+ kk = pver - k + 1
+ phys_state(lchnk)%q(icol,k,m) = tracers(cam_from_mpas_cnst(m),kk,i)
+ end do
+ end do
+ end do
+ end do
+
+ else ! .not. local_dp_map
+
+ tsize = 6 + pcnst
+ allocate(bbuffer(tsize*block_buf_nrecs)) ! block buffer
+ bbuffer = 0.0_r8
+ allocate(cbuffer(tsize*chunk_buf_nrecs)) ! chunk buffer
+ cbuffer = 0.0_r8
+
+ allocate( bpter(nCellsSolve,0:pver) )
+ allocate( cpter(pcols,0:pver) )
+
+ blockid = iam + 1 ! global block index
+ call block_to_chunk_send_pters(blockid, nCellsSolve, pverp, tsize, bpter)
+
+ do i = 1, nCellsSolve ! column index in block
+
+ bbuffer(bpter(i,0)) = pintdry(1,i) ! psdry
+ bbuffer(bpter(i,0)+1) = zint(1,i) * gravit ! phis
+
+ do k = 1, pver
+ bbuffer(bpter(i,k)) = theta_m(k,i) / (1.0_r8 + &
+ Rv_over_Rd * tracers(index_qv,k,i)) * exner(k,i)
+ bbuffer(bpter(i,k)+1) = ux(k,i)
+ bbuffer(bpter(i,k)+2) = uy(k,i)
+ bbuffer(bpter(i,k)+3) = -rho_zz(k,i) * zz(k,i) * gravit * 0.5_r8 * (w(k,i) + w(k+1,i)) ! omega
+ bbuffer(bpter(i,k)+4) = pmiddry(k,i)
+ do m=1,pcnst
+ bbuffer(bpter(i,k)+4+m) = tracers(cam_from_mpas_cnst(m),k,i)
+ end do
+ end do
+
+ do k = 1, pverp
+ bbuffer(bpter(i,k-1)+5+pcnst) = pintdry(k,i)
+ end do
+ end do
+
+ call t_barrierf ('sync_blk_to_chk', mpicom)
+ call t_startf ('block_to_chunk')
+ call transpose_block_to_chunk(tsize, bbuffer, cbuffer)
+ call t_stopf ('block_to_chunk')
+
+ !$omp parallel do private (lchnk, ncols, icol, k, kk, m, cpter)
+ do lchnk = begchunk, endchunk
+ ncols = phys_state(lchnk)%ncol
+
+ call block_to_chunk_recv_pters(lchnk, pcols, pverp, tsize, cpter)
+
+ do icol = 1, ncols
+
+ phys_state(lchnk)%psdry(icol) = cbuffer(cpter(icol,0))
+ phys_state(lchnk)%phis(icol) = cbuffer(cpter(icol,0)+1)
+
+ ! do the vertical reorder here when assigning to phys_state
+ do k = 1, pver
+ kk = pver - k + 1
+ phys_state(lchnk)%t (icol,kk) = cbuffer(cpter(icol,k))
+ phys_state(lchnk)%u (icol,kk) = cbuffer(cpter(icol,k)+1)
+ phys_state(lchnk)%v (icol,kk) = cbuffer(cpter(icol,k)+2)
+ phys_state(lchnk)%omega (icol,kk) = cbuffer(cpter(icol,k)+3)
+ phys_state(lchnk)%pmiddry(icol,kk) = cbuffer(cpter(icol,k)+4)
+ do m = 1, pcnst
+ phys_state(lchnk)%q (icol,kk,m) = cbuffer(cpter(icol,k)+4+m)
+ end do
+ end do
+
+ do k = 0, pver
+ kk = pverp - k
+ phys_state(lchnk)%pintdry(icol,kk) = cbuffer(cpter(icol,k)+5+pcnst)
+ end do
+ end do
+ end do
+
+ deallocate( bbuffer, bpter )
+ deallocate( cbuffer, cpter )
+
+ end if
+ call t_stopf('dpcopy')
+
+ call t_startf('derived_phys')
+ call derived_phys(phys_state, phys_tend, pbuf2d)
+ call t_stopf('derived_phys')
+
+end subroutine d_p_coupling
+
+!=========================================================================================
+
+subroutine p_d_coupling(phys_state, phys_tend, dyn_in)
+
+ ! Convert the physics output state and tendencies into the dynamics
+ ! input state. Begin by redistributing the output of the physics package
+ ! to the block data structure. Then derive the tendencies required by
+ ! MPAS.
+
+ use cam_mpas_subdriver, only : domain_ptr
+ use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_field, mpas_pool_get_array
+ use mpas_field_routines, only : mpas_allocate_scratch_field, mpas_deallocate_scratch_field
+ use mpas_derived_types, only : mpas_pool_type, field2DReal
+ use time_manager, only : get_step_size
+
+ ! Arguments
+ type(physics_state), intent(inout) :: phys_state(begchunk:endchunk)
+ type(physics_tend ), intent(inout) :: phys_tend(begchunk:endchunk)
+ type(dyn_import_t), intent(inout) :: dyn_in
+
+ ! Local variables
+ integer :: lchnk, icol, k, kk ! indices over chunks, columns, layers
+ integer :: i, m, ncols, blockid
+ integer :: blk(1), bcid(1)
+
+ real(r8) :: factor
+ real(r8) :: dt_phys
+
+ ! Variables from dynamics import container
+ integer :: nCellsSolve
+ integer :: nCells
+ integer :: nEdgesSolve
+ integer :: index_qv
+ integer, dimension(:), pointer :: mpas_from_cam_cnst
+
+ real(r8), pointer :: tracers(:,:,:)
+
+ ! CAM physics output redistributed to blocks.
+ real(r8), allocatable :: t_tend(:,:)
+ real(r8), allocatable :: qv_tend(:,:)
+ real(r8), pointer :: u_tend(:,:)
+ real(r8), pointer :: v_tend(:,:)
+
+
+ integer :: pgcols(pcols)
+ integer :: tsize ! amount of data per grid point passed to dynamics
+ integer, allocatable :: bpter(:,:) ! offsets into block buffer for unpacking data
+ integer, allocatable :: cpter(:,:) ! offsets into chunk buffer for packing data
+
+ real(r8), allocatable, dimension(:) :: bbuffer, cbuffer ! transpose buffers
+
+ type (mpas_pool_type), pointer :: tend_physics
+ type (field2DReal), pointer :: tend_uzonal, tend_umerid
+
+ character(len=*), parameter :: subname = 'dp_coupling::p_d_coupling'
+ !----------------------------------------------------------------------------
+
+ nCellsSolve = dyn_in % nCellsSolve
+ nCells = dyn_in % nCells
+ index_qv = dyn_in % index_qv
+ mpas_from_cam_cnst => dyn_in % mpas_from_cam_cnst
+
+ tracers => dyn_in % tracers
+
+ allocate( t_tend(pver,nCellsSolve) )
+ allocate( qv_tend(pver,nCellsSolve) )
+
+ nullify(tend_physics)
+ call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'tend_physics', tend_physics)
+
+ nullify(tend_uzonal)
+ nullify(tend_umerid)
+ call mpas_pool_get_field(tend_physics, 'tend_uzonal', tend_uzonal)
+ call mpas_pool_get_field(tend_physics, 'tend_umerid', tend_umerid)
+ call mpas_allocate_scratch_field(tend_uzonal)
+ call mpas_allocate_scratch_field(tend_umerid)
+ call mpas_pool_get_array(tend_physics, 'tend_uzonal', u_tend)
+ call mpas_pool_get_array(tend_physics, 'tend_umerid', v_tend)
+
+ ! Physics coupling interval, used to compute tendency of qv
+ dt_phys = get_step_size()
+
+ call t_startf('pd_copy')
+ if (local_dp_map) then
+
+ !$omp parallel do private (lchnk, ncols, icol, i, k, kk, m, pgcols, blk, bcid)
+ do lchnk = begchunk, endchunk
+
+ ncols = get_ncols_p(lchnk) ! number of columns in this chunk
+ call get_gcol_all_p(lchnk, pcols, pgcols) ! global column indices
+
+ do icol = 1, ncols ! column index in physics chunk
+ call get_gcol_block_d(pgcols(icol), 1, blk, bcid) ! column index in dynamics block
+ i = bcid(1)
+
+ do k = 1, pver ! vertical index in physics chunk
+ kk = pver - k + 1 ! vertical index in dynamics block
+
+ t_tend(kk,i) = phys_tend(lchnk)%dtdt(icol,k)
+ u_tend(kk,i) = phys_tend(lchnk)%dudt(icol,k)
+ v_tend(kk,i) = phys_tend(lchnk)%dvdt(icol,k)
+
+ ! convert wet mixing ratios to dry
+ factor = phys_state(lchnk)%pdel(icol,k)/phys_state(lchnk)%pdeldry(icol,k)
+ do m = 1, pcnst
+ if (cnst_type(mpas_from_cam_cnst(m)) == 'wet') then
+ if (m == index_qv) then
+ qv_tend(kk,i) = (phys_state(lchnk)%q(icol,k,mpas_from_cam_cnst(m))*factor - tracers(index_qv,kk,i)) / dt_phys
+ end if
+ tracers(m,kk,i) = phys_state(lchnk)%q(icol,k,mpas_from_cam_cnst(m))*factor
+ else
+ if (m == index_qv) then
+ qv_tend(kk,i) = (phys_state(lchnk)%q(icol,k,mpas_from_cam_cnst(m)) - tracers(index_qv,kk,i)) / dt_phys
+ end if
+ tracers(m,kk,i) = phys_state(lchnk)%q(icol,k,mpas_from_cam_cnst(m))
+ end if
+ end do
+
+ end do
+ end do
+ end do
+
+ else
+
+ tsize = 3 + pcnst
+ allocate( bbuffer(tsize*block_buf_nrecs) )
+ bbuffer = 0.0_r8
+ allocate( cbuffer(tsize*chunk_buf_nrecs) )
+ cbuffer = 0.0_r8
+
+ allocate( bpter(nCellsSolve,0:pver) )
+ allocate( cpter(pcols,0:pver) )
+
+ !$omp parallel do private (lchnk, ncols, icol, k, m, cpter)
+ do lchnk = begchunk, endchunk
+ ncols = get_ncols_p(lchnk)
+
+ call chunk_to_block_send_pters(lchnk, pcols, pverp, tsize, cpter)
+
+ do icol = 1, ncols
+
+ do k = 1, pver
+ cbuffer(cpter(icol,k)) = phys_tend(lchnk)%dtdt(icol,k)
+ cbuffer(cpter(icol,k)+1) = phys_tend(lchnk)%dudt(icol,k)
+ cbuffer(cpter(icol,k)+2) = phys_tend(lchnk)%dvdt(icol,k)
+
+ ! convert wet mixing ratios to dry
+ factor = phys_state(lchnk)%pdel(icol,k)/phys_state(lchnk)%pdeldry(icol,k)
+ do m = 1, pcnst
+ if (cnst_type(m) == 'wet') then
+ cbuffer(cpter(icol,k)+2+m) = phys_state(lchnk)%q(icol,k,m)*factor
+ else
+ cbuffer(cpter(icol,k)+2+m) = phys_state(lchnk)%q(icol,k,m)
+ end if
+ end do
+
+ end do
+ end do
+ end do
+
+ call t_barrierf('sync_chk_to_blk', mpicom)
+ call t_startf ('chunk_to_block')
+ call transpose_chunk_to_block(tsize, cbuffer, bbuffer)
+ call t_stopf ('chunk_to_block')
+
+ blockid = iam + 1 ! global block index
+
+ call chunk_to_block_recv_pters(blockid, nCellsSolve, pverp, tsize, bpter)
+
+ do i = 1, nCellsSolve ! index in dynamics block
+
+ ! flip vertical index here
+ do k = 1, pver ! vertical index in physics chunk
+ kk = pver - k + 1 ! vertical index in dynamics block
+
+ t_tend(kk,i) = bbuffer(bpter(i,k))
+ u_tend(kk,i) = bbuffer(bpter(i,k)+1)
+ v_tend(kk,i) = bbuffer(bpter(i,k)+2)
+
+ do m = 1, pcnst
+ if (m == index_qv) then
+ qv_tend(kk,i) = (bbuffer(bpter(i,k)+2+mpas_from_cam_cnst(m)) - tracers(index_qv,kk,i)) / dt_phys
+ end if
+ tracers(m,kk,i) = bbuffer(bpter(i,k)+2+mpas_from_cam_cnst(m))
+ end do
+
+ end do
+ end do
+
+ deallocate( bbuffer, bpter )
+ deallocate( cbuffer, cpter )
+
+ end if
+ call t_stopf('pd_copy')
+
+ call t_startf('derived_tend')
+ call derived_tend(nCellsSolve, nCells, t_tend, u_tend, v_tend, qv_tend, dyn_in)
+ call t_stopf('derived_tend')
+
+ call mpas_deallocate_scratch_field(tend_uzonal)
+ call mpas_deallocate_scratch_field(tend_umerid)
+
+end subroutine p_d_coupling
+
+!=========================================================================================
+
+subroutine derived_phys(phys_state, phys_tend, pbuf2d)
+
+   ! Compute fields in the physics state object which are diagnosed from the
+   ! MPAS prognostic fields.
+   !
+   ! On entry phys_state must already contain the dry interface and midpoint
+   ! pressures (pintdry, pmiddry) and constituent mixing ratios on a dry-air
+   ! basis (q(:,:,1) is water vapor).  This routine fills in the remaining
+   ! pressure fields, Exner function, geopotential heights, and dry static
+   ! energy, converts 'wet'-type constituents to a moist basis, and
+   ! initializes the per-timestep energy and water integrals.
+
+   use geopotential, only: geopotential_t
+   use check_energy, only: check_energy_timestep_init
+   use shr_vmath_mod, only: shr_vmath_log
+
+   ! Arguments
+   type(physics_state), intent(inout) :: phys_state(begchunk:endchunk)
+   type(physics_tend ), intent(inout) :: phys_tend(begchunk:endchunk)
+   type(physics_buffer_desc), pointer :: pbuf2d(:,:)
+
+   ! Local variables
+
+   integer :: k, lchnk, m, ncol
+
+   real(r8) :: factor(pcols,pver)   ! moist/dry air mass ratio; inverted in place below
+   real(r8) :: zvirv(pcols,pver)
+
+   real(r8), parameter :: pref = 1.e5_r8 ! reference pressure (Pa)
+
+   type(physics_buffer_desc), pointer :: pbuf_chnk(:)
+
+   character(len=*), parameter :: subname = 'dp_coupling::derived_phys'
+   !----------------------------------------------------------------------------
+
+   !$omp parallel do private (lchnk, ncol, k, factor)
+   do lchnk = begchunk,endchunk
+      ncol = get_ncols_p(lchnk)
+
+      ! The dry pressure profiles are derived using hydrostatic formulas
+      ! and the dry air mass from MPAS.
+
+      ! Derived variables for dry pressure profiles:
+
+      do k = 1, pver
+
+         phys_state(lchnk)%pdeldry(:ncol,k) = phys_state(lchnk)%pintdry(:ncol,k+1) - &
+                                              phys_state(lchnk)%pintdry(:ncol,k)
+
+         phys_state(lchnk)%rpdeldry(:ncol,k) = 1._r8 / phys_state(lchnk)%pdeldry(:ncol,k)
+
+         call shr_vmath_log(phys_state(lchnk)%pintdry(:ncol,k), &
+                            phys_state(lchnk)%lnpintdry(:ncol,k), ncol)
+
+         call shr_vmath_log(phys_state(lchnk)%pmiddry(:ncol,k), &
+                            phys_state(lchnk)%lnpmiddry(:ncol,k), ncol)
+
+      end do
+
+      ! The top interface (k = pverp) is not covered by the loop above.
+      call shr_vmath_log(phys_state(lchnk)%pintdry(:ncol,pverp), &
+                         phys_state(lchnk)%lnpintdry(:ncol,pverp), ncol)
+
+
+      ! Add in the water vapor mass to compute the moist pressure profiles
+      ! used by CAM's physics packages.
+      ! **N.B.** The input water vapor mixing ratio in phys_state is based on dry air. It
+      ! gets converted to a wet basis later.
+
+      do k = 1, pver
+         ! To be consistent with the total energy formula in the physics
+         ! check_energy module, only include water vapor in moist pdel.
+         factor(:ncol,k) = 1._r8 + phys_state(lchnk)%q(:ncol,k,1)
+         phys_state(lchnk)%pdel(:ncol,k) = phys_state(lchnk)%pdeldry(:ncol,k)*factor(:ncol,k)
+         phys_state(lchnk)%rpdel(:ncol,k) = 1._r8 / phys_state(lchnk)%pdel(:ncol,k)
+      end do
+
+      ! Assume no water vapor above top of model.
+      phys_state(lchnk)%pint(:ncol,1) = phys_state(lchnk)%pintdry(:ncol,1)
+      do k = 1, pver
+         phys_state(lchnk)%pint(:ncol,k+1) = phys_state(lchnk)%pint(:ncol,k) + &
+                                             phys_state(lchnk)%pdel(:ncol,k)
+         call shr_vmath_log(phys_state(lchnk)%pint(:ncol,k), &
+                            phys_state(lchnk)%lnpint(:ncol,k), ncol)
+      end do
+      call shr_vmath_log(phys_state(lchnk)%pint(:ncol,pverp), &
+                         phys_state(lchnk)%lnpint(:ncol,pverp), ncol)
+
+      ! Surface pressure is the lowest moist interface pressure.
+      phys_state(lchnk)%ps(:ncol) = phys_state(lchnk)%pint(:ncol,pverp)
+
+      do k = 1, pver
+         phys_state(lchnk)%pmid(:ncol,k) = (phys_state(lchnk)%pint(:ncol,k+1) + &
+                                            phys_state(lchnk)%pint(:ncol,k)) / 2._r8
+
+         call shr_vmath_log(phys_state(lchnk)%pmid(:ncol,k), &
+                            phys_state(lchnk)%lnpmid(:ncol,k), ncol)
+      end do
+
+      ! Exner function relative to the reference pressure pref.
+      do k = 1, pver
+         phys_state(lchnk)%exner(:ncol,k) = (pref / phys_state(lchnk)%pmid(:ncol,k))**cappa
+      end do
+
+      ! Tracers from MPAS are in dry mixing ratio units. CAM's physics package expects constituents
+      ! which have been declared to be type 'wet' when they are registered to be represented by mixing
+      ! ratios based on moist air mass (dry air + water vapor). Do appropriate conversion here.
+      factor(:ncol,:) = 1._r8/factor(:ncol,:)   ! factor now converts dry basis -> moist basis
+      do m = 1,pcnst
+         if (cnst_type(m) == 'wet') then
+            phys_state(lchnk)%q(:ncol,:,m) = factor(:ncol,:)*phys_state(lchnk)%q(:ncol,:,m)
+         end if
+      end do
+
+      ! fill zvirv 2D variables to be compatible with geopotential_t interface
+      zvirv(:,:) = zvir
+
+      ! Compute geopotential height above surface - based on full pressure
+      call geopotential_t( &
+         phys_state(lchnk)%lnpint, phys_state(lchnk)%lnpmid, phys_state(lchnk)%pint, &
+         phys_state(lchnk)%pmid, phys_state(lchnk)%pdel, phys_state(lchnk)%rpdel, &
+         phys_state(lchnk)%t, phys_state(lchnk)%q(:,:,1), rairv(:,:,lchnk), gravit, zvirv, &
+         phys_state(lchnk)%zi, phys_state(lchnk)%zm, ncol)
+
+      ! Compute initial dry static energy, include surface geopotential
+      do k = 1, pver
+         phys_state(lchnk)%s(:ncol,k) = cpairv(:ncol,k,lchnk)*phys_state(lchnk)%t(:ncol,k) &
+                                      + gravit*phys_state(lchnk)%zm(:ncol,k) + phys_state(lchnk)%phis(:ncol)
+      end do
+
+      ! Compute energy and water integrals of input state
+      pbuf_chnk => pbuf_get_chunk(pbuf2d, lchnk)
+      call check_energy_timestep_init(phys_state(lchnk), phys_tend(lchnk), pbuf_chnk)
+
+   end do
+
+end subroutine derived_phys
+
+!=========================================================================================
+
+subroutine derived_tend(nCellsSolve, nCells, t_tend, u_tend, v_tend, qv_tend, dyn_in)
+
+   ! Derive the physics tendencies required by MPAS from the tendencies produced by
+   ! CAM's physics package.
+   !
+   ! Statement order below is significant: each tendency field is fully
+   ! assembled (including coupling with rho_zz) before its halo update is
+   ! issued, and the cell-to-edge wind projection requires up-to-date halos
+   ! for u_tend and v_tend.
+
+   use cam_mpas_subdriver, only : cam_mpas_cell_to_edge_winds, cam_mpas_update_halo
+   use mpas_constants, only : Rv_over_Rd => rvord
+
+   ! Arguments
+   integer, intent(in) :: nCellsSolve                  ! number of cells owned by this task
+   integer, intent(in) :: nCells                       ! number of cells including halo cells
+   real(r8), intent(in) :: t_tend(pver,nCellsSolve)    ! physics dtdt
+   real(r8), intent(in) :: qv_tend(pver,nCellsSolve)   ! physics dqvdt
+   ! u_tend/v_tend are overwritten in place below (coupled with rho_zz); they
+   ! are dimensioned nCells+1 -- presumably to cover halo cells plus MPAS's
+   ! extra invalid cell -- TODO confirm
+   real(r8), intent(inout) :: u_tend(pver,nCells+1)    ! physics dudt
+   real(r8), intent(inout) :: v_tend(pver,nCells+1)    ! physics dvdt
+   type(dyn_import_t), intent(inout) :: dyn_in
+
+
+   ! Local variables
+
+   ! variables from dynamics import container
+   integer :: nEdges
+   real(r8), pointer :: ru_tend(:,:)      ! filled by the cell-to-edge projection below
+   real(r8), pointer :: rtheta_tend(:,:)  ! assembled in place below
+   real(r8), pointer :: rho_tend(:,:)     ! set to zero below
+
+   real(r8), pointer :: normal(:,:)
+   real(r8), pointer :: east(:,:)
+   real(r8), pointer :: north(:,:)
+   integer, pointer :: cellsOnEdge(:,:)
+
+   real(r8), pointer :: theta(:,:)
+   real(r8), pointer :: exner(:,:)
+   real(r8), pointer :: rho_zz(:,:)
+   real(r8), pointer :: tracers(:,:,:)
+
+   integer :: index_qv
+
+   character(len=*), parameter :: subname = 'dp_coupling:derived_tend'
+   !----------------------------------------------------------------------------
+
+   ! Associate local pointers with fields in the dynamics import container.
+   nEdges = dyn_in % nEdges
+   ru_tend => dyn_in % ru_tend
+   rtheta_tend => dyn_in % rtheta_tend
+   rho_tend => dyn_in % rho_tend
+
+   east => dyn_in % east
+   north => dyn_in % north
+   normal => dyn_in % normal
+   cellsOnEdge => dyn_in % cellsOnEdge
+
+   theta => dyn_in % theta
+   exner => dyn_in % exner
+   rho_zz => dyn_in % rho_zz
+   tracers => dyn_in % tracers
+
+   index_qv = dyn_in % index_qv
+
+
+   !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+   ! Momentum tendency
+   !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+   !
+   ! Couple u and v tendencies with rho_zz
+   !
+   u_tend(:,:) = u_tend(:,:) * rho_zz(:,:)
+   v_tend(:,:) = v_tend(:,:) * rho_zz(:,:)
+
+   !
+   ! Update halos for u_tend and v_tend
+   !
+   call cam_mpas_update_halo('tend_uzonal', endrun) ! dyn_in % u_tend
+   call cam_mpas_update_halo('tend_umerid', endrun) ! dyn_in % v_tend
+
+   !
+   ! Project u and v tendencies to edge normal tendency
+   !
+   call cam_mpas_cell_to_edge_winds(nEdges, u_tend, v_tend, east, north, normal, &
+                                    cellsOnEdge, ru_tend)
+
+   !
+   ! Update halo for edge normal tendency
+   !
+   call cam_mpas_update_halo('tend_ru_physics', endrun)
+
+
+   !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+   ! Temperature tendency
+   !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+   !
+   ! Convert temperature tendency to potential temperature tendency
+   !
+   rtheta_tend(:,1:nCellsSolve) = t_tend(:,1:nCellsSolve) / exner(:,1:nCellsSolve)
+
+   !
+   ! Couple theta tendency with rho_zz
+   !
+   rtheta_tend(:,1:nCellsSolve) = rtheta_tend(:,1:nCellsSolve) * rho_zz(:,1:nCellsSolve)
+
+   !
+   ! Modify with moisture terms
+   !
+   rtheta_tend(:,1:nCellsSolve) = rtheta_tend(:,1:nCellsSolve) * (1.0_r8 + Rv_over_Rd * tracers(index_qv,:,1:nCellsSolve))
+   rtheta_tend(:,1:nCellsSolve) = rtheta_tend(:,1:nCellsSolve) + Rv_over_Rd * theta(:,1:nCellsSolve) * qv_tend(:,1:nCellsSolve)
+
+   !
+   ! Update halo for rtheta_m tendency
+   !
+   call cam_mpas_update_halo('tend_rtheta_physics', endrun)
+
+
+   !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+   ! Density tendency
+   !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+   ! CAM physics provides no tendency of dry-air mass, so the density tendency
+   ! handed to the dycore is identically zero.
+   rho_tend = 0.0_r8
+
+end subroutine derived_tend
+
+!=========================================================================================
+
+subroutine dry_hydrostatic_pressure(nCells, nVertLevels, zz, zgrid, rho_zz, theta_m, pmiddry, pintdry)
+
+   ! Compute dry hydrostatic pressure at layer interfaces and midpoints.
+   !
+   ! From the MPAS-A prognostic fields zz, zgrid, rho_zz, and theta_m, each
+   ! column's dry hydrostatic pressure profile is built in three steps:
+   !   (1) evaluate the full non-hydrostatic pressure and temperature at the
+   !       top layer midpoint from the equation of state;
+   !   (2) extrapolate pressure upward to the model top by combining the
+   !       hypsometric equation with (pint_top + pint_bot)/2 = pmid;
+   !   (3) integrate the hydrostatic relation downward to all interfaces,
+   !       averaging adjacent interfaces to obtain midpoint pressures.
+   ! The vertical dimension for 3-d arrays is innermost, and k=1 represents
+   ! the lowest layer or level in the fields.
+   !
+   ! IMPORTANT NOTE: At present, this routine is probably not correct when there
+   ! is moisture in the atmosphere.
+   !
+   ! TODO: Should the temperatures used in steps (2) and (3) be virtual temperature?
+   ! TODO: Is it problematic that the computed temperature is consistent with the
+   !       non-hydrostatic pressure?
+
+   use mpas_constants, only : cp, rgas, cv, gravity, p0
+
+   ! Arguments
+   integer, intent(in) :: nCells
+   integer, intent(in) :: nVertLevels
+   real(r8), dimension(nVertLevels, nCells), intent(in) :: zz        ! d(zeta)/dz [-]
+   real(r8), dimension(nVertLevels+1, nCells), intent(in) :: zgrid   ! geometric heights of layer interfaces [m]
+   real(r8), dimension(nVertLevels, nCells), intent(in) :: rho_zz    ! dry density / zz [kg m^-3]
+   real(r8), dimension(nVertLevels, nCells), intent(in) :: theta_m   ! potential temperature * (1 + Rv/Rd * qv)
+   real(r8), dimension(nVertLevels, nCells), intent(out) :: pmiddry  ! layer midpoint dry hydrostatic pressure [Pa]
+   real(r8), dimension(nVertLevels+1, nCells), intent(out) :: pintdry ! layer interface dry hydrostatic pressure [Pa]
+
+   ! Local variables
+   integer :: cell, lev
+   real(r8) :: p_top    ! full non-hydrostatic pressure at the top layer midpoint [Pa]
+   real(r8) :: t_top    ! temperature at the top layer midpoint [K]
+   real(r8) :: dlayer   ! geometric thickness of a layer [m]
+
+   do cell = 1, nCells
+
+      ! Step (1): equation of state at the top layer midpoint.
+      p_top = p0 * (rgas * rho_zz(nVertLevels,cell) * zz(nVertLevels,cell) * theta_m(nVertLevels,cell) / p0)**(cp/cv)
+      t_top = theta_m(nVertLevels,cell) * &
+              (zz(nVertLevels,cell) * rgas * rho_zz(nVertLevels,cell) * theta_m(nVertLevels,cell) / p0)**(rgas/(cp-rgas))
+
+      ! Step (2): extrapolate from the top layer midpoint to the model top.
+      dlayer = zgrid(nVertLevels+1,cell) - zgrid(nVertLevels,cell)
+      pintdry(nVertLevels+1,cell) = 2.0 * p_top &
+                                    / (1.0 + exp( dlayer * gravity / rgas / t_top))
+
+      ! Step (3): hydrostatic integration downward from the model top.  The
+      ! midpoint pressure is the average of the bounding interface pressures,
+      ! consistent with the rho_zz and theta_m values prognosed by MPAS.
+      do lev = nVertLevels, 1, -1
+         dlayer = zgrid(lev+1,cell) - zgrid(lev,cell)
+         pintdry(lev,cell) = pintdry(lev+1,cell) + gravity * zz(lev,cell) * rho_zz(lev,cell) * dlayer
+         pmiddry(lev,cell) = 0.5 * (pintdry(lev+1,cell) + pintdry(lev,cell))
+      end do
+
+   end do
+
+end subroutine dry_hydrostatic_pressure
+
+!=========================================================================================
+
+end module dp_coupling
diff --git a/src/dynamics/mpas/driver/README b/src/dynamics/mpas/driver/README
new file mode 100644
index 0000000000..63230751bd
--- /dev/null
+++ b/src/dynamics/mpas/driver/README
@@ -0,0 +1,2 @@
+Code in this directory is compiled using the MPAS-Atmosphere Makefile
+(src/dynamics/mpas/Makefile).
diff --git a/src/dynamics/mpas/driver/cam_mpas_subdriver.F90 b/src/dynamics/mpas/driver/cam_mpas_subdriver.F90
new file mode 100644
index 0000000000..b7f21c822d
--- /dev/null
+++ b/src/dynamics/mpas/driver/cam_mpas_subdriver.F90
@@ -0,0 +1,2510 @@
+module cam_mpas_subdriver
+
+!-------------------------------------------------------------------------------
+!
+! Handles the initialization of MPAS infrastructure in several phases
+!
+! This module mimics the functionality provided by stand-alone MPAS-Atmosphere's
+! sub-driver (i.e., mpas_subdriver.F), but with minor modifications to code to
+! accommodate the fact that MPAS-A is being driven by CAM.
+!
+!-------------------------------------------------------------------------------
+
+
+ use mpas_derived_types, only : core_type, dm_info, domain_type, MPAS_Clock_type
+
+ implicit none
+
+ public :: cam_mpas_init_phase1, &
+ cam_mpas_init_phase2, &
+ cam_mpas_init_phase3, &
+ cam_mpas_init_phase4, &
+ cam_mpas_define_scalars, &
+ cam_mpas_get_global_dims, &
+ cam_mpas_get_global_coords, &
+ cam_mpas_get_global_blocks, &
+ cam_mpas_read_static, &
+ cam_mpas_setup_restart, &
+ cam_mpas_read_restart, &
+ cam_mpas_write_restart, &
+ cam_mpas_compute_unit_vectors, &
+ cam_mpas_update_halo, &
+ cam_mpas_cell_to_edge_winds, &
+ cam_mpas_run, &
+ cam_mpas_finalize, &
+ cam_mpas_debug_stream
+ public :: corelist, domain_ptr
+
+ private
+
+
+ type (core_type), pointer :: corelist => null()
+ type (domain_type), pointer :: domain_ptr => null()
+ type (MPAS_Clock_type), pointer :: clock => null()
+
+ !
+ ! This interface should be compatible with CAM's endrun routine
+ !
+ abstract interface
+ subroutine halt_model(mesg, ierr)
+ use shr_kind_mod, only : shr_kind_in
+ character(len=*), intent(in), optional :: mesg
+ integer(kind=shr_kind_in), intent(in), optional :: ierr
+ end subroutine halt_model
+ end interface
+
+
+contains
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_init_phase1
+ !
+ !> \brief Tracks mpas_init up to the point of reading namelist
+ !> \author Michael Duda
+ !> \date 19 April 2019
+ !> \details
+ !> This routine follows the stand-alone MPAS subdriver up to, but not
+ !> including, the point where namelists are read.
+ !
+ !-----------------------------------------------------------------------
+    subroutine cam_mpas_init_phase1(mpicom, endrun, logUnits, realkind)
+
+        use mpas_domain_routines, only : mpas_allocate_domain
+        use mpas_framework, only : mpas_framework_init_phase1
+        use atm_core_interface, only : atm_setup_core, atm_setup_domain
+        use mpas_pool_routines, only : mpas_pool_add_config   ! NOTE(review): not referenced in this routine
+        use mpas_kind_types, only : RKIND
+
+        ! Dummy argument
+        integer, intent(in) :: mpicom                  ! MPI communicator supplied by CAM
+        procedure(halt_model) :: endrun                ! CAM's fatal-error handler
+        integer, dimension(2), intent(in) :: logUnits  ! unit numbers for MPAS log output
+        integer, intent(in) :: realkind                ! CAM's real kind; must match MPAS RKIND
+
+        ! Local variables
+        integer :: ierr
+
+        character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_init_phase1'
+
+
+        ! Allocate the core and domain list heads used by all subsequent phases;
+        ! corelist and domain_ptr are module-level state.
+        allocate(corelist)
+        nullify(corelist % next)
+
+        allocate(corelist % domainlist)
+        nullify(corelist % domainlist % next)
+
+        domain_ptr => corelist % domainlist
+        domain_ptr % core => corelist
+
+        call mpas_allocate_domain(domain_ptr)
+
+
+        !
+        ! Initialize MPAS infrastructure (principally, the mpas_dmpar module)
+        !
+        call mpas_framework_init_phase1(domain_ptr % dminfo, mpi_comm=mpicom)
+
+        call atm_setup_core(corelist)
+        call atm_setup_domain(domain_ptr)
+
+
+        ! Set up the log manager as early as possible so we can use it for any errors/messages during subsequent init steps
+        ! We need:
+        ! 1) domain_ptr to be allocated,
+        ! 2) dmpar_init complete to access dminfo,
+        ! 3) *_setup_core to assign the setup_log function pointer
+        ierr = domain_ptr % core % setup_log(domain_ptr % logInfo, domain_ptr, unitNumbers=logUnits)
+        if ( ierr /= 0 ) then
+            call endrun(subname//': FATAL: Log setup failed for MPAS-A dycore')
+        end if
+
+        ! CAM does not yet allow running the dycore at a different precision than
+        ! the physics package. Check that the real kinds are the same.
+        if (realkind /= RKIND) then
+            call endrun(subname//': FATAL: CAM and MPAS real kinds do not match')
+        end if
+
+    end subroutine cam_mpas_init_phase1
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_init_phase2
+ !
+ !> \brief Tracks mpas_init after namelists have been read
+ !> \author Michael Duda
+ !> \date 19 April 2019
+ !> \details
+ !> This routine follows the stand-alone MPAS subdriver from the point
+ !> where we call the second phase of MPAS framework initialization up
+    !>  to the check on the existence of the streams file.
+ !
+ !-----------------------------------------------------------------------
+    subroutine cam_mpas_init_phase2(pio_subsystem, endrun, cam_calendar)
+
+        use mpas_log, only : mpas_log_write   ! NOTE(review): not referenced in this routine
+        use mpas_kind_types, only : ShortStrKIND
+        use pio_types, only : iosystem_desc_t
+
+        use mpas_framework, only : mpas_framework_init_phase2
+
+        ! Arguments
+        type (iosystem_desc_t), pointer :: pio_subsystem   ! CAM's PIO subsystem, shared with MPAS I/O
+        procedure(halt_model) :: endrun                    ! CAM's fatal-error handler
+        character(len=*), intent(in) :: cam_calendar       ! CAM calendar name ('noleap' or 'gregorian')
+
+        ! Local variables
+        integer :: ierr
+        logical :: streamsExists   ! NOTE(review): not referenced in this routine
+
+        character(len=ShortStrKIND) :: mpas_calendar
+
+        character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_init_phase2'
+
+
+        !
+        ! Translate between CAM calendar names and MPAS calendar names
+        !
+        select case(trim(cam_calendar))
+            case ('noleap')
+                mpas_calendar = 'gregorian_noleap'
+            case ('gregorian')
+                mpas_calendar = 'gregorian'
+            case default
+                call endrun(subname//': FATAL: Unrecognized calendar type '''//trim(cam_calendar)//'''')
+        end select
+
+        ! 4) Continue with normal procedure from MPAS subdriver
+        call mpas_framework_init_phase2(domain_ptr, io_system=pio_subsystem, calendar=trim(mpas_calendar))
+
+        ! Each of the core setup steps below is fatal on failure.
+        ierr = domain_ptr % core % define_packages(domain_ptr % packages)
+        if ( ierr /= 0 ) then
+            call endrun(subname//': FATAL: Package definition failed for core '//trim(domain_ptr % core % coreName))
+        end if
+
+        ierr = domain_ptr % core % setup_packages(domain_ptr % configs, domain_ptr % packages, domain_ptr % iocontext)
+        if ( ierr /= 0 ) then
+            call endrun(subname//': FATAL: Package setup failed for core '//trim(domain_ptr % core % coreName))
+        end if
+
+        ierr = domain_ptr % core % setup_decompositions(domain_ptr % decompositions)
+        if ( ierr /= 0 ) then
+            call endrun(subname//': FATAL: Decomposition setup failed for core '//trim(domain_ptr % core % coreName))
+        end if
+
+        ierr = domain_ptr % core % setup_clock(domain_ptr % clock, domain_ptr % configs)
+        if ( ierr /= 0 ) then
+            call endrun(subname//': FATAL: Clock setup failed for core '//trim(domain_ptr % core % coreName))
+        end if
+
+        ! At this point, we should be ready to set up decompositions, build halos, allocate blocks, etc. in dyn_grid_init
+
+    end subroutine cam_mpas_init_phase2
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_init_phase3
+ !
+ !> \brief Finish MPAS initialization
+ !> \author Michael Duda
+ !> \date 19 April 2019
+ !> \details
+ !> This routine completes the initialization of the MPAS infrastructure and
+ !> the MPAS core, including the allocation of all fields managed by MPAS.
+ !> The num_scalars argument should be set to CAM's value for PCNST,
+ !> the number of constituents.
+ !
+ !-----------------------------------------------------------------------
+    subroutine cam_mpas_init_phase3(fh_ini, num_scalars, endrun)
+
+        use mpas_log, only : mpas_log_write
+        use pio, only : file_desc_t
+        use iso_c_binding, only : c_int, c_char, c_ptr, c_loc
+
+        use mpas_derived_types, only : MPAS_Time_type, MPAS_TimeInterval_type
+        use mpas_derived_types, only : MPAS_IO_PNETCDF, MPAS_IO_PNETCDF5, MPAS_IO_NETCDF, MPAS_IO_NETCDF4
+        use mpas_derived_types, only : MPAS_START_TIME
+        use mpas_derived_types, only : MPAS_STREAM_MGR_NOERR
+        use mpas_timekeeping, only : mpas_get_clock_time, mpas_get_time, mpas_expand_string, mpas_set_time, &
+                                     mpas_set_timeInterval
+        use mpas_stream_manager, only : MPAS_stream_mgr_init, mpas_build_stream_filename, MPAS_stream_mgr_validate_streams
+        use mpas_kind_types, only : StrKIND
+        use mpas_c_interfacing, only : mpas_c_to_f_string, mpas_f_to_c_string
+        use mpas_bootstrapping, only : mpas_bootstrap_framework_phase1, mpas_bootstrap_framework_phase2
+        use mpas_pool_routines, only : mpas_pool_add_config
+
+        ! Arguments
+        type (file_desc_t), intent(inout) :: fh_ini   ! PIO file handle for the initial-conditions file
+        integer, intent(in) :: num_scalars            ! number of constituents (CAM's PCNST)
+        procedure(halt_model) :: endrun               ! CAM's fatal-error handler (unused here -- TODO confirm)
+
+        ! NOTE(review): most of the locals below (the c_* interop strings and the
+        ! stream/time variables) are not referenced in this routine; they look
+        ! like leftovers from the stand-alone MPAS subdriver -- candidates for
+        ! cleanup.
+        integer :: ierr
+        character(kind=c_char), dimension(StrKIND+1) :: c_filename       ! StrKIND+1 for C null-termination character
+        integer(kind=c_int) :: c_comm
+        integer(kind=c_int) :: c_ierr
+        type (c_ptr) :: mgr_p
+        character(len=StrKIND) :: mesh_stream
+        character(len=StrKIND) :: mesh_filename
+        character(len=StrKIND) :: mesh_filename_temp
+        character(len=StrKIND) :: ref_time_temp
+        character(len=StrKIND) :: filename_interval_temp
+        character(kind=c_char), dimension(StrKIND+1) :: c_mesh_stream
+        character(kind=c_char), dimension(StrKIND+1) :: c_mesh_filename_temp
+        character(kind=c_char), dimension(StrKIND+1) :: c_ref_time_temp
+        character(kind=c_char), dimension(StrKIND+1) :: c_filename_interval_temp
+        character(kind=c_char), dimension(StrKIND+1) :: c_iotype
+        type (MPAS_Time_type) :: start_time
+        type (MPAS_Time_type) :: ref_time
+        type (MPAS_TimeInterval_type) :: filename_interval
+        character(len=StrKIND) :: start_timestamp
+        character(len=StrKIND) :: iotype
+        logical :: streamsExists
+        integer :: mesh_iotype
+        integer :: blockID
+        character(len=StrKIND) :: timeStamp
+
+        character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_init_phase3'
+
+
+        ! Placeholder name; the actual mesh data comes through the fh_ini PIO handle.
+        mesh_filename = 'external mesh file'
+
+        !
+        ! Adding a config named 'cam_pcnst' with the number of constituents will indicate to
+        ! MPAS-A setup code that it is operating as a CAM dycore, and that it is necessary to
+        ! allocate scalars separately from other Registry-defined fields
+        !
+        call mpas_pool_add_config(domain_ptr % configs, 'cam_pcnst', num_scalars)
+
+        mesh_iotype = MPAS_IO_NETCDF ! Not actually used
+        call mpas_bootstrap_framework_phase1(domain_ptr, mesh_filename, mesh_iotype, pio_file_desc=fh_ini)
+
+        !
+        ! Finalize the setup of blocks and fields
+        !
+        call mpas_bootstrap_framework_phase2(domain_ptr, pio_file_desc=fh_ini)
+
+    end subroutine cam_mpas_init_phase3
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_init_phase4
+ !
+ !> \brief Finish MPAS initialization
+ !> \author Michael Duda
+ !> \date 29 February 2020
+ !> \details
+ !> This routine completes the initialization of the MPAS core, essentially
+ !> following what is done in mpas_atm_core.F::atm_core_init(), but without
+ !> any calls to the MPAS-A diagnostics framework or the MPAS stream manager.
+ !
+ !-----------------------------------------------------------------------
+    subroutine cam_mpas_init_phase4(endrun)
+
+        use mpas_timekeeping, only : mpas_get_clock_time, mpas_get_time, MPAS_START_TIME
+        use mpas_kind_types, only : StrKIND, RKIND
+        use mpas_atm_dimensions, only : mpas_atm_set_dims
+        use mpas_atm_threading, only : mpas_atm_threading_init
+        use mpas_derived_types, only : mpas_pool_type, field2DReal, MPAS_Time_type
+        use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_dimension, mpas_pool_get_config, &
+                                       mpas_pool_get_field, mpas_pool_get_array, mpas_pool_initialize_time_levels
+        use atm_core, only : atm_mpas_init_block, core_clock => clock
+        use mpas_dmpar, only : mpas_dmpar_exch_halo_field
+
+        ! Arguments
+        procedure(halt_model) :: endrun   ! CAM's fatal-error handler
+
+        ! Local variables
+        real (kind=RKIND), pointer :: dt
+
+        ! NOTE(review): 'timeStamp' and 'i' appear unused in this routine.
+        character(len=StrKIND) :: timeStamp
+        integer :: i
+        logical, pointer :: config_do_restart
+
+        type (mpas_pool_type), pointer :: state
+        type (mpas_pool_type), pointer :: mesh
+        type (mpas_pool_type), pointer :: diag
+        type (field2DReal), pointer :: u_field, pv_edge_field, ru_field, rw_field
+        character (len=StrKIND), pointer :: xtime
+        character (len=StrKIND), pointer :: initial_time1, initial_time2
+        type (MPAS_Time_Type) :: startTime
+
+        integer, pointer :: nVertLevels, maxEdges, maxEdges2, num_scalars
+
+        integer :: ierr
+        character(len=StrKIND) :: startTimeStamp
+
+        character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_init_phase4'
+
+        !
+        ! Setup threading
+        !
+        call mpas_atm_threading_init(domain_ptr % blocklist, ierr)
+        if ( ierr /= 0 ) then
+            call endrun('Threading setup failed for core '//trim(domain_ptr % core % coreName))
+        end if
+
+
+        !
+        ! Set up inner dimensions used by arrays in optimized dynamics routines
+        !
+        ! NOTE(review): the 'state' subpool is fetched again twice further down;
+        ! a single fetch would suffice.
+        call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'state', state)
+        call mpas_pool_get_dimension(state, 'nVertLevels', nVertLevels)
+        call mpas_pool_get_dimension(state, 'maxEdges', maxEdges)
+        call mpas_pool_get_dimension(state, 'maxEdges2', maxEdges2)
+        call mpas_pool_get_dimension(state, 'num_scalars', num_scalars)
+        call mpas_atm_set_dims(nVertLevels, maxEdges, maxEdges2, num_scalars)
+
+        !
+        ! Set "local" clock to point to the clock contained in the domain type
+        !
+        clock => domain_ptr % clock
+        core_clock => domain_ptr % clock
+
+
+        call mpas_pool_get_config(domain_ptr % blocklist % configs, 'config_do_restart', config_do_restart)
+        call mpas_pool_get_config(domain_ptr % blocklist % configs, 'config_dt', dt)
+
+
+        ! On a cold start, copy time level 1 of the state arrays into the
+        ! remaining time levels.
+        if (.not. config_do_restart) then
+            call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'state', state)
+            call mpas_pool_initialize_time_levels(state)
+        end if
+
+
+        !
+        ! Set startTimeStamp based on the start time of the simulation clock
+        !
+        startTime = mpas_get_clock_time(clock, MPAS_START_TIME, ierr)
+        if ( ierr /= 0 ) then
+            call endrun(subname//': failed to get MPAS_START_TIME')
+        end if
+        call mpas_get_time(startTime, dateTimeString=startTimeStamp)
+
+
+        ! Exchange the halo of the initial normal wind before block initialization.
+        call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'state', state)
+        call mpas_pool_get_field(state, 'u', u_field, 1)
+        call mpas_dmpar_exch_halo_field(u_field)
+
+        call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', mesh)
+        call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'state', state)
+
+        call atm_mpas_init_block(domain_ptr % dminfo, domain_ptr % streamManager, domain_ptr % blocklist, mesh, dt)
+
+        call mpas_pool_get_array(state, 'xtime', xtime, 1)
+        xtime = startTimeStamp
+
+        ! Initialize initial_time in second time level. We need to do this because initial state
+        ! is read into time level 1, and if we write output from the set of state arrays that
+        ! represent the original time level 2, the initial_time field will be invalid.
+        call mpas_pool_get_array(state, 'initial_time', initial_time1, 1)
+        call mpas_pool_get_array(state, 'initial_time', initial_time2, 2)
+        initial_time2 = initial_time1
+
+        ! Exchange halos of the diagnostic fields computed by block initialization.
+        call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'diag', diag)
+        call mpas_pool_get_field(diag, 'pv_edge', pv_edge_field)
+        call mpas_dmpar_exch_halo_field(pv_edge_field)
+
+        call mpas_pool_get_field(diag, 'ru', ru_field)
+        call mpas_dmpar_exch_halo_field(ru_field)
+
+        call mpas_pool_get_field(diag, 'rw', rw_field)
+        call mpas_dmpar_exch_halo_field(rw_field)
+
+    end subroutine cam_mpas_init_phase4
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_define_scalars
+ !
+ !> \brief Define the names of constituents at run-time
+ !> \author Michael Duda
+ !> \date 21 May 2020
+ !> \details
+ !> Given an array of constituent names, which must have size equal to the number
+ !> of scalars that were set in the call to cam_mpas_init_phase3, and given
+ !> a function to identify which scalars are moisture species, this routine defines
+ !> scalar constituents for the MPAS-A dycore.
+ !> Because the MPAS-A dycore expects all moisture constituents to appear in
+ !> a contiguous range of constituent indices, this routine may in general need
+ !> to reorder the constituents; to allow for mapping of indices between CAM
+ !> physics and the MPAS-A dycore, this routine returns index mapping arrays
+ !> mpas_from_cam_cnst and cam_from_mpas_cnst.
+ !>
+ !> Both mapping arrays are allocated here with size num_scalars:
+ !>   mpas_from_cam_cnst(i) = CAM constituent index of MPAS scalar i
+ !>   cam_from_mpas_cnst(i) = MPAS scalar index of CAM constituent i
+ !> On any failure, ierr is set non-zero and the routine returns early.
+ !>
+ !
+ !-----------------------------------------------------------------------
+ subroutine cam_mpas_define_scalars(block, mpas_from_cam_cnst, cam_from_mpas_cnst, ierr)
+
+ use mpas_derived_types, only : block_type
+
+ use mpas_derived_types, only : mpas_pool_type, field3dReal
+ use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_field, &
+ mpas_pool_get_dimension, mpas_pool_add_dimension
+ use mpas_attlist, only : mpas_add_att
+ use mpas_log, only : mpas_log_write
+ use mpas_derived_types, only : MPAS_LOG_ERR
+
+ use constituents, only: cnst_name, cnst_is_a_water_species
+
+ ! Arguments
+ type (block_type), pointer :: block                        ! MPAS block whose state/tend pools are modified
+ integer, dimension(:), pointer :: mpas_from_cam_cnst, cam_from_mpas_cnst  ! allocated and filled here
+ integer, intent(out) :: ierr                               ! 0 on success, 1 on any error
+
+ ! Local variables
+ character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_define_scalars'
+
+ integer :: i, j, timeLevs
+ integer, pointer :: num_scalars
+ integer :: num_moist          ! number of constituents that are water species
+ integer :: idx_passive        ! next free MPAS index for non-moisture constituents
+ type (mpas_pool_type), pointer :: statePool
+ type (mpas_pool_type), pointer :: tendPool
+ type (field3dReal), pointer :: scalarsField
+ character(len=128) :: tempstr
+ character :: moisture_char
+
+
+ ierr = 0
+
+ !
+ ! Define scalars
+ !
+ nullify(statePool)
+ call mpas_pool_get_subpool(block % structs, 'state', statePool)
+
+ if (.not. associated(statePool)) then
+ call mpas_log_write(trim(subname)//': ERROR: The ''state'' pool was not found.', &
+ messageType=MPAS_LOG_ERR)
+ ierr = 1
+ return
+ end if
+
+ nullify(num_scalars)
+ call mpas_pool_get_dimension(statePool, 'num_scalars', num_scalars)
+
+ !
+ ! The num_scalars dimension should have been defined by atm_core_interface::atm_allocate_scalars, and
+ ! if this dimension does not exist, something has gone wrong
+ !
+ if (.not. associated(num_scalars)) then
+ call mpas_log_write(trim(subname)//': ERROR: The ''num_scalars'' dimension does not exist in the ''state'' pool.', &
+ messageType=MPAS_LOG_ERR)
+ ierr = 1
+ return
+ end if
+
+ !
+ ! If at runtime there are not num_scalars names in the array of constituent names provided by CAM,
+ ! something has gone wrong
+ !
+ if (size(cnst_name) /= num_scalars) then
+ call mpas_log_write(trim(subname)//': ERROR: The number of constituent names is not equal to the num_scalars dimension', &
+ messageType=MPAS_LOG_ERR)
+ call mpas_log_write('size(cnst_name) = $i, num_scalars = $i', intArgs=[size(cnst_name), num_scalars], &
+ messageType=MPAS_LOG_ERR)
+ ierr = 1
+ return
+ end if
+
+ !
+ ! In CAM, the first scalar (if there are any) is always Q (specific humidity); if this is not
+ ! the case, something has gone wrong
+ !
+ if (size(cnst_name) > 0) then
+ if (trim(cnst_name(1)) /= 'Q') then
+ call mpas_log_write(trim(subname)//': ERROR: The first constituent is not Q', messageType=MPAS_LOG_ERR)
+ ierr = 1
+ return
+ end if
+ end if
+
+ !
+ ! Determine which of the constituents are moisture species
+ !
+ ! Moisture species are packed into the lowest MPAS scalar indices (1..num_moist),
+ ! preserving their relative order of appearance in cnst_name.
+ allocate(mpas_from_cam_cnst(num_scalars))
+ mpas_from_cam_cnst(:) = 0
+ num_moist = 0
+ do i = 1, size(cnst_name)
+ if (cnst_is_a_water_species(cnst_name(i))) then
+ num_moist = num_moist + 1
+ mpas_from_cam_cnst(num_moist) = i
+ end if
+ end do
+
+ !
+ ! If CAM has no scalars, let the only scalar in MPAS be 'qv' (a moisture species)
+ !
+ ! NOTE(review): given the size(cnst_name) /= num_scalars check above, this branch
+ ! appears unreachable (0 /= 1 would already have returned) -- confirm intent.
+ if (num_scalars == 1 .and. size(cnst_name) == 0) then
+ num_moist = 1
+ end if
+
+ !
+ ! Assign non-moisture constituents to mpas_from_cam_cnst(num_moist+1:size(cnst_name))
+ !
+ idx_passive = num_moist + 1
+ do i = 1, size(cnst_name)
+
+ ! If CAM constituent i is not already mapped as a moist constituent
+ if (.not. cnst_is_a_water_species(cnst_name(i))) then
+ mpas_from_cam_cnst(idx_passive) = i
+ idx_passive = idx_passive + 1
+ end if
+ end do
+
+ !
+ ! Create inverse map, cam_from_mpas_cnst
+ !
+ ! cam_from_mpas_cnst(cam_idx) = mpas_idx, the inverse of mpas_from_cam_cnst
+ allocate(cam_from_mpas_cnst(num_scalars))
+ cam_from_mpas_cnst(:) = 0
+
+ do i = 1, size(cnst_name)
+ cam_from_mpas_cnst(mpas_from_cam_cnst(i)) = i
+ end do
+
+ ! Number of time levels of the 'scalars' field in the 'state' pool
+ ! -- assumes two time levels; TODO confirm against the MPAS Registry
+ timeLevs = 2
+
+ do i = 1, timeLevs
+ nullify(scalarsField)
+ call mpas_pool_get_field(statePool, 'scalars', scalarsField, timeLevel=i)
+
+ if (.not. associated(scalarsField)) then
+ call mpas_log_write(trim(subname)//': ERROR: The ''scalars'' field was not found in the ''state'' pool', &
+ messageType=MPAS_LOG_ERR)
+ ierr = 1
+ return
+ end if
+
+ ! qv is always MPAS scalar 1: CAM guarantees Q is the first constituent, and Q is
+ ! a water species, so it maps to the first (moisture) index under the reordering
+ if (i == 1) call mpas_pool_add_dimension(statePool, 'index_qv', 1)
+ scalarsField % constituentNames(1) = 'qv'
+ call mpas_add_att(scalarsField % attLists(1) % attList, 'units', 'kg kg^{-1}')
+ call mpas_add_att(scalarsField % attLists(1) % attList, 'long_name', 'Water vapor mixing ratio')
+
+ ! Remaining scalar names follow the reordered (moisture-first) mapping
+ do j = 2, size(cnst_name)
+ scalarsField % constituentNames(j) = trim(cnst_name(mpas_from_cam_cnst(j)))
+ end do
+
+ end do
+
+ ! Moisture constituents occupy the contiguous index range moist_start..moist_end
+ call mpas_pool_add_dimension(statePool, 'moist_start', 1)
+ call mpas_pool_add_dimension(statePool, 'moist_end', num_moist)
+
+ !
+ ! Print a tabular summary of the mapping between constituent indices
+ !
+ ! scalarsField still points at the last time level here; constituent names are
+ ! assigned identically for every time level above
+ call mpas_log_write('')
+ call mpas_log_write(' i MPAS constituent mpas_from_cam_cnst(i) i CAM constituent cam_from_mpas_cnst(i)')
+ call mpas_log_write('------------------------------------------ ------------------------------------------')
+ do i = 1, min(num_scalars, size(cnst_name))
+ if (i <= num_moist) then
+ moisture_char = '*'
+ else
+ moisture_char = ' '
+ end if
+ write(tempstr, '(i3,1x,a16,1x,i18,8x,i3,1x,a16,1x,i18)') i, trim(scalarsField % constituentNames(i))//moisture_char, &
+ mpas_from_cam_cnst(i), &
+ i, trim(cnst_name(i)), &
+ cam_from_mpas_cnst(i)
+ call mpas_log_write(trim(tempstr))
+ end do
+ call mpas_log_write('------------------------------------------ ------------------------------------------')
+ call mpas_log_write('* = constituent used as a moisture species in MPAS-A dycore')
+ call mpas_log_write('')
+
+
+ !
+ ! Define scalars_tend
+ !
+ ! Mirrors the naming/ordering applied to 'scalars' above, with a 'tend_' prefix
+ nullify(tendPool)
+ call mpas_pool_get_subpool(block % structs, 'tend', tendPool)
+
+ if (.not. associated(tendPool)) then
+ call mpas_log_write(trim(subname)//': ERROR: The ''tend'' pool was not found.', &
+ messageType=MPAS_LOG_ERR)
+ ierr = 1
+ return
+ end if
+
+ ! Tendencies carry a single time level
+ timeLevs = 1
+
+ do i = 1, timeLevs
+ nullify(scalarsField)
+ call mpas_pool_get_field(tendPool, 'scalars_tend', scalarsField, timeLevel=i)
+
+ if (.not. associated(scalarsField)) then
+ call mpas_log_write(trim(subname)//': ERROR: The ''scalars_tend'' field was not found in the ''tend'' pool', &
+ messageType=MPAS_LOG_ERR)
+ ierr = 1
+ return
+ end if
+
+ if (i == 1) call mpas_pool_add_dimension(tendPool, 'index_qv', 1)
+ scalarsField % constituentNames(1) = 'tend_qv'
+ call mpas_add_att(scalarsField % attLists(1) % attList, 'units', 'kg m^{-3} s^{-1}')
+ call mpas_add_att(scalarsField % attLists(1) % attList, 'long_name', 'Tendency of water vapor mixing ratio')
+
+ do j = 2, size(cnst_name)
+ scalarsField % constituentNames(j) = 'tend_'//trim(cnst_name(mpas_from_cam_cnst(j)))
+ end do
+ end do
+
+ ! Same contiguous moisture range as in the 'state' pool
+ call mpas_pool_add_dimension(tendPool, 'moist_start', 1)
+ call mpas_pool_add_dimension(tendPool, 'moist_end', num_moist)
+
+ end subroutine cam_mpas_define_scalars
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_get_global_dims
+ !
+ !> \brief Returns global mesh dimensions
+ !> \author Michael Duda
+ !> \date 22 August 2019
+ !> \details
+ !> Computes, on every task, the global numbers of cells, edges, and vertices
+ !> (sums of the owned counts over all tasks), returns the maxEdges and
+ !> nVertLevels dimensions, and the maximum number of cells owned by any
+ !> single task.
+ !
+ !-----------------------------------------------------------------------
+ subroutine cam_mpas_get_global_dims(nCellsGlobal, nEdgesGlobal, nVerticesGlobal, maxEdges, nVertLevels, maxNCells)
+
+ use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_dimension
+ use mpas_derived_types, only : mpas_pool_type
+ use mpas_dmpar, only : mpas_dmpar_sum_int, mpas_dmpar_max_int
+
+ integer, intent(out) :: nCellsGlobal    ! total number of cells over all tasks
+ integer, intent(out) :: nEdgesGlobal    ! total number of edges over all tasks
+ integer, intent(out) :: nVerticesGlobal ! total number of vertices over all tasks
+ integer, intent(out) :: maxEdges        ! maxEdges dimension of the mesh
+ integer, intent(out) :: nVertLevels     ! number of vertical layers
+ integer, intent(out) :: maxNCells       ! largest owned-cell count on any task
+
+ type (mpas_pool_type), pointer :: mesh
+ integer, pointer :: ownedCells
+ integer, pointer :: ownedEdges
+ integer, pointer :: ownedVertices
+ integer, pointer :: maxEdgesDim
+ integer, pointer :: nVertLevelsDim
+
+
+ call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', mesh)
+
+ call mpas_pool_get_dimension(mesh, 'nCellsSolve', ownedCells)
+ call mpas_pool_get_dimension(mesh, 'nEdgesSolve', ownedEdges)
+ call mpas_pool_get_dimension(mesh, 'nVerticesSolve', ownedVertices)
+ call mpas_pool_get_dimension(mesh, 'maxEdges', maxEdgesDim)
+ call mpas_pool_get_dimension(mesh, 'nVertLevels', nVertLevelsDim)
+
+ ! maxEdges and nVertLevels are not decomposed dimensions, so the local
+ ! values are returned directly
+ maxEdges = maxEdgesDim
+ nVertLevels = nVertLevelsDim
+
+ ! Global totals are the sums of the owned ("Solve") counts across all tasks
+ call mpas_dmpar_sum_int(domain_ptr % dminfo, ownedCells, nCellsGlobal)
+ call mpas_dmpar_sum_int(domain_ptr % dminfo, ownedEdges, nEdgesGlobal)
+ call mpas_dmpar_sum_int(domain_ptr % dminfo, ownedVertices, nVerticesGlobal)
+
+ ! Maximum owned-cell count over all tasks
+ call mpas_dmpar_max_int(domain_ptr % dminfo, ownedCells, maxNCells)
+
+ end subroutine cam_mpas_get_global_dims
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_get_global_coords
+ !
+ !> \brief Returns global coordinate arrays
+ !> \author Michael Duda
+ !> \date 22 August 2019
+ !> \details
+ !> Gathers onto every task the latitude, longitude, and cell area for all
+ !> (global) cells. Each task scatters its owned values into a work array at
+ !> the cells' global indices (all other entries set to -huge), and a
+ !> max-reduction across tasks assembles the complete arrays, since each
+ !> global cell is owned by exactly one task.
+ !>
+ !> The caller must have allocated latCellGlobal, lonCellGlobal, and
+ !> areaCellGlobal with size equal to the global number of cells in the mesh.
+ !
+ !-----------------------------------------------------------------------
+ subroutine cam_mpas_get_global_coords(latCellGlobal, lonCellGlobal, areaCellGlobal)
+
+ use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_dimension, mpas_pool_get_array
+ use mpas_derived_types, only : mpas_pool_type
+ use mpas_kind_types, only : RKIND
+ use mpas_dmpar, only : mpas_dmpar_sum_int, mpas_dmpar_max_real_array
+
+ real (kind=RKIND), dimension(:), intent(out) :: latCellGlobal  ! latitude of every global cell
+ real (kind=RKIND), dimension(:), intent(out) :: lonCellGlobal  ! longitude of every global cell
+ real (kind=RKIND), dimension(:), intent(out) :: areaCellGlobal ! area of every global cell
+
+ integer :: i
+ integer :: nCellsGlobal
+ integer, pointer :: nOwnedCells
+ integer, dimension(:), pointer :: globalCellID
+ type (mpas_pool_type), pointer :: mesh
+ real (kind=RKIND), dimension(:), pointer :: latCell
+ real (kind=RKIND), dimension(:), pointer :: lonCell
+ real (kind=RKIND), dimension(:), pointer :: areaCell
+ real (kind=RKIND), dimension(:), allocatable :: scatterBuf
+
+
+ call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', mesh)
+ call mpas_pool_get_dimension(mesh, 'nCellsSolve', nOwnedCells)
+ call mpas_pool_get_array(mesh, 'indexToCellID', globalCellID)
+ call mpas_pool_get_array(mesh, 'latCell', latCell)
+ call mpas_pool_get_array(mesh, 'lonCell', lonCell)
+ call mpas_pool_get_array(mesh, 'areaCell', areaCell)
+
+ call mpas_dmpar_sum_int(domain_ptr % dminfo, nOwnedCells, nCellsGlobal)
+
+ ! NOTE(review): no check that size(latCellGlobal) == nCellsGlobal; the caller
+ ! is trusted to have sized the output arrays correctly
+
+ allocate(scatterBuf(nCellsGlobal))
+
+ !
+ ! latCellGlobal: scatter owned latitudes, then max-reduce across tasks
+ !
+ scatterBuf(:) = -huge(scatterBuf)
+ do i = 1, nOwnedCells
+ scatterBuf(globalCellID(i)) = latCell(i)
+ end do
+
+ call mpas_dmpar_max_real_array(domain_ptr % dminfo, nCellsGlobal, scatterBuf, latCellGlobal)
+
+ !
+ ! lonCellGlobal
+ !
+ scatterBuf(:) = -huge(scatterBuf)
+ do i = 1, nOwnedCells
+ scatterBuf(globalCellID(i)) = lonCell(i)
+ end do
+
+ call mpas_dmpar_max_real_array(domain_ptr % dminfo, nCellsGlobal, scatterBuf, lonCellGlobal)
+
+ !
+ ! areaCellGlobal
+ !
+ scatterBuf(:) = -huge(scatterBuf)
+ do i = 1, nOwnedCells
+ scatterBuf(globalCellID(i)) = areaCell(i)
+ end do
+
+ call mpas_dmpar_max_real_array(domain_ptr % dminfo, nCellsGlobal, scatterBuf, areaCellGlobal)
+
+ deallocate(scatterBuf)
+
+ end subroutine cam_mpas_get_global_coords
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_get_global_blocks
+ !
+ !> \brief Returns global block indexing arrays
+ !> \author Michael Duda
+ !> \date 22 August 2019
+ !> \details
+ !> Returns arrays with information about the number of columns in each global block,
+ !> which column indices are in each block, and which global block contains each
+ !> global column.
+ !>
+ !> It is assumed that nCellsPerBlock, indexToCellIDBlock, indexToBlockID, and
+ !> localCellIDBlock have been allocated by the caller with dimensions:
+ !>
+ !> nCellsPerBlock(num_blocks_global)
+ !> indexToCellIDBlock(maxNCells, num_blocks_global)
+ !> indexToBlockID(nCellsGlobal)
+ !> localCellIDBlock(nCellsGlobal)
+ !>
+ !> NOTE(review): the implementation assumes exactly one block per MPI task,
+ !> with global block ID equal to my_proc_id + 1 -- confirm this restriction.
+ !
+ !-----------------------------------------------------------------------
+ subroutine cam_mpas_get_global_blocks(nCellsPerBlock, indexToCellIDBlock, indexToBlockID, localCellIDBlock)
+
+ use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_dimension, mpas_pool_get_array
+ use mpas_derived_types, only : mpas_pool_type
+ use mpas_dmpar, only : mpas_dmpar_max_int_array
+
+ integer, dimension(:), intent(out) :: nCellsPerBlock       ! number of cells owned by each block
+ integer, dimension(:,:), intent(out) :: indexToCellIDBlock ! global cell IDs held by each block
+ integer, dimension(:), intent(out) :: indexToBlockID       ! owning block of each global cell
+ integer, dimension(:), intent(out) :: localCellIDBlock     ! local index of each global cell within its block
+
+ integer :: iCell
+ integer :: owningBlock, localCellID
+ type (mpas_pool_type), pointer :: meshPool
+ integer, pointer :: nCellsSolve
+ integer, dimension(:), pointer :: indexToCellID
+ integer, dimension(:), pointer :: temp1d
+
+
+ call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', meshPool)
+ call mpas_pool_get_dimension(meshPool, 'nCellsSolve', nCellsSolve)
+ call mpas_pool_get_array(meshPool, 'indexToCellID', indexToCellID)
+
+ !
+ ! nCellsPerBlock
+ !
+ ! Each task contributes its owned-cell count in the slot for its own block;
+ ! all other slots are 0, so a max reduction assembles the full array everywhere.
+ allocate(temp1d(size(nCellsPerBlock)))
+ temp1d(:) = 0
+ temp1d(domain_ptr % dminfo % my_proc_id + 1) = nCellsSolve
+
+ call mpas_dmpar_max_int_array(domain_ptr % dminfo, size(temp1d), temp1d, nCellsPerBlock)
+
+ deallocate(temp1d)
+
+ !
+ ! indexToBlockID
+ !
+ ! Fill with -1 so the owning task's (positive) block ID wins the max reduction
+ allocate(temp1d(size(indexToBlockID)))
+ temp1d(:) = -1
+ do iCell=1,nCellsSolve
+ temp1d(indexToCellID(iCell)) = domain_ptr % dminfo % my_proc_id + 1 ! block IDs are 1-based; my_proc_id is 0-based
+ end do
+
+ call mpas_dmpar_max_int_array(domain_ptr % dminfo, size(temp1d), temp1d, indexToBlockID)
+
+ deallocate(temp1d)
+
+ !
+ ! localCellIDBlock
+ !
+ ! Owned cells record their 1-based local index; the 0 fill loses the max reduction
+ allocate(temp1d(size(localCellIDBlock)))
+ temp1d(:) = 0
+ do iCell = 1, nCellsSolve
+ temp1d(indexToCellID(iCell)) = iCell
+ end do
+
+ call mpas_dmpar_max_int_array(domain_ptr % dminfo, size(temp1d), temp1d, localCellIDBlock)
+
+ deallocate(temp1d)
+
+ !
+ ! indexToCellIDBlock
+ !
+ ! Invert the two global maps into a per-block list of global cell indices
+ indexToCellIDBlock(:,:) = 0
+ do iCell = 1, size(localCellIDBlock) ! nCellsGlobal
+ owningBlock = indexToBlockID(iCell)
+ localCellID = localCellIDBlock(iCell)
+ indexToCellIDBlock(localCellID, owningBlock) = iCell
+ end do
+
+ end subroutine cam_mpas_get_global_blocks
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_read_static
+ !
+ !> \brief Reads time-invariant ("static") fields from an MPAS-A mesh file
+ !> \author Michael Duda
+ !> \date 6 January 2020
+ !> \details
+ !> This routine takes as input an opened PIO file descriptor and a routine
+ !> to call if catastrophic errors are encountered. An MPAS stream is constructed
+ !> from this file descriptor, and most of the fields that exist in MPAS's
+ !> "mesh" pool are read from this stream.
+ !> Upon successful completion, valid mesh fields may be accessed from the mesh
+ !> pool.
+ !>
+ !> \param[in] fh_ini  pointer to an already-opened PIO file descriptor
+ !> \param[in] endrun  procedure used to abort the model on any fatal error
+ !
+ !-----------------------------------------------------------------------
+ subroutine cam_mpas_read_static(fh_ini, endrun)
+
+ use pio, only : file_desc_t
+
+ use mpas_kind_types, only : StrKIND
+ use mpas_io_streams, only : MPAS_createStream, MPAS_closeStream, MPAS_streamAddField, MPAS_readStream
+ use mpas_derived_types, only : MPAS_IO_READ, MPAS_IO_NETCDF, MPAS_Stream_type, MPAS_pool_type, &
+ field0DReal, field1DReal, field2DReal, field3DReal, field1DInteger, field2DInteger, &
+ MPAS_STREAM_NOERR
+ use mpas_pool_routines, only : MPAS_pool_get_subpool, MPAS_pool_get_field, MPAS_pool_create_pool, MPAS_pool_destroy_pool, &
+ MPAS_pool_add_config
+ use mpas_dmpar, only : MPAS_dmpar_exch_halo_field
+ use mpas_stream_manager, only : postread_reindex
+
+ ! Arguments
+ type (file_desc_t), pointer :: fh_ini
+ procedure(halt_model) :: endrun
+
+ ! Local variables
+ character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_read_static'
+
+ character(len=StrKIND) :: errString
+
+ integer :: ierr
+ integer :: ierr_total     ! count of failed MPAS_streamAddField calls, reported once below
+ type (MPAS_pool_type), pointer :: meshPool
+ type (MPAS_pool_type), pointer :: reindexPool
+ type (field1DReal), pointer :: latCell, lonCell, xCell, yCell, zCell
+ type (field1DReal), pointer :: latEdge, lonEdge, xEdge, yEdge, zEdge
+ type (field1DReal), pointer :: latVertex, lonVertex, xVertex, yVertex, zVertex
+ type (field1DInteger), pointer :: indexToCellID, indexToEdgeID, indexToVertexID
+ type (field1DReal), pointer :: fEdge, fVertex
+ type (field1DReal), pointer :: areaCell, areaTriangle, dcEdge, dvEdge, angleEdge
+ type (field2DReal), pointer :: kiteAreasOnVertex, weightsOnEdge
+ type (field1DReal), pointer :: meshDensity
+ type (field1DInteger), pointer :: nEdgesOnCell, nEdgesOnEdge
+ type (field2DInteger), pointer :: cellsOnEdge, edgesOnCell, edgesOnEdge, cellsOnCell, verticesOnCell, &
+ verticesOnEdge, edgesOnVertex, cellsOnVertex
+ type (field0DReal), pointer :: cf1, cf2, cf3
+ type (field1DReal), pointer :: rdzw, dzu, rdzu, fzm, fzp
+ type (field2DReal), pointer :: zgrid, zxu, zz
+ type (field3DReal), pointer :: zb, zb3, deriv_two, cellTangentPlane, coeffs_reconstruct
+
+ type (field2DReal), pointer :: edgeNormalVectors, localVerticalUnitVectors, defc_a, defc_b
+
+ type (MPAS_Stream_type) :: mesh_stream
+
+
+ ! The stream name 'not_used' is presumably ignored because an open PIO
+ ! descriptor is supplied via pio_file_desc -- confirm in MPAS_createStream
+ call MPAS_createStream(mesh_stream, domain_ptr % ioContext, 'not_used', MPAS_IO_NETCDF, MPAS_IO_READ, &
+ pio_file_desc=fh_ini, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) then
+ call endrun(subname//': FATAL: Failed to create static input stream.')
+ end if
+
+ call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', meshPool)
+
+ !
+ ! Look up every static mesh field to be read; each pointer below is later
+ ! added to the stream, read, and halo-exchanged
+ !
+ call mpas_pool_get_field(meshPool, 'latCell', latCell)
+ call mpas_pool_get_field(meshPool, 'lonCell', lonCell)
+ call mpas_pool_get_field(meshPool, 'xCell', xCell)
+ call mpas_pool_get_field(meshPool, 'yCell', yCell)
+ call mpas_pool_get_field(meshPool, 'zCell', zCell)
+
+ call mpas_pool_get_field(meshPool, 'latEdge', latEdge)
+ call mpas_pool_get_field(meshPool, 'lonEdge', lonEdge)
+ call mpas_pool_get_field(meshPool, 'xEdge', xEdge)
+ call mpas_pool_get_field(meshPool, 'yEdge', yEdge)
+ call mpas_pool_get_field(meshPool, 'zEdge', zEdge)
+
+ call mpas_pool_get_field(meshPool, 'latVertex', latVertex)
+ call mpas_pool_get_field(meshPool, 'lonVertex', lonVertex)
+ call mpas_pool_get_field(meshPool, 'xVertex', xVertex)
+ call mpas_pool_get_field(meshPool, 'yVertex', yVertex)
+ call mpas_pool_get_field(meshPool, 'zVertex', zVertex)
+
+ call mpas_pool_get_field(meshPool, 'indexToCellID', indexToCellID)
+ call mpas_pool_get_field(meshPool, 'indexToEdgeID', indexToEdgeID)
+ call mpas_pool_get_field(meshPool, 'indexToVertexID', indexToVertexID)
+
+ call mpas_pool_get_field(meshPool, 'fEdge', fEdge)
+ call mpas_pool_get_field(meshPool, 'fVertex', fVertex)
+
+ call mpas_pool_get_field(meshPool, 'areaCell', areaCell)
+ call mpas_pool_get_field(meshPool, 'areaTriangle', areaTriangle)
+ call mpas_pool_get_field(meshPool, 'dcEdge', dcEdge)
+ call mpas_pool_get_field(meshPool, 'dvEdge', dvEdge)
+ call mpas_pool_get_field(meshPool, 'angleEdge', angleEdge)
+ call mpas_pool_get_field(meshPool, 'kiteAreasOnVertex', kiteAreasOnVertex)
+ call mpas_pool_get_field(meshPool, 'weightsOnEdge', weightsOnEdge)
+
+ call mpas_pool_get_field(meshPool, 'meshDensity', meshDensity)
+
+ call mpas_pool_get_field(meshPool, 'nEdgesOnCell', nEdgesOnCell)
+ call mpas_pool_get_field(meshPool, 'nEdgesOnEdge', nEdgesOnEdge)
+
+ call mpas_pool_get_field(meshPool, 'cellsOnEdge', cellsOnEdge)
+ call mpas_pool_get_field(meshPool, 'edgesOnCell', edgesOnCell)
+ call mpas_pool_get_field(meshPool, 'edgesOnEdge', edgesOnEdge)
+ call mpas_pool_get_field(meshPool, 'cellsOnCell', cellsOnCell)
+ call mpas_pool_get_field(meshPool, 'verticesOnCell', verticesOnCell)
+ call mpas_pool_get_field(meshPool, 'verticesOnEdge', verticesOnEdge)
+ call mpas_pool_get_field(meshPool, 'edgesOnVertex', edgesOnVertex)
+ call mpas_pool_get_field(meshPool, 'cellsOnVertex', cellsOnVertex)
+
+ call mpas_pool_get_field(meshPool, 'cf1', cf1)
+ call mpas_pool_get_field(meshPool, 'cf2', cf2)
+ call mpas_pool_get_field(meshPool, 'cf3', cf3)
+
+ call mpas_pool_get_field(meshPool, 'rdzw', rdzw)
+ call mpas_pool_get_field(meshPool, 'dzu', dzu)
+ call mpas_pool_get_field(meshPool, 'rdzu', rdzu)
+ call mpas_pool_get_field(meshPool, 'fzm', fzm)
+ call mpas_pool_get_field(meshPool, 'fzp', fzp)
+
+ call mpas_pool_get_field(meshPool, 'zgrid', zgrid)
+ call mpas_pool_get_field(meshPool, 'zxu', zxu)
+ call mpas_pool_get_field(meshPool, 'zz', zz)
+ call mpas_pool_get_field(meshPool, 'zb', zb)
+ call mpas_pool_get_field(meshPool, 'zb3', zb3)
+
+ call mpas_pool_get_field(meshPool, 'deriv_two', deriv_two)
+ call mpas_pool_get_field(meshPool, 'cellTangentPlane', cellTangentPlane)
+ call mpas_pool_get_field(meshPool, 'coeffs_reconstruct', coeffs_reconstruct)
+
+ call mpas_pool_get_field(meshPool, 'edgeNormalVectors', edgeNormalVectors)
+ call mpas_pool_get_field(meshPool, 'localVerticalUnitVectors', localVerticalUnitVectors)
+ call mpas_pool_get_field(meshPool, 'defc_a', defc_a)
+ call mpas_pool_get_field(meshPool, 'defc_b', defc_b)
+
+ !
+ ! Add each field to the stream; failures are counted and reported in a
+ ! single fatal message after all fields have been attempted
+ !
+ ierr_total = 0
+
+ call MPAS_streamAddField(mesh_stream, latCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, lonCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, xCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, yCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, zCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, latEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, lonEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, xEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, yEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, zEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, latVertex, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, lonVertex, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, xVertex, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, yVertex, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, zVertex, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, indexToCellID, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, indexToEdgeID, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, indexToVertexID, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, fEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, fVertex, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, areaCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, areaTriangle, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, dcEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, dvEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, angleEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, kiteAreasOnVertex, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, weightsOnEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, meshDensity, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, nEdgesOnCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, nEdgesOnEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, cellsOnEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, edgesOnCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, edgesOnEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, cellsOnCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, verticesOnCell, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, verticesOnEdge, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, edgesOnVertex, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, cellsOnVertex, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, cf1, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, cf2, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, cf3, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, rdzw, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, dzu, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, rdzu, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, fzm, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, fzp, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, zgrid, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, zxu, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, zz, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, zb, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, zb3, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, deriv_two, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, cellTangentPlane, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, coeffs_reconstruct, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ call MPAS_streamAddField(mesh_stream, edgeNormalVectors, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, localVerticalUnitVectors, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, defc_a, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+ call MPAS_streamAddField(mesh_stream, defc_b, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+ if (ierr_total > 0) then
+ write(errString, '(a,i0,a)') subname//': FATAL: Failed to add ', ierr_total, ' fields to static input stream.'
+ call endrun(trim(errString))
+ end if
+
+ ! Read record 1 of the stream -- NOTE(review): assumes the static fields are
+ ! stored in the file's first time frame
+ call MPAS_readStream(mesh_stream, 1, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) then
+ call endrun(subname//': FATAL: Failed to read static input stream.')
+ end if
+
+ call MPAS_closeStream(mesh_stream, ierr=ierr)
+ if (ierr /= MPAS_STREAM_NOERR) then
+ call endrun(subname//': FATAL: Failed to close static input stream.')
+ end if
+
+ !
+ ! Perform halo updates for all decomposed fields (i.e., fields with
+ ! an outermost dimension of nCells, nVertices, or nEdges)
+ !
+ ! The stream read fills only owned elements; halo cells/edges/vertices are
+ ! filled here from their owning tasks
+ call MPAS_dmpar_exch_halo_field(latCell)
+ call MPAS_dmpar_exch_halo_field(lonCell)
+ call MPAS_dmpar_exch_halo_field(xCell)
+ call MPAS_dmpar_exch_halo_field(yCell)
+ call MPAS_dmpar_exch_halo_field(zCell)
+
+ call MPAS_dmpar_exch_halo_field(latEdge)
+ call MPAS_dmpar_exch_halo_field(lonEdge)
+ call MPAS_dmpar_exch_halo_field(xEdge)
+ call MPAS_dmpar_exch_halo_field(yEdge)
+ call MPAS_dmpar_exch_halo_field(zEdge)
+
+ call MPAS_dmpar_exch_halo_field(latVertex)
+ call MPAS_dmpar_exch_halo_field(lonVertex)
+ call MPAS_dmpar_exch_halo_field(xVertex)
+ call MPAS_dmpar_exch_halo_field(yVertex)
+ call MPAS_dmpar_exch_halo_field(zVertex)
+
+ call MPAS_dmpar_exch_halo_field(indexToCellID)
+ call MPAS_dmpar_exch_halo_field(indexToEdgeID)
+ call MPAS_dmpar_exch_halo_field(indexToVertexID)
+
+ call MPAS_dmpar_exch_halo_field(fEdge)
+ call MPAS_dmpar_exch_halo_field(fVertex)
+
+ call MPAS_dmpar_exch_halo_field(areaCell)
+ call MPAS_dmpar_exch_halo_field(areaTriangle)
+ call MPAS_dmpar_exch_halo_field(dcEdge)
+ call MPAS_dmpar_exch_halo_field(dvEdge)
+ call MPAS_dmpar_exch_halo_field(angleEdge)
+ call MPAS_dmpar_exch_halo_field(kiteAreasOnVertex)
+ call MPAS_dmpar_exch_halo_field(weightsOnEdge)
+
+ call MPAS_dmpar_exch_halo_field(meshDensity)
+
+ call MPAS_dmpar_exch_halo_field(nEdgesOnCell)
+ call MPAS_dmpar_exch_halo_field(nEdgesOnEdge)
+
+ call MPAS_dmpar_exch_halo_field(cellsOnEdge)
+ call MPAS_dmpar_exch_halo_field(edgesOnCell)
+ call MPAS_dmpar_exch_halo_field(edgesOnEdge)
+ call MPAS_dmpar_exch_halo_field(cellsOnCell)
+ call MPAS_dmpar_exch_halo_field(verticesOnCell)
+ call MPAS_dmpar_exch_halo_field(verticesOnEdge)
+ call MPAS_dmpar_exch_halo_field(edgesOnVertex)
+ call MPAS_dmpar_exch_halo_field(cellsOnVertex)
+
+ call MPAS_dmpar_exch_halo_field(zgrid)
+ call MPAS_dmpar_exch_halo_field(zxu)
+ call MPAS_dmpar_exch_halo_field(zz)
+ call MPAS_dmpar_exch_halo_field(zb)
+ call MPAS_dmpar_exch_halo_field(zb3)
+
+ call MPAS_dmpar_exch_halo_field(deriv_two)
+ call MPAS_dmpar_exch_halo_field(cellTangentPlane)
+ call MPAS_dmpar_exch_halo_field(coeffs_reconstruct)
+
+ call MPAS_dmpar_exch_halo_field(edgeNormalVectors)
+ call MPAS_dmpar_exch_halo_field(localVerticalUnitVectors)
+ call MPAS_dmpar_exch_halo_field(defc_a)
+ call MPAS_dmpar_exch_halo_field(defc_b)
+
+ !
+ ! Re-index from global index space to local index space
+ !
+ ! NOTE(review): the config value (1) appears to serve only as a marker naming
+ ! which connectivity fields postread_reindex should convert -- confirm
+ call MPAS_pool_create_pool(reindexPool)
+
+ call MPAS_pool_add_config(reindexPool, 'cellsOnEdge', 1)
+ call MPAS_pool_add_config(reindexPool, 'edgesOnCell', 1)
+ call MPAS_pool_add_config(reindexPool, 'edgesOnEdge', 1)
+ call MPAS_pool_add_config(reindexPool, 'cellsOnCell', 1)
+ call MPAS_pool_add_config(reindexPool, 'verticesOnCell', 1)
+ call MPAS_pool_add_config(reindexPool, 'verticesOnEdge', 1)
+ call MPAS_pool_add_config(reindexPool, 'edgesOnVertex', 1)
+ call MPAS_pool_add_config(reindexPool, 'cellsOnVertex', 1)
+
+ call postread_reindex(meshPool, reindexPool)
+
+ call MPAS_pool_destroy_pool(reindexPool)
+
+ end subroutine cam_mpas_read_static
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_setup_restart
+ !
+ !> \brief Set up a restart stream, but do not read or write the stream
+ !> \author Michael Duda
+ !> \date 21 July 2020
+ !> \details
+ !> This routine prepares an MPAS stream with fields needed to restart
+ !> the MPAS-A dynamics. The stream will read or write from an existing
+ !> PIO file descriptor, and whether the stream is set up for reading
+ !> or writing depends on the direction argument, which must be set to
+ !> either MPAS_IO_READ or MPAS_IO_WRITE.
+ !>
+ !> This routine does not actually read or write the stream. A subsequent
+ !> call to either cam_mpas_read_restart or cam_mpas_write_restart must
+ !> be made to do this.
+ !
+ !-----------------------------------------------------------------------
+   subroutine cam_mpas_setup_restart(fh_rst, restart_stream, direction, endrun)
+
+      use pio, only : file_desc_t
+
+      use mpas_kind_types, only : StrKIND
+      use mpas_io_streams, only : MPAS_createStream, MPAS_streamAddField, MPAS_writeStreamAtt
+      use mpas_derived_types, only : MPAS_IO_NETCDF, MPAS_Stream_type, MPAS_pool_type, &
+                                     field0DReal, field1DReal, field2DReal, field3DReal, &
+                                     field1DInteger, field2DInteger, field0DChar, &
+                                     MPAS_IO_WRITE, MPAS_STREAM_NOERR
+      use mpas_pool_routines, only : MPAS_pool_get_field
+
+      ! Arguments
+      ! fh_rst         - open PIO file descriptor (owned by CAM) that backs the stream
+      ! restart_stream - stream object to be initialized; read/written by a later call
+      !                  to cam_mpas_read_restart or cam_mpas_write_restart
+      ! direction      - MPAS_IO_READ or MPAS_IO_WRITE
+      ! endrun         - CAM error-abort routine conforming to the halt_model interface
+      type (file_desc_t), intent(inout) :: fh_rst
+      type (MPAS_Stream_type), intent(inout) :: restart_stream
+      integer, intent(in) :: direction
+      procedure(halt_model) :: endrun
+
+      ! Local variables
+      character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_setup_restart'
+
+      character(len=StrKIND) :: errString
+
+      ! ierr_total accumulates the number of failed MPAS_streamAddField calls so that
+      ! a single fatal error can report how many field registrations failed.
+      integer :: ierr
+      integer :: ierr_total
+      ! Pointers to every field added to the restart stream; all are looked up by name
+      ! from the allFields pool of the (single) block in domain_ptr.
+      type (MPAS_pool_type), pointer :: allFields
+      type (field1DReal), pointer :: latCell, lonCell, xCell, yCell, zCell
+      type (field1DReal), pointer :: latEdge, lonEdge, xEdge, yEdge, zEdge
+      type (field1DReal), pointer :: latVertex, lonVertex, xVertex, yVertex, zVertex
+      type (field1DInteger), pointer :: indexToCellID, indexToEdgeID, indexToVertexID
+      type (field1DReal), pointer :: fEdge, fVertex
+      type (field1DReal), pointer :: areaCell, areaTriangle, dcEdge, dvEdge, angleEdge
+      type (field2DReal), pointer :: kiteAreasOnVertex, weightsOnEdge
+      type (field1DReal), pointer :: meshDensity
+      type (field1DInteger), pointer :: nEdgesOnCell, nEdgesOnEdge
+      type (field2DInteger), pointer :: cellsOnEdge, edgesOnCell, edgesOnEdge, cellsOnCell, verticesOnCell, &
+                                        verticesOnEdge, edgesOnVertex, cellsOnVertex
+      type (field0DReal), pointer :: cf1, cf2, cf3
+      type (field1DReal), pointer :: rdzw, dzu, rdzu, fzm, fzp
+      type (field2DReal), pointer :: zgrid, zxu, zz
+      type (field3DReal), pointer :: zb, zb3, deriv_two, cellTangentPlane, coeffs_reconstruct
+
+      type (field2DReal), pointer :: edgeNormalVectors, localVerticalUnitVectors, defc_a, defc_b
+
+      ! Time-dependent state fields (accessed at time level 1)
+      type (field0DChar), pointer :: initial_time
+      type (field0DChar), pointer :: xtime
+      type (field2DReal), pointer :: u
+      type (field2DReal), pointer :: w
+      type (field2DReal), pointer :: rho_zz
+      type (field2DReal), pointer :: theta_m
+      type (field3DReal), pointer :: scalars
+
+      ! Diagnostic / base-state / tendency fields
+      type (field1DReal), pointer :: meshScalingDel2
+      type (field1DReal), pointer :: meshScalingDel4
+      type (field2DReal), pointer :: dss
+      type (field2DReal), pointer :: east
+      type (field2DReal), pointer :: north
+      type (field2DReal), pointer :: pressure_p
+      type (field2DReal), pointer :: rho
+      type (field2DReal), pointer :: theta
+      type (field2DReal), pointer :: relhum
+      type (field2DReal), pointer :: uReconstructZonal
+      type (field2DReal), pointer :: uReconstructMeridional
+      type (field2DReal), pointer :: circulation
+      type (field2DReal), pointer :: exner
+      type (field2DReal), pointer :: exner_base
+      type (field2DReal), pointer :: rtheta_base
+      type (field2DReal), pointer :: pressure_base
+      type (field2DReal), pointer :: rho_base
+      type (field2DReal), pointer :: theta_base
+      type (field2DReal), pointer :: ru
+      type (field2DReal), pointer :: ru_p
+      type (field2DReal), pointer :: rw
+      type (field2DReal), pointer :: rw_p
+      type (field2DReal), pointer :: rtheta_p
+      type (field2DReal), pointer :: rho_p
+      type (field1DReal), pointer :: surface_pressure
+      type (field2DReal), pointer :: t_init
+
+      type (field1DReal), pointer :: u_init
+      type (field1DReal), pointer :: qv_init
+
+      type (field2DReal), pointer :: tend_ru_physics
+      type (field2DReal), pointer :: tend_rtheta_physics
+      type (field2DReal), pointer :: tend_rho_physics
+
+
+      !
+      ! Create a stream backed by CAM's already-open PIO file descriptor; the
+      ! filename argument is unused in this mode (hence 'not_used').
+      !
+      call MPAS_createStream(restart_stream, domain_ptr % ioContext, 'not_used', MPAS_IO_NETCDF, &
+                             direction, pio_file_desc=fh_rst, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) then
+         call endrun(subname//': FATAL: Failed to create restart stream.')
+      end if
+
+      allFields => domain_ptr % blocklist % allFields
+
+      !
+      ! Look up each field to be added to the stream from the allFields pool.
+      ! NOTE(review): pool lookups are assumed to succeed for all of these names;
+      ! a missing field would surface later as a failed MPAS_streamAddField call.
+      !
+      call mpas_pool_get_field(allFields, 'latCell', latCell)
+      call mpas_pool_get_field(allFields, 'lonCell', lonCell)
+      call mpas_pool_get_field(allFields, 'xCell', xCell)
+      call mpas_pool_get_field(allFields, 'yCell', yCell)
+      call mpas_pool_get_field(allFields, 'zCell', zCell)
+
+      call mpas_pool_get_field(allFields, 'latEdge', latEdge)
+      call mpas_pool_get_field(allFields, 'lonEdge', lonEdge)
+      call mpas_pool_get_field(allFields, 'xEdge', xEdge)
+      call mpas_pool_get_field(allFields, 'yEdge', yEdge)
+      call mpas_pool_get_field(allFields, 'zEdge', zEdge)
+
+      call mpas_pool_get_field(allFields, 'latVertex', latVertex)
+      call mpas_pool_get_field(allFields, 'lonVertex', lonVertex)
+      call mpas_pool_get_field(allFields, 'xVertex', xVertex)
+      call mpas_pool_get_field(allFields, 'yVertex', yVertex)
+      call mpas_pool_get_field(allFields, 'zVertex', zVertex)
+
+      call mpas_pool_get_field(allFields, 'indexToCellID', indexToCellID)
+      call mpas_pool_get_field(allFields, 'indexToEdgeID', indexToEdgeID)
+      call mpas_pool_get_field(allFields, 'indexToVertexID', indexToVertexID)
+
+      call mpas_pool_get_field(allFields, 'fEdge', fEdge)
+      call mpas_pool_get_field(allFields, 'fVertex', fVertex)
+
+      call mpas_pool_get_field(allFields, 'areaCell', areaCell)
+      call mpas_pool_get_field(allFields, 'areaTriangle', areaTriangle)
+      call mpas_pool_get_field(allFields, 'dcEdge', dcEdge)
+      call mpas_pool_get_field(allFields, 'dvEdge', dvEdge)
+      call mpas_pool_get_field(allFields, 'angleEdge', angleEdge)
+      call mpas_pool_get_field(allFields, 'kiteAreasOnVertex', kiteAreasOnVertex)
+      call mpas_pool_get_field(allFields, 'weightsOnEdge', weightsOnEdge)
+
+      call mpas_pool_get_field(allFields, 'meshDensity', meshDensity)
+
+      call mpas_pool_get_field(allFields, 'nEdgesOnCell', nEdgesOnCell)
+      call mpas_pool_get_field(allFields, 'nEdgesOnEdge', nEdgesOnEdge)
+
+      call mpas_pool_get_field(allFields, 'cellsOnEdge', cellsOnEdge)
+      call mpas_pool_get_field(allFields, 'edgesOnCell', edgesOnCell)
+      call mpas_pool_get_field(allFields, 'edgesOnEdge', edgesOnEdge)
+      call mpas_pool_get_field(allFields, 'cellsOnCell', cellsOnCell)
+      call mpas_pool_get_field(allFields, 'verticesOnCell', verticesOnCell)
+      call mpas_pool_get_field(allFields, 'verticesOnEdge', verticesOnEdge)
+      call mpas_pool_get_field(allFields, 'edgesOnVertex', edgesOnVertex)
+      call mpas_pool_get_field(allFields, 'cellsOnVertex', cellsOnVertex)
+
+      call mpas_pool_get_field(allFields, 'cf1', cf1)
+      call mpas_pool_get_field(allFields, 'cf2', cf2)
+      call mpas_pool_get_field(allFields, 'cf3', cf3)
+
+      call mpas_pool_get_field(allFields, 'rdzw', rdzw)
+      call mpas_pool_get_field(allFields, 'dzu', dzu)
+      call mpas_pool_get_field(allFields, 'rdzu', rdzu)
+      call mpas_pool_get_field(allFields, 'fzm', fzm)
+      call mpas_pool_get_field(allFields, 'fzp', fzp)
+
+      call mpas_pool_get_field(allFields, 'zgrid', zgrid)
+      call mpas_pool_get_field(allFields, 'zxu', zxu)
+      call mpas_pool_get_field(allFields, 'zz', zz)
+      call mpas_pool_get_field(allFields, 'zb', zb)
+      call mpas_pool_get_field(allFields, 'zb3', zb3)
+
+      call mpas_pool_get_field(allFields, 'deriv_two', deriv_two)
+      call mpas_pool_get_field(allFields, 'cellTangentPlane', cellTangentPlane)
+      call mpas_pool_get_field(allFields, 'coeffs_reconstruct', coeffs_reconstruct)
+
+      call mpas_pool_get_field(allFields, 'edgeNormalVectors', edgeNormalVectors)
+      call mpas_pool_get_field(allFields, 'localVerticalUnitVectors', localVerticalUnitVectors)
+      call mpas_pool_get_field(allFields, 'defc_a', defc_a)
+      call mpas_pool_get_field(allFields, 'defc_b', defc_b)
+
+      ! Time-dependent fields are registered at time level 1 only; the restart
+      ! file holds a single time level.
+      call mpas_pool_get_field(allFields, 'initial_time', initial_time, timeLevel=1)
+      call mpas_pool_get_field(allFields, 'xtime', xtime, timeLevel=1)
+      call mpas_pool_get_field(allFields, 'u', u, timeLevel=1)
+      call mpas_pool_get_field(allFields, 'w', w, timeLevel=1)
+      call mpas_pool_get_field(allFields, 'rho_zz', rho_zz, timeLevel=1)
+      call mpas_pool_get_field(allFields, 'theta_m', theta_m, timeLevel=1)
+      call mpas_pool_get_field(allFields, 'scalars', scalars, timeLevel=1)
+
+      call mpas_pool_get_field(allFields, 'meshScalingDel2', meshScalingDel2)
+      call mpas_pool_get_field(allFields, 'meshScalingDel4', meshScalingDel4)
+      call mpas_pool_get_field(allFields, 'dss', dss)
+      call mpas_pool_get_field(allFields, 'east', east)
+      call mpas_pool_get_field(allFields, 'north', north)
+      call mpas_pool_get_field(allFields, 'pressure_p', pressure_p)
+      call mpas_pool_get_field(allFields, 'rho', rho)
+      call mpas_pool_get_field(allFields, 'theta', theta)
+      call mpas_pool_get_field(allFields, 'relhum', relhum)
+      call mpas_pool_get_field(allFields, 'uReconstructZonal', uReconstructZonal)
+      call mpas_pool_get_field(allFields, 'uReconstructMeridional', uReconstructMeridional)
+      call mpas_pool_get_field(allFields, 'circulation', circulation)
+      call mpas_pool_get_field(allFields, 'exner', exner)
+      call mpas_pool_get_field(allFields, 'exner_base', exner_base)
+      call mpas_pool_get_field(allFields, 'rtheta_base', rtheta_base)
+      call mpas_pool_get_field(allFields, 'pressure_base', pressure_base)
+      call mpas_pool_get_field(allFields, 'rho_base', rho_base)
+      call mpas_pool_get_field(allFields, 'theta_base', theta_base)
+      call mpas_pool_get_field(allFields, 'ru', ru)
+      call mpas_pool_get_field(allFields, 'ru_p', ru_p)
+      call mpas_pool_get_field(allFields, 'rw', rw)
+      call mpas_pool_get_field(allFields, 'rw_p', rw_p)
+      call mpas_pool_get_field(allFields, 'rtheta_p', rtheta_p)
+      call mpas_pool_get_field(allFields, 'rho_p', rho_p)
+      call mpas_pool_get_field(allFields, 'surface_pressure', surface_pressure)
+      call mpas_pool_get_field(allFields, 't_init', t_init)
+
+      call mpas_pool_get_field(allFields, 'u_init', u_init)
+      call mpas_pool_get_field(allFields, 'qv_init', qv_init)
+
+      call mpas_pool_get_field(allFields, 'tend_ru_physics', tend_ru_physics)
+      call mpas_pool_get_field(allFields, 'tend_rtheta_physics', tend_rtheta_physics)
+      call mpas_pool_get_field(allFields, 'tend_rho_physics', tend_rho_physics)
+
+      !
+      ! Add every field to the stream, counting failures rather than aborting on
+      ! the first one, so a single error message can report the total below.
+      !
+      ierr_total = 0
+
+      call MPAS_streamAddField(restart_stream, latCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, lonCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, xCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, yCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, zCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, latEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, lonEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, xEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, yEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, zEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, latVertex, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, lonVertex, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, xVertex, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, yVertex, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, zVertex, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, indexToCellID, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, indexToEdgeID, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, indexToVertexID, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, fEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, fVertex, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, areaCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, areaTriangle, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, dcEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, dvEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, angleEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, kiteAreasOnVertex, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, weightsOnEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, meshDensity, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, nEdgesOnCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, nEdgesOnEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, cellsOnEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, edgesOnCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, edgesOnEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, cellsOnCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, verticesOnCell, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, verticesOnEdge, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, edgesOnVertex, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, cellsOnVertex, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, cf1, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, cf2, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, cf3, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, rdzw, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, dzu, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, rdzu, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, fzm, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, fzp, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, zgrid, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, zxu, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, zz, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, zb, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, zb3, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, deriv_two, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, cellTangentPlane, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, coeffs_reconstruct, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, edgeNormalVectors, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, localVerticalUnitVectors, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, defc_a, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, defc_b, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, initial_time, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, xtime, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, u, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, w, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, rho_zz, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, theta_m, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, scalars, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, meshScalingDel2, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, meshScalingDel4, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, dss, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, east, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, north, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, pressure_p, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, rho, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, theta, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, relhum, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, uReconstructZonal, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, uReconstructMeridional, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, circulation, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, exner, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, exner_base, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, rtheta_base, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, pressure_base, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, rho_base, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, theta_base, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, ru, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, ru_p, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, rw, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, rw_p, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, rtheta_p, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, rho_p, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, surface_pressure, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, t_init, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, u_init, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, qv_init, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      call MPAS_streamAddField(restart_stream, tend_ru_physics, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, tend_rtheta_physics, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+      call MPAS_streamAddField(restart_stream, tend_rho_physics, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) ierr_total = ierr_total + 1
+
+      if (ierr_total > 0) then
+         write(errString, '(a,i0,a)') subname//': FATAL: Failed to add ', ierr_total, ' fields to restart stream.'
+         call endrun(trim(errString))
+      end if
+
+      if (direction == MPAS_IO_WRITE) then
+         !
+         ! Add global attributes to the stream
+         ! (only meaningful when writing; attributes are read-only metadata
+         ! describing the mesh: sphericity, radius, periodicity, provenance)
+         !
+         if (domain_ptr % on_a_sphere) then
+            call MPAS_writeStreamAtt(restart_stream, 'on_a_sphere', 'YES')
+         else
+            call MPAS_writeStreamAtt(restart_stream, 'on_a_sphere', 'NO')
+         end if
+         call MPAS_writeStreamAtt(restart_stream, 'sphere_radius', domain_ptr % sphere_radius)
+         if (domain_ptr % is_periodic) then
+            call MPAS_writeStreamAtt(restart_stream, 'is_periodic', 'YES')
+         else
+            call MPAS_writeStreamAtt(restart_stream, 'is_periodic', 'NO')
+         end if
+         call MPAS_writeStreamAtt(restart_stream, 'x_period', domain_ptr % x_period)
+         call MPAS_writeStreamAtt(restart_stream, 'y_period', domain_ptr % y_period)
+         call MPAS_writeStreamAtt(restart_stream, 'parent_id', domain_ptr % parent_id)
+         call MPAS_writeStreamAtt(restart_stream, 'mesh_spec', domain_ptr % mesh_spec)
+      end if
+
+   end subroutine cam_mpas_setup_restart
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_read_restart
+ !
+ !> \brief Reads a restart stream that was previously set up
+ !> \author Michael Duda
+ !> \date 22 July 2020
+ !> \details
+ !> From a restart stream previously set up with a call to
+ !> cam_mpas_setup_restart, read the stream, update halos for all fields
+ !> that were read, and re-index mesh indexing fields (cellsOnCell,
+ !> edgesOnCell, etc.) from global to local index space.
+ !
+ !-----------------------------------------------------------------------
+   subroutine cam_mpas_read_restart(restart_stream, endrun)
+
+      ! NOTE: unlike cam_mpas_setup_restart, this routine takes no PIO file
+      ! descriptor, so the unused 'use pio, only : file_desc_t' has been removed.
+      use mpas_io_streams, only : MPAS_readStream, MPAS_closeStream
+      use mpas_derived_types, only : MPAS_Stream_type, MPAS_pool_type, MPAS_STREAM_NOERR
+      use mpas_pool_routines, only : MPAS_pool_create_pool, MPAS_pool_destroy_pool, MPAS_pool_add_config
+      use mpas_stream_manager, only : postread_reindex
+
+      ! Arguments
+      ! restart_stream - stream previously prepared by cam_mpas_setup_restart
+      !                  with direction=MPAS_IO_READ
+      ! endrun         - CAM error-abort routine conforming to the halt_model interface
+      type (MPAS_Stream_type), intent(inout) :: restart_stream
+      procedure(halt_model) :: endrun
+
+      ! Local variables
+      character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_read_restart'
+
+      integer :: ierr
+      ! Pool listing, by name, the connectivity fields to re-index after reading
+      type (MPAS_pool_type), pointer :: reindexPool
+
+      ! Read all registered fields at frame 1, then close the stream; the
+      ! underlying PIO file descriptor remains owned (and closed) by CAM.
+      call MPAS_readStream(restart_stream, 1, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) then
+         call endrun(subname//': FATAL: Failed to read restart stream.')
+      end if
+
+      call MPAS_closeStream(restart_stream, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) then
+         call endrun(subname//': FATAL: Failed to close restart stream.')
+      end if
+
+      !
+      ! Perform halo updates for all decomposed fields (i.e., fields with
+      ! an outermost dimension of nCells, nVertices, or nEdges), since I/O
+      ! fills only locally owned elements, not halo elements.
+      !
+      call cam_mpas_update_halo('latCell', endrun)
+      call cam_mpas_update_halo('lonCell', endrun)
+      call cam_mpas_update_halo('xCell', endrun)
+      call cam_mpas_update_halo('yCell', endrun)
+      call cam_mpas_update_halo('zCell', endrun)
+
+      call cam_mpas_update_halo('latEdge', endrun)
+      call cam_mpas_update_halo('lonEdge', endrun)
+      call cam_mpas_update_halo('xEdge', endrun)
+      call cam_mpas_update_halo('yEdge', endrun)
+      call cam_mpas_update_halo('zEdge', endrun)
+
+      call cam_mpas_update_halo('latVertex', endrun)
+      call cam_mpas_update_halo('lonVertex', endrun)
+      call cam_mpas_update_halo('xVertex', endrun)
+      call cam_mpas_update_halo('yVertex', endrun)
+      call cam_mpas_update_halo('zVertex', endrun)
+
+      call cam_mpas_update_halo('indexToCellID', endrun)
+      call cam_mpas_update_halo('indexToEdgeID', endrun)
+      call cam_mpas_update_halo('indexToVertexID', endrun)
+
+      call cam_mpas_update_halo('fEdge', endrun)
+      call cam_mpas_update_halo('fVertex', endrun)
+
+      call cam_mpas_update_halo('areaCell', endrun)
+      call cam_mpas_update_halo('areaTriangle', endrun)
+      call cam_mpas_update_halo('dcEdge', endrun)
+      call cam_mpas_update_halo('dvEdge', endrun)
+      call cam_mpas_update_halo('angleEdge', endrun)
+      call cam_mpas_update_halo('kiteAreasOnVertex', endrun)
+      call cam_mpas_update_halo('weightsOnEdge', endrun)
+
+      call cam_mpas_update_halo('meshDensity', endrun)
+
+      call cam_mpas_update_halo('nEdgesOnCell', endrun)
+      call cam_mpas_update_halo('nEdgesOnEdge', endrun)
+
+      ! Connectivity fields are exchanged while still in global index space;
+      ! they are converted to local index space by postread_reindex below.
+      call cam_mpas_update_halo('cellsOnEdge', endrun)
+      call cam_mpas_update_halo('edgesOnCell', endrun)
+      call cam_mpas_update_halo('edgesOnEdge', endrun)
+      call cam_mpas_update_halo('cellsOnCell', endrun)
+      call cam_mpas_update_halo('verticesOnCell', endrun)
+      call cam_mpas_update_halo('verticesOnEdge', endrun)
+      call cam_mpas_update_halo('edgesOnVertex', endrun)
+      call cam_mpas_update_halo('cellsOnVertex', endrun)
+
+      call cam_mpas_update_halo('zgrid', endrun)
+      call cam_mpas_update_halo('zxu', endrun)
+      call cam_mpas_update_halo('zz', endrun)
+      call cam_mpas_update_halo('zb', endrun)
+      call cam_mpas_update_halo('zb3', endrun)
+
+      call cam_mpas_update_halo('deriv_two', endrun)
+      call cam_mpas_update_halo('cellTangentPlane', endrun)
+      call cam_mpas_update_halo('coeffs_reconstruct', endrun)
+
+      call cam_mpas_update_halo('edgeNormalVectors', endrun)
+      call cam_mpas_update_halo('localVerticalUnitVectors', endrun)
+      call cam_mpas_update_halo('defc_a', endrun)
+      call cam_mpas_update_halo('defc_b', endrun)
+
+      ! Prognostic state
+      call cam_mpas_update_halo('u', endrun)
+      call cam_mpas_update_halo('w', endrun)
+      call cam_mpas_update_halo('rho_zz', endrun)
+      call cam_mpas_update_halo('theta_m', endrun)
+      call cam_mpas_update_halo('scalars', endrun)
+
+      ! Diagnostic and base-state fields
+      ! (u_init/qv_init are intentionally absent here: they are not decomposed
+      ! across cells/edges/vertices, so no halo exchange applies to them)
+      call cam_mpas_update_halo('meshScalingDel2', endrun)
+      call cam_mpas_update_halo('meshScalingDel4', endrun)
+      call cam_mpas_update_halo('dss', endrun)
+      call cam_mpas_update_halo('east', endrun)
+      call cam_mpas_update_halo('north', endrun)
+      call cam_mpas_update_halo('pressure_p', endrun)
+      call cam_mpas_update_halo('rho', endrun)
+      call cam_mpas_update_halo('theta', endrun)
+      call cam_mpas_update_halo('relhum', endrun)
+      call cam_mpas_update_halo('uReconstructZonal', endrun)
+      call cam_mpas_update_halo('uReconstructMeridional', endrun)
+      call cam_mpas_update_halo('circulation', endrun)
+      call cam_mpas_update_halo('exner', endrun)
+      call cam_mpas_update_halo('exner_base', endrun)
+      call cam_mpas_update_halo('rtheta_base', endrun)
+      call cam_mpas_update_halo('pressure_base', endrun)
+      call cam_mpas_update_halo('rho_base', endrun)
+      call cam_mpas_update_halo('theta_base', endrun)
+      call cam_mpas_update_halo('ru', endrun)
+      call cam_mpas_update_halo('ru_p', endrun)
+      call cam_mpas_update_halo('rw', endrun)
+      call cam_mpas_update_halo('rw_p', endrun)
+      call cam_mpas_update_halo('rtheta_p', endrun)
+      call cam_mpas_update_halo('rho_p', endrun)
+      call cam_mpas_update_halo('surface_pressure', endrun)
+      call cam_mpas_update_halo('t_init', endrun)
+
+      call cam_mpas_update_halo('tend_ru_physics', endrun)
+      call cam_mpas_update_halo('tend_rtheta_physics', endrun)
+      call cam_mpas_update_halo('tend_rho_physics', endrun)
+
+      !
+      ! Re-index from global index space to local index space
+      !
+      call MPAS_pool_create_pool(reindexPool)
+
+      call MPAS_pool_add_config(reindexPool, 'cellsOnEdge', 1)
+      call MPAS_pool_add_config(reindexPool, 'edgesOnCell', 1)
+      call MPAS_pool_add_config(reindexPool, 'edgesOnEdge', 1)
+      call MPAS_pool_add_config(reindexPool, 'cellsOnCell', 1)
+      call MPAS_pool_add_config(reindexPool, 'verticesOnCell', 1)
+      call MPAS_pool_add_config(reindexPool, 'verticesOnEdge', 1)
+      call MPAS_pool_add_config(reindexPool, 'edgesOnVertex', 1)
+      call MPAS_pool_add_config(reindexPool, 'cellsOnVertex', 1)
+
+      call postread_reindex(domain_ptr % blocklist % allFields, reindexPool)
+
+      call MPAS_pool_destroy_pool(reindexPool)
+
+   end subroutine cam_mpas_read_restart
+
+
+ !-----------------------------------------------------------------------
+ ! routine cam_mpas_write_restart
+ !
+ !> \brief Writes a restart stream that was previously set up
+ !> \author Michael Duda
+ !> \date 22 July 2020
+ !> \details
+ !> From a restart stream previously set up with a call to
+ !> cam_mpas_setup_restart, re-index mesh indexing fields (cellsOnCell,
+ !> edgesOnCell, etc.) from local to global index space, and write
+ !> the stream.
+ !
+ !-----------------------------------------------------------------------
+   subroutine cam_mpas_write_restart(restart_stream, endrun)
+
+      ! NOTE: unlike cam_mpas_setup_restart, this routine takes no PIO file
+      ! descriptor, so the unused 'use pio, only : file_desc_t' has been removed.
+      use mpas_io_streams, only : MPAS_writeStream, MPAS_closeStream
+      use mpas_derived_types, only : MPAS_Stream_type, MPAS_pool_type, MPAS_STREAM_NOERR
+      use mpas_pool_routines, only : MPAS_pool_create_pool, MPAS_pool_destroy_pool, MPAS_pool_add_config
+      use mpas_stream_manager, only : prewrite_reindex, postwrite_reindex
+
+      ! Arguments
+      ! restart_stream - stream previously prepared by cam_mpas_setup_restart
+      !                  with direction=MPAS_IO_WRITE
+      ! endrun         - CAM error-abort routine conforming to the halt_model interface
+      type (MPAS_Stream_type), intent(inout) :: restart_stream
+      procedure(halt_model) :: endrun
+
+      ! Local variables
+      character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_write_restart'
+
+      integer :: ierr
+      ! Pool listing, by name, the connectivity fields to re-index around the write
+      type (MPAS_pool_type), pointer :: reindexPool
+
+      !
+      ! Re-index from local index space to global index space
+      ! (connectivity must be stored in the file using global indices so the
+      ! restart is valid for any task count; postwrite_reindex restores the
+      ! in-memory local indexing afterwards)
+      !
+      call MPAS_pool_create_pool(reindexPool)
+
+      call MPAS_pool_add_config(reindexPool, 'cellsOnEdge', 1)
+      call MPAS_pool_add_config(reindexPool, 'edgesOnCell', 1)
+      call MPAS_pool_add_config(reindexPool, 'edgesOnEdge', 1)
+      call MPAS_pool_add_config(reindexPool, 'cellsOnCell', 1)
+      call MPAS_pool_add_config(reindexPool, 'verticesOnCell', 1)
+      call MPAS_pool_add_config(reindexPool, 'verticesOnEdge', 1)
+      call MPAS_pool_add_config(reindexPool, 'edgesOnVertex', 1)
+      call MPAS_pool_add_config(reindexPool, 'cellsOnVertex', 1)
+
+      call prewrite_reindex(domain_ptr % blocklist % allFields, reindexPool)
+
+      ! Write all registered fields at frame 1
+      call MPAS_writeStream(restart_stream, 1, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) then
+         call endrun(subname//': FATAL: Failed to write restart stream.')
+      end if
+
+      ! Restore local index space before the pool is discarded
+      call postwrite_reindex(domain_ptr % blocklist % allFields, reindexPool)
+
+      call MPAS_pool_destroy_pool(reindexPool)
+
+      ! Close the stream; the underlying PIO file descriptor remains owned by CAM
+      call MPAS_closeStream(restart_stream, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) then
+         call endrun(subname//': FATAL: Failed to close restart stream.')
+      end if
+
+   end subroutine cam_mpas_write_restart
+
+
+   !-----------------------------------------------------------------------
+   ! routine cam_mpas_compute_unit_vectors
+   !
+   !> \brief Computes local unit north, east, and edge-normal vectors
+   !> \author Michael Duda
+   !> \date 15 January 2020
+   !> \details
+   !> This routine computes the local unit north and east vectors at all cell
+   !> centers, storing the resulting fields in the mesh pool as 'north' and
+   !> 'east'. It also computes the edge-normal unit vectors by calling
+   !> the mpas_initialize_vectors routine. Before this routine is called,
+   !> the mesh pool must contain 'latCell' and 'lonCell' fields that are valid
+   !> for all cells (not just solve cells), plus any fields that are required
+   !> by the mpas_initialize_vectors routine.
+   !
+   !-----------------------------------------------------------------------
+   subroutine cam_mpas_compute_unit_vectors()
+
+      use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_dimension, mpas_pool_get_array
+      use mpas_derived_types, only : mpas_pool_type
+      use mpas_kind_types, only : RKIND
+      use mpas_vector_operations, only : mpas_initialize_vectors
+
+      type (mpas_pool_type), pointer :: meshPool
+      real(kind=RKIND), dimension(:), pointer :: latCell, lonCell   ! cell-center coordinates [radians]
+      real(kind=RKIND), dimension(:,:), pointer :: east, north      ! (3,nCells) Cartesian unit vectors
+      integer, pointer :: nCells
+      integer :: iCell
+
+      call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', meshPool)
+      call mpas_pool_get_dimension(meshPool, 'nCells', nCells)
+      call mpas_pool_get_array(meshPool, 'latCell', latCell)
+      call mpas_pool_get_array(meshPool, 'lonCell', lonCell)
+      call mpas_pool_get_array(meshPool, 'east', east)
+      call mpas_pool_get_array(meshPool, 'north', north)
+
+      do iCell = 1, nCells
+
+         ! Unit vector pointing locally eastward (tangent to the latitude circle)
+         east(1,iCell) = -sin(lonCell(iCell))
+         east(2,iCell) = cos(lonCell(iCell))
+         east(3,iCell) = 0.0_RKIND   ! RKIND literal for kind consistency with the array
+
+         ! Normalize (defensive: the analytic expressions are already unit length)
+         east(1:3,iCell) = east(1:3,iCell) / sqrt(sum(east(1:3,iCell) * east(1:3,iCell)))
+
+         ! Unit vector pointing locally northward
+         north(1,iCell) = -cos(lonCell(iCell))*sin(latCell(iCell))
+         north(2,iCell) = -sin(lonCell(iCell))*sin(latCell(iCell))
+         north(3,iCell) = cos(latCell(iCell))
+
+         ! Normalize
+         north(1:3,iCell) = north(1:3,iCell) / sqrt(sum(north(1:3,iCell) * north(1:3,iCell)))
+
+      end do
+
+      ! Edge-normal unit vectors are derived from the east/north fields just set
+      call mpas_initialize_vectors(meshPool)
+
+   end subroutine cam_mpas_compute_unit_vectors
+
+
+   !-----------------------------------------------------------------------
+   ! routine cam_mpas_update_halo
+   !
+   !> \brief Updates the halo of the named field
+   !> \author Michael Duda
+   !> \date 16 January 2020
+   !> \details
+   !> Given the name of a field that is defined in the MPAS Registry.xml file,
+   !> this routine updates the halo for that field. The field's type and
+   !> dimensionality (from the pool metadata) select which specifically-typed
+   !> pointer is handed to the generic halo-exchange interface; unsupported
+   !> combinations abort via endrun.
+   !
+   !-----------------------------------------------------------------------
+   subroutine cam_mpas_update_halo(fieldName, endrun)
+
+      use mpas_derived_types, only : field1DReal, field2DReal, field3DReal, field4DReal, field5DReal, &
+                                     field1DInteger, field2DInteger, field3DInteger, &
+                                     mpas_pool_field_info_type, MPAS_POOL_REAL, MPAS_POOL_INTEGER
+      use mpas_pool_routines, only : MPAS_pool_get_field_info, MPAS_pool_get_field
+      use mpas_dmpar, only : MPAS_dmpar_exch_halo_field
+      use mpas_kind_types, only : StrKIND
+
+      ! Arguments
+      character(len=*), intent(in) :: fieldName   ! field name as declared in Registry.xml
+      procedure(halt_model) :: endrun             ! CAM abort routine; does not return
+
+      ! Local variables
+      character(len=*), parameter :: subname = 'cam_mpas_subdriver::cam_mpas_update_halo'
+
+      character(len=StrKIND) :: errString
+
+      ! NOTE(review): behavior when fieldName is absent from the pool depends on
+      ! what MPAS_pool_get_field_info leaves in fieldInfo -- confirm it reports
+      ! a sentinel rather than garbage.
+      type (mpas_pool_field_info_type) :: fieldInfo
+      type (field1DReal), pointer :: field_real1d
+      type (field2DReal), pointer :: field_real2d
+      type (field3DReal), pointer :: field_real3d
+      type (field4DReal), pointer :: field_real4d
+      type (field5DReal), pointer :: field_real5d
+      type (field1DInteger), pointer :: field_int1d
+      type (field2DInteger), pointer :: field_int2d
+      type (field3DInteger), pointer :: field_int3d
+
+
+      call MPAS_pool_get_field_info(domain_ptr % blocklist % allFields, trim(fieldName), fieldInfo)
+
+      ! Real-valued fields: 1-d through 5-d are supported
+      if (fieldInfo % fieldType == MPAS_POOL_REAL) then
+         if (fieldInfo % nDims == 1) then
+            nullify(field_real1d)
+            call MPAS_pool_get_field(domain_ptr % blocklist % allFields, trim(fieldName), field_real1d)
+            if (associated(field_real1d)) then
+               call MPAS_dmpar_exch_halo_field(field_real1d)
+            end if
+         else if (fieldInfo % nDims == 2) then
+            nullify(field_real2d)
+            call MPAS_pool_get_field(domain_ptr % blocklist % allFields, trim(fieldName), field_real2d)
+            if (associated(field_real2d)) then
+               call MPAS_dmpar_exch_halo_field(field_real2d)
+            end if
+         else if (fieldInfo % nDims == 3) then
+            nullify(field_real3d)
+            call MPAS_pool_get_field(domain_ptr % blocklist % allFields, trim(fieldName), field_real3d)
+            if (associated(field_real3d)) then
+               call MPAS_dmpar_exch_halo_field(field_real3d)
+            end if
+         else if (fieldInfo % nDims == 4) then
+            nullify(field_real4d)
+            call MPAS_pool_get_field(domain_ptr % blocklist % allFields, trim(fieldName), field_real4d)
+            if (associated(field_real4d)) then
+               call MPAS_dmpar_exch_halo_field(field_real4d)
+            end if
+         else if (fieldInfo % nDims == 5) then
+            nullify(field_real5d)
+            call MPAS_pool_get_field(domain_ptr % blocklist % allFields, trim(fieldName), field_real5d)
+            if (associated(field_real5d)) then
+               call MPAS_dmpar_exch_halo_field(field_real5d)
+            end if
+         else
+            write(errString, '(a,i0,a)') subname//': FATAL: Unhandled dimensionality ', &
+                                         fieldInfo % nDims, ' for real-valued field'
+            call endrun(trim(errString))
+         end if
+      ! Integer-valued fields: 1-d through 3-d are supported
+      else if (fieldInfo % fieldType == MPAS_POOL_INTEGER) then
+         if (fieldInfo % nDims == 1) then
+            nullify(field_int1d)
+            call MPAS_pool_get_field(domain_ptr % blocklist % allFields, trim(fieldName), field_int1d)
+            if (associated(field_int1d)) then
+               call MPAS_dmpar_exch_halo_field(field_int1d)
+            end if
+         else if (fieldInfo % nDims == 2) then
+            nullify(field_int2d)
+            call MPAS_pool_get_field(domain_ptr % blocklist % allFields, trim(fieldName), field_int2d)
+            if (associated(field_int2d)) then
+               call MPAS_dmpar_exch_halo_field(field_int2d)
+            end if
+         else if (fieldInfo % nDims == 3) then
+            nullify(field_int3d)
+            call MPAS_pool_get_field(domain_ptr % blocklist % allFields, trim(fieldName), field_int3d)
+            if (associated(field_int3d)) then
+               call MPAS_dmpar_exch_halo_field(field_int3d)
+            end if
+         else
+            write(errString, '(a,i0,a)') subname//': FATAL: Unhandled dimensionality ', &
+                                         fieldInfo % nDims, ' for integer-valued field'
+            call endrun(trim(errString))
+         end if
+      else
+         ! Format fixed: previously '(a,i0,a)' although only two items are output
+         write(errString, '(a,i0)') subname//': FATAL: Unhandled field type ', fieldInfo % fieldType
+         call endrun(trim(errString))
+      end if
+
+   end subroutine cam_mpas_update_halo
+
+
+   !-----------------------------------------------------------------------
+   ! routine cam_mpas_cell_to_edge_winds
+   !
+   !> \brief Projects cell-centered winds to the normal component of velocity on edges
+   !> \author Michael Duda
+   !> \date 16 January 2020
+   !> \details
+   !> For each edge, the zonal and meridional winds at the two neighboring
+   !> cell centers are resolved along the edge-normal direction (using the
+   !> unit east and north vectors at the cells and the unit normal vector
+   !> at the edge) and averaged.
+   !>
+   !> Prior to calling this routine, the halos for the zonal and meridional
+   !> components of cell-centered winds should be updated. It is also critical
+   !> that the east, north, uZonal, and uMerid fields are all allocated with
+   !> a "garbage" element; this is handled automatically for fields allocated
+   !> by the MPAS infrastructure.
+   !
+   !-----------------------------------------------------------------------
+   subroutine cam_mpas_cell_to_edge_winds(nEdges, uZonal, uMerid, east, north, edgeNormalVectors, &
+                                          cellsOnEdge, uNormal)
+
+      use mpas_kind_types, only : RKIND
+
+      integer, intent(in) :: nEdges
+      real(kind=RKIND), dimension(:,:), intent(in) :: uZonal, uMerid
+      real(kind=RKIND), dimension(:,:), intent(in) :: east, north, edgeNormalVectors
+      integer, dimension(:,:), intent(in) :: cellsOnEdge
+      real(kind=RKIND), dimension(:,:), intent(out) :: uNormal
+
+      integer :: iEdge, cell1, cell2
+      real(kind=RKIND) :: normalDotEast1, normalDotNorth1, normalDotEast2, normalDotNorth2
+
+      do iEdge = 1, nEdges
+         cell1 = cellsOnEdge(1,iEdge)
+         cell2 = cellsOnEdge(2,iEdge)
+
+         ! Projections of the edge-normal unit vector onto the local east and
+         ! north unit vectors at each of the two neighboring cell centers
+         normalDotEast1 = edgeNormalVectors(1,iEdge)*east(1,cell1) &
+                        + edgeNormalVectors(2,iEdge)*east(2,cell1) &
+                        + edgeNormalVectors(3,iEdge)*east(3,cell1)
+         normalDotNorth1 = edgeNormalVectors(1,iEdge)*north(1,cell1) &
+                         + edgeNormalVectors(2,iEdge)*north(2,cell1) &
+                         + edgeNormalVectors(3,iEdge)*north(3,cell1)
+         normalDotEast2 = edgeNormalVectors(1,iEdge)*east(1,cell2) &
+                        + edgeNormalVectors(2,iEdge)*east(2,cell2) &
+                        + edgeNormalVectors(3,iEdge)*east(3,cell2)
+         normalDotNorth2 = edgeNormalVectors(1,iEdge)*north(1,cell2) &
+                         + edgeNormalVectors(2,iEdge)*north(2,cell2) &
+                         + edgeNormalVectors(3,iEdge)*north(3,cell2)
+
+         ! Equal-weight (0.5/0.5) average of the two cells' winds along the normal,
+         ! applied over all vertical levels at once
+         uNormal(:,iEdge) = uZonal(:,cell1)*0.5_RKIND*normalDotEast1 &
+                          + uMerid(:,cell1)*0.5_RKIND*normalDotNorth1 &
+                          + uZonal(:,cell2)*0.5_RKIND*normalDotEast2 &
+                          + uMerid(:,cell2)*0.5_RKIND*normalDotNorth2
+      end do
+
+   end subroutine cam_mpas_cell_to_edge_winds
+
+
+   !-----------------------------------------------------------------------
+   ! routine cam_mpas_run
+   !
+   !> \brief Integrate dynamical state for the specified length of time
+   !> \author Michael Duda
+   !> \date 29 February 2020
+   !> \details
+   !> This routine calls the dynamical solver in a loop, with each iteration
+   !> of the loop stepping the dynamical state forward by one dynamics
+   !> time step and stopping after the state has been advanced by the time
+   !> interval specified by the integrationLength argument.
+   !
+   !-----------------------------------------------------------------------
+   subroutine cam_mpas_run(integrationLength)
+
+      use atm_core, only : atm_do_timestep
+      use mpas_derived_types, only : MPAS_Time_type, MPAS_TimeInterval_type, mpas_pool_type
+      use mpas_kind_types, only : StrKIND, RKIND
+      use mpas_log, only : mpas_log_write
+      use mpas_pool_routines, only : mpas_pool_get_config, mpas_pool_get_dimension, mpas_pool_get_array, &
+                                     mpas_pool_get_subpool, mpas_pool_shift_time_levels
+      use mpas_timekeeping, only : mpas_advance_clock, mpas_get_clock_time, mpas_get_time, MPAS_NOW, &
+                                   operator(.lt.), operator(+)
+      use mpas_timer, only : mpas_timer_start, mpas_timer_stop
+      use mpas_constants, only : Rv_over_Rd => rvord
+
+      ! Arguments
+      type (MPAS_TimeInterval_type), intent(in) :: integrationLength   ! interval to advance the dynamics
+
+      ! Local variables
+      integer :: ierr
+
+      real (kind=RKIND), pointer :: dt
+      type (MPAS_Time_Type) :: currTime
+      type (MPAS_Time_type) :: runUntilTime
+      character(len=StrKIND) :: timeStamp
+      type (mpas_pool_type), pointer :: state, diag, mesh
+
+      integer, pointer :: index_qv
+      integer, pointer :: nCellsSolve
+      real(kind=RKIND), dimension(:,:), pointer :: theta_m, rho_zz, zz, theta, rho
+      real(kind=RKIND), dimension(:,:,:), pointer :: scalars
+
+      ! Dynamics step counter; saved so successive calls present MPAS with
+      ! a monotonically increasing step number over the whole run
+      integer, save :: itimestep = 1
+
+      ! Eventually, dt should be domain specific
+      call mpas_pool_get_config(domain_ptr % blocklist % configs, 'config_dt', dt)
+
+      call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'state', state)
+      call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'diag', diag)
+      call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', mesh)
+
+      ! During integration, time level 1 stores the model state at the beginning of the
+      ! time step, and time level 2 stores the state advanced dt in time by timestep(...)
+
+      currTime = mpas_get_clock_time(clock, MPAS_NOW, ierr)
+      runUntilTime = currTime + integrationLength
+
+      ! Step the dycore one dt at a time until the coupling interval is covered;
+      ! the comparison uses the imported operator(.lt.) for MPAS_Time_type
+      do while (currTime < runUntilTime)
+         call mpas_get_time(curr_time=currTime, dateTimeString=timeStamp, ierr=ierr)
+         call mpas_log_write('Dynamics timestep beginning at '//trim(timeStamp))
+
+         call mpas_timer_start('time integration')
+         call atm_do_timestep(domain_ptr, dt, itimestep)
+         call mpas_timer_stop('time integration')
+
+         ! Move time level 2 fields back into time level 1 for next time step
+         ! (the 'state' subpool pointer is re-acquired; harmless as it does not move)
+         call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'state', state)
+         call mpas_pool_shift_time_levels(state)
+
+         ! Advance clock before writing output
+         itimestep = itimestep + 1
+         call mpas_advance_clock(clock)
+         currTime = mpas_get_clock_time(clock, MPAS_NOW, ierr)
+      end do
+
+      !
+      ! Compute diagnostic fields from the final prognostic state
+      !
+      call mpas_pool_get_dimension(state, 'index_qv', index_qv)
+      call mpas_pool_get_dimension(state, 'nCellsSolve', nCellsSolve)
+      call mpas_pool_get_array(state, 'theta_m', theta_m, timeLevel=1)
+      call mpas_pool_get_array(state, 'rho_zz', rho_zz, timeLevel=1)
+      call mpas_pool_get_array(state, 'scalars', scalars, timeLevel=1)
+      call mpas_pool_get_array(mesh, 'zz', zz)
+      call mpas_pool_get_array(diag, 'theta', theta)
+      call mpas_pool_get_array(diag, 'rho', rho)
+
+      ! Recover dry density (rho = rho_zz * zz) and dry potential temperature
+      ! (theta = theta_m / (1 + Rv/Rd * qv)) on owned cells only; halo cells
+      ! of the diagnostic fields are not updated here
+      rho(:,1:nCellsSolve) = rho_zz(:,1:nCellsSolve) * zz(:,1:nCellsSolve)
+      theta(:,1:nCellsSolve) = theta_m(:,1:nCellsSolve) / (1.0_RKIND + Rv_over_Rd * scalars(index_qv,:,1:nCellsSolve))
+
+   end subroutine cam_mpas_run
+
+
+   !-----------------------------------------------------------------------
+   ! routine cam_mpas_finalize
+   !
+   !> \brief Finalize the MPAS core and infrastructure
+   !> \author Michael Duda
+   !> \date 29 February 2020
+   !> \details
+   !> This routine finalizes the MPAS-A dycore and any infrastructure that
+   !> was set-up during the simulation. The work here mirrors that done in
+   !> mpas_atm_core.F::atm_core_finalize(), except there is no need to finalize
+   !> the MPAS-A diagnostics framework or stand-alone physics modules.
+   !
+   !-----------------------------------------------------------------------
+   subroutine cam_mpas_finalize()
+
+      use mpas_decomp, only : mpas_decomp_destroy_decomp_list
+      use mpas_timekeeping, only : mpas_destroy_clock
+      use mpas_atm_threading, only : mpas_atm_threading_finalize
+
+      integer :: ierr
+
+      ! NOTE(review): the status returned by mpas_destroy_clock is never
+      ! checked; failures during teardown are silently ignored
+      call mpas_destroy_clock(clock, ierr)
+      call mpas_decomp_destroy_decomp_list(domain_ptr % decompositions)
+      call mpas_atm_threading_finalize(domain_ptr % blocklist)
+
+   end subroutine cam_mpas_finalize
+
+
+   !-----------------------------------------------------------------------
+   ! routine cam_mpas_debug_stream
+   !
+   ! Debugging aid: writes every field in the domain's allFields pool to a
+   ! new NetCDF file, skipping fields whose names contain 'OwnedIndices'
+   ! (presumably decomposition bookkeeping -- confirm).  For fields with
+   ! multiple time levels, the optional timeLevel argument selects which
+   ! level is written.  Failures to add or write individual fields are
+   ! reported to unit 0 (stderr) but are not fatal.
+   !-----------------------------------------------------------------------
+   subroutine cam_mpas_debug_stream(domain, filename, timeLevel)
+
+      use mpas_io_streams, only : MPAS_createStream, MPAS_closeStream, MPAS_streamAddField, MPAS_writeStream
+      use mpas_derived_types, only : MPAS_IO_WRITE, MPAS_IO_NETCDF, MPAS_STREAM_NOERR, MPAS_Stream_type, MPAS_pool_type, &
+                                     field0DReal, field1DReal, field2DReal, field3DReal, field4DReal, field5DReal, &
+                                     field1DInteger, field2DInteger, field3DInteger
+      use mpas_pool_routines, only : MPAS_pool_get_subpool, MPAS_pool_get_field, MPAS_pool_create_pool, MPAS_pool_destroy_pool, &
+                                     MPAS_pool_add_config
+
+      use mpas_derived_types, only : MPAS_Pool_iterator_type, MPAS_POOL_FIELD, MPAS_POOL_REAL, MPAS_POOL_INTEGER
+      use mpas_pool_routines, only : mpas_pool_begin_iteration, mpas_pool_get_next_member, mpas_pool_get_config
+
+      ! Arguments
+      type (domain_type), intent(inout) :: domain
+      character(len=*), intent(in) :: filename            ! output NetCDF filename (clobbered if present)
+      integer, intent(in), optional :: timeLevel          ! time level for multi-time-level fields
+
+      type (MPAS_Pool_iterator_type) :: itr
+
+      integer :: ierr
+      type (MPAS_pool_type), pointer :: allFields
+
+      ! One typed pointer per supported type/dimensionality combination
+      type (field0DReal), pointer :: field_real0d
+      type (field1DReal), pointer :: field_real1d
+      type (field2DReal), pointer :: field_real2d
+      type (field3DReal), pointer :: field_real3d
+      type (field4DReal), pointer :: field_real4d
+      type (field5DReal), pointer :: field_real5d
+      type (field1DInteger), pointer :: field_int1d
+      type (field2DInteger), pointer :: field_int2d
+      type (field3DInteger), pointer :: field_int3d
+
+      type (MPAS_Stream_type) :: stream
+
+
+      call MPAS_createStream(stream, domain % ioContext, trim(filename), MPAS_IO_NETCDF, MPAS_IO_WRITE, &
+                clobberFiles=.true., clobberRecords=.true., truncateFiles=.true., ierr=ierr)
+
+      allFields => domain % blocklist % allFields
+
+      ! Add every eligible field in the pool to the stream
+      call mpas_pool_begin_iteration(allFields)
+      do while (mpas_pool_get_next_member(allFields, itr))
+
+         if (index(trim(itr % memberName), 'OwnedIndices') /= 0) then
+            cycle
+         end if
+
+         if ( itr % memberType == MPAS_POOL_FIELD) then
+
+            ! Real-valued fields: 0-d through 5-d are supported
+            if (itr % dataType == MPAS_POOL_REAL) then
+               if (itr % nDims == 0) then
+                  nullify(field_real0d)
+                  if (itr % nTimeLevels > 1) then
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real0d, timeLevel=timeLevel)
+                  else
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real0d)
+                  end if
+                  if (associated(field_real0d)) then
+                     call MPAS_streamAddField(stream, field_real0d, ierr=ierr)
+                     if (ierr /= MPAS_STREAM_NOERR) then
+                        write(0,*) '*** Failed to add field '//trim(itr % memberName)
+                     end if
+                  else
+                     write(0,*) '*** Failed to get field '//trim(itr % memberName)
+                  end if
+               else if (itr % nDims == 1) then
+                  nullify(field_real1d)
+                  if (itr % nTimeLevels > 1) then
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real1d, timeLevel=timeLevel)
+                  else
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real1d)
+                  end if
+                  if (associated(field_real1d)) then
+                     call MPAS_streamAddField(stream, field_real1d, ierr=ierr)
+                     if (ierr /= MPAS_STREAM_NOERR) then
+                        write(0,*) '*** Failed to add field '//trim(itr % memberName)
+                     end if
+                  else
+                     write(0,*) '*** Failed to get field '//trim(itr % memberName)
+                  end if
+               else if (itr % nDims == 2) then
+                  nullify(field_real2d)
+                  if (itr % nTimeLevels > 1) then
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real2d, timeLevel=timeLevel)
+                  else
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real2d)
+                  end if
+                  if (associated(field_real2d)) then
+                     call MPAS_streamAddField(stream, field_real2d, ierr=ierr)
+                     if (ierr /= MPAS_STREAM_NOERR) then
+                        write(0,*) '*** Failed to add field '//trim(itr % memberName)
+                     end if
+                  else
+                     write(0,*) '*** Failed to get field '//trim(itr % memberName)
+                  end if
+               else if (itr % nDims == 3) then
+                  nullify(field_real3d)
+                  if (itr % nTimeLevels > 1) then
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real3d, timeLevel=timeLevel)
+                  else
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real3d)
+                  end if
+                  if (associated(field_real3d)) then
+                     call MPAS_streamAddField(stream, field_real3d, ierr=ierr)
+                     if (ierr /= MPAS_STREAM_NOERR) then
+                        write(0,*) '*** Failed to add field '//trim(itr % memberName)
+                     end if
+                  else
+                     write(0,*) '*** Failed to get field '//trim(itr % memberName)
+                  end if
+               else if (itr % nDims == 4) then
+                  nullify(field_real4d)
+                  if (itr % nTimeLevels > 1) then
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real4d, timeLevel=timeLevel)
+                  else
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real4d)
+                  end if
+                  if (associated(field_real4d)) then
+                     call MPAS_streamAddField(stream, field_real4d, ierr=ierr)
+                     if (ierr /= MPAS_STREAM_NOERR) then
+                        write(0,*) '*** Failed to add field '//trim(itr % memberName)
+                     end if
+                  else
+                     write(0,*) '*** Failed to get field '//trim(itr % memberName)
+                  end if
+               else if (itr % nDims == 5) then
+                  nullify(field_real5d)
+                  if (itr % nTimeLevels > 1) then
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real5d, timeLevel=timeLevel)
+                  else
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_real5d)
+                  end if
+                  if (associated(field_real5d)) then
+                     call MPAS_streamAddField(stream, field_real5d, ierr=ierr)
+                     if (ierr /= MPAS_STREAM_NOERR) then
+                        write(0,*) '*** Failed to add field '//trim(itr % memberName)
+                     end if
+                  else
+                     write(0,*) '*** Failed to get field '//trim(itr % memberName)
+                  end if
+               end if
+            ! Integer-valued fields: 1-d through 3-d are supported; other
+            ! dimensionalities are silently skipped (no else branch)
+            else if (itr % dataType == MPAS_POOL_INTEGER) then
+               if (itr % nDims == 1) then
+                  nullify(field_int1d)
+                  if (itr % nTimeLevels > 1) then
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_int1d, timeLevel=timeLevel)
+                  else
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_int1d)
+                  end if
+                  if (associated(field_int1d)) then
+                     call MPAS_streamAddField(stream, field_int1d, ierr=ierr)
+                     if (ierr /= MPAS_STREAM_NOERR) then
+                        write(0,*) '*** Failed to add field '//trim(itr % memberName)
+                     end if
+                  else
+                     write(0,*) '*** Failed to get field '//trim(itr % memberName)
+                  end if
+               else if (itr % nDims == 2) then
+                  nullify(field_int2d)
+                  if (itr % nTimeLevels > 1) then
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_int2d, timeLevel=timeLevel)
+                  else
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_int2d)
+                  end if
+                  if (associated(field_int2d)) then
+                     call MPAS_streamAddField(stream, field_int2d, ierr=ierr)
+                     if (ierr /= MPAS_STREAM_NOERR) then
+                        write(0,*) '*** Failed to add field '//trim(itr % memberName)
+                     end if
+                  else
+                     write(0,*) '*** Failed to get field '//trim(itr % memberName)
+                  end if
+               else if (itr % nDims == 3) then
+                  nullify(field_int3d)
+                  if (itr % nTimeLevels > 1) then
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_int3d, timeLevel=timeLevel)
+                  else
+                     call mpas_pool_get_field(allFields, trim(itr % memberName), field_int3d)
+                  end if
+                  if (associated(field_int3d)) then
+                     call MPAS_streamAddField(stream, field_int3d, ierr=ierr)
+                     if (ierr /= MPAS_STREAM_NOERR) then
+                        write(0,*) '*** Failed to add field '//trim(itr % memberName)
+                     end if
+                  else
+                     write(0,*) '*** Failed to get field '//trim(itr % memberName)
+                  end if
+               end if
+            end if
+
+         end if
+      end do
+
+      ! Write all added fields in a single frame, then close the file
+      call MPAS_writeStream(stream, 1, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) then
+         write(0,*) '*** Error writing stream ', ierr
+      end if
+
+      call MPAS_closeStream(stream, ierr=ierr)
+      if (ierr /= MPAS_STREAM_NOERR) then
+         write(0,*) '*** Error closing stream ', ierr
+      end if
+
+   end subroutine cam_mpas_debug_stream
+
+
+end module cam_mpas_subdriver
diff --git a/src/dynamics/mpas/dycore.F90 b/src/dynamics/mpas/dycore.F90
new file mode 100644
index 0000000000..e40dd80754
--- /dev/null
+++ b/src/dynamics/mpas/dycore.F90
@@ -0,0 +1,33 @@
+module dycore
+
+! Dycore identification for CAM: answers queries about which dynamical
+! core is active and what kind of horizontal grid it uses.
+! The MPAS dycore uses an 'unstructured' horizontal grid.
+
+implicit none
+
+public :: dycore_is
+
+!=========================================================================================
+contains
+!=========================================================================================
+
+logical function dycore_is(name)
+
+   ! Return .true. iff the query names the MPAS dycore itself ('mpas'/'MPAS')
+   ! or the grid class it runs on ('unstructured'/'UNSTRUCTURED').
+
+   character(len=*), intent(in) :: name
+
+   select case (name)
+   case ('mpas', 'MPAS', 'unstructured', 'UNSTRUCTURED')
+      dycore_is = .true.
+   case default
+      dycore_is = .false.
+   end select
+
+end function dycore_is
+
+!=========================================================================================
+
+end module dycore
diff --git a/src/dynamics/mpas/dyn_comp.F90 b/src/dynamics/mpas/dyn_comp.F90
new file mode 100644
index 0000000000..d04fc7be88
--- /dev/null
+++ b/src/dynamics/mpas/dyn_comp.F90
@@ -0,0 +1,1421 @@
+module dyn_comp
+
+! CAM component interfaces to the MPAS Dynamical Core
+
+use shr_kind_mod, only: r8=>shr_kind_r8
+use spmd_utils, only: iam, masterproc, mpicom, npes
+use physconst, only: pi, gravit, rair, cpair
+
+use pmgrid, only: plev, plevp
+use constituents, only: pcnst, cnst_name, cnst_is_a_water_species, cnst_read_iv
+use const_init, only: cnst_init_default
+
+use cam_control_mod, only: initial_run
+use cam_initfiles, only: initial_file_get_id, topo_file_get_id
+
+use cam_grid_support, only: cam_grid_id, cam_grid_get_gcid, &
+ cam_grid_dimensions, cam_grid_get_dim_names, &
+ cam_grid_get_latvals, cam_grid_get_lonvals, &
+ max_hcoordname_len
+use cam_map_utils, only: iMap
+
+use inic_analytic, only: analytic_ic_active, dyn_set_inic_col
+use dyn_tests_utils, only: vcoord=>vc_height
+
+use cam_history, only: addfld, add_default, horiz_only, register_vector_field, &
+ outfld, hist_fld_active
+use cam_history_support, only: max_fieldname_len
+use string_utils, only: date2yyyymmdd, sec2hms, int2str
+
+use ncdio_atm, only: infld
+use pio, only: file_desc_t, pio_seterrorhandling, PIO_BCAST_ERROR, &
+ pio_inq_dimid, pio_inq_dimlen, PIO_NOERR
+use cam_pio_utils, only: clean_iodesc_list
+
+use time_manager, only: get_start_date, get_stop_date, get_run_duration, &
+ timemgr_get_calendar_cf, get_step_size
+
+use cam_logfile, only: iulog
+use cam_abortutils, only: endrun
+
+use mpas_timekeeping, only : MPAS_TimeInterval_type
+
+implicit none
+private
+save
+
+public :: &
+ dyn_import_t, &
+ dyn_export_t, &
+ dyn_readnl, &
+ dyn_register, &
+ dyn_init, &
+ dyn_run, &
+ dyn_final, &
+ swap_time_level_ptrs
+
+! Note that the fields in the import and export states are pointers into the MPAS dycore internal
+! data structures. These fields have the order of the vertical and horizontal dimensions swapped
+! relative to the CAM convention, as well as having the vertical indices ordered from bottom to top
+! of atm. An exception is that the export state contains two fields, pmiddry and pintdry, not managed
+! by the MPAS infrastructure. These fields are only used by the physics package and are computed
+! in the dp_coupling module.
+
+! Import container: everything the coupling layer hands to the dycore.
+! All pointer components alias MPAS-managed storage except as noted in
+! the module comment above (dimension ordering, bottom-to-top vertical).
+type dyn_import_t
+   !
+   ! Number of cells, edges, vertices, and vertical layers in this block
+   !
+   integer :: nCells ! Number of cells, including halo cells
+   integer :: nEdges ! Number of edges, including halo edges
+   integer :: nVertices ! Number of vertices, including halo vertices
+   integer :: nVertLevels ! Number of vertical layers
+
+   integer :: nCellsSolve ! Number of cells, excluding halo cells
+   integer :: nEdgesSolve ! Number of edges, excluding halo edges
+   integer :: nVerticesSolve ! Number of vertices, excluding halo vertices
+
+   !
+   ! State that is directly prognosed by the dycore
+   !
+   real(r8), dimension(:,:), pointer :: uperp ! Normal velocity at edges [m/s] (nver,nedge)
+   real(r8), dimension(:,:), pointer :: w ! Vertical velocity [m/s] (nver+1,ncol)
+   real(r8), dimension(:,:), pointer :: theta_m ! Moist potential temperature [K] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: rho_zz ! Dry density [kg/m^3]
+   ! divided by d(zeta)/dz (nver,ncol)
+   real(r8), dimension(:,:,:), pointer :: tracers ! Tracers [kg/kg dry air] (nq,nver,ncol)
+
+   !
+   ! Index map between MPAS tracers and CAM constituents
+   !
+   integer, dimension(:), pointer :: mpas_from_cam_cnst => null() ! indices into CAM constituent array
+
+   !
+   ! Base state variables
+   !
+   real(r8), dimension(:,:), pointer :: rho_base ! Base-state dry air density [kg/m^3] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: theta_base ! Base-state potential temperature [K] (nver,ncol)
+
+   !
+   ! Indices of tracers
+   !
+   integer :: index_qv ! Index in tracers array of water vapor
+   ! mixing ratio
+
+   !
+   ! Invariant -- the vertical coordinate in MPAS-A is a height coordinate
+   !
+   real(r8), dimension(:,:), pointer :: zint ! Geometric height [m]
+   ! at layer interfaces (nver+1,ncol)
+   real(r8), dimension(:,:), pointer :: zz ! Vertical coordinate metric [dimensionless]
+   ! at layer midpoints (nver,ncol)
+   real(r8), dimension(:), pointer :: fzm ! Interp weight from k layer midpoint to k layer
+   ! interface [dimensionless] (nver)
+   real(r8), dimension(:), pointer :: fzp ! Interp weight from k-1 layer midpoint to k
+   ! layer interface [dimensionless] (nver)
+
+   !
+   ! Invariant -- needed to compute edge-normal velocities
+   !
+   real(r8), dimension(:,:), pointer :: east ! Cartesian components of unit east vector
+   ! at cell centers [dimensionless] (3,ncol)
+   real(r8), dimension(:,:), pointer :: north ! Cartesian components of unit north vector
+   ! at cell centers [dimensionless] (3,ncol)
+   real(r8), dimension(:,:), pointer :: normal ! Cartesian components of the vector normal
+   ! to an edge and tangential to the surface
+   ! of the sphere [dimensionless] (3,ncol)
+   integer, dimension(:,:), pointer :: cellsOnEdge ! Indices of cells separated by an edge (2,nedge)
+
+
+   !
+   ! State that may be directly derived from dycore prognostic state
+   !
+   real(r8), dimension(:,:), pointer :: theta ! Potential temperature [K] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: exner ! Exner function [-] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: rho ! Dry density [kg/m^3] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: ux ! Zonal veloc at center [m/s] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: uy ! Meridional veloc at center [m/s] (nver,ncol)
+
+   !
+   ! Tendencies from physics
+   !
+   real(r8), dimension(:,:), pointer :: ru_tend ! Normal horizontal momentum tendency
+   ! from physics [kg/m^2/s] (nver,nedge)
+   real(r8), dimension(:,:), pointer :: rtheta_tend ! Tendency of rho*theta/zz
+   ! from physics [kg K/m^3/s] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: rho_tend ! Dry air density tendency
+   ! from physics [kg/m^3/s] (nver,ncol)
+end type dyn_import_t
+
+! Export container: everything the coupling layer reads back from the
+! dycore after integration; pmiddry/pintdry are the exceptions noted in
+! the module comment above (computed in dp_coupling, not MPAS-managed).
+type dyn_export_t
+   !
+   ! Number of cells, edges, vertices, and vertical layers in this block
+   !
+   integer :: nCells ! Number of cells, including halo cells
+   integer :: nEdges ! Number of edges, including halo edges
+   integer :: nVertices ! Number of vertices, including halo vertices
+   integer :: nVertLevels ! Number of vertical layers
+
+   integer :: nCellsSolve ! Number of cells, excluding halo cells
+   integer :: nEdgesSolve ! Number of edges, excluding halo edges
+   integer :: nVerticesSolve ! Number of vertices, excluding halo vertices
+
+   !
+   ! State that is directly prognosed by the dycore
+   !
+   real(r8), dimension(:,:), pointer :: uperp ! Normal velocity at edges [m/s] (nver,nedge)
+   real(r8), dimension(:,:), pointer :: w ! Vertical velocity [m/s] (nver+1,ncol)
+   real(r8), dimension(:,:), pointer :: theta_m ! Moist potential temperature [K] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: rho_zz ! Dry density [kg/m^3]
+   ! divided by d(zeta)/dz (nver,ncol)
+   real(r8), dimension(:,:,:), pointer :: tracers ! Tracers [kg/kg dry air] (nq,nver,ncol)
+
+   !
+   ! Indices of tracers
+   !
+   integer :: index_qv ! Index in tracers array of water vapor
+   ! mixing ratio
+
+   !
+   ! Index map between MPAS tracers and CAM constituents
+   !
+   integer, dimension(:), pointer :: cam_from_mpas_cnst => null() ! indices into MPAS tracers array
+
+   !
+   ! Invariant -- the vertical coordinate in MPAS-A is a height coordinate
+   !
+   real(r8), dimension(:,:), pointer :: zint ! Geometric height [m]
+   ! at layer interfaces (nver+1,ncol)
+   real(r8), dimension(:,:), pointer :: zz ! Vertical coordinate metric [dimensionless]
+   ! at layer midpoints (nver,ncol)
+   real(r8), dimension(:), pointer :: fzm ! Interp weight from k layer midpoint to k layer
+   ! interface [dimensionless] (nver)
+   real(r8), dimension(:), pointer :: fzp ! Interp weight from k-1 layer midpoint to k
+   ! layer interface [dimensionless] (nver)
+
+   !
+   ! State that may be directly derived from dycore prognostic state
+   !
+   real(r8), dimension(:,:), pointer :: theta ! Potential temperature [K] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: exner ! Exner function [-] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: rho ! Dry density [kg/m^3] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: ux ! Zonal veloc at center [m/s] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: uy ! Meridional veloc at center [m/s] (nver,ncol)
+   real(r8), dimension(:,:), pointer :: pmiddry ! Dry hydrostatic pressure [Pa]
+   ! at layer midpoints (nver,ncol)
+   real(r8), dimension(:,:), pointer :: pintdry ! Dry hydrostatic pressure [Pa]
+   ! at layer interfaces (nver+1,ncol)
+   real(r8), dimension(:,:), pointer :: vorticity ! Relative vertical vorticity [s^-1]
+   ! (nver,nvtx)
+   real(r8), dimension(:,:), pointer :: divergence ! Horizontal velocity divergence [s^-1]
+   ! (nver,ncol)
+end type dyn_export_t
+
+! Degree/radian conversion factors derived from CAM's pi
+real(r8), parameter :: rad2deg = 180.0_r8 / pi
+real(r8), parameter :: deg2rad = pi / 180.0_r8
+
+! The global cell indices are used to seed the RNG which is used to apply
+! random perturbations to the initial temperature field. These global indices
+! are just those for the local dynamics block.
+integer, allocatable :: glob_ind(:)
+
+type (MPAS_TimeInterval_type) :: integrationLength ! set to CAM's dynamics/physics coupling interval
+! Public flag (see public list above); set and consumed by routines not
+! visible in this chunk -- semantics defined where it is assigned
+logical :: swap_time_level_ptrs
+
+!=========================================================================================
+contains
+!=========================================================================================
+
+subroutine dyn_readnl(NLFileName)
+
+   ! Read the dycore-relevant namelists from the input file.
+   ! First must set up basic MPAS infrastructure (init phases 1 and 2) to allow
+   ! the MPAS-A dycore to save namelist options into MPAS-native datastructures
+   ! called "pools".  Time-related config options that do not appear in the
+   ! dycore namelist are derived from CAM's time manager and added directly to
+   ! the configs pool, along with a few options MPAS-A references but CAM-MPAS
+   ! does not use.
+
+   use units, only: getunit
+   use cam_pio_utils, only: pio_subsystem
+
+   use cam_mpas_subdriver, only : domain_ptr, cam_mpas_init_phase1, cam_mpas_init_phase2
+   use mpas_pool_routines, only : mpas_pool_add_config
+
+
+   ! Dummy argument
+   character(len=*), intent(in) :: NLFileName   ! filepath of the namelist input file
+
+   ! Local variables
+   integer, dimension(2) :: logUnits ! stdout and stderr for MPAS logging
+   integer :: yr, mon, day, tod, ndate, nday, nsec
+   !----------------------------------------------------------------------------
+
+   ! Route MPAS "stdout" logging to CAM's log unit; "stderr" gets its own unit.
+   logUnits(1) = iulog
+   logUnits(2) = getunit()
+
+   call cam_mpas_init_phase1(mpicom, endrun, logUnits, r8)
+
+   ! read namelist
+   call cam_mpas_namelist_read(NLFileName, domain_ptr % configs)
+
+   ! Set config_start_date, etc. (these will not appear in the dycore namelist)
+   call get_start_date(yr, mon, day, tod)
+   ndate = yr*10000 + mon*100 + day
+   call mpas_pool_add_config(domain_ptr % configs, 'config_start_time', date2yyyymmdd(ndate)//'_'//sec2hms(tod))
+
+   call get_stop_date(yr, mon, day, tod)
+   ndate = yr*10000 + mon*100 + day
+   call mpas_pool_add_config(domain_ptr % configs, 'config_stop_time', date2yyyymmdd(ndate)//'_'//sec2hms(tod))
+
+   call get_run_duration(nday, nsec)
+   call mpas_pool_add_config(domain_ptr % configs, 'config_run_duration', trim(int2str(nday))//'_'//sec2hms(nsec))
+
+   ! Although the following namelist options are not expected to be used by CAM-MPAS, the MPAS-A dycore
+   ! references these options, and they therefore must be defined in the configs pool
+   call mpas_pool_add_config(domain_ptr % configs, 'config_restart_timestamp_name', 'restart_timestamp')
+   call mpas_pool_add_config(domain_ptr % configs, 'config_IAU_option', 'off')
+   call mpas_pool_add_config(domain_ptr % configs, 'config_do_DAcycling', .false.)
+
+   call cam_mpas_init_phase2(pio_subsystem, endrun, timemgr_get_calendar_cf())
+
+end subroutine dyn_readnl
+
+!=========================================================================================
+
+subroutine dyn_register()
+
+   ! Register fields that are computed by the dycore and passed to the physics via the
+   ! physics buffer.
+   !
+   ! NOTE(review): the body is currently empty -- no pbuf fields are registered yet,
+   ! and the names imported below are unused until registrations are added.
+
+   use physics_buffer, only: pbuf_add_field, dtype_r8
+   use ppgrid,         only: pcols, pver
+   !----------------------------------------------------------------------------
+
+
+end subroutine dyn_register
+
+!=========================================================================================
+
+subroutine dyn_init(dyn_in, dyn_out)
+
+   ! Initialize the dynamics import (dyn_in) and export (dyn_out) states.
+   ! Most fields are set to point directly at memory managed by MPAS-Atmosphere
+   ! (obtained from its mesh/state/diag/tend_physics pools); the exceptions,
+   ! pmiddry and pintdry, are allocated here.  On an initial run the initial
+   ! conditions are read/computed.  Finally the dycore integration interval is
+   ! set from CAM's timestep, which must be an integer multiple of the MPAS-A
+   ! dycore timestep.
+
+   use cam_mpas_subdriver, only : domain_ptr, cam_mpas_init_phase4
+   use cam_mpas_subdriver, only : cam_mpas_define_scalars
+   use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_array, mpas_pool_get_dimension, &
+                                  mpas_pool_get_config
+   use mpas_timekeeping, only : MPAS_set_timeInterval
+   use mpas_derived_types, only : mpas_pool_type
+   use mpas_constants, only : mpas_constants_compute_derived
+
+   ! arguments:
+   type(dyn_import_t), intent(inout) :: dyn_in
+   type(dyn_export_t), intent(inout) :: dyn_out
+
+   ! Local variables:
+   integer :: ierr
+
+   type(mpas_pool_type), pointer :: mesh_pool
+   type(mpas_pool_type), pointer :: state_pool
+   type(mpas_pool_type), pointer :: diag_pool
+   type(mpas_pool_type), pointer :: tend_physics_pool
+
+   integer, pointer :: nCells
+   integer, pointer :: nEdges
+   integer, pointer :: nVertices
+   integer, pointer :: nVertLevels
+   integer, pointer :: nCellsSolve
+   integer, pointer :: nEdgesSolve
+   integer, pointer :: nVerticesSolve
+   integer, pointer :: index_qv
+
+   integer, pointer :: indexToCellID(:) ! global indices of cell centers of local block
+
+   real(r8) :: dtime                    ! CAM timestep (dynamics/physics coupling interval) [s]
+   real(r8), pointer :: mpas_dt         ! MPAS-A dycore timestep [s]
+   real(r8) :: dt_ratio                 ! dtime / mpas_dt
+   character(len=128) :: errmsg
+
+   character(len=*), parameter :: subname = 'dyn_comp::dyn_init'
+   !----------------------------------------------------------------------------
+
+   ! On an initial run, define the dycore scalars (constituents) and build the
+   ! index maps between MPAS tracers and CAM constituents.
+   if (initial_run) then
+      call cam_mpas_define_scalars(domain_ptr % blocklist, dyn_in % mpas_from_cam_cnst, &
+                                   dyn_out % cam_from_mpas_cnst, ierr)
+      if (ierr /= 0) then
+         call endrun(subname//': Set-up of constituents for MPAS-A dycore failed.')
+      end if
+   end if
+
+   call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', mesh_pool)
+   call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'state', state_pool)
+   call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'diag', diag_pool)
+   call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'tend_physics', tend_physics_pool)
+
+   ! Let dynamics import state point to memory managed by MPAS-Atmosphere
+
+   call mpas_pool_get_dimension(mesh_pool, 'nCells', nCells)
+   dyn_in % nCells = nCells
+
+   call mpas_pool_get_dimension(mesh_pool, 'nEdges', nEdges)
+   dyn_in % nEdges = nEdges
+
+   call mpas_pool_get_dimension(mesh_pool, 'nVertices', nVertices)
+   dyn_in % nVertices = nVertices
+
+   call mpas_pool_get_dimension(mesh_pool, 'nVertLevels', nVertLevels)
+   dyn_in % nVertLevels = nVertLevels
+
+   call mpas_pool_get_dimension(mesh_pool, 'nCellsSolve', nCellsSolve)
+   dyn_in % nCellsSolve = nCellsSolve
+
+   call mpas_pool_get_dimension(mesh_pool, 'nEdgesSolve', nEdgesSolve)
+   dyn_in % nEdgesSolve = nEdgesSolve
+
+   call mpas_pool_get_dimension(mesh_pool, 'nVerticesSolve', nVerticesSolve)
+   dyn_in % nVerticesSolve = nVerticesSolve
+
+   ! The import state aliases MPAS time level 1 of the state pool.
+   call mpas_pool_get_array(state_pool, 'u', dyn_in % uperp, timeLevel=1)
+   call mpas_pool_get_array(state_pool, 'w', dyn_in % w, timeLevel=1)
+   call mpas_pool_get_array(state_pool, 'theta_m', dyn_in % theta_m, timeLevel=1)
+   call mpas_pool_get_array(state_pool, 'rho_zz', dyn_in % rho_zz, timeLevel=1)
+   call mpas_pool_get_array(state_pool, 'scalars', dyn_in % tracers, timeLevel=1)
+
+   call mpas_pool_get_array(diag_pool, 'rho_base', dyn_in % rho_base)
+   call mpas_pool_get_array(diag_pool, 'theta_base', dyn_in % theta_base)
+
+   call mpas_pool_get_dimension(state_pool, 'index_qv', index_qv)
+   dyn_in % index_qv = index_qv
+
+   call mpas_pool_get_array(mesh_pool, 'zgrid', dyn_in % zint)
+   call mpas_pool_get_array(mesh_pool, 'zz', dyn_in % zz)
+   call mpas_pool_get_array(mesh_pool, 'fzm', dyn_in % fzm)
+   call mpas_pool_get_array(mesh_pool, 'fzp', dyn_in % fzp)
+
+   call mpas_pool_get_array(mesh_pool, 'east', dyn_in % east)
+   call mpas_pool_get_array(mesh_pool, 'north', dyn_in % north)
+   call mpas_pool_get_array(mesh_pool, 'edgeNormalVectors', dyn_in % normal)
+   call mpas_pool_get_array(mesh_pool, 'cellsOnEdge', dyn_in % cellsOnEdge)
+
+   call mpas_pool_get_array(diag_pool, 'theta', dyn_in % theta)
+   call mpas_pool_get_array(diag_pool, 'exner', dyn_in % exner)
+   call mpas_pool_get_array(diag_pool, 'rho', dyn_in % rho)
+   call mpas_pool_get_array(diag_pool, 'uReconstructZonal', dyn_in % ux)
+   call mpas_pool_get_array(diag_pool, 'uReconstructMeridional', dyn_in % uy)
+
+   call mpas_pool_get_array(tend_physics_pool, 'tend_ru_physics', dyn_in % ru_tend)
+   call mpas_pool_get_array(tend_physics_pool, 'tend_rtheta_physics', dyn_in % rtheta_tend)
+   call mpas_pool_get_array(tend_physics_pool, 'tend_rho_physics', dyn_in % rho_tend)
+
+   ! Let dynamics export state point to memory managed by MPAS-Atmosphere
+   ! Exception: pmiddry and pintdry are not managed by the MPAS infrastructure
+
+   dyn_out % nCells = dyn_in % nCells
+   dyn_out % nEdges = dyn_in % nEdges
+   dyn_out % nVertices = dyn_in % nVertices
+   dyn_out % nVertLevels = dyn_in % nVertLevels
+   dyn_out % nCellsSolve = dyn_in % nCellsSolve
+   dyn_out % nEdgesSolve = dyn_in % nEdgesSolve
+   dyn_out % nVerticesSolve = dyn_in % nVerticesSolve
+
+   ! The export state aliases MPAS time level 2 of the state pool.
+   call mpas_pool_get_array(state_pool, 'u', dyn_out % uperp, timeLevel=2)
+   call mpas_pool_get_array(state_pool, 'w', dyn_out % w, timeLevel=2)
+   call mpas_pool_get_array(state_pool, 'theta_m', dyn_out % theta_m, timeLevel=2)
+   call mpas_pool_get_array(state_pool, 'rho_zz', dyn_out % rho_zz, timeLevel=2)
+   call mpas_pool_get_array(state_pool, 'scalars', dyn_out % tracers, timeLevel=2)
+
+   dyn_out % index_qv = dyn_in % index_qv
+
+   dyn_out % zint => dyn_in % zint
+   dyn_out % zz   => dyn_in % zz
+   dyn_out % fzm  => dyn_in % fzm
+   dyn_out % fzp  => dyn_in % fzp
+
+   dyn_out % theta => dyn_in % theta
+   dyn_out % exner => dyn_in % exner
+   dyn_out % rho   => dyn_in % rho
+   dyn_out % ux    => dyn_in % ux
+   dyn_out % uy    => dyn_in % uy
+
+   ! These two fields are owned by this module, not by MPAS (see dyn_final).
+   allocate(dyn_out % pmiddry(nVertLevels, nCells))
+   allocate(dyn_out % pintdry(nVertLevels+1, nCells))
+
+   call mpas_pool_get_array(diag_pool, 'vorticity', dyn_out % vorticity)
+   call mpas_pool_get_array(diag_pool, 'divergence', dyn_out % divergence)
+
+   ! Save the global cell indices of the local block (used for RNG seeding).
+   call mpas_pool_get_array(mesh_pool, 'indexToCellID', indexToCellID)
+   allocate(glob_ind(nCellsSolve))
+   glob_ind = indexToCellID(1:nCellsSolve)
+
+   call mpas_constants_compute_derived()
+
+   if (initial_run) then
+
+      call read_inidat(dyn_in)
+      call clean_iodesc_list()
+
+   end if
+
+   ! Initialize dyn_out from dyn_in since it is needed to run the physics package
+   ! as part of the CAM initialization before a dycore step is taken. This is only
+   ! needed for the fields that have 2 time levels in the MPAS state_pool.
+   dyn_out % uperp(:,:nEdgesSolve)     = dyn_in % uperp(:,:nEdgesSolve)
+   dyn_out % w(:,:nCellsSolve)         = dyn_in % w(:,:nCellsSolve)
+   dyn_out % theta_m(:,:nCellsSolve)   = dyn_in % theta_m(:,:nCellsSolve)
+   dyn_out % rho_zz(:,:nCellsSolve)    = dyn_in % rho_zz(:,:nCellsSolve)
+   dyn_out % tracers(:,:,:nCellsSolve) = dyn_in % tracers(:,:,:nCellsSolve)
+
+   call cam_mpas_init_phase4(endrun)
+
+   ! Check that CAM's timestep, i.e., the dynamics/physics coupling interval, is an integer multiple
+   ! of the MPAS timestep.
+
+   ! Get CAM time step
+   dtime = get_step_size()
+
+   ! Get MPAS-A dycore time step
+   call mpas_pool_get_config(domain_ptr % configs, 'config_dt', mpas_dt)
+
+   ! Calculate time step ratio
+   dt_ratio = dtime / mpas_dt
+
+   ! Stop if the dycore time step does not evenly divide the CAM time step
+   if (ceiling(dt_ratio) /= floor(dt_ratio)) then
+      write(errmsg, '(a,f9.3,a,f9.3,a)') 'The ratio of the CAM timestep, ', dtime, &
+         ' to the MPAS-A dycore timestep, ', mpas_dt, ' is not an integer'
+      call endrun(subname//': '//trim(errmsg))
+   end if
+
+   ! dtime has no fractional part, but use nint to deal with any roundoff errors.
+   ! Set the interval over which the dycore should integrate during each call to dyn_run.
+   call MPAS_set_timeInterval(integrationLength, S=nint(dtime), S_n=0, S_d=1)
+
+   ! MPAS updates the time level index in its state pool each dycore time step (mpas_dt). If
+   ! the CAM timestep is an odd multiple of mpas_dt, then the pointers in the dyn_in/dyn_out
+   ! objects need a corresponding update. Set the following logical variable to indicate
+   ! whether the pointer update is needed.
+   swap_time_level_ptrs = mod( nint(dt_ratio), 2) == 1
+
+end subroutine dyn_init
+
+!=========================================================================================
+
+subroutine dyn_run(dyn_in, dyn_out)
+
+   use cam_mpas_subdriver, only : cam_mpas_run
+
+   ! Advance the dynamics from the state in dyn_in to the state in dyn_out by
+   ! integrating the MPAS-A dycore over one dynamics/physics coupling interval
+   ! (integrationLength, a module variable set in dyn_init).  The import and
+   ! export states point at MPAS-managed memory (see dyn_init), so the
+   ! integration updates them in place.
+
+   type (dyn_import_t), intent(inout) :: dyn_in
+   type (dyn_export_t), intent(inout) :: dyn_out
+   !----------------------------------------------------------------------------
+
+   ! Hand control to the MPAS-A subdriver for the full coupling interval.
+   call cam_mpas_run(integrationLength)
+
+end subroutine dyn_run
+
+!=========================================================================================
+
+subroutine dyn_final(dyn_in, dyn_out)
+
+   use cam_mpas_subdriver, only : cam_mpas_finalize
+
+   ! Deallocates the dynamics import and export states, and finalizes
+   ! the MPAS dycore.  Pointers into MPAS-managed memory are only nullified
+   ! (MPAS owns that storage); the arrays allocated by this module -- the
+   ! constituent index maps, pmiddry/pintdry, and glob_ind -- are deallocated
+   ! here.
+
+   type (dyn_import_t), intent(inout) :: dyn_in
+   type (dyn_export_t), intent(inout) :: dyn_out
+   !----------------------------------------------------------------------------
+
+   !
+   ! Prevent any further access to MPAS-Atmosphere memory
+   !
+   dyn_in % nCells = 0
+   dyn_in % nEdges = 0
+   dyn_in % nVertices = 0
+   dyn_in % nVertLevels = 0
+   dyn_in % nCellsSolve = 0
+   dyn_in % nEdgesSolve = 0
+   dyn_in % nVerticesSolve = 0
+   nullify(dyn_in % uperp)
+   nullify(dyn_in % w)
+   nullify(dyn_in % theta_m)
+   nullify(dyn_in % rho_zz)
+   nullify(dyn_in % tracers)
+   deallocate(dyn_in % mpas_from_cam_cnst)
+   nullify(dyn_in % rho_base)
+   nullify(dyn_in % theta_base)
+   dyn_in % index_qv = 0
+   nullify(dyn_in % zint)
+   nullify(dyn_in % zz)
+   nullify(dyn_in % fzm)
+   nullify(dyn_in % fzp)
+   nullify(dyn_in % east)
+   nullify(dyn_in % north)
+   nullify(dyn_in % normal)
+   nullify(dyn_in % cellsOnEdge)
+   nullify(dyn_in % theta)
+   nullify(dyn_in % exner)
+   nullify(dyn_in % rho)
+   nullify(dyn_in % ux)
+   nullify(dyn_in % uy)
+   nullify(dyn_in % ru_tend)
+   nullify(dyn_in % rtheta_tend)
+   nullify(dyn_in % rho_tend)
+
+   !
+   ! Prevent any further access to MPAS-Atmosphere memory
+   !
+   dyn_out % nCells = 0
+   dyn_out % nEdges = 0
+   dyn_out % nVertices = 0
+   dyn_out % nVertLevels = 0
+   dyn_out % nCellsSolve = 0
+   dyn_out % nEdgesSolve = 0
+   dyn_out % nVerticesSolve = 0
+   nullify(dyn_out % uperp)
+   nullify(dyn_out % w)
+   nullify(dyn_out % theta_m)
+   nullify(dyn_out % rho_zz)
+   nullify(dyn_out % tracers)
+   deallocate(dyn_out % cam_from_mpas_cnst)
+   dyn_out % index_qv = 0
+   nullify(dyn_out % zint)
+   nullify(dyn_out % zz)
+   nullify(dyn_out % fzm)
+   nullify(dyn_out % fzp)
+   nullify(dyn_out % theta)
+   nullify(dyn_out % exner)
+   nullify(dyn_out % rho)
+   nullify(dyn_out % ux)
+   nullify(dyn_out % uy)
+   deallocate(dyn_out % pmiddry)
+   deallocate(dyn_out % pintdry)
+   nullify(dyn_out % vorticity)
+   nullify(dyn_out % divergence)
+
+   ! Release the global cell indices saved in dyn_init (previously leaked).
+   if (allocated(glob_ind)) deallocate(glob_ind)
+
+   call cam_mpas_finalize()
+
+end subroutine dyn_final
+
+!=========================================================================================
+! Private routines.
+!=========================================================================================
+
+subroutine read_inidat(dyn_in)
+
+   ! Set initial conditions. Either from analytic expressions or read from file.
+   ! Constituent mixing ratios may additionally be overridden from the initial
+   ! file (except, for analytic ICs, the water species), and halos are updated
+   ! for all initial state fields before returning.
+
+   use cam_mpas_subdriver, only : domain_ptr, cam_mpas_update_halo, cam_mpas_cell_to_edge_winds
+   use mpas_pool_routines, only : mpas_pool_get_subpool, mpas_pool_get_array
+   use mpas_derived_types, only : mpas_pool_type
+   use mpas_vector_reconstruction, only : mpas_reconstruct
+   use mpas_constants, only : Rv_over_Rd => rvord
+
+   ! arguments
+   type(dyn_import_t), target, intent(inout) :: dyn_in
+
+   ! Local variables
+   integer :: nCellsSolve, nEdgesSolve
+   integer :: i, k, kk, m
+
+   type(file_desc_t), pointer :: fh_ini
+   type(file_desc_t), pointer :: fh_topo
+
+   real(r8), allocatable :: latvals(:)
+   real(r8), allocatable :: lonvals(:)
+   real(r8), pointer :: latvals_deg(:)
+   real(r8), pointer :: lonvals_deg(:)
+
+   real(r8), pointer :: uperp(:,:)      ! Normal velocity at edges [m/s] (nver,nedge)
+   real(r8), pointer :: w(:,:)          ! Vertical velocity [m/s] (nver+1,ncol)
+   real(r8), pointer :: theta_m(:,:)    ! Moist potential temperature [K] (nver,ncol)
+   real(r8), pointer :: rho_zz(:,:)     ! Dry density [kg/m^3]
+                                        ! divided by d(zeta)/dz (nver,ncol)
+   real(r8), pointer :: tracers(:,:,:)  ! Tracers [kg/kg dry air] (nq,nver,ncol)
+   real(r8), pointer :: zint(:,:)       ! Geometric height [m]
+                                        ! at layer interfaces (nver+1,ncol)
+   real(r8), pointer :: zz(:,:)         ! Vertical coordinate metric [dimensionless]
+                                        ! at layer midpoints (nver,ncol)
+   real(r8), pointer :: theta(:,:)      ! Potential temperature [K] (nver,ncol)
+   real(r8), pointer :: rho(:,:)        ! Dry density [kg/m^3] (nver,ncol)
+   real(r8), pointer :: ux(:,:)         ! Zonal veloc at center [m/s] (nver,ncol)
+   real(r8), pointer :: uy(:,:)         ! Meridional veloc at center [m/s] (nver,ncol)
+   real(r8), pointer :: theta_base(:,:)
+   real(r8), pointer :: rho_base(:,:)
+
+   integer :: ixqv                      ! index of water vapor in MPAS tracers array
+   integer, dimension(:), pointer :: mpas_from_cam_cnst
+
+   integer, allocatable :: m_ind(:)
+   real(r8), allocatable :: &
+      cam2d(:), cam3d(:,:), cam4d(:,:,:), zi(:,:) ! temp arrays using CAM data order
+   real(r8), allocatable :: zsurf(:)
+
+   ! temp arrays using MPAS data order
+   real(r8), allocatable :: t(:,:)       ! temperature
+   real(r8), allocatable :: pintdry(:,:) ! dry interface pressures
+   real(r8), allocatable :: pmiddry(:,:) ! dry midpoint pressures
+   real(r8), allocatable :: pmid(:,:)    ! midpoint pressures
+   real(r8), allocatable :: mpas3d(:,:,:)
+
+   real(r8) :: dz, h
+   logical :: readvar
+
+   type(mpas_pool_type), pointer :: mesh_pool
+   type(mpas_pool_type), pointer :: diag_pool
+
+   real(r8), pointer :: uReconstructX(:,:)
+   real(r8), pointer :: uReconstructY(:,:)
+   real(r8), pointer :: uReconstructZ(:,:)
+
+   integer :: mpas_idx, cam_idx
+   character(len=16) :: trac_name
+
+   character(len=*), parameter :: subname = 'dyn_comp:read_inidat'
+   !--------------------------------------------------------------------------------------
+
+   fh_ini => initial_file_get_id()
+   fh_topo => topo_file_get_id()
+
+   nCellsSolve = dyn_in % nCellsSolve
+   nEdgesSolve = dyn_in % nEdgesSolve
+
+   ixqv = dyn_in % index_qv
+   mpas_from_cam_cnst => dyn_in % mpas_from_cam_cnst
+
+   ! Convenience aliases into the import state (MPAS-managed memory).
+   uperp => dyn_in % uperp
+   w => dyn_in % w
+   theta_m => dyn_in % theta_m
+   rho_zz => dyn_in % rho_zz
+   tracers => dyn_in % tracers
+
+   zint => dyn_in % zint
+   zz => dyn_in % zz
+   theta => dyn_in % theta
+   rho => dyn_in % rho
+   ux => dyn_in % ux
+   uy => dyn_in % uy
+   rho_base => dyn_in % rho_base
+   theta_base => dyn_in % theta_base
+
+   ! Check that number of advected tracers is consistent with MPAS.
+   if (pcnst /= size(tracers, 1)) then
+      write(iulog,*) subname//': number of tracers, pcnst:', size(tracers,1), pcnst
+      call endrun(subname//': number of tracers /= pcnst')
+   end if
+
+   ! lat/lon needed in radians
+   latvals_deg => cam_grid_get_latvals(cam_grid_id('mpas_cell'))
+   lonvals_deg => cam_grid_get_lonvals(cam_grid_id('mpas_cell'))
+   allocate(latvals(nCellsSolve))
+   allocate(lonvals(nCellsSolve))
+   latvals(:) = latvals_deg(:)*deg2rad
+   lonvals(:) = lonvals_deg(:)*deg2rad
+
+   ! Set ICs. Either from analytic expressions or read from file.
+
+   allocate( &
+      ! temporary arrays using CAM indexing
+      cam2d(nCellsSolve), &
+      cam3d(nCellsSolve,plev), &
+      cam4d(nCellsSolve,plev,pcnst), &
+      zi(nCellsSolve,plevp), &
+      ! temporary arrays using MPAS indexing
+      t(plev,nCellsSolve), &
+      pintdry(plevp,nCellsSolve), &
+      pmiddry(plev,nCellsSolve), &
+      pmid(plev,nCellsSolve) )
+
+   ! Interface heights in CAM data order.  NOTE: the vertical index is reversed
+   ! between the CAM-ordered temporaries and the MPAS arrays, hence the
+   ! kk = plevp - k + 1 (or plev - k + 1) flips used throughout this routine.
+   do k = 1, plevp
+      kk = plevp - k + 1
+      zi(:,kk) = zint(k,:nCellsSolve)
+   end do
+
+   ! If using a topo file check that PHIS is consistent with the surface z coordinate.
+   if (associated(fh_topo)) then
+
+      allocate(zsurf(nCellsSolve))
+
+      call get_zsurf_from_topo(fh_topo, zsurf)
+
+      do i = 1, nCellsSolve
+         if (abs(zi(i,plevp) - zsurf(i)) > 0.001_r8) then
+            write(iulog,*) subname//': ERROR: zi= ', zi(i,plevp), ' zsurf= ', zsurf(i)
+            call endrun(subname//': ERROR: PHIS not consistent with surface z coordinate')
+         end if
+      end do
+
+      deallocate(zsurf)
+
+   end if
+
+   if (analytic_ic_active()) then
+
+      w(:,1:nCellsSolve) = 0.0_r8
+
+      ! U, V cell center velocity components
+
+      call dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, zint=zi, U=cam3d)
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            ux(kk,i) = cam3d(i,k)
+         end do
+      end do
+
+      call dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, zint=zi, V=cam3d)
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            uy(kk,i) = cam3d(i,k)
+         end do
+      end do
+
+      ! Compute uperp by projecting ux and uy from cell centers to edges
+      call cam_mpas_update_halo('uReconstructZonal', endrun)      ! ux => uReconstructZonal
+      call cam_mpas_update_halo('uReconstructMeridional', endrun) ! uy => uReconstructMeridional
+      call cam_mpas_cell_to_edge_winds(dyn_in % nEdges, ux, uy, dyn_in % east, dyn_in % north, &
+                                       dyn_in % normal, dyn_in % cellsOnEdge, uperp)
+
+      call cam_mpas_update_halo('u', endrun) ! u is the name of uperp in the MPAS state pool
+
+      ! Constituents
+
+      allocate(m_ind(pcnst))
+      do m = 1, pcnst
+         m_ind(m) = m
+      end do
+      call dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, zint=zi, m_cnst=m_ind, Q=cam4d)
+      do m = 1, pcnst ! index into MPAS tracers array
+         do k = 1, plev
+            kk = plev - k + 1
+            do i = 1, nCellsSolve
+               ! mpas_from_cam_cnst maps the MPAS tracer index m to its CAM constituent index
+               tracers(m,kk,i) = cam4d(i,k,mpas_from_cam_cnst(m))
+            end do
+         end do
+      end do
+      deallocate(m_ind)
+
+      ! Temperature
+
+      call dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, zint=zi, T=cam3d)
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            t(kk,i) = cam3d(i,k)
+         end do
+      end do
+
+      ! Pressures are needed to convert temperature to potential temperature.
+
+      call dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, zint=zi, PS=cam2d)
+      do i = 1, nCellsSolve
+         pintdry(1,i) = cam2d(i)
+      end do
+
+      ! Use Hypsometric eqn to set pressure profiles
+      do i = 1, nCellsSolve
+         do k = 2, plevp
+            dz = zint(k,i) - zint(k-1,i)
+            h = rair * t(k-1,i) / gravit   ! scale height of layer k-1
+            pintdry(k,i) = pintdry(k-1,i)*exp(-dz/h)
+            pmiddry(k-1,i) = 0.5_r8*(pintdry(k-1,i) + pintdry(k,i))
+            ! for now assume dry atm
+            pmid(k-1,i) = pmiddry(k-1,i)
+         end do
+      end do
+
+      do i = 1, nCellsSolve
+         do k = 1, plev
+            theta(k,i) = t(k,i) * (1.0e5_r8 / pmid(k,i))**(rair/cpair)
+            rho(k,i) = pmid(k,i) / (rair * t(k,i))
+         end do
+      end do
+
+      rho_zz(:,1:nCellsSolve) = rho(:,1:nCellsSolve) / zz(:,1:nCellsSolve)
+
+      ! Set theta_base and rho_base
+      call set_base_state(dyn_in)
+
+   else
+
+      ! read uperp
+      allocate( mpas3d(plev,nEdgesSolve,1) )
+      call infld('u', fh_ini, 'lev', 'nEdges', 1, plev, 1, nEdgesSolve, 1, 1, &
+                 mpas3d, readvar, gridname='mpas_edge')
+      if (readvar) then
+         uperp(:,:nEdgesSolve) = mpas3d(:,:nEdgesSolve,1)
+      else
+         call endrun(subname//': failed to read u (uperp) from initial file')
+      end if
+      deallocate( mpas3d )
+
+      call cam_mpas_update_halo('u', endrun) ! u is the name of uperp in the MPAS state pool
+
+      ! Reconstruct ux and uy from uperp.
+      ! This is only needed because during CAM's initialization the physics package
+      ! is called before the dycore advances a step.
+      nullify(mesh_pool)
+      nullify(diag_pool)
+      call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', mesh_pool)
+      call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'diag', diag_pool)
+
+      ! The uReconstruct{X,Y,Z} arguments to mpas_reconstruct are required, but these
+      ! field already exist in the diag pool
+      nullify(uReconstructX)
+      nullify(uReconstructY)
+      nullify(uReconstructZ)
+      call mpas_pool_get_array(diag_pool, 'uReconstructX', uReconstructX)
+      call mpas_pool_get_array(diag_pool, 'uReconstructY', uReconstructY)
+      call mpas_pool_get_array(diag_pool, 'uReconstructZ', uReconstructZ)
+
+      call mpas_reconstruct(mesh_pool, uperp, &
+                            uReconstructX, uReconstructY, uReconstructZ, &
+                            ux, uy)
+
+      ! read w
+      allocate( mpas3d(plevp,nCellsSolve,1) )
+      call infld('w', fh_ini, 'ilev', 'nCells', 1, plevp, 1, nCellsSolve, 1, 1, &
+                 mpas3d, readvar, gridname='mpas_cell')
+      if (readvar) then
+         w(:,1:nCellsSolve) = mpas3d(:,:nCellsSolve,1)
+      else
+         call endrun(subname//': failed to read w from initial file')
+      end if
+      deallocate( mpas3d )
+
+      allocate( mpas3d(plev,nCellsSolve,1) )
+
+      ! read theta
+      call infld('theta', fh_ini, 'lev', 'nCells', 1, plev, 1, nCellsSolve, 1, 1, &
+                 mpas3d, readvar, gridname='mpas_cell')
+      if (readvar) then
+         theta(:,1:nCellsSolve) = mpas3d(:,:nCellsSolve,1)
+      else
+         call endrun(subname//': failed to read theta from initial file')
+      end if
+
+      ! read rho
+      call infld('rho', fh_ini, 'lev', 'nCells', 1, plev, 1, nCellsSolve, 1, 1, &
+                 mpas3d, readvar, gridname='mpas_cell')
+      if (readvar) then
+         rho(:,1:nCellsSolve) = mpas3d(:,:nCellsSolve,1)
+      else
+         call endrun(subname//': failed to read rho from initial file')
+      end if
+
+      rho_zz(:,1:nCellsSolve) = rho(:,1:nCellsSolve) / zz(:,1:nCellsSolve)
+
+      ! read theta_base
+      call infld('theta_base', fh_ini, 'lev', 'nCells', 1, plev, 1, nCellsSolve, 1, 1, &
+                 mpas3d, readvar, gridname='mpas_cell')
+      if (readvar) then
+         theta_base(:,1:nCellsSolve) = mpas3d(:,:nCellsSolve,1)
+      else
+         call endrun(subname//': failed to read theta_base from initial file')
+      end if
+
+      ! read rho_base
+      call infld('rho_base', fh_ini, 'lev', 'nCells', 1, plev, 1, nCellsSolve, 1, 1, &
+                 mpas3d, readvar, gridname='mpas_cell')
+      if (readvar) then
+         rho_base(:,1:nCellsSolve) = mpas3d(:,:nCellsSolve,1)
+      else
+         call endrun(subname//': failed to read rho_base from initial file')
+      end if
+
+      deallocate( mpas3d )
+
+   end if
+
+   ! Finish initialization of tracer fields.
+   !
+   ! If analytic ICs are being used, we allow constituents in an initial
+   ! file to overwrite mixing ratios set by the default constituent initialization
+   ! except for the water species.
+
+   allocate( mpas3d(plev,nCellsSolve,1) )
+
+   do mpas_idx = 1, pcnst
+
+      ! The names of the species in the MPAS initial file may be different from the
+      ! names in the CAM constituent module. Also the species order in the MPAS
+      ! tracers array may be different from the order in the CAM constituent array.
+
+      cam_idx = mpas_from_cam_cnst(mpas_idx)
+
+      if (analytic_ic_active() .and. cnst_is_a_water_species(cnst_name(cam_idx))) cycle
+
+      ! The name translation is hardcoded here temporarily...
+      trac_name = cnst_name(cam_idx)
+      if (mpas_idx == 1) trac_name = 'qv'
+
+
+      readvar = .false.
+      if (cnst_read_iv(cam_idx)) then
+
+         ! read constituent from the initial file if present
+         call infld(trac_name, fh_ini, 'lev', 'nCells', 1, plev, 1, nCellsSolve, 1, 1, &
+              mpas3d, readvar, gridname='mpas_cell')
+         if (readvar) then
+            tracers(mpas_idx,:,1:nCellsSolve) = mpas3d(:,:nCellsSolve,1)
+         end if
+      end if
+      if (.not. readvar .and. .not. analytic_ic_active()) then
+         ! default constituent initialization (this was already done if analytic ICs are active)
+         call cnst_init_default(cam_idx, latvals, lonvals, cam3d)
+         do k = 1, plev
+            kk = plev - k + 1
+            do i = 1, nCellsSolve
+               tracers(mpas_idx,kk,i) = cam3d(i,k)
+            end do
+         end do
+
+      end if
+   end do
+
+   deallocate( mpas3d )
+
+   ! Moist potential temperature from dry theta and the water vapor mixing ratio.
+   theta_m(:,1:nCellsSolve) = theta(:,1:nCellsSolve) * (1.0_r8 + Rv_over_Rd * tracers(ixqv,:,1:nCellsSolve))
+
+   ! Update halos for initial state fields
+   ! halo for 'u' updated in both branches of conditional above
+   call cam_mpas_update_halo('w', endrun)
+   call cam_mpas_update_halo('scalars', endrun) ! scalars is the name of tracers in the MPAS state pool
+   call cam_mpas_update_halo('theta_m', endrun)
+   call cam_mpas_update_halo('theta', endrun)
+   call cam_mpas_update_halo('rho_zz', endrun)
+   call cam_mpas_update_halo('rho', endrun)
+   call cam_mpas_update_halo('rho_base', endrun)
+   call cam_mpas_update_halo('theta_base', endrun)
+
+   deallocate(cam2d, cam3d, cam4d, zi, t, pintdry, pmiddry, pmid)
+
+end subroutine read_inidat
+
+!========================================================================================
+
+subroutine get_zsurf_from_topo(fh_topo, zsurf)
+
+   ! Read PHIS from the topo file and convert it to a surface height field.
+
+   ! Arguments
+   type(file_desc_t), pointer :: fh_topo     ! handle of the open topo file
+
+   real(r8), intent(out) :: zsurf(:)         ! surface geometric height [m]
+
+   ! Local variables
+   integer :: zsurf_len
+   real(r8), allocatable :: phis(:,:)
+   logical :: readvar
+
+   character(len=*), parameter :: subname = 'dyn_comp:get_zsurf_from_topo'
+   !--------------------------------------------------------------------------------------
+
+   zsurf_len = size(zsurf)
+   allocate(phis(zsurf_len,1))
+
+   ! read PHIS (surface geopotential); divide by gravit to obtain height
+   call infld('PHIS', fh_topo, 'ncol', 1, zsurf_len, 1, 1, &
+        phis, readvar, gridname='cam_cell')
+   if (readvar) then
+      zsurf = phis(:,1) / gravit
+   else
+      call endrun(subname//': failed to read PHIS from topo file')
+   end if
+
+end subroutine get_zsurf_from_topo
+
+!========================================================================================
+
+subroutine set_base_state(dyn_in)
+
+   use mpas_constants, only : gravity, cp, Rgas, p0
+
+   ! Fill the dycore base-state fields (theta_base, rho_base) from an analytic
+   ! isothermal-atmosphere profile at reference temperature t0b.
+
+   ! Arguments
+   type(dyn_import_t), intent(inout) :: dyn_in
+
+   ! Local variables
+   real(r8), parameter :: t0b = 250.0_r8   ! isothermal reference temperature [K]
+
+   integer :: icol, ilev
+   real(r8), dimension(:,:), pointer :: z_ifc       ! geometric height at layer interfaces
+   real(r8), dimension(:,:), pointer :: zz_metric   ! vertical coordinate metric
+   real(r8), dimension(:,:), pointer :: rho_base
+   real(r8), dimension(:,:), pointer :: theta_base
+   real(r8) :: z_mid                                ! layer midpoint height
+   real(r8) :: p_iso                                ! isothermal hydrostatic pressure at z_mid
+   !--------------------------------------------------------------------------------------
+
+   z_ifc      => dyn_in % zint
+   zz_metric  => dyn_in % zz
+   rho_base   => dyn_in % rho_base
+   theta_base => dyn_in % theta_base
+
+   do icol = 1, dyn_in % nCellsSolve
+      do ilev = 1, dyn_in % nVertLevels
+         ! Midpoint height, the hydrostatic pressure of an isothermal column at
+         ! that height, and the corresponding potential temperature and
+         ! metric-coupled dry density.
+         z_mid = 0.5_r8 * (z_ifc(ilev,icol) + z_ifc(ilev+1,icol))
+         p_iso = p0 * exp(-gravity * z_mid / (Rgas * t0b))
+         theta_base(ilev,icol) = t0b / (p_iso / p0)**(Rgas/cp)
+         rho_base(ilev,icol) = p_iso / ( Rgas * t0b * zz_metric(ilev,icol))
+      end do
+   end do
+
+end subroutine set_base_state
+
+!========================================================================================
+
+subroutine cam_mpas_namelist_read(namelistFilename, configPool)
+
+ ! Read MPAS-A dycore namelists and add the namelists to the MPAS configPool.
+ !
+ ! Only the CAM masterproc actually opens and reads from the specified file. Upon return,
+ ! if no errors were encountered, all MPI ranks have valid namelists in their configPool.
+
+ use spmd_utils, only: mpicom, masterproc, masterprocid, &
+ mpi_integer, mpi_real8, mpi_logical, mpi_character, mpi_success
+ use namelist_utils, only: find_group_name
+
+ use mpas_derived_types, only: mpas_pool_type
+ use mpas_kind_types, only: StrKIND
+ use mpas_pool_routines, only: mpas_pool_add_config
+
+ ! Arguments
+ character(len=*), intent(in) :: namelistFilename
+ type (mpas_pool_type), intent(inout) :: configPool
+
+ ! Local variables
+ integer :: unitNumber
+
+ integer :: ierr, ierr2
+ integer :: mpi_ierr
+
+ character (len=StrKIND) :: mpas_time_integration = 'SRK3'
+ integer :: mpas_time_integration_order = 2
+ real(r8) :: mpas_dt = 720.0_r8
+ logical :: mpas_split_dynamics_transport = .true.
+ integer :: mpas_number_of_sub_steps = 2
+ integer :: mpas_dynamics_split_steps = 3
+ real(r8) :: mpas_h_mom_eddy_visc2 = 0.0_r8
+ real(r8) :: mpas_h_mom_eddy_visc4 = 0.0_r8
+ real(r8) :: mpas_v_mom_eddy_visc2 = 0.0_r8
+ real(r8) :: mpas_h_theta_eddy_visc2 = 0.0_r8
+ real(r8) :: mpas_h_theta_eddy_visc4 = 0.0_r8
+ real(r8) :: mpas_v_theta_eddy_visc2 = 0.0_r8
+ character (len=StrKIND) :: mpas_horiz_mixing = '2d_smagorinsky'
+ real(r8) :: mpas_len_disp = 120000.0_r8
+ real(r8) :: mpas_visc4_2dsmag = 0.05_r8
+ real(r8) :: mpas_del4u_div_factor = 10.0_r8
+ integer :: mpas_w_adv_order = 3
+ integer :: mpas_theta_adv_order = 3
+ integer :: mpas_scalar_adv_order = 3
+ integer :: mpas_u_vadv_order = 3
+ integer :: mpas_w_vadv_order = 3
+ integer :: mpas_theta_vadv_order = 3
+ integer :: mpas_scalar_vadv_order = 3
+ logical :: mpas_scalar_advection = .true.
+ logical :: mpas_positive_definite = .false.
+ logical :: mpas_monotonic = .true.
+ real(r8) :: mpas_coef_3rd_order = 0.25_r8
+ real(r8) :: mpas_smagorinsky_coef = 0.125_r8
+ logical :: mpas_mix_full = .true.
+ real(r8) :: mpas_epssm = 0.1_r8
+ real(r8) :: mpas_smdiv = 0.1_r8
+ real(r8) :: mpas_apvm_upwinding = 0.5_r8
+ logical :: mpas_h_ScaleWithMesh = .true.
+ real(r8) :: mpas_zd = 22000.0_r8
+ real(r8) :: mpas_xnutr = 0.2_r8
+ character (len=StrKIND) :: mpas_block_decomp_file_prefix = 'x1.40962.graph.info.part.'
+ logical :: mpas_do_restart = .false.
+ logical :: mpas_print_global_minmax_vel = .true.
+ logical :: mpas_print_detailed_minmax_vel = .false.
+ logical :: mpas_print_global_minmax_sca = .false.
+
+ namelist /nhyd_model/ &
+ mpas_time_integration, &
+ mpas_time_integration_order, &
+ mpas_dt, &
+ mpas_split_dynamics_transport, &
+ mpas_number_of_sub_steps, &
+ mpas_dynamics_split_steps, &
+ mpas_h_mom_eddy_visc2, &
+ mpas_h_mom_eddy_visc4, &
+ mpas_v_mom_eddy_visc2, &
+ mpas_h_theta_eddy_visc2, &
+ mpas_h_theta_eddy_visc4, &
+ mpas_v_theta_eddy_visc2, &
+ mpas_horiz_mixing, &
+ mpas_len_disp, &
+ mpas_visc4_2dsmag, &
+ mpas_del4u_div_factor, &
+ mpas_w_adv_order, &
+ mpas_theta_adv_order, &
+ mpas_scalar_adv_order, &
+ mpas_u_vadv_order, &
+ mpas_w_vadv_order, &
+ mpas_theta_vadv_order, &
+ mpas_scalar_vadv_order, &
+ mpas_scalar_advection, &
+ mpas_positive_definite, &
+ mpas_monotonic, &
+ mpas_coef_3rd_order, &
+ mpas_smagorinsky_coef, &
+ mpas_mix_full, &
+ mpas_epssm, &
+ mpas_smdiv, &
+ mpas_apvm_upwinding, &
+ mpas_h_ScaleWithMesh
+
+ namelist /damping/ &
+ mpas_zd, &
+ mpas_xnutr
+
+ namelist /decomposition/ &
+ mpas_block_decomp_file_prefix
+
+ namelist /restart/ &
+ mpas_do_restart
+
+ namelist /printout/ &
+ mpas_print_global_minmax_vel, &
+ mpas_print_detailed_minmax_vel, &
+ mpas_print_global_minmax_sca
+
+ ! These configuration parameters must be set in the MPAS configPool, but can't
+ ! be changed in CAM.
+ integer :: config_num_halos = 2
+ integer :: config_number_of_blocks = 0
+ logical :: config_explicit_proc_decomp = .false.
+ character(len=StrKIND) :: config_proc_decomp_file_prefix = 'graph.info.part'
+
+ character(len=*), parameter :: subname = 'dyn_comp::cam_mpas_namelist_read'
+ !----------------------------------------------------------------------------
+
+ if (masterproc) then
+ write(iulog,*) 'Reading MPAS-A dycore namelist from ', trim(namelistFilename)
+ open(newunit=unitNumber, file=trim(namelistFilename), status='old', form='formatted')
+ end if
+
+ ! Read namelist group &nhyd_model
+ if (masterproc) then
+ rewind(unitNumber)
+ call find_group_name(unitNumber, 'nhyd_model', status=ierr)
+ if (ierr == 0) then
+ read(unitNumber, nhyd_model, iostat=ierr2)
+ if (ierr2 /= 0) then
+ call endrun(subname // ':: Failed to read namelist group &nhyd_model')
+ end if
+ else
+ call endrun(subname // ':: Failed to find namelist group &nhyd_model')
+ end if
+ end if
+
+ call mpi_bcast(mpas_time_integration, StrKIND, mpi_character, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_time_integration_order, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_dt, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_split_dynamics_transport, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_number_of_sub_steps, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_dynamics_split_steps, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_h_mom_eddy_visc2, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_h_mom_eddy_visc4, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_v_mom_eddy_visc2, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_h_theta_eddy_visc2, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_h_theta_eddy_visc4, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_v_theta_eddy_visc2, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_horiz_mixing, StrKIND, mpi_character, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_len_disp, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_visc4_2dsmag, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_del4u_div_factor, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_w_adv_order, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_theta_adv_order, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_scalar_adv_order, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_u_vadv_order, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_w_vadv_order, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_theta_vadv_order, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_scalar_vadv_order, 1, mpi_integer, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_scalar_advection, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_positive_definite, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_monotonic, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_coef_3rd_order, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_smagorinsky_coef, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_mix_full, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_epssm, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_smdiv, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_apvm_upwinding, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_h_ScaleWithMesh, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+
+ call mpas_pool_add_config(configPool, 'config_time_integration', mpas_time_integration)
+ call mpas_pool_add_config(configPool, 'config_time_integration_order', mpas_time_integration_order)
+ call mpas_pool_add_config(configPool, 'config_dt', mpas_dt)
+ call mpas_pool_add_config(configPool, 'config_split_dynamics_transport', mpas_split_dynamics_transport)
+ call mpas_pool_add_config(configPool, 'config_number_of_sub_steps', mpas_number_of_sub_steps)
+ call mpas_pool_add_config(configPool, 'config_dynamics_split_steps', mpas_dynamics_split_steps)
+ call mpas_pool_add_config(configPool, 'config_h_mom_eddy_visc2', mpas_h_mom_eddy_visc2)
+ call mpas_pool_add_config(configPool, 'config_h_mom_eddy_visc4', mpas_h_mom_eddy_visc4)
+ call mpas_pool_add_config(configPool, 'config_v_mom_eddy_visc2', mpas_v_mom_eddy_visc2)
+ call mpas_pool_add_config(configPool, 'config_h_theta_eddy_visc2', mpas_h_theta_eddy_visc2)
+ call mpas_pool_add_config(configPool, 'config_h_theta_eddy_visc4', mpas_h_theta_eddy_visc4)
+ call mpas_pool_add_config(configPool, 'config_v_theta_eddy_visc2', mpas_v_theta_eddy_visc2)
+ call mpas_pool_add_config(configPool, 'config_horiz_mixing', mpas_horiz_mixing)
+ call mpas_pool_add_config(configPool, 'config_len_disp', mpas_len_disp)
+ call mpas_pool_add_config(configPool, 'config_visc4_2dsmag', mpas_visc4_2dsmag)
+ call mpas_pool_add_config(configPool, 'config_del4u_div_factor', mpas_del4u_div_factor)
+ call mpas_pool_add_config(configPool, 'config_w_adv_order', mpas_w_adv_order)
+ call mpas_pool_add_config(configPool, 'config_theta_adv_order', mpas_theta_adv_order)
+ call mpas_pool_add_config(configPool, 'config_scalar_adv_order', mpas_scalar_adv_order)
+ call mpas_pool_add_config(configPool, 'config_u_vadv_order', mpas_u_vadv_order)
+ call mpas_pool_add_config(configPool, 'config_w_vadv_order', mpas_w_vadv_order)
+ call mpas_pool_add_config(configPool, 'config_theta_vadv_order', mpas_theta_vadv_order)
+ call mpas_pool_add_config(configPool, 'config_scalar_vadv_order', mpas_scalar_vadv_order)
+ call mpas_pool_add_config(configPool, 'config_scalar_advection', mpas_scalar_advection)
+ call mpas_pool_add_config(configPool, 'config_positive_definite', mpas_positive_definite)
+ call mpas_pool_add_config(configPool, 'config_monotonic', mpas_monotonic)
+ call mpas_pool_add_config(configPool, 'config_coef_3rd_order', mpas_coef_3rd_order)
+ call mpas_pool_add_config(configPool, 'config_smagorinsky_coef', mpas_smagorinsky_coef)
+ call mpas_pool_add_config(configPool, 'config_mix_full', mpas_mix_full)
+ call mpas_pool_add_config(configPool, 'config_epssm', mpas_epssm)
+ call mpas_pool_add_config(configPool, 'config_smdiv', mpas_smdiv)
+ call mpas_pool_add_config(configPool, 'config_apvm_upwinding', mpas_apvm_upwinding)
+ call mpas_pool_add_config(configPool, 'config_h_ScaleWithMesh', mpas_h_ScaleWithMesh)
+
+ ! Read namelist group &damping
+ if (masterproc) then
+ rewind(unitNumber)
+ call find_group_name(unitNumber, 'damping', status=ierr)
+ if (ierr == 0) then
+ read(unitNumber, damping, iostat=ierr2)
+ if (ierr2 /= 0) then
+ call endrun(subname // ':: Failed to read namelist group &damping')
+ end if
+ else
+ call endrun(subname // ':: Failed to find namelist group &damping')
+ end if
+ end if
+
+ call mpi_bcast(mpas_zd, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_xnutr, 1, mpi_real8, masterprocid, mpicom, mpi_ierr)
+
+ call mpas_pool_add_config(configPool, 'config_zd', mpas_zd)
+ call mpas_pool_add_config(configPool, 'config_xnutr', mpas_xnutr)
+
+ ! Read namelist group &decomposition
+ if (masterproc) then
+ rewind(unitNumber)
+ call find_group_name(unitNumber, 'decomposition', status=ierr)
+ if (ierr == 0) then
+ read(unitNumber, decomposition, iostat=ierr2)
+ if (ierr2 /= 0) then
+ call endrun(subname // ':: Failed to read namelist group &decomposition')
+ end if
+ else
+ call endrun(subname // ':: Failed to find namelist group &decomposition')
+ end if
+ end if
+
+ call mpi_bcast(mpas_block_decomp_file_prefix, StrKIND, mpi_character, masterprocid, mpicom, mpi_ierr)
+
+ call mpas_pool_add_config(configPool, 'config_block_decomp_file_prefix', mpas_block_decomp_file_prefix)
+
+ ! Read namelist group &restart
+ if (masterproc) then
+ rewind(unitNumber)
+ call find_group_name(unitNumber, 'restart', status=ierr)
+ if (ierr == 0) then
+ read(unitNumber, restart, iostat=ierr2)
+ if (ierr2 /= 0) then
+ call endrun(subname // ':: Failed to read namelist group &restart')
+ end if
+ else
+ call endrun(subname // ':: Failed to find namelist group &restart')
+ end if
+ end if
+
+ call mpi_bcast(mpas_do_restart, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+
+ ! Set mpas_do_restart based on information from the driver code.
+ if (.not. initial_run) mpas_do_restart = .true.
+
+ call mpas_pool_add_config(configPool, 'config_do_restart', mpas_do_restart)
+
+ ! Read namelist group &printout
+ if (masterproc) then
+ rewind(unitNumber)
+ call find_group_name(unitNumber, 'printout', status=ierr)
+ if (ierr == 0) then
+ read(unitNumber, printout, iostat=ierr2)
+ if (ierr2 /= 0) then
+ call endrun(subname // ':: Failed to read namelist group &printout')
+ end if
+ else
+ call endrun(subname // ':: Failed to find namelist group &printout')
+ end if
+ end if
+
+ call mpi_bcast(mpas_print_global_minmax_vel, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_print_detailed_minmax_vel, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+ call mpi_bcast(mpas_print_global_minmax_sca, 1, mpi_logical, masterprocid, mpicom, mpi_ierr)
+
+ call mpas_pool_add_config(configPool, 'config_print_global_minmax_vel', mpas_print_global_minmax_vel)
+ call mpas_pool_add_config(configPool, 'config_print_detailed_minmax_vel', mpas_print_detailed_minmax_vel)
+ call mpas_pool_add_config(configPool, 'config_print_global_minmax_sca', mpas_print_global_minmax_sca)
+
+ if (masterproc) then
+ close(unit=unitNumber)
+ end if
+
+ ! Set some configuration parameters that cannot be changed by CAM.
+ call mpas_pool_add_config(configPool, 'config_num_halos', config_num_halos)
+ call mpas_pool_add_config(configPool, 'config_number_of_blocks', config_number_of_blocks)
+ call mpas_pool_add_config(configPool, 'config_explicit_proc_decomp', config_explicit_proc_decomp)
+ call mpas_pool_add_config(configPool, 'config_proc_decomp_file_prefix', config_proc_decomp_file_prefix)
+
+
+ if (masterproc) then
+ write(iulog,*) 'MPAS-A dycore configuration:'
+ write(iulog,*) ' mpas_time_integration = ', trim(mpas_time_integration)
+ write(iulog,*) ' mpas_time_integration_order = ', mpas_time_integration_order
+ write(iulog,*) ' mpas_dt = ', mpas_dt
+ write(iulog,*) ' mpas_split_dynamics_transport = ', mpas_split_dynamics_transport
+ write(iulog,*) ' mpas_number_of_sub_steps = ', mpas_number_of_sub_steps
+ write(iulog,*) ' mpas_dynamics_split_steps = ', mpas_dynamics_split_steps
+ write(iulog,*) ' mpas_h_mom_eddy_visc2 = ', mpas_h_mom_eddy_visc2
+ write(iulog,*) ' mpas_h_mom_eddy_visc4 = ', mpas_h_mom_eddy_visc4
+ write(iulog,*) ' mpas_v_mom_eddy_visc2 = ', mpas_v_mom_eddy_visc2
+ write(iulog,*) ' mpas_h_theta_eddy_visc2 = ', mpas_h_theta_eddy_visc2
+ write(iulog,*) ' mpas_h_theta_eddy_visc4 = ', mpas_h_theta_eddy_visc4
+ write(iulog,*) ' mpas_v_theta_eddy_visc2 = ', mpas_v_theta_eddy_visc2
+ write(iulog,*) ' mpas_horiz_mixing = ', trim(mpas_horiz_mixing)
+ write(iulog,*) ' mpas_len_disp = ', mpas_len_disp
+ write(iulog,*) ' mpas_visc4_2dsmag = ', mpas_visc4_2dsmag
+ write(iulog,*) ' mpas_del4u_div_factor = ', mpas_del4u_div_factor
+ write(iulog,*) ' mpas_w_adv_order = ', mpas_w_adv_order
+ write(iulog,*) ' mpas_theta_adv_order = ', mpas_theta_adv_order
+ write(iulog,*) ' mpas_scalar_adv_order = ', mpas_scalar_adv_order
+ write(iulog,*) ' mpas_u_vadv_order = ', mpas_u_vadv_order
+ write(iulog,*) ' mpas_w_vadv_order = ', mpas_w_vadv_order
+ write(iulog,*) ' mpas_theta_vadv_order = ', mpas_theta_vadv_order
+ write(iulog,*) ' mpas_scalar_vadv_order = ', mpas_scalar_vadv_order
+ write(iulog,*) ' mpas_scalar_advection = ', mpas_scalar_advection
+ write(iulog,*) ' mpas_positive_definite = ', mpas_positive_definite
+ write(iulog,*) ' mpas_monotonic = ', mpas_monotonic
+ write(iulog,*) ' mpas_coef_3rd_order = ', mpas_coef_3rd_order
+ write(iulog,*) ' mpas_smagorinsky_coef = ', mpas_smagorinsky_coef
+ write(iulog,*) ' mpas_mix_full = ', mpas_mix_full
+ write(iulog,*) ' mpas_epssm = ', mpas_epssm
+ write(iulog,*) ' mpas_smdiv = ', mpas_smdiv
+ write(iulog,*) ' mpas_apvm_upwinding = ', mpas_apvm_upwinding
+ write(iulog,*) ' mpas_h_ScaleWithMesh = ', mpas_h_ScaleWithMesh
+ write(iulog,*) ' mpas_zd = ', mpas_zd
+ write(iulog,*) ' mpas_xnutr = ', mpas_xnutr
+ write(iulog,*) ' mpas_block_decomp_file_prefix = ', trim(mpas_block_decomp_file_prefix)
+ write(iulog,*) ' mpas_do_restart = ', mpas_do_restart
+ write(iulog,*) ' mpas_print_global_minmax_vel = ', mpas_print_global_minmax_vel
+ write(iulog,*) ' mpas_print_detailed_minmax_vel = ', mpas_print_detailed_minmax_vel
+ write(iulog,*) ' mpas_print_global_minmax_sca = ', mpas_print_global_minmax_sca
+ end if
+
+end subroutine cam_mpas_namelist_read
+
+end module dyn_comp
diff --git a/src/dynamics/mpas/dyn_grid.F90 b/src/dynamics/mpas/dyn_grid.F90
new file mode 100644
index 0000000000..0976c3fbc4
--- /dev/null
+++ b/src/dynamics/mpas/dyn_grid.F90
@@ -0,0 +1,822 @@
+module dyn_grid
+
+!-------------------------------------------------------------------------------
+!
+! Define MPAS computational grids on the dynamics decomposition.
+!
+! Module responsibilities:
+!
+! . Provide the physics/dynamics coupler (in module phys_grid) with data for the
+! physics grid (cell centers) on the dynamics decomposition.
+!
+! . Create CAM grid objects that are used by the I/O functionality to read
+! data from an unstructured grid format to the dynamics data structures, and
+! to write from the dynamics data structures to unstructured grid format. The
+! global column ordering for the unstructured grid is determined by the dycore.
+!
+! The MPAS grid is decomposed into "blocks" which contain the cells that are solved
+! plus a set of halo cells. The dycore assigns one block per task.
+!
+!-------------------------------------------------------------------------------
+
+use shr_kind_mod, only: r8 => shr_kind_r8
+use spmd_utils, only: iam, masterproc, mpicom, npes
+
+use pmgrid, only: plev, plevp
+use physconst, only: pi
+
+use cam_logfile, only: iulog
+use cam_abortutils, only: endrun
+
+use pio, only: file_desc_t, pio_global, pio_get_att
+
+use cam_mpas_subdriver, only: domain_ptr, cam_mpas_init_phase3, cam_mpas_get_global_dims, &
+ cam_mpas_get_global_coords, cam_mpas_get_global_blocks, &
+ cam_mpas_read_static, cam_mpas_compute_unit_vectors
+
+use mpas_pool_routines, only: mpas_pool_get_subpool, mpas_pool_get_dimension, mpas_pool_get_array
+use mpas_derived_types, only: mpas_pool_type
+
+
+implicit none
+private
+save
+
+integer, parameter :: dyn_decomp = 101 ! cell center grid (this parameter is public to provide a dycore
+ ! independent way to identify the physics grid on the dynamics
+ ! decomposition)
+integer, parameter :: cam_cell_decomp = 104 ! same grid decomp as dyn_decomp, but the grid definition
+ ! uses ncol, lat, lon
+integer, parameter :: edge_decomp = 102 ! edge node grid
+integer, parameter :: vertex_decomp = 103 ! vertex node grid
+integer, parameter :: ptimelevels = 2
+
+public :: &
+ dyn_decomp, &
+ ptimelevels, &
+ dyn_grid_init, &
+ get_block_bounds_d, &
+ get_block_gcol_cnt_d, &
+ get_block_gcol_d, &
+ get_block_lvl_cnt_d, &
+ get_block_levels_d, &
+ get_block_owner_d, &
+ get_gcol_block_d, &
+ get_gcol_block_cnt_d, &
+ get_horiz_grid_dim_d, &
+ get_horiz_grid_d, &
+ get_dyn_grid_parm, &
+ get_dyn_grid_parm_real1d, &
+ dyn_grid_get_elem_coords, &
+ dyn_grid_get_colndx, &
+ physgrid_copy_attributes_d
+
+! vertical reference heights (m) in CAM top to bottom order.
+real(r8) :: zw(plevp), zw_mid(plev)
+
+integer :: &
+ maxNCells, & ! maximum number of cells for any task (nCellsSolve <= maxNCells)
+ maxEdges, & ! maximum number of edges per cell
+ nVertLevels ! number of vertical layers (midpoints)
+
+integer, pointer :: &
+ nCellsSolve, & ! number of cells that a task solves
+ nEdgesSolve, & ! number of edges (velocity) that a task solves
+ nVerticesSolve, & ! number of vertices (vorticity) that a task solves
+ nVertLevelsSolve
+
+real(r8), parameter :: rad2deg=180.0_r8/pi ! convert radians to degrees
+
+! sphere_radius is a global attribute in the MPAS initial file. It is needed to
+! normalize the cell areas to a unit sphere.
+real(r8) :: sphere_radius
+
+! global grid data
+
+integer :: &
+ nCells_g, & ! global number of cells/columns
+ nEdges_g, & ! global number of edges
+ nVertices_g ! global number of vertices
+
+integer, allocatable :: col_indices_in_block(:,:) ! global column indices in each block
+integer, allocatable :: num_col_per_block(:) ! number of columns in each block
+integer, allocatable :: global_blockid(:) ! block id for each global column
+integer, allocatable :: local_col_index(:) ! local column index (in block) for each global column
+
+real(r8), dimension(:), pointer :: lonCell_g ! global cell longitudes
+real(r8), dimension(:), pointer :: latCell_g ! global cell latitudes
+real(r8), dimension(:), pointer :: areaCell_g ! global cell areas
+
+!=========================================================================================
+contains
+!=========================================================================================
+
+subroutine dyn_grid_init()
+
+   ! Initialize grids on the dynamics decomposition and create associated
+   ! grid objects for use by I/O utilities.  The current physics/dynamics
+   ! coupling code requires constructing global fields for the cell center
+   ! grid which is used by the physics parameterizations.
+
+   use ref_pres,            only: ref_pres_init
+   use std_atm_profile,     only: std_atm_pres
+   use cam_initfiles,       only: initial_file_get_id
+   use cam_history_support, only: add_vert_coord
+   use constituents,        only: pcnst
+   use pio,                 only: pio_noerr
+
+   type(file_desc_t), pointer :: fh_ini
+
+   integer  :: k, ierr
+   integer  :: num_pr_lev         ! number of top levels using pure pressure representation
+   real(r8) :: pref_edge(plevp)   ! reference pressure at layer edges (Pa)
+   real(r8) :: pref_mid(plev)     ! reference pressure at layer midpoints (Pa)
+
+   character(len=*), parameter :: subname = 'dyn_grid::dyn_grid_init'
+   !----------------------------------------------------------------------------
+
+   ! Get filehandle for initial file
+   fh_ini => initial_file_get_id()
+
+   ! MPAS-A always requires at least one scalar (qv). CAM has the same requirement
+   ! and it is enforced by the configure script which sets the cpp macro PCNST.
+   call cam_mpas_init_phase3(fh_ini, pcnst, endrun)
+
+   ! Read or compute all time-invariant fields for the MPAS-A dycore.
+   ! Time-invariant fields are stored in the MPAS mesh pool.  This call
+   ! also sets the module data zw and zw_mid.
+   call setup_time_invariant(fh_ini)
+
+   ! Read the global sphere_radius attribute.  It is needed to normalize the
+   ! cell areas to a unit sphere.  Abort rather than continue with an
+   ! undefined radius if the attribute cannot be read from the initial file.
+   ierr = pio_get_att(fh_ini, pio_global, 'sphere_radius', sphere_radius)
+   if (ierr /= pio_noerr) then
+      call endrun(subname//': failed to read sphere_radius attribute from initial file')
+   end if
+
+   ! Compute reference pressures from reference heights.
+   call std_atm_pres(zw, pref_edge)
+   pref_mid = (pref_edge(1:plev) + pref_edge(2:plevp)) * 0.5_r8
+
+   num_pr_lev = 0
+   call ref_pres_init(pref_edge, pref_mid, num_pr_lev)
+
+   ! Vertical coordinates for output streams
+   call add_vert_coord('lev', plev, &
+        'zeta level at vertical midpoints', 'm', zw_mid)
+   call add_vert_coord('ilev', plevp, &
+        'zeta level at vertical interfaces', 'm', zw)
+
+   if (masterproc) then
+      write(iulog,'(a)')' Reference Layer Locations: '
+      write(iulog,'(a)')' index height (m) pressure (hPa) '
+      do k = 1, plev
+         write(iulog,9830) k, zw(k), pref_edge(k)/100._r8
+         write(iulog,9840) zw_mid(k), pref_mid(k)/100._r8
+      end do
+      write(iulog,9830) plevp, zw(plevp), pref_edge(plevp)/100._r8
+
+9830 format(1x, i3, f15.4, 9x, f15.4)
+9840 format(1x, 3x, 12x, f15.4, 9x, f15.4)
+   end if
+
+   ! Query global grid dimensions from MPAS
+   call cam_mpas_get_global_dims(nCells_g, nEdges_g, nVertices_g, maxEdges, nVertLevels, maxNCells)
+
+   ! Temporary global arrays needed by phys_grid_init
+   allocate(lonCell_g(nCells_g))
+   allocate(latCell_g(nCells_g))
+   allocate(areaCell_g(nCells_g))
+   call cam_mpas_get_global_coords(latCell_g, lonCell_g, areaCell_g)
+
+   allocate(num_col_per_block(npes))
+   allocate(col_indices_in_block(maxNCells,npes))
+   allocate(global_blockid(nCells_g))
+   allocate(local_col_index(nCells_g))
+   call cam_mpas_get_global_blocks(num_col_per_block, col_indices_in_block, global_blockid, local_col_index)
+
+   ! Define the dynamics grids on the dynamics decomposition.  The cell
+   ! centered grid is used by the physics parameterizations.  The physics
+   ! decomposition of the cell centered grid is defined in phys_grid_init.
+   call define_cam_grids()
+
+end subroutine dyn_grid_init
+
+!=========================================================================================
+
+subroutine get_block_bounds_d(block_first, block_last)
+
+   ! Return the first and last indices used in the global block ordering.
+   ! Indexing is 1-based, and MPAS assigns exactly one block to each MPI
+   ! task, so the block indices run from 1 to the number of tasks.
+
+   integer, intent(out) :: block_first ! first global block index
+   integer, intent(out) :: block_last  ! last global block index
+   !----------------------------------------------------------------------------
+
+   block_first = 1
+   block_last  = npes
+
+end subroutine get_block_bounds_d
+
+!=========================================================================================
+
+integer function get_block_gcol_cnt_d(blockid)
+
+   ! Return the number of dynamics columns in the block with the specified
+   ! global block ID.  The blockid can be for a block owned by any MPI
+   ! task: num_col_per_block is a module array holding the column count of
+   ! every global block, so no communication is needed here.
+
+   integer, intent(in) :: blockid   ! global block ID (1-based; one block per task)
+   !----------------------------------------------------------------------------
+
+   get_block_gcol_cnt_d = num_col_per_block(blockid)
+
+end function get_block_gcol_cnt_d
+
+!=========================================================================================
+
+subroutine get_block_gcol_d(blockid, asize, cdex)
+
+   ! Return the list of global dynamics column indices belonging to the
+   ! block with the specified global block ID.  The blockid may refer to a
+   ! block owned by any MPI task.  Trailing unused entries of cdex are
+   ! zero-filled.
+
+   integer, intent(in)  :: blockid      ! global block ID
+   integer, intent(in)  :: asize        ! size of the output array
+   integer, intent(out) :: cdex(asize)  ! global column indices
+
+   integer :: ncol
+
+   character(len=*), parameter :: subname = 'dyn_grid::get_block_gcol_d'
+   !----------------------------------------------------------------------------
+
+   ncol = num_col_per_block(blockid)
+
+   if (asize < ncol) then
+      write(iulog,*) subname//': array size too small: asize, num_col_per_block=', &
+                     asize, ncol
+      call endrun(subname//': array size too small')
+   end if
+
+   ! Copy this block's column indices; zero any leftover space in the caller's array.
+   cdex(1:ncol)       = col_indices_in_block(1:ncol, blockid)
+   cdex(ncol+1:asize) = 0
+
+end subroutine get_block_gcol_d
+
+!=========================================================================================
+
+integer function get_block_lvl_cnt_d(blockid, bcid)
+
+   ! Returns the number of levels in the specified column of the specified block.
+   ! If column includes surface fields, then it is defined to also
+   ! include level 0.
+
+   integer, intent(in) :: blockid ! global block id (unused: all blocks have the same levels)
+   integer, intent(in) :: bcid    ! column index within block (unused for the same reason)
+   !----------------------------------------------------------------------------
+
+   ! All blocks have the same number of levels: plevp = plev + 1, counting
+   ! the interface levels 0..plev (see get_block_levels_d).
+   get_block_lvl_cnt_d = plevp
+
+end function get_block_lvl_cnt_d
+
+!=========================================================================================
+
+subroutine get_block_levels_d(blockid, bcid, lvlsiz, levels)
+
+   ! Return the level indices (0..plev) for the given column of the given
+   ! global block.  With the MPAS decomposition every column of every block
+   ! carries the complete vertical grid, so blockid and bcid do not affect
+   ! the result.  Trailing unused entries of levels are set to -1.
+
+   integer, intent(in)  :: blockid         ! global block id
+   integer, intent(in)  :: bcid            ! column index within block
+   integer, intent(in)  :: lvlsiz          ! dimension of levels array
+
+   integer, intent(out) :: levels(lvlsiz)  ! level indices for block
+
+   integer :: k
+   character(len=128) :: errmsg
+
+   character(len=*), parameter :: subname = 'dyn_grid::get_block_levels_d'
+   !----------------------------------------------------------------------------
+
+   ! Guard clause: endrun aborts, so no else branch is needed.
+   if (lvlsiz < plev + 1) then
+      write(errmsg,*) ': levels array not large enough (', lvlsiz,' < ',plev + 1,')'
+      call endrun( subname // trim(errmsg) )
+   end if
+
+   ! Interface levels 0..plev, then -1 padding if the caller's array is larger.
+   levels(1:plev+1)      = [(k, k = 0, plev)]
+   levels(plev+2:lvlsiz) = -1
+
+end subroutine get_block_levels_d
+
+!=========================================================================================
+
+integer function get_gcol_block_cnt_d(gcol)
+
+   ! Return number of blocks containing data for the vertical column
+   ! with the specified global column index.
+
+   integer, intent(in) :: gcol ! global column index (unused: the answer is the same for all columns)
+   !----------------------------------------------------------------------------
+
+   ! Each global column is solved in just one block.  The blocks where that
+   ! column may appear as a halo cell are not counted.
+   get_gcol_block_cnt_d = 1
+
+end function get_gcol_block_cnt_d
+
+!=========================================================================================
+
+subroutine get_gcol_block_d(gcol, cnt, blockid, bcid)
+
+   ! For a global column index, return the global block index and the local
+   ! column index within that block.  May be called for columns that are not
+   ! owned by the calling task.  Each column is solved in exactly one block,
+   ! so only the first entry of each output array is meaningful; any
+   ! remaining entries are set to -1.
+
+   integer, intent(in) :: gcol ! global column index
+   integer, intent(in) :: cnt ! size of blockid and bcid arrays
+
+   integer, intent(out) :: blockid(cnt) ! block index
+   integer, intent(out) :: bcid(cnt) ! column index within block
+
+   character(len=*), parameter :: subname = 'dyn_grid::get_gcol_block_d'
+   !----------------------------------------------------------------------------
+
+   if ( cnt < 1 ) then
+      write(iulog,*) subname//': arrays not large enough: cnt= ', cnt
+      call endrun( subname // ': arrays not large enough' )
+   end if
+
+   blockid(1) = global_blockid(gcol)
+   bcid(1)    = local_col_index(gcol)
+
+   ! Entries beyond the first are unused.
+   blockid(2:cnt) = -1
+   bcid(2:cnt)    = -1
+
+end subroutine get_gcol_block_d
+
+!=========================================================================================
+
+integer function get_block_owner_d(blockid)
+
+   ! Return the ID of the task that owns the indicated global block.
+   ! Assume that task IDs are 0-based as in MPI.
+
+   integer, intent(in) :: blockid ! global block id (1-based)
+   !----------------------------------------------------------------------------
+
+   ! MPAS assigns one block per task, so the mapping is a simple shift
+   ! from 1-based block IDs to 0-based MPI ranks.
+   get_block_owner_d = (blockid - 1)
+
+end function get_block_owner_d
+
+!=========================================================================================
+
+subroutine get_horiz_grid_dim_d(hdim1_d, hdim2_d)
+
+   ! Return the declared horizontal dimensions of the global grid.  The
+   ! MPAS grid is unstructured (not lon/lat), so it is declared
+   ! one-dimensional: (nCells_g, 1).
+
+   integer, intent(out)           :: hdim1_d ! first horizontal dimension
+   integer, intent(out), optional :: hdim2_d ! second horizontal dimension
+   !----------------------------------------------------------------------------
+
+   hdim1_d = nCells_g
+
+   if (present(hdim2_d)) then
+      hdim2_d = 1
+   end if
+
+end subroutine get_horiz_grid_dim_d
+
+!=========================================================================================
+
+subroutine get_horiz_grid_d(nxy, clat_d_out, clon_d_out, area_d_out, &
+                            wght_d_out, lat_d_out, lon_d_out)
+
+   ! Return global arrays of cell-center latitude and longitude (in radians
+   ! and in degrees), column surface area normalized to the unit sphere
+   ! (radians^2, summing to 4*pi), and surface integration weights
+   ! (identical to the normalized areas) for the physics grid.
+
+   integer, intent(in) :: nxy ! array sizes
+
+   real(r8), intent(out), optional         :: clat_d_out(:) ! column latitudes (radians)
+   real(r8), intent(out), optional         :: clon_d_out(:) ! column longitudes (radians)
+   real(r8), intent(out), target, optional :: area_d_out(:) ! sum to 4*pi (radians^2)
+   real(r8), intent(out), target, optional :: wght_d_out(:) ! normalized to sum to 4*pi
+   real(r8), intent(out), optional         :: lat_d_out(:)  ! column latitudes (degrees)
+   real(r8), intent(out), optional         :: lon_d_out(:)  ! column longitudes (degrees)
+
+   character(len=*), parameter :: subname = 'dyn_grid::get_horiz_grid_d'
+   !----------------------------------------------------------------------------
+
+   if ( nxy /= nCells_g ) then
+      write(iulog,*) subname//': incorrect number of cells: nxy, nCells_g= ', &
+                     nxy, nCells_g
+      call endrun(subname//': incorrect number of cells')
+   end if
+
+   if (present(clat_d_out)) clat_d_out(:) = latCell_g(:)
+   if (present(clon_d_out)) clon_d_out(:) = lonCell_g(:)
+
+   ! Cell areas from the initial file are on a sphere of radius
+   ! sphere_radius; dividing by radius**2 normalizes them to the unit sphere.
+   if (present(area_d_out)) area_d_out(:) = areaCell_g(:) / (sphere_radius**2)
+   if (present(wght_d_out)) wght_d_out(:) = areaCell_g(:) / (sphere_radius**2)
+
+   if (present(lat_d_out)) lat_d_out(:) = latCell_g(:) * rad2deg
+   if (present(lon_d_out)) lon_d_out(:) = lonCell_g(:) * rad2deg
+
+end subroutine get_horiz_grid_d
+
+!=========================================================================================
+
+subroutine physgrid_copy_attributes_d(gridname, grid_attribute_names)
+
+   ! Create list of attributes for the physics grid that should be copied
+   ! from the corresponding grid object on the dynamics decomposition.
+   ! For MPAS that list is empty: the physics grid copies nothing.
+
+   use cam_grid_support, only: max_hcoordname_len
+
+   character(len=max_hcoordname_len), intent(out) :: gridname                  ! name of the dynamics grid to copy from
+   character(len=max_hcoordname_len), pointer, intent(out) :: grid_attribute_names(:) ! attributes to copy (allocated here)
+   !----------------------------------------------------------------------------
+
+
+   ! Do not let the physics grid copy the mpas_cell "area" attribute because
+   ! it is using a different dimension name, hence the zero-length list.
+   gridname = 'mpas_cell'
+   allocate(grid_attribute_names(0))
+
+end subroutine physgrid_copy_attributes_d
+
+!=========================================================================================
+
+function get_dyn_grid_parm_real1d(name) result(rval)
+
+   ! Dummy interface retained only to satisfy link-time references from
+   ! mo_synoz.F90 and phys_gmean.F90; it is not used for unstructured
+   ! grids.  Known-but-unsupported names abort; any other name returns a
+   ! null pointer.
+
+   character(len=*), intent(in) :: name
+   real(r8), pointer :: rval(:)
+
+   character(len=*), parameter :: subname = 'dyn_grid::get_dyn_grid_parm_real1d'
+   !----------------------------------------------------------------------------
+
+   select case (name)
+   case ('w')
+      call endrun(subname//': w not defined')
+   case ('clat')
+      call endrun(subname//': clat not supported, use get_horiz_grid_d')
+   case ('latdeg')
+      call endrun(subname//': latdeg not defined')
+   case default
+      nullify(rval)
+   end select
+
+end function get_dyn_grid_parm_real1d
+
+!=========================================================================================
+
+integer function get_dyn_grid_parm(name) result(ival)
+
+   ! This function is in the process of being deprecated, but is still needed
+   ! as a dummy interface to satisfy external references from some chemistry routines.
+
+   character(len=*), intent(in) :: name
+   !----------------------------------------------------------------------------
+
+   if (name == 'plat') then
+      ival = 1   ! unstructured grid: no meaningful latitude dimension
+   else if (name == 'plon') then
+      ival = nCells_g   ! total number of global columns
+   else if(name == 'plev') then
+      ival = plev
+   else
+      ival = -1   ! unrecognized name
+   end if
+
+end function get_dyn_grid_parm
+
+!=========================================================================================
+
+subroutine dyn_grid_get_colndx(igcol, ncols, owners, col, lbk )
+
+   ! For each global column index return the owning task. If the column is owned
+   ! by this task, then also return the local block number and column index in that
+   ! block.
+
+   integer, intent(in) :: ncols
+   integer, intent(in) :: igcol(ncols)
+   integer, intent(out) :: owners(ncols)
+   integer, intent(out) :: col(ncols)
+   integer, intent(out) :: lbk(ncols)
+
+   integer :: i
+   integer :: blockid(1), bcid(1)
+   !----------------------------------------------------------------------------
+
+   do i = 1,ncols
+
+      call get_gcol_block_d(igcol(i), 1, blockid, bcid)
+      owners(i) = get_block_owner_d(blockid(1))
+
+      if ( iam==owners(i) ) then
+         lbk(i) = 1 ! only 1 block per task
+         col(i) = bcid(1)
+      else
+         lbk(i) = -1   ! column not owned by this task
+         col(i) = -1
+      end if
+
+   end do
+
+end subroutine dyn_grid_get_colndx
+
+!=========================================================================================
+
+subroutine dyn_grid_get_elem_coords(ie, rlon, rlat, cdex )
+
+   ! Returns the latitude and longitude coordinates, as well as global IDs,
+   ! for the columns in a block.
+
+   integer, intent(in) :: ie ! block index
+
+   real(r8),optional, intent(out) :: rlon(:) ! longitudes of the columns in the block
+   real(r8),optional, intent(out) :: rlat(:) ! latitudes of the columns in the block
+   integer, optional, intent(out) :: cdex(:) ! global column index
+
+   character(len=*), parameter :: subname = 'dyn_grid::dyn_grid_get_elem_coords'
+   !----------------------------------------------------------------------------
+
+   ! This routine is called for history output when local time averaging is requested
+   ! for a field on a dynamics decomposition. The code in hbuf_accum_addlcltime appears
+   ! also to assume that the field is on the physics grid, since there is no argument
+   ! passed to specify which dynamics grid the coordinates are for.
+
+   call endrun(subname//': not implemented for the MPAS grids')
+
+end subroutine dyn_grid_get_elem_coords
+
+!=========================================================================================
+! Private routines.
+!=========================================================================================
+
+subroutine setup_time_invariant(fh_ini)
+
+   ! Initialize all time-invariant fields needed by the MPAS-Atmosphere dycore,
+   ! by reading these fields from the initial file.
+
+   use mpas_rbf_interpolation, only : mpas_rbf_interp_initialize
+   use mpas_vector_reconstruction, only : mpas_init_reconstruct
+
+   ! Arguments
+   type(file_desc_t), pointer :: fh_ini
+
+   ! Local variables
+   type(mpas_pool_type), pointer :: meshPool
+   real(r8), pointer :: rdzw(:)
+   real(r8), allocatable :: dzw(:)
+
+   integer :: k, kk
+
+   character(len=*), parameter :: routine = 'dyn_grid::setup_time_invariant'
+   !----------------------------------------------------------------------------
+
+   ! Read time-invariant fields
+   call cam_mpas_read_static(fh_ini, endrun)
+
+   ! Compute unit vectors giving the local north and east directions as well as
+   ! the unit normal vector for edges
+   call cam_mpas_compute_unit_vectors()
+
+   ! Access dimensions that are made public via this module
+   call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', meshPool)
+   call mpas_pool_get_dimension(meshPool, 'nCellsSolve', nCellsSolve)
+   call mpas_pool_get_dimension(meshPool, 'nEdgesSolve', nEdgesSolve)
+   call mpas_pool_get_dimension(meshPool, 'nVerticesSolve', nVerticesSolve)
+   call mpas_pool_get_dimension(meshPool, 'nVertLevels', nVertLevelsSolve) ! MPAS always solves over the full column
+
+   ! check that number of vertical layers matches MPAS grid data
+   if (plev /= nVertLevelsSolve) then
+      write(iulog,*) routine//': ERROR: number of levels in IC file does not match plev: file, plev=', &
+                     nVertLevelsSolve, plev
+      call endrun(routine//': ERROR: number of levels in IC file does not match plev.')
+   end if
+
+   ! Initialize fields needed for reconstruction of cell-centered winds from edge-normal winds
+   ! Note: This same pair of calls happens a second time later in the initialization of
+   ! the MPAS-A dycore (in atm_mpas_init_block), but the redundant calls do no harm
+   call mpas_rbf_interp_initialize(meshPool)
+   call mpas_init_reconstruct(meshPool)
+
+   ! Compute the zeta coordinate at layer interfaces and midpoints. Store
+   ! in arrays using CAM vertical index order (top to bottom of atm) for use
+   ! in CAM coordinate objects.
+   call mpas_pool_get_array(meshPool, 'rdzw', rdzw)
+
+   allocate(dzw(plev))
+   dzw = 1._r8 / rdzw   ! layer thicknesses from MPAS inverse thicknesses
+   zw(plev+1) = 0._r8   ! zeta is zero at the surface (bottom interface in CAM index order)
+   do k = plev, 1, -1
+      kk = plev - k + 1   ! flip vertical index: MPAS is bottom-to-top, CAM is top-to-bottom
+      zw(k) = zw(k+1) + dzw(kk)
+      zw_mid(k) = 0.5_r8 * (zw(k+1) + zw(k))
+   end do
+
+   deallocate(dzw)
+
+end subroutine setup_time_invariant
+
+!=========================================================================================
+
+subroutine define_cam_grids()
+
+   ! Define the dynamics grids on the dynamics decomposition. The 'physics'
+   ! grid contains the same nodes as the dynamics cell center grid, but is
+   ! on the physics decomposition and is defined in phys_grid_init.
+   !
+   ! Note that there are two versions of cell center grid defined here.
+   ! The 'mpas_cell' grid uses 'nCells' rather than 'ncol' as the dimension
+   ! name and 'latCell', 'lonCell' rather than 'lat' and 'lon' as the
+   ! coordinate names. This allows us to read the same initial file that
+   ! is used by the standalone MPAS-A model. The second cell center grid
+   ! is called 'cam_cell' and uses the standard CAM names: ncol, lat, and
+   ! lon. This grid allows us to read the PHIS field from the CAM topo
+   ! file. There is just a single version of the grids to read data on the
+   ! cell edge and vertex locations. These are used to read data from the
+   ! initial file and to write data from the dynamics decomposition to the
+   ! CAM history file.
+
+   use cam_grid_support, only: horiz_coord_t, horiz_coord_create, iMap
+   use cam_grid_support, only: cam_grid_register, cam_grid_attribute_register
+
+   ! Local variables
+   integer :: i, j
+
+   type(horiz_coord_t), pointer :: lat_coord
+   type(horiz_coord_t), pointer :: lon_coord
+   integer(iMap), allocatable :: gidx(:) ! global indices
+   integer(iMap), pointer :: grid_map(:,:)
+
+   type(mpas_pool_type), pointer :: meshPool
+
+   integer, dimension(:), pointer :: indexToCellID ! global indices of cell centers
+   real(r8), dimension(:), pointer :: latCell ! cell center latitude (radians)
+   real(r8), dimension(:), pointer :: lonCell ! cell center longitude (radians)
+   real(r8), dimension(:), pointer :: areaCell ! cell areas in m^2
+
+   integer, dimension(:), pointer :: indexToEdgeID ! global indices of edge nodes
+   real(r8), dimension(:), pointer :: latEdge ! edge node latitude (radians)
+   real(r8), dimension(:), pointer :: lonEdge ! edge node longitude (radians)
+
+   integer, dimension(:), pointer :: indexToVertexID ! global indices of vertex nodes
+   real(r8), dimension(:), pointer :: latVertex ! vertex node latitude (radians)
+   real(r8), dimension(:), pointer :: lonVertex ! vertex node longitude (radians)
+   !----------------------------------------------------------------------------
+
+   call mpas_pool_get_subpool(domain_ptr % blocklist % structs, 'mesh', meshPool)
+
+   !-------------------------------------------------------------!
+   ! Construct coordinate and grid objects for cell center grid. !
+   !-------------------------------------------------------------!
+
+   call mpas_pool_get_array(meshPool, 'indexToCellID', indexToCellID)
+   call mpas_pool_get_array(meshPool, 'latCell', latCell)
+   call mpas_pool_get_array(meshPool, 'lonCell', lonCell)
+   call mpas_pool_get_array(meshPool, 'areaCell', areaCell)
+
+   allocate(gidx(nCellsSolve))
+   gidx = indexToCellID(1:nCellsSolve)
+
+   lat_coord => horiz_coord_create('latCell', 'nCells', nCells_g, 'latitude', &
+        'degrees_north', 1, nCellsSolve, latCell(1:nCellsSolve)*rad2deg, map=gidx)
+   lon_coord => horiz_coord_create('lonCell', 'nCells', nCells_g, 'longitude', &
+        'degrees_east', 1, nCellsSolve, lonCell(1:nCellsSolve)*rad2deg, map=gidx)
+
+   ! Map for cell centers grid
+   allocate(grid_map(3, nCellsSolve))
+   do i = 1, nCellsSolve
+      grid_map(1, i) = i
+      grid_map(2, i) = 1   ! only 1 block per task
+      grid_map(3, i) = gidx(i)
+   end do
+
+   ! cell center grid for I/O using MPAS names
+   call cam_grid_register('mpas_cell', dyn_decomp, lat_coord, lon_coord, &
+        grid_map, block_indexed=.false., unstruct=.true.)
+
+   ! create new coordinates and grid using CAM names
+   lat_coord => horiz_coord_create('lat', 'ncol', nCells_g, 'latitude', &
+        'degrees_north', 1, nCellsSolve, latCell(1:nCellsSolve)*rad2deg, map=gidx)
+   lon_coord => horiz_coord_create('lon', 'ncol', nCells_g, 'longitude', &
+        'degrees_east', 1, nCellsSolve, lonCell(1:nCellsSolve)*rad2deg, map=gidx)
+   call cam_grid_register('cam_cell', cam_cell_decomp, lat_coord, lon_coord, &
+        grid_map, block_indexed=.false., unstruct=.true.)
+
+   ! gidx can be deallocated. Values are copied into the coordinate and attribute objects.
+   deallocate(gidx)
+
+   ! grid_map memory cannot be deallocated. The cam_filemap_t object just points
+   ! to it. Pointer can be disassociated.
+   nullify(grid_map) ! Map belongs to grid now
+
+   ! pointers to coordinate objects can be nullified. Memory is now pointed to by the
+   ! grid object.
+   nullify(lat_coord)
+   nullify(lon_coord)
+
+   !-----------------------------------------------------------!
+   ! Construct coordinate and grid objects for edge node grid. !
+   !-----------------------------------------------------------!
+
+   call mpas_pool_get_array(meshPool, 'indexToEdgeID', indexToEdgeID)
+   call mpas_pool_get_array(meshPool, 'latEdge', latEdge)
+   call mpas_pool_get_array(meshPool, 'lonEdge', lonEdge)
+
+   allocate(gidx(nEdgesSolve))
+   gidx = indexToEdgeID(1:nEdgesSolve)
+
+   lat_coord => horiz_coord_create('latEdge', 'nEdges', nEdges_g, 'latitude', &
+        'degrees_north', 1, nEdgesSolve, latEdge(1:nEdgesSolve)*rad2deg, map=gidx)
+   lon_coord => horiz_coord_create('lonEdge', 'nEdges', nEdges_g, 'longitude', &
+        'degrees_east', 1, nEdgesSolve, lonEdge(1:nEdgesSolve)*rad2deg, map=gidx)
+
+   ! Map for edge node grid
+   allocate(grid_map(3, nEdgesSolve))
+   do i = 1, nEdgesSolve
+      grid_map(1, i) = i
+      grid_map(2, i) = 1
+      grid_map(3, i) = gidx(i)
+   end do
+
+   ! Edge node grid object
+   call cam_grid_register('mpas_edge', edge_decomp, lat_coord, lon_coord, &
+        grid_map, block_indexed=.false., unstruct=.true.)
+
+   deallocate(gidx)
+   nullify(grid_map)
+   nullify(lat_coord)
+   nullify(lon_coord)
+
+   !-------------------------------------------------------------!
+   ! Construct coordinate and grid objects for vertex node grid. !
+   !-------------------------------------------------------------!
+
+   call mpas_pool_get_array(meshPool, 'indexToVertexID', indexToVertexID)
+   call mpas_pool_get_array(meshPool, 'latVertex', latVertex)
+   call mpas_pool_get_array(meshPool, 'lonVertex', lonVertex)
+
+   allocate(gidx(nVerticesSolve))
+   gidx = indexToVertexID(1:nVerticesSolve)
+
+   lat_coord => horiz_coord_create('latVertex', 'nVertices', nVertices_g, 'latitude', &
+        'degrees_north', 1, nVerticesSolve, latVertex(1:nVerticesSolve)*rad2deg, map=gidx)
+   lon_coord => horiz_coord_create('lonVertex', 'nVertices', nVertices_g, 'longitude', &
+        'degrees_east', 1, nVerticesSolve, lonVertex(1:nVerticesSolve)*rad2deg, map=gidx)
+
+   ! Map for vertex node grid
+   allocate(grid_map(3, nVerticesSolve))
+   do i = 1, nVerticesSolve
+      grid_map(1, i) = i
+      grid_map(2, i) = 1
+      grid_map(3, i) = gidx(i)
+   end do
+
+   ! Vertex node grid object
+   call cam_grid_register('mpas_vertex', vertex_decomp, lat_coord, lon_coord, &
+        grid_map, block_indexed=.false., unstruct=.true.)
+
+   deallocate(gidx)
+   nullify(grid_map)
+   nullify(lat_coord)
+   nullify(lon_coord)
+
+end subroutine define_cam_grids
+
+end module dyn_grid
diff --git a/src/dynamics/mpas/interp_mod.F90 b/src/dynamics/mpas/interp_mod.F90
new file mode 100644
index 0000000000..f22883ee4f
--- /dev/null
+++ b/src/dynamics/mpas/interp_mod.F90
@@ -0,0 +1,91 @@
+module interp_mod
+
+! This is a stub module. Online interpolation is not currently available.
+
+use shr_kind_mod, only: r8=>shr_kind_r8
+
+use cam_history_support, only: interp_info_t
+
+use pio, only: file_desc_t, var_desc_t
+
+use cam_logfile, only: iulog
+use cam_abortutils, only: endrun
+
+implicit none
+private
+save
+
+public :: setup_history_interpolation
+public :: set_interp_hfile
+public :: write_interpolated
+
+interface write_interpolated
+   module procedure write_interpolated_scalar
+   module procedure write_interpolated_vector
+end interface write_interpolated
+
+integer, parameter :: nlat=0, nlon=0   ! placeholder sizes; no interpolation grid exists
+
+!=========================================================================================
+contains
+!=========================================================================================
+
+subroutine setup_history_interpolation(interp_ok, mtapes, interp_output, interp_info)
+
+   logical, intent(inout) :: interp_ok
+   integer, intent(in) :: mtapes
+   logical, intent(in) :: interp_output(:)
+   type(interp_info_t), intent(inout) :: interp_info(:)
+   !----------------------------------------------------------------------------
+
+   interp_ok = .false.   ! tell the history code that online interpolation is unavailable
+
+   write (iulog,*) 'INFO - setup_history_interpolation is a no-op'
+
+end subroutine setup_history_interpolation
+
+!=========================================================================================
+
+subroutine set_interp_hfile(hfilenum, interp_info)
+
+   ! arguments
+   integer, intent(in) :: hfilenum
+   type(interp_info_t), intent(inout) :: interp_info(:)
+   !----------------------------------------------------------------------------
+
+
+end subroutine set_interp_hfile
+
+!=========================================================================================
+
+subroutine write_interpolated_scalar(File, varid, fld, numlev, data_type, decomp_type)
+
+   type(file_desc_t), intent(inout) :: File
+   type(var_desc_t), intent(inout) :: varid
+   real(r8), intent(in) :: fld(:,:,:)
+   integer, intent(in) :: numlev, data_type, decomp_type
+   !----------------------------------------------------------------------------
+
+
+   call endrun('FATAL - write_interpolated_scalar is a stub, you shouldnt get here')
+
+end subroutine write_interpolated_scalar
+
+!=========================================================================================
+
+subroutine write_interpolated_vector(File, varidu, varidv, fldu, fldv, &
+   numlev, data_type, decomp_type)
+
+   type(file_desc_t), intent(inout) :: File
+   type(var_desc_t), intent(inout) :: varidu, varidv
+   real(r8), intent(in) :: fldu(:,:,:), fldv(:,:,:)
+   integer, intent(in) :: numlev, data_type, decomp_type
+   !----------------------------------------------------------------------------
+
+   call endrun('FATAL - write_interpolated_vector is a stub, you shouldnt get here')
+
+end subroutine write_interpolated_vector
+
+!=========================================================================================
+
+end module interp_mod
diff --git a/src/dynamics/mpas/pmgrid.F90 b/src/dynamics/mpas/pmgrid.F90
new file mode 100644
index 0000000000..096ad32c6b
--- /dev/null
+++ b/src/dynamics/mpas/pmgrid.F90
@@ -0,0 +1,14 @@
+module pmgrid
+
+   ! PLON and PLAT do not correspond to the number of latitudes and longitudes in
+   ! this version of dynamics.
+
+   implicit none
+
+   integer, parameter :: plev = PLEV ! number of vertical levels (PLEV is a cpp token set by the build)
+   integer, parameter :: plevp = plev + 1
+
+   integer, parameter :: plon = 1 ! placeholder; no longitude dimension on unstructured grids
+   integer, parameter :: plat = 1 ! placeholder; no latitude dimension on unstructured grids
+
+end module pmgrid
diff --git a/src/dynamics/mpas/restart_dynamics.F90 b/src/dynamics/mpas/restart_dynamics.F90
new file mode 100644
index 0000000000..4977d50038
--- /dev/null
+++ b/src/dynamics/mpas/restart_dynamics.F90
@@ -0,0 +1,87 @@
+module restart_dynamics
+
+! Writing and reading grid and dynamics state information to/from restart files is
+! delegated to MPAS utility code. This module provides the CAM interfaces for the
+! restart functionality. CAM just provides MPAS with the PIO filehandle to the
+! restart file.
+
+use dyn_comp, only: dyn_import_t, dyn_export_t, dyn_init
+use pio, only: file_desc_t
+
+use cam_abortutils, only: endrun
+
+use mpas_derived_types, only: MPAS_Stream_type, MPAS_IO_WRITE, MPAS_IO_READ
+use cam_mpas_subdriver, only: domain_ptr, cam_mpas_setup_restart, cam_mpas_write_restart, &
+                              cam_mpas_read_restart, cam_mpas_define_scalars
+
+implicit none
+private
+save
+
+public :: &
+   init_restart_dynamics, &
+   write_restart_dynamics, &
+   read_restart_dynamics
+
+! The restart_stream is set up in init_restart_dynamics and used later in
+! write_restart_dynamics and read_restart_dynamics.
+type (MPAS_Stream_type) :: restart_stream
+
+!=========================================================================================
+contains
+!=========================================================================================
+
+subroutine init_restart_dynamics(file, dyn_out)
+
+   ! arguments
+   type(file_desc_t), target :: File
+   type(dyn_export_t), intent(in) :: dyn_out
+   !----------------------------------------------------------------------------
+
+   call cam_mpas_setup_restart(file, restart_stream, MPAS_IO_WRITE, endrun)
+
+end subroutine init_restart_dynamics
+
+!=========================================================================================
+
+subroutine write_restart_dynamics(File, dyn_out)
+
+   ! arguments
+   type(File_desc_t), intent(inout) :: File
+   type(dyn_export_t), intent(in) :: dyn_out
+   !----------------------------------------------------------------------------
+
+   call cam_mpas_write_restart(restart_stream, endrun)
+
+end subroutine write_restart_dynamics
+
+!=========================================================================================
+
+subroutine read_restart_dynamics(File, dyn_in, dyn_out)
+
+   ! arguments
+   type(File_desc_t), intent(inout) :: File
+   type(dyn_import_t), intent(out) :: dyn_in
+   type(dyn_export_t), intent(out) :: dyn_out
+
+   ! local variables
+   character(len=*), parameter :: subname = 'restart_dynamics::read_restart_dynamics'
+   integer :: ierr
+   !----------------------------------------------------------------------------
+
+   ! Before setting up the restart stream, names for each scalar constituent must be defined
+   call cam_mpas_define_scalars(domain_ptr % blocklist, dyn_in % mpas_from_cam_cnst, &
+                                dyn_out % cam_from_mpas_cnst, ierr)
+   if (ierr /= 0) then
+      call endrun(subname//': Set-up of constituents for MPAS-A dycore failed.')
+   end if
+
+   call cam_mpas_setup_restart(file, restart_stream, MPAS_IO_READ, endrun)
+   call cam_mpas_read_restart(restart_stream, endrun)
+
+   ! Finish initializing the dynamics
+   call dyn_init(dyn_in, dyn_out)
+
+end subroutine read_restart_dynamics
+
+end module restart_dynamics
diff --git a/src/dynamics/mpas/spmd_dyn.F90 b/src/dynamics/mpas/spmd_dyn.F90
new file mode 100644
index 0000000000..b1a82c8928
--- /dev/null
+++ b/src/dynamics/mpas/spmd_dyn.F90
@@ -0,0 +1,11 @@
+module spmd_dyn
+
+   implicit none
+
+   logical, public :: local_dp_map = .true. ! flag indicates that mapping between dynamics
+                                            ! and physics decompositions does not require
+                                            ! interprocess communication
+   integer, public :: block_buf_nrecs       ! NOTE(review): not set in this module; apparently kept to satisfy external references -- verify
+   integer, public :: chunk_buf_nrecs       ! NOTE(review): not set in this module; apparently kept to satisfy external references -- verify
+
+end module spmd_dyn
diff --git a/src/dynamics/mpas/stepon.F90 b/src/dynamics/mpas/stepon.F90
new file mode 100644
index 0000000000..7fc0c196d2
--- /dev/null
+++ b/src/dynamics/mpas/stepon.F90
@@ -0,0 +1,398 @@
+module stepon
+
+use shr_kind_mod, only: r8 => shr_kind_r8
+use spmd_utils, only: mpicom
+
+use pmgrid, only: plev, plevp
+
+use ppgrid, only: begchunk, endchunk
+use physics_types, only: physics_state, physics_tend
+use physics_buffer, only: physics_buffer_desc
+
+use dyn_comp, only: dyn_import_t, dyn_export_t, dyn_run, dyn_final, &
+ swap_time_level_ptrs
+
+use dp_coupling, only: d_p_coupling, p_d_coupling
+
+use camsrfexch, only: cam_out_t
+
+use cam_history, only: addfld, outfld, hist_fld_active
+
+use time_manager, only: get_step_size, get_nstep, is_first_step, is_first_restart_step
+
+use perf_mod, only: t_startf, t_stopf, t_barrierf
+
+implicit none
+private
+save
+
+public :: &
+ stepon_init, &
+ stepon_run1, &
+ stepon_run2, &
+ stepon_run3, &
+ stepon_final
+
+!=========================================================================================
+contains
+!=========================================================================================
+
+subroutine stepon_init(dyn_in, dyn_out)
+
+   ! arguments
+   type (dyn_import_t), intent(inout) :: dyn_in ! Dynamics import container
+   type (dyn_export_t), intent(inout) :: dyn_out ! Dynamics export container
+   !----------------------------------------------------------------------------
+
+   ! Register dycore state variables on the native MPAS grids for history output
+   call addfld ('u', (/ 'lev' /), 'A', 'm/s', 'normal velocity at edges', gridname='mpas_edge')
+   call addfld ('w', (/ 'ilev' /), 'A', 'm/s', 'vertical velocity', gridname='mpas_cell')
+   call addfld ('theta', (/ 'lev' /), 'A', 'K', 'potential temperature', gridname='mpas_cell')
+   call addfld ('rho', (/ 'lev' /), 'A', 'kg/m^3', 'dry air density', gridname='mpas_cell')
+   call addfld ('qv', (/ 'lev' /), 'A', 'kg/kg', 'water vapor dry mmr', gridname='mpas_cell')
+   call addfld ('uReconstructZonal', (/ 'lev' /), 'A', 'm/s', &
+                'zonal velocity at cell centers', gridname='mpas_cell')
+   call addfld ('uReconstructMeridional', (/ 'lev' /), 'A', 'm/s', &
+                'meridional velocity at cell centers', gridname='mpas_cell')
+   call addfld ('divergence', (/ 'lev' /), 'A', '1/s', &
+                'Horizontal velocity divergence at cell center', gridname='mpas_cell')
+   call addfld ('vorticity', (/ 'lev' /), 'A', '1/s', &
+                'Relative vorticity at vertices', gridname='mpas_vertex')
+
+   ! Register physics forcing fields on the native MPAS grids for history output
+   call addfld ('ru_tend', (/ 'lev' /), 'A', 'kg/m^2/s', &
+                'physics tendency of normal horizontal momentum', gridname='mpas_edge')
+   call addfld ('rtheta_tend', (/ 'lev' /), 'A', 'kg K/m^3/s', &
+                'physics tendency of rho*theta/zz', gridname='mpas_cell')
+   call addfld ('rho_tend', (/ 'lev' /), 'A', 'kg/m^3/s', &
+                'physics tendency of dry air density', gridname='mpas_cell')
+
+end subroutine stepon_init
+
+!=========================================================================================
+
+subroutine stepon_run1(dtime_out, phys_state, phys_tend, &
+                       pbuf2d, dyn_in, dyn_out)
+
+   ! arguments
+   real(r8), intent(out) :: dtime_out ! Time-step
+   type(physics_state), intent(inout) :: phys_state(begchunk:endchunk)
+   type(physics_tend), intent(inout) :: phys_tend(begchunk:endchunk)
+   type (physics_buffer_desc), pointer :: pbuf2d(:,:)
+   type (dyn_import_t), intent(inout) :: dyn_in
+   type (dyn_export_t), intent(inout) :: dyn_out
+
+   ! local variables
+   integer :: nstep
+   !----------------------------------------------------------------------------
+
+   nstep = get_nstep()
+   dtime_out = get_step_size()   ! dynamics/physics coupling interval
+
+   ! This call writes the dycore output (on the dynamics grids) to the
+   ! history file. Note that when nstep=0, these fields will be the result
+   ! of the dynamics initialization (done in dyn_init) since the dycore
+   ! does not run and dyn_in is simply copied to dyn_out for use in the cam
+   ! initialization sequence. On subsequent calls dyn_out will contain the
+   ! dycore output.
+   call write_dynvar(dyn_out)
+
+   call t_barrierf('sync_d_p_coupling', mpicom)
+   call t_startf('d_p_coupling')
+   ! Move data into phys_state structure.
+   call d_p_coupling (phys_state, phys_tend, pbuf2d, dyn_out)
+   call t_stopf('d_p_coupling')
+
+   ! Update pointers for prognostic fields if necessary. Note that this shift
+   ! should not take place the first time stepon_run1 is called which is during
+   ! the CAM initialization sequence before the dycore is called. Nor should it
+   ! occur for the first step of a restart run.
+   if (.not. is_first_step() .and. &
+       .not. is_first_restart_step() .and. &
+       swap_time_level_ptrs) then
+
+      call shift_time_levels(dyn_in, dyn_out)
+
+   end if
+
+end subroutine stepon_run1
+
+!=========================================================================================
+
+subroutine stepon_run2(phys_state, phys_tend, dyn_in, dyn_out)
+
+   ! arguments
+   type(physics_state), intent(inout) :: phys_state(begchunk:endchunk)
+   type(physics_tend), intent(inout) :: phys_tend(begchunk:endchunk)
+   type (dyn_import_t), intent(inout) :: dyn_in
+   type (dyn_export_t), intent(inout) :: dyn_out
+   !----------------------------------------------------------------------------
+
+   call t_barrierf('sync_p_d_coupling', mpicom)
+   call t_startf('p_d_coupling')
+   ! copy from phys structures -> dynamics structures
+   call p_d_coupling(phys_state, phys_tend, dyn_in)
+   call t_stopf('p_d_coupling')
+
+   ! Write the physics forcings, which are input to the dycore, to the history file
+   call write_forcings(dyn_in)
+
+end subroutine stepon_run2
+
+!=========================================================================================
+
+subroutine stepon_run3(dtime, cam_out, phys_state, dyn_in, dyn_out)
+
+   ! arguments
+   real(r8), intent(in) :: dtime ! not referenced in this routine
+   type(cam_out_t), intent(inout) :: cam_out(:) ! CAM export to surface models (not referenced here)
+   type(physics_state), intent(inout) :: phys_state(begchunk:endchunk) ! not referenced here
+   type (dyn_import_t), intent(inout) :: dyn_in
+   type (dyn_export_t), intent(inout) :: dyn_out
+   !----------------------------------------------------------------------------
+
+   call t_barrierf('sync_dyn_run', mpicom)
+   call t_startf('dyn_run')
+   call dyn_run(dyn_in, dyn_out)   ! run the MPAS dycore
+   call t_stopf('dyn_run')
+
+end subroutine stepon_run3
+
+!=========================================================================================
+
+subroutine stepon_final(dyn_in, dyn_out)
+
+   ! arguments
+   type(dyn_import_t), intent(inout) :: dyn_in
+   type(dyn_export_t), intent(inout) :: dyn_out
+   !----------------------------------------------------------------------------
+
+   call dyn_final(dyn_in, dyn_out)   ! shut down the dycore
+
+end subroutine stepon_final
+
+!=========================================================================================
+! Private
+!=========================================================================================
+
+subroutine write_dynvar(dyn_out)
+
+   ! Output from the internal MPAS data structures to CAM history files.
+
+   ! arguments
+   type(dyn_export_t), intent(in) :: dyn_out
+
+   ! local variables
+   integer :: i, k, kk
+   integer :: nCellsSolve, nEdgesSolve, nVerticesSolve
+   integer :: qv_idx
+   real(r8), allocatable :: arr2d(:,:)
+   !----------------------------------------------------------------------------
+
+   nCellsSolve = dyn_out%nCellsSolve
+   nEdgesSolve = dyn_out%nEdgesSolve
+   nVerticesSolve = dyn_out%nVerticesSolve
+   qv_idx = dyn_out%index_qv
+
+   if (hist_fld_active('u')) then
+      allocate(arr2d(nEdgesSolve,plev))
+      do k = 1, plev
+         kk = plev - k + 1   ! flip vertical index: MPAS is bottom-to-top, CAM is top-to-bottom
+         do i = 1, nEdgesSolve
+            arr2d(i,k) = dyn_out%uperp(kk,i)
+         end do
+      end do
+      call outfld('u', arr2d, nEdgesSolve, 1)
+      deallocate(arr2d)
+   end if
+
+   if (hist_fld_active('w')) then
+      allocate(arr2d(nCellsSolve,plevp))
+      do k = 1, plevp
+         kk = plevp - k + 1
+         do i = 1, nCellsSolve
+            arr2d(i,k) = dyn_out%w(kk,i)
+         end do
+      end do
+      call outfld('w', arr2d, nCellsSolve, 1)
+      deallocate(arr2d)
+   end if
+
+   allocate(arr2d(nCellsSolve,plev))
+
+   if (hist_fld_active('theta')) then
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            arr2d(i,k) = dyn_out%theta(kk,i)
+         end do
+      end do
+      call outfld('theta', arr2d, nCellsSolve, 1)
+   end if
+
+   if (hist_fld_active('rho')) then
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            arr2d(i,k) = dyn_out%rho(kk,i)
+         end do
+      end do
+      call outfld('rho', arr2d, nCellsSolve, 1)
+   end if
+
+   if (hist_fld_active('qv')) then
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            arr2d(i,k) = dyn_out%tracers(qv_idx,kk,i)
+         end do
+      end do
+      call outfld('qv', arr2d, nCellsSolve, 1)
+   end if
+
+   if (hist_fld_active('uReconstructZonal')) then
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            arr2d(i,k) = dyn_out%ux(kk,i)
+         end do
+      end do
+      call outfld('uReconstructZonal', arr2d, nCellsSolve, 1)
+   end if
+
+   if (hist_fld_active('uReconstructMeridional')) then
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            arr2d(i,k) = dyn_out%uy(kk,i)
+         end do
+      end do
+      call outfld('uReconstructMeridional', arr2d, nCellsSolve, 1)
+   end if
+
+   if (hist_fld_active('divergence')) then
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            arr2d(i,k) = dyn_out%divergence(kk,i)
+         end do
+      end do
+      call outfld('divergence', arr2d, nCellsSolve, 1)
+   end if
+
+   deallocate(arr2d)
+
+   if (hist_fld_active('vorticity')) then
+      allocate(arr2d(nVerticesSolve,plev))
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nVerticesSolve
+            arr2d(i,k) = dyn_out%vorticity(kk,i)
+         end do
+      end do
+      call outfld('vorticity', arr2d, nVerticesSolve, 1)
+      deallocate(arr2d)
+   end if
+
+end subroutine write_dynvar
+
+!=========================================================================================
+
+subroutine write_forcings(dyn_in)
+
+   ! Output from the internal MPAS data structures to CAM history files.
+
+   ! arguments
+   type(dyn_import_t), intent(in) :: dyn_in
+
+   ! local variables
+   integer :: i, k, kk
+   integer :: nCellsSolve, nEdgesSolve
+   real(r8), allocatable :: arr2d(:,:)
+
+   !----------------------------------------------------------------------------
+
+
+   nCellsSolve = dyn_in%nCellsSolve
+   nEdgesSolve = dyn_in%nEdgesSolve
+
+   if (hist_fld_active('ru_tend')) then
+      allocate(arr2d(nEdgesSolve,plev))
+      do k = 1, plev
+         kk = plev - k + 1   ! flip vertical index: MPAS is bottom-to-top, CAM is top-to-bottom
+         do i = 1, nEdgesSolve
+            arr2d(i,k) = dyn_in%ru_tend(kk,i)
+         end do
+      end do
+      call outfld('ru_tend', arr2d, nEdgesSolve, 1)
+      deallocate(arr2d)
+   end if
+
+   allocate(arr2d(nCellsSolve,plev))
+
+   if (hist_fld_active('rtheta_tend')) then
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            arr2d(i,k) = dyn_in%rtheta_tend(kk,i)
+         end do
+      end do
+      call outfld('rtheta_tend', arr2d, nCellsSolve, 1)
+   end if
+
+   if (hist_fld_active('rho_tend')) then
+      do k = 1, plev
+         kk = plev - k + 1
+         do i = 1, nCellsSolve
+            arr2d(i,k) = dyn_in%rho_tend(kk,i)
+         end do
+      end do
+      call outfld('rho_tend', arr2d, nCellsSolve, 1)
+   end if
+
+   deallocate(arr2d)
+
+end subroutine write_forcings
+
+!========================================================================================
+
+subroutine shift_time_levels(dyn_in, dyn_out)
+
+   ! The MPAS dycore swaps the pool time indices after each timestep
+   ! (mpas_dt). If an odd number of these shifts occur during the CAM
+   ! timestep (i.e., the dynamics/physics coupling interval), then CAM
+   ! needs a corresponding update to the pointers in the dyn_in and dyn_out
+   ! objects.
+
+   ! arguments
+   type (dyn_import_t), intent(inout) :: dyn_in
+   type (dyn_export_t), intent(inout) :: dyn_out
+
+   ! local variables
+   real(r8), dimension(:,:), pointer :: ptr2d
+   real(r8), dimension(:,:,:), pointer :: ptr3d
+   !--------------------------------------------------------------------------------------
+
+   ptr2d => dyn_out % uperp   ! three-assignment pointer swap, repeated for each prognostic field
+   dyn_out % uperp => dyn_in % uperp
+   dyn_in % uperp => ptr2d
+
+   ptr2d => dyn_out % w
+   dyn_out % w => dyn_in % w
+   dyn_in % w => ptr2d
+
+   ptr2d => dyn_out % theta_m
+   dyn_out % theta_m => dyn_in % theta_m
+   dyn_in % theta_m => ptr2d
+
+   ptr2d => dyn_out % rho_zz
+   dyn_out % rho_zz => dyn_in % rho_zz
+   dyn_in % rho_zz => ptr2d
+
+   ptr3d => dyn_out % tracers
+   dyn_out % tracers => dyn_in % tracers
+   dyn_in % tracers => ptr3d
+
+end subroutine shift_time_levels
+
+end module stepon
diff --git a/src/dynamics/se/dp_coupling.F90 b/src/dynamics/se/dp_coupling.F90
index 7d8904600b..40efae09a2 100644
--- a/src/dynamics/se/dp_coupling.F90
+++ b/src/dynamics/se/dp_coupling.F90
@@ -925,18 +925,8 @@ subroutine derived_phys_dry(phys_state, phys_tend, pbuf2d)
! Compute initial dry static energy, include surface geopotential
do k = 1, pver
do i = 1, ncol
-#if FIX_TOTE
- ! general formula: E = CV_air T + phis + gravit*zi )
- ! hydrostatic case: integrate zi term by parts, use CP=CV+R to get:
- ! E = CP_air T + phis (Holton Section 8.3)
- ! to use this, update geopotential.F90, and other not-yet-found physics routines:
- ! (check boundary layer code, others which have gravit and zi() or zm()
- phys_state(lchnk)%s(i,k) = cpair*phys_state(lchnk)%t(i,k) &
- + phys_state(lchnk)%phis(i)
-#else
phys_state(lchnk)%s(i,k) = cpair*phys_state(lchnk)%t(i,k) &
+ gravit*phys_state(lchnk)%zm(i,k) + phys_state(lchnk)%phis(i)
-#endif
end do
end do
diff --git a/src/dynamics/se/dyn_comp.F90 b/src/dynamics/se/dyn_comp.F90
index e8391d8d0a..bb98e9692e 100644
--- a/src/dynamics/se/dyn_comp.F90
+++ b/src/dynamics/se/dyn_comp.F90
@@ -11,7 +11,7 @@ module dyn_comp
use cam_control_mod, only: initial_run, simple_phys
use cam_initfiles, only: initial_file_get_id, topo_file_get_id, pertlim
use phys_control, only: use_gw_front, use_gw_front_igw, waccmx_is
-use dyn_grid, only: timelevel, hvcoord, edgebuf
+use dyn_grid, only: ini_grid_name, timelevel, hvcoord, edgebuf
use cam_grid_support, only: cam_grid_id, cam_grid_get_gcid, &
cam_grid_dimensions, cam_grid_get_dim_names, &
@@ -1219,13 +1219,13 @@ subroutine read_inidat(dyn_in)
! Set mask to indicate which columns are active
nullify(ldof)
- call cam_grid_get_gcid(cam_grid_id('GLL'), ldof)
+ call cam_grid_get_gcid(cam_grid_id(ini_grid_name), ldof)
allocate(pmask(npsq*nelemd))
pmask(:) = (ldof /= 0)
! lat/lon needed in radians
- latvals_deg => cam_grid_get_latvals(cam_grid_id('GLL'))
- lonvals_deg => cam_grid_get_lonvals(cam_grid_id('GLL'))
+ latvals_deg => cam_grid_get_latvals(cam_grid_id(ini_grid_name))
+ lonvals_deg => cam_grid_get_lonvals(cam_grid_id(ini_grid_name))
allocate(latvals(np*np*nelemd))
allocate(lonvals(np*np*nelemd))
latvals(:) = latvals_deg(:)*deg2rad
@@ -1234,9 +1234,8 @@ subroutine read_inidat(dyn_in)
! Set PIO to return error codes when reading data from IC file.
call pio_seterrorhandling(fh_ini, PIO_BCAST_ERROR, pio_errtype)
- ! The grid name is defined in dyn_grid::define_cam_grids.
! Get the number of columns in the global GLL grid.
- call cam_grid_dimensions('GLL', dims)
+ call cam_grid_dimensions(ini_grid_name, dims)
dyn_cols = dims(1)
! Set ICs. Either from analytic expressions or read from file.
@@ -1330,7 +1329,7 @@ subroutine read_inidat(dyn_in)
allocate(dbuf2(npsq,nelemd))
allocate(dbuf3(npsq,nlev,nelemd))
- ! Check that number of columns in IC file matches grid definition.
+ ! Check that columns in IC file match grid definition.
call check_file_layout(fh_ini, elem, dyn_cols, 'ncdata', .true., dimname)
! Read 2-D field
@@ -2003,6 +2002,9 @@ end subroutine set_phis
subroutine check_file_layout(file, elem, dyn_cols, file_desc, dyn_ok, dimname)
+ ! This routine is only called when data will be read from the initial file. It is not
+ ! called when the initial file is only supplying vertical coordinate info.
+
type(file_desc_t), pointer :: file
type(element_t), pointer :: elem(:)
integer, intent(in) :: dyn_cols
@@ -2023,22 +2025,15 @@ subroutine check_file_layout(file, elem, dyn_cols, file_desc, dyn_ok, dimname)
!----------------------------------------------------------------------------
! Check that number of columns in IC file matches grid definition.
- ! The dimension of the unstructured grid in the IC file can either be 'ncol'
- ! or 'ncol_d'. Check for ncol_d first since if a file contains distinct GLL
- ! and physics grids the GLL grid will use dimension ncol_d.
- ierr = pio_inq_dimid(file, 'ncol_d', ncol_did)
+
+ call cam_grid_get_dim_names(cam_grid_id(ini_grid_name), dimname, dimname2)
+
+ ierr = pio_inq_dimid(file, trim(dimname), ncol_did)
if (ierr /= PIO_NOERR) then
- if (dyn_ok) then
- ierr = pio_inq_dimid(file, 'ncol', ncol_did)
- if (ierr /= PIO_NOERR) then
- call endrun(subname//': ERROR: neither ncol nor ncol_d dimension found in ' &
- //trim(file_desc)//' file')
- end if
- else
- call endrun(trim(subname)//': ERROR: ncol dimension not found in '//trim(file_desc) &
- //' file')
- end if
+ call endrun(subname//': ERROR: either ncol or ncol_d dimension not found in ' &
+ //trim(file_desc)//' file')
end if
+
ierr = pio_inq_dimlen(file, ncol_did, ncol_size)
if (ncol_size /= dyn_cols) then
if (masterproc) then
@@ -2048,22 +2043,11 @@ subroutine check_file_layout(file, elem, dyn_cols, file_desc, dyn_ok, dimname)
call endrun(subname//': ERROR: dimension ncol size not same as in ncdata file')
end if
- ! The dimname that's passed to the read_dyn_var routines must match the
- ! dimname that's in the GLL grid object definition. The mapping info used by
- ! pio is constructed using the grid object. So this dimname is not necessarily
- ! the one in the IC (or topo) file.
- grid_id = cam_grid_id('GLL')
- call cam_grid_get_dim_names(grid_id, dimname, dimname2)
-
- ! If coordinates come from an initial file containing only the GLL grid then the
- ! the variable names will be lat/lon. On the other hand if the file contains both
- ! GLL and a distinct physics grid, then the variable names will be lat_d/lon_d.
- ! Check whether lat_d/lon_d are present and use them if they are. Otherwise use
- ! lat/lon.
- if (dyn_field_exists(file, 'lat_d', required=.false.)) then
- coordname = 'lat_d'
- else
+ ! Set coordinate name associated with dimname.
+ if (dimname == 'ncol') then
coordname = 'lat'
+ else
+ coordname = 'lat_d'
end if
!! Check to make sure file is in correct order
@@ -2087,10 +2071,10 @@ subroutine check_file_layout(file, elem, dyn_cols, file_desc, dyn_ok, dimname)
call endrun("ncdata file latitudes not in correct column order")
end if
- if (dyn_field_exists(file, 'lon_d', required=.false.)) then
- coordname = 'lon_d'
- else
+ if (dimname == 'ncol') then
coordname = 'lon'
+ else
+ coordname = 'lon_d'
end if
call read_dyn_var(coordname, file, dimname, dbuf2)
@@ -2167,7 +2151,7 @@ subroutine read_dyn_field_2d(fieldname, fh, dimname, buffer)
buffer = 0.0_r8
call infld(trim(fieldname), fh, dimname, 1, npsq, 1, nelemd, buffer, &
- found, gridname='GLL')
+ found, gridname=ini_grid_name)
if(.not. found) then
call endrun('READ_DYN_FIELD_2D: Could not find '//trim(fieldname)//' field on input datafile')
end if
@@ -2195,7 +2179,7 @@ subroutine read_dyn_field_3d(fieldname, fh, dimname, buffer)
buffer = 0.0_r8
call infld(trim(fieldname), fh, dimname, 'lev', 1, npsq, 1, nlev, &
- 1, nelemd, buffer, found, gridname='GLL')
+ 1, nelemd, buffer, found, gridname=ini_grid_name)
if(.not. found) then
call endrun('READ_DYN_FIELD_3D: Could not find '//trim(fieldname)//' field on input datafile')
end if
diff --git a/src/dynamics/se/dyn_grid.F90 b/src/dynamics/se/dyn_grid.F90
index 4957425f0c..8423132a43 100644
--- a/src/dynamics/se/dyn_grid.F90
+++ b/src/dynamics/se/dyn_grid.F90
@@ -61,6 +61,10 @@ module dyn_grid
integer, parameter :: dyn_decomp = 101 ! The SE dynamics grid
integer, parameter :: fvm_decomp = 102 ! The FVM (CSLAM) grid
integer, parameter :: physgrid_d = 103 ! physics grid on dynamics decomp
+integer, parameter :: ini_decomp = 104 ! alternate dynamics grid for reading initial file
+
+character(len=3), protected :: ini_grid_name
+
integer, parameter :: ptimelevels = 2
type (TimeLevel_t) :: TimeLevel ! main time level struct (used by tracers)
@@ -68,13 +72,14 @@ module dyn_grid
type(element_t), pointer :: elem(:) => null() ! local GLL elements for this task
type(fvm_struct), pointer :: fvm(:) => null() ! local FVM elements for this task
-public :: &
- dyn_decomp, &
- ptimelevels, &
- TimeLevel, &
- hvcoord, &
- elem, &
- fvm, &
+public :: &
+ dyn_decomp, &
+ ini_grid_name, &
+ ptimelevels, &
+ TimeLevel, &
+ hvcoord, &
+ elem, &
+ fvm, &
edgebuf
public :: &
@@ -111,6 +116,9 @@ module dyn_grid
integer :: Owner ! task id of element owner
end type block_global_data
+! Name of horizontal grid dimension in initial file.
+character(len=6) :: ini_grid_hdim_name = ' '
+
! Only need this global data for the GLL grid if it is also the physics grid.
type(block_global_data), allocatable :: gblocks(:)
@@ -231,6 +239,9 @@ subroutine dyn_grid_init()
! initial SE (subcycled) nstep
TimeLevel%nstep0 = 0
+ ! determine whether initial file uses 'ncol' or 'ncol_d'
+ call get_hdim_name(fh_ini, ini_grid_hdim_name)
+
! Define the dynamics and physics grids on the dynamics decompostion.
! Physics grid on the physics decomposition is defined in phys_grid_init.
call define_cam_grids()
@@ -879,6 +890,57 @@ end subroutine dyn_grid_get_elem_coords
! Private routines.
!=========================================================================================
+subroutine get_hdim_name(fh_ini, ini_grid_hdim_name)
+
+ ! Determine whether the initial file uses 'ncol' or 'ncol_d' as the horizontal
+ ! dimension in the unstructured grid. It is also possible when using analytic
+ ! initial conditions that the initial file only contains vertical coordinates.
+ ! Return 'none' if that is the case.
+
+ ! arguments
+ type(file_desc_t), pointer :: fh_ini
+ character(len=6), intent(out) :: ini_grid_hdim_name ! horizontal dimension name
+
+ ! local variables
+ integer :: ierr, pio_errtype
+ integer :: ncol_did
+
+ character(len=*), parameter :: sub = 'get_hdim_name'
+ !----------------------------------------------------------------------------
+
+ ! Set PIO to return error flags.
+ call pio_seterrorhandling(fh_ini, PIO_BCAST_ERROR, pio_errtype)
+
+ ! Check for ncol_d first just in case the initial file also contains fields on
+ ! the physics grid.
+ ierr = pio_inq_dimid(fh_ini, 'ncol_d', ncol_did)
+ if (ierr == PIO_NOERR) then
+
+ ini_grid_hdim_name = 'ncol_d'
+
+ else
+
+ ! if 'ncol_d' not in file, check for 'ncol'
+ ierr = pio_inq_dimid(fh_ini, 'ncol', ncol_did)
+
+ if (ierr == PIO_NOERR) then
+
+ ini_grid_hdim_name = 'ncol'
+
+ else
+
+ ini_grid_hdim_name = 'none'
+
+ end if
+ end if
+
+ ! Return PIO to previous error handling.
+ call pio_seterrorhandling(fh_ini, pio_errtype)
+
+end subroutine get_hdim_name
+
+!=========================================================================================
+
subroutine define_cam_grids()
! Create grid objects on the dynamics decomposition for grids used by
@@ -1003,6 +1065,25 @@ subroutine define_cam_grids()
call cam_grid_attribute_register('GLL', 'np', '', np)
call cam_grid_attribute_register('GLL', 'ne', '', ne)
+ ! With CSLAM if the initial file uses the horizontal dimension 'ncol' rather than
+ ! 'ncol_d' then we need a grid object with the names ncol,lat,lon to read it.
+ ! Create that grid object here if it's needed.
+ if (fv_nphys > 0 .and. ini_grid_hdim_name == 'ncol') then
+
+ lat_coord => horiz_coord_create('lat', 'ncol', ngcols_d, &
+ 'latitude', 'degrees_north', 1, size(pelat_deg), pelat_deg, map=pemap)
+ lon_coord => horiz_coord_create('lon', 'ncol', ngcols_d, &
+ 'longitude', 'degrees_east', 1, size(pelon_deg), pelon_deg, map=pemap)
+
+ call cam_grid_register('INI', ini_decomp, lat_coord, lon_coord, &
+ grid_map, block_indexed=.false., unstruct=.true.)
+
+ ini_grid_name = 'INI'
+ else
+ ! The dyn_decomp grid can be used to read the initial file.
+ ini_grid_name = 'GLL'
+ end if
+
! Coordinate values and maps are copied into the coordinate and attribute objects.
! Locally allocated storage is no longer needed.
deallocate(pelat_deg)
diff --git a/src/dynamics/tests/inic_analytic.F90 b/src/dynamics/tests/inic_analytic.F90
index 93f0413eb0..5722d865e3 100644
--- a/src/dynamics/tests/inic_analytic.F90
+++ b/src/dynamics/tests/inic_analytic.F90
@@ -17,6 +17,7 @@ module inic_analytic
public :: analytic_ic_active ! forwarded from init_analytic_utils
public :: analytic_ic_set_ic ! Set analytic initial conditions
+ public :: dyn_set_inic_col
interface analytic_ic_set_ic
module procedure dyn_set_inic_cblock
@@ -37,7 +38,7 @@ module inic_analytic
CONTAINS
!==============================================================================
- subroutine dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, U, V, T, &
+ subroutine dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, zint, U, V, T, &
PS, PHIS_IN, PHIS_OUT, Q, m_cnst, mask, verbose)
use cam_initfiles, only: pertlim
#ifdef ANALYTIC_IC
@@ -58,6 +59,7 @@ subroutine dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, U, V, T, &
real(r8), intent(in) :: latvals(:) ! lat in degrees (ncol)
real(r8), intent(in) :: lonvals(:) ! lon in degrees (ncol)
integer, intent(in) :: glob_ind(:) ! global column index
+ real(r8), optional, intent(in) :: zint(:,:) ! height at layer interfaces
real(r8), optional, intent(inout) :: U(:,:) ! zonal velocity
real(r8), optional, intent(inout) :: V(:,:) ! meridional velocity
real(r8), optional, intent(inout) :: T(:,:) ! temperature
@@ -162,7 +164,7 @@ subroutine dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, U, V, T, &
Q=Q, m_cnst=m_cnst, mask=mask_use, verbose=verbose_use)
case('moist_baroclinic_wave_dcmip2016', 'dry_baroclinic_wave_dcmip2016')
- call bc_wav_set_ic(vcoord, latvals, lonvals, U=U, V=V, T=T, PS=PS, &
+ call bc_wav_set_ic(vcoord, latvals, lonvals, zint=zint, U=U, V=V, T=T, PS=PS, &
PHIS=PHIS_OUT, Q=Q, m_cnst=m_cnst, mask=mask_use, verbose=verbose_use)
case('dry_baroclinic_wave_jw2006')
@@ -170,7 +172,7 @@ subroutine dyn_set_inic_col(vcoord, latvals, lonvals, glob_ind, U, V, T, &
PHIS=PHIS_OUT, Q=Q, m_cnst=m_cnst, mask=mask_use, verbose=verbose_use)
case('us_standard_atmosphere')
- call us_std_atm_set_ic(latvals, lonvals, U=U, V=V, T=T, PS=PS, PHIS_IN=PHIS_IN, &
+ call us_std_atm_set_ic(latvals, lonvals, zint=zint, U=U, V=V, T=T, PS=PS, PHIS_IN=PHIS_IN, &
PHIS_OUT=PHIS_OUT, Q=Q, m_cnst=m_cnst, mask=mask_use, verbose=verbose_use)
case default
@@ -334,7 +336,7 @@ subroutine dyn_set_inic_cblock(vcoord,latvals, lonvals, glob_ind, U, V, T, &
if (present(PS).and.present(PHIS_IN).and.present(T)) then
call dyn_set_inic_col(vcoord,latvals(bbeg:bend), lonvals(bbeg:bend), &
glob_ind(bbeg:bend), PHIS_IN=PHIS_IN(:,i),PS=PS(:,i),T=T(:,:,i), &
- verbose=verbose)
+ verbose=verbose)
else
if (present(T)) then
call dyn_set_inic_col(vcoord,latvals(bbeg:bend), lonvals(bbeg:bend), &
@@ -345,10 +347,10 @@ subroutine dyn_set_inic_cblock(vcoord,latvals, lonvals, glob_ind, U, V, T, &
glob_ind(bbeg:bend), PS=PS(:,i), verbose=verbose)
end if
if (present(PHIS_OUT)) then
- call dyn_set_inic_col(vcoord,latvals(bbeg:bend), lonvals(bbeg:bend), &
- glob_ind(bbeg:bend), PHIS_OUT=PHIS_OUT(:,i), verbose=verbose)
+ call dyn_set_inic_col(vcoord,latvals(bbeg:bend), lonvals(bbeg:bend), &
+ glob_ind(bbeg:bend), PHIS_OUT=PHIS_OUT(:,i), verbose=verbose)
+ end if
end if
- end if
if (present(Q)) then
call dyn_set_inic_col(vcoord,latvals(bbeg:bend), lonvals(bbeg:bend), &
glob_ind(bbeg:bend), Q=Q(:,:,i,:), m_cnst=m_cnst, &
@@ -415,7 +417,7 @@ subroutine dyn_set_inic_cblock(vcoord,latvals, lonvals, glob_ind, U, V, T, &
if (present(PS).and.present(PHIS_IN).and.present(T)) then
call dyn_set_inic_col(vcoord,latvals(bbeg:bend), lonvals(bbeg:bend), &
glob_ind(bbeg:bend), PHIS_IN=PHIS_IN(:,i),PS=PS(:,i),T=T(:,i,:), &
- verbose=verbose)
+ verbose=verbose)
else
if (present(T)) then
call dyn_set_inic_col(vcoord,latvals(bbeg:bend), lonvals(bbeg:bend), &
diff --git a/src/dynamics/tests/initial_conditions/ic_baroclinic.F90 b/src/dynamics/tests/initial_conditions/ic_baroclinic.F90
index d2c38f4b3c..19d9dad35f 100644
--- a/src/dynamics/tests/initial_conditions/ic_baroclinic.F90
+++ b/src/dynamics/tests/initial_conditions/ic_baroclinic.F90
@@ -73,8 +73,8 @@ module ic_baroclinic
contains
- subroutine bc_wav_set_ic(vcoord,latvals, lonvals, U, V, T, PS, PHIS, &
- Q, Z, m_cnst, mask, verbose)
+ subroutine bc_wav_set_ic(vcoord,latvals, lonvals, zint, U, V, T, PS, PHIS, &
+ Q, m_cnst, mask, verbose)
use dyn_tests_utils, only: vc_moist_pressure, vc_dry_pressure, vc_height
use constituents, only: cnst_name
use const_init, only: cnst_init_default
@@ -87,20 +87,20 @@ subroutine bc_wav_set_ic(vcoord,latvals, lonvals, U, V, T, PS, PHIS, &
!-----------------------------------------------------------------------
! Dummy arguments
- integer, intent(in) :: vcoord
+ integer, intent(in) :: vcoord ! vertical coordinate type
real(r8), intent(in) :: latvals(:) ! lat in degrees (ncol)
real(r8), intent(in) :: lonvals(:) ! lon in degrees (ncol)
- ! z_k for vccord 1)
+ real(r8), optional, intent(in) :: zint(:,:) ! interface height (ncol,ilev), ordered top to bottom
real(r8), optional, intent(inout) :: U(:,:) ! zonal velocity
real(r8), optional, intent(inout) :: V(:,:) ! meridional velocity
real(r8), optional, intent(inout) :: T(:,:) ! temperature
real(r8), optional, intent(inout) :: PS(:) ! surface pressure
real(r8), optional, intent(out) :: PHIS(:) ! surface geopotential
real(r8), optional, intent(inout) :: Q(:,:,:) ! tracer (ncol, lev, m)
- real(r8), optional, intent(inout) :: Z(:,:) ! height (ncol, lev)
integer, optional, intent(in) :: m_cnst(:) ! tracer indices (reqd. if Q)
logical, optional, intent(in) :: mask(:) ! only init where .true.
logical, optional, intent(in) :: verbose ! for internal use
+
! Local variables
logical, allocatable :: mask_use(:)
logical :: verbose_use
@@ -116,7 +116,7 @@ subroutine bc_wav_set_ic(vcoord,latvals, lonvals, U, V, T, PS, PHIS, &
logical :: lU, lV, lT, lQ, l3d_vars
logical :: cnst1_is_moisture
real(r8), allocatable :: pdry_half(:), pwet_half(:),zdry_half(:),zk(:)
- real(r8), allocatable :: zlocal(:,:)! height of full level p for test tracer initialization
+ real(r8), allocatable :: zmid(:,:) ! layer midpoint heights for test tracer initialization
if ((vcoord == vc_moist_pressure) .or. (vcoord == vc_dry_pressure)) then
!
@@ -127,11 +127,16 @@ subroutine bc_wav_set_ic(vcoord,latvals, lonvals, U, V, T, PS, PHIS, &
call endrun(subname//' ERROR: For iterate_z_given_pressure to work ptop must be less than 100hPa')
end if
ztop = iterate_z_given_pressure(ptop,.false.,ptop,0.0_r8,-1000._r8) !Find height of top pressure surface
+
else if (vcoord == vc_height) then
- !
- ! height-based vertical coordinate
- !
- call endrun(subname//' ERROR: z-based vertical coordinate not coded yet')
+ !
+ ! height-based vertical coordinate
+ !
+ if (present(zint)) then
+ ztop = zint(1,1)
+ else
+ call endrun(subname//' ERROR: z-based vertical coordinate requires using optional arg zint')
+ end if
else
call endrun(subname//' ERROR: vcoord value out of range')
end if
@@ -166,7 +171,7 @@ subroutine bc_wav_set_ic(vcoord,latvals, lonvals, U, V, T, PS, PHIS, &
!*******************************
!
if (present(PS)) then
- if (vcoord == vc_moist_pressure) then
+ if (vcoord == vc_moist_pressure .or. vcoord == vc_height) then
where(mask_use)
PS = psurf_moist
end where
@@ -222,7 +227,7 @@ subroutine bc_wav_set_ic(vcoord,latvals, lonvals, U, V, T, PS, PHIS, &
nlev = size(Q, 2)
! check whether first constituent in Q is water vapor.
cnst1_is_moisture = m_cnst(1) == 1
- allocate(zlocal(size(Q, 1),nlev))
+ allocate(zmid(size(Q, 1),nlev))
end if
allocate(zk(nlev))
@@ -244,21 +249,20 @@ subroutine bc_wav_set_ic(vcoord,latvals, lonvals, U, V, T, PS, PHIS, &
psurface = psurf_moist-wvp
end if
- do k=1,nlev
- ! compute pressure levels
- pk = hyam(k)*ps0 + hybm(k)*psurface
- ! find height of pressure surface
- zk(k) = iterate_z_given_pressure(pk,(vcoord == vc_dry_pressure),ptop,latvals(i),ztop)
- end do
+ if (vcoord == vc_moist_pressure .or. vcoord == vc_dry_pressure) then
+ do k=1,nlev
+ ! compute pressure levels
+ pk = hyam(k)*ps0 + hybm(k)*psurface
+ ! find height of pressure surface
+ zk(k) = iterate_z_given_pressure(pk,(vcoord == vc_dry_pressure),ptop,latvals(i),ztop)
+ end do
+ else if (vcoord == vc_height) then
+ zk = 0.5_r8*(zint(i,1:nlev) + zint(i,2:nlev+1))
+ end if
if (lq) then
- if (present(Z)) then
- zlocal(i,1:nlev) = Z(i,1:nlev)
- else
- zlocal(i,1:nlev) = zk(:)
- end if
+ zmid(i,:) = zk(:)
end if
-
do k=1,nlev
!
@@ -270,7 +274,8 @@ subroutine bc_wav_set_ic(vcoord,latvals, lonvals, U, V, T, PS, PHIS, &
!
! temperature and moisture for moist vertical coordinates
!
- if ((lq.or.lt).and.(vcoord == vc_moist_pressure)) then
+ if ( (lq .or. lt) .and. &
+ (vcoord==vc_moist_pressure .or. vcoord==vc_height) ) then
if (analytic_ic_is_moist()) then
pk = moist_pressure_given_z(zk(k),latvals(i))
qk = qv_given_moist_pressure(pk,latvals(i))
@@ -330,20 +335,19 @@ subroutine bc_wav_set_ic(vcoord,latvals, lonvals, U, V, T, PS, PHIS, &
end if
if (lq) then
- ncnst = size(m_cnst, 1)
- if ((vcoord == vc_moist_pressure) .or. (vcoord == vc_dry_pressure)) then
- do m = 1, ncnst
+
+ ncnst = size(m_cnst, 1)
+
+ do m = 1, ncnst
! water vapor already done above
if (m_cnst(m) == 1) cycle
call cnst_init_default(m_cnst(m), latvals, lonvals, Q(:,:,m),&
mask=mask_use, verbose=verbose_use, notfound=.false.,&
- z=zlocal)
+ z=zmid)
- end do
-
- end if ! vcoord
+ end do
end if ! lq
deallocate(mask_use)
diff --git a/src/dynamics/tests/initial_conditions/ic_us_standard_atm.F90 b/src/dynamics/tests/initial_conditions/ic_us_standard_atm.F90
index 6100104e42..b97fa35979 100644
--- a/src/dynamics/tests/initial_conditions/ic_us_standard_atm.F90
+++ b/src/dynamics/tests/initial_conditions/ic_us_standard_atm.F90
@@ -30,7 +30,7 @@ module ic_us_standard_atmosphere
CONTAINS
!=========================================================================================
-subroutine us_std_atm_set_ic(latvals, lonvals, U, V, T, PS, PHIS_IN, &
+subroutine us_std_atm_set_ic(latvals, lonvals, zint, U, V, T, PS, PHIS_IN, &
PHIS_OUT, Q, m_cnst, mask, verbose)
!----------------------------------------------------------------------------
@@ -43,6 +43,7 @@ subroutine us_std_atm_set_ic(latvals, lonvals, U, V, T, PS, PHIS_IN, &
! Arguments
real(r8), intent(in) :: latvals(:) ! lat in degrees (ncol)
real(r8), intent(in) :: lonvals(:) ! lon in degrees (ncol)
+ real(r8), optional, intent(in) :: zint(:,:) ! height at layer interfaces
real(r8), optional, intent(inout) :: U(:,:) ! zonal velocity
real(r8), optional, intent(inout) :: V(:,:) ! meridional velocity
real(r8), optional, intent(inout) :: T(:,:) ! temperature
@@ -59,13 +60,18 @@ subroutine us_std_atm_set_ic(latvals, lonvals, U, V, T, PS, PHIS_IN, &
logical :: verbose_use
integer :: i, k, m
integer :: ncol
- integer :: nlev
+ integer :: nlev, nlevp
integer :: ncnst
character(len=*), parameter :: subname = 'us_std_atm_set_ic'
real(r8) :: psurf(1)
- real(r8), allocatable :: pmid(:), zmid(:)
+ real(r8), allocatable :: pmid(:), zmid(:), zmid2d(:,:)
!----------------------------------------------------------------------------
+ ! check input consistency
+ if (present(zint) .and. present(PHIS_IN)) then
+ call endrun(subname//': Only one of the args zint and PHIS_IN can be present')
+ end if
+
ncol = size(latvals, 1)
allocate(mask_use(ncol))
if (present(mask)) then
@@ -116,25 +122,40 @@ subroutine us_std_atm_set_ic(latvals, lonvals, U, V, T, PS, PHIS_IN, &
end if
if (present(T)) then
- if (.not.present(PHIS_IN)) then
- call endrun(subname//': PHIS_IN must be specified to initiallize T')
- end if
nlev = size(T, 2)
allocate(pmid(nlev), zmid(nlev))
- do i = 1, ncol
- if (mask_use(i)) then
- ! get surface pressure
- call std_atm_pres(PHIS_IN(i:i)/gravit, psurf)
- ! get pressure levels
- do k = 1, nlev
- pmid(k) = hyam(k)*ps0 + hybm(k)*psurf(1)
- end do
- ! get height of pressure level
- call std_atm_height(pmid, zmid)
- ! given height get temperature
- call std_atm_temp(zmid, T(i,:))
- end if
- end do
+
+ if (present(PHIS_IN)) then
+
+ do i = 1, ncol
+ if (mask_use(i)) then
+ ! get surface pressure
+ call std_atm_pres(PHIS_IN(i:i)/gravit, psurf)
+ ! get pressure levels
+ do k = 1, nlev
+ pmid(k) = hyam(k)*ps0 + hybm(k)*psurf(1)
+ end do
+ ! get height of pressure level
+ call std_atm_height(pmid, zmid)
+ ! given height get temperature
+ call std_atm_temp(zmid, T(i,:))
+ end if
+ end do
+
+ else if (present(zint)) then
+
+ do i = 1, ncol
+ if (mask_use(i)) then
+ zmid = 0.5_r8*(zint(i,1:nlev) + zint(i,2:nlev+1))
+ ! given height get temperature
+ call std_atm_temp(zmid, T(i,:))
+ end if
+ end do
+
+ else
+ call endrun(subname//': either PHIS_IN or zint must be specified to initialize T')
+ end if
+
deallocate(pmid, zmid)
if(masterproc .and. verbose_use) then
@@ -143,22 +164,42 @@ subroutine us_std_atm_set_ic(latvals, lonvals, U, V, T, PS, PHIS_IN, &
end if
if (present(PS)) then
- if (.not.present(PHIS_IN)) then
- call endrun(subname//': PHIS_IN must be specified to initiallize PS')
+
+ if (present(PHIS_IN)) then
+
+ do i = 1, ncol
+ if (mask_use(i)) then
+ call std_atm_pres(PHIS_IN(i:i)/gravit, PS(i:i))
+ end if
+ end do
+
+ else if (present(zint)) then
+
+ nlevp = size(zint, 2)
+
+ do i = 1, ncol
+ if (mask_use(i)) then
+ call std_atm_pres(zint(i:i,nlevp), PS(i:i))
+ end if
+ end do
+
+ else
+ call endrun(subname//': either PHIS_IN or zint must be specified to initialize PS')
end if
- do i = 1, ncol
- if (mask_use(i)) then
- call std_atm_pres(PHIS_IN(i:i)/gravit, PS(i:i))
- end if
- end do
if(masterproc .and. verbose_use) then
write(iulog,*) ' PS initialized by "',subname,'"'
end if
end if
if (present(Q)) then
+
nlev = size(Q, 2)
+ if (present(zint)) then
+ allocate(zmid2d(ncol,nlev))
+ zmid2d = 0.5_r8*(zint(:,1:nlev) + zint(:,2:nlev+1))
+ end if
+
ncnst = size(m_cnst, 1)
do m = 1, ncnst
if (m_cnst(m) == 1) then
@@ -172,10 +213,18 @@ subroutine us_std_atm_set_ic(latvals, lonvals, U, V, T, PS, PHIS_IN, &
write(iulog,*) ' ', trim(cnst_name(m_cnst(m))), ' initialized by '//subname
end if
else
- call cnst_init_default(m_cnst(m), latvals, lonvals, Q(:,:,m_cnst(m)),&
- mask=mask_use, verbose=verbose_use, notfound=.false.)
+ if (present(zint)) then
+ call cnst_init_default(m_cnst(m), latvals, lonvals, Q(:,:,m_cnst(m)),&
+ mask=mask_use, verbose=verbose_use, notfound=.false., z=zmid2d)
+ else
+ call cnst_init_default(m_cnst(m), latvals, lonvals, Q(:,:,m_cnst(m)),&
+ mask=mask_use, verbose=verbose_use, notfound=.false.)
+ end if
end if
end do
+
+ if (allocated(zmid2d)) deallocate(zmid2d)
+
end if
deallocate(mask_use)
diff --git a/src/physics/cam/const_init.F90 b/src/physics/cam/const_init.F90
index 1086c5d5df..849e4a290e 100644
--- a/src/physics/cam/const_init.F90
+++ b/src/physics/cam/const_init.F90
@@ -130,11 +130,7 @@ subroutine cnst_init_default_col(m_cnst, latvals, lonvals, q, mask, &
write(iulog,*) ' ', trim(name), ' initialized by "rk_stratiform_init_cnst"'
end if
else if (tracers_implements_cnst(trim(name))) then
- if (present(z)) then
- call tracers_init_cnst(trim(name), latvals, lonvals, mask_use, q,z=z)
- else
- call tracers_init_cnst(trim(name), latvals, lonvals, mask_use, q)
- end if
+ call tracers_init_cnst(trim(name), latvals, lonvals, mask_use, q, z=z)
if(masterproc .and. verbose_use) then
write(iulog,*) ' ', trim(name), ' initialized by "tracers_init_cnst"'
end if
diff --git a/src/physics/cam/tracers.F90 b/src/physics/cam/tracers.F90
index e54e084321..3b1773745c 100644
--- a/src/physics/cam/tracers.F90
+++ b/src/physics/cam/tracers.F90
@@ -291,11 +291,7 @@ subroutine tracers_init_cnst(name, latvals, lonvals, mask, q, z)
do m = 1, test_tracer_num
if (name == test_tracer_names(m)) then
if (analytic_tracer(m)) then
- if (present(z)) then
- call test_func_set(name, latvals, lonvals, mask, q, z=z)
- else
- call test_func_set(name, latvals, lonvals, mask, q)
- end if
+ call test_func_set(name, latvals, lonvals, mask, q, z=z)
found = .true.
exit
else
diff --git a/src/physics/simple/physpkg.F90 b/src/physics/simple/physpkg.F90
index 6dc974f467..0440b54e07 100644
--- a/src/physics/simple/physpkg.F90
+++ b/src/physics/simple/physpkg.F90
@@ -739,7 +739,7 @@ subroutine tphysbc (ztodt, state, tend, pbuf, cam_out, cam_in )
call t_startf('energy_fixer')
- if (adiabatic .and. (.not. dycore_is('EUL'))) then
+ if (adiabatic .and. (.not. dycore_is('EUL')) .and. (.not. dycore_is('MPAS'))) then
call check_energy_fix(state, ptend, nstep, flx_heat)
call physics_update(state, ptend, ztodt, tend)
call check_energy_chng(state, tend, "chkengyfix", nstep, ztodt, zero, zero, zero, flx_heat)
diff --git a/src/utils/cam_map_utils.F90 b/src/utils/cam_map_utils.F90
index 591fa9ff6c..8c2295e5af 100644
--- a/src/utils/cam_map_utils.F90
+++ b/src/utils/cam_map_utils.F90
@@ -552,11 +552,14 @@ integer(iMap) function cam_filemap_mapVal(this, index, dsize, dest_in)
integer, optional, intent(in) :: dest_in(:)
! Local variables
+ integer :: ndest
integer :: d(max_dests)
if (associated(this%map)) then
if (present(dest_in)) then
- d = dest_in
+ ndest = size(dest_in)
+ d = 0
+ d(:ndest) = dest_in(:ndest)
else
d = this%dest
end if
@@ -937,7 +940,9 @@ subroutine cam_filemap_get_array_bounds(this, dims)
dims(rank + this%src(i) + 1, 1) = 0
dims(rank + this%src(i) + 1, 2) = -1
end if
- ! No else (zero means unused position)
+ else
+ ! src(i)==0 means unused position
+ dims(i,:) = 0
end if
end do
end subroutine cam_filemap_get_array_bounds
diff --git a/src/utils/string_utils.F90 b/src/utils/string_utils.F90
index 555da47edd..b81c04612f 100644
--- a/src/utils/string_utils.F90
+++ b/src/utils/string_utils.F90
@@ -11,11 +11,14 @@ module string_utils
! Public interface methods
public ::&
- to_upper, & ! Convert character string to upper case
- to_lower, & ! Convert character string to lower case
- INCSTR, & ! increments a string
- GLC, & ! Position of last significant character in string
- strlist_get_ind ! find string in a list of strings and return its index
+ to_upper, & ! Convert character string to upper case
+ to_lower, & ! Convert character string to lower case
+ INCSTR, & ! increments a string
+ GLC, & ! Position of last significant character in string
+ strlist_get_ind, & ! find string in a list of strings and return its index
+ date2yyyymmdd, & ! convert encoded date integer to "yyyy-mm-dd" format
+ sec2hms, & ! convert integer seconds past midnight to "hh:mm:ss" format
+ int2str ! convert integer to left justified string
contains
@@ -283,4 +286,73 @@ end subroutine strlist_get_ind
!=========================================================================================
+character(len=10) function date2yyyymmdd (date)
+
+ ! Input arguments
+
+ integer, intent(in) :: date
+
+ ! Local workspace
+
+ integer :: year ! year of yyyy-mm-dd
+ integer :: month ! month of yyyy-mm-dd
+ integer :: day ! day of yyyy-mm-dd
+
+ if (date < 0) then
+ call endrun ('DATE2YYYYMMDD: negative date not allowed')
+ end if
+
+ year = date / 10000
+ month = (date - year*10000) / 100
+ day = date - year*10000 - month*100
+
+ write(date2yyyymmdd,80) year, month, day
+80 format(i4.4,'-',i2.2,'-',i2.2)
+
+end function date2yyyymmdd
+
+!=========================================================================================
+
+character(len=8) function sec2hms (seconds)
+
+ ! Input arguments
+
+ integer, intent(in) :: seconds
+
+ ! Local workspace
+
+ integer :: hours ! hours of hh:mm:ss
+ integer :: minutes ! minutes of hh:mm:ss
+ integer :: secs ! seconds of hh:mm:ss
+
+ if (seconds < 0 .or. seconds > 86400) then
+ write(iulog,*)'SEC2HMS: bad input seconds:', seconds
+ call endrun ('SEC2HMS: bad input seconds:')
+ end if
+
+ hours = seconds / 3600
+ minutes = (seconds - hours*3600) / 60
+ secs = (seconds - hours*3600 - minutes*60)
+
+ write(sec2hms,80) hours, minutes, secs
+80 format(i2.2,':',i2.2,':',i2.2)
+
+end function sec2hms
+
+!=========================================================================================
+
+character(len=10) function int2str(n)
+
+ ! return default integer as a left justified string
+
+ ! arguments
+ integer, intent(in) :: n
+ !----------------------------------------------------------------------------
+
+ write(int2str,'(i0)') n
+
+end function int2str
+
+!=========================================================================================
+
end module string_utils
diff --git a/src/utils/time_manager.F90 b/src/utils/time_manager.F90
index bd1fda998a..38ad3b4db2 100644
--- a/src/utils/time_manager.F90
+++ b/src/utils/time_manager.F90
@@ -26,6 +26,8 @@ module time_manager
get_curr_date, &! return date components at end of current timestep
get_prev_date, &! return date components at beginning of current timestep
get_start_date, &! return components of the start date
+ get_stop_date, &! return components of the stop date
+ get_run_duration, &! return run duration in whole days and remaining seconds
get_ref_date, &! return components of the reference date
get_perp_date, &! return components of the perpetual date, and current time of day
get_curr_time, &! return components of elapsed time since reference date at end of current timestep
@@ -677,6 +679,62 @@ subroutine get_start_date(yr, mon, day, tod)
call chkrc(rc, sub//': error return from ESMF_TimeGet')
end subroutine get_start_date
+
+!=========================================================================================
+
<source_for_copy>+subroutine get_stop_date(yr, mon, day, tod)
+
+ ! Return date components valid at end of run
+ !
+ ! Queries the module clock (tm_clock) for its ESMF stopTime and unpacks
+ ! it into calendar components.  chkrc presumably aborts on a non-success
+ ! ESMF return code -- confirm against the full module.
+
+ ! Arguments
+ integer, intent(out) ::&
+ yr, &! year
+ mon, &! month
+ day, &! day of month
+ tod ! time of day (seconds past 0Z)
+
+ ! Local variables
+ character(len=*), parameter :: sub = 'get_stop_date'
+ integer :: rc
+ type(ESMF_Time) :: date
+ !----------------------------------------------------------------------------
+
+ ! Fetch the clock's configured stop time ...
+ call ESMF_ClockGet(tm_clock, stopTime=date, rc=rc)
+ call chkrc(rc, sub//': error return from ESMF_ClockGet')
+
+ ! ... and unpack it: yy/mm/dd are calendar fields, s is seconds past 0Z.
+ call ESMF_TimeGet(date, yy=yr, mm=mon, dd=day, s=tod, rc=rc)
+ call chkrc(rc, sub//': error return from ESMF_TimeGet')
+
+end subroutine get_stop_date</source_for_copy>+subroutine get_stop_date(yr, mon, day, tod)
+
+ ! Return date components valid at end of run
+ !
+ ! Queries the module clock (tm_clock) for its ESMF stopTime and unpacks
+ ! it into calendar components.  chkrc presumably aborts on a non-success
+ ! ESMF return code -- confirm against the full module.
+
+ ! Arguments
+ integer, intent(out) ::&
+ yr, &! year
+ mon, &! month
+ day, &! day of month
+ tod ! time of day (seconds past 0Z)
+
+ ! Local variables
+ character(len=*), parameter :: sub = 'get_stop_date'
+ integer :: rc
+ type(ESMF_Time) :: date
+ !----------------------------------------------------------------------------
+
+ ! Fetch the clock's configured stop time ...
+ call ESMF_ClockGet(tm_clock, stopTime=date, rc=rc)
+ call chkrc(rc, sub//': error return from ESMF_ClockGet')
+
+ ! ... and unpack it: yy/mm/dd are calendar fields, s is seconds past 0Z.
+ call ESMF_TimeGet(date, yy=yr, mm=mon, dd=day, s=tod, rc=rc)
+ call chkrc(rc, sub//': error return from ESMF_TimeGet')
+
+end subroutine get_stop_date
+
+!=========================================================================================
+
+subroutine get_run_duration(nday, nsec)
+
+ ! Return run duration in days and seconds
+ !
+ ! Computes (stopTime - startTime) from the module clock (tm_clock) using
+ ! ESMF's overloaded subtraction, then splits the interval into whole days
+ ! plus a remainder in seconds via ESMF_TimeIntervalGet.
+ ! NOTE(review): assumes stopTime >= startTime; the sign convention ESMF
+ ! uses for a negative interval's d/s components is not established here
+ ! -- confirm if reversed clocks are possible.
+
+ ! Arguments
+ integer, intent(out) ::&
+ nday, &! number of days in interval
+ nsec ! remainder in seconds
+
+ ! Local variables
+ character(len=*), parameter :: sub = 'get_run_duration'
+ integer :: rc
+ type(ESMF_Time) :: start_time, stop_time
+ type(ESMF_TimeInterval) :: diff
+ !----------------------------------------------------------------------------
+
+ ! Both endpoints come from the same clock query.
+ call ESMF_ClockGet(tm_clock, startTime=start_time, stopTime=stop_time, rc=rc)
+ call chkrc(rc, sub//': error return from ESMF_ClockGet')
+
+ diff = stop_time - start_time
+
+ ! d= whole days, s= leftover seconds (0..86399 for a non-negative interval).
+ call ESMF_TimeIntervalGet(diff, d=nday, s=nsec, rc=rc)
+ call chkrc(rc, sub//': error return from ESMF_TimeIntervalGet')
+
+end subroutine get_run_duration
+
!=========================================================================================
subroutine get_ref_date(yr, mon, day, tod)
diff --git a/test/system/TR8.sh b/test/system/TR8.sh
index aff7aca910..3a10e9d637 100755
--- a/test/system/TR8.sh
+++ b/test/system/TR8.sh
@@ -73,6 +73,8 @@ ruby $ADDREALKIND_EXE -r r8 -l 1 -d $CAM_ROOT/components/cam/src/dynamics/fv
rc=`expr $? + $rc`
ruby $ADDREALKIND_EXE -r r8 -l 1 -d $CAM_ROOT/components/cam/src/dynamics/eul
rc=`expr $? + $rc`
+ruby $ADDREALKIND_EXE -r r8 -l 1 -d $CAM_ROOT/components/cam/src/dynamics/mpas -s dycore
+rc=`expr $? + $rc`
else
@@ -84,6 +86,8 @@ ruby $ADDREALKIND_EXE -r r8 -l 1 -d $CAM_ROOT/src/dynamics/fv
rc=`expr $? + $rc`
ruby $ADDREALKIND_EXE -r r8 -l 1 -d $CAM_ROOT/src/dynamics/eul
rc=`expr $? + $rc`
+ruby $ADDREALKIND_EXE -r r8 -l 1 -d $CAM_ROOT/src/dynamics/mpas -s dycore
+rc=`expr $? + $rc`
fi