diff --git a/CHANGELOG.md b/CHANGELOG.md index 69f0ea67..63f09c13 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,14 +21,39 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ----------------------------- +## [v3.2.0] - 2025-11-26 + +- 0-diff vs. v3.1.0 (except for lat/lon fields in "1d" nc4 output, which have roundoff differences between files directly generated with MAPL [new default] and files generated with tile_bin2nc4 [discontinued]). + +### Added + +- Added reader for surface meteorological forcing from S2S-3 ([PR #138](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/138)). +- Added matlab reader for binary mwRTM vegopacity file ([PR #142](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/142)). + +### Changed + +- Changed default format of tile-space HISTORY output to nc4 ([PR #144](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/144)). +- Enabled remapping of landice restarts from ldas_setup ([PR #146](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/146)). +- Commented out static QC mask in CYGNSS obs reader ([PR #151](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/151)). +- Cleaned up ldas_setup; split out ldas.py and setup_utils.py; restored ntasks-per-node option ([PR #107](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/107)). +- Updated `GEOSlandassim_GridComp/io_hdf5.F90` to allow for use with HDF5 1.14 ([PR #139](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/139)). + +### Fixed + +- Fixed bug in ASCAT EUMET soil moisture obs reader; bumped max_obs limit ([PR #148](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/148), [PR #151](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/151)). +- Provided default "zoom" value for remap_restarts yaml file ([PR #137](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/137)). +- Fixed Restart=1 when the domain is not global ([PR #107](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/107)). 
+ +----------------------------- + ## [v3.1.0] - 2025-06-26 - 0-diff vs. v3.0.0. ### Added -- Added python package for post-processing ObsFcstAna output into data assimilation diagnostics ([PR #87](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/87), [PR #111](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/111)). -- Support for 2d output from EASE tile space and 2d output on EASE grid: +- Added python package for post-processing ObsFcstAna output into data assimilation diagnostics ([PR #87](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/87), [PR #111](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/111)). +- Support for 2d output from EASE tile space and 2d output on EASE grid: - Switched EASE grid handling to new MAPL EASE Grid Factory ([PR #115](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/115)). - Revised pre-processing of HISTORY template ([PR #118](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/118)). - Support for tile space of stretched cube-sphere grids ([PR #109](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/109)). @@ -140,4 +165,3 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ----------------------------- - diff --git a/GEOSlandassim_GridComp/clsm_ensupd_read_obs.F90 b/GEOSlandassim_GridComp/clsm_ensupd_read_obs.F90 index 67aa07fe..4bd263d6 100644 --- a/GEOSlandassim_GridComp/clsm_ensupd_read_obs.F90 +++ b/GEOSlandassim_GridComp/clsm_ensupd_read_obs.F90 @@ -1643,7 +1643,7 @@ subroutine read_obs_sm_ASCAT_EUMET( & integer, parameter :: lnbufr = 50 ! BUFR file unit number integer, parameter :: max_rec = 50000 ! max number of obs after QC (expecting < 6 hr assim window) - integer, parameter :: max_obs = 250000 ! max number of obs read by subroutine (expecting < 6 hr assim window) + integer, parameter :: max_obs = 280000 ! 
max number of obs read by subroutine (expecting < 6 hr assim window) integer :: idate, iret, ireadmg, ireadsb @@ -1951,8 +1951,8 @@ subroutine read_obs_sm_ASCAT_EUMET( & date_time_tmp%sec = int(tmp_data(kk, 6)) ! skip if record outside of current assim window - if ( datetime_lt_refdatetime( date_time_tmp, date_time_low ) .and. & - datetime_le_refdatetime( date_time_up, date_time_tmp ) ) cycle + if ( datetime_lt_refdatetime( date_time_tmp, date_time_low ) .or. & ! obs is before start of assim window *or* + datetime_le_refdatetime( date_time_up, date_time_tmp ) ) cycle ! obs is after end of assim window ! skip if record contains invalid soil moisture value if ( tmp_data(kk, 7) > 100. .or. tmp_data(kk, 7) < 0. ) cycle @@ -2427,78 +2427,87 @@ subroutine read_obs_sm_CYGNSS( & ! close the obs file ierr = nf90_close(ncid) - ! get name for CYGNSS mask file - - tmpmaskname = trim(this_obs_param%maskpath) // '/' // trim(this_obs_param%maskname) // '.nc' - - inquire(file=tmpfname, exist=file_exists) - - if (.not. file_exists) then - err_msg = 'CYGNSS mask file not found!' - call ldas_abort(LDAS_GENERIC_ERROR, Iam, err_msg) - end if - - ! open the CYGNSS mask file - - ierr = nf90_open(trim(tmpmaskname), nf90_nowrite, ncid) - - ! get variable dimension IDs - ierr = nf90_inq_dimid(ncid, 'lon', lon_dimid) - ierr = nf90_inq_dimid(ncid, 'lat', lat_dimid) - - ! dimensions sizes - ierr = nf90_inquire_dimension(ncid, lon_dimid, len=N_lon_m) - ierr = nf90_inquire_dimension(ncid, lat_dimid, len=N_lat_m) - - ! get variable IDs - ierr = nf90_inq_varid(ncid, 'longitude', longitudes_m_varid) - ierr = nf90_inq_varid(ncid, 'latitude', latitudes_m_varid) - ierr = nf90_inq_varid(ncid, 'flag_small_SM_range', small_SM_range_varid) - ierr = nf90_inq_varid(ncid, 'flag_poor_SMAP', poor_SMAP_varid) - ierr = nf90_inq_varid(ncid, 'flag_high_ubrmsd', high_ubrmsd_varid) - ierr = nf90_inq_varid(ncid, 'flag_few_obs', few_obs_varid) - ierr = nf90_inq_varid(ncid, 'flag_low_signal', low_signal_varid) - - ! 
allocate memory for the variables - allocate(latitudes_m( N_lon_m, N_lat_m)) - allocate(longitudes_m( N_lon_m, N_lat_m)) - allocate(small_SM_range_flag(N_lon_m, N_lat_m)) - allocate(poor_SMAP_flag( N_lon_m, N_lat_m)) - allocate(high_ubrmsd_flag( N_lon_m, N_lat_m)) - allocate(few_obs_flag( N_lon_m, N_lat_m)) - allocate(low_signal_flag( N_lon_m, N_lat_m)) - - ! read the variables - ierr = nf90_get_var(ncid, latitudes_m_varid, latitudes_m) - ierr = nf90_get_var(ncid, longitudes_m_varid, longitudes_m) - ierr = nf90_get_var(ncid, small_SM_range_varid, small_SM_range_flag) - ierr = nf90_get_var(ncid, poor_SMAP_varid, poor_SMAP_flag) - ierr = nf90_get_var(ncid, high_ubrmsd_varid, high_ubrmsd_flag) - ierr = nf90_get_var(ncid, few_obs_varid, few_obs_flag) - ierr = nf90_get_var(ncid, low_signal_varid, low_signal_flag) - - ! close the mask file - ierr = nf90_close(ncid) - - ! check the obs data and mask data are the same resolution - if (N_lon /= N_lon_m .or. N_lat /= N_lat_m) then - err_msg = 'The mask file ' // trim(this_obs_param%maskname) // ' does not match the obs resolution' - call ldas_abort(LDAS_GENERIC_ERROR, Iam, err_msg) - end if - - good_flag_value = 255 ! should really be 0 but is 255 because of unsigned v. signed byte issues + ! ---------------------------------------------------------------- + ! AMF, November 2025 + ! Original CYGNSS mask reading section commented out - masks not currently used + ! Mask originally developed for version 1.0 of CYGNSS soil moisture product and + ! not believed to be appropriate for version 3.2 product being used here. + ! ---------------------------------------------------------------- + + ! ! get name for CYGNSS mask file + + ! tmpmaskname = trim(this_obs_param%maskpath) // '/' // trim(this_obs_param%maskname) // '.nc' + + ! inquire(file=tmpfname, exist=file_exists) + + ! if (.not. file_exists) then + ! err_msg = 'CYGNSS mask file not found!' + ! call ldas_abort(LDAS_GENERIC_ERROR, Iam, err_msg) + ! end if + + ! ! 
open the CYGNSS mask file + + ! ierr = nf90_open(trim(tmpmaskname), nf90_nowrite, ncid) + + ! ! get variable dimension IDs + ! ierr = nf90_inq_dimid(ncid, 'lon', lon_dimid) + ! ierr = nf90_inq_dimid(ncid, 'lat', lat_dimid) + + ! ! dimensions sizes + ! ierr = nf90_inquire_dimension(ncid, lon_dimid, len=N_lon_m) + ! ierr = nf90_inquire_dimension(ncid, lat_dimid, len=N_lat_m) + + ! ! get variable IDs + ! ierr = nf90_inq_varid(ncid, 'longitude', longitudes_m_varid) + ! ierr = nf90_inq_varid(ncid, 'latitude', latitudes_m_varid) + ! ierr = nf90_inq_varid(ncid, 'flag_small_SM_range', small_SM_range_varid) + ! ierr = nf90_inq_varid(ncid, 'flag_poor_SMAP', poor_SMAP_varid) + ! ierr = nf90_inq_varid(ncid, 'flag_high_ubrmsd', high_ubrmsd_varid) + ! ierr = nf90_inq_varid(ncid, 'flag_few_obs', few_obs_varid) + ! ierr = nf90_inq_varid(ncid, 'flag_low_signal', low_signal_varid) + + ! ! allocate memory for the variables + ! allocate(latitudes_m( N_lon_m, N_lat_m)) + ! allocate(longitudes_m( N_lon_m, N_lat_m)) + ! allocate(small_SM_range_flag(N_lon_m, N_lat_m)) + ! allocate(poor_SMAP_flag( N_lon_m, N_lat_m)) + ! allocate(high_ubrmsd_flag( N_lon_m, N_lat_m)) + ! allocate(few_obs_flag( N_lon_m, N_lat_m)) + ! allocate(low_signal_flag( N_lon_m, N_lat_m)) + + ! ! read the variables + ! ierr = nf90_get_var(ncid, latitudes_m_varid, latitudes_m) + ! ierr = nf90_get_var(ncid, longitudes_m_varid, longitudes_m) + ! ierr = nf90_get_var(ncid, small_SM_range_varid, small_SM_range_flag) + ! ierr = nf90_get_var(ncid, poor_SMAP_varid, poor_SMAP_flag) + ! ierr = nf90_get_var(ncid, high_ubrmsd_varid, high_ubrmsd_flag) + ! ierr = nf90_get_var(ncid, few_obs_varid, few_obs_flag) + ! ierr = nf90_get_var(ncid, low_signal_varid, low_signal_flag) + + ! ! close the mask file + ! ierr = nf90_close(ncid) + + ! ! check the obs data and mask data are the same resolution + ! if (N_lon /= N_lon_m .or. N_lat /= N_lat_m) then + ! 
err_msg = 'The mask file ' // trim(this_obs_param%maskname) // ' does not match the obs resolution' + ! call ldas_abort(LDAS_GENERIC_ERROR, Iam, err_msg) + ! end if + + ! good_flag_value = 255 ! should really be 0 but is 255 because of unsigned v. signed byte issues ! fill tmp arrays N_obs = 0 do i = 1, N_lon do j = 1, N_lat - if (tmp_sm(i,j) .ne. this_obs_param%nodata .and. & - small_SM_range_flag(i,j) == good_flag_value .and. & - poor_SMAP_flag(i,j) == good_flag_value .and. & - high_ubrmsd_flag(i,j) == good_flag_value .and. & - few_obs_flag(i,j) == good_flag_value .and. & - low_signal_flag(i,j) == good_flag_value ) then + ! if (tmp_sm(i,j) .ne. this_obs_param%nodata .and. & + ! small_SM_range_flag(i,j) == good_flag_value .and. & + ! poor_SMAP_flag(i,j) == good_flag_value .and. & + ! high_ubrmsd_flag(i,j) == good_flag_value .and. & + ! few_obs_flag(i,j) == good_flag_value .and. & + ! low_signal_flag(i,j) == good_flag_value ) then + + if (tmp_sm(i,j) .ne. this_obs_param%nodata) then ! valid observation N_obs = N_obs + 1 @@ -2631,18 +2640,18 @@ subroutine read_obs_sm_CYGNSS( & ! 
clean up - deallocate(timeintervals) - deallocate(latitudes) - deallocate(longitudes) - deallocate(tmp_sm) - deallocate(tmp_sigma) - deallocate(latitudes_m) - deallocate(longitudes_m) - deallocate(small_SM_range_flag) - deallocate(poor_SMAP_flag) - deallocate(high_ubrmsd_flag) - deallocate(few_obs_flag) - deallocate(low_signal_flag) + if (allocated(timeintervals)) deallocate(timeintervals) + if (allocated(latitudes)) deallocate(latitudes) + if (allocated(longitudes)) deallocate(longitudes) + if (allocated(tmp_sm)) deallocate(tmp_sm) + if (allocated(tmp_sigma)) deallocate(tmp_sigma) + if (allocated(latitudes_m)) deallocate(latitudes_m) + if (allocated(longitudes_m)) deallocate(longitudes_m) + if (allocated(small_SM_range_flag)) deallocate(small_SM_range_flag) + if (allocated(poor_SMAP_flag)) deallocate(poor_SMAP_flag) + if (allocated(high_ubrmsd_flag)) deallocate(high_ubrmsd_flag) + if (allocated(few_obs_flag)) deallocate(few_obs_flag) + if (allocated(low_signal_flag)) deallocate(low_signal_flag) if (associated(tmp_obs)) deallocate(tmp_obs) if (associated(tmp_lon)) deallocate(tmp_lon) @@ -11069,4 +11078,3 @@ end program test #endif ! ******* EOF ************************************************************* - diff --git a/GEOSlandassim_GridComp/io_hdf5.F90 b/GEOSlandassim_GridComp/io_hdf5.F90 index fb339d0b..eba2c8c5 100644 --- a/GEOSlandassim_GridComp/io_hdf5.F90 +++ b/GEOSlandassim_GridComp/io_hdf5.F90 @@ -32,15 +32,19 @@ module io_hdf5 integer, parameter :: UNINIT_INT = -99999 character(len=*), parameter :: UNINIT_STR = "" + logical, save :: hdf5_inited = .false. + integer(hid_t), parameter :: INVALID_HID = int(-1,kind=hid_t) + type, public :: hdf5read private - character(len=256) :: file_name = UNINIT_STR - integer(hid_t) :: file_id = UNINIT_INT - character(len=256) :: dset_name = UNINIT_STR - integer(hid_t) :: dset_id = UNINIT_INT, dspace_id = UNINIT_INT, dtype_id = UNINIT_INT - integer :: dset_rank = UNINIT_INT - ! 
7 is the max dimension of a fortran array - integer(hsize_t) :: dset_size(7) = UNINIT_INT, dset_max_size(7) = UNINIT_INT + character(len=1024) :: file_name = UNINIT_STR + integer(hid_t) :: file_id = INVALID_HID + character(len=1024) :: dset_name = UNINIT_STR + integer(hid_t) :: dset_id = INVALID_HID + integer(hid_t) :: dspace_id = INVALID_HID + integer :: dset_rank = 0 + integer(hsize_t) :: dset_size(7) = 0_hsize_t + integer(hsize_t) :: dset_max_size(7) = 0_hsize_t contains ! public procedure, public :: openFile @@ -60,50 +64,48 @@ module io_hdf5 ! open file subroutine openFile(this, filename) - - ! input/output variables - ! NEED class(hdf5read) instead of type(hdf5read) class (hdf5read), intent(inout) :: this character(len=*), intent(in) :: filename - - ! local variable integer :: hdf5err - ! set obj param val this%file_name = filename - ! initialize fortran interface - call h5open_f(hdf5err) - call checkErrCode_('h5open_f', hdf5err) + ! Initialize HDF5 fortran interface once per process + if (.not. hdf5_inited) then + call h5open_f(hdf5err) + call checkErrCode_('h5open_f', hdf5err) + hdf5_inited = .true. + end if - ! open existing file call h5fopen_f(this%file_name, H5F_ACC_RDONLY_F, this%file_id, hdf5err) call checkErrCode_('h5fopen_f', hdf5err) - end subroutine openFile ! close already opened file subroutine closeFile(this) - - ! input/output variables class (hdf5read), intent(inout) :: this - - ! local variable integer :: hdf5err - ! ensure that dataset has been closed - if (this%dset_name/=UNINIT_STR) stop "ERROR: Open dataset needs to be closed first. Stopping!" + if (this%dset_name /= UNINIT_STR) call this%uninitDataset - ! close file - call h5fclose_f(this%file_id, hdf5err) - call checkErrCode_('h5fclose_f', hdf5err) - this%file_name = UNINIT_STR - this%file_id = UNINIT_INT + if (this%dspace_id >= 0) then + call h5sclose_f(this%dspace_id, hdf5err) + if (hdf5err >= 0) this%dspace_id = INVALID_HID + end if - ! 
close fortran interface - call h5close_f(hdf5err) - call checkErrCode_('h5close_f', hdf5err) - + if (this%dset_id >= 0) then + call h5dclose_f(this%dset_id, hdf5err) + if (hdf5err >= 0) this%dset_id = INVALID_HID + end if + + if (this%file_id >= 0) then + call h5fclose_f(this%file_id, hdf5err) + call checkErrCode_('h5fclose_f', hdf5err) + this%file_name = UNINIT_STR + this%file_id = INVALID_HID + end if + + ! Do NOT call h5close_f() here. end subroutine closeFile ! query dataset for number of dims and its shape @@ -118,124 +120,127 @@ subroutine queryDataset(this, dsetName, dsetRank, dsetSize) ! local variable integer :: hdf5err - ! ensure that file_name is set i.e. openFile - ! must have been called prior to this routine - if (this%file_name==UNINIT_STR) stop "ERROR: No open file available. Stopping!" + ! ensure that dataset has been uninitialized + if (this%dset_name/=UNINIT_STR) call this%uninitDataset ! set obj param val - this%dset_name = dsetname + this%dset_name = dsetName - ! open datset from already opened file + ! open dataset call h5dopen_f(this%file_id, this%dset_name, this%dset_id, hdf5err) call checkErrCode_('h5dopen_f', hdf5err) - ! get dataspace id + ! get data space call h5dget_space_f(this%dset_id, this%dspace_id, hdf5err) call checkErrCode_('h5dget_space_f', hdf5err) - ! get num of dimensions + ! get number of dims and their sizes call h5sget_simple_extent_ndims_f(this%dspace_id, this%dset_rank, hdf5err) call checkErrCode_('h5sget_simple_extent_ndims_f', hdf5err) - dsetRank = this%dset_rank - ! get size of array call h5sget_simple_extent_dims_f(this%dspace_id, this%dset_size, this%dset_max_size, hdf5err) call checkErrCode_('h5sget_simple_extent_dims_f', hdf5err) - dsetSize = this%dset_size - end subroutine queryDataset + ! return variables + dsetRank = this%dset_rank + dsetSize = int(this%dset_size) + end subroutine queryDataset - ! uninitalize dataset + ! uninitialize dataset (close dataset and data space) subroutine uninitDataset(this) ! 
input/output variables class (hdf5read), intent(inout) :: this - ! un-initialize everything related to - ! the dataset queried/read + ! local variable + integer :: hdf5err + + ! close data space + if (this%dspace_id >= 0) then + call h5sclose_f(this%dspace_id, hdf5err) + call checkErrCode_('h5sclose_f', hdf5err) + this%dspace_id = INVALID_HID + end if + + ! close dataset + if (this%dset_id >= 0) then + call h5dclose_f(this%dset_id, hdf5err) + call checkErrCode_('h5dclose_f', hdf5err) + this%dset_id = INVALID_HID + end if + + ! uninit obj param val this%dset_name = UNINIT_STR - this%dset_id = UNINIT_INT - this%dspace_id = UNINIT_INT - this%dset_rank = UNINIT_INT - this%dset_size = UNINIT_INT - this%dset_max_size = UNINIT_INT - this%dtype_id = UNINIT_INT + this%dset_rank = 0 + this%dset_size = 0_hsize_t + this%dset_max_size = 0_hsize_t end subroutine uninitDataset - - ! read the dataset that was queried earlier + ! read 1D character*24 dataset subroutine readDataset1DChar24(this, dataChar) - - ! input/output variables class (hdf5read), intent(inout) :: this character(len=24), intent(out) :: dataChar(:) - - ! local variable integer :: hdf5err + integer(hid_t) :: memtype_id - ! ensure that dset_name is set i.e. openDataset - ! must have been called prior to this routine - if (this%dset_name==UNINIT_STR) stop "ERROR: No open dataset available. Stopping!" + ! ensure that dset_name is set + if (this%dset_name == UNINIT_STR) then + write(*,*) 'ERROR readDataset1DChar24: No open dataset available' + stop + end if - if (this%dset_size(1)==0) then - print *, 'Datset ', trim(this%dset_name), ' in file ', trim(this%file_name), ' is empty' - else - ! get data type - call h5dget_type_f(this%dset_id, this%dtype_id, hdf5err) - - ! read data - call h5dread_f(this%dset_id, this%dtype_id, dataChar, this%dset_size, hdf5err) - call checkErrCode_('h5dread_f', hdf5err) + if (this%dset_size(1) == 0) then + write(*,*) 'Dataset ', trim(this%dset_name), ' is empty' + return end if - ! 
close dataset - call h5dclose_f(this%dset_id, hdf5err) - call checkErrCode_('h5dclose_f', hdf5err) + ! Create the memory datatype for the fixed-length strings + call h5tcopy_f(H5T_FORTRAN_S1, memtype_id, hdf5err) + call checkErrCode_('h5tcopy_f', hdf5err) + + ! Set size to 24 characters + call h5tset_size_f(memtype_id, 24_size_t, hdf5err) + call checkErrCode_('h5tset_size_f', hdf5err) + + ! Read the data using our created type + call h5dread_f(this%dset_id, memtype_id, dataChar, this%dset_size(1:this%dset_rank), hdf5err) + call checkErrCode_('h5dread_f', hdf5err) + + ! Close the memory datatype + call h5tclose_f(memtype_id, hdf5err) + call checkErrCode_('h5tclose_f', hdf5err) ! un-initialize dataset just queried/read call this%uninitDataset end subroutine readDataset1DChar24 - - ! read the dataset that was queried earlier + ! read 1D real dataset subroutine readDataset1DReal(this, data1D) - - ! input/output variables class (hdf5read), intent(inout) :: this - real, intent(out) :: data1D(:) - - ! local variable + real(REAL32), intent(out) :: data1D(:) integer :: hdf5err - ! ensure that dset_name is set i.e. openDataset - ! must have been called prior to this routine - if (this%dset_name==UNINIT_STR) stop "ERROR: No open dataset available. Stopping!" + if (this%dset_name==UNINIT_STR) then + write(*,*) 'ERROR: readDataset1DReal No open dataset available' + stop + end if if (this%dset_size(1)==0) then - print *, 'Datset ', trim(this%dset_name), ' in file ', trim(this%file_name), ' is empty' + print *, 'Dataset ', trim(this%dset_name), ' is empty' else - ! get data type - call h5dget_type_f(this%dset_id, this%dtype_id, hdf5err) - - ! read data - call h5dread_f(this%dset_id, this%dtype_id, data1D, this%dset_size, hdf5err) + call h5dread_f(this%dset_id, H5T_NATIVE_REAL, data1D, this%dset_size(1:this%dset_rank), hdf5err) call checkErrCode_('h5dread_f', hdf5err) end if - ! close dataset - call h5dclose_f(this%dset_id, hdf5err) - call checkErrCode_('h5dclose_f', hdf5err) - ! 
un-initialize dataset just queried/read call this%uninitDataset end subroutine readDataset1DReal - - ! read the dataset that was queried earlier + ! read 1D real8 dataset subroutine readDataset1DReal8(this, data1D) ! input/output variables @@ -245,31 +250,25 @@ subroutine readDataset1DReal8(this, data1D) ! local variable integer :: hdf5err - ! ensure that dset_name is set i.e. openDataset - ! must have been called prior to this routine - if (this%dset_name==UNINIT_STR) stop "ERROR: No open dataset available. Stopping!" + ! check dataset state + if (this%dset_name == UNINIT_STR) then + write(*,*) 'ERROR readDataset1DReal8: No open dataset available' + stop + end if - if (this%dset_size(1)==0) then - print *, 'Datset ', trim(this%dset_name), ' in file ', trim(this%file_name), ' is empty' + if (this%dset_size(1) == 0) then + write(*,*) 'Dataset ', trim(this%dset_name), ' is empty' else - ! get data type - call h5dget_type_f(this%dset_id, this%dtype_id, hdf5err) - - ! read data - call h5dread_f(this%dset_id, this%dtype_id, data1D, this%dset_size, hdf5err) + call h5dread_f(this%dset_id, H5T_NATIVE_DOUBLE, data1D, this%dset_size(1:this%dset_rank), hdf5err) call checkErrCode_('h5dread_f', hdf5err) end if - ! close dataset - call h5dclose_f(this%dset_id, hdf5err) - call checkErrCode_('h5dclose_f', hdf5err) - ! un-initialize dataset just queried/read call this%uninitDataset end subroutine readDataset1DReal8 - + ! read 1D integer dataset subroutine readDataset1DInt(this, data1D) ! input/output variables @@ -279,32 +278,25 @@ subroutine readDataset1DInt(this, data1D) ! local variable integer :: hdf5err - ! ensure that dset_name is set i.e. openDataset - ! must have been called prior to this routine - if (this%dset_name==UNINIT_STR) stop "ERROR: No open dataset available. Stopping!" + ! 
check dataset state + if (this%dset_name == UNINIT_STR) then + write(*,*) 'ERROR readDataset1DInt: No open dataset available' + stop + end if - if (this%dset_size(1)==0) then - print *, 'Datset ', trim(this%dset_name), ' in file ', trim(this%file_name), ' is empty' + if (this%dset_size(1) == 0) then + write(*,*) 'Dataset ', trim(this%dset_name), ' is empty' else - ! get data type - call h5dget_type_f(this%dset_id, this%dtype_id, hdf5err) - - ! read data - !call h5dread_f(this%dset_id, this%dtype_id, data1D, this%dset_size, hdf5err) - call h5dread_f(this%dset_id, H5T_NATIVE_INTEGER, data1D, this%dset_size, hdf5err) + call h5dread_f(this%dset_id, H5T_NATIVE_INTEGER, data1D, this%dset_size(1:this%dset_rank), hdf5err) call checkErrCode_('h5dread_f', hdf5err) end if - ! close dataset - call h5dclose_f(this%dset_id, hdf5err) - call checkErrCode_('h5dclose_f', hdf5err) - ! un-initialize dataset just queried/read call this%uninitDataset end subroutine readDataset1DInt - + ! read 2D real dataset subroutine readDataset2DReal(this, data2D) ! input/output variables @@ -314,25 +306,19 @@ subroutine readDataset2DReal(this, data2D) ! local variable integer :: hdf5err - ! ensure that dset_name is set i.e. openDataset - ! must have been called prior to this routine - if (this%dset_name==UNINIT_STR) stop "ERROR: No open dataset available. Stopping!" + ! check dataset state + if (this%dset_name == UNINIT_STR) then + write(*,*) 'ERROR readDataset2DReal: No open dataset available' + stop + end if - if (this%dset_size(1)==0) then - print *, 'Datset ', trim(this%dset_name), ' in file ', trim(this%file_name), ' is empty' + if (this%dset_size(1) == 0) then + write(*,*) 'Dataset ', trim(this%dset_name), ' is empty' else - ! get data type - call h5dget_type_f(this%dset_id, this%dtype_id, hdf5err) - - ! 
read data - call h5dread_f(this%dset_id, this%dtype_id, data2D, this%dset_size, hdf5err) + call h5dread_f(this%dset_id, H5T_NATIVE_REAL, data2D, this%dset_size(1:this%dset_rank), hdf5err) call checkErrCode_('h5dread_f', hdf5err) end if - ! close dataset - call h5dclose_f(this%dset_id, hdf5err) - call checkErrCode_('h5dclose_f', hdf5err) - ! un-initialize dataset just queried/read call this%uninitDataset @@ -347,7 +333,7 @@ subroutine checkErrCode_(routineName, hdf5errCode) integer, intent(in) :: hdf5errCode if (hdf5errCode<0) then - write(*,*) 'ERROR: ', routineName, ' returned NEGATIVE err code. Stopping!' + write(*,*) 'ERROR: ', routineName, ' returned NEGATIVE err code: ', hdf5errCode, '. Stopping!' stop end if @@ -363,10 +349,11 @@ end module io_hdf5 program test_read use io_hdf5 + use iso_fortran_env implicit none - character(len=*), parameter :: file_name = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB/Y2001/M07/D20/SMAP_L1C_TB_02915_D_20010720T002132_D04003_000.h5' + character(len=*), parameter :: file_name = '/discover/nobackup/mathomp4/LDAS_Restarts/NGHTLY_TST_TV4000/obs/SMAP/L1C_TB//Y2017/M10/D15/SMAP_L1C_TB_14443_A_20171015T021929_T15160_001.h5' character(len=300) :: dsetName type(hdf5read) :: h5r @@ -381,7 +368,7 @@ program test_read character(len=24), pointer, dimension(:) :: tb_time_utc_aft => null() end type MyDataType type(MyDataType), dimension(1) :: data - + print *, 'HDF5 file: ', trim(file_name) print *, '' diff --git a/GEOSldas_App/CMakeLists.txt b/GEOSldas_App/CMakeLists.txt index 0f4024a7..a2cf7c34 100644 --- a/GEOSldas_App/CMakeLists.txt +++ b/GEOSldas_App/CMakeLists.txt @@ -9,17 +9,14 @@ ecbuild_add_executable ( SOURCES preprocess_ldas.F90 preprocess_ldas_routines.F90 LIBS GEOSldas_GridComp MAPL) -ecbuild_add_executable ( - TARGET tile_bin2nc4.x - SOURCES tile_bin2nc4.F90 - LIBS MAPL) - ecbuild_add_executable ( TARGET mwrtm_bin2nc4.x SOURCES util/inputs/mwRTM_params/mwrtm_bin2nc4.F90 LIBS GEOSlandassim_GridComp) set (scripts + 
ldas_setup + setup_utils.py process_hist.csh ens_forcing/average_ensemble_forcing.py ens_forcing/ensemble_forc.py @@ -34,7 +31,7 @@ install ( DESTINATION bin ) -set(file ldas_setup) +set(file ldas.py) configure_file(${file} ${file} @ONLY) install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/${file} DESTINATION bin) diff --git a/GEOSldas_App/GEOSldas_HIST.rc b/GEOSldas_App/GEOSldas_HIST.rc index 37ce4b59..3063e13d 100644 --- a/GEOSldas_App/GEOSldas_HIST.rc +++ b/GEOSldas_App/GEOSldas_HIST.rc @@ -13,7 +13,6 @@ EXPID: GEOSldas_expid # pre-defined Collections COLLECTIONS: -#OUT1d 'tavg24_1d_lfs_Nt' #OUT2d 'tavg24_2d_lfs_Nx' #OUT1d 'tavg24_1d_lnd_Nt' #OUT2d 'tavg24_2d_lnd_Nx' @@ -68,32 +67,14 @@ EASEv2_M36.LM: 1 # with "gzip"; nc4 files can be compressed using the "compress_bit-shaved_nc4.sh" # utility script. - tavg24_1d_lfs_Nt.descr: 'Tile-space,Daily,Time-Averaged,Single-Level,Assimilation,Land Surface Forcings and States', - tavg24_1d_lfs_Nt.nbits: 12, - tavg24_1d_lfs_Nt.template: '%y4%m2%d2_%h2%n2z.bin', - tavg24_1d_lfs_Nt.archive: '%c/Y%y4', - tavg24_1d_lfs_Nt.mode: 'time-averaged', - tavg24_1d_lfs_Nt.frequency: 240000, - tavg24_1d_lfs_Nt.ref_time: 000000, - tavg24_1d_lfs_Nt.fields:'Tair' , 'METFORCE' , - 'Qair' , 'METFORCE' , - 'LWdown' , 'METFORCE' , - 'SWdown' , 'METFORCE' , - 'Wind' , 'METFORCE' , - 'Psurf' , 'METFORCE' , - 'Rainf_C' , 'METFORCE' , - 'Rainf' , 'METFORCE' , - 'Snowf' , 'METFORCE' , - 'RainfSnowf' , 'METFORCE' , - 'RefH' , 'METFORCE' , - 'CATDEF' , 'GridComp' , - 'RZEXC' , 'GridComp' , - 'SRFEXC' , 'GridComp' , - 'WESNN1' , 'GridComp' , - 'WESNN2' , 'GridComp' , - 'WESNN3' , 'GridComp' , - 'HLWUP' , 'GridComp' , - :: +# NOTE (Nov 2025): +# Changes for *1d* output with recent introduction of land+landice simulations: +# - Direct *1d* output to nc4 via MAPL. +# - Removed tile_bin2nc4.F90; no longer works for land-only collections in land+landice +# simulations because it uses tilecoord.bin, which contains info for all tiles. 
+# - Removed *1d* lfs collection; no longer works in land+landice simulations because of +# different tile spaces for CATCH and METFORCE. +# tavg24_2d_lfs_Nx.descr: '2d,Daily,Time-Averaged,Single-Level,Assimilation,Land Surface Forcings and States', tavg24_2d_lfs_Nx.nbits: 12, @@ -129,8 +110,9 @@ EASEv2_M36.LM: 1 tavg24_1d_lnd_Nt.descr: 'Tile-space,Daily,Time-Averaged,Single-Level,Assimilation,Land Surface Diagnostics', tavg24_1d_lnd_Nt.nbits: 12, - tavg24_1d_lnd_Nt.template: '%y4%m2%d2_%h2%n2z.bin', + tavg24_1d_lnd_Nt.template: '%y4%m2%d2_%h2%n2z.nc4', tavg24_1d_lnd_Nt.mode: 'time-averaged', + tavg24_1d_lnd_Nt.format: 'CFIO', tavg24_1d_lnd_Nt.frequency: 240000, tavg24_1d_lnd_Nt.ref_time: 000000, tavg24_1d_lnd_Nt.fields: 'GRN' , 'VEGDYN' , @@ -334,8 +316,9 @@ EASEv2_M36.LM: 1 const_1d_lnd_Nt.descr: 'Tile-space,Constant,Time-invariant,Single-Level,Assimilation,Land Surface Model Parameters', - const_1d_lnd_Nt.template: '%y4%m2%d2_%h2%n2z.bin', + const_1d_lnd_Nt.template: '%y4%m2%d2_%h2%n2z.nc4', const_1d_lnd_Nt.mode: 'instantaneous', + const_1d_lnd_Nt.format: 'CFIO', const_1d_lnd_Nt.frequency: 240000, const_1d_lnd_Nt.ref_time: 000000, const_1d_lnd_Nt.fields: 'DZGT1' , 'GridComp' , @@ -386,8 +369,9 @@ EASEv2_M36.LM: 1 SMAP_L4_SM_gph.descr: 'Tile-space,3-Hourly,Time-Averaged,Single-Level,Assimilation,SMAP L4_SM Land Geophysical Diagnostics', SMAP_L4_SM_gph.nbits: 12, - SMAP_L4_SM_gph.template: '%y4%m2%d2_%h2%n2z.bin', + SMAP_L4_SM_gph.template: '%y4%m2%d2_%h2%n2z.nc4', SMAP_L4_SM_gph.mode: 'time-averaged', + SMAP_L4_SM_gph.format: 'CFIO', SMAP_L4_SM_gph.frequency: 030000, SMAP_L4_SM_gph.ref_time: 000000, SMAP_L4_SM_gph.fields: 'WCSF' , 'ENSAVG' , 'sm_surface' , @@ -437,8 +421,9 @@ EASEv2_M36.LM: 1 inst1_1d_lnr_Nt.descr: 'Tile-space,1-Hourly,Instantaneous,Single-Level,Assimilation,Land Nature Run Diagnostics', inst1_1d_lnr_Nt.nbits: 12, - inst1_1d_lnr_Nt.template: '%y4%m2%d2_%h2%n2z.bin' , + inst1_1d_lnr_Nt.template: '%y4%m2%d2_%h2%n2z.nc4' , inst1_1d_lnr_Nt.mode: 
'instantaneous' , + inst1_1d_lnr_Nt.format: 'CFIO', inst1_1d_lnr_Nt.frequency: 010000 , inst1_1d_lnr_Nt.ref_time: 000000, inst1_1d_lnr_Nt.fields: 'TPSURF' , 'ENSAVG' , 'surface_temp' , @@ -453,8 +438,9 @@ EASEv2_M36.LM: 1 # By default, no bit shaving for increments output. catch_progn_incr.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation,Ensemble-Average Land Prognostics Increments', - catch_progn_incr.template: '%y4%m2%d2_%h2%n2z.bin', + catch_progn_incr.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr.mode: 'instantaneous', + catch_progn_incr.format: 'CFIO', catch_progn_incr.frequency: 030000, catch_progn_incr.ref_time: 000000, catch_progn_incr.fields: 'TCFSAT_INCR' , 'LANDASSIM' , @@ -491,8 +477,9 @@ EASEv2_M36.LM: 1 inst3_1d_lndfcstana_Nt.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation,Ensemble Land Forecast and Analysis Diagnostics', inst3_1d_lndfcstana_Nt.nbits: 12, - inst3_1d_lndfcstana_Nt.template: '%y4%m2%d2_%h2%n2z.bin', + inst3_1d_lndfcstana_Nt.template: '%y4%m2%d2_%h2%n2z.nc4', inst3_1d_lndfcstana_Nt.mode: 'instantaneous', + inst3_1d_lndfcstana_Nt.format: 'CFIO', inst3_1d_lndfcstana_Nt.frequency: 030000, inst3_1d_lndfcstana_Nt.ref_time: 000000, inst3_1d_lndfcstana_Nt.fields: 'WCSF' , 'ENSAVG' , 'SFMC_FCST' , @@ -601,8 +588,9 @@ EASEv2_M36.LM: 1 tavg24_1d_glc_Nt.descr: 'Tile-space,Daily,Time-Averaged,Single-level,Land Ice Diagnostics', tavg24_1d_glc_Nt.nbits: 12, - tavg24_1d_glc_Nt.template: '%y4%m2%d2_%h2%n2z.bin' , + tavg24_1d_glc_Nt.template: '%y4%m2%d2_%h2%n2z.nc4' , tavg24_1d_glc_Nt.mode: 'time-averaged' , + tavg24_1d_glc_Nt.format: 'CFIO', tavg24_1d_glc_Nt.frequency: 240000 , tavg24_1d_glc_Nt.ref_time: 000000 , tavg24_1d_glc_Nt.fields: 'ASNOW_GL' , 'LANDICE' , diff --git a/GEOSldas_App/GEOSldas_HISTdet.rc b/GEOSldas_App/GEOSldas_HISTdet.rc index 55995fc3..93442adb 100644 --- a/GEOSldas_App/GEOSldas_HISTdet.rc +++ b/GEOSldas_App/GEOSldas_HISTdet.rc @@ -37,8 +37,9 @@ PC576x361-DC.LM: 1 catch_progn_incr.descr: 
'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation,Ensemble-Average Land Prognostics Increments', -catch_progn_incr.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr.mode: 'instantaneous', +catch_progn_incr.format: 'CFIO', catch_progn_incr.frequency: 030000, catch_progn_incr.ref_time: 013000, catch_progn_incr.fields: 'TCFSAT_INCR' , 'LANDASSIM' , @@ -70,8 +71,9 @@ catch_progn_incr.fields: 'TCFSAT_INCR' , 'LANDASSIM' , tavg3_1d_lnd_Nt.descr: 'Tile-space,Time-Averaged,Single-Level,Assimilation,Land Surface Diagnostics', tavg3_1d_lnd_Nt.nbits: 12, - tavg3_1d_lnd_Nt.template: '%y4%m2%d2_%h2%n2z.bin', + tavg3_1d_lnd_Nt.template: '%y4%m2%d2_%h2%n2z.nc4', tavg3_1d_lnd_Nt.mode: 'time-averaged', + tavg3_1d_lnd_Nt.format: 'CFIO', tavg3_1d_lnd_Nt.frequency: 030000, tavg3_1d_lnd_Nt.ref_time: 013000, tavg3_1d_lnd_Nt.fields: 'WET3' , 'ENSAVG' , 'GWETPROF' , diff --git a/GEOSldas_App/GEOSldas_HISTens.rc b/GEOSldas_App/GEOSldas_HISTens.rc index 872be4ca..135ca2bd 100644 --- a/GEOSldas_App/GEOSldas_HISTens.rc +++ b/GEOSldas_App/GEOSldas_HISTens.rc @@ -85,8 +85,9 @@ PC360x181-DC.LM: 1 tavg3_1d_lnd_Nt.descr: 'Tile-space,Time-Averaged,Single-Level,Assimilation,Land Surface Diagnostics', tavg3_1d_lnd_Nt.nbits: 12, -tavg3_1d_lnd_Nt.template: '%y4%m2%d2_%h2%n2z.bin', +tavg3_1d_lnd_Nt.template: '%y4%m2%d2_%h2%n2z.nc4', tavg3_1d_lnd_Nt.mode: 'time-averaged', +tavg3_1d_lnd_Nt.format: 'CFIO', tavg3_1d_lnd_Nt.frequency: 030000, tavg3_1d_lnd_Nt.ref_time: 000000, tavg3_1d_lnd_Nt.fields: 'WET3' , 'ENSAVG' , 'GWETPROF' , @@ -181,8 +182,9 @@ inst3_2d_lndfcstana_Nx.fields: 'WCSF' , 'ENSAVG' , 'SFMC_FCS catch_progn_incr0001.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0001.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0001.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0001.mode: 'instantaneous', +catch_progn_incr0001.format: 'CFIO', 
catch_progn_incr0001.frequency: 030000, catch_progn_incr0001.ref_time: 013000, catch_progn_incr0001.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0001' , @@ -213,8 +215,9 @@ catch_progn_incr0001.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0001' , :: catch_progn_incr0002.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0002.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0002.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0002.mode: 'instantaneous', +catch_progn_incr0002.format: 'CFIO', catch_progn_incr0002.frequency: 030000, catch_progn_incr0002.ref_time: 013000, catch_progn_incr0002.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0002' , @@ -245,8 +248,9 @@ catch_progn_incr0002.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0002' , :: catch_progn_incr0003.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0003.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0003.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0003.mode: 'instantaneous', +catch_progn_incr0003.format: 'CFIO', catch_progn_incr0003.frequency: 030000, catch_progn_incr0003.ref_time: 013000, catch_progn_incr0003.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0003' , @@ -277,8 +281,9 @@ catch_progn_incr0003.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0003' , :: catch_progn_incr0004.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0004.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0004.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0004.mode: 'instantaneous', +catch_progn_incr0004.format: 'CFIO', catch_progn_incr0004.frequency: 030000, catch_progn_incr0004.ref_time: 013000, catch_progn_incr0004.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0004' , @@ -309,8 +314,9 @@ catch_progn_incr0004.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0004' , :: catch_progn_incr0005.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', 
-catch_progn_incr0005.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0005.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0005.mode: 'instantaneous', +catch_progn_incr0005.format: 'CFIO', catch_progn_incr0005.frequency: 030000, catch_progn_incr0005.ref_time: 013000, catch_progn_incr0005.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0005' , @@ -341,8 +347,9 @@ catch_progn_incr0005.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0005' , :: catch_progn_incr0006.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0006.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0006.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0006.mode: 'instantaneous', +catch_progn_incr0006.format: 'CFIO', catch_progn_incr0006.frequency: 030000, catch_progn_incr0006.ref_time: 013000, catch_progn_incr0006.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0006' , @@ -373,8 +380,9 @@ catch_progn_incr0006.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0006' , :: catch_progn_incr0007.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0007.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0007.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0007.mode: 'instantaneous', +catch_progn_incr0007.format: 'CFIO', catch_progn_incr0007.frequency: 030000, catch_progn_incr0007.ref_time: 013000, catch_progn_incr0007.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0007' , @@ -405,8 +413,9 @@ catch_progn_incr0007.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0007' , :: catch_progn_incr0008.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0008.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0008.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0008.mode: 'instantaneous', +catch_progn_incr0008.format: 'CFIO', catch_progn_incr0008.frequency: 030000, catch_progn_incr0008.ref_time: 013000, catch_progn_incr0008.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0008' , @@ -437,8 
+446,9 @@ catch_progn_incr0008.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0008' , :: catch_progn_incr0009.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0009.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0009.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0009.mode: 'instantaneous', +catch_progn_incr0009.format: 'CFIO', catch_progn_incr0009.frequency: 030000, catch_progn_incr0009.ref_time: 013000, catch_progn_incr0009.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0009' , @@ -469,8 +479,9 @@ catch_progn_incr0009.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0009' , :: catch_progn_incr0010.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0010.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0010.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0010.mode: 'instantaneous', +catch_progn_incr0010.format: 'CFIO', catch_progn_incr0010.frequency: 030000, catch_progn_incr0010.ref_time: 013000, catch_progn_incr0010.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0010' , @@ -501,8 +512,9 @@ catch_progn_incr0010.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0010' , :: catch_progn_incr0011.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0011.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0011.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0011.mode: 'instantaneous', +catch_progn_incr0011.format: 'CFIO', catch_progn_incr0011.frequency: 030000, catch_progn_incr0011.ref_time: 013000, catch_progn_incr0011.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0011' , @@ -533,8 +545,9 @@ catch_progn_incr0011.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0011' , :: catch_progn_incr0012.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0012.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0012.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0012.mode: 'instantaneous', 
+catch_progn_incr0012.format: 'CFIO', catch_progn_incr0012.frequency: 030000, catch_progn_incr0012.ref_time: 013000, catch_progn_incr0012.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0012' , @@ -565,8 +578,9 @@ catch_progn_incr0012.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0012' , :: catch_progn_incr0013.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0013.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0013.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0013.mode: 'instantaneous', +catch_progn_incr0013.format: 'CFIO', catch_progn_incr0013.frequency: 030000, catch_progn_incr0013.ref_time: 013000, catch_progn_incr0013.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0013' , @@ -597,8 +611,9 @@ catch_progn_incr0013.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0013' , :: catch_progn_incr0014.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0014.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0014.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0014.mode: 'instantaneous', +catch_progn_incr0014.format: 'CFIO', catch_progn_incr0014.frequency: 030000, catch_progn_incr0014.ref_time: 013000, catch_progn_incr0014.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0014' , @@ -629,8 +644,9 @@ catch_progn_incr0014.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0014' , :: catch_progn_incr0015.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0015.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0015.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0015.mode: 'instantaneous', +catch_progn_incr0015.format: 'CFIO', catch_progn_incr0015.frequency: 030000, catch_progn_incr0015.ref_time: 013000, catch_progn_incr0015.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0015' , @@ -661,8 +677,9 @@ catch_progn_incr0015.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0015' , :: catch_progn_incr0016.descr: 
'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0016.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0016.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0016.mode: 'instantaneous', +catch_progn_incr0016.format: 'CFIO', catch_progn_incr0016.frequency: 030000, catch_progn_incr0016.ref_time: 013000, catch_progn_incr0016.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0016' , @@ -693,8 +710,9 @@ catch_progn_incr0016.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0016' , :: catch_progn_incr0017.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0017.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0017.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0017.mode: 'instantaneous', +catch_progn_incr0017.format: 'CFIO', catch_progn_incr0017.frequency: 030000, catch_progn_incr0017.ref_time: 013000, catch_progn_incr0017.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0017' , @@ -725,8 +743,9 @@ catch_progn_incr0017.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0017' , :: catch_progn_incr0018.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0018.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0018.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0018.mode: 'instantaneous', +catch_progn_incr0018.format: 'CFIO', catch_progn_incr0018.frequency: 030000, catch_progn_incr0018.ref_time: 013000, catch_progn_incr0018.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0018' , @@ -757,8 +776,9 @@ catch_progn_incr0018.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0018' , :: catch_progn_incr0019.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0019.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0019.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0019.mode: 'instantaneous', +catch_progn_incr0019.format: 'CFIO', catch_progn_incr0019.frequency: 030000, 
catch_progn_incr0019.ref_time: 013000, catch_progn_incr0019.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0019' , @@ -789,8 +809,9 @@ catch_progn_incr0019.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0019' , :: catch_progn_incr0020.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0020.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0020.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0020.mode: 'instantaneous', +catch_progn_incr0020.format: 'CFIO', catch_progn_incr0020.frequency: 030000, catch_progn_incr0020.ref_time: 013000, catch_progn_incr0020.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0020' , @@ -821,8 +842,9 @@ catch_progn_incr0020.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0020' , :: catch_progn_incr0021.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0021.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0021.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0021.mode: 'instantaneous', +catch_progn_incr0021.format: 'CFIO', catch_progn_incr0021.frequency: 030000, catch_progn_incr0021.ref_time: 013000, catch_progn_incr0021.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0021' , @@ -853,8 +875,9 @@ catch_progn_incr0021.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0021' , :: catch_progn_incr0022.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0022.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0022.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0022.mode: 'instantaneous', +catch_progn_incr0022.format: 'CFIO', catch_progn_incr0022.frequency: 030000, catch_progn_incr0022.ref_time: 013000, catch_progn_incr0022.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0022' , @@ -885,8 +908,9 @@ catch_progn_incr0022.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0022' , :: catch_progn_incr0023.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0023.template: 
'%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0023.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0023.mode: 'instantaneous', +catch_progn_incr0023.format: 'CFIO', catch_progn_incr0023.frequency: 030000, catch_progn_incr0023.ref_time: 013000, catch_progn_incr0023.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0023' , @@ -917,8 +941,9 @@ catch_progn_incr0023.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0023' , :: catch_progn_incr0024.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0024.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0024.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0024.mode: 'instantaneous', +catch_progn_incr0024.format: 'CFIO', catch_progn_incr0024.frequency: 030000, catch_progn_incr0024.ref_time: 013000, catch_progn_incr0024.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0024' , @@ -949,8 +974,9 @@ catch_progn_incr0024.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0024' , :: catch_progn_incr0025.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0025.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0025.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0025.mode: 'instantaneous', +catch_progn_incr0025.format: 'CFIO', catch_progn_incr0025.frequency: 030000, catch_progn_incr0025.ref_time: 013000, catch_progn_incr0025.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0025' , @@ -981,8 +1007,9 @@ catch_progn_incr0025.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0025' , :: catch_progn_incr0026.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0026.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0026.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0026.mode: 'instantaneous', +catch_progn_incr0026.format: 'CFIO', catch_progn_incr0026.frequency: 030000, catch_progn_incr0026.ref_time: 013000, catch_progn_incr0026.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0026' , @@ -1013,8 +1040,9 @@ 
catch_progn_incr0026.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0026' , :: catch_progn_incr0027.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0027.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0027.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0027.mode: 'instantaneous', +catch_progn_incr0027.format: 'CFIO', catch_progn_incr0027.frequency: 030000, catch_progn_incr0027.ref_time: 013000, catch_progn_incr0027.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0027' , @@ -1045,8 +1073,9 @@ catch_progn_incr0027.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0027' , :: catch_progn_incr0028.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0028.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0028.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0028.mode: 'instantaneous', +catch_progn_incr0028.format: 'CFIO', catch_progn_incr0028.frequency: 030000, catch_progn_incr0028.ref_time: 013000, catch_progn_incr0028.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0028' , @@ -1077,8 +1106,9 @@ catch_progn_incr0028.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0028' , :: catch_progn_incr0029.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0029.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0029.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0029.mode: 'instantaneous', +catch_progn_incr0029.format: 'CFIO', catch_progn_incr0029.frequency: 030000, catch_progn_incr0029.ref_time: 013000, catch_progn_incr0029.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0029' , @@ -1109,8 +1139,9 @@ catch_progn_incr0029.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0029' , :: catch_progn_incr0030.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0030.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0030.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0030.mode: 'instantaneous', 
+catch_progn_incr0030.format: 'CFIO', catch_progn_incr0030.frequency: 030000, catch_progn_incr0030.ref_time: 013000, catch_progn_incr0030.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0030' , @@ -1141,8 +1172,9 @@ catch_progn_incr0030.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0030' , :: catch_progn_incr0031.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0031.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0031.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0031.mode: 'instantaneous', +catch_progn_incr0031.format: 'CFIO', catch_progn_incr0031.frequency: 030000, catch_progn_incr0031.ref_time: 013000, catch_progn_incr0031.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0031' , @@ -1173,8 +1205,9 @@ catch_progn_incr0031.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0031' , :: catch_progn_incr0032.descr: 'Tile-space,3-Hourly,Instantaneous,Single-Level,Assimilation, Land Prognostics Increments', -catch_progn_incr0032.template: '%y4%m2%d2_%h2%n2z.bin', +catch_progn_incr0032.template: '%y4%m2%d2_%h2%n2z.nc4', catch_progn_incr0032.mode: 'instantaneous', +catch_progn_incr0032.format: 'CFIO', catch_progn_incr0032.frequency: 030000, catch_progn_incr0032.ref_time: 013000, catch_progn_incr0032.fields: 'TCFSAT_INCR' , 'CATCHINCR_e0032' , diff --git a/GEOSldas_App/GEOSldas_LDAS.rc b/GEOSldas_App/GEOSldas_LDAS.rc index c77ae6bd..8ca6018c 100644 --- a/GEOSldas_App/GEOSldas_LDAS.rc +++ b/GEOSldas_App/GEOSldas_LDAS.rc @@ -1,14 +1,14 @@ -#################################################################################### -# # -# GEOSldas Resource Parameters # -# # -# Values below override the hardcoded default values # -# in *.F90 calls to MAPL_GetResource(). # -# # -# Users can further override the values below by # -# editing the "exeinp" file during ldas setup. 
# -# # -#################################################################################### +################################################################################### +# # +# GEOSldas Resource Parameters # +# # +# Values below override the hardcoded default values # +# in *.F90 calls to MAPL_GetResource(). # +# # +# Users can further override the values below by # +# editing the "exeinp" input file for ldas_setup. # +# # +################################################################################### # ---- Using Catchment[CN] offline? diff --git a/GEOSldas_App/ldas.py b/GEOSldas_App/ldas.py new file mode 100644 index 00000000..bdab4f6e --- /dev/null +++ b/GEOSldas_App/ldas.py @@ -0,0 +1,1496 @@ +#!/usr/bin/env python3 + +import os +import sys +import glob +import linecache +import shutil +import fileinput +import time +import subprocess as sp +import shlex +import tempfile +import netCDF4 +from dateutil import rrule +from datetime import datetime +from datetime import timedelta +from collections import OrderedDict +from dateutil.relativedelta import relativedelta +from remap_utils import * +from remap_lake_landice_saltwater import * +from remap_catchANDcn import * +from lenkf_j_template import * +from setup_utils import * + +""" +ldas class is used by ldas_setup +""" + +class ldas: + + def __init__(self, cmdLineArgs): + """ + """ + # These keywords are excluded from LDAS.rc (i.e., only needed in pre- or post-processing) + self.NoneLDASrcKeys=['EXP_ID', 'EXP_DOMAIN', + 'BEG_DATE', 'END_DATE','RESTART','RESTART_PATH', + 'RESTART_DOMAIN','RESTART_ID','BCS_PATH','TILING_FILE','GRN_FILE','LAI_FILE','LNFM_FILE','NIRDF_FILE', + 'VISDF_FILE','CATCH_DEF_FILE','NDVI_FILE', + 'NML_INPUT_PATH','HISTRC_FILE','RST_FROM_GLOBAL','JOB_SGMT','NUM_SGMT','POSTPROC_HIST', + 'MINLON','MAXLON','MINLAT','MAXLAT','EXCLUDE_FILE','INCLUDE_FILE','MWRTM_PATH','GRIDNAME', + 'ADAS_EXPDIR', 'BCS_RESOLUTION', 'TILE_FILE_FORMAT' ] + + self.GEOS_SITE = "@GEOS_SITE@" + + # 
=============================================================================================== + # + # ------ + # ./ldas_setup setup ... + # ------ + # Instance variables + self.exeinpfile = cmdLineArgs['exeinpfile'] + self.batinpfile = cmdLineArgs['batinpfile'] + exphome_ = cmdLineArgs['exphome'].rstrip('/') + assert os.path.isdir(exphome_) # exphome should exist + self.exphome = os.path.abspath(exphome_) + self.verbose = cmdLineArgs['verbose'] + + # command line args for coupled land-atm DAS (see "help" strings in parseCmdLine() for details) + self.ladas_cpl = cmdLineArgs['ladas_cpl'] + self.nymdb = cmdLineArgs['nymdb'] + self.nhmsb = cmdLineArgs['nhmsb'] + self.agcm_res = cmdLineArgs['agcm_res'] + self.bcs_version = cmdLineArgs['bcs_version'] + self.rstloc = cmdLineArgs['rstloc'] + self.varwindow = cmdLineArgs['varwindow'] + self.nens = cmdLineArgs['nens'] + + # obsolete command line args + self.runmodel = cmdLineArgs['runmodel'] + if self.runmodel : + print('\n The option "--runmodel" is out of date, not necessary anymore. 
\n') + + self.daysperjob = cmdLineArgs['daysperjob'] + self.monthsperjob = cmdLineArgs['monthsperjob'] + + self.ExeInputs = OrderedDict() + self.RmInputs = OrderedDict() + self.rundir = None + self.blddir = None + self.blddirLn = None + self.outdir = None + self.out_path = None + self.inpdir = None + self.exefyl = None + self.isZoomIn = False + self.catch = '' + self.has_mwrtm = False + self.has_vegopacity = False + self.assim = False + self.has_landassim_seed = False + self.has_geos_pert = False + self.nSegments = 1 + self.perturb = 0 + self.first_ens_id = 0 + self.in_rstdir = None + self.in_tilefile = None # default string + self.ens_id_width = 6 # _eXXXX + self.bcs_dir_land = '' + self.bcs_dir_geom = '' + self.bcs_dir_landshared = '' + self.tile_types = '' + self.with_land = False + self.with_landice = False + self.adas_expdir = '' + + # assert necessary optional arguments in command line if exeinp file does not exist + if not os.path.exists(cmdLineArgs['exeinpfile']): + # make sure all necessary command line arguments were supplied + assert self.ladas_cpl is not None, "Error. Must have command line arg ladas_cpl for coupled land-atm DAS.\n" + self.ladas_cpl = int(self.ladas_cpl) + assert self.ladas_cpl > 0, "Error. If not ladas coupling, exeinpfile must be provided.\n" + assert self.nymdb is not None, "Error. Must have command line arg nymdb for coupled land-atm DAS.\n" + assert self.nhmsb is not None, "Error. Must have command line arg nhmsb for coupled land-atm DAS.\n" + assert self.agcm_res is not None, "Error. Must have command line arg agcm_res for coupled land-atm DAS.\n" + assert self.bcs_version is not None, "Error. Must have command line arg bcs_version for coupled land-atm DAS.\n" + assert self.rstloc is not None, "Error. Must have command line arg rstloc for coupled land-atm DAS.\n" + assert self.varwindow is not None, "Error. Must have command line arg varwindow for coupled land-atm DAS.\n" + assert self.nens is not None, "Error. 
Must have command line arg nens for coupled land-atm DAS (ens component).\n" + self.ladas_cpl = int(self.ladas_cpl) + else: + self.ladas_cpl = 0 + + # ------ + # Read exe input file which is required to set up the dir + # ------ + self.ExeInputs = parseInputFile(cmdLineArgs['exeinpfile'], ladas_cpl = self.ladas_cpl ) + + # verifying the required input + if 'RESTART' not in self.ExeInputs : + self.ExeInputs['RESTART'] = "1" + + if self.ExeInputs['RESTART'].isdigit() : + if int(self.ExeInputs['RESTART']) ==0 : + self.ExeInputs['RESTART_ID'] = 'None' + self.ExeInputs['RESTART_DOMAIN'] = 'None' + self.ExeInputs['RESTART_PATH'] = 'None' + else: + if self.ExeInputs['RESTART'] =='G' : + self.ExeInputs['RESTART_DOMAIN'] = 'None' + else: + self.ExeInputs['RESTART_ID'] = 'None' + self.ExeInputs['RESTART_DOMAIN'] = 'None' + self.ExeInputs['RESTART_PATH'] = 'None' + + ### check if ldas is coupled to adas; if so, set/overwrite input parameters accordingly + if self.ladas_cpl > 0 : + self.ExeInputs['BEG_DATE'] = f"{self.nymdb} {self.nhmsb}" + rstloc_ = self.rstloc.rstrip('/') # remove trailing '/' + assert os.path.isdir(rstloc_) # make sure rstloc_ is a valid directory + self.rstloc = os.path.abspath(rstloc_) + self.ExeInputs['RESTART_PATH'] = os.path.dirname( self.rstloc) + self.ExeInputs['RESTART_ID'] = os.path.basename(self.rstloc) + self.adas_expdir = os.path.dirname( self.exphome) + self.ExeInputs['ADAS_EXPDIR'] = self.adas_expdir + self.adas_expid = os.path.basename(self.adas_expdir) + self.ExeInputs['MET_TAG'] = self.adas_expid + '__bkg' + + if self.ladas_cpl == 1 : + # ldas coupled with deterministic component of ADAS + self.ExeInputs['EXP_ID'] = self.adas_expid + '_LDAS' + self.ExeInputs['MET_PATH'] = self.adas_expdir + '/recycle/holdpredout' + self.ExeInputs['ENSEMBLE_FORCING'] = 'NO' + elif self.ladas_cpl == 2 : + # ldas coupled with ensemble component of ADAS + self.ExeInputs['EXP_ID'] = self.adas_expid + '_LDAS4ens' + self.ExeInputs['MET_PATH'] = self.adas_expdir + 
'/atmens/mem' + self.ExeInputs['ENSEMBLE_FORCING'] = 'YES' + else : + exit("Error. Unknown value of self.ladas_cpl.\n") + + self.ExeInputs['NUM_LDAS_ENSEMBLE'] = self.nens # fvsetup finds Nens by counting restart files + self.first_ens_id = 1 # match ADAS convention + self.ExeInputs['FIRST_ENS_ID'] = self.first_ens_id + + self.agcm_res = 'CF' + self.agcm_res # change format to "CFnnnn" + self.ExeInputs['EXP_DOMAIN'] = self.agcm_res +'x6C_GLOBAL' + + # when coupled to ADAS, "BCS_PATH" EXCLUDE bcs version info + # hard-wired BCS_PATH for now + self.ExeInputs['BCS_PATH'] = "/discover/nobackup/projects/gmao/bcs_shared/fvInput/ExtData/esm/tiles" + self.ExeInputs['BCS_PATH'] = self.ExeInputs['BCS_PATH'].rstrip('/') + '/' + self.bcs_version + if self.bcs_version == "Icarus-NLv3" : + self.ExeInputs['BCS_PATH'] = self.ExeInputs['BCS_PATH'] + '_new_layout' + self.ExeInputs['BCS_RESOLUTION'] = self.agcm_res +'x6C_' + self.agcm_res +'x6C' + self.ExeInputs['RESTART_DOMAIN'] = self.agcm_res +'x6C_GLOBAL' + + # the following are not in default ExeInputs list; hardwire for now + self.ExeInputs['MWRTM_PATH'] = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/RTM_params/RTMParam_SMAP_L4SM_v006/' + self.ExeInputs['LAND_ASSIM'] = "YES" + self.ExeInputs['MET_HINTERP'] = 0 + self.landassim_dt = 10800 # seconds + # make sure ADAS analysis window [minutes] is multiple of LANDASSIM_DT [seconds] + if int(self.varwindow) % (self.landassim_dt/60) == 0 : + self.ExeInputs['LANDASSIM_DT'] = self.landassim_dt + else : + exit("Error. 
LANDASSIM_DT is inconsistent with ADAS analysis window.\n") + self.ExeInputs['LANDASSIM_T0'] = "013000" # HHMMSS + jsgmt1 = "00000000" + jsgmt2 = hours_to_hhmmss(int(self.varwindow)/60) # convert minutes to HHMMSS + self.ExeInputs['JOB_SGMT'] = f"{jsgmt1} {jsgmt2}" + self.ExeInputs['NUM_SGMT'] = 1 + self.ExeInputs['FORCE_DTSTEP'] = 3600 + + # determine END_DATE = BEG_DATE + TIME_STEP_OF_ADAS_CYCLE + _beg_date = datetime.strptime( self.ExeInputs['BEG_DATE'], "%Y%m%d %H%M%S") + _hours = int(self.ExeInputs['JOB_SGMT'][ 9:11]) + _end_date = _beg_date + timedelta(hours=int(self.varwindow)/60) + self.ExeInputs['END_DATE'] = _end_date.strftime("%Y%m%d %H%M%S") + + # end if self.ladas_cpl > 0 ----------------------------------------------------------------------------------------- + + + # print exe inputs + if self.verbose: + print ('\nInputs from exeinp file:\n') + printDictionary(self.ExeInputs) + + if 'LSM_CHOICE' not in self.ExeInputs: + self.ExeInputs['LSM_CHOICE'] = 1 + _lsm_choice_int = int(self.ExeInputs['LSM_CHOICE']) + if _lsm_choice_int == 1: + self.catch = 'catch' + elif _lsm_choice_int == 2 : + self.catch = 'catchcnclm40' + elif _lsm_choice_int == 3 : + self.catch = 'catchcnclm45' + elif _lsm_choice_int == 4 : + self.catch = 'catchcnclm51' + _lsm_choice_int = None + + self.tile_types = self.ExeInputs.get('TILE_TYPES',"100").split() + if "100" in self.tile_types : + self.with_land = True + assert int(self.ExeInputs['LSM_CHOICE']) <= 2, "\nLSM_CHOICE=3 (Catchment-CN4.5) is no longer supported. 
Please set LSM_CHOICE to 1 (Catchment) or 2 (Catchment-CN4.0)" + if "20" in self.tile_types : + self.with_landice = True + + self.nens = int(self.ExeInputs['NUM_LDAS_ENSEMBLE']) # fails if value of Nens is not an integer + self.first_ens_id = int(self.ExeInputs.get('FIRST_ENS_ID',0)) + self.perturb = int(self.ExeInputs.get('PERTURBATIONS',0)) + if self.nens > 1: + self.perturb = 1 + self.ensdirs = ['ens%04d'%iens for iens in range(self.first_ens_id, self.nens + self.first_ens_id)] + # if self.ens_id_width = 4, _width = '_e%04d' + _width = '_e%0{}d'.format(self.ens_id_width-2) + # self.ensids will be a list of [_e0000, _e0001, ...] + self.ensids = [ _width%iens for iens in range(self.first_ens_id, self.nens + self.first_ens_id)] + if (self.nens == 1) : + self.ensdirs_avg = self.ensdirs + self.ensids=[''] + else : + self.ensdirs_avg = self.ensdirs + ['ens_avg'] + + self._verifyExeInputs() + + self._calculateJobSegments() + + # assemble bcs sub-directories + self.bcs_dir_land = self.ExeInputs['BCS_PATH']+ '/land/' + self.ExeInputs['BCS_RESOLUTION']+'/' + self.bcs_dir_geom = self.ExeInputs['BCS_PATH']+ '/geometry/' + self.ExeInputs['BCS_RESOLUTION']+'/' + self.bcs_dir_landshared = self.ExeInputs['BCS_PATH']+ '/land/shared/' + + # make sure MET_PATH and RESTART_PATH have trailing '/' + if self.ExeInputs['MET_PATH'][-1] != '/': + self.ExeInputs['MET_PATH'] = self.ExeInputs['MET_PATH']+'/' + if self.ExeInputs['RESTART_PATH'][-1] != '/': + self.ExeInputs['RESTART_PATH'] = self.ExeInputs['RESTART_PATH']+'/' + + # make sure catchment and vegdyn restart files ( at least one for each) exist + if 'CATCH_DEF_FILE' not in self.ExeInputs : + self.ExeInputs['CATCH_DEF_FILE']= self.bcs_dir_land + 'clsm/catchment.def' + if self.with_land : + assert os.path.isfile(self.ExeInputs['CATCH_DEF_FILE']),"[%s] file does not exist " % self.ExeInputs['CATCH_DEF_FILE'] + + # assigning BC files + self.ExeInputs['LNFM_FILE'] = '' + tile_file_format = self.ExeInputs.get('TILE_FILE_FORMAT', 
'DEFAULT') + domain_ = '' + inpdir_ = self.bcs_dir_land + inpgeom_ = self.bcs_dir_geom + + if self.ExeInputs['RESTART'] == '1' : + inp_ = self.ExeInputs['RESTART_PATH']+'/'.join([self.ExeInputs['RESTART_ID'],'output', + self.ExeInputs['RESTART_DOMAIN'], 'rc_out/']) + txt_tile = glob.glob(inp_ + '*.domain') + if len(txt_tile) > 0: + domain_ = '.domain' + inpdir_ = inp_ + inpgeom_ = inp_ + + inpdir_ = os.path.realpath(inpdir_)+'/' + inpgeom_ = os.path.realpath(inpgeom_)+'/' + + txt_tile = glob.glob(inpgeom_ + '*.til' + domain_) + for f in txt_tile: + if 'MAPL_' in os.path.basename(f): + txt_tile = [f] + break + nc4_tile = glob.glob(inpgeom_ + '*.nc4' + domain_) + if tile_file_format.upper() == 'TXT' : self.ExeInputs['TILING_FILE'] = txt_tile[0] + if tile_file_format.upper() == 'DEFAULT' : self.ExeInputs['TILING_FILE'] = (txt_tile+nc4_tile)[-1] + + self.ExeInputs['GRN_FILE'] = glob.glob(inpdir_ + 'green_clim_*.data'+domain_)[0] + self.ExeInputs['LAI_FILE'] = glob.glob(inpdir_ + 'lai_clim_*.data' +domain_)[0] + tmp_ = glob.glob(inpdir_ + 'lnfm_clim_*.data'+domain_) + if (len(tmp_) ==1) : + self.ExeInputs['LNFM_FILE'] = tmp_[0] + self.ExeInputs['NDVI_FILE'] = glob.glob(inpdir_ + 'ndvi_clim_*.data'+domain_ )[0] + self.ExeInputs['NIRDF_FILE'] = glob.glob(inpdir_ + 'nirdf_*.dat' +domain_ )[0] + self.ExeInputs['VISDF_FILE'] = glob.glob(inpdir_ + 'visdf_*.dat' +domain_ )[0] + inpdir_ = None + domain_ = None + inpgeom_= None + # assigning Gridname + if 'GRIDNAME' not in self.ExeInputs : + tmptile = os.path.realpath(self.ExeInputs['TILING_FILE']) + extension = os.path.splitext(tmptile)[1] + if extension == '.domain': + extension = os.path.splitext(tmptile)[0] + gridname_ ='' + if extension == '.til': + gridname_ = linecache.getline(tmptile, 3).strip() + else: + nc_file = netCDF4.Dataset(tmptile,'r') + gridname_ = nc_file.getncattr('Grid_Name') + # in case it is an old name: SMAP-EASEvx-Mxx + gridname_ = gridname_.replace('SMAP-','').replace('-M','_M') + 
self.ExeInputs['GRIDNAME'] = gridname_ + + if 'POSTPROC_HIST' not in self.ExeInputs: + self.ExeInputs['POSTPROC_HIST'] = 0 + + if 'RUN_IRRIG' not in self.ExeInputs: + self.ExeInputs['RUN_IRRIG'] = 0 + + if 'AEROSOL_DEPOSITION' not in self.ExeInputs: + self.ExeInputs['AEROSOL_DEPOSITION'] = 0 + # default is global + _domain_dic=OrderedDict() + _domain_dic['MINLON']=-180. + _domain_dic['MAXLON']= 180. + _domain_dic['MINLAT']= -90. + _domain_dic['MAXLAT']= 90. + _domain_dic['EXCLUDE_FILE']= "''" + _domain_dic['INCLUDE_FILE']= "''" + + for key,val in _domain_dic.items() : + if key in self.ExeInputs : + _domain_dic[key]= self.ExeInputs[key] + self.domain_def = tempfile.NamedTemporaryFile(mode='w', delete=False) + self.domain_def.write('&domain_inputs\n') + for key,val in _domain_dic.items() : + keyn=(key+" = ").ljust(16) + valn = str(val) + if '_FILE' in key: + self.domain_def.write(keyn+ "'"+valn+"'"+'\n') + else : + self.domain_def.write(keyn+ valn +'\n') + self.domain_def.write('/\n') + self.domain_def.close() + + # find restart files and tile files (if necessary) + RESTART_str = str(self.ExeInputs['RESTART']) + + if RESTART_str == '2': + inpdir=self.ExeInputs['RESTART_PATH']+self.ExeInputs['RESTART_ID']+'/input/' + in_tilefiles_ = glob.glob(inpdir+'*tile.data') + if len(in_tilefiles_) == 0 : + inpdir=self.ExeInputs['RESTART_PATH']+self.ExeInputs['RESTART_ID']+'/output/'+self.ExeInputs['RESTART_DOMAIN']+'/rc_out/' + in_tilefiles_ = glob.glob(inpdir+'MAPL_*.til') + if len(in_tilefiles_) == 0 : + in_tilefiles_ = glob.glob(inpdir+'/*.til') + if len(in_tilefiles_) == 0 : + in_tilefiles_ = glob.glob(inpdir+'/*.nc4') + self.in_tilefile =os.path.realpath(in_tilefiles_[0]) + + if RESTART_str in ['1', '2']: + y4m2='Y%4d/M%02d' % (self.begDates[0].year, self.begDates[0].month) + y4m2d2_h2m2='%4d%02d%02d_%02d%02d' % (self.begDates[0].year, self.begDates[0].month, + self.begDates[0].day,self.begDates[0].hour,self.begDates[0].minute) + self.in_rstdir = 
self.ExeInputs['RESTART_PATH']+'/'.join([self.ExeInputs['RESTART_ID'],'output', + self.ExeInputs['RESTART_DOMAIN'],'rs',self.ensdirs[0],y4m2]) + if self.with_land: + tmpFile=self.ExeInputs['RESTART_ID']+'.'+self.catch+'_internal_rst.'+y4m2d2_h2m2 + catchRstFile=self.in_rstdir+'/'+tmpFile + assert os.path.isfile(catchRstFile), self.catch+'_internal_rst file [%s] does not exist!' %(catchRstFile) + if RESTART_str == '1': + tmpFile=self.ExeInputs['RESTART_ID']+'.landpert_internal_rst.'+y4m2d2_h2m2 + landpertRstFile=self.in_rstdir+'/'+tmpFile + if ( os.path.isfile(landpertRstFile)) : + self.has_geos_pert = True + + if self.with_landice: + tmpFile=self.ExeInputs['RESTART_ID']+'.landice_internal_rst.'+y4m2d2_h2m2 + landiceRstFile=self.in_rstdir+'/'+tmpFile + assert os.path.isfile(landiceRstFile), 'landice_internal_rst file [%s] does not exist!' %(landiceRstFile) + + if RESTART_str == '0': + assert ( self.with_land and not self.with_landice), "RESTART = 0 is only for land" + if (self.catch == 'catch'): + self.in_rstdir = '/discover/nobackup/projects/gmao/ssd/land/l_data/LandRestarts_for_Regridding' \ + '/Catch/M09/20170101/' #catch_internal_rst + self.in_tilefile = '/discover/nobackup/projects/gmao/ssd/land/l_data/geos5/bcs/CLSM_params' \ + '/mkCatchParam_SMAP_L4SM_v002/SMAP_EASEv2_M09/SMAP_EASEv2_M09_3856x1624.til' + elif (self.catch == 'catchcnclm40'): + self.in_rstdir = '/discover/nobackup/projects/gmao/ssd/land/l_data/LandRestarts_for_Regridding' \ + '/CatchCN/M36/20150301_0000/' #catchcnclm40_internal_dummy + self.in_tilefile = '/discover/nobackup/projects/gmao/bcs_shared/legacy_bcs/Heracles-NL/SMAP_EASEv2_M36/SMAP_EASEv2_M36_964x406.til' + elif (self.catch == 'catchcnclm45'): + self.in_rstdir = '/discover/nobackup/projects/gmao/ssd/land/l_data/LandRestarts_for_Regridding' \ + '/CatchCN/M36/19800101_0000/' #catchcnclm45_internal_dummy + self.in_tilefile = 
'/discover/nobackup/projects/gmao/bcs_shared/legacy_bcs/Icarus-NLv3/Icarus-NLv3_EASE/SMAP_EASEv2_M36/SMAP_EASEv2_M36_964x406.til' + else: + sys.exit('need to provide at least dummy files') + + # DEAL WITH mwRTM input from exec + self.assim = True if self.ExeInputs.get('LAND_ASSIM', 'NO').upper() == 'YES' and self.with_land else False + # verify mwrtm file + if 'MWRTM_PATH' in self.ExeInputs and self.with_land : + self.ExeInputs['MWRTM_PATH'] = self.ExeInputs['MWRTM_PATH']+'/'+ self.ExeInputs['BCS_RESOLUTION']+'/' + mwrtm_param_file_ = self.ExeInputs['MWRTM_PATH']+'mwRTM_param.nc4' + vegopacity_file_ = self.ExeInputs['MWRTM_PATH']+'vegopacity.bin' + if os.path.isfile(mwrtm_param_file_) : + self.has_mwrtm = True + self.mwrtm_file = mwrtm_param_file_ + else : + assert not mwrtm_param_file_.strip(), ' MWRTM_PATH: %s should contain mwRTM_param.nc4'% self.ExeInputs['MWRTM_PATH'] + del self.ExeInputs['MWRTM_PATH'] + if os.path.isfile(vegopacity_file_) : + self.has_vegopacity = True + self.ExeInputs['VEGOPACITY_FILE'] = vegopacity_file_ + + + # ------------------ + # Read rm input file + # ------------------ + + if self.ladas_cpl == 0 : + self.RmInputs = parseInputFile(cmdLineArgs['batinpfile']) + else : + self.RmInputs['account'] = cmdLineArgs['account'] + self.RmInputs['walltime'] = "01:00:00" + self.RmInputs['ntasks_model'] = 120 + + self._verifyResourceInputs() + + # print rm inputs + if self.verbose: + print ('\n\nRequired inputs for resource manager:') + printDictionary(self.RmInputs) + print ('\n\nOptional inputs for resource manager:') + printDictionary(self.RmInputs) + print ('\n\n') + + # ------ + # set top level directories + # rundir, inpdir, outdir, blddir + # executable + # exefyl + # ------ + + self.bindir = os.path.dirname(os.path.realpath(__file__)) + self.blddir = self.bindir.rsplit('/',1)[0] + exefyl = '/bin/GEOSldas.x' + tmp_execfyl = self.blddir + exefyl + assert os.path.isfile(tmp_execfyl),\ + 'Executable [%s] does not exist!' 
% tmp_execfyl + self.expdir = self.exphome + '/' + self.ExeInputs['EXP_ID'] + self.rundir = self.expdir + '/run' + self.inpdir = self.expdir + '/input' + self.outdir = self.expdir + '/output' + self.scratchdir = self.expdir + '/scratch' + self.blddirLn = self.expdir + '/build' + self.out_path = self.outdir + '/'+self.ExeInputs['EXP_DOMAIN'] + self.bcsdir = self.outdir + '/'+self.ExeInputs['EXP_DOMAIN']+'/rc_out/' + self.rstdir = self.outdir + '/'+self.ExeInputs['EXP_DOMAIN']+'/rs/' + self.exefyl = self.blddirLn + exefyl + + # default is set to 0 ( no output server) + if 'oserver_nodes' not in self.RmInputs : + self.RmInputs['oserver_nodes'] = 0 + + if (int(self.RmInputs['oserver_nodes']) >=1) : + self.ExeInputs['WRITE_RESTART_BY_OSERVER'] = "YES" + # set default for now + if 'writers-per-node' not in self.RmInputs: + self.RmInputs['writers-per-node'] = 5 + else: + self.RmInputs['writers-per-node'] = 0 + + # ----------------------------------------------------------------------------------- + def _verifyExeInputs(self): + ExeInputs = self.ExeInputs + #) verify keys + option = '1' + if (ExeInputs['RESTART'] == 'G' or ExeInputs['RESTART'] == '0'): + option = '0' + + rqdExeInpKeys = getExeKeys(option) + for key in rqdExeInpKeys: + assert key in ExeInputs,' "%s" is required in the inputs ( from exeinpfile or command line) ' % (key) + + _mydir = self.exphome + '/' + self.ExeInputs['EXP_ID'] + assert not os.path.isdir(_mydir), 'Dir [%s] already exists!' 
% _mydir + _mydir = None + + # nens is an integer and =1 for model run + assert self.nens>0, 'NUM_LDAS_ENSEMBLE [%d] <= 0' % self.nens + # ----------------------------------------------------------------------------------- + def _verifyResourceInputs(self): + #----- + # verify resource input keys are correct + #----- + ResourceInputs = self.RmInputs + rqdRmInpKeys = getResourceKeys('required') + optSlurmInpKeys = getResourceKeys('optional') + allKeys = rqdRmInpKeys + optSlurmInpKeys + for key in rqdRmInpKeys: + assert key in ResourceInputs,' "%s" is required in the inputs ( from batinpfile or command line) ' % (key) + + for key in ResourceInputs: + assert key in allKeys, ' "%s" is not recognized ' % key + + # --------------------- + # calculate JobSegments + # --------------------- + def _calculateJobSegments(self): + ## convert date-time strings to datetime object + ## start/end_time are converted to lists + ## ensure end>start + + self.begDates=[] + self.endDates=[] + self.begDates.append( + datetime.strptime( + self.ExeInputs['BEG_DATE'], + '%Y%m%d %H%M%S' + ) + ) + self.endDates.append( + datetime.strptime( + self.ExeInputs['END_DATE'], + '%Y%m%d %H%M%S' + ) + ) + if self.ExeInputs['RESTART'].isdigit() : + if int(self.ExeInputs['RESTART']) == 0 : + print ("No restart file (cold restart): Forcing start date to January 1, 0z") + year = self.begDates[0].year + self.begDates[0]=datetime(year =year,month=1,day =1,hour =0, minute= 0,second= 0) + + assert self.endDates[0]>self.begDates[0], \ + 'END_DATE <= BEG_DATE' + + self.job_sgmt = [] + if 'JOB_SGMT' in self.ExeInputs: + self.job_sgmt.append("JOB_SGMT: "+self.ExeInputs['JOB_SGMT']) + else: + _datediff = relativedelta(self.endDates[0],self.begDates[0]) + self.ExeInputs['JOB_SGMT'] = "%04d%02d%02d %02d%02d%02d" %(_datediff.years, + _datediff.months, + _datediff.days, + _datediff.hours, + _datediff.minutes, + _datediff.seconds) + self.job_sgmt.append("JOB_SGMT: "+self.ExeInputs['JOB_SGMT']) + + if 'NUM_SGMT' not in 
self.ExeInputs: + self.ExeInputs['NUM_SGMT'] = 1 + + _years = int(self.ExeInputs['JOB_SGMT'][ 0: 4]) + _months = int(self.ExeInputs['JOB_SGMT'][ 4: 6]) + _days = int(self.ExeInputs['JOB_SGMT'][ 6: 8]) + assert self.ExeInputs['JOB_SGMT'][8] == ' ' and self.ExeInputs['JOB_SGMT'][9] != ' ', "JOB_SGMT format is not right" + _hours = int(self.ExeInputs['JOB_SGMT'][ 9:11]) + _mins = int(self.ExeInputs['JOB_SGMT'][11:13]) + _seconds= int(self.ExeInputs['JOB_SGMT'][13:15]) + + + _difftime =timedelta(days = _years*365+_months*30+_days,hours = _hours,minutes=_mins,seconds=_seconds) + _difftime = int(self.ExeInputs['NUM_SGMT'])*_difftime + _d = self.begDates[0] + _endDate = self.endDates[0] + _d = _d + _difftime + while _d < _endDate : + print (_difftime.days) + self.nSegments +=1 + print (_d.year, _d.month, _d.day) + self.begDates.append(_d) + self.endDates.insert(-1,_d) + _d = _d+ _difftime + + def createDirStructure(self): + """ + Create required dir structure + """ + + status = False + + # shorthands + _nens = self.nens + + # run/inp/wrk dirs + os.makedirs(self.exphome+'/'+self.ExeInputs['EXP_ID'], exist_ok=True) + os.makedirs(self.rundir, exist_ok=True) + os.makedirs(self.inpdir, exist_ok=True) + os.makedirs(self.outdir, exist_ok=True) + os.makedirs(self.scratchdir, exist_ok=True) + + #-start-shorthand-function- + def _getDirName(outtyp, ensdir, yyyymm): + return '/'.join([ + self.outdir, + self.ExeInputs['EXP_DOMAIN'], + outtyp, # ana/cat/rs/rc_out + ensdir, + yyyymm + ]) + #-end-shorthand-function- + + # met forcing dir + myMetDir = self.inpdir + '/met_forcing' + os.makedirs(myMetDir, exist_ok=True) + + # ensxxxx directories + nSegments = self.nSegments + for iseg in range(nSegments): + _start = self.begDates[iseg] + _end = self.endDates[iseg] + + # Yyyyy/Mmm between StartDateTime and EndDateTime + newDate = _start + y4m2_list = [('Y%4d/M%02d' % (newDate.year, newDate.month))] + while newDate<_end: + newDate += relativedelta(months=1) + y4m2_list.append('Y%4d/M%02d' % 
(newDate.year, newDate.month)) + + # ExpDomain/ana/, /cat/ directories + for ensdir in self.ensdirs_avg: + for y4m2 in y4m2_list: + os.makedirs(_getDirName('ana', ensdir, y4m2), exist_ok=True) + os.makedirs(_getDirName('cat', ensdir, y4m2), exist_ok=True) + + # ExpDomain/rs/ directories + for ensdir in self.ensdirs: + for y4m2 in y4m2_list: + os.makedirs(_getDirName('rs', ensdir, y4m2), exist_ok=True) + + # ExpDomain/rc_out/ - only for _start + os.makedirs(_getDirName('rc_out', '', y4m2_list[0]), exist_ok=True) + + # restart dir + os.makedirs(self.inpdir + '/restart', exist_ok=True) + + status = True + return status + + + # create links to BCs, restarts, met forcing, ... + def createLnRstBc(self) : + # link bld dir + status = False + + _nens = self.nens + + os.symlink(self.blddir, self.blddirLn) + + # met forcing dir + self.ensemble_forcing = True if self.ExeInputs.get('ENSEMBLE_FORCING', 'NO').upper() == 'YES' else False + + myMetPath ='' + for _i in range(self.first_ens_id, _nens + self.first_ens_id) : + str_ens = '' + if ( _nens != 1 and self.ensemble_forcing): + str_ens = '%03d'%(_i) + metpath = self.ExeInputs['MET_PATH'].rstrip('/')+str_ens + myMetDir = self.inpdir + '/met_forcing' + myMetPath = myMetDir + '/' + metpath.split('/')[-1] + os.symlink(metpath, myMetPath) + # update 'met_path' to use relative path from outdir + if ( not self.ensemble_forcing): + break + if ( _nens !=1 and self.ensemble_forcing) : + # replace last three character with '%s" + self.ExeInputs['MET_PATH'] = os.path.relpath(myMetPath, self.rundir)[:-3]+'%s' + else: + self.ExeInputs['MET_PATH'] = os.path.relpath(myMetPath, self.rundir) + + # update tile file + tile= self.ExeInputs['TILING_FILE'] + short_tile= os.path.basename(self.ExeInputs['TILING_FILE']) + newtile = self.bcsdir+'/'+short_tile + shutil.copy(tile, newtile) + tile=newtile + # if three extra lines exist, remove them and save it to inputdir + + print ('\nCorrect the tile file if it is an old EASE tile format... 
\n') + EASEtile=self.bcsdir+'/MAPL_'+short_tile + cmd = self.bindir + '/preprocess_ldas.x correctease '+ tile + ' '+ EASEtile + print ("cmd: " + cmd) + + sp.call(shlex.split(cmd)) + + if os.path.isfile(EASEtile) : + #update tile file name + short_tile ='MAPL_'+short_tile + tile=EASEtile + # setup BC files + + catchment_def = self.ExeInputs['CATCH_DEF_FILE'] + exp_id = self.ExeInputs['EXP_ID'] + + _start = self.begDates[0] + _y4m2d2h2m2 ='%4d%02d%02d%02d%02d' % (_start.year, _start.month,_start.day,_start.hour,_start.minute) + + dzsf = '50.0' + if 'SURFLAY' in self.ExeInputs : + dzsf = self.ExeInputs['SURFLAY'] + + # These are dummy values for *cold* restart: + wemin_in = '13' # WEmin input/output for scale_catch(cn), + wemin_out = '13' # + if 'WEMIN_IN' in self.ExeInputs : + wemin_in = self.ExeInputs['WEMIN_IN'] + if 'WEMIN_OUT' in self.ExeInputs : + wemin_out = self.ExeInputs['WEMIN_OUT'] + + tmp_f2g_file = tempfile.NamedTemporaryFile(delete=False) + cmd = self.bindir +'/preprocess_ldas.x c_f2g ' + tile + ' ' + self.domain_def.name + ' '+ self.out_path + ' ' + catchment_def + ' ' + exp_id + ' ' + _y4m2d2h2m2 + ' '+ dzsf + ' ' + tmp_f2g_file.name + ' ' + '_'.join(self.tile_types) + + print ('Creating f2g file if necessary: '+ tmp_f2g_file.name +'....\n') + print ("cmd: " + cmd) + sp.call(shlex.split(cmd)) + # check if it is local or global + if os.path.getsize(tmp_f2g_file.name) !=0 : + self.isZoomIn= True + #os.remove(self.domain_def.name) + + # update tile domain + if self.isZoomIn: + newZoominTile = tile+'.domain' + print ("\nCreating local tile file :"+ newZoominTile) + print ("\nAdding 1000 to type of tiles to be excluded from domain...\n") + cmd = self.bindir +'/preprocess_ldas.x zoomin_tile ' + tile + ' ' + newZoominTile + ' '+ tmp_f2g_file.name + print ("cmd: " + cmd) + sp.call(shlex.split(cmd)) + short_tile=short_tile +'.domain' + tile = newZoominTile + + myTile=self.inpdir+'/tile.data' + os.symlink(tile,myTile) + + if self.with_land: + 
bcs=[self.ExeInputs['GRN_FILE'], + self.ExeInputs['LAI_FILE'], + self.ExeInputs['NDVI_FILE'], + self.ExeInputs['NIRDF_FILE'], + self.ExeInputs['VISDF_FILE'] ] + if (self.ExeInputs['LNFM_FILE'] != ''): + bcs += [self.ExeInputs['LNFM_FILE']] + if (self.has_vegopacity): + bcs += [self.ExeInputs['VEGOPACITY_FILE']] + bcstmp=[] + for bcf in bcs : + shutil.copy(bcf, self.bcsdir+'/') + bcstmp=bcstmp+[self.bcsdir+'/'+os.path.basename(bcf)] + bcs=bcstmp + + if self.isZoomIn: + print ("Creating the boundary files for the simulation domain...\n") + bcs_tmp=[] + for bcf in bcs : + cmd = self.bindir +'/preprocess_ldas.x zoomin_bc ' + bcf + ' '+ bcf+'.domain' + ' '+ tmp_f2g_file.name + print ("cmd: " + cmd) + sp.call(shlex.split(cmd)) + bcs_tmp=bcs_tmp+[bcf+'.domain'] + bcs=bcs_tmp + + + # link BC + print ("linking bcs...") + bcnames=['green','lai','ndvi','nirdf','visdf'] + if (self.ExeInputs['LNFM_FILE'] != ''): + bcnames += ['lnfm'] + if (self.has_vegopacity): + bcnames += ['vegopacity'] + for bcln,bc in zip(bcnames,bcs) : + myBC=self.inpdir+'/'+bcln+'.data' + os.symlink(bc,myBC) + + if ("catchcn" in self.catch): + os.symlink(self.bcs_dir_landshared + 'CO2_MonthlyMean_DiurnalCycle.nc4', \ + self.inpdir+'/CO2_MonthlyMean_DiurnalCycle.nc4') + + # create and link restart + print ("Creating and linking restart...") + _start = self.begDates[0] + + y4m2='Y%4d/M%02d'%(_start.year, _start.month) + y4m2d2_h2m2 ='%4d%02d%02d_%02d%02d' % (_start.year, _start.month,_start.day,_start.hour,_start.minute) + + myRstDir = self.inpdir + '/restart/' + + rstpath = self.ExeInputs['RESTART_PATH']+ \ + self.ExeInputs['RESTART_ID'] + \ + '/output/'+self.ExeInputs['RESTART_DOMAIN']+'/rs/' + rcoutpath = self.ExeInputs['RESTART_PATH']+ \ + self.ExeInputs['RESTART_ID'] + \ + '/output/'+self.ExeInputs['RESTART_DOMAIN']+'/rc_out/' + + # pass into remap_config_ldas + exp_id = self.ExeInputs['EXP_ID'] + RESTART_str = str(self.ExeInputs['RESTART']) + YYYYMMDD = '%4d%02d%02d' % (_start.year, 
_start.month,_start.day) + YYYYMMDDHH= '%4d%02d%02d%02d' % (_start.year, _start.month,_start.day, _start.hour) + rstid = self.ExeInputs['RESTART_ID'] + rstdomain = self.ExeInputs['RESTART_DOMAIN'] + rstpath0 = self.ExeInputs['RESTART_PATH'] + + # just copy the landassim pert seed if it exists + for iens in range(self.nens) : + _ensdir = self.ensdirs[iens] + _ensid = self.ensids[iens] + landassim_seeds = rstpath + _ensdir + '/' + y4m2+'/' + rstid + '.landassim_obspertrseed_rst.'+y4m2d2_h2m2 + if os.path.isfile(landassim_seeds) and self.assim : + _seeds = self.rstdir + _ensdir + '/' + y4m2+'/' + exp_id + '.landassim_obspertrseed_rst.'+y4m2d2_h2m2 + shutil.copy(landassim_seeds, _seeds) + os.symlink(_seeds, myRstDir+ '/landassim_obspertrseed'+ _ensid +'_rst') + self.has_landassim_seed = True + mk_outdir = self.exphome+'/'+exp_id+'/mk_restarts/' + + if (RESTART_str != '1' and (self.with_land or self.with_landice)): + bcs_path = self.ExeInputs['BCS_PATH'] + while bcs_path[-1] == '/' : bcs_path = bcs_path[0:-1] + bc_base = os.path.dirname(bcs_path) + bc_version = os.path.basename(bcs_path) + + remap_tpl = os.path.dirname(os.path.realpath(__file__)) + '/remap_params.tpl' + config = yaml_to_config(remap_tpl) + + config['slurm_pbs']['account'] = self.RmInputs['account'] + config['slurm_pbs']['qos'] = 'debug' + + config['input']['surface']['catch_tilefile'] = self.in_tilefile + config['input']['shared']['expid'] = self.ExeInputs['RESTART_ID'] + config['input']['shared']['yyyymmddhh'] = YYYYMMDDHH + if RESTART_str != 'M': + config['input']['shared']['rst_dir'] = self.in_rstdir + config['input']['surface']['wemin'] = wemin_in + config['input']['surface']['catch_model'] = self.catch + + config['output']['shared']['out_dir'] = mk_outdir + config['output']['surface']['catch_remap'] = True + config['output']['surface']['catch_tilefile'] = self.ExeInputs['TILING_FILE'] + config['output']['shared']['bc_base'] = bc_base + config['output']['shared']['bc_version'] = bc_version + 
config['output']['surface']['EASE_grid'] = self.ExeInputs['BCS_RESOLUTION'] + + config['output']['shared']['expid'] = self.ExeInputs['EXP_ID'] + config['output']['surface']['surflay'] = dzsf + config['output']['surface']['wemin'] = wemin_out + + if RESTART_str == "M" : # restart from merra2 + yyyymm = int(YYYYMMDDHH[0:6]) + merra2_expid = "d5124_m2_jan10" + if yyyymm < 197901 : + exit("Error. MERRA-2 data < 1979 not available\n") + elif (yyyymm < 199201): + merra2_expid = "d5124_m2_jan79" + elif (yyyymm < 200106): + merra2_expid = "d5124_m2_jan91" + elif (yyyymm < 201101): + merra2_expid = "d5124_m2_jan00" + elif (yyyymm < 202106): + merra2_expid = "d5124_m2_jan10" + # There was a rewind in MERRA2 from Jun 2021 to Sept 2021 + elif (yyyymm < 202110): + merra2_expid = "d5124_m2_jun21" + config['input']['shared']['expid'] = merra2_expid + config['input']['shared']['rst_dir'] = mk_outdir+ '/merra2_tmp_'+ YYYYMMDDHH + config['input']['surface']['wemin'] = 26 + config['input']['shared']['bc_base'] = '/discover/nobackup/projects/gmao/bcs_shared/fvInput/ExtData/esm/tiles' + config['input']['shared']['bc_version'] = 'GM4' + config['input']['shared']['agrid'] = 'C180' + config['input']['shared']['ogrid'] = '1440x720' + config['input']['shared']['omodel'] = 'data' + config['input']['shared']['MERRA-2'] = True + config['input']['surface']['catch_tilefile'] = '/discover/nobackup/projects/gmao/bcs_shared/fvInput/ExtData/esm/tiles/GM4/geometry/CF0180x6C_DE1440xPE0720/CF0180x6C_DE1440xPE0720-Pfafstetter.til' + + if self.with_land: + catch_obj = catchANDcn(config_obj = config) + catch_obj.remap() + if self.with_landice: + config['output']['surface']['remap_water'] = True + config['input']['surface']['zoom'] = '2' + landice_obj = lake_landice_saltwater(config_obj = config) + landice_obj.remap() + + #for ens in self.ensdirs : + catchRstFile0 = '' + vegdynRstFile0 = '' + landiceRstFile0 = '' + for iens in range(self.nens) : + ensdir = self.ensdirs[iens] + ensid = self.ensids[iens] + 
myCatchRst = myRstDir+'/'+self.catch +ensid +'_internal_rst' + myLandiceRst = myRstDir+'/'+ 'landice' +ensid +'_internal_rst' + myVegRst = myRstDir+'/'+'vegdyn'+ensid +'_internal_rst' + myPertRst = myRstDir+'/'+ 'landpert' +ensid +'_internal_rst' + + catchRstFile = '' + vegdynRstFile = '' + pertRstFile = '' + print ("restart: " + self.ExeInputs['RESTART']) + + if self.with_land : + if self.ExeInputs['RESTART'].isdigit() : + + if int(self.ExeInputs['RESTART']) == 0 or int(self.ExeInputs['RESTART']) == 2 : + vegdynRstFile = glob.glob(self.bcs_dir_land + 'vegdyn_*.dat')[0] + catchRstFile = glob.glob(self.exphome+'/'+exp_id+'/mk_restarts/*'+self.catch+'_internal_rst.'+YYYYMMDD+'*')[0] + else : # RESTART == 1 + catchRstFile = rstpath+ensdir +'/'+ y4m2+'/'+self.ExeInputs['RESTART_ID']+'.'+self.catch+'_internal_rst.'+y4m2d2_h2m2 + vegdynRstFile= rstpath+ensdir +'/'+self.ExeInputs['RESTART_ID']+ '.vegdyn_internal_rst' + if not os.path.isfile(vegdynRstFile): # no vegdyn restart from LDASsa + if not os.path.isfile(vegdynRstFile0): + vegdynRstFile = glob.glob(self.bcs_dir_land + 'vegdyn_*.dat')[0] + else : + vegdynRstFile = glob.glob(self.bcs_dir_land + 'vegdyn_*.dat')[0] + if self.with_land: + catchRstFile = glob.glob(self.exphome+'/'+exp_id+'/mk_restarts/*'+self.catch+'_internal_rst.'+YYYYMMDD+'*')[0] + + # catchment restart file + if os.path.isfile(catchRstFile) : + catchLocal = self.rstdir+ensdir +'/'+ y4m2+'/'+self.ExeInputs['EXP_ID']+'.'+self.catch+'_internal_rst.'+y4m2d2_h2m2 + if self.isZoomIn : + print( "Creating local catchment restart file... 
\n") + cmd=self.bindir +'/preprocess_ldas.x zoomin_catchrst '+ catchRstFile +' ' + catchLocal + ' '+ tmp_f2g_file.name + print ("cmd: "+cmd) + sp.call(shlex.split(cmd)) + else : + shutil.copy(catchRstFile,catchLocal) + + catchRstFile = catchLocal + + if '0000' in ensdir : + catchRstFile0 = catchRstFile + else : # re-use 0000 catch file + catchRstFile = catchRstFile0 + + # vegdyn restart file + if os.path.isfile(vegdynRstFile) : + vegdynLocal = self.rstdir+ensdir +'/'+self.ExeInputs['EXP_ID']+'.vegdyn_internal_rst' + if self.isZoomIn : + print ("Creating the local veg restart file... \n") + cmd=self.bindir + '/preprocess_ldas.x zoomin_vegrst '+ vegdynRstFile +' ' + vegdynLocal + ' '+ tmp_f2g_file.name + print ("cmd: " + cmd) + sp.call(shlex.split(cmd)) + else : + shutil.copy(vegdynRstFile,vegdynLocal) + + vegdynRstFile = vegdynLocal + + if '0000' in ensdir : + vegdynRstFile0 = vegdynRstFile + else : + vegdynRstFile = vegdynRstFile0 + + landiceRstFile = '' + if self.with_landice : + if RESTART_str == '0' : + exit("RESTART=0 not supported for landice tiles. Please use RESTART=M, 1, or 2") + if RESTART_str == '1' : + landiceRstFile = rstpath+ensdir +'/'+ y4m2+'/'+self.ExeInputs['RESTART_ID']+'.'+'landice_internal_rst.'+y4m2d2_h2m2 + if RESTART_str == '2' or RESTART_str == 'M': + landiceRstFile = glob.glob(self.exphome+'/'+exp_id+'/mk_restarts/*'+'landice_internal_rst.'+YYYYMMDD+'*')[0] + + if os.path.isfile(landiceRstFile) : + landiceLocal = self.rstdir+ensdir +'/'+ y4m2+'/'+self.ExeInputs['EXP_ID']+'.landice_internal_rst.'+y4m2d2_h2m2 + if self.isZoomIn : + print ("Creating zoom-in of landice restart file... 
\n") + cmd=self.bindir + '/preprocess_ldas.x zoomin_landicerst '+ landiceRstFile +' ' + landiceLocal + ' '+ tmp_f2g_file.name + print ("cmd: " + cmd) + sp.call(shlex.split(cmd)) + else : + shutil.copy(landiceRstFile,landiceLocal) + + landiceRstFile = landiceLocal + + if '0000' in ensdir : + landiceRstFile0 = landiceRstFile + else : + landiceRstFile = landiceRstFile0 + + if (self.has_geos_pert and self.perturb == 1) : + pertRstFile = rstpath+ensdir +'/'+ y4m2+'/'+self.ExeInputs['RESTART_ID']+'.landpert_internal_rst.'+y4m2d2_h2m2 + pertLocal = self.rstdir+ensdir +'/'+ y4m2+'/'+self.ExeInputs['EXP_ID']+'.landpert_internal_rst.'+y4m2d2_h2m2 + shutil.copy(pertRstFile,pertLocal) + pertRstFile = pertLocal + + if self.with_land : + print ('catchRstFile: ' + catchRstFile) + print ('vegdynRstFile: ' + vegdynRstFile) + os.symlink(catchRstFile, myCatchRst) + os.symlink(vegdynRstFile, myVegRst) + if self.with_landice : + print("link landice restart: " + myLandiceRst) + os.symlink(landiceRstFile, myLandiceRst) + if ( self.has_geos_pert and self.perturb == 1 ): + os.symlink(pertRstFile, myPertRst) + + # catch_param restar file + catch_param_file = self.bcsdir+'/'+ y4m2+'/'+self.ExeInputs['EXP_ID']+'.ldas_catparam.'+y4m2d2_h2m2+'z.bin' + if self.with_land: + assert os.path.isfile(catch_param_file), "need catch_param file %s" % catch_param_file + + if self.has_mwrtm : + mwRTMRstFile = self.mwrtm_file + mwRTMLocal = self.bcsdir+'/'+ y4m2+'/'+self.ExeInputs['EXP_ID']+'.ldas_mwRTMparam.'+y4m2d2_h2m2+'z.nc4' + if self.isZoomIn : + print ("Creating the local mwRTM restart file... 
\n") + cmd= self.bindir +'/preprocess_ldas.x zoomin_mwrtmrst '+ mwRTMRstFile +' ' + mwRTMLocal + ' '+ tmp_f2g_file.name + + print ("cmd: " + cmd) + sp.call(shlex.split(cmd)) + else : + shutil.copy(mwRTMRstFile,mwRTMLocal) + + mwRTMRstFile = mwRTMLocal + mymwRTMRst = myRstDir+'/mwrtm_param_rst' + os.symlink(mwRTMRstFile, mymwRTMRst) + + # update 'restart_path' to use relative path from outdir + print ("Updating restart path...") + self.ExeInputs['RESTART_PATH'] = myRstDir + #if os.path.isfile(tmp_f2g_file.name): + # os.remove(tmp_f2g_file.name) + status = True + return status + + # ----------------------------------------------------------------------------------- + + def createRCFiles(self): + """ + (1) get resource files form DEFAULT rc files from /etc + (2) update from customed rc files + (2) write rc files to the run directory + """ + + status = False + + for mydir in [self.blddirLn, self.rundir]: + assert os.path.isdir(mydir), \ + 'dir [%s] does not exist!' % mydir + + if self.ladas_cpl == 0: + # copy ldas_setup exeinp and batinp input files to rundir (for the record) + # if a file w/ the same name already exists at rundir + # append 1,2,3 etc, to the filename + ## exe inp file + exefilename = self.exeinpfile.rstrip('/').split('/')[-1] + newfilename = exefilename + _nens = self.nens + ctr = 0 + while os.path.isfile(self.rundir+'/'+newfilename): + ctr += 1 + newfilename = exefilename + '.%d' % ctr + shutil.copy(self.exeinpfile, self.rundir+'/'+newfilename) + ## bat inp file + batfilename = self.batinpfile.rstrip('/').split('/')[-1] + newfilename = batfilename + ctr = 0 + while os.path.isfile(self.rundir+'/'+newfilename): + ctr += 1 + newfilename = batfilename + '.%d' % ctr + shutil.copy(self.batinpfile, self.rundir+'/'+newfilename) + + # ----------------------------------- + + etcdir = self.blddirLn + '/etc' + + #defalt nml + default_nml = glob.glob(etcdir+'/LDASsa_DEFAULT_inputs_*.nml') + for nmlfile in default_nml: + 
shortfile=self.rundir+'/'+nmlfile.split('/')[-1] + shutil.copy2(nmlfile, shortfile) + # special nml + special_nml=[] + if self.ladas_cpl > 0: + special_nml= glob.glob(etcdir+'/LDASsa_SPECIAL_inputs_*.nml') + else : + if 'NML_INPUT_PATH' in self.ExeInputs : + special_nml = glob.glob(self.ExeInputs['NML_INPUT_PATH']+'/LDASsa_SPECIAL_inputs_*.nml') + + for nmlfile in special_nml: + shortfile=self.rundir+'/'+nmlfile.split('/')[-1] + shutil.copy2(nmlfile, shortfile) + + if self.ladas_cpl > 0: + # edit resolution info in ensupd nml file + sp.run(['sed', '-i', 's//'+self.agcm_res+'/g', self.rundir+'/LDASsa_SPECIAL_inputs_ensupd.nml']) + + # get optimzed NX and IMS + optimized_distribution_file = tempfile.NamedTemporaryFile(delete=False) + print ("Optimizing... decomposition of processes.... \n") + cmd = self.bindir + '/preprocess_ldas.x optimize '+ self.inpdir+'/tile.data '+ str(self.RmInputs['ntasks_model']) + ' ' + optimized_distribution_file.name + ' ' + self.rundir + ' ' + '_'.join(self.tile_types) + print ("cmd: " + cmd) + print ("IMS.rc or JMS.rc would be generated on " + self.rundir) + sp.call(shlex.split(cmd)) + optinxny = parseInputFile(optimized_distribution_file.name) + if (int(optinxny['NX']) == 1): + if int(optinxny['NY']) != int(self.RmInputs['ntasks_model']): + self.RmInputs['ntasks_model']=optinxny['NY'] + print ('adjust ntasks_model %d for cubed-sphere grid' % int(self.RmInputs['ntasks_model'])) + + + #os.remove(optimized_distribution_file.name) + + # DEFAULT rc files + default_rc = glob.glob(etcdir+'/GEOSldas_*.rc') + assert len(default_rc)==6 + print (default_rc) + for rcfile in default_rc: + shortfile=rcfile.rsplit('GEOSldas_',1)[1] + print (shortfile + ' ' + etcdir + ' ' + self.rundir) + if shortfile =='HIST.rc': + tmprcfile=self.rundir+'/HISTORY.rc' + histrc_file=rcfile + + _file_found = False + if 'HISTRC_FILE' in self.ExeInputs : + _tmpfile = self.ExeInputs['HISTRC_FILE'].replace("'",'').replace('"','') + if(os.path.isfile(_tmpfile)) : + 
_file_found = True + else : + assert not _tmpfile.strip(), "HISTRC_FILE: %s is NOT a file. " %_tmpfile + + if _file_found : + histrc_file = self.ExeInputs['HISTRC_FILE'] + shutil.copy2(histrc_file,tmprcfile) + else : + shutil.copy2(histrc_file,tmprcfile) + if 'EASE' in self.ExeInputs['GRIDNAME'] : + TMPSTR='OUT1d' + else : + TMPSTR='OUT2d' + cmd = self.bindir +'/process_hist.csh' + ' ' \ + + tmprcfile + ' ' \ + + TMPSTR + ' ' \ + + self.ExeInputs['GRIDNAME'] + ' ' \ + + str(self.ExeInputs['LSM_CHOICE']) + ' ' \ + + str(self.ExeInputs['AEROSOL_DEPOSITION']) + ' ' \ + + str(self.ExeInputs['RUN_IRRIG']) + ' ' \ + + str(self.nens) + print(cmd) + #os.system(cmd) + sp.call(shlex.split(cmd)) + for line in fileinput.input(tmprcfile,inplace=True): + print (line.rstrip().replace('GEOSldas_expid',self.ExeInputs['EXP_ID'])) + + # if coupled land-atm DAS, always use either GEOSldas_HISTdet.rc or GEOSldas_HISTens.rc (depending on ladas_cpl) + if ( shortfile =='HISTdet.rc' and self.ladas_cpl == 1 ) or ( shortfile =='HISTens.rc' and self.ladas_cpl == 2 ): + tmprcfile=self.rundir+'/HISTORY.rc' + histrc_file=rcfile + shutil.copy2(rcfile, tmprcfile) + for line in fileinput.input(tmprcfile,inplace=True): + print (line.rstrip().replace('GEOSldas_expid',self.ExeInputs['EXP_ID'])) + for line in fileinput.input(tmprcfile,inplace=True): + print (line.rstrip().replace('GRIDNAME',self.ExeInputs['GRIDNAME'])) + + # just copy an empty ExtData.rc + if shortfile=='ExtData.rc' : + shutil.copy2(rcfile, self.rundir+'/'+shortfile) + + if shortfile == 'CAP.rc': + tmprcfile = self.rundir+'/CAP.rc' + shutil.copy2(rcfile,tmprcfile) + + _num_sgmt = int(self.ExeInputs['NUM_SGMT']) + + for line in fileinput.input(tmprcfile,inplace=True): + print (line.rstrip().replace('JOB_SGMT:',self.job_sgmt[0])) + for line in fileinput.input(tmprcfile,inplace=True): + print (line.rstrip().replace('NUM_SGMT:','NUM_SGMT: %d'% _num_sgmt)) + for line in fileinput.input(tmprcfile,inplace=True): + print 
(line.rstrip().replace('BEG_DATE:',self.begDates[ 0].strftime('BEG_DATE: %Y%m%d %H%M%S'))) + for line in fileinput.input(tmprcfile,inplace=True): + print (line.rstrip().replace('END_DATE:',self.endDates[-1].strftime('END_DATE: %Y%m%d %H%M%S'))) + + if shortfile == 'LDAS.rc' : + ldasrcInp = OrderedDict() + # land default + default_surfrcInp = parseInputFile(etcdir+'/GEOS_SurfaceGridComp.rc', ladas_cpl=self.ladas_cpl) + for key,val in default_surfrcInp.items() : + ldasrcInp[key] = val + + # ldas default, may overwrite land default + default_ldasrcInp = parseInputFile(rcfile, ladas_cpl=self.ladas_cpl) + for key,val in default_ldasrcInp.items() : + ldasrcInp[key] = val + + # exeinp, may overwrite ldas default + for key,val in self.ExeInputs.items(): + if key not in self.NoneLDASrcKeys: + ldasrcInp[key]= val + + # overide by optimized distribution + #for key,val in optinxny.items(): + # ldasrcInp[key]= val + + # create BC in rc file + tmpl_ = '' + if self.nens >1 : + tmpl_='%s' + if self.perturb == 1: + ldasrcInp['PERTURBATIONS'] ='1' + rstkey =[] + rstval =[] + if self.with_land : + bcval=['../input/green','../input/lai','../input/lnfm','../input/ndvi','../input/nirdf','../input/visdf'] + bckey=['GREEN','LAI','LNFM','NDVI','NIRDF','VISDF'] + for key, val in zip(bckey,bcval): + keyn = key+'_FILE' + valn = val+'.data' + ldasrcInp[keyn]= valn + if('catchcn' in self.catch): + ldasrcInp['CO2_MonthlyMean_DiurnalCycle_FILE']= '../input/CO2_MonthlyMean_DiurnalCycle.nc4' + else: + # remove catchcn-specific entries that do not apply to catch model + ldasrcInp.pop('DTCN',None) + ldasrcInp.pop('ATM_CO2',None) + ldasrcInp.pop('CO2',None) + ldasrcInp.pop('CO2_YEAR',None) + ldasrcInp.pop('PRESCRIBE_DVG',None) + + # create restart item in RC + catch_ = self.catch.upper() + + if catch_+'_INTERNAL_RESTART_TYPE' in ldasrcInp : + # avoid duplicate + del ldasrcInp[ catch_ +'_INTERNAL_RESTART_TYPE'] + if catch_+'_INTERNAL_CHECKPOINT_TYPE' in ldasrcInp : + # avoid duplicate + del ldasrcInp[ 
catch_ +'_INTERNAL_CHECKPOINT_TYPE'] + if 'VEGDYN_INTERNAL_RESTART_TYPE' in ldasrcInp : + # avoid duplicate + del ldasrcInp['VEGDYN_INTERNAL_RESTART_TYPE'] + + rstkey.append(catch_) + rstkey.append('VEGDYN') + rstval.append(self.catch) + rstval.append('vegdyn') + + if self.with_landice: + rstkey.append('LANDICE') + rstval.append('landice') + + if self.has_mwrtm : + keyn='LANDASSIM_INTERNAL_RESTART_FILE' + valn='../input/restart/mwrtm_param_rst' + ldasrcInp[keyn]= valn + if self.has_vegopacity : + keyn='VEGOPACITY_FILE' + valn='../input/vegopacity.data' + ldasrcInp[keyn]= valn + + if self.nens > 1 : + keyn='ENS_ID_WIDTH' + valn=str(self.ens_id_width) + ldasrcInp[keyn]= valn + + if self.has_landassim_seed and self.assim : + keyn='LANDASSIM_OBSPERTRSEED_RESTART_FILE' + valn='../input/restart/landassim_obspertrseed'+tmpl_+'_rst' + ldasrcInp[keyn]= valn + + if self.assim: + keyn='LANDASSIM_OBSPERTRSEED_CHECKPOINT_FILE' + valn='landassim_obspertrseed'+tmpl_+'_checkpoint' + ldasrcInp[keyn]= valn + + for key,val in zip(rstkey,rstval) : + keyn = key+ '_INTERNAL_RESTART_FILE' + valn = '../input/restart/'+val+tmpl_+'_internal_rst' + ldasrcInp[keyn]= valn + + # checkpoint file and its type + if self.with_land : + keyn = catch_ + '_INTERNAL_CHECKPOINT_FILE' + valn = self.catch+tmpl_+'_internal_checkpoint' + ldasrcInp[keyn]= valn + + if self.with_landice : + keyn = 'LANDICE_INTERNAL_CHECKPOINT_FILE' + valn = 'landice'+tmpl_+'_internal_checkpoint' + ldasrcInp[keyn]= valn + # specify LANDPERT restart file + if (self.perturb == 1): + keyn = 'LANDPERT_INTERNAL_RESTART_FILE' + valn = '../input/restart/landpert'+tmpl_+'_internal_rst' + ldasrcInp[keyn]= valn + # for lat/lon and EASE tile space, specify LANDPERT checkpoint file here (via MAPL); + # for cube-sphere tile space, Landpert GC will set up LANDPERT checkpoint file + if ('-CF' not in self.ExeInputs['GRIDNAME']): + keyn = 'LANDPERT_INTERNAL_CHECKPOINT_FILE' + valn = 'landpert'+tmpl_+'_internal_checkpoint' + ldasrcInp[keyn]= valn 
+ + # add items for stretched grid + if '-SG' in self.ExeInputs['BCS_RESOLUTION']: + pos_ = self.ExeInputs['BCS_RESOLUTION'].find('-SG') + SG = self.ExeInputs['BCS_RESOLUTION'][pos_+1:pos_+6] # get ID of stretched grid (e.g., SG002) + ldasrcInp['STRETCH_FACTOR'] = STRETCH_GRID[SG][0] + ldasrcInp['TARGET_LAT'] = STRETCH_GRID[SG][1] + ldasrcInp['TARGET_LON'] = STRETCH_GRID[SG][2] + + # write LDAS.rc + fout =open(self.rundir+'/'+shortfile,'w') + # ldasrcInp['NUM_LDAS_ENSEMBLE']=ldasrcInp.pop('NUM_ENSEMBLE') + for key,val in optinxny.items(): + keyn=(key+":").ljust(36) + fout.write(keyn+str(val)+'\n') + for key,val in ldasrcInp.items() : + keyn=(key+":").ljust(36) + fout.write(keyn+str(val)+'\n') + fout.write("OUT_PATH:".ljust(36)+self.out_path+'\n') + fout.write("EXP_ID:".ljust(36)+self.ExeInputs['EXP_ID']+'\n') + fout.write("TILING_FILE:".ljust(36)+"../input/tile.data\n") + + fout.close() + + fout=open(self.rundir+'/'+'cap_restart','w') + #fout.write(self.ExeInputs['BEG_DATE']) + fout.write(self.begDates[0].strftime('%Y%m%d %H%M%S')) + fout.close() + status=True + return status + + # ----------------------------------------------------------------------------------- + + def createBatchRun(self): + """ + """ + + status = False + + os.chdir(self.rundir) + fout =open(self.rundir+'/ldas_batchrun.j','w') + fout.write("#!/bin/bash -f\n") + jobid = None + SBATCHQSUB = 'sbatch' + expid = self.ExeInputs['EXP_ID'] + if self.GEOS_SITE == 'NAS': + SBATCHQSUB = 'qsub' + fout.write("\nsed -i 's/if($capdate<$enddate) "+SBATCHQSUB+"/#if($capdate<$enddate) "+SBATCHQSUB+"/g' lenkf.j\n\n") + nSegments = self.nSegments + for iseg in range(nSegments): + if iseg ==0 : + fout.write("jobid%d=$(echo $(sbatch lenkf.j) | cut -d' ' -f 4)\n"%(iseg)) + fout.write("echo $jobid%d\n"%iseg ) + else : + _start = self.begDates[iseg] + myDateTime = '%04d%02d%02d_%02d%02dz' % \ + (_start.year, _start.month, _start.day,_start.hour,_start.minute) + _logfile = os.path.relpath( + '/'.join([ + self.outdir, + 
self.ExeInputs['EXP_DOMAIN'], + 'rc_out', + 'Y%04d' % _start.year, + 'M%02d' % _start.month, + '.'.join([expid, 'ldas_log', myDateTime, 'txt']), + ]), + self.rundir) + _errfile = os.path.relpath( + '/'.join([ + self.outdir, + self.ExeInputs['EXP_DOMAIN'], + 'rc_out', + 'Y%04d' % _start.year, + 'M%02d' % _start.month, + '.'.join([expid, 'ldas_err', myDateTime, 'txt']), + ]), + self.rundir) + + #fout.write("jobid%d=$(echo $(sbatch --dependency=afterany:$jobid%d --output=%s --error=%s lenkf.j) | cut -d' ' -f 4)\n"%(iseg,iseg-1,_logfile, _errfile)) + fout.write("jobid%d=$(echo $(sbatch --dependency=afterok:$jobid%d lenkf.j) | cut -d' ' -f 4)\n"%(iseg,iseg-1)) + fout.write("echo $jobid%d\n"%iseg ) + fout.write("\nsed -i 's/#if($capdate<$enddate) "+SBATCHQSUB+"/if($capdate<$enddate) "+SBATCHQSUB+"/g' lenkf.j\n\n") + fout.close() + + sp.call(['chmod', '755', self.rundir+'/ldas_batchrun.j']) + status = True + return status + + # ----------------------------------------------------------------------------------- + + def createRunScripts(self): + """ + """ + + status = False + + os.chdir(self.rundir) + + my_qos='allnccs' + if self.GEOS_SITE == 'NAS': my_qos = 'normal' + if 'qos' in self.RmInputs : + my_qos = self.RmInputs['qos'] + + my_job=self.ExeInputs['EXP_ID'] + if 'job_name' in self.RmInputs : + my_job = self.RmInputs['job_name'] + + start = self.begDates[0] + expid = self.ExeInputs['EXP_ID'] + myDateTime = '%04d%02d%02d_%02d%02dz' % \ + (start.year, start.month, start.day,start.hour,start.minute) + my_logfile = os.path.relpath( + '/'.join([ + self.outdir, + self.ExeInputs['EXP_DOMAIN'], + 'rc_out', + 'Y%04d' % start.year, + 'M%02d' % start.month, + '.'.join([expid, 'ldas_log', myDateTime, 'txt']), + ]), + self.rundir) + my_errfile = os.path.relpath( + '/'.join([ + self.outdir, + self.ExeInputs['EXP_DOMAIN'], + 'rc_out', + 'Y%04d' % start.year, + 'M%02d' % start.month, + '.'.join([expid, 'ldas_err', myDateTime, 'txt']), + ]), + self.rundir) + + constraint = 
'"[mil|cas]"' + if self.GEOS_SITE == "NAS" : + constraint = 'cas_ait' + + if 'constraint' in self.RmInputs: + constraint = self.RmInputs['constraint'] + + my_nodes='' + if 'ntasks-per-node' in self.RmInputs: + ntasks_per_node = int(self.RmInputs['ntasks-per-node']) + ntasks = int(self.RmInputs['ntasks_model']) + assert ntasks%ntasks_per_node == 0, 'Please make ntasks_model a multiple of ntasks-per-node' + nodes = ntasks//ntasks_per_node + my_nodes = '#SBATCH --nodes=' + str(nodes) +' --ntasks-per-node=' + str(ntasks_per_node) + if (ntasks_per_node > 46): + assert constraint != 'cas', "Make sure constraint is compataible with ntasks-per-node" + + SBATCHQSUB = 'sbatch' + if self.GEOS_SITE == 'NAS': + SBATCHQSUB = 'qsub' + + DETECTED_MPI_STACK = "@MPI_STACK@" + + job_head = job_directive[self.GEOS_SITE] + lenkf_str= (job_head+job_body).format( + SBATCHQSUB = SBATCHQSUB, + MY_ACCOUNT = self.RmInputs['account'], + MY_WALLTIME = self.RmInputs['walltime'], + MY_NTASKS_MODEL = str(self.RmInputs['ntasks_model']), + MY_NODES = my_nodes, + MY_CONSTRAINT = constraint, + MY_OSERVER_NODES = str(self.RmInputs['oserver_nodes']), + MY_WRITERS_NPES = str(self.RmInputs['writers-per-node']), + MY_QOS = my_qos, + MY_JOB = my_job, + MY_EXPID = self.ExeInputs['EXP_ID'], + MY_EXPDOMAIN = self.ExeInputs['EXP_DOMAIN'], + MY_LOGFILE = my_logfile, + MY_ERRFILE = my_errfile, + MY_LANDMODEL = self.catch, + MY_POSTPROC_HIST = str(self.ExeInputs['POSTPROC_HIST']), + MY_FIRST_ENS_ID = str(self.first_ens_id), + MY_LADAS_COUPLING = str(self.ladas_cpl), + MY_ENSEMBLE_FORCING= self.ExeInputs.get('ENSEMBLE_FORCING', 'NO').upper(), + MY_ADAS_EXPDIR = self.adas_expdir, + MY_EXPDIR = self.expdir, + DETECTED_MPI_STACK = DETECTED_MPI_STACK, + ) + + with open('lenkf.j','wt') as fout : + fout.write(lenkf_str) + sp.call(['chmod', '755', 'lenkf.j']) + + print ('\nExperiment directory: %s' % self.expdir) + print () + status = True + return status diff --git a/GEOSldas_App/ldas_setup b/GEOSldas_App/ldas_setup 
index 845312b6..f7540fac 100755 --- a/GEOSldas_App/ldas_setup +++ b/GEOSldas_App/ldas_setup @@ -1,1883 +1,11 @@ #!/usr/bin/env python3 -import os import sys -import glob -import copy -import linecache -import shutil import argparse -import fileinput -import time import resource -import subprocess as sp -import shlex -import tempfile -import netCDF4 +from setup_utils import * +from ldas import * -from dateutil import rrule -from datetime import datetime -from datetime import timedelta -from collections import OrderedDict -from dateutil.relativedelta import relativedelta -from remap_utils import * -from remap_lake_landice_saltwater import * -from remap_catchANDcn import * -from lenkf_j_template import * - -""" -This script is intended to be run from any installed directory with GEOSldas.x and ldas_setup -(The default setup is ../install/bin) -""" - -class LDASsetup: - - def __init__(self, cmdLineArgs): - """ - """ - # ------ - # Required exe input fields - # These fields are needed to pre-compute exp dir structure - # ------ - rqdExeInpKeys = ['EXP_ID', 'EXP_DOMAIN', 'NUM_LDAS_ENSEMBLE', - 'BEG_DATE', 'END_DATE','RESTART_PATH', - 'RESTART_DOMAIN','RESTART_ID','MET_TAG','MET_PATH','FORCE_DTSTEP','BCS_PATH', 'BCS_RESOLUTION'] - rqdExeInpKeys_rst = ['EXP_ID', 'EXP_DOMAIN', 'NUM_LDAS_ENSEMBLE', - 'BEG_DATE', 'END_DATE','MET_TAG','MET_PATH','FORCE_DTSTEP','BCS_PATH', 'BCS_RESOLUTION'] - - # These keywords are excluded from LDAS.rc (i.e., only needed in pre- or post-processing) - self.NoneLDASrcKeys=['EXP_ID', 'EXP_DOMAIN', - 'BEG_DATE', 'END_DATE','RESTART','RESTART_PATH', - 'RESTART_DOMAIN','RESTART_ID','BCS_PATH','TILING_FILE','GRN_FILE','LAI_FILE','LNFM_FILE','NIRDF_FILE', - 'VISDF_FILE','CATCH_DEF_FILE','NDVI_FILE', - 'NML_INPUT_PATH','HISTRC_FILE','RST_FROM_GLOBAL','JOB_SGMT','NUM_SGMT','POSTPROC_HIST', - 'MINLON','MAXLON','MINLAT','MAXLAT','EXCLUDE_FILE','INCLUDE_FILE','MWRTM_PATH','GRIDNAME', - 'ADAS_EXPDIR', 'BCS_RESOLUTION', 'TILE_FILE_FORMAT' ] - - 
self.GEOS_SITE = "@GEOS_SITE@" - - # ------ - # Required resource manager input fields - # ------ - rqdRmInpKeys = ['account', 'walltime', 'ntasks_model'] - # ------ - # Optional resource manager input fields - # ------ - optSlurmInpKeys = ['job_name', 'qos', 'oserver_nodes', 'writers-per-node', 'constraint'] - - - # =============================================================================================== - # - # ------ - # ./ldas_setup sample ... - # ------ - # - # "sample" sub-command: - # '--exeinp' and '--batinp' are mutually exclusive command line arguments. - # Specifying one will set it to True and set the other one to False. - # That is, we can have either: {'exeinp': False, 'batinp': True } - # or: {'exeinp': True, 'batinp': False} - - if 'exeinp' in cmdLineArgs: # 'exeinp' is always present in "sample" mode. - - if cmdLineArgs['exeinp']: - _produceExeInput() - elif cmdLineArgs['batinp']: - _printRmInputKeys( rqdRmInpKeys, optSlurmInpKeys) - else: - raise Exception('unrecognized option') - # - # EXIT after completing "sample" sub-command - sys.exit(0) - - - # =============================================================================================== - # - # ------ - # ./ldas_setup setup ... 
- # ------ - # Instance variables - self.exeinpfile = cmdLineArgs['exeinpfile'] - self.batinpfile = cmdLineArgs['batinpfile'] - exphome_ = cmdLineArgs['exphome'].rstrip('/') - assert os.path.isdir(exphome_) # exphome should exist - self.exphome = os.path.abspath(exphome_) - self.verbose = cmdLineArgs['verbose'] - - # command line args for coupled land-atm DAS (see "help" strings in parseCmdLine() for details) - self.ladas_cpl = cmdLineArgs['ladas_cpl'] - self.nymdb = cmdLineArgs['nymdb'] - self.nhmsb = cmdLineArgs['nhmsb'] - self.agcm_res = cmdLineArgs['agcm_res'] - self.bcs_version = cmdLineArgs['bcs_version'] - self.rstloc = cmdLineArgs['rstloc'] - self.varwindow = cmdLineArgs['varwindow'] - self.nens = cmdLineArgs['nens'] - - # obsolete command line args - self.runmodel = cmdLineArgs['runmodel'] - if self.runmodel : - print('\n The option "--runmodel" is out of date, not necessary anymore. \n') - - self.daysperjob = cmdLineArgs['daysperjob'] - self.monthsperjob = cmdLineArgs['monthsperjob'] - - self.rqdExeInp = OrderedDict() - self.rqdRmInp = OrderedDict() - self.optRmInp = OrderedDict() - self.rundir = None - self.blddir = None - self.blddirLn = None - self.outdir = None - self.out_path = None - self.inpdir = None - self.exefyl = None - self.isZoomIn = False - self.catch = '' - self.has_mwrtm = False - self.has_vegopacity = False - self.assim = False - self.has_landassim_seed = False - self.has_geos_pert = False - self.nSegments = 1 - self.perturb = 0 - self.first_ens_id = 0 - self.in_rstfile = None - self.in_tilefile = None # default string - self.ens_id_width = 6 # _eXXXX - self.bcs_dir_land = '' - self.bcs_dir_geom = '' - self.bcs_dir_landshared = '' - self.tile_types = '' - self.with_land = False - self.with_landice = False - self.adas_expdir = '' - - # ------ - # Read exe input file which is required to set up the dir - # ------ - if self.ladas_cpl is None: - self.ladas_cpl = 0 - else: - self.ladas_cpl = int(self.ladas_cpl) - - self.rqdExeInp = {} - if 
self.ladas_cpl == 0: - self.rqdExeInp = self._parseInputFile(cmdLineArgs['exeinpfile']) - else: - _produceExeInput(out_dict=self.rqdExeInp, ladas_cpl=self.ladas_cpl) - - # verifing the required input - if 'RESTART' not in self.rqdExeInp : - self.rqdExeInp['RESTART'] = "1" - - if self.rqdExeInp['RESTART'].isdigit() : - if int(self.rqdExeInp['RESTART']) ==0 : - rqdExeInpKeys = rqdExeInpKeys_rst - self.rqdExeInp['RESTART_ID'] = 'None' - self.rqdExeInp['RESTART_DOMAIN'] = 'None' - self.rqdExeInp['RESTART_PATH'] = 'None' - else: - if self.rqdExeInp['RESTART'] =='G' : - rqdExeInpKeys = rqdExeInpKeys_rst - self.rqdExeInp['RESTART_DOMAIN'] = 'None' - else: - self.rqdExeInp['RESTART_ID'] = 'None' - self.rqdExeInp['RESTART_DOMAIN'] = 'None' - self.rqdExeInp['RESTART_PATH'] = 'None' - - ### check if ldas is coupled to adas; if so, set/overwrite input parameters accordingly - if self.ladas_cpl > 0 : - - # make sure all necessary command line arguments were supplied - assert self.nymdb is not None, "Error. Must have command line arg nymdb for coupled land-atm DAS.\n" - assert self.nhmsb is not None, "Error. Must have command line arg nhmsb for coupled land-atm DAS.\n" - assert self.agcm_res is not None, "Error. Must have command line arg agcm_res for coupled land-atm DAS.\n" - assert self.bcs_version is not None, "Error. Must have command line arg bcs_version for coupled land-atm DAS.\n" - assert self.rstloc is not None, "Error. Must have command line arg rstloc for coupled land-atm DAS.\n" - assert self.varwindow is not None, "Error. Must have command line arg varwindow for coupled land-atm DAS.\n" - assert self.nens is not None, "Error. 
Must have command line arg nens for coupled land-atmensDAS.\n" - - self.rqdExeInp['BEG_DATE'] = f"{self.nymdb} {self.nhmsb}" - rstloc_ = self.rstloc.rstrip('/') # remove trailing '/' - assert os.path.isdir(rstloc_) # make sure rstloc_ is a valid directory - self.rstloc = os.path.abspath(rstloc_) - self.rqdExeInp['RESTART_PATH'] = os.path.dirname( self.rstloc) - self.rqdExeInp['RESTART_ID'] = os.path.basename(self.rstloc) - self.adas_expdir = os.path.dirname( self.exphome) - self.rqdExeInp['ADAS_EXPDIR'] = self.adas_expdir - self.adas_expid = os.path.basename(self.adas_expdir) - self.rqdExeInp['MET_TAG'] = self.adas_expid + '__bkg' - - if self.ladas_cpl == 1 : - # ldas coupled with determistic component of ADAS - self.rqdExeInp['EXP_ID'] = self.adas_expid + '_LDAS' - self.rqdExeInp['MET_PATH'] = self.adas_expdir + '/recycle/holdpredout' - self.rqdExeInp['ENSEMBLE_FORCING'] = 'NO' - elif self.ladas_cpl == 2 : - # ldas coupled with ensemble component of ADAS - self.rqdExeInp['EXP_ID'] = self.adas_expid + '_LDAS4ens' - self.rqdExeInp['MET_PATH'] = self.adas_expdir + '/atmens/mem' - self.rqdExeInp['ENSEMBLE_FORCING'] = 'YES' - else : - exit("Error. 
Unknown value of self.ladas_cpl.\n") - - self.rqdExeInp['NUM_LDAS_ENSEMBLE'] = self.nens # fvsetup finds Nens by counting restart files - self.first_ens_id = 1 # match ADAS convention - self.rqdExeInp['FIRST_ENS_ID'] = self.first_ens_id - - self.agcm_res = 'CF' + self.agcm_res # change format to "CFnnnn" - self.rqdExeInp['EXP_DOMAIN'] = self.agcm_res +'x6C_GLOBAL' - - # when coupled to ADAS, "BCS_PATH" EXCLUDE bcs version info - # hard-wired BCS_PATH for now - self.rqdExeInp['BCS_PATH'] = "/discover/nobackup/projects/gmao/bcs_shared/fvInput/ExtData/esm/tiles" - self.rqdExeInp['BCS_PATH'] = self.rqdExeInp['BCS_PATH'].rstrip('/') + '/' + self.bcs_version - if self.bcs_version == "Icarus-NLv3" : - self.rqdExeInp['BCS_PATH'] = self.rqdExeInp['BCS_PATH'] + '_new_layout' - self.rqdExeInp['BCS_RESOLUTION'] = self.agcm_res +'x6C_' + self.agcm_res +'x6C' - self.rqdExeInp['RESTART_DOMAIN'] = self.agcm_res +'x6C_GLOBAL' - - # the following are not in default rqdExeInp list; hardwire for now - self.rqdExeInp['MWRTM_PATH'] = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/RTM_params/RTMParam_SMAP_L4SM_v006/' - self.rqdExeInp['LAND_ASSIM'] = "YES" - self.rqdExeInp['MET_HINTERP'] = 0 - self.landassim_dt = 10800 # seconds - # make sure ADAS analysis window [minutes] is multiple of LANDASSIM_DT [seconds] - if int(self.varwindow) % (self.landassim_dt/60) == 0 : - self.rqdExeInp['LANDASSIM_DT'] = self.landassim_dt - else : - exit("Error. 
LANDASSIM_DT is inconsistent with ADAS analysis window.\n") - self.rqdExeInp['LANDASSIM_T0'] = "013000" # HHMMSS - jsgmt1 = "00000000" - jsgmt2 = hours_to_hhmmss(int(self.varwindow)/60) # convert minutes to HHMMSS - self.rqdExeInp['JOB_SGMT'] = f"{jsgmt1} {jsgmt2}" - self.rqdExeInp['NUM_SGMT'] = 1 - self.rqdExeInp['FORCE_DTSTEP'] = 3600 - - # determine END_DATE = BEG_DATE + TIME_STEP_OF_ADAS_CYCLE - _beg_date = datetime.strptime( self.rqdExeInp['BEG_DATE'], "%Y%m%d %H%M%S") - _hours = int(self.rqdExeInp['JOB_SGMT'][ 9:11]) - _end_date = _beg_date + timedelta(hours=int(self.varwindow)/60) - self.rqdExeInp['END_DATE'] = _end_date.strftime("%Y%m%d %H%M%S") - - # end if self.ladas_cpl > 0 ----------------------------------------------------------------------------------------- - - for key in rqdExeInpKeys : - assert key in self.rqdExeInp,' "%s" is required in the input file %s' % (key,self.exeinpfile) - - # print rqd exe inputs - if self.verbose: - print ('\nInputs from exeinp file:\n') - _printdict(self.rqdExeInp) - - self.tile_types = self.rqdExeInp.get('TILE_TYPES',"100").split() - if "100" in self.tile_types : - self.with_land = True - if "20" in self.tile_types : - self.with_landice = True - - # nens is an integer and =1 for model run - self.nens = int(self.rqdExeInp['NUM_LDAS_ENSEMBLE']) # fail if Nens's val is not int - assert self.nens>0, 'NUM_LDAS_ENSEMBLE [%d] <= 0' % self.nens - _mydir = self.exphome + '/' + self.rqdExeInp['EXP_ID'] - assert not os.path.isdir(_mydir), 'Dir [%s] already exists!' % _mydir - _mydir = None - self.first_ens_id = int(self.rqdExeInp.get('FIRST_ENS_ID',0)) - - self.perturb = int(self.rqdExeInp.get('PERTURBATIONS',0)) - if self.nens > 1: - self.perturb = 1 - self.ensdirs = ['ens%04d'%iens for iens in range(self.first_ens_id, self.nens + self.first_ens_id)] - # if self.ens_id_width = 4, _width = '_e%04d' - _width = '_e%0{}d'.format(self.ens_id_width-2) - # self.ensids will be a list of [_e0000, _e0001, ...] 
- self.ensids = [ _width%iens for iens in range(self.first_ens_id, self.nens + self.first_ens_id)] - if (self.nens == 1) : - self.ensdirs_avg = self.ensdirs - self.ensids=[''] - else : - self.ensdirs_avg = self.ensdirs + ['ens_avg'] - - ## convert date-time strings to datetime object - ## start/end_time are converted to lists - ## ensure end>start - - self.begDates=[] - self.endDates=[] - self.begDates.append( - datetime.strptime( - self.rqdExeInp['BEG_DATE'], - '%Y%m%d %H%M%S' - ) - ) - self.endDates.append( - datetime.strptime( - self.rqdExeInp['END_DATE'], - '%Y%m%d %H%M%S' - ) - ) - if self.rqdExeInp['RESTART'].isdigit() : - if int(self.rqdExeInp['RESTART']) == 0 : - print ("No restart file (cold restart): Forcing start date to January 1, 0z") - year = self.begDates[0].year - self.begDates[0]=datetime(year =year,month=1,day =1,hour =0, minute= 0,second= 0) - - assert self.endDates[0]>self.begDates[0], \ - 'END_DATE <= BEG_DATE' - - self.job_sgmt = [] - if 'JOB_SGMT' in self.rqdExeInp: - self.job_sgmt.append("JOB_SGMT: "+self.rqdExeInp['JOB_SGMT']) - else: - _datediff = relativedelta(self.endDates[0],self.begDates[0]) - self.rqdExeInp['JOB_SGMT'] = "%04d%02d%02d %02d%02d%02d" %(_datediff.years, - _datediff.months, - _datediff.days, - _datediff.hours, - _datediff.minutes, - _datediff.seconds) - self.job_sgmt.append("JOB_SGMT: "+self.rqdExeInp['JOB_SGMT']) - - if 'NUM_SGMT' not in self.rqdExeInp: - self.rqdExeInp['NUM_SGMT'] = 1 - - _years = int(self.rqdExeInp['JOB_SGMT'][ 0: 4]) - _months = int(self.rqdExeInp['JOB_SGMT'][ 4: 6]) - _days = int(self.rqdExeInp['JOB_SGMT'][ 6: 8]) - assert self.rqdExeInp['JOB_SGMT'][8] == ' ' and self.rqdExeInp['JOB_SGMT'][9] != ' ', "JOB_SGMT format is not right" - _hours = int(self.rqdExeInp['JOB_SGMT'][ 9:11]) - _mins = int(self.rqdExeInp['JOB_SGMT'][11:13]) - _seconds= int(self.rqdExeInp['JOB_SGMT'][13:15]) - - - _difftime =timedelta(days = _years*365+_months*30+_days,hours = _hours,minutes=_mins,seconds=_seconds) - _difftime = 
int(self.rqdExeInp['NUM_SGMT'])*_difftime - _d = self.begDates[0] - _endDate = self.endDates[0] - _d = _d + _difftime - while _d < _endDate : - print (_difftime.days) - self.nSegments +=1 - print (_d.year, _d.month, _d.day) - self.begDates.append(_d) - self.endDates.insert(-1,_d) - _d = _d+ _difftime - - # assemble bcs sub-directories - self.bcs_dir_land = self.rqdExeInp['BCS_PATH']+ '/land/' + self.rqdExeInp['BCS_RESOLUTION']+'/' - self.bcs_dir_geom = self.rqdExeInp['BCS_PATH']+ '/geometry/' + self.rqdExeInp['BCS_RESOLUTION']+'/' - self.bcs_dir_landshared = self.rqdExeInp['BCS_PATH']+ '/land/shared/' - - # make sure MET_PATH and RESTART_PATH have trailing '/' - if self.rqdExeInp['MET_PATH'][-1] != '/': - self.rqdExeInp['MET_PATH'] = self.rqdExeInp['MET_PATH']+'/' - if self.rqdExeInp['RESTART_PATH'][-1] != '/': - self.rqdExeInp['RESTART_PATH'] = self.rqdExeInp['RESTART_PATH']+'/' - - # make sure catchment and vegdyn restart files ( at least one for each) exist - if 'CATCH_DEF_FILE' not in self.rqdExeInp : - self.rqdExeInp['CATCH_DEF_FILE']= self.bcs_dir_land + 'clsm/catchment.def' - if (self.with_land) : - assert os.path.isfile(self.rqdExeInp['CATCH_DEF_FILE']),"[%s] file does not exist " % self.rqdExeInp['CATCH_DEF_FILE'] - - self.rqdExeInp['RST_FROM_GLOBAL'] = 1 - # skip checking. It is users' reponsibility to make it right! 
- #if self.rqdExeInp['RESTART'].isdigit() : - # if int(self.rqdExeInp['RESTART']) == 1 : - # _numg = int(linecache.getline(self.rqdExeInp['CATCH_DEF_FILE'], 1).strip()) - # _numd = _numg - # ldas_domain = self.rqdExeInp['RESTART_PATH']+ \ - # self.rqdExeInp['RESTART_ID'] + \ - # '/output/'+self.rqdExeInp['RESTART_DOMAIN']+'/rc_out/'+self.rqdExeInp['RESTART_ID']+'.ldas_domain.txt' - # if os.path.isfile(ldas_domain) : - # _numd = int(linecache.getline(ldas_domain, 1).strip()) - # - # if _numg != _numd : - # self.rqdExeInp['RST_FROM_GLOBAL'] = 0 - - self.rqdExeInp['LNFM_FILE'] = '' - tile_file_format = self.rqdExeInp.get('TILE_FILE_FORMAT', 'DEFAULT') - if int(self.rqdExeInp['RST_FROM_GLOBAL']) == 1 : - txt_tile = glob.glob(self.bcs_dir_geom + '*.til') - nc4_tile = glob.glob(self.bcs_dir_geom + '*.nc4') - if tile_file_format.upper() == 'TXT' : self.rqdExeInp['TILING_FILE'] = txt_tile[0] - if tile_file_format.upper() == 'DEFAULT' : self.rqdExeInp['TILING_FILE'] = (txt_tile+nc4_tile)[-1] - - self.rqdExeInp['GRN_FILE'] = glob.glob(self.bcs_dir_land + 'green_clim_*.data')[0] - self.rqdExeInp['LAI_FILE'] = glob.glob(self.bcs_dir_land + 'lai_clim_*.data' )[0] - tmp_ = glob.glob(self.bcs_dir_land + 'lnfm_clim_*.data') - if (len(tmp_) ==1) : - self.rqdExeInp['LNFM_FILE'] = tmp_[0] - self.rqdExeInp['NDVI_FILE'] = glob.glob(self.bcs_dir_land + 'ndvi_clim_*.data' )[0] - self.rqdExeInp['NIRDF_FILE'] = glob.glob(self.bcs_dir_land + 'nirdf_*.dat' )[0] - self.rqdExeInp['VISDF_FILE'] = glob.glob(self.bcs_dir_land + 'visdf_*.dat' )[0] - else : - inpdir=self.rqdExeInp['RESTART_PATH']+self.rqdExeInp['RESTART_ID']+'/input/' - self.rqdExeInp['TILING_FILE'] = os.path.realpath(glob.glob(inpdir+'*tile.data')[0]) - self.rqdExeInp['GRN_FILE'] = os.path.realpath(glob.glob(inpdir+'green*data')[0]) - self.rqdExeInp['LAI_FILE'] = os.path.realpath(glob.glob(inpdir+'lai*data' )[0]) - tmp_ = glob.glob(self.bcs_dir_land + 'lnfm_clim_*.data') - if (len(tmp_) == 1) : - self.rqdExeInp['LNFM_FILE'] = 
tmp_[0] - self.rqdExeInp['NDVI_FILE'] = os.path.realpath(glob.glob(inpdir+'ndvi*data' )[0]) - self.rqdExeInp['NIRDF_FILE'] = os.path.realpath(glob.glob(inpdir+'nirdf*data')[0]) - self.rqdExeInp['VISDF_FILE'] = os.path.realpath(glob.glob(inpdir+'visdf*data')[0]) - - if self.rqdExeInp['RESTART'].isdigit() : - if int(self.rqdExeInp['RESTART']) == 2 : - self.rqdExeInp['RST_FROM_GLOBAL'] = 1 - ldas_domain = self.rqdExeInp['RESTART_PATH']+ \ - self.rqdExeInp['RESTART_ID'] + \ - '/output/'+self.rqdExeInp['RESTART_DOMAIN']+'/rc_out/'+self.rqdExeInp['RESTART_ID']+'.ldas_domain.txt' - inpdir=self.rqdExeInp['RESTART_PATH']+self.rqdExeInp['RESTART_ID']+'/input/' - in_tilefiles_ = glob.glob(inpdir+'*tile.data') - if len(in_tilefiles_) == 0 : - inpdir=self.rqdExeInp['RESTART_PATH']+self.rqdExeInp['RESTART_ID']+'/output/'+self.rqdExeInp['RESTART_DOMAIN']+'/rc_out/' - in_tilefiles_ = glob.glob(inpdir+'MAPL_*.til') - if len(in_tilefiles_) == 0 : - in_tilefiles_ = glob.glob(inpdir+'/*.til') - if len(in_tilefiles_) == 0 : - in_tilefiles_ = glob.glob(inpdir+'/*.nc4') - - self.in_tilefile =os.path.realpath(in_tilefiles_[0]) - - if os.path.isfile(ldas_domain): - txt_tile = glob.glob(self.bcs_dir_geom + '*.til') - nc4_tile = glob.glob(self.bcs_dir_geom + '*.nc4') - if tile_file_format.upper() == 'TXT' : self.rqdExeInp['TILING_FILE'] = txt_tile[0] - if tile_file_format.upper() == 'DEFAULT' : self.rqdExeInp['TILING_FILE'] = (txt_tile+nc4_tile)[-1] - - self.rqdExeInp['GRN_FILE'] = glob.glob(self.bcs_dir_land + 'green_clim_*.data')[0] - self.rqdExeInp['LAI_FILE'] = glob.glob(self.bcs_dir_land + 'lai_clim_*.data' )[0] - tmp_ = glob.glob(self.bcs_dir_land + 'lnfm_clim_*.data') - if (len(tmp_) == 1) : - self.rqdExeInp['LNFM_FILE'] = tmp_[0] - self.rqdExeInp['LNFM_FILE'] = glob.glob(self.bcs_dir_land + 'lnfm_clim_*.data' )[0] - self.rqdExeInp['NDVI_FILE'] = glob.glob(self.bcs_dir_land + 'ndvi_clim_*.data' )[0] - self.rqdExeInp['NIRDF_FILE'] = glob.glob(self.bcs_dir_land + 'nirdf_*.dat' )[0] - 
self.rqdExeInp['VISDF_FILE'] = glob.glob(self.bcs_dir_land + 'visdf_*.dat' )[0] - - if 'GRIDNAME' not in self.rqdExeInp : - tmptile = os.path.realpath(self.rqdExeInp['TILING_FILE']) - extension = os.path.splitext(tmptile)[1] - if extension == '.domain': - extension = os.path.splitext(tmptile)[0] - gridname_ ='' - if extension == '.til': - gridname_ = linecache.getline(tmptile, 3).strip() - else: - nc_file = netCDF4.Dataset(tmptile,'r') - gridname_ = nc_file.getncattr('Grid_Name') - # in case it is an old name: SMAP-EASEvx-Mxx - gridname_ = gridname_.replace('SMAP-','').replace('-M','_M') - self.rqdExeInp['GRIDNAME'] = gridname_ - - if 'LSM_CHOICE' not in self.rqdExeInp: - self.rqdExeInp['LSM_CHOICE'] = 1 - - if int(self.rqdExeInp['LSM_CHOICE']) == 1 : - self.catch = 'catch' - if int(self.rqdExeInp['LSM_CHOICE']) == 2 : - self.catch = 'catchcnclm40' - - if self.with_land: - assert int(self.rqdExeInp['LSM_CHOICE']) <= 2, "\nLSM_CHOICE=3 (Catchment-CN4.5) is no longer supported. Please set LSM_CHOICE to 1 (Catchment) or 2 (Catchment-CN4.0)" - - if 'POSTPROC_HIST' not in self.rqdExeInp: - self.rqdExeInp['POSTPROC_HIST'] = 0 - - if 'RUN_IRRIG' not in self.rqdExeInp: - self.rqdExeInp['RUN_IRRIG'] = 0 - - if 'AEROSOL_DEPOSITION' not in self.rqdExeInp: - self.rqdExeInp['AEROSOL_DEPOSITION'] = 0 - # default is global - _domain_dic=OrderedDict() - _domain_dic['MINLON']=-180. - _domain_dic['MAXLON']= 180. - _domain_dic['MINLAT']= -90. - _domain_dic['MAXLAT']= 90. 
- _domain_dic['EXCLUDE_FILE']= "''" - _domain_dic['INCLUDE_FILE']= "''" - - for key,val in _domain_dic.items() : - if key in self.rqdExeInp : - _domain_dic[key]= self.rqdExeInp[key] - self.domain_def = tempfile.NamedTemporaryFile(mode='w', delete=False) - self.domain_def.write('&domain_inputs\n') - for key,val in _domain_dic.items() : - keyn=(key+" = ").ljust(16) - valn = str(val) - if '_FILE' in key: - self.domain_def.write(keyn+ "'"+valn+"'"+'\n') - else : - self.domain_def.write(keyn+ valn +'\n') - self.domain_def.write('/\n') - self.domain_def.close() - - # make sure bcs files exist - if self.rqdExeInp['RESTART'].isdigit() and self.with_land : - if int(self.rqdExeInp['RESTART']) >= 1 : - y4m2='Y%4d/M%02d' % (self.begDates[0].year, self.begDates[0].month) - y4m2d2_h2m2='%4d%02d%02d_%02d%02d' % (self.begDates[0].year, self.begDates[0].month, - self.begDates[0].day,self.begDates[0].hour,self.begDates[0].minute) - tmpFile=self.rqdExeInp['RESTART_ID']+'.'+self.catch+'_internal_rst.'+y4m2d2_h2m2 - tmpRstDir=self.rqdExeInp['RESTART_PATH']+'/'.join([self.rqdExeInp['RESTART_ID'],'output', - self.rqdExeInp['RESTART_DOMAIN'],'rs',self.ensdirs[0],y4m2]) - catchRstFile=tmpRstDir+'/'+tmpFile - - assert os.path.isfile(catchRstFile), self.catch+'_internal_rst file [%s] does not exist!' 
%(catchRstFile) - self.in_rstfile = catchRstFile - - if int(self.rqdExeInp['RESTART']) == 1 : - tmpFile=self.rqdExeInp['RESTART_ID']+'.vegdyn_internal_rst' - tmpRstDir=self.rqdExeInp['RESTART_PATH']+'/'.join([self.rqdExeInp['RESTART_ID'],'output', - self.rqdExeInp['RESTART_DOMAIN'],'rs',self.ensdirs[0]]) - vegdynRstFile=tmpRstDir+'/'+tmpFile - if not os.path.isfile(vegdynRstFile): - assert int(self.rqdExeInp['RST_FROM_GLOBAL']) == 1, 'restart from LDASsa should be global' - - tmpFile=self.rqdExeInp['RESTART_ID']+'.landpert_internal_rst.'+y4m2d2_h2m2 - tmpRstDir=self.rqdExeInp['RESTART_PATH']+'/'.join([self.rqdExeInp['RESTART_ID'],'output', - self.rqdExeInp['RESTART_DOMAIN'],'rs',self.ensdirs[0],y4m2]) - landpertRstFile=tmpRstDir+'/'+tmpFile - if ( os.path.isfile(landpertRstFile)) : - self.has_geos_pert = True - - elif (int(self.rqdExeInp['RESTART']) == 0) : - if (self.catch == 'catch'): - self.in_rstfile = '/discover/nobackup/projects/gmao/ssd/land/l_data/LandRestarts_for_Regridding' \ - '/Catch/M09/20170101/catch_internal_rst' - self.in_tilefile = '/discover/nobackup/projects/gmao/ssd/land/l_data/geos5/bcs/CLSM_params' \ - '/mkCatchParam_SMAP_L4SM_v002/SMAP_EASEv2_M09/SMAP_EASEv2_M09_3856x1624.til' - elif (self.catch == 'catchcnclm40'): - self.in_rstfile = '/discover/nobackup/projects/gmao/ssd/land/l_data/LandRestarts_for_Regridding' \ - '/CatchCN/M36/20150301_0000/catchcnclm40_internal_dummy' - self.in_tilefile = '/discover/nobackup/projects/gmao/bcs_shared/legacy_bcs/Heracles-NL/SMAP_EASEv2_M36/SMAP_EASEv2_M36_964x406.til' - elif (self.catch == 'catchcnclm45'): - self.in_rstfile = '/discover/nobackup/projects/gmao/ssd/land/l_data/LandRestarts_for_Regridding' \ - '/CatchCN/M36/19800101_0000/catchcnclm45_internal_dummy' - self.in_tilefile = '/discover/nobackup/projects/gmao/bcs_shared/legacy_bcs/Icarus-NLv3/Icarus-NLv3_EASE/SMAP_EASEv2_M36/SMAP_EASEv2_M36_964x406.til' - else: - sys.exit('need to provide at least dummy files') - - # DEAL WITH mwRTM input from exec 
- self.assim = True if self.rqdExeInp.get('LAND_ASSIM', 'NO').upper() == 'YES' and self.with_land else False - # verify mwrtm file - if 'MWRTM_PATH' in self.rqdExeInp and self.with_land : - self.rqdExeInp['MWRTM_PATH'] = self.rqdExeInp['MWRTM_PATH']+'/'+ self.rqdExeInp['BCS_RESOLUTION']+'/' - mwrtm_param_file_ = self.rqdExeInp['MWRTM_PATH']+'mwRTM_param.nc4' - vegopacity_file_ = self.rqdExeInp['MWRTM_PATH']+'vegopacity.bin' - if os.path.isfile(mwrtm_param_file_) : - self.has_mwrtm = True - self.mwrtm_file = mwrtm_param_file_ - else : - assert not mwrtm_param_file_.strip(), ' MWRTM_PATH: %s should contain mwRTM_param.nc4'% self.rqdExeInp['MWRTM_PATH'] - del self.rqdExeInp['MWRTM_PATH'] - if os.path.isfile(vegopacity_file_) : - self.has_vegopacity = True - self.rqdExeInp['VEGOPACITY_FILE'] = vegopacity_file_ - - - # ------ - # Read rm input file - # Read (and pop from inpfile) the input required fields in to - # self.rqdRmInp. Fields left in inpDictFromFile are then - # read in to self.optRmInp - # ------ - # re-using inpDictFromFile - - if self.ladas_cpl == 0 : - inpDictFromFile = self._parseInputFile(cmdLineArgs['batinpfile']) - # REQUIRED inputs - for key in rqdRmInpKeys: - self.rqdRmInp[key] = inpDictFromFile.pop(key) - - # checks on rqd rm inputs - ## account and walltime should exist - assert self.rqdRmInp['account'] - assert self.rqdRmInp['walltime'] - ## ntasks_model is a +ve integer - _ntasks = int(self.rqdRmInp['ntasks_model']) - assert _ntasks>0 - self.rqdRmInp['ntasks_model'] = _ntasks - _ntasks = None - - # OPTIONAL inputs - for key in inpDictFromFile: - assert key in optSlurmInpKeys, \ - 'unknown resource manager key [%s]' % key - self.optRmInp[key] = inpDictFromFile[key] - else : - self.rqdRmInp['account'] = cmdLineArgs['account'] - self.rqdRmInp['walltime'] = "01:00:00" - self.rqdRmInp['ntasks_model'] = 120 - - - # print rm inputs - if self.verbose: - print ('\n\nRequired inputs for resource manager:') - _printdict(self.rqdRmInp) - print 
('\n\nOptional inputs for resource manager:') - _printdict(self.optRmInp) - print ('\n\n') - - # ------ - # set top level directories - # rundir, inpdir, outdir, blddir - # executable - # exefyl - # ------ - - self.bindir = os.path.dirname(os.path.realpath(__file__)) - self.blddir = self.bindir.rsplit('/',1)[0] - exefyl = '/bin/GEOSldas.x' - tmp_execfyl = self.blddir + exefyl - assert os.path.isfile(tmp_execfyl),\ - 'Executable [%s] does not exist!' % tmp_execfyl - self.expdir = self.exphome + '/' + self.rqdExeInp['EXP_ID'] - self.rundir = self.expdir + '/run' - self.inpdir = self.expdir + '/input' - self.outdir = self.expdir + '/output' - self.scratchdir = self.expdir + '/scratch' - self.blddirLn = self.expdir + '/build' - self.out_path = self.outdir + '/'+self.rqdExeInp['EXP_DOMAIN'] - self.bcsdir = self.outdir + '/'+self.rqdExeInp['EXP_DOMAIN']+'/rc_out/' - self.rstdir = self.outdir + '/'+self.rqdExeInp['EXP_DOMAIN']+'/rs/' - self.exefyl = self.blddirLn + exefyl - - # default is set to 0 ( no output server) - if 'oserver_nodes' not in self.optRmInp : - self.optRmInp['oserver_nodes'] = 0 - - if (int(self.optRmInp['oserver_nodes']) >=1) : - self.rqdExeInp['WRITE_RESTART_BY_OSERVER'] = "YES" - # set default for now - if 'writers-per-node' not in self.optRmInp: - self.optRmInp['writers-per-node'] = 5 - else: - self.optRmInp['writers-per-node'] = 0 - - - # end __init__ - - # ----------------------------------------------------------------------------------- - - def _parseInputFile(self, inpfile): - """ - Private method: parse input file and return a dict of options - Input: input file - Output: dict - """ - - inpdict = OrderedDict() - errstr = "line [%d] of [%s] is not in the form 'key: value'" - - # determine which default values to pick from GEOS_SurfaceGridComp.rc - if self.ladas_cpl == 0 : - use_rc_defaults = 'GEOSldas=>' # use defaults for LDAS - else : - use_rc_defaults = 'GEOSagcm=>' # use defaults for AGCM - - fin = open(inpfile, 'r') - linenum = 0 - for line 
in fin: - linenum += 1 - line = line.strip() - # blank line - if not line: - continue - if '"GEOSagcm=>"' in line: # echo lines that contain "GEOSagcm=>" (w/ quotation marks) [GEOS_SurfaceGridComp.rc] - continue - if '"GEOSldas=>"' in line: # echo lines that contain "GEOSldas=>" (w/ quotation marks) [GEOS_SurfaceGridComp.rc] - continue - # get 'GEOSldas=>' or 'GEOSagcm=>' defaults in GEOS_SurfaceGridComp.rc - if use_rc_defaults in line: - line = line.split(use_rc_defaults)[1] - # handle comments - position = line.find('#') - if position==0: # comment line - continue - if position>0: # strip out comment - line = line[:position] - # we expect a line to be of the form - # key : value - assert ':' in line, errstr % (linenum, inpfile) - - key, val = line.split(':',1) - key = key.strip() - val = val.strip() - if not key or not val: - print ("WARNING: " + errstr % (linenum, inpfile)) - continue - #raise Exception(errstr % (linenum, inpfile)) - if key in inpdict: - raise Exception('Duplicate key [%s] in [%s]' % (key, inpfile)) - inpdict[key] = val.strip() - fin.close() - - return inpdict - - # ----------------------------------------------------------------------------------- - - def _mkdir_p(self,path): - """ - Private method: implement 'mkdir -p' functionality - """ - - if os.path.isdir(path): - return - else: - os.makedirs(path) - - # ----------------------------------------------------------------------------------- - - def createDirStructure(self): - """ - Create required dir structure - """ - - status = False - - # shorthands - _nens = self.nens - - # run/inp/wrk dirs - self._mkdir_p(self.exphome+'/'+self.rqdExeInp['EXP_ID']) - self._mkdir_p(self.rundir) - self._mkdir_p(self.inpdir) - self._mkdir_p(self.outdir) - self._mkdir_p(self.scratchdir) - - #-start-shorthand-function- - def _getDirName(outtyp, ensdir, yyyymm): - return '/'.join([ - self.outdir, - self.rqdExeInp['EXP_DOMAIN'], - outtyp, # ana/cat/rs/rc_out - ensdir, - yyyymm - ]) - #-end-shorthand-function- - - 
# met forcing dir - myMetDir = self.inpdir + '/met_forcing' - self._mkdir_p(myMetDir) - - # ensxxxx directories - nSegments = self.nSegments - for iseg in range(nSegments): - _start = self.begDates[iseg] - _end = self.endDates[iseg] - - # Yyyyy/Mmm between StartDateTime and EndDateTime - newDate = _start - y4m2_list = [('Y%4d/M%02d' % (newDate.year, newDate.month))] - while newDate<_end: - newDate += relativedelta(months=1) - y4m2_list.append('Y%4d/M%02d' % (newDate.year, newDate.month)) - - # ExpDomain/ana/, /cat/ directories - for ensdir in self.ensdirs_avg: - for y4m2 in y4m2_list: - self._mkdir_p(_getDirName('ana', ensdir, y4m2)) - self._mkdir_p(_getDirName('cat', ensdir, y4m2)) - - # ExpDomain/rs/ directories - for ensdir in self.ensdirs: - for y4m2 in y4m2_list: - self._mkdir_p(_getDirName('rs', ensdir, y4m2)) - - # ExpDomain/rc_out/ - only for _start - self._mkdir_p(_getDirName('rc_out', '', y4m2_list[0])) - - # restart dir - self._mkdir_p(self.inpdir + '/restart') - - status = True - return status - - # ----------------------------------------------------------------------------------- - - # create links to BCs, restarts, met forcing, ... 
- def createLnRstBc(self) : - # link bld dir - status = False - - _nens = self.nens - - os.symlink(self.blddir, self.blddirLn) - - # met forcing dir - self.ensemble_forcing = True if self.rqdExeInp.get('ENSEMBLE_FORCING', 'NO').upper() == 'YES' else False - - myMetPath ='' - for _i in range(self.first_ens_id, _nens + self.first_ens_id) : - str_ens = '' - if ( _nens != 1 and self.ensemble_forcing): - str_ens = '%03d'%(_i) - metpath = self.rqdExeInp['MET_PATH'].rstrip('/')+str_ens - myMetDir = self.inpdir + '/met_forcing' - myMetPath = myMetDir + '/' + metpath.split('/')[-1] - os.symlink(metpath, myMetPath) - # update 'met_path' to use relative path from outdir - if ( not self.ensemble_forcing): - break - if ( _nens !=1 and self.ensemble_forcing) : - # replace last three character with '%s" - self.rqdExeInp['MET_PATH'] = os.path.relpath(myMetPath, self.rundir)[:-3]+'%s' - else: - self.rqdExeInp['MET_PATH'] = os.path.relpath(myMetPath, self.rundir) - - # update tile file - tile= self.rqdExeInp['TILING_FILE'] - short_tile= os.path.basename(self.rqdExeInp['TILING_FILE']) - newtile = self.bcsdir+'/'+short_tile - shutil.copy(tile, newtile) - tile=newtile - # if three extra lines exist, remove them and save it to inputdir - - print ('\nCorrect the tile file if it is an old EASE tile format... 
\n') - EASEtile=self.bcsdir+'/MAPL_'+short_tile - cmd = self.bindir + '/preprocess_ldas.x correctease '+ tile + ' '+ EASEtile - print ("cmd: " + cmd) - - sp.call(shlex.split(cmd)) - - if os.path.isfile(EASEtile) : - #update tile file name - short_tile ='MAPL_'+short_tile - tile=EASEtile - # setup BC files - - catchment_def = self.rqdExeInp['CATCH_DEF_FILE'] - exp_id = self.rqdExeInp['EXP_ID'] - - _start = self.begDates[0] - _y4m2d2h2m2 ='%4d%02d%02d%02d%02d' % (_start.year, _start.month,_start.day,_start.hour,_start.minute) - - dzsf = '50.0' - if 'SURFLAY' in self.rqdExeInp : - dzsf = self.rqdExeInp['SURFLAY'] - - # These are dummy values for *cold* restart: - wemin_in = '13' # WEmin input/output for scale_catch(cn), - wemin_out = '13' # - if 'WEMIN_IN' in self.rqdExeInp : - wemin_in = self.rqdExeInp['WEMIN_IN'] - if 'WEMIN_OUT' in self.rqdExeInp : - wemin_out = self.rqdExeInp['WEMIN_OUT'] - - tmp_f2g_file = tempfile.NamedTemporaryFile(delete=False) - cmd = self.bindir +'/preprocess_ldas.x c_f2g ' + tile + ' ' + self.domain_def.name + ' '+ self.out_path + ' ' + catchment_def + ' ' + exp_id + ' ' + _y4m2d2h2m2 + ' '+ dzsf + ' ' + tmp_f2g_file.name + ' ' + '_'.join(self.tile_types) - - print ('Creating f2g file if necessary: '+ tmp_f2g_file.name +'....\n') - print ("cmd: " + cmd) - sp.call(shlex.split(cmd)) - # check if it is local or global - if os.path.getsize(tmp_f2g_file.name) !=0 : - self.isZoomIn= True - #os.remove(self.domain_def.name) - - # update tile domain - if self.isZoomIn: - newZoominTile = tile+'.domain' - print ("\nCreating local tile file :"+ newZoominTile) - print ("\nAdding 1000 to type of tiles to be excluded from domain...\n") - cmd = self.bindir +'/preprocess_ldas.x zoomin_tile ' + tile + ' ' + newZoominTile + ' '+ tmp_f2g_file.name - print ("cmd: " + cmd) - sp.call(shlex.split(cmd)) - short_tile=short_tile +'.domain' - tile = newZoominTile - - myTile=self.inpdir+'/tile.data' - os.symlink(tile,myTile) - - if self.with_land: - 
bcs=[self.rqdExeInp['GRN_FILE'], - self.rqdExeInp['LAI_FILE'], - self.rqdExeInp['NDVI_FILE'], - self.rqdExeInp['NIRDF_FILE'], - self.rqdExeInp['VISDF_FILE'] ] - if (self.rqdExeInp['LNFM_FILE'] != ''): - bcs += [self.rqdExeInp['LNFM_FILE']] - if (self.has_vegopacity): - bcs += [self.rqdExeInp['VEGOPACITY_FILE']] - bcstmp=[] - for bcf in bcs : - shutil.copy(bcf, self.bcsdir+'/') - bcstmp=bcstmp+[self.bcsdir+'/'+os.path.basename(bcf)] - bcs=bcstmp - - if self.isZoomIn: - print ("Creating the boundary files for the simulation domain...\n") - bcs_tmp=[] - for bcf in bcs : - cmd = self.bindir +'/preprocess_ldas.x zoomin_bc ' + bcf + ' '+ bcf+'.domain' + ' '+ tmp_f2g_file.name - print ("cmd: " + cmd) - sp.call(shlex.split(cmd)) - bcs_tmp=bcs_tmp+[bcf+'.domain'] - bcs=bcs_tmp - - - # link BC - print ("linking bcs...") - bcnames=['green','lai','ndvi','nirdf','visdf'] - if (self.rqdExeInp['LNFM_FILE'] != ''): - bcnames += ['lnfm'] - if (self.has_vegopacity): - bcnames += ['vegopacity'] - for bcln,bc in zip(bcnames,bcs) : - myBC=self.inpdir+'/'+bcln+'.data' - os.symlink(bc,myBC) - - if ("catchcn" in self.catch): - os.symlink(self.bcs_dir_landshared + 'CO2_MonthlyMean_DiurnalCycle.nc4', \ - self.inpdir+'/CO2_MonthlyMean_DiurnalCycle.nc4') - - # create and link restart - print ("Creating and linking restart...") - _start = self.begDates[0] - - y4m2='Y%4d/M%02d'%(_start.year, _start.month) - y4m2d2_h2m2 ='%4d%02d%02d_%02d%02d' % (_start.year, _start.month,_start.day,_start.hour,_start.minute) - - myRstDir = self.inpdir + '/restart/' - - rstpath = self.rqdExeInp['RESTART_PATH']+ \ - self.rqdExeInp['RESTART_ID'] + \ - '/output/'+self.rqdExeInp['RESTART_DOMAIN']+'/rs/' - rcoutpath = self.rqdExeInp['RESTART_PATH']+ \ - self.rqdExeInp['RESTART_ID'] + \ - '/output/'+self.rqdExeInp['RESTART_DOMAIN']+'/rc_out/' - - # pass into remap_config_ldas - exp_id = self.rqdExeInp['EXP_ID'] - RESTART_str = str(self.rqdExeInp['RESTART']) - YYYYMMDD = '%4d%02d%02d' % (_start.year, 
_start.month,_start.day) - YYYYMMDDHH= '%4d%02d%02d%02d' % (_start.year, _start.month,_start.day, _start.hour) - rstid = self.rqdExeInp['RESTART_ID'] - rstdomain = self.rqdExeInp['RESTART_DOMAIN'] - rstpath0 = self.rqdExeInp['RESTART_PATH'] - - # just copy the landassim pert seed if it exists - for iens in range(self.nens) : - _ensdir = self.ensdirs[iens] - _ensid = self.ensids[iens] - landassim_seeds = rstpath + _ensdir + '/' + y4m2+'/' + rstid + '.landassim_obspertrseed_rst.'+y4m2d2_h2m2 - if os.path.isfile(landassim_seeds) and self.assim : - _seeds = self.rstdir + _ensdir + '/' + y4m2+'/' + exp_id + '.landassim_obspertrseed_rst.'+y4m2d2_h2m2 - shutil.copy(landassim_seeds, _seeds) - os.symlink(_seeds, myRstDir+ '/landassim_obspertrseed'+ _ensid +'_rst') - self.has_landassim_seed = True - mk_outdir = self.exphome+'/'+exp_id+'/mk_restarts/' - - if (RESTART_str != '1' and (self.with_land or self.with_landice)): - bcs_path = self.rqdExeInp['BCS_PATH'] - while bcs_path[-1] == '/' : bcs_path = bcs_path[0:-1] - bc_base = os.path.dirname(bcs_path) - bc_version = os.path.basename(bcs_path) - - remap_tpl = os.path.dirname(os.path.realpath(__file__)) + '/remap_params.tpl' - config = yaml_to_config(remap_tpl) - - config['slurm_pbs']['account'] = self.rqdRmInp['account'] - config['slurm_pbs']['qos'] = 'debug' - - config['input']['surface']['catch_tilefile'] = self.in_tilefile - config['input']['shared']['expid'] = self.rqdExeInp['RESTART_ID'] - config['input']['shared']['yyyymmddhh'] = YYYYMMDDHH - if RESTART_str != 'M': - config['input']['shared']['rst_dir'] = os.path.dirname(self.in_rstfile)+'/' - config['input']['surface']['wemin'] = wemin_in - config['input']['surface']['catch_model'] = self.catch - - config['output']['shared']['out_dir'] = mk_outdir - config['output']['surface']['catch_remap'] = True - config['output']['surface']['catch_tilefile'] = self.rqdExeInp['TILING_FILE'] - config['output']['shared']['bc_base'] = bc_base - config['output']['shared']['bc_version'] 
= bc_version - config['output']['surface']['EASE_grid'] = self.rqdExeInp['BCS_RESOLUTION'] - - config['output']['shared']['expid'] = self.rqdExeInp['EXP_ID'] - config['output']['surface']['surflay'] = dzsf - config['output']['surface']['wemin'] = wemin_out - - if RESTART_str == "M" : # restart from merra2 - yyyymm = int(YYYYMMDDHH[0:6]) - merra2_expid = "d5124_m2_jan10" - if yyyymm < 197901 : - exit("Error. MERRA-2 data < 1979 not available\n") - elif (yyyymm < 199201): - merra2_expid = "d5124_m2_jan79" - elif (yyyymm < 200106): - merra2_expid = "d5124_m2_jan91" - elif (yyyymm < 201101): - merra2_expid = "d5124_m2_jan00" - elif (yyyymm < 202106): - merra2_expid = "d5124_m2_jan10" - # There was a rewind in MERRA2 from Jun 2021 to Sept 2021 - elif (yyyymm < 202110): - merra2_expid = "d5124_m2_jun21" - config['input']['shared']['expid'] = merra2_expid - config['input']['shared']['rst_dir'] = mk_outdir+ '/merra2_tmp_'+ YYYYMMDDHH - config['input']['surface']['wemin'] = 26 - config['input']['shared']['bc_base'] = '/discover/nobackup/projects/gmao/bcs_shared/fvInput/ExtData/esm/tiles' - config['input']['shared']['bc_version'] = 'GM4' - config['input']['shared']['agrid'] = 'C180' - config['input']['shared']['ogrid'] = '1440x720' - config['input']['shared']['omodel'] = 'data' - config['input']['shared']['MERRA-2'] = True - config['input']['surface']['catch_tilefile'] = '/discover/nobackup/projects/gmao/bcs_shared/fvInput/ExtData/esm/tiles/GM4/geometry/CF0180x6C_DE1440xPE0720/CF0180x6C_DE1440xPE0720-Pfafstetter.til' - - if self.with_land: - catch_obj = catchANDcn(config_obj = config) - catch_obj.remap() - if self.with_landice: - config['output']['surface']['remap_water'] = True - config['input']['surface']['zoom'] = '2' - landice_obj = lake_landice_saltwater(config_obj = config) - landice_obj.remap() - - #for ens in self.ensdirs : - catchRstFile0 = '' - vegdynRstFile0 = '' - landiceRstFile0 = '' - for iens in range(self.nens) : - ensdir = self.ensdirs[iens] - ensid = 
self.ensids[iens] - myCatchRst = myRstDir+'/'+self.catch +ensid +'_internal_rst' - myLandiceRst = myRstDir+'/'+ 'landice' +ensid +'_internal_rst' - myVegRst = myRstDir+'/'+'vegdyn'+ensid +'_internal_rst' - myPertRst = myRstDir+'/'+ 'landpert' +ensid +'_internal_rst' - - catchRstFile = '' - vegdynRstFile = '' - pertRstFile = '' - print ("restart: " + self.rqdExeInp['RESTART']) - - if self.rqdExeInp['RESTART'].isdigit() : - - if int(self.rqdExeInp['RESTART']) == 0 or int(self.rqdExeInp['RESTART']) == 2 : - vegdynRstFile = glob.glob(self.bcs_dir_land + 'vegdyn_*.dat')[0] - catchRstFile = glob.glob(self.exphome+'/'+exp_id+'/mk_restarts/*'+self.catch+'_internal_rst.'+YYYYMMDD+'*')[0] - else : # RESTART == 1 - catchRstFile = rstpath+ensdir +'/'+ y4m2+'/'+self.rqdExeInp['RESTART_ID']+'.'+self.catch+'_internal_rst.'+y4m2d2_h2m2 - vegdynRstFile= rstpath+ensdir +'/'+self.rqdExeInp['RESTART_ID']+ '.vegdyn_internal_rst' - if not os.path.isfile(vegdynRstFile): # no vegdyn restart from LDASsa - if not os.path.isfile(vegdynRstFile0): - vegdynRstFile = glob.glob(self.bcs_dir_land + 'vegdyn_*.dat')[0] - else : - vegdynRstFile = glob.glob(self.bcs_dir_land + 'vegdyn_*.dat')[0] - if self.with_land: - catchRstFile = glob.glob(self.exphome+'/'+exp_id+'/mk_restarts/*'+self.catch+'_internal_rst.'+YYYYMMDD+'*')[0] - - # catchment restart file - if os.path.isfile(catchRstFile) and self.with_land : - catchLocal = self.rstdir+ensdir +'/'+ y4m2+'/'+self.rqdExeInp['EXP_ID']+'.'+self.catch+'_internal_rst.'+y4m2d2_h2m2 - if self.isZoomIn : - print( "Creating local catchment restart file... 
\n") - cmd=self.bindir +'/preprocess_ldas.x zoomin_catchrst '+ catchRstFile +' ' + catchLocal + ' '+ tmp_f2g_file.name - print ("cmd: "+cmd) - sp.call(shlex.split(cmd)) - else : - shutil.copy(catchRstFile,catchLocal) - - catchRstFile = catchLocal - - if '0000' in ensdir : - catchRstFile0 = catchRstFile - else : # re-use 0000 catch file - catchRstFile = catchRstFile0 - - # vegdyn restart file - if os.path.isfile(vegdynRstFile) and self.with_land : - vegdynLocal = self.rstdir+ensdir +'/'+self.rqdExeInp['EXP_ID']+'.vegdyn_internal_rst' - if self.isZoomIn : - print ("Creating the local veg restart file... \n") - cmd=self.bindir + '/preprocess_ldas.x zoomin_vegrst '+ vegdynRstFile +' ' + vegdynLocal + ' '+ tmp_f2g_file.name - print ("cmd: " + cmd) - sp.call(shlex.split(cmd)) - else : - shutil.copy(vegdynRstFile,vegdynLocal) - - vegdynRstFile = vegdynLocal - - if '0000' in ensdir : - vegdynRstFile0 = vegdynRstFile - else : - vegdynRstFile = vegdynRstFile0 - - landiceRstFile = '' - if self.with_landice : - if self.rqdExeInp['RESTART'].isdigit(): - if int(self.rqdExeInp['RESTART']) == 0 or int(self.rqdExeInp['RESTART']) == 2 : - print("RESTART=0 and RESTART=2 not supported for landice tiles. Please use RESTART=M (MERRA-2).") - landiceRstFile = rstpath+ensdir +'/'+ y4m2+'/'+self.rqdExeInp['RESTART_ID']+'.'+'landice_internal_rst.'+y4m2d2_h2m2 - else: - landiceRstFile = glob.glob(self.exphome+'/'+exp_id+'/mk_restarts/*'+'landice_internal_rst.'+YYYYMMDD+'*')[0] - - if os.path.isfile(landiceRstFile) : - landiceLocal = self.rstdir+ensdir +'/'+ y4m2+'/'+self.rqdExeInp['EXP_ID']+'.landice_internal_rst.'+y4m2d2_h2m2 - if self.isZoomIn : - print ("Creating zoom-in of landice restart file... 
\n") - cmd=self.bindir + '/preprocess_ldas.x zoomin_landicerst '+ landiceRstFile +' ' + landiceLocal + ' '+ tmp_f2g_file.name - print ("cmd: " + cmd) - sp.call(shlex.split(cmd)) - else : - shutil.copy(landiceRstFile,landiceLocal) - - landiceRstFile = landiceLocal - - if '0000' in ensdir : - landiceRstFile0 = landiceRstFile - else : - landiceRstFile = landiceRstFile0 - - if (self.has_geos_pert and self.perturb == 1) : - pertRstFile = rstpath+ensdir +'/'+ y4m2+'/'+self.rqdExeInp['RESTART_ID']+'.landpert_internal_rst.'+y4m2d2_h2m2 - pertLocal = self.rstdir+ensdir +'/'+ y4m2+'/'+self.rqdExeInp['EXP_ID']+'.landpert_internal_rst.'+y4m2d2_h2m2 - shutil.copy(pertRstFile,pertLocal) - pertRstFile = pertLocal - - if self.with_land : - print ('catchRstFile: ' + catchRstFile) - print ('vegdynRstFile: ' + vegdynRstFile) - os.symlink(catchRstFile, myCatchRst) - os.symlink(vegdynRstFile, myVegRst) - if self.with_landice : - print("link landice restart: " + myLandiceRst) - os.symlink(landiceRstFile, myLandiceRst) - if ( self.has_geos_pert and self.perturb == 1 ): - os.symlink(pertRstFile, myPertRst) - - # catch_param restar file - catch_param_file = self.bcsdir+'/'+ y4m2+'/'+self.rqdExeInp['EXP_ID']+'.ldas_catparam.'+y4m2d2_h2m2+'z.bin' - if self.with_land: - assert os.path.isfile(catch_param_file), "need catch_param file %s" % catch_param_file - - if self.has_mwrtm : - mwRTMRstFile = self.mwrtm_file - mwRTMLocal = self.bcsdir+'/'+ y4m2+'/'+self.rqdExeInp['EXP_ID']+'.ldas_mwRTMparam.'+y4m2d2_h2m2+'z.nc4' - if self.isZoomIn : - print ("Creating the local mwRTM restart file... 
\n") - cmd= self.bindir +'/preprocess_ldas.x zoomin_mwrtmrst '+ mwRTMRstFile +' ' + mwRTMLocal + ' '+ tmp_f2g_file.name - - print ("cmd: " + cmd) - sp.call(shlex.split(cmd)) - else : - shutil.copy(mwRTMRstFile,mwRTMLocal) - - mwRTMRstFile = mwRTMLocal - mymwRTMRst = myRstDir+'/mwrtm_param_rst' - os.symlink(mwRTMRstFile, mymwRTMRst) - - # update 'restart_path' to use relative path from outdir - print ("Updating restart path...") - self.rqdExeInp['RESTART_PATH'] = myRstDir - #if os.path.isfile(tmp_f2g_file.name): - # os.remove(tmp_f2g_file.name) - status = True - return status - - # ----------------------------------------------------------------------------------- - - def createRCFiles(self): - """ - (1) get resource files form DEFAULT rc files from /etc - (2) update from customed rc files - (2) write rc files to the run directory - """ - - status = False - - for mydir in [self.blddirLn, self.rundir]: - assert os.path.isdir(mydir), \ - 'dir [%s] does not exist!' % mydir - - if self.ladas_cpl == 0: - # copy ldas_setup exeinp and batinp input files to rundir (for the record) - # if a file w/ the same name already exists at rundir - # append 1,2,3 etc, to the filename - ## exe inp file - exefilename = self.exeinpfile.rstrip('/').split('/')[-1] - newfilename = exefilename - _nens = self.nens - ctr = 0 - while os.path.isfile(self.rundir+'/'+newfilename): - ctr += 1 - newfilename = exefilename + '.%d' % ctr - shutil.copy(self.exeinpfile, self.rundir+'/'+newfilename) - ## bat inp file - batfilename = self.batinpfile.rstrip('/').split('/')[-1] - newfilename = batfilename - ctr = 0 - while os.path.isfile(self.rundir+'/'+newfilename): - ctr += 1 - newfilename = batfilename + '.%d' % ctr - shutil.copy(self.batinpfile, self.rundir+'/'+newfilename) - - # ----------------------------------- - - etcdir = self.blddirLn + '/etc' - - #defalt nml - default_nml = glob.glob(etcdir+'/LDASsa_DEFAULT_inputs_*.nml') - for nmlfile in default_nml: - 
shortfile=self.rundir+'/'+nmlfile.split('/')[-1] - shutil.copy2(nmlfile, shortfile) - # special nml - special_nml=[] - if self.ladas_cpl > 0: - special_nml= glob.glob(etcdir+'/LDASsa_SPECIAL_inputs_*.nml') - else : - if 'NML_INPUT_PATH' in self.rqdExeInp : - special_nml = glob.glob(self.rqdExeInp['NML_INPUT_PATH']+'/LDASsa_SPECIAL_inputs_*.nml') - - for nmlfile in special_nml: - shortfile=self.rundir+'/'+nmlfile.split('/')[-1] - shutil.copy2(nmlfile, shortfile) - - if self.ladas_cpl > 0: - # edit resolution info in ensupd nml file - sp.run(['sed', '-i', 's//'+self.agcm_res+'/g', self.rundir+'/LDASsa_SPECIAL_inputs_ensupd.nml']) - - # get optimzed NX and IMS - optimized_distribution_file = tempfile.NamedTemporaryFile(delete=False) - print ("Optimizing... decomposition of processes.... \n") - cmd = self.bindir + '/preprocess_ldas.x optimize '+ self.inpdir+'/tile.data '+ str(self.rqdRmInp['ntasks_model']) + ' ' + optimized_distribution_file.name + ' ' + self.rundir + ' ' + '_'.join(self.tile_types) - print ("cmd: " + cmd) - print ("IMS.rc or JMS.rc would be generated on " + self.rundir) - sp.call(shlex.split(cmd)) - optinxny=self._parseInputFile(optimized_distribution_file.name) - if (int(optinxny['NX']) == 1): - if int(optinxny['NY']) != int(self.rqdRmInp['ntasks_model']): - self.rqdRmInp['ntasks_model']=optinxny['NY'] - print ('adjust ntasks_model %d for cubed-sphere grid' % int(self.rqdRmInp['ntasks_model'])) - - - #os.remove(optimized_distribution_file.name) - - # DEFAULT rc files - default_rc = glob.glob(etcdir+'/GEOSldas_*.rc') - assert len(default_rc)==6 - print (default_rc) - for rcfile in default_rc: - shortfile=rcfile.rsplit('GEOSldas_',1)[1] - print (shortfile + ' ' + etcdir + ' ' + self.rundir) - if shortfile =='HIST.rc': - tmprcfile=self.rundir+'/HISTORY.rc' - histrc_file=rcfile - - _file_found = False - if 'HISTRC_FILE' in self.rqdExeInp : - _tmpfile = self.rqdExeInp['HISTRC_FILE'].replace("'",'').replace('"','') - if(os.path.isfile(_tmpfile)) : - 
_file_found = True - else : - assert not _tmpfile.strip(), "HISTRC_FILE: %s is NOT a file. " %_tmpfile - - if _file_found : - histrc_file = self.rqdExeInp['HISTRC_FILE'] - shutil.copy2(histrc_file,tmprcfile) - else : - shutil.copy2(histrc_file,tmprcfile) - if 'EASE' in self.rqdExeInp['GRIDNAME'] : - TMPSTR='OUT1d' - else : - TMPSTR='OUT2d' - cmd = self.bindir +'/process_hist.csh' + ' ' \ - + tmprcfile + ' ' \ - + TMPSTR + ' ' \ - + self.rqdExeInp['GRIDNAME'] + ' ' \ - + str(self.rqdExeInp['LSM_CHOICE']) + ' ' \ - + str(self.rqdExeInp['AEROSOL_DEPOSITION']) + ' ' \ - + str(self.rqdExeInp['RUN_IRRIG']) + ' ' \ - + str(self.nens) - print(cmd) - #os.system(cmd) - sp.call(shlex.split(cmd)) - for line in fileinput.input(tmprcfile,inplace=True): - print (line.rstrip().replace('GEOSldas_expid',self.rqdExeInp['EXP_ID'])) - - # if coupled land-atm DAS, always use either GEOSldas_HISTdet.rc or GEOSldas_HISTens.rc (depending on ladas_cpl) - if ( shortfile =='HISTdet.rc' and self.ladas_cpl == 1 ) or ( shortfile =='HISTens.rc' and self.ladas_cpl == 2 ): - tmprcfile=self.rundir+'/HISTORY.rc' - histrc_file=rcfile - shutil.copy2(rcfile, tmprcfile) - for line in fileinput.input(tmprcfile,inplace=True): - print (line.rstrip().replace('GEOSldas_expid',self.rqdExeInp['EXP_ID'])) - for line in fileinput.input(tmprcfile,inplace=True): - print (line.rstrip().replace('GRIDNAME',self.rqdExeInp['GRIDNAME'])) - - # just copy an empty ExtData.rc - if shortfile=='ExtData.rc' : - shutil.copy2(rcfile, self.rundir+'/'+shortfile) - - if shortfile == 'CAP.rc': - tmprcfile = self.rundir+'/CAP.rc' - shutil.copy2(rcfile,tmprcfile) - - _num_sgmt = int(self.rqdExeInp['NUM_SGMT']) - - for line in fileinput.input(tmprcfile,inplace=True): - print (line.rstrip().replace('JOB_SGMT:',self.job_sgmt[0])) - for line in fileinput.input(tmprcfile,inplace=True): - print (line.rstrip().replace('NUM_SGMT:','NUM_SGMT: %d'% _num_sgmt)) - for line in fileinput.input(tmprcfile,inplace=True): - print 
(line.rstrip().replace('BEG_DATE:',self.begDates[ 0].strftime('BEG_DATE: %Y%m%d %H%M%S'))) - for line in fileinput.input(tmprcfile,inplace=True): - print (line.rstrip().replace('END_DATE:',self.endDates[-1].strftime('END_DATE: %Y%m%d %H%M%S'))) - - if shortfile == 'LDAS.rc' : - ldasrcInp = OrderedDict() - # land default - default_surfrcInp = self._parseInputFile(etcdir+'/GEOS_SurfaceGridComp.rc') - for key,val in default_surfrcInp.items() : - ldasrcInp[key] = val - - # ldas default, may overwrite land default - default_ldasrcInp = self._parseInputFile(rcfile) - for key,val in default_ldasrcInp.items() : - ldasrcInp[key] = val - - # exeinp, may overwrite ldas default - for key,val in self.rqdExeInp.items(): - if key not in self.NoneLDASrcKeys: - ldasrcInp[key]= val - - # overide by optimized distribution - #for key,val in optinxny.items(): - # ldasrcInp[key]= val - - # create BC in rc file - tmpl_ = '' - if self.nens >1 : - tmpl_='%s' - if self.perturb == 1: - ldasrcInp['PERTURBATIONS'] ='1' - rstkey =[] - rstval =[] - if self.with_land : - bcval=['../input/green','../input/lai','../input/lnfm','../input/ndvi','../input/nirdf','../input/visdf'] - bckey=['GREEN','LAI','LNFM','NDVI','NIRDF','VISDF'] - for key, val in zip(bckey,bcval): - keyn = key+'_FILE' - valn = val+'.data' - ldasrcInp[keyn]= valn - if('catchcn' in self.catch): - ldasrcInp['CO2_MonthlyMean_DiurnalCycle_FILE']= '../input/CO2_MonthlyMean_DiurnalCycle.nc4' - else: - # remove catchcn-specific entries that do not apply to catch model - ldasrcInp.pop('DTCN',None) - ldasrcInp.pop('ATM_CO2',None) - ldasrcInp.pop('CO2',None) - ldasrcInp.pop('CO2_YEAR',None) - ldasrcInp.pop('PRESCRIBE_DVG',None) - - # create restart item in RC - catch_ = self.catch.upper() - - if catch_+'_INTERNAL_RESTART_TYPE' in ldasrcInp : - # avoid duplicate - del ldasrcInp[ catch_ +'_INTERNAL_RESTART_TYPE'] - if catch_+'_INTERNAL_CHECKPOINT_TYPE' in ldasrcInp : - # avoid duplicate - del ldasrcInp[ catch_ +'_INTERNAL_CHECKPOINT_TYPE'] - 
if 'VEGDYN_INTERNAL_RESTART_TYPE' in ldasrcInp : - # avoid duplicate - del ldasrcInp['VEGDYN_INTERNAL_RESTART_TYPE'] - - rstkey.append(catch_) - rstkey.append('VEGDYN') - rstval.append(self.catch) - rstval.append('vegdyn') - - if self.with_landice: - rstkey.append('LANDICE') - rstval.append('landice') - - if self.has_mwrtm : - keyn='LANDASSIM_INTERNAL_RESTART_FILE' - valn='../input/restart/mwrtm_param_rst' - ldasrcInp[keyn]= valn - if self.has_vegopacity : - keyn='VEGOPACITY_FILE' - valn='../input/vegopacity.data' - ldasrcInp[keyn]= valn - - if self.nens > 1 : - keyn='ENS_ID_WIDTH' - valn=str(self.ens_id_width) - ldasrcInp[keyn]= valn - - if self.has_landassim_seed and self.assim : - keyn='LANDASSIM_OBSPERTRSEED_RESTART_FILE' - valn='../input/restart/landassim_obspertrseed'+tmpl_+'_rst' - ldasrcInp[keyn]= valn - - if self.assim: - keyn='LANDASSIM_OBSPERTRSEED_CHECKPOINT_FILE' - valn='landassim_obspertrseed'+tmpl_+'_checkpoint' - ldasrcInp[keyn]= valn - - for key,val in zip(rstkey,rstval) : - keyn = key+ '_INTERNAL_RESTART_FILE' - valn = '../input/restart/'+val+tmpl_+'_internal_rst' - ldasrcInp[keyn]= valn - - # checkpoint file and its type - if self.with_land : - keyn = catch_ + '_INTERNAL_CHECKPOINT_FILE' - valn = self.catch+tmpl_+'_internal_checkpoint' - ldasrcInp[keyn]= valn - - if self.with_landice : - keyn = 'LANDICE_INTERNAL_CHECKPOINT_FILE' - valn = 'landice'+tmpl_+'_internal_checkpoint' - ldasrcInp[keyn]= valn - # specify LANDPERT restart file - if (self.perturb == 1): - keyn = 'LANDPERT_INTERNAL_RESTART_FILE' - valn = '../input/restart/landpert'+tmpl_+'_internal_rst' - ldasrcInp[keyn]= valn - # for lat/lon and EASE tile space, specify LANDPERT checkpoint file here (via MAPL); - # for cube-sphere tile space, Landpert GC will set up LANDPERT checkpoint file - if ('-CF' not in self.rqdExeInp['GRIDNAME']): - keyn = 'LANDPERT_INTERNAL_CHECKPOINT_FILE' - valn = 'landpert'+tmpl_+'_internal_checkpoint' - ldasrcInp[keyn]= valn - - # add items for stretched grid - 
if '-SG' in self.rqdExeInp['BCS_RESOLUTION']: - pos_ = self.rqdExeInp['BCS_RESOLUTION'].find('-SG') - SG = self.rqdExeInp['BCS_RESOLUTION'][pos_+1:pos_+6] # get ID of stretched grid (e.g., SG002) - ldasrcInp['STRETCH_FACTOR'] = STRETCH_GRID[SG][0] - ldasrcInp['TARGET_LAT'] = STRETCH_GRID[SG][1] - ldasrcInp['TARGET_LON'] = STRETCH_GRID[SG][2] - - # write LDAS.rc - fout =open(self.rundir+'/'+shortfile,'w') - # ldasrcInp['NUM_LDAS_ENSEMBLE']=ldasrcInp.pop('NUM_ENSEMBLE') - for key,val in optinxny.items(): - keyn=(key+":").ljust(36) - fout.write(keyn+str(val)+'\n') - for key,val in ldasrcInp.items() : - keyn=(key+":").ljust(36) - fout.write(keyn+str(val)+'\n') - fout.write("OUT_PATH:".ljust(36)+self.out_path+'\n') - fout.write("EXP_ID:".ljust(36)+self.rqdExeInp['EXP_ID']+'\n') - fout.write("TILING_FILE:".ljust(36)+"../input/tile.data\n") - - fout.close() - - fout=open(self.rundir+'/'+'cap_restart','w') - #fout.write(self.rqdExeInp['BEG_DATE']) - fout.write(self.begDates[0].strftime('%Y%m%d %H%M%S')) - fout.close() - status=True - return status - - # ----------------------------------------------------------------------------------- - - def createBatchRun(self): - """ - """ - - status = False - - os.chdir(self.rundir) - fout =open(self.rundir+'/ldas_batchrun.j','w') - fout.write("#!/bin/bash -f\n") - jobid = None - SBATCHQSUB = 'sbatch' - expid = self.rqdExeInp['EXP_ID'] - if self.GEOS_SITE == 'NAS': - SBATCHQSUB = 'qsub' - fout.write("\nsed -i 's/if($capdate<$enddate) "+SBATCHQSUB+"/#if($capdate<$enddate) "+SBATCHQSUB+"/g' lenkf.j\n\n") - nSegments = self.nSegments - for iseg in range(nSegments): - if iseg ==0 : - fout.write("jobid%d=$(echo $(sbatch lenkf.j) | cut -d' ' -f 4)\n"%(iseg)) - fout.write("echo $jobid%d\n"%iseg ) - else : - _start = self.begDates[iseg] - myDateTime = '%04d%02d%02d_%02d%02dz' % \ - (_start.year, _start.month, _start.day,_start.hour,_start.minute) - _logfile = os.path.relpath( - '/'.join([ - self.outdir, - self.rqdExeInp['EXP_DOMAIN'], - 
'rc_out', - 'Y%04d' % _start.year, - 'M%02d' % _start.month, - '.'.join([expid, 'ldas_log', myDateTime, 'txt']), - ]), - self.rundir) - _errfile = os.path.relpath( - '/'.join([ - self.outdir, - self.rqdExeInp['EXP_DOMAIN'], - 'rc_out', - 'Y%04d' % _start.year, - 'M%02d' % _start.month, - '.'.join([expid, 'ldas_err', myDateTime, 'txt']), - ]), - self.rundir) - - #fout.write("jobid%d=$(echo $(sbatch --dependency=afterany:$jobid%d --output=%s --error=%s lenkf.j) | cut -d' ' -f 4)\n"%(iseg,iseg-1,_logfile, _errfile)) - fout.write("jobid%d=$(echo $(sbatch --dependency=afterok:$jobid%d lenkf.j) | cut -d' ' -f 4)\n"%(iseg,iseg-1)) - fout.write("echo $jobid%d\n"%iseg ) - fout.write("\nsed -i 's/#if($capdate<$enddate) "+SBATCHQSUB+"/if($capdate<$enddate) "+SBATCHQSUB+"/g' lenkf.j\n\n") - fout.close() - - sp.call(['chmod', '755', self.rundir+'/ldas_batchrun.j']) - status = True - return status - - # ----------------------------------------------------------------------------------- - - def createRunScripts(self): - """ - """ - - status = False - - os.chdir(self.rundir) - - my_qos='allnccs' - if self.GEOS_SITE == 'NAS': my_qos = 'normal' - if 'qos' in self.optRmInp : - my_qos = self.optRmInp['qos'] - - my_job=self.rqdExeInp['EXP_ID'] - if 'job_name' in self.optRmInp : - my_job = self.optRmInp['job_name'] - - start = self.begDates[0] - expid = self.rqdExeInp['EXP_ID'] - myDateTime = '%04d%02d%02d_%02d%02dz' % \ - (start.year, start.month, start.day,start.hour,start.minute) - my_logfile = os.path.relpath( - '/'.join([ - self.outdir, - self.rqdExeInp['EXP_DOMAIN'], - 'rc_out', - 'Y%04d' % start.year, - 'M%02d' % start.month, - '.'.join([expid, 'ldas_log', myDateTime, 'txt']), - ]), - self.rundir) - my_errfile = os.path.relpath( - '/'.join([ - self.outdir, - self.rqdExeInp['EXP_DOMAIN'], - 'rc_out', - 'Y%04d' % start.year, - 'M%02d' % start.month, - '.'.join([expid, 'ldas_err', myDateTime, 'txt']), - ]), - self.rundir) - - constraint = '"[mil|cas]"' - if self.GEOS_SITE == "NAS" : 
- constraint = 'cas_ait' - - if 'constraint' in self.optRmInp: - constraint = self.optRmInp['constraint'] - - SBATCHQSUB = 'sbatch' - if self.GEOS_SITE == 'NAS': - SBATCHQSUB = 'qsub' - - DETECTED_MPI_STACK = "@MPI_STACK@" - - job_head = job_directive[self.GEOS_SITE] - lenkf_str= (job_head+job_body).format( - SBATCHQSUB = SBATCHQSUB, - MY_ACCOUNT = self.rqdRmInp['account'], - MY_WALLTIME = self.rqdRmInp['walltime'], - MY_NTASKS_MODEL = str(self.rqdRmInp['ntasks_model']), - MY_CONSTRAINT = constraint, - MY_OSERVER_NODES = str(self.optRmInp['oserver_nodes']), - MY_WRITERS_NPES = str(self.optRmInp['writers-per-node']), - MY_QOS = my_qos, - MY_JOB = my_job, - MY_EXPID = self.rqdExeInp['EXP_ID'], - MY_EXPDOMAIN = self.rqdExeInp['EXP_DOMAIN'], - MY_LOGFILE = my_logfile, - MY_ERRFILE = my_errfile, - MY_LANDMODEL = self.catch, - MY_POSTPROC_HIST = str(self.rqdExeInp['POSTPROC_HIST']), - MY_FIRST_ENS_ID = str(self.first_ens_id), - MY_LADAS_COUPLING = str(self.ladas_cpl), - MY_ENSEMBLE_FORCING= self.rqdExeInp.get('ENSEMBLE_FORCING', 'NO').upper(), - MY_ADAS_EXPDIR = self.adas_expdir, - MY_EXPDIR = self.expdir, - DETECTED_MPI_STACK = DETECTED_MPI_STACK, - ) - - with open('lenkf.j','wt') as fout : - fout.write(lenkf_str) - sp.call(['chmod', '755', 'lenkf.j']) - - print ('\nExperiment directory: %s' % self.expdir) - print () - status = True - return status - -# ----------------------------------------------------------------------------------- - -def _printdict(d): - """ - Private method: print a 'flat' dictionary - """ - - for key, val in d.items(): - print (key.ljust(23), ':', val) - -# ----------------------------------------------------------------------------------- - -def _produceExeInput(out_dict=None,ladas_cpl=0): - """ - Private method: (1) Print sample exeinp file to stdout for offline GEOSldas setup: - _produceExeInput(). 
- *or* - (2) Create dictionary w/ default parameters from GEOSldas_LDAS.rc and - GEOS_SurfaceGridComp.rc for coupled land-atm DAS setup: - _produceExeInput(out_dict, ladas_cpl=[1,2]). - """ - - if ladas_cpl > 0: - assert out_dict is not None , " Need out_dict to hold the default parameters" - - # stand-alone (offline) LDAS: - # sample exeinp file includes placeholders for inputs that the user needs to provide - if ladas_cpl == 0: - print ('####################################################################################') - print ('# #') - print ('# REQUIRED INPUTS #') - print ('# #') - print ('# These inputs are needed to set up output dir structure. #') - print ('# #') - print ('####################################################################################') - print () - print ('############################################################') - print ('# #') - print ('# EXPERIMENT INFO #') - print ('# #') - print ('# Format for start/end times is yyyymmdd hhmmss. #') - print ('# #') - print ('############################################################') - print () - print ('EXP_ID:') - print ('EXP_DOMAIN:') - print ('NUM_LDAS_ENSEMBLE:') - print ('BEG_DATE:') - print ('END_DATE:') - print () - print ('############################################################') - print ('# #') - print ('# RESTART INFO #') - print ('# #') - print ('# (i) Select "RESTART" option: #') - print ('# #') - print ('# Use one of the following options if you *have* a #') - print ('# GEOSldas restart file: #') - print ('# #') - print ('# RESTART: 1 #') - print ('# YES, have restart file from GEOSldas #') - print ('# in SAME tile space (grid) with SAME boundary #') - print ('# conditions and SAME snow model parameter (WEMIN). #') - print ('# The restart domain can be for the same or #') - print ('# a larger one. 
#') - print ('# #') - print ('# RESTART: 2 #') - print ('# YES, have restart file from GEOSldas but #') - print ('# in a DIFFERENT tile space (grid) or with #') - print ('# DIFFERENT boundary conditions or DIFFERENT snow #') - print ('# model parameter (WEMIN). #') - print ('# Restart *must* be for the GLOBAL domain. #') - print ('# #') - print ('# Use one of the following options if you DO NOT have a #') - print ('# GEOSldas restart file #') - print ('# (works for global domain ONLY!): #') - print ('# #') - print ('# RESTART: 0 #') - print ('# Cold start from some old restart for Jan 1, 0z. #') - print ('# #') - print ('# RESTART: M #') - print ('# Re-tile from archived MERRA-2 restart file. #') - print ('# #') - print ('# -------------------------------------------------------- #') - print ('# IMPORTANT: #') - print ('# Except for RESTART=1, SPIN-UP is REQUIRED in almost #') - print ('# all cases. #') - print ('# -------------------------------------------------------- #') - print ('# #') - print ('# #') - print ('# (ii) Specify experiment ID/location of restart file: #') - print ('# #') - print ('# For RESTART=1 or RESTART=2: #') - print ('# Specify RESTART_ID, RESTART_PATH, RESTART_DOMAIN with #') - print ('# restarts stored as follows: #') - print ('# RESTART_PATH/RESTART_ID/output/RESTART_DOMAIN/rs/ #') - print ('# #') - print ('# For RESTART=0 or RESTART=M: #') - print ('# There is no need to specify RESTART_ID, RESTART_PATH, #') - print ('# and RESTART_DOMAIN. #') - print ('# #') - print ('############################################################') - print () - print ('RESTART:') - print ('#RESTART_ID:') - print ('#RESTART_PATH:') - print ('#RESTART_DOMAIN:') - print () - print ('############################################################') - print ('# #') - print ('# SURFACE METEOROLOGICAL FORCING #') - print ('# #') - print ('# Surface meteorological forcing time step is in seconds. 
#') - print ('# #') - print ('# NOTE: #') - print ('# When forcing is on cube-sphere (CS) grid, must use: #') - print ('# - Model tile space (BCS) derived from same CS grid. #') - print ('# - Nearest-neighbor interpolation (MET_HINTERP: 0). #') - print ('# #') - print ('# For more information, see: #') - print ('# GEOSldas/doc/README.MetForcing_and_BCS.md #') - print ('# #') - print ('############################################################') - print () - print ('MET_TAG:') - print ('MET_PATH:') - print ('FORCE_DTSTEP:') - print () - print ('############################################################') - print ('# #') - print ('# LAND BOUNDARY CONDITIONS (BCS) #') - print ('# #') - print ('# Path to and (atmospheric) resolution of BCS. #') - print ('# Path includes BCS_VERSION. #') - print ('# #') - print ('# For more information, see: #') - print ('# GEOSldas/doc/README.MetForcing_and_BCS.md #') - print ('# [..]/GEOSsurface_GridComp/Utils/Raster/make_bcs #') - print ('# #') - print ('############################################################') - print () - print ('BCS_PATH:') - print ('BCS_RESOLUTION:') - print () - print ('############################################################') - - # end if ladas_cpl==0 - - # add defaults from rc template files - - current_directory = os.path.dirname(__file__) # path where ldas_setup is - - # rc template files are in [current_directory]/../etc/ - - # add defaults from GEOSldas_LDAS.rc - - _fn = current_directory+'/../etc/GEOSldas_LDAS.rc' - lines = [] - with open(_fn) as _f: - i_ = 1 - for line in _f: - if ( i_ < 5 or i_ >10): # ignore lines 5-10 - may need to change if GEOSldas_LDAS.rc is edited - if ladas_cpl == 0: - sys.stdout.write(line) - sys.stdout.flush() - else: - lines.append(line) - i_ += 1 - print () - print () - - # add land model parameter defaults from GEOS_SurfaceGridComp.rc - - _fn = current_directory+'/../etc/GEOS_SurfaceGridComp.rc' - - if ladas_cpl == 0 : - use_rc_defaults = 'GEOSldas=>' # use 
defaults for LDAS - else : - use_rc_defaults = 'GEOSagcm=>' # use defaults for AGCM - - with open(_fn) as _f : - i_ = 1 - for line in _f: - i_ +=1 - # skip over lines 5-21 (content does not apply after this processing) - may need to change if GEOS_SurfaceGridComp.rc is edited - if ( 5<=i_ and i_<=21 ) : - continue - if ladas_cpl == 0: - # process lines that contain string "use_rc_defaults" - if use_rc_defaults in line: - line0 = line.split(use_rc_defaults)[1] - sys.stdout.write(line0) - sys.stdout.flush() - # echo blank lines and comment lines (except if they contain 'GEOSldas=>' or 'GEOSagcm=>') - if (not 'GEOSldas=>' in line) and (not 'GEOSagcm=>' in line) : - if (not line.strip()) or line.strip().startswith('#') : - sys.stdout.write(line) - sys.stdout.flush() - else: - # process lines that contain string "use_rc_defaults" - if use_rc_defaults in line: - line0 = line.split(use_rc_defaults)[1] - # strip out inline comment (if present) - position = line0.find('#') - if position>0: - line0 = line0[:position] - # extract key/value pair and add to dictionary - key, val = line0.split(":",1) - out_dict[key.strip()] = val.strip() - - - print () - print () - - -# ----------------------------------------------------------------------------------- - -def _printRmInputKeys(rqdRmInpKeys, optRmInpKeys): - """ - Private method: print sample resource manager input - """ - - print ('#') - print ('# REQUIRED inputs') - print ('#') - print ('# NOTE:') - print ('# - account = computational project number') - print ('# [At NCCS: Use command "getsponsor" to see available account number(s).]' ) - print ('# - walltime = walltime requested; format is HH:MM:SS (hours/minutes/seconds)') - print ('# - ntasks_model = number of processors requested for the model (typically 126; output server is not included)') - print ('#') - for key in rqdRmInpKeys: - print (key + ':') - print () - print ('#') - print ('# OPTIONAL inputs') - print ('#') - print ('# NOTE:') - print ('# - job_name = name of 
experiment; default is "exp_id"') - print ('# - qos = quality-of-service; do not specify by default; specify "debug" for faster but limited service.') - print ('# - oserver_nodes = number of nodes for oserver ( default is 0, for future use )') - print ('# - writers-per-node = tasks per oserver_node for writing ( default is 5, for future use ),') - print ('# IMPORTANT REQUIREMENT: total #writers = writers-per-node * oserver_nodes >= 2') - print ('# Jobs will hang when oserver_nodes = writers-per-node = 1.') - print ('# - constraint = name of chip set(s) (NCCS default is "[mil|cas]", NAS default is "cas_ait").') - print ('#') - for key in optRmInpKeys: - print ('#'+key + ':') - -# ----------------------------------------------------------------------------------- - def parseCmdLine(): """ parse command line arguments and return a dict of options @@ -2013,48 +141,38 @@ def parseCmdLine(): return p.parse_args() - - -def hours_to_hhmmss(hours): - - # Convert hours to timedelta - td = timedelta(hours=hours) - - # Extract hours, minutes, seconds - total_seconds = int(td.total_seconds()) - hours, remainder = divmod(total_seconds, 3600) - minutes, seconds = divmod(remainder, 60) - - # Format as HHMMSS - return f"{hours:02d}{minutes:02d}{seconds:02d}" - - - if __name__=='__main__': resource.setrlimit(resource.RLIMIT_STACK, (resource.RLIM_INFINITY, resource.RLIM_INFINITY)) - #print "reading params...." 
- args = vars(parseCmdLine()) # vars converts to dict - ld = LDASsetup(args) + #convert command line to dictionary + args = vars(parseCmdLine()) + + #./ldas_setup sample sub-command + # print input sample file then exit + if 'exeinp' in args: + printInputSampleFile(args) + sys.exit(0) + + # start ./ldas_setup setup sub-command + + ldasObj = ldas(args) print ("creating dir structure") - status = ld.createDirStructure() + status = ldasObj.createDirStructure() assert(status) print ("creating links to restarts, BCs, met forcing, ...") - status = ld.createLnRstBc() + status = ldasObj.createLnRstBc() assert(status) print ("creating RC Files") - status = ld.createRCFiles() + status = ldasObj.createRCFiles() assert(status) print ("creating gcm style batch Run scripts lenkf.j") - status = ld.createRunScripts() + status = ldasObj.createRunScripts() assert(status) print ("creating batch Run scripts") - status = ld.createBatchRun() + status = ldasObj.createBatchRun() assert (status) - -# =================== EOF ======================================================================= diff --git a/GEOSldas_App/lenkf_j_template.py b/GEOSldas_App/lenkf_j_template.py index 0ed56fd0..60443ff0 100644 --- a/GEOSldas_App/lenkf_j_template.py +++ b/GEOSldas_App/lenkf_j_template.py @@ -16,6 +16,7 @@ #SBATCH --job-name={MY_JOB} #SBATCH --qos={MY_QOS} #SBATCH --constraint={MY_CONSTRAINT} +{MY_NODES} ''' , "NAS": '''#!/bin/csh -f @@ -517,11 +518,9 @@ if($file_ext == nc4) then /bin/mv $ofile $THISDIR/. else - set binfile = `echo $ofile | rev | cut -d'.' -f2- | rev` - set decr_file = `echo $ofile | rev | cut -d'.' -f3- | rev`.ctl - ($GEOSBIN/tile_bin2nc4.x $binfile $decr_file $TILECOORD ; \\ - /bin/mv ${{binfile}}.nc4 $THISDIR/. 
; \\ - /bin/rm ${{binfile}}.bin) & + set rc = -1 + echo "ERROR: detected unexpected binary output file(s), exit without post-processing" + exit $rc endif end wait diff --git a/GEOSldas_App/setup_utils.py b/GEOSldas_App/setup_utils.py new file mode 100644 index 00000000..e5c650d8 --- /dev/null +++ b/GEOSldas_App/setup_utils.py @@ -0,0 +1,312 @@ +#!/usr/bin/env python3 + +import os +import sys + +from collections import OrderedDict +from datetime import timedelta + + +def generate_echo(inpfile, ladas_cpl = 0): + """ + Echo generator of inpfile, ignore line starts with "## " + """ + if ladas_cpl == 0 : + use_rc_defaults = 'GEOSldas=>' # use defaults for LDAS + else : + use_rc_defaults = 'GEOSagcm=>' # use defaults for AGCM + + with open (inpfile) as fin : + for line in fin: + if use_rc_defaults in line: + line = line.split(use_rc_defaults)[1] + yield line + +def parseInputFile(inpfile, ladas_cpl=0): + """ + Parse the input file and return a dict of options + Inpfile : input file + Output : dict, if inpfile does not exist, return empty {} + """ + if not os.path.exists(inpfile): + assert ladas_cpl > 0, " No exeinput file only if ladas_cpl > 0" + return {} + + inpdict = OrderedDict() + errstr = "line [%d] of [%s] is not in the form 'key: value'" + + echoLines = generate_echo(inpfile, ladas_cpl=ladas_cpl) + + linenum = 0 + for line in echoLines: + line = line.strip() + linenum += 1 + if not line: + continue + # handle comments + position = line.find('#') + if position==0: # comment line + continue + if position>0: # strip out comment + line = line[:position] + # we expect a line to be of the form + # key : value + assert ':' in line, errstr % (linenum, inpfile) + + key, val = line.split(':',1) + key = key.strip() + val = val.strip() + if not key or not val: + print ("WARNING: " + errstr % (linenum, inpfile)) + continue + #raise Exception(errstr % (linenum, inpfile)) + if key in inpdict: + raise Exception('Duplicate key [%s] in [%s]' % (key, inpfile)) + inpdict[key] = 
val.strip() + return inpdict + +def echoInputFile(inpfile, ladas_cpl = 0): + """ + Echo inpfile, ignore line starts with "## " + """ + echoLines = generate_echo(inpfile, ladas_cpl=ladas_cpl) + + for line in echoLines: + sys.stdout.write(line) + sys.stdout.flush() + +def printExeInputSampleFile(): + """ + Print sample exeinp file to screen + """ + print ('####################################################################################') + print ('# #') + print ('# REQUIRED INPUTS #') + print ('# #') + print ('# The inputs to ldas_setup can be provided in an "exeinp" file such as this #') + print ('# sample file. #') + print ('# When ldas_setup is called by fvsetup to set up an experiment with the coupled #') + print ('# land-atm data assimilation system, the inputs to ldas_setup are provided as #') + print ('# optional command line arguments. #') + print ('# #') + print ('####################################################################################') + print () + print ('############################################################') + print ('# #') + print ('# EXPERIMENT INFO #') + print ('# #') + print ('# Format for start/end times is yyyymmdd hhmmss. #') + print ('# #') + print ('############################################################') + print () + print ('EXP_ID:') + print ('EXP_DOMAIN:') + print ('NUM_LDAS_ENSEMBLE:') + print ('BEG_DATE:') + print ('END_DATE:') + print () + print ('############################################################') + print ('# #') + print ('# RESTART INFO #') + print ('# #') + print ('# (i) Select "RESTART" option: #') + print ('# #') + print ('# Use one of the following options if you *have* a #') + print ('# GEOSldas restart file: #') + print ('# #') + print ('# RESTART: 1 #') + print ('# YES, have restart file from GEOSldas #') + print ('# in SAME tile space (grid) with SAME boundary #') + print ('# conditions and SAME snow model parameter (WEMIN). 
#') + print ('# The restart domain can be for the same or #') + print ('# a larger one. #') + print ('# #') + print ('# RESTART: 2 #') + print ('# YES, have restart file from GEOSldas but #') + print ('# in a DIFFERENT tile space (grid) or with #') + print ('# DIFFERENT boundary conditions or DIFFERENT snow #') + print ('# model parameter (WEMIN). #') + print ('# Restart *must* be for the GLOBAL domain. #') + print ('# #') + print ('# Use one of the following options if you DO NOT have a #') + print ('# GEOSldas restart file #') + print ('# (works for global domain ONLY!): #') + print ('# #') + print ('# RESTART: 0 #') + print ('# Cold start from some old restart for Jan 1, 0z. #') + print ('# #') + print ('# RESTART: M #') + print ('# Re-tile from archived MERRA-2 restart file. #') + print ('# #') + print ('# -------------------------------------------------------- #') + print ('# IMPORTANT: #') + print ('# Except for RESTART=1, SPIN-UP is REQUIRED in almost #') + print ('# all cases. #') + print ('# -------------------------------------------------------- #') + print ('# #') + print ('# #') + print ('# (ii) Specify experiment ID/location of restart file: #') + print ('# #') + print ('# For RESTART=1 or RESTART=2: #') + print ('# Specify RESTART_ID, RESTART_PATH, RESTART_DOMAIN with #') + print ('# restarts stored as follows: #') + print ('# RESTART_PATH/RESTART_ID/output/RESTART_DOMAIN/rs/ #') + print ('# #') + print ('# For RESTART=0 or RESTART=M: #') + print ('# There is no need to specify RESTART_ID, RESTART_PATH, #') + print ('# and RESTART_DOMAIN. 
#') + print ('# #') + print ('############################################################') + print () + print ('RESTART:') + print ('#RESTART_ID:') + print ('#RESTART_PATH:') + print ('#RESTART_DOMAIN:') + print () + print ('############################################################') + print ('# #') + print ('# SURFACE METEOROLOGICAL FORCING #') + print ('# #') + print ('# Surface meteorological forcing time step is in seconds. #') + print ('# #') + print ('# NOTE: #') + print ('# When forcing is on cube-sphere (CS) grid, must use: #') + print ('# - Model tile space (BCS) derived from same CS grid. #') + print ('# - Nearest-neighbor interpolation (MET_HINTERP: 0). #') + print ('# #') + print ('# For more information, see: #') + print ('# GEOSldas/doc/README.MetForcing_and_BCS.md #') + print ('# #') + print ('############################################################') + print () + print ('MET_TAG:') + print ('MET_PATH:') + print ('FORCE_DTSTEP:') + print () + print ('############################################################') + print ('# #') + print ('# LAND BOUNDARY CONDITIONS (BCS) #') + print ('# #') + print ('# Path to and (atmospheric) resolution of BCS. #') + print ('# Path includes BCS_VERSION. 
#') + print ('# #') + print ('# For more information, see: #') + print ('# GEOSldas/doc/README.MetForcing_and_BCS.md #') + print ('# [..]/GEOSsurface_GridComp/Utils/Raster/make_bcs #') + print ('# #') + print ('############################################################') + print () + print ('BCS_PATH:') + print ('BCS_RESOLUTION:') + print () + print ('############################################################') + + current_directory = os.path.dirname(__file__) # path where ldas_setup is + # rc template files are in [current_directory]/../etc/ + # add defaults from GEOSldas_LDAS.rc + _fn = current_directory+'/../etc/GEOSldas_LDAS.rc' + echoInputFile(_fn) + _fn = current_directory+'/../etc/GEOS_SurfaceGridComp.rc' + echoInputFile(_fn) + + +def getExeKeys(option): + # required keys for exe input file depending on restart option + rqdExeInpKeys = {'1' : ['EXP_ID', 'EXP_DOMAIN', 'NUM_LDAS_ENSEMBLE', 'BEG_DATE', + 'END_DATE', 'RESTART_PATH', 'RESTART_DOMAIN', 'RESTART_ID', + 'MET_TAG', 'MET_PATH', 'FORCE_DTSTEP', 'BCS_PATH', + 'BCS_RESOLUTION'], + '0' : ['EXP_ID', 'EXP_DOMAIN', 'NUM_LDAS_ENSEMBLE', 'BEG_DATE', + 'END_DATE', + 'MET_TAG', 'MET_PATH', 'FORCE_DTSTEP', 'BCS_PATH', + 'BCS_RESOLUTION'] + } + + assert option == '0' or option == '1', '"%s" option is not recognized ' % option + return rqdExeInpKeys[option] + +def getResourceKeys(option): + # ------ + # Required resource manager input fields + # ------ + RmInpKeys ={'required' : ['account', 'walltime', 'ntasks_model'], + 'optional' : ['job_name', 'qos', 'oserver_nodes', 'writers-per-node', 'ntasks-per-node', 'constraint'] + } + assert option in ['required', 'optional'],' "%s" option is not supported' % option + + return RmInpKeys[option] + + +def printResourceInputSampleFile(): + """ + print sample resource manager input file + """ + requiredKeys = getResourceKeys('required') + optionalKeys = getResourceKeys('optional') + + print ('#') + print ('# REQUIRED inputs') + print ('#') + print ('# NOTE:') + print ('# 
- account = computational project number') + print ('# [At NCCS: Use command "getsponsor" to see available account number(s).]' ) + print ('# - walltime = walltime requested; format is HH:MM:SS (hours/minutes/seconds)') + print ('# - ntasks_model = number of processors requested for the model (typically 126; output server is not included)') + print ('#') + for key in requiredKeys: + print (key + ':') + print () + print ('#') + print ('# OPTIONAL inputs') + print ('#') + print ('# NOTE:') + print ('# - job_name = name of experiment; default is "exp_id"') + print ('# - qos = quality-of-service; do not specify by default; specify "debug" for faster but limited service') + print ('# - oserver_nodes = number of nodes for oserver ( default is 0, for future use )') + print ('# - writers-per-node = tasks per oserver_node for writing ( default is 5, for future use );') + print ('# IMPORTANT REQUIREMENT: total #writers = writers-per-node * oserver_nodes >= 2;') + print ('# jobs will hang when oserver_nodes = writers-per-node = 1.') + print ('# - ntasks-per-node = requesting fewer ntasks-per-node than total number of cores per node increases allocated memory;') + print ('# ntasks_model should be a multiple of ntasks-per-node') + print ('# - constraint = name of chip set(s) (NCCS default is "[mil|cas]", NAS default is "cas_ait")') + print ('#') + for key in optionalKeys: + print ('#'+key + ':') + +def printInputSampleFile(cmdLineArgs): + ''' + ./ldas_setup sample ... + + "sample" sub-command: + '--exeinp' and '--batinp' are mutually exclusive command line arguments. + Specifying one will set it to True and set the other one to False. 
+ That is, we can have either: {'exeinp': False, 'batinp': True } + or: {'exeinp': True, 'batinp': False} + ''' + if cmdLineArgs['exeinp']: + printExeInputSampleFile() + elif cmdLineArgs['batinp']: + printResourceInputSampleFile() + else: + raise Exception('unrecognized sample option') + +def printDictionary(d): + """ + Private method: print a 'flat' dictionary + """ + + for key, val in d.items(): + print (key.ljust(24), ':', val) + +def hours_to_hhmmss(hours): + + # Convert hours to timedelta + td = timedelta(hours=hours) + + # Extract hours, minutes, seconds + total_seconds = int(td.total_seconds()) + hours, remainder = divmod(total_seconds, 3600) + minutes, seconds = divmod(remainder, 60) + + # Format as HHMMSS + return f"{hours:02d}{minutes:02d}{seconds:02d}" diff --git a/GEOSldas_App/tile_bin2nc4.F90 b/GEOSldas_App/tile_bin2nc4.F90 deleted file mode 100644 index 890bfd82..00000000 --- a/GEOSldas_App/tile_bin2nc4.F90 +++ /dev/null @@ -1,555 +0,0 @@ -PROGRAM tile_bin2nc4 - - implicit none - INCLUDE 'netcdf.inc' - - integer :: i,k, n, NTILES - integer :: NCFOutID, Vid, STATUS, CellID, TimID, nVars - character(256) :: Usage="tile_bin2nc4.x BINFILE DESCRIPTOR TILECOORD" ! DESCRIPTOR = GrADS ctl file generated by MAPL from HISTORY.rc - character(512) :: BINFILE, TILECOORD, DESCRIPTOR, arg(3) - character(128) :: MYNAME, BUF - - integer, dimension(8) :: date_time_values - character (22) :: time_stamp - real, allocatable, dimension (:) :: lons, lats, var - integer, allocatable, dimension (:) :: tileid, i_index, j_index - - integer :: myunit1, myunit2 - real :: undef - - ! process command line arguments - - I = command_argument_count() - - if( I /=3 ) then - print *, "Wrong Number of arguments: ", i - print *, trim(Usage) - stop - end if - - do n=1,I - call get_command_argument(n,arg(n)) - enddo - - call get_environment_variable ("MYNAME", MYNAME) - - read(arg(1),'(a)') BINFILE - read(arg(2),'(a)') DESCRIPTOR - read(arg(3),'(a)') TILECOORD - -! print *,MYNAME -! 
print *,trim(BINFILE) -! print *,trim(DESCRIPTOR) -! print *,trim(TILECOORD) - - ! read TILECOORD file - - open (newunit=myunit1, file = trim(TILECOORD), form = 'unformatted', action ='read') - read (myunit1) NTILES - - allocate (lons (1:NTILES)) - allocate (lats (1:NTILES)) - allocate (tileid (1:NTILES)) - allocate (var (1:NTILES)) - allocate (i_index(1:NTILES)) - allocate (j_index(1:NTILES)) - - read (myunit1) tileid - read (myunit1) tileid - read (myunit1) tileid - read (myunit1) lons - read (myunit1) lats - read (myunit1) var - read (myunit1) var - read (myunit1) var - read (myunit1) var - read (myunit1) i_index - read (myunit1) j_index - - close (myunit1,status = 'keep') - - ! read DESCRIPTOR file (=GrADS ctl file generated by MAPL from HISTORY.rc file) - - open (newunit=myunit1, file = trim(DESCRIPTOR), form ='formatted', action = 'read') - nVars = 0 - - undef = 0.100000E+16 - k = 0 - do - read(myunit1, '(a)', iostat=status) buf - if (status /= 0) exit - k = k + 1 - if(buf(1:index(buf,' ') -1) == 'vars') then - i = index(buf,' ') - read (buf(i:),*, IOSTAT = n) nVars - endif - if(buf(1:index(buf,' ') -1) == 'undef') then - i = index(buf,' ') - read (buf(i:),*, IOSTAT = n) undef - endif - if(nVars /= 0) exit - - end do - - ! 
prep nc4 file - - status = NF_CREATE (trim(BINFILE)//'.nc4', NF_NETCDF4, NCFOutID) - status = NF_DEF_DIM(NCFOutID, 'tile' , NTILES, CellID) - status = NF_DEF_DIM(NCFOutID, 'time' , NF_UNLIMITED, TimID) - - status = NF_DEF_VAR(NCFOutID, 'lon' , NF_FLOAT, 1 ,CellID, vid) - status = NF_PUT_ATT_TEXT(NCFOutID, vid, 'long_name', & - LEN_TRIM('longitude'), 'longitude') - status = NF_DEF_VAR(NCFOutID, 'lat' , NF_FLOAT, 1 ,CellID, vid) - status = NF_PUT_ATT_TEXT(NCFOutID, vid, 'long_name', & - LEN_TRIM('latitude'), 'latitude') - status = NF_DEF_VAR(NCFOutID, 'IG' , NF_INT, 1 ,CellID, vid) - status = NF_PUT_ATT_TEXT(NCFOutID, vid, 'long_name', & - LEN_TRIM('I_INDEX'), 'I_INDEX') - status = NF_DEF_VAR(NCFOutID, 'JG' , NF_INT, 1 ,CellID, vid) - status = NF_PUT_ATT_TEXT(NCFOutID, vid, 'long_name', & - LEN_TRIM('J_INDEX'), 'J_INDEX') - - do n = 1, nVars - read(myunit1, '(a)', iostat=status) buf - status = NF_DEF_VAR(NCFOutID,buf(1:index(buf,' ') -1) , NF_FLOAT, 2 ,(/CellID, TimID/), vid) - status = NF_PUT_ATT_TEXT(NCFOutID, vid, 'long_name', & - LEN_TRIM(getAttribute(buf(1:index(buf,' ') -1), LNAME = 1)), & - getAttribute(buf(1:index(buf,' ') -1), LNAME = 1)) - status = NF_PUT_ATT_TEXT(NCFOutID, vid, 'units', & - LEN_TRIM(getAttribute(buf(1:index(buf,' ') -1), UNT = 1)), & - getAttribute(buf(1:index(buf,' ') -1), UNT = 1)) - status = nf_put_att_real(NCFOutID, vid, '_FillValue',NF_FLOAT, 1, undef) - end do - - call date_and_time(VALUES=date_time_values) - - write (time_stamp,'(i4.4,a1,i2.2,a1,i2.2,1x,a2,1x,i2.2,a1,i2.2,a1,i2.2)') & - date_time_values(1),'-',date_time_values(2),'-',date_time_values(3),'at', & - date_time_values(5),':',date_time_values(6),':',date_time_values(7) - - status = NF_PUT_ATT_TEXT(NCFOutID, NF_GLOBAL, 'CreatedBy', LEN_TRIM(MYNAME), & - trim(MYNAME)) - status = NF_PUT_ATT_TEXT(NCFOutID, NF_GLOBAL, 'Date' , LEN_TRIM(time_stamp),trim(time_stamp)) - - status = NF_ENDDEF(NCFOutID) - - status = NF_PUT_VARA_REAL(NCFOutID,VarID(NCFOutID,'lon' ) 
,(/1/),(/NTILES/),lons ) - status = NF_PUT_VARA_REAL(NCFOutID,VarID(NCFOutID,'lat' ) ,(/1/),(/NTILES/),lats ) - status = NF_PUT_VARA_INT (NCFOutID,VarID(NCFOutID,'IG' ) ,(/1/),(/NTILES/),i_index ) - status = NF_PUT_VARA_INT (NCFOutID,VarID(NCFOutID,'JG' ) ,(/1/),(/NTILES/),j_index ) - - ! read data from binary file and write into nc4 file - - open (newunit=myunit2, file = trim(BINFILE)//'.bin', form = 'unformatted', action = 'read') - - rewind (myunit1) - do i = 1, k - read(myunit1, '(a)', iostat=status) buf - end do - - do n = 1, nVars - read (myunit1, '(a)', iostat=status) buf - read (myunit2) var - status = NF_PUT_VARA_REAL(NCFOutID,VarID(NCFOutID,buf(1:index(buf,' ') -1)) ,(/1,1/),(/NTILES,1/),var ) - end do - - STATUS = NF_CLOSE (NCFOutID) - close (myunit1) - close (myunit2) - -contains - - ! ---------------------------------------------------------------------- - - integer function VarID (NCFID, VNAME) - - integer, intent (in) :: NCFID - character(*), intent (in) :: VNAME - integer :: status - - STATUS = NF_INQ_VARID (NCFID, trim(VNAME) ,VarID) - IF (STATUS .NE. NF_NOERR) & - CALL HANDLE_ERR(STATUS, trim(VNAME)) - - end function VarID - - ! ----------------------------------------------------------------------- - - SUBROUTINE HANDLE_ERR(STATUS, Line) - - INTEGER, INTENT (IN) :: STATUS - CHARACTER(*), INTENT (IN) :: Line - - IF (STATUS .NE. NF_NOERR) THEN - PRINT *, trim(Line),': ',NF_STRERROR(STATUS) - STOP 'Stopped' - ENDIF - - END SUBROUTINE HANDLE_ERR - - ! *********************************************************************** - - FUNCTION getAttribute (SHORT_NAME, LNAME, UNT) result (str_atr) - - character(*), intent(in) :: SHORT_NAME - integer, intent (in), optional :: LNAME, UNT - character(128) :: str_atr, LONG_NAME, UNITS - - SELECT case (trim(SHORT_NAME)) - - ! For L4_SM - ! reichle, 20 May 2020: verified SHORT_NAME and corrected UNITS to match SMAP L4_SM Product Specs; LONG_NAME (mostly) from GEOS_CatchGridComp.F90 - ! 
reichle, 14 Feb 2022: added "WATERTABLED" (now: "PEATCLSM_WATERLEVEL") and "FSWCHANGE" (now: "PEATCLSM_FSWCHANGE") - ! reichle, 21 Feb 2022: added "mwrtm_vegopacity" - - case ('sm_surface'); LONG_NAME = 'water_surface_layer'; UNITS = 'm3 m-3' - case ('sm_rootzone'); LONG_NAME = 'water_root_zone'; UNITS = 'm3 m-3' - case ('sm_profile'); LONG_NAME = 'water_ave_prof'; UNITS = 'm3 m-3' - case ('sm_surface_wetness'); LONG_NAME = 'surface_soil_wetness'; UNITS = '1' - case ('sm_rootzone_wetness'); LONG_NAME = 'root_zone_soil_wetness'; UNITS = '1' - case ('sm_profile_wetness'); LONG_NAME = 'ave_prof_soil_wetness'; UNITS = '1' - case ('surface_temp'); LONG_NAME = 'ave_catchment_temp_incl_snw'; UNITS = 'K' - case ('soil_temp_layer1'); LONG_NAME = 'soil_temperatures_layer_1'; UNITS = 'K' - case ('soil_temp_layer2'); LONG_NAME = 'soil_temperatures_layer_2'; UNITS = 'K' - case ('soil_temp_layer3'); LONG_NAME = 'soil_temperatures_layer_3'; UNITS = 'K' - case ('soil_temp_layer4'); LONG_NAME = 'soil_temperatures_layer_4'; UNITS = 'K' - case ('soil_temp_layer5'); LONG_NAME = 'soil_temperatures_layer_5'; UNITS = 'K' - case ('soil_temp_layer6'); LONG_NAME = 'soil_temperatures_layer_6'; UNITS = 'K' - case ('snow_mass'); LONG_NAME = 'snow_mass'; UNITS = 'kg m-2' - case ('snow_depth'); LONG_NAME = 'snow_depth_in_snow_covered_area'; UNITS = 'm' - case ('land_evapotranspiration_flux'); LONG_NAME = 'Evaporation_land'; UNITS = 'kg m-2 s-1' - case ('overland_runoff_flux'); LONG_NAME = 'runoff_flux'; UNITS = 'kg m-2 s-1' - case ('baseflow_flux'); LONG_NAME = 'baseflow_flux'; UNITS = 'kg m-2 s-1' - case ('snow_melt_flux'); LONG_NAME = 'Snowmelt_flux_land'; UNITS = 'kg m-2 s-1' - case ('soil_water_infiltration_flux'); LONG_NAME = 'rainwater_infiltration_flux'; UNITS = 'kg m-2 s-1' - case ('land_fraction_saturated'); LONG_NAME = 'fractional_area_of_saturated_zone'; UNITS = '1' - case ('land_fraction_unsaturated'); LONG_NAME = 'fractional_area_of_unsaturated_zone'; UNITS = '1' - case 
('land_fraction_wilting'); LONG_NAME = 'fractional_area_of_wilting_zone'; UNITS = '1' - case ('land_fraction_snow_covered'); LONG_NAME = 'fractional_area_of_land_snowcover'; UNITS = '1' - case ('heat_flux_sensible'); LONG_NAME = 'Sensible_heat_flux_land'; UNITS = 'W m-2' - case ('heat_flux_latent'); LONG_NAME = 'Latent_heat_flux_land'; UNITS = 'W m-2' - case ('heat_flux_ground'); LONG_NAME = 'Ground_heating_land'; UNITS = 'W m-2' - case ('net_downward_shortwave_flux'); LONG_NAME = 'Net_shortwave_land'; UNITS = 'W m-2' - case ('net_downward_longwave_flux'); LONG_NAME = 'Net_longwave_land'; UNITS = 'W m-2' - case ('radiation_shortwave_downward_flux');LONG_NAME = 'Incident_shortwave_land'; UNITS = 'W m-2' - case ('radiation_longwave_absorbed_flux'); LONG_NAME = 'surface_absorbed_longwave_flux'; UNITS = 'W m-2' - case ('precipitation_total_surface_flux'); LONG_NAME = 'RainfSnowf'; UNITS = 'kg m-2 s-1' - case ('snowfall_surface_flux'); LONG_NAME = 'snowfall'; UNITS = 'kg m-2 s-1' - case ('surface_pressure'); LONG_NAME = 'surface_pressure'; UNITS = 'Pa' - case ('height_lowatmmodlay'); LONG_NAME = 'reference_height_for_Tair_Qair_Wind'; UNITS = 'm' - case ('temp_lowatmmodlay'); LONG_NAME = 'air_temperature_at_RefH'; UNITS = 'K' - case ('specific_humidity_lowatmmodlay'); LONG_NAME = 'specific_humidity_at_RefH'; UNITS = 'kg kg-1' - case ('windspeed_lowatmmodlay'); LONG_NAME = 'wind_speed_at_RefH'; UNITS = 'm s-1' - case ('vegetation_greenness_fraction'); LONG_NAME = 'greeness_fraction'; UNITS = '1' - case ('leaf_area_index'); LONG_NAME = 'leaf_area_index'; UNITS = 'm2 m-2' - case ('depth_to_water_table_from_surface_in_peat'); LONG_NAME = 'depth_to_water_table_from_surface_in_peat'; UNITS = 'm' - case ('free_surface_water_on_peat_flux'); LONG_NAME = 'change_in_free_surface_water_reservoir_on_peat'; UNITS = 'kg m-2 s-1' - case ('mwrtm_vegopacity'); LONG_NAME = 'Lband_microwave_vegopacity_normalized_with_cos_inc_angle'; UNITS = '1' - - ! 
additional defintions for SMAP Nature Run - reichle, 20 May 2020 - - case ('snow_temp_layer1'); LONG_NAME = 'temperature_top_snow_layer'; UNITS = 'K' - case ('tb_h'); LONG_NAME = 'brightness_temperature_land_1410MHz_40deg_Hpol'; UNITS = 'K' - case ('tb_v'); LONG_NAME = 'brightness_temperature_land_1410MHz_40deg_Vpol'; UNITS = 'K' - case ('TB_LAND_1410MHZ_40DEG_HPOL'); LONG_NAME = 'brightness_temperature_land_1410MHz_40deg_Hpol'; UNITS = 'K' - case ('TB_LAND_1410MHZ_40DEG_VPOL'); LONG_NAME = 'brightness_temperature_land_1410MHz_40deg_Vpol'; UNITS = 'K' - - ! End L4_SM ------------------------------------------------------------------------------------------------------------------------------------------------- - - case ('Tair'); LONG_NAME = 'air_temperature_at_RefH'; UNITS = 'K' - case ('TA'); LONG_NAME = 'air_temperature_at_RefH'; UNITS = 'K' - case ('Qair'); LONG_NAME = 'specific_humidity_at_RefH'; UNITS = 'kg kg-1' - case ('QA'); LONG_NAME = 'specific_humidity_at_RefH'; UNITS = 'kg kg-1' - case ('LWdown'); LONG_NAME = 'surface_absorbed_longwave_flux'; UNITS = 'W m-2' - case ('LWDNSRF'); LONG_NAME = 'surface_absorbed_longwave_flux'; UNITS = 'W m-2' - case ('SWdown'); LONG_NAME = 'downward_shortwave_radiation'; UNITS = 'W m-2' - case ('Wind'); LONG_NAME = 'wind_speed_at_RefH'; UNITS = 'm s-1' - case ('UU'); LONG_NAME = 'wind_speed_at_RefH'; UNITS = 'm s-1' - case ('Psurf'); LONG_NAME = 'surface_pressure'; UNITS = 'Pa' - case ('PS'); LONG_NAME = 'surface_pressure'; UNITS = 'Pa' - case ('Rainf_C'); LONG_NAME = 'convective_rainfall'; UNITS = 'kg m-2 s-1' - case ('Rainf'); LONG_NAME = 'liquid_water_precipitation'; UNITS = 'kg m-2 s-1' - case ('Snowf'); LONG_NAME = 'total_snowfall'; UNITS = 'kg m-2 s-1' - case ('RainfSnowf'); LONG_NAME = 'RainfSnowf'; UNITS = 'kg m-2 s-1' - case ('SWnet'); LONG_NAME = 'downward_net_shortwave_radiation'; UNITS = 'W m-2' - case ('RefH'); LONG_NAME = 'reference_height_for_Tair_Qair_Wind'; UNITS = 'm' - case ('DZ'); LONG_NAME = 
'reference_height_for_Tair_Qair_Wind'; UNITS = 'm' - case ('CATDEF'); LONG_NAME = 'catchment_deficit'; UNITS = 'kg m-2' - case ('RZEXC'); LONG_NAME = 'root_zone_excess'; UNITS = 'kg m-2' - case ('SRFEXC'); LONG_NAME = 'surface_excess'; UNITS = 'kg m-2' - case ('CAPAC', 'INTRWATR'); LONG_NAME = 'vegetation_interception_water_storage'; UNITS = 'kg m-2' - case ('WESNN1'); LONG_NAME = 'snow_mass_layer_1'; UNITS = 'kg m-2' - case ('WESNN2'); LONG_NAME = 'snow_mass_layer_2'; UNITS = 'kg m-2' - case ('WESNN3'); LONG_NAME = 'snow_mass_layer_3'; UNITS = 'kg m-2' - case ('HTSNNN1'); LONG_NAME = 'heat_content_snow_layer_1'; UNITS = 'J m-2' - case ('HTSNNN2'); LONG_NAME = 'heat_content_snow_layer_2'; UNITS = 'J m-2' - case ('HTSNNN3'); LONG_NAME = 'heat_content_snow_layer_3'; UNITS = 'J m-2' - case ('SNDZN1'); LONG_NAME = 'snow_depth_layer_1'; UNITS = 'm' - case ('SNDZN2'); LONG_NAME = 'snow_depth_layer_2'; UNITS = 'm' - case ('SNDZN3'); LONG_NAME = 'snow_depth_layer_3'; UNITS = 'm' - case ('FICE1'); LONG_NAME = 'snow_frozen_fraction_layer_1'; UNITS = '1' - case ('FICE2'); LONG_NAME = 'snow_frozen_fraction_layer_2'; UNITS = '1' - case ('FICE3'); LONG_NAME = 'snow_frozen_fraction_layer_3'; UNITS = '1' - case ('ALBVR'); LONG_NAME = 'surface_reflectivity_for_visible_beam'; UNITS = '1' - case ('ALBVF'); LONG_NAME = 'surface_reflectivity_for_visible_diffuse'; UNITS = '1' - case ('ALBNR'); LONG_NAME = 'surface_reflectivity_for_near_infared_beam'; UNITS = '1' - case ('ALBNF'); LONG_NAME = 'surface_reflectivity_for_near_infrared_diffuse'; UNITS = '1' - case ('HLWUP'); LONG_NAME = 'surface_emitted_longwave_flux'; UNITS = 'W m-2' - case ('GWETPROF'); LONG_NAME = 'soil_wetness_profile'; UNITS = '1' - case ('GWETROOT'); LONG_NAME = 'soil_wetness_rootzone'; UNITS = '1' - case ('GWETTOP'); LONG_NAME = 'soil_wetness_surface'; UNITS = '1' - case ('PRMC'); LONG_NAME = 'soil_moisture_profile'; UNITS = 'm3 m-3' - case ('RZMC'); LONG_NAME = 'soil_moisture_rootzone'; UNITS = 'm3 m-3' - case 
('SFMC'); LONG_NAME = 'soil_moisture_surface'; UNITS = 'm3 m-3' - case ('TPSNOW', 'TPSNOWLAND'); LONG_NAME = 'surface_temperature_of_snow_on_land'; UNITS = 'K' - case ('TUNST' , 'TUNSTLAND'); LONG_NAME = 'surface_temperature_of_unsaturated_zone'; UNITS = 'K' - case ('TSAT' , 'TSATLAND'); LONG_NAME = 'surface_temperature_of_saturated_zone'; UNITS = 'K' - case ('TWLT' , 'TWLTLAND'); LONG_NAME = 'surface_temperature_of_wilting_zone'; UNITS = 'K' - case ('TSURF', 'TPSURF', 'TSURFLAND'); LONG_NAME = 'surface_temperature_of_land_incl_snow'; UNITS = 'K' - case ('GRN'); LONG_NAME = 'vegetation_greenness_fraction'; UNITS = '1' - case ('LAI'); LONG_NAME = 'leaf_area_index'; UNITS = '1' - case ('TP1', 'TSOIL1'); LONG_NAME = 'soil_temperature_layer_1'; UNITS = 'K' ! units now K, rreichle & borescan, 6 Nov 2020 - case ('TP2', 'TSOIL2'); LONG_NAME = 'soil_temperature_layer_2'; UNITS = 'K' ! units now K, rreichle & borescan, 6 Nov 2020 - case ('TP3', 'TSOIL3'); LONG_NAME = 'soil_temperature_layer_3'; UNITS = 'K' ! units now K, rreichle & borescan, 6 Nov 2020 - case ('TP4', 'TSOIL4'); LONG_NAME = 'soil_temperature_layer_4'; UNITS = 'K' ! units now K, rreichle & borescan, 6 Nov 2020 - case ('TP5', 'TSOIL5'); LONG_NAME = 'soil_temperature_layer_5'; UNITS = 'K' ! units now K, rreichle & borescan, 6 Nov 2020 - case ('TP6', 'TSOIL6'); LONG_NAME = 'soil_temperature_layer_6'; UNITS = 'K' ! 
units now K, rreichle & borescan, 6 Nov 2020 - case ('PRECTOTLAND', 'PRECTOTCORRLAND'); LONG_NAME = 'Total_precipitation_land'; UNITS = 'kg m-2 s-1' - case ('PRECSNOLAND', 'PRECSNOCORRLAND'); LONG_NAME = 'snowfall_land'; UNITS = 'kg m-2 s-1' - case ('SNOWMASS', 'SNOMAS'); LONG_NAME = 'snow_mass'; UNITS = 'kg m-2' - case ('TSLAND', 'SNOMASLAND'); LONG_NAME = 'Total_snow_storage_land'; UNITS = 'kg m-2' - case ('SNO'); LONG_NAME = 'snowfall'; UNITS = 'kg m-2 s-1' - case ('SNODP'); LONG_NAME = 'snow_depth_within_snow_covered_area_fraction'; UNITS = 'm' - case ('SNODPLAND'); LONG_NAME = 'snow_depth_within_snow_covered_area_fraction_on_land'; UNITS = 'm' - case ('EVPSOIL', 'LHLANDSOIL'); LONG_NAME = 'baresoil_evaporation_latent_heat_flux'; UNITS = 'W m-2' - case ('EVPTRNS', 'LHLANDTRNS'); LONG_NAME = 'transpiration_latent_heat_flux'; UNITS = 'W m-2' - case ('EVPINTR', 'LHLANDINTR'); LONG_NAME = 'interception_loss_latent_heat_flux'; UNITS = 'W m-2' - case ('EVPSBLN', 'LHLANDSBLN'); LONG_NAME = 'snowpack_evaporation_latent_heat_flux_on_land'; UNITS = 'W m-2' - case ('EVPSNO'); LONG_NAME = 'snowpack_evaporation_latent_heat_flux'; UNITS = 'W m-2' ! avg across all tile types - case ('RUNOFF'); LONG_NAME = 'runoff_total_flux'; UNITS = 'kg m-2 s-1' ! 
avg across all tile types - case ('RUNSURF', 'RUNSURFLAND'); LONG_NAME = 'overland runoff including throughflow'; UNITS = 'kg m-2 s-1' - case ('BASEFLOW'); LONG_NAME = 'baseflow_flux'; UNITS = 'kg m-2 s-1' - case ('BASEFLOWLAND'); LONG_NAME = 'baseflow_flux_land'; UNITS = 'kg m-2 s-1' - case ('SMLAND'); LONG_NAME = 'Snowmelt_flux_land'; UNITS = 'kg m-2 s-1' - case ('QINFIL', 'QINFILLAND'); LONG_NAME = 'Soil_water_infiltration_rate'; UNITS = 'kg m-2 s-1' - case ('FRUNST', 'FRLANDUNST'); LONG_NAME = 'fractional_area_of_unsaturated_zone'; UNITS = '1' - case ('FRSAT' , 'FRLANDSAT' ); LONG_NAME = 'fractional_area_of_saturated_zone'; UNITS = '1' - case ('FRSNO' , 'FRLANDSNO' ); LONG_NAME = 'fractional_area_of_snow_on_land'; UNITS = '1' - case ('FRWLT' , 'FRLANDWLT' ); LONG_NAME = 'fractional_area_of_wilting_zone'; UNITS = '1' - case ('PARDFLAND'); LONG_NAME = 'surface_downwelling_PAR_diffuse_flux'; UNITS = 'W m-2' - case ('PARDRLAND'); LONG_NAME = 'surface_downwelling_PAR_beam_flux'; UNITS = 'W m-2' - case ('SHLAND'); LONG_NAME = 'Sensible_heat_flux_land'; UNITS = 'W m-2' - case ('LHLAND'); LONG_NAME = 'Latent_heat_flux_land'; UNITS = 'W m-2' - case ('EVLAND'); LONG_NAME = 'Total_evaporation_land'; UNITS = 'kg m-2 s-1' - case ('LWLAND'); LONG_NAME = 'Net_longwave_flux_land'; UNITS = 'W m-2' - case ('SWLAND'); LONG_NAME = 'Net_shortwave_flux_land'; UNITS = 'W m-2' - case ('SWDOWNLAND'); LONG_NAME = 'Incident_shortwave_flux_land'; UNITS = 'W m-2' - case ('GHLAND'); LONG_NAME = 'Ground_heating_flux_land'; UNITS = 'W m-2' - case ('TWLAND'); LONG_NAME = 'total_water_storage_land'; UNITS = 'kg m-2' - case ('TELAND'); LONG_NAME = 'Total_energy_storage_land'; UNITS = 'J m-2' - case ('WCHANGE','WCHANGELAND'); LONG_NAME = 'rate_of_change_of_total_land_water'; UNITS = 'kg m-2 s-1' - case ('ECHANGE','ECHANGELAND'); LONG_NAME = 'rate_of_change_of_total_land_energy'; UNITS = 'W m-2' - case ('SPLAND', 'SPSHLAND'); LONG_NAME = 'Spurious_sensible_heat_flux_land'; UNITS = 'W m-2' - case 
('SPLH' , 'SPLHLAND'); LONG_NAME = 'Spurious_latent_heat_flux_land'; UNITS = 'W m-2' - case ('SPWATR', 'SPEVLAND'); LONG_NAME = 'Spurious_evapotranspiration_flux_land'; UNITS = 'kg m-2 s-1' - case ('SPSNOW', 'SPSNLAND'); LONG_NAME = 'Spurious_snow_energy_flux_land'; UNITS = 'W m-2' - case ('PEATCLSM_WATERLEVEL'); LONG_NAME = 'depth_to_water_table_from_surface_in_peat'; UNITS = 'm' - case ('PEATCLSM_FSWCHANGE'); LONG_NAME = 'change_in_free_surface_water_reservoir_on_peat'; UNITS = 'kg m-2 s-1' - case ('CNLAI'); LONG_NAME = 'CN_exposed_leaf-area_index'; UNITS = '1' - case ('CNTLAI'); LONG_NAME = 'CN_total_leaf-area_index'; UNITS = '1' - case ('CNSAI'); LONG_NAME = 'CN_exposed_stem-area_index'; UNITS = '1' - case ('CNTOTC'); LONG_NAME = 'CN_total_carbon'; UNITS = 'kg m-2' - case ('CNVEGC'); LONG_NAME = 'CN_total_vegetation_carbon'; UNITS = 'kg m-2' - case ('CNROOT'); LONG_NAME = 'CN_total_root_carbon'; UNITS = 'kg m-2' - case ('CNNPP'); LONG_NAME = 'CN_net_primary_production'; UNITS = 'kg m-2 s-1' - case ('CNGPP'); LONG_NAME = 'CN_gross_primary_production'; UNITS = 'kg m-2 s-1' - case ('CNSR'); LONG_NAME = 'CN_total_soil_respiration'; UNITS = 'kg m-2 s-1' - case ('CNNEE'); LONG_NAME = 'CN_net_ecosystem_exchange'; UNITS = 'kg m-2 s-1' - case ('CNXSMR'); LONG_NAME = 'abstract_C_pool_to_meet_excess_MR_demand'; UNITS = 'kg m-2' - case ('CNADD'); LONG_NAME = 'CN_added_to_maintain_positive_C'; UNITS = 'kg m-2 s-1' - case ('PARABS'); LONG_NAME = 'absorbed_PAR'; UNITS = 'W m-2' - case ('PARINC'); LONG_NAME = 'incident_PAR'; UNITS = 'W m-2' - case ('SCSAT'); LONG_NAME = 'saturated_stomatal_conductance'; UNITS = 'm s-1' - case ('SCUNS'); LONG_NAME = 'unstressed_stomatal_conductance'; UNITS = 'm s-1' - case ('BTRAN'); LONG_NAME = 'transpiration coefficient'; UNITS = '1' - case ('SIF'); LONG_NAME = 'solar induced fluorescence'; UNITS = 'umol m-2 sm s-1' - case ('CLOSS'); LONG_NAME = 'CN_carbon_loss_to_fire'; UNITS = 'kg m-2 s-1' - case ('BURN'); LONG_NAME = 
'CN_fractional_area_burn_rate'; UNITS = 's-1' - case ('FSEL'); LONG_NAME = 'fire season length'; UNITS = 'days' - case ('GHTSKIN'); LONG_NAME = 'Ground_heating_flux_for_skin_temp_land'; UNITS = 'W m-2' - case ('WAT10CM'); LONG_NAME = 'soil moisture in Upper 10cm'; UNITS = 'kg m-2' - case ('WATSOI'); LONG_NAME = 'total soil moisture'; UNITS = 'kg m-2' - case ('ICESOI'); LONG_NAME = 'soil frozen water content'; UNITS = 'kg m-2' - case ('RMELTDU001'); LONG_NAME = 'flushed_out_dust_mass_flux_from_the_bottom_layer_bin_1'; UNITS = 'kg m-2 s-1' - case ('RMELTDU002'); LONG_NAME = 'flushed_out_dust_mass_flux_from_the_bottom_layer_bin_2'; UNITS = 'kg m-2 s-1' - case ('RMELTDU003'); LONG_NAME = 'flushed_out_dust_mass_flux_from_the_bottom_layer_bin_3'; UNITS = 'kg m-2 s-1' - case ('RMELTDU004'); LONG_NAME = 'flushed_out_dust_mass_flux_from_the_bottom_layer_bin_4'; UNITS = 'kg m-2 s-1' - case ('RMELTDU005'); LONG_NAME = 'flushed_out_dust_mass_flux_from_the_bottom_layer_bin_5'; UNITS = 'kg m-2 s-1' - case ('RMELTBC001'); LONG_NAME = 'flushed_out_black_carbon_mass_flux_from_the_bottom_layer_bin_1'; UNITS = 'kg m-2 s-1' - case ('RMELTBC002'); LONG_NAME = 'flushed_out_black_carbon_mass_flux_from_the_bottom_layer_bin_2'; UNITS = 'kg m-2 s-1' - case ('RMELTOC001'); LONG_NAME = 'flushed_out_organic_carbon_mass_flux_from_the_bottom_layer_bin_1'; UNITS = 'kg m-2 s-1' - case ('RMELTOC002'); LONG_NAME = 'flushed_out_organic_carbon_mass_flux_from_the_bottom_layer_bin_2'; UNITS = 'kg m-2 s-1' - - ! 
land constants - - case ('CDCR2'); LONG_NAME = 'maximum soil water content above wilting point'; UNITS = 'kg m-2' - case ('DZGT1'); LONG_NAME = 'thickness_of_soil_layer_associated_with_TSOIL1'; UNITS = 'm' - case ('DZGT2'); LONG_NAME = 'thickness_of_soil_layer_associated_with_TSOIL2'; UNITS = 'm' - case ('DZGT3'); LONG_NAME = 'thickness_of_soil_layer_associated_with_TSOIL3'; UNITS = 'm' - case ('DZGT4'); LONG_NAME = 'thickness_of_soil_layer_associated_with_TSOIL4'; UNITS = 'm' - case ('DZGT5'); LONG_NAME = 'thickness_of_soil_layer_associated_with_TSOIL5'; UNITS = 'm' - case ('DZGT6'); LONG_NAME = 'thickness_of_soil_layer_associated_with_TSOIL6'; UNITS = 'm' - case ('DZPR'); LONG_NAME = 'thickness_of_soil_layer_associated_with_PRMC_and_GWETPROF'; UNITS = 'm' - case ('DZRZ'); LONG_NAME = 'thickness_of_soil_layer_associated_with_RZMC_and_GWETROOT'; UNITS = 'm' - case ('DZSF'); LONG_NAME = 'thickness_of_soil_layer_associated_with_SFMC_and_GWETTOP'; UNITS = 'm' - case ('DZTS'); LONG_NAME = 'thickness_of_soil_layer_associated_with_TSATLAND_TUNSTLAND_and_TWLTLAND'; UNITS = 'm' - case ('POROS'); LONG_NAME = 'soil_porosity'; UNITS = 'm3 m-3' - case ('WPEMW'); LONG_NAME = 'soil_wilting_point_in_equivalent_mass_of_total_profile_water'; UNITS = 'kg m-2' - case ('WPMC'); LONG_NAME = 'soil_wilting_point_in_volumetric_units'; UNITS = 'm3 m-3' - case ('WPWET'); LONG_NAME = 'soil_wilting_point_in_degree_of_saturation_units'; UNITS = '1' - - ! 
land assimilation increments for Catchment prognostic variables in coupled land-atmosphere DAS (#sqz 2020-01) - - case ('TCFSAT_INCR'); LONG_NAME = 'increment_surface_temperature_of_saturated_zone'; UNITS = 'K' - case ('TCFTRN_INCR'); LONG_NAME = 'increment_surface_temperature_of_transition_zone'; UNITS = 'K' - case ('TCFWLT_INCR'); LONG_NAME = 'increment_surface_temperature_of_wilting_zone'; UNITS = 'K' - case ('QCFSAT_INCR'); LONG_NAME = 'increment_surface_specific_humidity_of_saturated_zone'; UNITS = 'kg kg-1' - case ('QCFTRN_INCR'); LONG_NAME = 'increment_surface_specific_humidity_of_transition_zone'; UNITS = 'kg kg-1' - case ('QCFWLT_INCR'); LONG_NAME = 'increment_surface_specific_humidity_of_wilting_zone'; UNITS = 'kg kg-1' - case ('CAPAC_INCR'); LONG_NAME = 'increment_vegetation_interception_water_storage'; UNITS = 'kg m-2' - case ('CATDEF_INCR'); LONG_NAME = 'increment_catchment_deficit'; UNITS = 'kg m-2' - case ('RZEXC_INCR'); LONG_NAME = 'increment_root_zone_excess'; UNITS = 'kg m-2' - case ('SRFEXC_INCR'); LONG_NAME = 'increment_surface_excess'; UNITS = 'kg m-2' - case ('GHTCNT1_INCR'); LONG_NAME = 'increment_soil_heat_content_layer_1'; UNITS = 'J m-2' - case ('GHTCNT2_INCR'); LONG_NAME = 'increment_soil_heat_content_layer_2'; UNITS = 'J m-2' - case ('GHTCNT3_INCR'); LONG_NAME = 'increment_soil_heat_content_layer_3'; UNITS = 'J m-2' - case ('GHTCNT4_INCR'); LONG_NAME = 'increment_soil_heat_content_layer_4'; UNITS = 'J m-2' - case ('GHTCNT5_INCR'); LONG_NAME = 'increment_soil_heat_content_layer_5'; UNITS = 'J m-2' - case ('GHTCNT6_INCR'); LONG_NAME = 'increment_soil_heat_content_layer_6'; UNITS = 'J m-2' - case ('WESNN1_INCR'); LONG_NAME = 'increment_snow_mass_layer_1'; UNITS = 'kg m-2' - case ('WESNN2_INCR'); LONG_NAME = 'increment_snow_mass_layer_2'; UNITS = 'kg m-2' - case ('WESNN3_INCR'); LONG_NAME = 'increment_snow_mass_layer_3'; UNITS = 'kg m-2' - case ('HTSNNN1_INCR'); LONG_NAME = 'increment_heat_content_snow_layer_1'; UNITS = 'J m-2' - case 
('HTSNNN2_INCR'); LONG_NAME = 'increment_heat_content_snow_layer_2'; UNITS = 'J m-2' - case ('HTSNNN3_INCR'); LONG_NAME = 'increment_heat_content_snow_layer_3'; UNITS = 'J m-2' - case ('SNDZN1_INCR'); LONG_NAME = 'increment_snow_depth_layer_1'; UNITS = 'm' - case ('SNDZN2_INCR'); LONG_NAME = 'increment_snow_depth_layer_2'; UNITS = 'm' - case ('SNDZN3_INCR'); LONG_NAME = 'increment_snow_depth_layer_3'; UNITS = 'm' - - ! land assimilation forecast and analysis for Catchment model diagnostics - - case ('SFMC_FCST'); LONG_NAME = 'soil_moisture_surface_forecast'; UNITS = 'm3 m-3' - case ('RZMC_FCST'); LONG_NAME = 'soil_moisture_rootzone_forecast'; UNITS = 'm3 m-3' - case ('PRMC_FCST'); LONG_NAME = 'soil_moisture_profile_forecast'; UNITS = 'm3 m-3' - case ('TSURF_FCST'); LONG_NAME = 'surface_temperature_of_land_incl_snow_forecast'; UNITS = 'K' - case ('TSOIL1_FCST'); LONG_NAME = 'soil_temperature_layer_1_forecast'; UNITS = 'K' - - case ('SFMC_FCST_ENSSTD'); LONG_NAME = 'soil_moisture_surface_forecast_ensstd'; UNITS = 'm3 m-3' - case ('RZMC_FCST_ENSSTD'); LONG_NAME = 'soil_moisture_rootzone_forecast_ensstd'; UNITS = 'm3 m-3' - case ('PRMC_FCST_ENSSTD'); LONG_NAME = 'soil_moisture_profile_forecast_ensstd'; UNITS = 'm3 m-3' - case ('TSURF_FCST_ENSSTD'); LONG_NAME = 'surface_temperature_of_land_incl_snow_forecast_ensstd'; UNITS = 'K' - case ('TSOIL1_FCST_ENSSTD'); LONG_NAME = 'soil_temperature_layer_1_forecast_ensstd'; UNITS = 'K' - - case ('SFMC_ANA'); LONG_NAME = 'soil_moisture_surface_analysis'; UNITS = 'm3 m-3' - case ('RZMC_ANA'); LONG_NAME = 'soil_moisture_rootzone_analysis'; UNITS = 'm3 m-3' - case ('PRMC_ANA'); LONG_NAME = 'soil_moisture_profile_analysis'; UNITS = 'm3 m-3' - case ('TSURF_ANA'); LONG_NAME = 'surface_temperature_of_land_incl_snow_analysis'; UNITS = 'K' - case ('TSOIL1_ANA'); LONG_NAME = 'soil_temperature_layer_1_analysis'; UNITS = 'K' - - case ('SFMC_ANA_ENSSTD'); LONG_NAME = 'soil_moisture_surface_analysis_ensstd'; UNITS = 'm3 m-3' - case 
('RZMC_ANA_ENSSTD'); LONG_NAME = 'soil_moisture_rootzone_analysis_ensstd'; UNITS = 'm3 m-3' - case ('PRMC_ANA_ENSSTD'); LONG_NAME = 'soil_moisture_profile_analysis_ensstd'; UNITS = 'm3 m-3' - case ('TSURF_ANA_ENSSTD'); LONG_NAME = 'surface_temperature_of_land_incl_snow_ensstd'; UNITS = 'K' - case ('TSOIL1_ANA_ENSSTD'); LONG_NAME = 'soil_temperature_layer_1_analysis_ensstd'; UNITS = 'K' - - ! other land assimilation fields - - case ('MWRTM_VEGOPACITY'); LONG_NAME = 'Lband_microwave_vegopacity_normalized_with_cos_inc_angle'; UNITS = '1' - - ! land ice fields - - case ('EMIS'); LONG_NAME = 'surface_emissivity'; UNITS = '1' - case ('ALBVR_GL'); LONG_NAME = 'surface_reflectivity_for_visible_beam'; UNITS = '1' - case ('ALBVF_GL'); LONG_NAME = 'surface_reflectivity_for_visible_diffuse'; UNITS = '1' - case ('ALBNR_GL'); LONG_NAME = 'surface_reflectivity_for_near_infrared_beam'; UNITS = '1' - case ('ALBNF_GL'); LONG_NAME = 'surface_reflectivity_for_near_infrared_direct'; UNITS = '1' - case ('TST'); LONG_NAME = 'surface_temperature'; UNITS = 'K' - case ('QST'); LONG_NAME = 'surface_specific_humidity'; UNITS = 'kg kg-1' - case ('TH'); LONG_NAME = 'turbulence_surface_skin_temperature'; UNITS = 'K' - case ('QH'); LONG_NAME = 'turbulence_surface_specific_humidity'; UNITS = 'kg kg-1' - case ('DELTS'); LONG_NAME = 'change_of_surface_skin_temperature'; UNITS = 'K' - case ('DELQS'); LONG_NAME = 'change_of_surface_specific_humidity'; UNITS = 'kg kg-1' - case ('CHT'); LONG_NAME = 'surface_heat_exchange_coefficient'; UNITS = 'kg m-2 s-1' - case ('CMT'); LONG_NAME = 'surface_momentum_exchange_coefficient'; UNITS = 'kg m-2 s-1' - case ('CQT'); LONG_NAME = 'surface_moisture_exchange_coefficient'; UNITS = 'kg m-2 s-1' - case ('CNT'); LONG_NAME = 'neutral_drag_coefficient'; UNITS = '1' - case ('RIT'); LONG_NAME = 'surface_bulk_richardson_number'; UNITS = '1' - case ('ACCUM'); LONG_NAME = 'net_ice_accumulation_rate'; UNITS = 'kg m-2 s-1' - case ('EVPICE_GL'); LONG_NAME = 
'snow_ice_evaporation_energy_flux_over_glaciated_surface'; UNITS = 'W m-2' - case ('SUBLIM'); LONG_NAME = 'sublimation'; UNITS = 'kg m-2 s-1' - case ('SNOMAS_GL'); LONG_NAME = 'snow_mass_over_glaciated_surface'; UNITS = 'kg m-2' - case ('SNOWDP_GL'); LONG_NAME = 'snow_depth_over_glaciated_surface'; UNITS = 'm' - case ('ASNOW_GL'); LONG_NAME = 'fractional_snow_covered_area_of_glaciated_surface'; UNITS = '1' - case ('RHOSNOW'); LONG_NAME = 'snow_layer_density'; UNITS = 'kg m-3' - case ('TSNOW'); LONG_NAME = 'snow_layer_temperature'; UNITS = 'K' - case ('TICE0'); LONG_NAME = 'aggregated_ice_layer_temperatured'; UNITS = 'K' - case ('WSNOW'); LONG_NAME = 'snow_laer_water_content'; UNITS = 'kg m-2' - case ('ZSNOW'); LONG_NAME = 'snow_layer_thickness'; UNITS = 'm' - case ('DRHS0'); LONG_NAME = 'snow_layer_density_change_due_to_densification'; UNITS = 'kg m-3' - case ('WESNEX'); LONG_NAME = 'snow_layer_mass_residual_due_to_densification'; UNITS = 'kg_m-2 s-1' - case ('WESNEXT'); LONG_NAME = 'total_snow_mass_residual_due_to_densification'; UNITS = 'kg m-2 s-1' - case ('WESNSC'); LONG_NAME = 'top_snow_layer_change_due_to_sub_con'; UNITS = 'kg m-2 s-1' - case ('SNDZSC'); LONG_NAME = 'top_snow_layer_thickness_change_due_to_sub_con'; UNITS = 'm s-1' - case ('WESNPREC'); LONG_NAME = 'top_snow_layer_mass_change_due_to_precip'; UNITS = 'kg_m-2 s-1' - case ('SNDZPREC'); LONG_NAME = 'top_snow_layer_thickness_change_due_to_precip'; UNITS = 'm s-1' - case ('SNDZ1PERC'); LONG_NAME = 'top_snow_layer_thickness_change_due_to_percolation'; UNITS = 'm s-1' - case ('WESNPERC'); LONG_NAME = 'snow_layer_mass_change_due_to_percolation'; UNITS = 'kg m-2 s-1' - case ('WESNDENS'); LONG_NAME = 'snow_layer_mass_change_due_to_densification'; UNITS = 'kg m-2 s-1' - case ('WESNREPAR'); LONG_NAME = 'snow_layer_mass_change_due_to_repartition'; UNITS = 'kg m-2 s-1' - case ('WESNBOT'); LONG_NAME = 'frozen_runoff_due_to_fixed_max_depth'; UNITS = 'kg m-2 s-1' - case ('RAINRFZ'); LONG_NAME = 
'contribution_to_surface_mass_balance_from_rain_frozen_onto_bare_ice'; UNITS = 'kg m-2 s-1' - case ('SMELT'); LONG_NAME = 'snow_melt_flux'; UNITS = 'kg_m-2 s-1' - case ('IMELT'); LONG_NAME = 'ice_melt_flux'; UNITS = 'kg_m-2 s-1' - case ('SNOWALB'); LONG_NAME = 'snow_broadband_reflectivity'; UNITS = '1' - case ('SNICEALB'); LONG_NAME = 'aggregated_snow_ice_broadband_reflectivity'; UNITS = '1' - case ('MELTWTR'); LONG_NAME = 'melt_water_production'; UNITS = 'kg m-2 s-1' - case ('MELTWTRCONT'); LONG_NAME = 'snowpack_meltwater_content'; UNITS = 'kg m-2' - case ('LWC'); LONG_NAME = 'liquid_water_content_in_top_snow_layer'; UNITS = '1' - case ('RUNOFF_GL'); LONG_NAME = 'runoff_total_flux'; UNITS = 'kg m-2 s-1' - case ('GUST'); LONG_NAME = 'gustiness'; UNITS = 'm s-1' - case ('VENT'); LONG_NAME = 'surface_ventilation_velocity'; UNITS = 'm s-1' - case ('Z0'); LONG_NAME = 'surface_roughness'; UNITS = 'm' - case ('Z0H'); LONG_NAME = 'surface_roughness_for_heat'; UNITS = 'm' - case ('MOT2M'); LONG_NAME = 'temperature_2m_wind_from_MO_sfc'; UNITS = 'K' - case ('MOQ2M'); LONG_NAME = 'humidity_2m_wind_from_MO_sfc'; UNITS = 'kg kg-1' - case ('MOU2M'); LONG_NAME = 'zonal_2m_wind_from_MO_sfc'; UNITS = 'm s-1' - case ('MOV2M'); LONG_NAME = 'meridional_2m_wind_from_MO_sfc'; UNITS = 'm s-1' - case ('MOT10M'); LONG_NAME = 'temperature_10m_wind_from_MO_sfc'; UNITS = 'K' - case ('MOQ10M'); LONG_NAME = 'humidity_10m_wind_from_MO_sfc'; UNITS = 'kg kg-1' - case ('MOU10M'); LONG_NAME = 'zonal_10m_wind_from_MO_sfc'; UNITS = 'm s-1' - case ('MOV10M'); LONG_NAME = 'meridional_10m_wind_from_MO_sfc'; UNITS = 'm s-1' - case ('MOU50M'); LONG_NAME = 'zonal_50m_wind_from_MO_sfc'; UNITS = 'm s-1' - case ('MOV50M'); LONG_NAME = 'merodopma;_50m_wind_from_MO_sfc'; UNITS = 'm s-1' - case ('EVAPOUT'); LONG_NAME = 'evaporation'; UNITS = 'kg m-2 s-1' - case ('SHOUT'); LONG_NAME = 'upward_sensible_heat_flux'; UNITS = 'W m-2' - case ('HLWUP_GL'); LONG_NAME = 'surface_emitted_longwave_flux'; UNITS = 'W m-2' - 
case ('LWNDSRF'); LONG_NAME = 'surface_net_downward_longwave_flux'; UNITS = 'W m-2' - case ('SWNDSRF'); LONG_NAME = 'surface_net_downward_shortwave_flux'; UNITS = 'W m-2' - case ('HLATN'); LONG_NAME = 'total_latent_energy_flux'; UNITS = 'W m-2' - case ('DNICFLX'); LONG_NAME = 'downward_heat_flux_in_ice'; UNITS = 'W m-2' - case ('GHSNOW'); LONG_NAME = 'ground_heating_snow'; UNITS = 'W m-2' - case ('GHTSKIN_GL'); LONG_NAME = 'glacier_ice_heating_flux'; UNITS = 'W m-2' - -! default LONG_NAME and UNITS for nc4 files created by tile_bin2nc4.F90 (used for any SHORT_NAME not listed above): - - case default; LONG_NAME = 'not defined in tile_bin2nc4.F90'; UNITS = 'not defined in tile_bin2nc4.F90'; - - end select - - if (present(LNAME)) str_atr = trim (LONG_NAME) - if (present(UNT)) str_atr = trim (UNITS ) - - END FUNCTION getAttribute - -END PROGRAM tile_bin2nc4 diff --git a/GEOSldas_App/util/shared/matlab/read_vegopacity.m b/GEOSldas_App/util/shared/matlab/read_vegopacity.m new file mode 100644 index 00000000..7f09f96c --- /dev/null +++ b/GEOSldas_App/util/shared/matlab/read_vegopacity.m @@ -0,0 +1,102 @@ + +function [ veg_opacity, N_tile, avg_period ] = read_vegopacity( fname, N_tile ); + +% Reader for climatological mwRTM vegopacity file (little-endian, Fortran sequential binary). +% +% vegopacity files are created with the script +% GEOSldas_GridComp/GEOSldas_App/util/inputs/mwRTM_params/Create_vegopacity_8day_clim.m +% +% The format of the vegopacity file is compatible with MAPL_ReadForcing(). There are 96 Fortran +% records for 48 pairs of header/vegopacity entries, stored in a temporal wrap-around manner: +% +% record 1: [FortranTag] header [FortranTag] % last record of year +% record 2: [FortranTag] vegopacity [FortranTag] +% record 3: [FortranTag] header [FortranTag] % first record of year +% record 4: [FortranTag] vegopacity [FortranTag] +% ... 
+% record 93: [FortranTag] header [FortranTag] % last record of year +% record 94: [FortranTag] vegopacity [FortranTag] +% record 95: [FortranTag] header [FortranTag] % first record of year +% record 96: [FortranTag] vegopacity [FortranTag] +% +% header: [FortranTag] Year Month Day Hour Minute Second ... % start of 8-day avg period (float) +% Year Month Day Hour Minute Second ... % end of 8-day avg period (float) +% N_data_vegopacity ... % number of tiles (float) +% 1 ... (float) +% [FortranTag] +% +% The [FortranTag] is an integer that indicates how many bytes of data are stored in the +% corresponding Fortran sequential binary record. Integers and reals are stored in +% simple (4-byte) precision. +% Odd ("header") records: FortranTag = 14 * 4 +% Even ("vegopactiy") records: FortranTag = N_tile * 4 +% +% N_tile is *required* as input because some vegopacity files that were interpolated to cube-sphere +% have the wrong N_tile info in their header lines. +% +% - reichle, 29 Sep 2025 +% +% ------------------------------------------------------------------ + +MAPL_UNDEF = 1.e15; + +machfmt = 'l'; % little-endian, GEOSldas + +int_precision = 'int32'; % precision of fortran tag and integer data +float_precision = 'float32'; % precision of real data + +disp(['read_vegopacity.m: reading from ', fname]) + +ifp = fopen( fname, 'r', machfmt ); + +N_head = 14; +N_time = 48; + +for ii=1:N_time + + disp(['reading avg period ', num2str(ii)]) + + % odd records ("header") + + fortran_tag = fread( ifp, 1, int_precision ); if fortran_tag~=N_head*4, error('wrong fortran_tag A'), end + + tmp = fread( ifp, N_head, float_precision ); + + fortran_tag = fread( ifp, 1, int_precision ); if fortran_tag~=N_head*4, error('wrong fortran_tag B'), end + + % populate avg_period + + avg_period(ii,:) = tmp(1:12); + + % verify N_tile + + N_tile_tmp = tmp(13); + + if N_tile_tmp~=N_tile, disp(['WARNING: wrong N_tile in header lines: ', num2str(N_tile_tmp)]), end + + + % even records ("vegopacity") + + 
fortran_tag = fread( ifp, 1, int_precision ); if fortran_tag~=N_tile*4, disp(['WARNING: wrong N_tile in vegopacity file ', num2str(fortran_tag/4)]), end + + tmp = fread( ifp, N_tile, float_precision ); + + fortran_tag = fread( ifp, 1, int_precision ); if fortran_tag~=N_tile*4, disp(['WARNING: wrong N_tile in vegopacity file ', num2str(fortran_tag/4)]), end + + % populate veg_opacity + + veg_opacity(ii,:) = tmp; + +end + +fclose(ifp); + +% change no-data-values to NaN + +veg_opacity( veg_opacity>MAPL_UNDEF*0.9999 ) = NaN; + + +disp(['done reading ', fname]) + +% =========== EOF =========================================== + diff --git a/GEOSmetforce_GridComp/GEOS_MetforceGridComp.F90 b/GEOSmetforce_GridComp/GEOS_MetforceGridComp.F90 index 308735c7..2dc99129 100644 --- a/GEOSmetforce_GridComp/GEOS_MetforceGridComp.F90 +++ b/GEOSmetforce_GridComp/GEOS_MetforceGridComp.F90 @@ -608,7 +608,7 @@ subroutine Initialize(gc, import, export, clock, rc) integer :: local_nt, k, NUM_ENSEMBLE, i1, i2, j1, j2 integer :: ForceDtStep type(met_force_type) :: mf_nodata - logical :: MERRA_file_specs, ensemble_forcing + logical :: MERRA_file_specs, S2S3_file_specs, ensemble_forcing logical :: backward_looking_fluxes real, pointer :: TileLats(:) real, pointer :: TileLons(:) @@ -768,6 +768,7 @@ subroutine Initialize(gc, import, export, clock, rc) internal%mf%hinterp, & AEROSOL_DEPOSITION, & MERRA_file_specs, & + S2S3_file_specs, & backward_looking_fluxes, & internal%mf%DataNxt, & .true. & ! init @@ -776,7 +777,7 @@ subroutine Initialize(gc, import, export, clock, rc) if (backward_looking_fluxes) & call LDAS_move_new_force_to_old( & - MERRA_file_specs, AEROSOL_DEPOSITION, & + MERRA_file_specs, S2S3_file_specs, AEROSOL_DEPOSITION, & internal%mf%DataNxt, internal%mf%DataPrv ) ! 
Turn timer off @@ -851,7 +852,7 @@ subroutine Run(gc, import, export, clock, rc) type(met_force_type), pointer, contiguous :: DataTmp(:)=>null() type(met_force_type) :: mf_nodata - logical :: MERRA_file_specs + logical :: MERRA_file_specs, S2S3_file_specs logical :: backward_looking_fluxes integer :: AEROSOL_DEPOSITION ! Export pointers @@ -995,6 +996,7 @@ subroutine Run(gc, import, export, clock, rc) internal%mf%hinterp, & AEROSOL_DEPOSITION, & MERRA_file_specs, & + S2S3_file_specs, & backward_looking_fluxes, & internal%mf%DataNxt, & .false. & ! init @@ -1003,7 +1005,7 @@ subroutine Run(gc, import, export, clock, rc) if (backward_looking_fluxes) & call LDAS_move_new_force_to_old( & - MERRA_file_specs, AEROSOL_DEPOSITION, & + MERRA_file_specs, S2S3_file_specs, AEROSOL_DEPOSITION, & internal%mf%DataNxt, internal%mf%DataPrv ) ! -compute-average-zenith-angle-over-daylight-part-of-forcing-interval- diff --git a/GEOSmetforce_GridComp/LDAS_Forcing.F90 b/GEOSmetforce_GridComp/LDAS_Forcing.F90 index 607dbcf0..b0b201da 100644 --- a/GEOSmetforce_GridComp/LDAS_Forcing.F90 +++ b/GEOSmetforce_GridComp/LDAS_Forcing.F90 @@ -35,6 +35,7 @@ module LDAS_ForceMod use LDAS_DateTimeMod, ONLY: & date_time_type, & augment_date_time, & + datetime_eq_refdatetime, & datetime_lt_refdatetime, & datetime_le_refdatetime, & is_leap_year, & @@ -102,7 +103,8 @@ module LDAS_ForceMod subroutine get_forcing( date_time, force_dtstep, met_path, met_tag, & N_catd, tile_coord, MET_HINTERP, AEROSOL_DEPOSITION, & - MERRA_file_specs, bkwd_looking_fluxes, met_force_obs_tile_new, & + MERRA_file_specs, S2S3_file_specs, & + bkwd_looking_fluxes, met_force_obs_tile_new, & init ) ! Read and check meteorological forcing data for the domain. @@ -165,7 +167,7 @@ subroutine get_forcing( date_time, force_dtstep, met_path, met_tag, & ! 
intent out: - logical, intent(out) :: MERRA_file_specs + logical, intent(out) :: MERRA_file_specs, S2S3_file_specs logical, intent(out) :: bkwd_looking_fluxes type(met_force_type), dimension(N_catd), intent(out) :: & @@ -221,6 +223,7 @@ subroutine get_forcing( date_time, force_dtstep, met_path, met_tag, & ! initialize MERRA_file_specs = .false. + S2S3_file_specs = .false. bkwd_looking_fluxes = .false. @@ -322,7 +325,10 @@ subroutine get_forcing( date_time, force_dtstep, met_path, met_tag, & elseif (index(met_tag(1:7), 'GEOSs2s')/=0) then - call get_GEOSs2s( date_time_tmp, met_path, met_tag, N_catd, tile_coord, & + ! IMPORTANT: met_tag = GEOSs2s* for S2S v2 (note lower case "s2s") + ! met_tag = GEOSS2S3* for S2S v3 (note upper case "S2S") --> handled by get_GEOS() in "else" block + + call get_GEOSs2s_v2( date_time_tmp, met_path, met_tag, N_catd, tile_coord, & MET_HINTERP, met_force_obs_tile_new, nodata_forcing, PAR_available) else ! assume forcing from GEOS5 GCM ("DAS" or "MERRA") output @@ -333,7 +339,8 @@ subroutine get_forcing( date_time, force_dtstep, met_path, met_tag, & N_catd, tile_coord, MET_HINTERP, AEROSOL_DEPOSITION, & supported_option_MET_HINTERP, & supported_option_AEROSOL_DEPOSITION, & - met_force_obs_tile_new, nodata_forcing, PAR_available, MERRA_file_specs, & + met_force_obs_tile_new, & + nodata_forcing, PAR_available, MERRA_file_specs, S2S3_file_specs, & init ) ! subroutine get_GEOS() provided backward-looking fluxes. @@ -431,7 +438,7 @@ end subroutine get_forcing !************************************************************************************** - subroutine LDAS_move_new_force_to_old( MERRA_file_specs, AEROSOL_DEPOSITION, & + subroutine LDAS_move_new_force_to_old( MERRA_file_specs, S2S3_file_specs, AEROSOL_DEPOSITION, & new_force, old_force ) ! 
move *flux*-type forcing data from "new" to "old"; @@ -440,7 +447,7 @@ subroutine LDAS_move_new_force_to_old( MERRA_file_specs, AEROSOL_DEPOSITION, & implicit none - logical, intent(in) :: MERRA_file_specs + logical, intent(in) :: MERRA_file_specs, S2S3_file_specs integer, intent(in) :: AEROSOL_DEPOSITION type(met_force_type), dimension(:), intent(inout) :: new_force @@ -464,7 +471,7 @@ subroutine LDAS_move_new_force_to_old( MERRA_file_specs, AEROSOL_DEPOSITION, & ! [moved here from below, reichle, 28 Jan 2021] ! treat Wind as flux when forcing with MERRA - if (MERRA_file_specs) then + if (MERRA_file_specs .or. S2S3_file_specs) then old_force%Wind = new_force%Wind new_force%Wind = nodata_generic endif @@ -2643,7 +2650,7 @@ end subroutine get_Viviana_OK_precip ! ************************************************************************ - subroutine get_GEOSs2s(date_time, met_path, met_tag, N_catd, tile_coord, & + subroutine get_GEOSs2s_v2(date_time, met_path, met_tag, N_catd, tile_coord, & met_hinterp, met_force_new, nodata_forcing, PAR_available) ! read forcing derived from GEOS S2S output and map to tile space @@ -2758,7 +2765,7 @@ subroutine get_GEOSs2s(date_time, met_path, met_tag, N_catd, tile_coord, & logical :: FCST = .false. logical :: AODAS = .false. - character(len=*), parameter :: Iam = 'get_GEOSs2s' + character(len=*), parameter :: Iam = 'get_GEOSs2s_v2' character(len=400) :: err_msg ! -------------------------------------------------------------------- @@ -3046,15 +3053,16 @@ subroutine get_GEOSs2s(date_time, met_path, met_tag, N_catd, tile_coord, & deallocate(force_array) deallocate(GEOSgcm_name) - end subroutine get_GEOSs2s - + end subroutine get_GEOSs2s_v2 + ! 
************************************************************************* subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & N_catd, tile_coord, MET_HINTERP, AEROSOL_DEPOSITION, & supported_option_MET_HINTERP, & supported_option_AEROSOL_DEPOSITION, & - met_force_new, nodata_forcing, PAR_available, MERRA_file_specs, & + met_force_new, & + nodata_forcing, PAR_available, MERRA_file_specs, S2S3_file_specs, & init ) ! reichle, 5 March 2008 - adapted from get_GEOSgcm_gfio to work with DAS @@ -3154,7 +3162,8 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & logical, intent(out) :: PAR_available logical, intent(out) :: MERRA_file_specs ! original MERRA specs, not MERRA-2 - + logical, intent(out) :: S2S3_file_specs + ! optional: logical, intent(in), optional :: init @@ -3167,7 +3176,8 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & integer, parameter :: N_MERRA_vars = 13 integer, parameter :: N_MERRA2_vars = 12 ! same as for G5DAS (excl Aerosol vars) integer, parameter :: N_Aerosol_vars = 60 ! 
additional aerosol forcing vars for GOSWIM (w/ MERRA-2 only for now) - integer, parameter :: N_M21C_vars = 12 + integer, parameter :: N_M21C_vars = 12 + integer, parameter :: N_S2S3_vars = 12 integer, parameter :: N_MERRA2plusAerosol_vars = N_MERRA2_vars + N_Aerosol_vars @@ -3185,6 +3195,8 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & character(40), dimension(N_M21C_vars, N_defs_cols) :: M21CCOR_defs character(40), dimension(N_M21C_vars, N_defs_cols) :: M21CCSINT_defs character(40), dimension(N_M21C_vars, N_defs_cols) :: M21CCSCOR_defs + character(40), dimension(N_S2S3_vars, N_defs_cols) :: S2S3FCST_defs + character(40), dimension(N_S2S3_vars, N_defs_cols) :: S2S3AODAS_defs character(40), dimension(:,:), allocatable :: GEOSgcm_defs @@ -3200,6 +3212,9 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & character( 3) :: met_file_ext character( 3) :: precip_corr_file_ext + character( 5) :: S2S3_ens_num + character( 8) :: S2S3_init_YYYYMMDD + integer :: N_GEOSgcm_vars, N_lon_tmp, N_lat_tmp real :: this_lon, this_lat @@ -3217,8 +3232,10 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & logical :: minimize_shift, use_prec_corr, use_bkg, tmp_init - logical :: daily_met_files, daily_precipcorr_files - + logical :: daily_met_files, daily_precipcorr_files + + logical :: is_S2S3_fcst + integer :: nv_id, ierr, icount(3), istart(3), lonid, latid character(len=*), parameter :: Iam = 'get_GEOS' @@ -3255,7 +3272,6 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & G5DAS_defs(11,:)=[character(len=40):: 'QLML ','inst','inst1_2d_lfo_Nx','diag','S'] G5DAS_defs(12,:)=[character(len=40):: 'SPEEDLML','inst','inst1_2d_lfo_Nx','diag','S'] - ! ----------------------------------------------------------------------- ! ! define coupled land/atm DAS file specs (i.e., use bkg.lfo_* files) @@ -3593,23 +3609,59 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & ! - use "lfo" files ! 
reichle, 1 Dec 2009 - ! MERRA - ! collection - - MERRA_defs( 1,:)=[character(len=40):: 'SWGDN ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "rad" - MERRA_defs( 2,:)=[character(len=40):: 'LWGAB ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "rad" - MERRA_defs( 3,:)=[character(len=40):: 'PARDR ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "lnd" - MERRA_defs( 4,:)=[character(len=40):: 'PARDF ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "lnd" - MERRA_defs( 5,:)=[character(len=40):: 'PRECTOT','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "lnd" - MERRA_defs( 6,:)=[character(len=40):: 'PRECCON','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "flx" - MERRA_defs( 7,:)=[character(len=40):: 'PRECSNO','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "lnd" - MERRA_defs( 8,:)=[character(len=40):: 'PS ','tavg','tavg1_2d_lfo_Nx','diag','S'] ! "slv" - MERRA_defs( 9,:)=[character(len=40):: 'HLML ','tavg','tavg1_2d_lfo_Nx','diag','S'] ! "flx" - MERRA_defs(10,:)=[character(len=40):: 'TLML ','tavg','tavg1_2d_lfo_Nx','diag','S'] ! "flx" - MERRA_defs(11,:)=[character(len=40):: 'QLML ','tavg','tavg1_2d_lfo_Nx','diag','S'] ! "flx" - MERRA_defs(12,:)=[character(len=40):: 'ULML ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "flx" - MERRA_defs(13,:)=[character(len=40):: 'VLML ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "flx" + ! MERRA + ! collection + + MERRA_defs( 1,:)=[character(len=40):: 'SWGDN ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "rad" + MERRA_defs( 2,:)=[character(len=40):: 'LWGAB ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "rad" + MERRA_defs( 3,:)=[character(len=40):: 'PARDR ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "lnd" + MERRA_defs( 4,:)=[character(len=40):: 'PARDF ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "lnd" + MERRA_defs( 5,:)=[character(len=40):: 'PRECTOT','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "lnd" + MERRA_defs( 6,:)=[character(len=40):: 'PRECCON','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "flx" + MERRA_defs( 7,:)=[character(len=40):: 'PRECSNO','tavg','tavg1_2d_lfo_Nx','diag','F'] ! 
"lnd" + MERRA_defs( 8,:)=[character(len=40):: 'PS ','tavg','tavg1_2d_lfo_Nx','diag','S'] ! "slv" + MERRA_defs( 9,:)=[character(len=40):: 'HLML ','tavg','tavg1_2d_lfo_Nx','diag','S'] ! "flx" + MERRA_defs(10,:)=[character(len=40):: 'TLML ','tavg','tavg1_2d_lfo_Nx','diag','S'] ! "flx" + MERRA_defs(11,:)=[character(len=40):: 'QLML ','tavg','tavg1_2d_lfo_Nx','diag','S'] ! "flx" + MERRA_defs(12,:)=[character(len=40):: 'ULML ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "flx" + MERRA_defs(13,:)=[character(len=40):: 'VLML ','tavg','tavg1_2d_lfo_Nx','diag','F'] ! "flx" + + ! ----------------------------------------------------------------------- + ! + ! define GEOS S2S3 FCST specs + ! + ! use *only* 3-hourly "tavg" files b/c instantaneous output is not available + ! + + S2S3FCST_defs( 1,:)=[character(len=40):: 'SWGDN ','tavg','lfo_tavg_3hr_glo_L720x361','diag','F'] + S2S3FCST_defs( 2,:)=[character(len=40):: 'LWGAB ','tavg','lfo_tavg_3hr_glo_L720x361','diag','F'] + S2S3FCST_defs( 3,:)=[character(len=40):: 'dummy ','tavg','lfo_tavg_3hr_glo_L720x361','diag','F'] ! no PARDR for S2S3 + S2S3FCST_defs( 4,:)=[character(len=40):: 'dummy ','tavg','lfo_tavg_3hr_glo_L720x361','diag','F'] ! no PARDF for S2S3 + S2S3FCST_defs( 5,:)=[character(len=40):: 'PCU ','tavg','lfo_tavg_3hr_glo_L720x361','diag','F'] + S2S3FCST_defs( 6,:)=[character(len=40):: 'PLS ','tavg','lfo_tavg_3hr_glo_L720x361','diag','F'] + S2S3FCST_defs( 7,:)=[character(len=40):: 'SNO ','tavg','lfo_tavg_3hr_glo_L720x361','diag','F'] + S2S3FCST_defs( 8,:)=[character(len=40):: 'PS ','tavg','lfo_tavg_3hr_glo_L720x361','diag','S'] ! note "S" --> minimize_shift + S2S3FCST_defs( 9,:)=[character(len=40):: 'HLML ','tavg','lfo_tavg_3hr_glo_L720x361','diag','S'] ! note "S" --> minimize_shift + S2S3FCST_defs(10,:)=[character(len=40):: 'TA ','tavg','lfo_tavg_3hr_glo_L720x361','diag','S'] ! note "S" --> minimize_shift + S2S3FCST_defs(11,:)=[character(len=40):: 'QA ','tavg','lfo_tavg_3hr_glo_L720x361','diag','S'] ! 
note "S" --> minimize_shift + S2S3FCST_defs(12,:)=[character(len=40):: 'SPEED ','tavg','lfo_tavg_3hr_glo_L720x361','diag','F'] + ! ----------------------------------------------------------------------- + ! + ! define GEOS S2S3 AODAS specs; same as FCST except for corrected precip + + S2S3AODAS_defs = S2S3FCST_defs + + ! character(40): + ! 1 2 3 4 + ! 1234567890123456789012345678901234567890 + + + S2S3AODAS_defs( 5,1) = 'PCUCORR ' + S2S3AODAS_defs( 6,1) = 'PLSCORR ' + S2S3AODAS_defs( 7,1) = 'SNOCORR ' + + ! -------------------------------------------------------------------- ! ! preparations @@ -3627,7 +3679,7 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & tol = abs(nodata_forcing*nodata_tolfrac_generic) - ! all GEOS forcing datasets provide PAR (so far) + ! most GEOS forcing datasets provide PAR PAR_available = .true. @@ -3653,12 +3705,17 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & ! initialize to most likely values, overwrite below as needed MERRA_file_specs = .false. + S2S3_file_specs = .false. met_file_ext = 'nc4' daily_met_files = .false. - + precip_corr_file_ext = 'nc4' + + is_S2S3_fcst = .false. + + S2S3_init_YYYYMMDD = 'xxxxxxxx' ! character(8) if (met_tag(4:8)=='merra') then ! MERRA @@ -3773,7 +3830,43 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & call parse_MERRA2_met_tag( met_path, met_tag, date_time_bkwd, & met_path_bkwd, prec_path_bkwd, met_tag_bkwd, use_prec_corr ) - + + elseif (met_tag(1:8)=='GEOSS2S3') then ! GEOS S2S v3 + + N_GEOSgcm_vars = N_S2S3_vars + + PAR_available = .false. ! S2S3 does not have PAR + + S2S3_file_specs = .true. + + single_time_in_file = .false. ! FCST: monthly files, AODAS: daily files + + if (met_tag(9:12)=='FCST' ) then + + is_S2S3_fcst = .true. + + GEOSgcm_defs = S2S3FCST_defs + + call parse_S2S3FCST_met_tag( met_tag, S2S3_ens_num, S2S3_init_YYYYMMDD ) + + elseif (met_tag(9:13)=='AODAS') then + + daily_met_files = .true. 
+ + GEOSgcm_defs = S2S3AODAS_defs + + else + + call ldas_abort(LDAS_GENERIC_ERROR, Iam, 'unknown "GEOSS2S3[xxx]" met_tag') + + end if + + met_path_fwd = met_path + met_tag_fwd = met_tag + + met_path_bkwd = met_path + met_tag_bkwd = met_tag + else ! GEOS ADAS (FP, GEOSIT) call parse_G5DAS_met_tag( met_path, met_tag, date_time_inst, & @@ -3825,7 +3918,9 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & do GEOSgcm_var = 1,N_GEOSgcm_vars - ! open GEOS file (G5DAS or MERRA or MERRA-2) + if (trim(GEOSgcm_defs(GEOSgcm_var,1))=="dummy") cycle ! skip "dummy" variable (e.g., no PAR for S2S3) + + ! open GEOS file (G5DAS or MERRA or MERRA-2 or ...) ! ! Initial "tavg1_2d_*_Nx" files may not be available. In this case, ! use first available file. For G5DAS file specs, only "PS" is affected @@ -3840,6 +3935,25 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & ! the file at date_time_fwd). do j=1,2 + + if (is_S2S3_fcst .and. j==1) then + + ! special S2S3 FCST case: must skip j==1 at S2S3 FCST initialization time because (monthly) file + ! exists but does not contain data for "date_time_bkwd" + + read (S2S3_init_YYYYMMDD(1:4),'(i4.4)') date_time_tmp%year + read (S2S3_init_YYYYMMDD(5:6),'(i2.2)') date_time_tmp%month + read (S2S3_init_YYYYMMDD(7:8),'(i2.2)') date_time_tmp%day + + date_time_tmp%hour = 0 + date_time_tmp%min = 0 + date_time_tmp%sec = 0 + + call augment_date_time( -force_dtstep, date_time_tmp ) ! S2S3 fcst is initialized at S2S3_init_YYYYMMDD minus 3 hours + + if (datetime_eq_refdatetime( date_time_tmp, date_time_inst )) cycle ! skip to j==2, i.e., try "date_time_fwd" + + end if ! 
determine time stamp on file and corresponding met_path, prec_path, & met_tag @@ -3863,6 +3977,7 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & met_path_tmp = met_path_inst prec_path_tmp = prec_path_inst met_tag_tmp = met_tag_inst + else call ldas_abort(LDAS_GENERIC_ERROR, Iam, 'unknown GEOSgcm_defs(2)') @@ -3887,11 +4002,12 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & daily_met_files, met_path_tmp, met_tag_tmp, & GEOSgcm_defs(GEOSgcm_var,:), met_file_ext) - single_time_in_file = .not. daily_met_files ! MERRA-2 files are daily files + single_time_in_file = .not. (daily_met_files .or. is_S2S3_fcst) ! MERRA-2 files are daily files; S2S3FCST are monthly files end if - - if ( file_exists) then + + + if ( file_exists ) then exit ! exit j loop after successfully finding file @@ -3901,20 +4017,21 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & (trim(GEOSgcm_defs(GEOSgcm_var,2))=='tavg') .and. & (root_logit) ) then - if (.not. MERRA_file_specs) write (logunit,'(400A)') & + if ( .not. (MERRA_file_specs .or. S2S3_file_specs) ) & + write (logunit,'(400A)') & 'NOTE: Initialization. Data from tavg file are not used ' // & 'with lfo inst/tavg forcing, but dummy values must be ' // & 'read from some file for backward compatibility with ' // & 'MERRA forcing.' - write (logunit,*) 'try again with different file...' + write (logunit,*) 'try again with different file (or time)...' else call ldas_abort(LDAS_GENERIC_ERROR, Iam, 'error finding met forcing file') end if - + end do ! j=1,2 ! open file, extract coord info, prep horizontal interpolation info (if not done already) @@ -3961,7 +4078,6 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & ! ---------------------------------------------- ! ! 
read global gridded field of given variable - call LDAS_GetVar( fid, trim(GEOSgcm_defs(GEOSgcm_var,1)), & YYYYMMDD, HHMMSS, single_time_in_file, local_info, ptrShForce, rc) if (rc<0) then @@ -4051,7 +4167,7 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & HHMMSS = date_time_tmp%hour*10000+date_time_tmp%min*100 +date_time_tmp%sec ! read global gridded field of given variable - + call LDAS_GetVar( fid, trim(GEOSgcm_defs(GEOSgcm_var,1)), & YYYYMMDD, HHMMSS, .false., local_info, ptrShForce, rc) @@ -4112,7 +4228,7 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & end if ! if (fid>0) end if ! if (minimize_shift) .and. [...] - + end do ! do GEOSgcm_var = 1,N_GEOSgcm_vars call FileOpenedHash%free( GEOS_closefile,.false. ) @@ -4126,35 +4242,40 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & ! from GEOSgcm files: ! - ! G5DAS - ! M2INT MERRA - ! M2COR - ! - ! force_array(:, 1) = SWGDN SWGDN W/m2 (downward shortwave) - ! force_array(:, 2) = LWGAB LWGAB W/m2 ("absorbed" longwave) - ! force_array(:, 3) = PARDR PARDR W/m2 (direct PAR) - ! force_array(:, 4) = PARDF PARDF W/m2 (diffuse PAR) - ! force_array(:, 5) = PRECCU[*] PRECTOT kg/m2/s (*see below*) - ! force_array(:, 6) = PRECLS[*] PRECCON kg/m2/s (*see below*) - ! force_array(:, 7) = PRECSN[*] PRECSNO kg/m2/s (*see below*) - ! force_array(:, 8) = PS PS Pa (surface air pressure) - ! force_array(:, 9) = HLML HLML m (height of lowest model level "LML") - ! force_array(:,10) = TLML TLML K (air temperature at LML) - ! force_array(:,11) = QLML QLML kg/kg (air spec humidity at LML) - ! force_array(:,12) = SPEEDLML ULML m/s (wind speed/U-wind at LML) - ! force_array(:,13) = n/a VLML m/s ( V-wind at LML) + ! G5DAS + ! G5BKG + ! GEOSIT + ! M2* + ! M21C* MERRA S2S3* + ! + ! force_array(:, 1) = SWGDN SWGDN SWGDN W/m2 (downward shortwave) + ! force_array(:, 2) = LWGAB LWGAB LWGAB W/m2 ("absorbed" longwave) + ! force_array(:, 3) = PARDR PARDR n/a W/m2 (direct PAR) + ! 
force_array(:, 4) = PARDF PARDF n/a W/m2 (diffuse PAR) + ! force_array(:, 5) = PRECCU[*] PRECTOT PCU[*] kg/m2/s (*see below*) + ! force_array(:, 6) = PRECLS[*] PRECCON PLS[*] kg/m2/s (*see below*) + ! force_array(:, 7) = PRECSN[*] PRECSNO SNO[*] kg/m2/s (*see below*) + ! force_array(:, 8) = PS PS PS Pa (surface air pressure) + ! force_array(:, 9) = HLML HLML HLML m (height of lowest model level "LML") + ! force_array(:,10) = TLML TLML TA K (air temperature at LML) + ! force_array(:,11) = QLML QLML QA kg/kg (air spec humidity at LML) + ! force_array(:,12) = SPEEDLML ULML SPEED m/s (wind speed/U-wind at LML) + ! force_array(:,13) = n/a VLML n/a m/s ( V-wind at LML) ! ! PRECTOT kg/m2/s (total rain+snow) = PRECCU+PRECLS+PRECSNO ! PRECCON kg/m2/s (convective rain+snow) - ! PRECCU kg/m2/s (convective rain) - ! PRECLS kg/m2/s (large-scale rain) - ! PRECSNO kg/m2/s (total snow) + ! PRECCU, PCU kg/m2/s (convective rain) + ! PRECLS, PLS kg/m2/s (large-scale rain) + ! PRECSNO, SNO kg/m2/s (total snow) met_force_new%SWdown = force_array(:, 1) met_force_new%LWdown = force_array(:, 2) - met_force_new%PARdrct = force_array(:, 3) - met_force_new%PARdffs = force_array(:, 4) - + + if (PAR_available) then + met_force_new%PARdrct = force_array(:, 3) + met_force_new%PARdffs = force_array(:, 4) + end if + met_force_new%Psurf = force_array(:, 8) met_force_new%RefH = force_array(:, 9) @@ -4179,7 +4300,7 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & end if - else ! G5DAS file specs + else ! other file specs met_force_new(k)%Wind = force_array(k,12) @@ -4200,6 +4321,8 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & if (MERRA_file_specs) then + ! deal with MERRA precip components + if (force_array(k,5)>0) then met_force_new(k)%Snowf = force_array(k,7) @@ -4223,7 +4346,7 @@ subroutine get_GEOS( date_time, force_dtstep, met_path, met_tag, & else - ! G5DAS file specs + ! 
other file specs met_force_new(k)%Rainf = force_array(k,5)+force_array(k,6) met_force_new(k)%Rainf_C = force_array(k,5) @@ -4385,6 +4508,7 @@ subroutine LDAS_GetVar(fid, vname, yyyymmdd, hhmmss, single_time_in_file, local_ iistart(3)=timeIndex istart(4) =timeIndex endif + ! node root read and share call MAPL_SyncSharedMemory(rc=status) @@ -5512,6 +5636,77 @@ subroutine parse_G5DAS_met_tag( met_path_in, met_tag_in, date_time, & end subroutine parse_G5DAS_met_tag + ! **************************************************************** + + subroutine parse_S2S3FCST_met_tag( met_tag, S2S3_ens_num, S2S3_init_YYYYMMDD ) + + ! reichle, 25 Aug 2025 + + ! parse GEOSS2S3FCST "met_tag": extract S2S3 ens number and initialization YYYYMMDD + ! + ! met_tag = "GEOSS2S3FCST__ens{XX}__[YYYYMMDD]" + ! + ! where + ! + ! ens{XX} = S2S3 ensemble member ("ens1", "ens2", ..., "ens9", "ens10", ..., "ens15") + ! YYYYMMDD = S2S3 fcst initialization YYYYMMDD (fcst start time is YYYYMMDD minus 3 hours) + ! + ! --------------------------------------------------------------------------- + + implicit none + + character(*), intent(in) :: met_tag + + character(5), intent(out) :: S2S3_ens_num + character(8), intent(out) :: S2S3_init_YYYYMMDD + + ! local variables + + integer :: is + + character(len=len(met_tag)) :: tmpstring + + character(len=*), parameter :: Iam = 'parse_S2S3FCST_met_tag' + character(len=400) :: err_msg + + ! ---------------------------------------------------------- + + err_msg = '' ! initialize error message to blank string + + if (met_tag(1:14) /= 'GEOSS2S3FCST__') err_msg = 'met_tag must start with GEOSS2S3FCST__' + + ! cut off leading 'GEOSS2S3FCST__' + + tmpstring = met_tag(15:len_trim(met_tag)) + + ! split met_tag at double underscores + + is = index(tmpstring, '__') + + if (is>0) then + + S2S3_ens_num = tmpstring(1:is-1) + + S2S3_init_YYYYMMDD = tmpstring(is+2:len_trim(tmpstring)) + + if (is<5 .or. is>6 .or. 
(len_trim(tmpstring)-is-1/=8)) then + + err_msg = 'ens_num or YYYYMMDD in met_tag does not match expectation' + + end if + + else + + err_msg = 'cannot find second double-underscore in met_tag' + + end if + + ! abort if something went wrong + + if (len_trim(err_msg)>0) call ldas_abort(LDAS_GENERIC_ERROR, Iam, err_msg) + + end subroutine parse_S2S3FCST_met_tag + ! **************************************************************** subroutine get_GEOS_forcing_filename(fname_full,file_exists, date_time, daily_file, met_path, met_tag, & @@ -5539,6 +5734,8 @@ subroutine get_GEOS_forcing_filename(fname_full,file_exists, date_time, daily_fi character( 16) :: time_stamp character( 4) :: YYYY, HHMM, day_dir character( 2) :: MM, DD + character( 8) :: S2S3_init_YYYYMMDD ! S2S3 fcst initialization YYYYMMDD (fcst start time is YYYYMMDD minus 3 hours) + character( 5) :: S2S3_ens_num ! S2S3 fcst ensemble member, e.g. "ens1", "ens12" integer :: tmpind, tmpindend @@ -5563,7 +5760,7 @@ subroutine get_GEOS_forcing_filename(fname_full,file_exists, date_time, daily_fi if (daily_file) then time_stamp(1:8) = YYYY // MM // DD - + elseif ( & index(met_tag,'GEOSIT') > 0 .or. index(met_tag,'geosit') > 0 .or. & index(met_tag,'M21C' ) > 0 .or. index(met_tag,'m21c' ) > 0 & @@ -5598,9 +5795,15 @@ subroutine get_GEOS_forcing_filename(fname_full,file_exists, date_time, daily_fi day_dir = 'D' // DD // '/' + elseif (met_tag(1:12) == 'GEOSS2S3FCST') then + + call parse_S2S3FCST_met_tag( met_tag, S2S3_ens_num, S2S3_init_YYYYMMDD ) + + fname = S2S3_init_YYYYMMDD // '/' // trim(S2S3_ens_num) // '/GEOSS2S3.' // YYYY // MM // '.nc4' + else - ! GEOS FP with experiment-specific file names and MERRA-2, e.g., + ! GEOS FP with experiment-specific file names, MERRA-2, etc, e.g., ! ! f525_p5_fp.inst1_2d_lfo_Nx.20200507_0000z.nc4 ! MERRA2_400.inst1_2d_lfo_Nx.20200507.nc4