diff --git a/configs/components/jsbach/jsbach.datasets.yaml b/configs/components/jsbach/jsbach.datasets.yaml index 19dc13ba3..240650f83 100644 --- a/configs/components/jsbach/jsbach.datasets.yaml +++ b/configs/components/jsbach/jsbach.datasets.yaml @@ -48,8 +48,10 @@ forcing_sources: Ndepo_ssp: # These files are 12-monthly; see README.md in Ndepo/ directory # NOTE: ssp370 might be missing in this directory + "${input_dir_couple}/${Ndepo_path}1850.nc": + to: 1850 "${input_dir_couple}/${Ndepo_path}@YEAR@.nc": - from: 1850 + from: 1851 to: 2099 "${input_dir_couple}/${Ndepo_path}2099.nc": from: 2100 diff --git a/configs/components/jsbach/jsbach.yaml b/configs/components/jsbach/jsbach.yaml index 1e2a043e0..6b795462c 100644 --- a/configs/components/jsbach/jsbach.yaml +++ b/configs/components/jsbach/jsbach.yaml @@ -352,6 +352,7 @@ choose_scenario_is_ssp: False: ssp_num: null Ndepo_sufix: "" + cover_fract_cmip6: cover_fract_hist_scenario choose_echam.with_wiso: True: diff --git a/configs/components/nemo/nemo.yaml b/configs/components/nemo/nemo.yaml index b3364ad33..51abb70a3 100644 --- a/configs/components/nemo/nemo.yaml +++ b/configs/components/nemo/nemo.yaml @@ -14,6 +14,8 @@ include_models: # clean_command: cp cfg.inc ../cfg.txt; cd ../ ./makenemo -n ${nemo.version} -m ${archfile} clean; + rm -r ${nemo.version}/BLD; + rm -r ../TOOLS/COMPILE/arch_nemo.fcm; cd .. comp_command: export NEMO_TOPLEVEL=${model_dir}/../../ ; cp cfg.inc ../cfg.txt ; cd ../ ; @@ -22,6 +24,13 @@ comp_command: export NEMO_TOPLEVEL=${model_dir}/../../ ; cp cfg.inc ../cfg.txt ; cd .. 
archfile: ESMTOOLS_generic_oasis_intel +choose_computer.name: + glogin: + choose_computer.compiler_mpi: + gcc11_ompi416: + archfile: ESMTOOLS_generic_oasis_GNU + + destination: ${version} install_bins: BLD/bin/oceanx @@ -47,19 +56,42 @@ file_movements: all_directions: move runoff_method: "old" +# +# workaround for limitations in the environment_changes functionality +# see discussion at https://github.com/esm-tools/esm_tools/discussions/912 +# +#hlrn_compiler_mpi: intel2019_impi2019 +#hlrn_iolibraries: geomar_libs +#environment_changes: +# choose_computer.name: +# glogin: +# compiler_mpi: ${nemo.hlrn_compiler_mpi} +# iolibraries: ${nemo.hlrn_iolibraries} +# blogin: +# compiler_mpi: ${nemo.hlrn_compiler_mpi} +# iolibraries: ${nemo.hlrn_iolibraries} available_versions: - ORCA05_LIM2_KCM_AOW - ORCA05_LIM2_KCM_AOW_autotools - ORCA05_LIM2_KCM_AOW_FS +- ORCA05_LIM2_FOCI_AGRIF - ORCA05_LIM2_FOCI_AGRIF_AOW -- ORCA05_LIM2_KCM_AGRIF_OASISMCT4 +- ORCA05_LIM2_KCM_AGRIF_OASISMCT4 +- ORCA05_LIM2_FOCI_MOPS_OASISMCT4 +- ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4 - ORCA05_LIM2_KCM_AOW_FS_OASISMCT4 -- ORCA12_LIM2_KCM_AOW_FS_OASISMCT4 +- ORCA05_LIM2_KCM_AOW_OASISMCT4 +- ORCA05_LIM2_KCM_AOW_FS_OASISMCT5 - ORCA05_LIM2_NEMO_JRA55_test - GYRE_XIOS - GYRE_PISCES - 'ORCA05.z75.ICE.JRA' +- eORCA025_Z75_SI3_JRA55 +- eORCA025_Z75_SI3_COUPLED +- ORCA05_Z46_SI3_COUPLED +- eORCA05_Z75_SI3_COUPLED +- ORCA05_SI3_COUPLED_AGRIF choose_version: @@ -72,21 +104,19 @@ choose_version: GYRE_PISCES: - # this should work but it does not if you run - # the setting below are ignored - # if set on the root level of this file, and infinite loop - # occurs if esm_master comp-foci-default/nemo is executed - # the enviornment_changes thing seems to be a mess, - # see also comment in foci.yaml section echam: - # manually set compiler_mpi in glogin.yaml / blogin.yaml for now - #environment_changes: -# choose_computer.name: -# glogin: -# compiler_mpi: intel2019_impi2019_nemo4 -# iolibraries: geomar_libs -# blogin: -# 
compiler_mpi: intel2019_impi2019_nemo4 -# iolibraries: geomar_libs + # workaround for limitations in the environment_changes functionality + # see discussion at https://github.com/esm-tools/esm_tools/discussions/912 + #hlrn_compiler_mpi: intel2019_impi2019_nemo4 + #hlrn_iolibraries: geomar_libs + environment_changes: + choose_computer.name: + blogin: + add_module_actions: + - "load gcc/9.3.0" + glogin: + add_module_actions: + - "load gcc/9.3.0" + requires: - xios-trunk - nemobasemodel-4.2.x @@ -117,29 +147,322 @@ choose_version: generation: "4.2" resolution: "R4" + + eORCA025_Z75_SI3_JRA55: + # Uncoupled eORCA025 set up with SI3 sea ice model + # developed and maintained by Markus + # basis for our coupled setups with NEMO4.2 + + # workaround for limitations in the environment_changes functionality + # see discussion at https://github.com/esm-tools/esm_tools/discussions/912 + #hlrn_compiler_mpi: intel2019_impi2019_nemo4 + #hlrn_iolibraries: geomar_libs + environment_changes: + choose_computer.name: + blogin: + add_module_actions: + - "load gcc/9.3.0" + glogin: + add_module_actions: + - "load gcc/9.3.0" + + requires: + - xios-trunk + - nemobasemodel-4.2.0 + + branch: ${nemo.version} + reference_expid: 'K000.spinup.eos80' + clone_destination: nemo-${nemo.version}/cfgs/${nemo.version} + git-repository: https://git.geomar.de/ORCA/e025/eorca025.z75.si3.git + + # TODO: include arch repo as separate component + comp_command: export NEMO_TOPLEVEL=${model_dir} ; export XIOS_TOPLEVEL=${model_dir}/../xios; + test -d arch/GEOMAR || git clone https://git.geomar.de/foci/src/nemo_arch.git arch/GEOMAR; + cp cfgs/${nemo.version}/work_cfgs.inc cfgs/ref_cfgs.txt; + ./makenemo -n ${version} -m ${archfile} -r ${version} -j 24 ; + cp -p cfgs/${version}/BLD/bin/nemo.exe cfgs/${version}/BLD/bin/oceanx + clean_command: ./makenemo -n ${nemo.version} -m ${archfile} -r ${nemo.version} clean + archfile: ESMTOOLS_generic_intel + destination: nemo-${nemo.version} + install_bins: 
cfgs/${nemo.version}/BLD/bin/oceanx + + namelist_dir: ${nemo.model_dir}/cfgs/${nemo.version}/${reference_expid}/ + + generation: "4.2" + resolution: eORCA025 + use_tracer: false + leapyear: false + free_surface: nonlinear + ln_tsd_tradmp: true + # this sets nn_fsbc + coupling_freq_in_steps: 1 + + input_dir: ${pool_dir}/NEMO4_${resolution}/input + # forcing for uncoupled setups + jra55_forcing_dir: ${pool_dir}/JRA55-do_drowned + jra55_runoff_dir: ${pool_dir}/eORCA025-nemo4 + + # override hardcoded path to runoff forcing + add_namelist_changes: + namelist_cfg: + namsbc_blk: + cn_dir: "./" + + add_input_sources: + namelist_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ref + namelist_ice_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ice_ref + add_input_files: + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + + # NEMO eORCA025 4.2.x input files + coordinates_eORCA025: coordinates_eORCA025 + bfr_coef_eORCA025: bfr_coef_eORCA025 + subbasins_eORCA025: subbasins_eORCA025 + domain_cfg_eORCA025: domain_cfg_eORCA025 + # this file still needs to be generated by Markus or us + #domain_cfg_eORCA025: domain_cfg_eORCA025_CaspianSea + # we probably never need this one + domain_cfg_UKmasks: domain_cfg_UKmasks + + ghflux_v2.0: ghflux_v2.0 + reshape_ghflux2_eORCA025: reshape_ghflux2_eORCA025 + reshape_jra55do_eORCA025_bicub: reshape_jra55do_eORCA025_bicub + reshape_jra55do_eORCA025_bilin: reshape_jra55do_eORCA025_bilin + + # inital data + data_tem_eORCA025: data_tem_eORCA025 + data_sal_eORCA025: data_sal_eORCA025 + runoff_eORCA025: runoff_eORCA025 + seaice_eORCA025: seaice_eORCA025 + + input_in_work: + # NEMO eORCA025 4.2.x input files + domain_cfg_eORCA025: domain_cfg.nc + coordinates_orca05_nemo4: coordinates.nc + bfr_coef_eORCA025: bfr_coef.nc + subbasins_eORCA025: subbasins.nc + + # inital data + data_tem_eORCA025: data_tem.nc + data_sal_eORCA025: data_sal.nc + ghflux_v2.0: geothermal_heating.nc + + # AGRIF files + # Note: You can initialise T/S from parent + # so no 
1_data_tem etc is needed. + 1_domain_cfg: 1_domain_cfg.nc + 1_subbasins: 1_subbasins.nc + + # surface forcing for uncoupled NEMO4 + forcing_files: + # forcing data + sn_wndi: sn_wndi + sn_wndj: sn_wndj + sn_qsr: sn_qsr + sn_qlw: sn_qlw + sn_tair: sn_tair + sn_humi: sn_humi + sn_prec: sn_prec + sn_snow: sn_snow + sn_slp: sn_slp + #sn_rnf: sn_rnf + + # only required if we need the link name does not match the source file name + forcing_in_work: + sn_wndi: "uas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc" + sn_wndj: "vas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc" + sn_qsr: "rsds-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc" + sn_qlw: "rlds-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc" + sn_tair: "tas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc" + sn_humi: "huss-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc" + sn_prec: "prra_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc" + sn_snow: "prsn_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc" + sn_slp: "psl_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc" + #sn_rnf: "sorunoff_JRA55-do-1-4-0_gr_orca05_y@YEAR@.nc" + + forcing_sources: + # JRA55-do drowned forcing + sn_wndi: + "${jra55_forcing_dir}/uas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 2019 + sn_wndj: + "${jra55_forcing_dir}/vas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 2019 + sn_qsr: + "${jra55_forcing_dir}/rsds-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 2019 + sn_qlw: + "${jra55_forcing_dir}/rlds-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 2019 + sn_tair: + 
"${jra55_forcing_dir}/tas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 2019 + sn_humi: + "${jra55_forcing_dir}/huss-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 2019 + sn_prec: + "${jra55_forcing_dir}/prra_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 2019 + sn_snow: + "${jra55_forcing_dir}/prsn_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 2019 + sn_slp: + "${jra55_forcing_dir}/psl_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 2019 + sn_rnf: + "${jra55_runoff_dir}/sorunoff_JRA55-do-1-5-0_gr_eorca025_y@YEAR@.nc": + from: 1958 + to: 2019 + + 'eORCA025_Z75_SI3_COUPLED': + # coupled eORCA025 set up with SI3 sea ice model + + # modifications to compile time and runtime environment changes + # are set in components/setups/focioifs.yaml + + requires: + - nemobasemodel-4.2.0 + + branch: ${nemo.version} + reference_expid: 'FOCIOIFS' + clone_destination: nemo-${nemo.version}/cfgs/${nemo.version} + git-repository: https://git.geomar.de/ORCA/e025/eorca025.z75.si3.git + + # TODO: include arch repo as separate component + comp_command: export NEMO_TOPLEVEL=${model_dir} ; export XIOS_TOPLEVEL=${model_dir}/../xios; + test -d arch/GEOMAR || git clone https://git.geomar.de/foci/src/nemo_arch.git arch/GEOMAR; + cp cfgs/${nemo.version}/work_cfgs.inc cfgs/ref_cfgs.txt; + ./makenemo -n ${version} -m ${archfile} -r ${version} -j 24 ; + cp -p cfgs/${version}/BLD/bin/nemo.exe cfgs/${version}/BLD/bin/oceanx + clean_command: ./makenemo -n ${nemo.version} -m ${archfile} -r ${nemo.version} clean + archfile: ESMTOOLS_generic_oasis_intel + destination: nemo-${nemo.version} + install_bins: cfgs/${nemo.version}/BLD/bin/oceanx + + namelist_dir: ${nemo.model_dir}/cfgs/${nemo.version}/${reference_expid}/ + + generation: "4.2" + resolution: eORCA025 + use_tracer: false + leapyear: 
false + free_surface: nonlinear + + # coupling fields differ for NEMO 3.6 and 4.2 + opat_fields: [O_AlbIce, OIceFrc, O_SSTSST, O_TepIce, OIceTck, OSnwTck, O_OCurx1, O_OCury1, O_OTaux1, O_OTauy1, O_ITaux1, O_ITauy1] + + input_dir: ${pool_dir}/NEMO4_${resolution}/input + + # override hardcoded path to runoff forcing + add_namelist_changes: + namelist_cfg: + namsbc_blk: + cn_dir: "./" + #namrun: + # nn_write: 3 # hourly output (no xios used) + namsbc: + ln_blk: false + ln_cpl: true + ln_ssr: false + ln_rnf: false + ln_traqsr: false + ln_apr_dyn: false + namsbc_cpl: + sn_snd_temp: ['oce and ice', 'no', '', '', ''] + sn_snd_alb: ['ice', 'no', '', '', ''] + sn_snd_thick: ['ice and snow', 'no', '', '', ''] + sn_snd_crt: ['oce only', 'no', 'spherical', 'eastward-northward', 'T'] + sn_snd_co2: ['none', 'no', '', '', ''] + sn_snd_cond: ['none', 'no', '', '', ''] + sn_snd_mpnd: ['none', 'no', '', '', ''] + sn_snd_sstfrz: ['none','no','','',''] + sn_snd_wlev: ['none','no','','',''] + sn_snd_ttilyr: ['none','no','','',''] + sn_rcv_w10m: ['none', 'no', '', '', ''] + sn_rcv_taumod: ['none', 'no', '', '', ''] + sn_rcv_tau: ['oce and ice', 'no', 'spherical', 'eastward-northward', 'T'] + sn_rcv_dqnsdt: ['coupled', 'no', '', '', ''] + sn_rcv_qsr: ['conservative', 'no', '', '', ''] + sn_rcv_qns: ['conservative', 'no', '', '', ''] + sn_rcv_emp: ['conservative', 'no', '', '', ''] + sn_rcv_rnf: ['coupled', 'no', '', '', ''] + sn_rcv_cal: ['coupled', 'no', '', '', ''] + sn_rcv_co2: ['none', 'no', '', '', ''] + # namtra_ldf: + # rn_aht_0: 600 + # rn_aeiv_scale: 0.5 + # namdyn_ldf: + # rn_cmsmag_2: 4 + # namsbc_rnf: + # rn_rfact: 0.984 + + add_input_sources: + namelist_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ref + namelist_ice_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ice_ref + + add_input_files: + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + # NEMO eORCA025 4.2.x input files + coordinates_eORCA025: coordinates_eORCA025 + bfr_coef_eORCA025: bfr_coef_eORCA025 + 
subbasins_eORCA025: subbasins_eORCA025 + domain_cfg_eORCA025: domain_cfg_eORCA025 + # this file still needs to be generated by Markus or us + #domain_cfg_eORCA025: domain_cfg_eORCA025_CaspianSea + # we probably never need this one + domain_cfg_UKmasks: domain_cfg_UKmasks + + ghflux_v2.0: ghflux_v2.0 + reshape_ghflux2_eORCA025: reshape_ghflux2_eORCA025 + + # inital data + data_tem_eORCA025: data_tem_eORCA025 + data_sal_eORCA025: data_sal_eORCA025 + runoff_eORCA025: runoff_eORCA025 + seaice_eORCA025: seaice_eORCA025 + + input_in_work: + # NEMO eORCA025 4.2.x input files + domain_cfg_eORCA025: domain_cfg.nc + coordinates_orca05_nemo4: coordinates.nc + bfr_coef_eORCA025: bfr_coef.nc + subbasins_eORCA025: subbasins.nc + + # inital data + data_tem_eORCA025: data_tem.nc + data_sal_eORCA025: data_sal.nc + ghflux_v2.0: geothermal_heating.nc + 'ORCA05.z75.ICE.JRA': # uncoupled test setup from Markus # will be renamed once the name of the config is available # from Markus Scheinert # version: 'ORCA05.z75.ICE.JRA-KMST001' - # this should work but it does not if you run - # the setting below are ignored - # if set on the root level of this file, and infinite loop - # occurs if esm_master comp-foci-default/nemo is executed - # the enviornment_changes thing seems to be a mess, - # see also comment in foci.yaml section echam: - # manually set compiler_mpi in glogin.yaml / blogin.yaml for now - #environment_changes: -# choose_computer.name: -# glogin: -# compiler_mpi: intel2019_impi2019_nemo4 -# iolibraries: geomar_libs -# blogin: -# compiler_mpi: intel2019_impi2019_nemo4 -# iolibraries: geomar_libs - # TODO: add NEMO config once available from Markus + # workaround for limitations in the environment_changes functionality + # see discussion at https://github.com/esm-tools/esm_tools/discussions/912 + #hlrn_compiler_mpi: intel2019_impi2019_nemo4 + #hlrn_iolibraries: geomar_libs + environment_changes: + choose_computer.name: + blogin: + add_module_actions: + - "load gcc/9.3.0" + 
glogin: + add_module_actions: + - "load gcc/9.3.0" + requires: - xios-trunk - nemobasemodel-4.2.0 @@ -155,7 +478,7 @@ choose_version: cp cfgs/${nemo.version}/work_cfgs.inc cfgs/ref_cfgs.txt; ./makenemo -n ${version} -m ${archfile} -r ${version} -j 24 ; cp -p cfgs/${version}/BLD/bin/nemo.exe cfgs/${version}/BLD/bin/oceanx - clean_command: ./makenemo -n ${nemo.version} -m ${archfile} -r ${nemo.version} clean + clean_command: ./makenemo -n ${nemo.version} -m ${archfile} -r ${nemo.version} clean archfile: ESMTOOLS_generic_intel destination: nemo-${nemo.version} install_bins: cfgs/${nemo.version}/BLD/bin/oceanx @@ -165,7 +488,7 @@ choose_version: generation: "4.2" resolution: ORCA05 use_tracer: false - nn_leapy: 0 + leapyear: false free_surface: nonlinear input_dir: ${pool_dir}/NEMO4_${resolution}/input @@ -174,6 +497,10 @@ choose_version: namelist_cfg: namsbc_blk: cn_dir: "./" + namsbc_apr: + cn_dir: "./" + namsbc_rnf: + cn_dir: "./" add_input_sources: namelist_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ref @@ -198,22 +525,6 @@ choose_version: sss_orca05_nemo4: sss_orca05_nemo4 runoff_orca05_nemo4: runoff_orca05_nemo4 - # NEMO eORCA025 4.2.x input files - #bfr_coef: bfr_coef - #domain_cfg_ExclClosedSeas: domain_cfg_ExclClosedSeas - #domain_cfg_InclClosedSeas: - # TODO: this file still needs to be generated by Markus - #domain_cfg_CaspianSea: domain_cfg_CaspianSea - # we probably never need this one - #domain_cfg_UKmasks: domain_cfg_UKmasks - #ghflux_v2.0: ghflux_v2.0 - #reshape_ghflux2: reshape_ghflux2 - #reshape_jra55do_bicub: reshape_jra55do_bicub - #reshape_jra55do_bilin: reshape_jra55do_bilin - #subbasins_eORCA025: subbasins_eORCA025 - #sn_tem_woa13_omip_eORCA025: sn_tem_woa13_omip_eORCA025 - #sn_sal_woa13_omip_eORCA025: sn_sal_woa13_omip_eORCA025 - # TODO: add correct links in work dir if required input_in_work: # NEMO ORCA05 4.2.x input files @@ -295,26 +606,1002 @@ choose_version: # "${jra55_runoff_dir}/sorunoff_JRA55-do-1-4-0_gr_orca05_y@YEAR@.nc": # from: 
1980 # to: 1984 + + ORCA05_Z46_SI3_COUPLED: + # coupled ORCA05 set up with SI3 sea ice model + + # modifications to compile time and runtime environment changes + # are set in components/setups/focioifs.yaml + + requires: + - nemobasemodel-4.2.2 + branch: master + reference_expid: 'FOCIOIFS' + clone_destination: nemo-${nemo.version}/cfgs/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/orca05_z46_si3_coupled.git - ORCA05_LIM2_FOCI_AGRIF_AOW: + # TODO: include arch repo as separate component + comp_command: export NEMO_TOPLEVEL=${model_dir} ; export XIOS_TOPLEVEL=${model_dir}/../xios; + test -d arch/GEOMAR || git clone https://git.geomar.de/foci/src/nemo_arch.git arch/GEOMAR; + cp cfgs/${nemo.version}/work_cfgs.inc cfgs/ref_cfgs.txt; + ./makenemo -n ${version} -m ${archfile} -r ${version} -j 24 ; + cp -p cfgs/${version}/BLD/bin/nemo.exe cfgs/${version}/BLD/bin/oceanx + clean_command: ./makenemo -n ${nemo.version} -m ${archfile} -r ${nemo.version} clean + archfile: ESMTOOLS_generic_oasis_intel + destination: nemo-${nemo.version} + install_bins: cfgs/${nemo.version}/BLD/bin/oceanx + + namelist_dir: ${nemo.model_dir}/cfgs/${nemo.version}/${reference_expid}/ + + generation: "4.2" + resolution: ORCA05 + use_tracer: false + leapyear: true + free_surface: nonlinear + + input_dir: ${pool_dir}/NEMO4_${resolution}/input + + # override hardcoded path to runoff forcing + add_namelist_changes: + namelist_cfg: + namsbc_blk: + cn_dir: "./" + #namrun: + # nn_write: 3 # hourly output (no xios used) + namsbc: + ln_blk: false + ln_cpl: true + ln_ssr: false + ln_rnf: false + ln_traqsr: false + ln_apr_dyn: false + namsbc_cpl: + sn_snd_temp: ['oce and ice', 'no', '', '', ''] + sn_snd_alb: ['ice', 'no', '', '', ''] + sn_snd_thick: ['ice and snow', 'no', '', '', ''] + sn_snd_crt: ['oce only', 'no', 'spherical', 'eastward-northward', 'T'] + sn_snd_co2: ['none', 'no', '', '', ''] + sn_snd_cond: ['none', 'no', '', '', ''] + sn_snd_mpnd: ['none', 'no', '', '', ''] + 
sn_snd_sstfrz: ['none','no','','',''] + sn_snd_wlev: ['none','no','','',''] + sn_snd_ttilyr: ['none','no','','',''] + sn_rcv_w10m: ['none', 'no', '', '', ''] + sn_rcv_taumod: ['none', 'no', '', '', ''] + sn_rcv_tau: ['oce and ice', 'no', 'spherical', 'eastward-northward', 'T'] + sn_rcv_dqnsdt: ['coupled', 'no', '', '', ''] + sn_rcv_qsr: ['conservative', 'no', '', '', ''] + sn_rcv_qns: ['conservative', 'no', '', '', ''] + sn_rcv_emp: ['conservative', 'no', '', '', ''] + sn_rcv_rnf: ['coupled', 'no', '', '', ''] + sn_rcv_cal: ['coupled', 'no', '', '', ''] + sn_rcv_co2: ['none', 'no', '', '', ''] + nambbc: + # Unlike default, we use geothermal heating remapped to ORCA05 + # No need for a reshape file + sn_qgh: ['geothermal_heating.nc', -12.0, 'gh_flux', .false., .true., 'yearly', '', ''] + + # set diffusion to be 600 m2/s + # and visc to be -1,709 m4/s + # A = Ud * Ld / 2. Ud = 0.024 gives approx A=600 m2/s for Ld=50km. + # A = Uv * Lv / 12. Uv = 0.164 gives around A=-1,709 m4/s for Ld=50km + namtra_ldf: + rn_ud: 0.024 + namdyn_ldf: + rn_uv: 0.164 + + add_input_sources: + namelist_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ref + namelist_ice_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ice_ref + + add_input_files: + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + + # from Sebastian / Markus + bathy_meter_orca05_nemo4: bathy_meter_orca05_nemo4 + coordinates_orca05_nemo4: bathy_meter_orca05_nemo4 + bfr_coef_orca05_nemo4: bfr_coef_orca05_nemo4 + subbasins_orca05_nemo4: subbasins_orca05_nemo4 + domain_cfg_orca05_nemo4: domain_cfg_orca05_nemo4 + + # NEMO ORCA05 4.2.x input files + #coordinates_orca05_nemo4: coordinates_orca05_nemo4 + #bfr_coef_orca05_nemo4: bfr_coef_orca05_nemo4 + #subbasins_orca05_nemo4: subbasins_orca05_nemo4 + #domain_cfg_orca05_nemo4: domain_cfg_orca05_nemo4 + # we probably never need this one + #domain_cfg_UKmasks: domain_cfg_UKmasks + # ghflux remapped to ORCA05, no reshape file needed + ghflux_v2.0_orca05_nemo4: 
ghflux_v2.0_orca05_nemo4 + + # inital data + data_tem_orca05_nemo4: data_tem_orca05_nemo4 + data_sal_orca05_nemo4: data_sal_orca05_nemo4 + + # inital data + seaice_orca05_nemo4: seaice_orca05_nemo4 + + input_in_work: + domain_cfg_orca05_nemo4: domain_cfg.nc + coordinates_orca05_nemo4: coordinates.nc + bfr_coef_orca05_nemo4: bfr_coef.nc + subbasins_orca05_nemo4: subbasins.nc + + # inital data + data_tem_orca05_nemo4: data_tem.nc + data_sal_orca05_nemo4: data_sal.nc + ghflux_v2.0_orca05_nemo4: geothermal_heating.nc + + seaice_orca05_nemo4: seaice_c3.0_v19802004.0_ORCA05_r4.2.0.nc + + ORCA05_SI3_COUPLED_AGRIF: + # coupled ORCA05 with AGRIF set up with SI3 sea ice model + # + # This configuration is based on the ICE_AGRIF_CPL in cfgs in NEMO + # but the settings here are defaults for ORCA05 with 5x AGRIF. + # It should be possible to use this config for e.g. ORCA025 with 1/20 + # AGRIF as well, but one would have to modify time step (rn_Dt) + # diffusion etc. + # + # modifications to compile time and runtime environment changes + # are set in components/setups/focioifs.yaml + requires: - # seb-wahl: comment xios below if used with OIFS which also uses XIOS which causes XIOS - # to be cloned and compiled twice, need to file an issue - - xios-2.0_r982 - # TODO: test with newer version of XIOS - # - xios-2.5_r1910 - - nemobasemodel-3.6foci_agrif - branch: master + - nemobasemodel-4.2.2 + + branch: master + reference_expid: 'EXP00' + clone_destination: nemo-${nemo.version}/cfgs/ICE_AGRIF_CPL + git-repository: https://git.geomar.de/foci/src/nemo_config/ice_agrif_cpl.git + + # TODO: include arch repo as separate component + comp_command: export NEMO_TOPLEVEL=${model_dir} ; export XIOS_TOPLEVEL=${model_dir}/../xios; + test -d arch/GEOMAR || git clone https://git.geomar.de/foci/src/nemo_arch.git arch/GEOMAR; + cp cfgs/ICE_AGRIF_CPL/work_cfgs.inc cfgs/ref_cfgs.txt; + ./makenemo -n ${version} -m ${archfile} -r ICE_AGRIF_CPL -j 24 ; + cp -p cfgs/${version}/BLD/bin/nemo.exe 
cfgs/${version}/BLD/bin/oceanx + clean_command: ./makenemo -n ${nemo.version} -m ${archfile} -r ICE_AGRIF_CPL clean + archfile: ESMTOOLS_generic_oasis_intel + destination: nemo-${nemo.version} + install_bins: cfgs/${nemo.version}/BLD/bin/oceanx + + namelist_dir: ${nemo.model_dir}/cfgs/${nemo.version}/${reference_expid}/ + + generation: "4.2" + resolution: ORCA05 + use_tracer: false + use_si3_agrif: true + leapyear: true + free_surface: nonlinear + + input_dir: ${pool_dir}/NEMO4_${resolution}/input + + # We need to add the namelist files for AGRIF + add_config_files: + 1_namelist_cfg: 1_namelist_cfg + 1_namelist_ice_cfg: 1_namelist_ice_cfg + # uncomment if we would use top for biogeochemistry + #1_namelist_top_cfg: 1_namelist_top_cfg + + add_input_sources: + namelist_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ref + namelist_ice_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ice_ref + #namelist_top_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_top_ref + + # 1_namelist_ref is just a copy of namelist_ref + 1_namelist_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ref + 1_namelist_ice_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ice_ref + #1_namelist_top_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_top_ref + + # Ensure we take domain_cfg made with AGRIF in mind + domain_cfg_orca05_nemo4: ${agrif_dir}/domain_cfg.nc + + add_input_files: + # add reference namelists to the work dir + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + #namelist_top_ref: namelist_top_ref + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + #1_namelist_top_ref: 1_namelist_top_ref + + # from Sebastian / Markus + # Note: Some of these files may be different + # than those used for non-AGRIF ORCA05 since + # the nesting tools modify the bathymetry for the + # parent grid + bathy_meter_orca05_nemo4: bathy_meter_orca05_nemo4 + coordinates_orca05_nemo4: bathy_meter_orca05_nemo4 + bfr_coef_orca05_nemo4: bfr_coef_orca05_nemo4 + subbasins_orca05_nemo4: 
subbasins_orca05_nemo4 + domain_cfg_orca05_nemo4: domain_cfg_orca05_nemo4 + # ghflux remapped to ORCA05, no reshape file needed + ghflux_v2.0_orca05_nemo4: ghflux_v2.0_orca05_nemo4 + + # inital data + # Note: We will initialise the child grids + # from the parent data, so no need for + # 1_data_tem etc + data_tem_orca05_nemo4: data_tem_orca05_nemo4 + data_sal_orca05_nemo4: data_sal_orca05_nemo4 + + # inital data + seaice_orca05_nemo4: seaice_orca05_nemo4 + + # domain_cfg and AGRIF_FixedGrids.in for AGRIF + 1_domain_cfg: 1_domain_cfg + fixed_grids: fixed_grids + # if subbasins is present for the parent grid + # (to compute Atlantic, Pacific overturning etc) + # then 1_subbasins must also be present for child + # Mostly, this file should only contain zeros + # since AGRIF nests usually do not enclose an + # entire ocean, e.g. Pacific or Atlantic + 1_subbasins: 1_subbasins + + input_in_work: + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + #1_namelist_top_ref: 1_namelist_top_ref + + domain_cfg_orca05_nemo4: domain_cfg.nc + coordinates_orca05_nemo4: coordinates.nc + bfr_coef_orca05_nemo4: bfr_coef.nc + subbasins_orca05_nemo4: subbasins.nc + + 1_domain_cfg: 1_domain_cfg.nc + 1_subbasins: 1_subbasins.nc + + # inital data + data_tem_orca05_nemo4: data_tem.nc + data_sal_orca05_nemo4: data_sal.nc + ghflux_v2.0_orca05_nemo4: geothermal_heating.nc + seaice_orca05_nemo4: seaice_c3.0_v19802004.0_ORCA05_r4.2.0.nc + + # Add extra coupling fields to couple AGRIF to OpenIFS + add_coupling_fields: + "[[agr1_t_fields-->FIELD]]": + grid: agr1 + "[[agr1_c_fields-->FIELD]]": + grid: agr1 + "[[agr1_r_fields-->FIELD]]": + grid: agr1 + "[[agr1_rc_fields-->FIELD]]": + grid: agr1 + "[[agr2_t_fields-->FIELD]]": + grid: agr2 + + # Namelist changes for AGRIF coupled + add_namelist_changes: + + # override hardcoded path to runoff forcing + namelist_cfg: + namsbc_blk: + cn_dir: "./" + # use domain_cfg file to set the grid + namcfg: + ln_read_cfg: ".true. 
" + cn_domcfg: "domain_cfg.nc" + ln_closea: '.false.' + namsbc: + # turn off all sbc except coupling + ln_blk: false + ln_cpl: true + ln_ssr: false + ln_rnf: false + ln_traqsr: false + ln_apr_dyn: false + # turn on most coupling fields + namsbc_cpl: + sn_snd_temp: ['oce and ice', 'no', '', '', ''] + sn_snd_alb: ['ice', 'no', '', '', ''] + sn_snd_thick: ['ice and snow', 'no', '', '', ''] + sn_snd_crt: ['oce only', 'no', 'spherical', 'eastward-northward', 'T'] + sn_snd_co2: ['none', 'no', '', '', ''] + sn_snd_cond: ['none', 'no', '', '', ''] + sn_snd_mpnd: ['none', 'no', '', '', ''] + sn_snd_sstfrz: ['none','no','','',''] + sn_snd_wlev: ['none','no','','',''] + sn_snd_ttilyr: ['none','no','','',''] + sn_rcv_w10m: ['none', 'no', '', '', ''] + sn_rcv_taumod: ['none', 'no', '', '', ''] + sn_rcv_tau: ['oce and ice', 'no', 'spherical', 'eastward-northward', 'T'] + sn_rcv_dqnsdt: ['coupled', 'no', '', '', ''] + sn_rcv_qsr: ['conservative', 'no', '', '', ''] + sn_rcv_qns: ['conservative', 'no', '', '', ''] + sn_rcv_emp: ['conservative', 'no', '', '', ''] + sn_rcv_rnf: ['coupled', 'no', '', '', ''] + sn_rcv_cal: ['coupled', 'no', '', '', ''] + sn_rcv_co2: ['none', 'no', '', '', ''] + nambbc: + # Unlike default, we use geothermal heating remapped to ORCA05 + # No need for a reshape file + sn_qgh: ['geothermal_heating.nc', -12.0, 'gh_flux', .false., .true., 'yearly', '', ''] + + # This is set later in this file under choose_free_surface + #namdyn_hpg: + # ln_hpg_zco: '.false.' + # ln_hpg_zps: '.false.' + # ln_hpg_sco: '.true.' + + namlbc: + # no slip + rn_shlat: 0 + + # set diffusion to be 600 m2/s + # and visc to be -1,709 m4/s + # A = Ud * Ld / 2. Ud = 0.024 gives approx A=600 m2/s for Ld=50km. + # A = Uv * Lv / 12. Uv = 0.164 gives around A=-1,709 m4/s for Ld=50km + namtra_ldf: + # Laplacian, iso-neutral + ln_traldf_lap: '.true.' + ln_traldf_iso: '.true.' + ln_traldf_msc: '.true.' + # 2D varying, i.e. 
scaled by grid-cell size + nn_aht_ijk_t: 20 + rn_ud: 0.018 + rn_Ld: 200.e+3 + + namtra_eiv: + ln_ldfeiv: '.false.' + # 21 = Treguier et al. JPO 1997 formulation + nn_aei_ijk_t: 21 + # rn_ue and rn_le are irrelevant if nn_aei_ijk_t = 21 + rn_ue: 0.03 + rn_Le: 200.e+3 + ln_ldfeiv_dia: '.true.' + + namdyn_ldf: + # Bi-Laplacian, horizontal + ln_dynldf_blp: '.true.' + ln_dynldf_hor: '.true.' + # Constant + nn_ahm_ijk_t: 0 + rn_uv: 0.164 + rn_Lv: 10.e+3 + + namagrif: + ln_spc_dyn: '.true.' + # use default sponge diffusion for now + # Experiment with this later... + + 1_namelist_cfg: + namrun: + cn_exp: ${expid} + nn_it000: ${thisstep_nest} + nn_itend: ${newstep_nest} + nn_date0: ${initial_date!syear!smonth!sday} # ${ini_date} + cn_ocerst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_${parent_date!syear!smonth!sday}${global_tag} + cn_ocerst_indir: '${parent_restart_dir}/' + cn_ocerst_out: restart_${end_date_m1!syear!smonth!sday} + cn_ocerst_outdir: '${experiment_restart_out_dir}/' + nn_stock: ${newstep_nest} + nn_leapy: ${nn_leapy} + nn_rstctl: ${nn_rstctl} + ln_rstart: ${nemo.lresume} + ln_1st_euler: ${ln_1st_euler} + + namcfg: + ln_read_cfg: ".true. " + cn_domcfg: "domain_cfg.nc" + ln_closea: '.false.' + + namtsd: + ln_tsd_dmp: '.false.' + ln_tsd_init: '.false.' + + namdom: + rn_Dt: ${time_step_nest} + ln_meshmask: ${ln_meshmask} + + namlbc: + # free slip in the nest + rn_shlat: 2.0 + + namagrif: + ln_agrif_2way: '.true.' + # initialise AGRIF from parent T/S + ln_init_chfrpar: ${ln_init_chfrpar} + ln_vert_remap: '.false.' + # check bathymetry + ln_chk_bathy: '.true.' 
+ #rn_sponge_tra: 600 + #rn_sponge_dyn: 600 + + namsbc: + ln_blk: false + ln_cpl: true + ln_ssr: false + ln_rnf: false + ln_traqsr: false + ln_apr_dyn: false + nn_fsbc: 1 #${nest_refinement} + namsbc_cpl: + sn_snd_temp: ['oce and ice', 'no', '', '', ''] + sn_snd_alb: ['ice', 'no', '', '', ''] + sn_snd_thick: ['ice and snow', 'no', '', '', ''] + sn_snd_crt: ['oce only', 'no', 'spherical', 'eastward-northward', 'T'] + sn_snd_co2: ['none', 'no', '', '', ''] + sn_snd_cond: ['none', 'no', '', '', ''] + sn_snd_mpnd: ['none', 'no', '', '', ''] + sn_snd_sstfrz: ['none','no','','',''] + sn_snd_wlev: ['none','no','','',''] + sn_snd_ttilyr: ['none','no','','',''] + sn_rcv_w10m: ['none', 'no', '', '', ''] + sn_rcv_taumod: ['none', 'no', '', '', ''] + sn_rcv_tau: ['oce and ice', 'no', 'spherical', 'eastward-northward', 'T'] + sn_rcv_dqnsdt: ['coupled', 'no', '', '', ''] + sn_rcv_qsr: ['conservative', 'no', '', '', ''] + sn_rcv_qns: ['conservative', 'no', '', '', ''] + sn_rcv_emp: ['conservative', 'no', '', '', ''] + sn_rcv_rnf: ['coupled', 'no', '', '', ''] + sn_rcv_cal: ['coupled', 'no', '', '', ''] + sn_rcv_co2: ['none', 'no', '', '', ''] + + # Sang-Yeobs first test was with partial steps + namdyn_hpg: + ln_hpg_zco: '.false.' + ln_hpg_zps: '.false.' + ln_hpg_sco: '.true.' + + namtra_ldf: + # For now turn it all off + ln_traldf_OFF: '.true.' + # aht = 1/2 * rn_Ud * rn_Ld = 1/2 * 0.02 * 10000 = 100 m2/s + #ln_traldf_lap: '.true.' + #ln_traldf_iso: '.true.' + #nn_aht_ijk_t: 20 + #rn_Ud: 0.02 + #rn_Ld: 10000 + + namdyn_ldf: + # For now turn it all off + ln_dynldf_off: '.true.' + #ln_dynldf_blp: '.true.' + #ln_dynldf_hor: '.true.' + #nn_ahm_ijk_t: 32 + + + eORCA05_Z75_SI3_COUPLED: + # coupled eORCA05.L75, set up with SI3 sea ice model + # Used for FOCI-OpenIFS 4.1 + # + # The configuration is similar to ORCA05_Z46_SI3_COUPLED above + # but has the grid extended south (more j points, but same i points) + # and more vertical levels. 
+ # + # NOTE: The source code is the same as ORCA05_Z46_SI3_COUPLED + # so we use the same git repository. + # The main modifications in this config is the use of + # different input files (domain_cfg etc). + + requires: + - nemobasemodel-4.2.2 + + # We use the orca05_z46_si3_coupled git repo, but a different branch + branch: eORCA05_Z75_SI3_COUPLED + + reference_expid: 'FOCIOIFS' + + # The code is the same as for ORCA05_Z46 + # So we clone that repo and build a new config based on it + clone_destination: nemo-${nemo.version}/cfgs/${version} + + # Use the same code as for orca05_z46 + git-repository: https://git.geomar.de/foci/src/nemo_config/orca05_z46_si3_coupled.git + + # Use the ORCA05_Z46_SI3_COUPLED config + comp_command: export NEMO_TOPLEVEL=${model_dir} ; export XIOS_TOPLEVEL=${model_dir}/../xios; + test -d arch/GEOMAR || git clone https://git.geomar.de/foci/src/nemo_arch.git arch/GEOMAR; + cp cfgs/${nemo.version}/work_cfgs.inc cfgs/ref_cfgs.txt; + ./makenemo -n ${version} -m ${archfile} -r ${version} -j 24 ; + cp -p cfgs/${version}/BLD/bin/nemo.exe cfgs/${version}/BLD/bin/oceanx + clean_command: ./makenemo -n ${nemo.version} -m ${archfile} -r ${version} clean + archfile: ESMTOOLS_generic_oasis_intel + destination: nemo-${nemo.version} + install_bins: cfgs/${nemo.version}/BLD/bin/oceanx + + namelist_dir: ${nemo.model_dir}/cfgs/${nemo.version}/${reference_expid}/ + + generation: "4.2" + resolution: ORCA05 + use_tracer: false + leapyear: true + free_surface: nonlinear + + input_dir: ${pool_dir}/NEMO4_eORCA05/L75/ + + # override hardcoded path to runoff forcing + add_namelist_changes: + namelist_cfg: + namsbc_blk: + cn_dir: "./" + #namrun: + # nn_write: 3 # hourly output (no xios used) + namsbc: + ln_blk: false + ln_cpl: true + ln_ssr: false + ln_rnf: false + ln_traqsr: false + ln_apr_dyn: false + namsbc_cpl: + sn_snd_temp: ['oce and ice', 'no', '', '', ''] + sn_snd_alb: ['ice', 'no', '', '', ''] + sn_snd_thick: ['ice and snow', 'no', '', '', ''] + sn_snd_crt: 
['oce only', 'no', 'spherical', 'eastward-northward', 'T'] + sn_snd_co2: ['none', 'no', '', '', ''] + sn_snd_cond: ['none', 'no', '', '', ''] + sn_snd_mpnd: ['none', 'no', '', '', ''] + sn_snd_sstfrz: ['none','no','','',''] + sn_snd_wlev: ['none','no','','',''] + sn_snd_ttilyr: ['none','no','','',''] + sn_rcv_w10m: ['none', 'no', '', '', ''] + sn_rcv_taumod: ['none', 'no', '', '', ''] + sn_rcv_tau: ['oce and ice', 'no', 'spherical', 'eastward-northward', 'T'] + sn_rcv_dqnsdt: ['coupled', 'no', '', '', ''] + sn_rcv_qsr: ['conservative', 'no', '', '', ''] + sn_rcv_qns: ['conservative', 'no', '', '', ''] + sn_rcv_emp: ['conservative', 'no', '', '', ''] + sn_rcv_rnf: ['coupled', 'no', '', '', ''] + sn_rcv_cal: ['coupled', 'no', '', '', ''] + sn_rcv_co2: ['none', 'no', '', '', ''] + nambbc: + # set to constant flux since we dont have the file + nn_geoflx: 1 + + # # Unlike default, we use geothermal heating remapped to ORCA05 + # # No need for a reshape file + # sn_qgh: ['geothermal_heating.nc', -12.0, 'gh_flux', .false., .true., 'yearly', '', ''] + + # set diffusion to be 600 m2/s + # and visc to be -1,709 m4/s + # A = Ud * Ld / 2. Ud = 0.024 gives approx A=600 m2/s for Ld=50km. + # A = Uv * Lv / 12. 
Uv = 0.164 gives around A=-1,709 m4/s for Ld=50km + namtra_ldf: + rn_ud: 0.024 + namdyn_ldf: + rn_uv: 0.164 + + add_input_sources: + namelist_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ref + namelist_ice_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ice_ref + + add_input_files: + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + + # from Sebastian / Markus + #bathy_meter_orca05_nemo4: bathy_meter_orca05_nemo4 + #coordinates_orca05_nemo4: bathy_meter_orca05_nemo4 + bfr_coef_orca05_nemo4: bfr_coef_eorca05_nemo4 + subbasins_orca05_nemo4: subbasins_eorca05_nemo4 + domain_cfg_orca05_nemo4: domain_cfg_eorca05_nemo4 + + # NEMO ORCA05 4.2.x input files + #ghflux_v2.0_orca05_nemo4: ghflux_v2.0_eorca05_nemo4 + + # inital data + data_tem_orca05_nemo4: data_tem_eorca05_nemo4 + data_sal_orca05_nemo4: data_sal_eorca05_nemo4 + + # inital data + #seaice_orca05_nemo4: seaice_eorca05_nemo4 + + input_in_work: + domain_cfg_orca05_nemo4: domain_cfg.nc + #coordinates_orca05_nemo4: coordinates.nc + bfr_coef_orca05_nemo4: bfr_coef.nc + subbasins_orca05_nemo4: subbasins.nc + + # inital data + data_tem_orca05_nemo4: data_tem.nc + data_sal_orca05_nemo4: data_sal.nc + #ghflux_v2.0_orca05_nemo4: geothermal_heating.nc + + #seaice_orca05_nemo4: seaice_c3.0_v19802004.0_ORCA05_r4.2.0.nc + + + ORCA05_LIM2_FOCI_AGRIF: + requires: + # seb-wahl: comment xios below if used with OIFS which also uses XIOS which causes XIOS + # to be cloned and compiled twice, need to file an issue + - xios-2.0_r982 + # TODO: test with newer version of XIOS + # - xios-2.5_r1910 + - nemobasemodel-3.6foci_agrif + branch: master + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + archfile: ESMTOOLS_generic_oasis_intel_agrif + + # use tracer in AGRIF? + use_tracer: false + use_tracer_agrif: false + + # use LIM2 in AGRIF? 
+ use_lim2_agrif: true + + free_surface: linear + leapyear: true + + add_input_files: + # reference namelists + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + # grids and coefficients + coordinates: coordinates + subbasins: subbasins + coef-G70: coef-G70 + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + 1_sn_tem_levitus: 1_sn_tem_levitus + 1_sn_sal_levitus: 1_sn_sal_levitus + 1_ice_init: 1_ice_init + # reference namelists + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + # grids and coefficients + # batmeter: bathy_meter + cn_batmeter: bathy_updated + 1_cn_batmeter: 1_bathy_meter + 1_coordinates: 1_coordinates + fixed_grids: fixed_grids + # reshape files for nest + 1_reshape_bicub: 1_reshape_bicub + 1_reshape_bilin: 1_reshape_bilin + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + 1_coordinates: 1_coordinates_ORCA05.nc + + add_config_files: + 1_namelist_cfg: 1_namelist_cfg + 1_namelist_ice_cfg: 1_namelist_ice_cfg + + add_namelist_changes: + 1_namelist_cfg: + namrun: + cn_exp: ${expid} + nn_it000: ${thisstep_nest} + nn_itend: ${newstep_nest} + nn_date0: ${initial_date!syear!smonth!sday} # ${ini_date} + cn_ocerst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_${parent_date!syear!smonth!sday}${global_tag} + cn_ocerst_indir: '${parent_restart_dir}/' + cn_ocerst_out: restart_${end_date_m1!syear!smonth!sday} + cn_ocerst_outdir: '${experiment_restart_out_dir}/' + nn_stock: ${newstep_nest} + nn_leapy: ${nn_leapy} + nn_rstctl: ${nn_rstctl} + ln_rstart: ${nemo.lresume} + namtsd: + ln_tsd_tradmp: ${ln_tsd_tradmp} + ln_tsd_init: ${ln_tsd_init} + namdom: + nn_closea: 1 + nn_msh: ${nn_msh} + rn_rdt: ${time_step_nest} + namsbc: + nn_fsbc: ${nest_refinement} + ln_echam: '.true.' 
+ nammpp: + jpni: ${jpni} + jpnj: ${jpnj} + jpnij: ${nproc} + namsbc_echam: + sn_owndi: ['A_OTaux1_echam6_08', 3, 'A_OTaux1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Uwnd', ' '] + sn_owndj: ['A_OTauy1_echam6_09', 3, 'A_OTauy1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Vwnd', ' '] + sn_iwndi: ['A_ITaux1_echam6_10', 3, 'A_ITaux1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Uwnd', ' '] + sn_iwndj: ['A_ITauy1_echam6_11', 3, 'A_ITauy1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Vwnd', ' '] + sn_iqsr: ['A_QsrIce_echam6_12', 3, 'A_QsrIce', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_qsr: ['A_QsrMix_echam6_13', 3, 'A_QsrMix', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_iqns: ['A_QnsIce_echam6_14', 3, 'A_QnsIce', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', '', ' '] + sn_qns: ['A_QnsMix_echam6_15', 3, 'A_QnsMix', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', '', ' '] + sn_prec: ['ATotRain_echam6_16', 3, 'ATotRain', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_snow: ['ATotSnow_echam6_17', 3, 'ATotSnow', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_ievp: ['AIceEvap_echam6_18', 3, 'AIceEvap', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_dqns: ['A_dQnsdT_echam6_19', 3, 'A_dQnsdT', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + + # with AGRIF always use linear free surface + # always set free_surface: linear with AGRIF + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' + namelist_cfg: + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' 
+ + ORCA05_LIM2_FOCI_AGRIF_AOW: + requires: + # seb-wahl: comment xios below if used with OIFS which also uses XIOS which causes XIOS + # to be cloned and compiled twice, need to file an issue + - xios-2.0_r982 + # TODO: test with newer version of XIOS + # - xios-2.5_r1910 + - nemobasemodel-3.6foci_agrif + branch: master + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + archfile: ESMTOOLS_generic_oasis_intel_agrif + + # use tracer in AGRIF? + use_tracer_agrif: true + # use LIM2 in AGRIF? + use_lim2_agrif: true + free_surface: linear + leapyear: true + + add_input_files: + # reference namelists + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + # grids and coefficients + coordinates: coordinates + subbasins: subbasins + coef-G70: coef-G70 + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + 1_sn_tem_levitus: 1_sn_tem_levitus + 1_sn_sal_levitus: 1_sn_sal_levitus + 1_ice_init: 1_ice_init + # reference namelists + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + 1_namelist_top_ref: 1_namelist_top_ref + # grids and coefficients + # batmeter: bathy_meter + cn_batmeter: bathy_updated + 1_cn_batmeter: 1_bathy_meter + 1_coordinates: 1_coordinates + fixed_grids: fixed_grids + # reshape files for nest + 1_reshape_bicub: 1_reshape_bicub + 1_reshape_bilin: 1_reshape_bilin + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + 1_namelist_top_ref: 1_namelist_top_ref + 1_coordinates: 1_coordinates_ORCA05.nc + + add_config_files: + 1_namelist_cfg: 1_namelist_cfg + 1_namelist_ice_cfg: 1_namelist_ice_cfg + 1_namelist_top_cfg: 1_namelist_top_cfg + + add_namelist_changes: + 1_namelist_cfg: + namrun: + cn_exp: ${expid} + nn_it000: ${thisstep_nest} + nn_itend: ${newstep_nest} + 
nn_date0: ${initial_date!syear!smonth!sday} # ${ini_date} + cn_ocerst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_${parent_date!syear!smonth!sday}${global_tag} + cn_ocerst_indir: '${parent_restart_dir}/' + cn_ocerst_out: restart_${end_date_m1!syear!smonth!sday} + cn_ocerst_outdir: '${experiment_restart_out_dir}/' + nn_stock: ${newstep_nest} + nn_leapy: ${nn_leapy} + nn_rstctl: ${nn_rstctl} + ln_rstart: ${nemo.lresume} + namtsd: + ln_tsd_tradmp: ${ln_tsd_tradmp} + ln_tsd_init: ${ln_tsd_init} + namdom: + nn_closea: 1 + nn_msh: ${nn_msh} + rn_rdt: ${time_step_nest} + namsbc: + nn_fsbc: ${nest_refinement} + ln_echam: '.true.' + nammpp: + jpni: ${jpni} + jpnj: ${jpnj} + jpnij: ${nproc} + namsbc_echam: + sn_owndi: ['A_OTaux1_echam6_08', 3, 'A_OTaux1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Uwnd', ' '] + sn_owndj: ['A_OTauy1_echam6_09', 3, 'A_OTauy1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Vwnd', ' '] + sn_iwndi: ['A_ITaux1_echam6_10', 3, 'A_ITaux1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Uwnd', ' '] + sn_iwndj: ['A_ITauy1_echam6_11', 3, 'A_ITauy1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Vwnd', ' '] + sn_iqsr: ['A_QsrIce_echam6_12', 3, 'A_QsrIce', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_qsr: ['A_QsrMix_echam6_13', 3, 'A_QsrMix', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_iqns: ['A_QnsIce_echam6_14', 3, 'A_QnsIce', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', '', ' '] + sn_qns: ['A_QnsMix_echam6_15', 3, 'A_QnsMix', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', '', ' '] + sn_prec: ['ATotRain_echam6_16', 3, 'ATotRain', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_snow: ['ATotSnow_echam6_17', 3, 'ATotSnow', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + 
sn_ievp: ['AIceEvap_echam6_18', 3, 'AIceEvap', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_dqns: ['A_dQnsdT_echam6_19', 3, 'A_dQnsdT', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + + # with AGRIF always use linear free surface + # always set free_surface: linear with AGRIF + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' + namelist_cfg: + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' + + ORCA05_LIM2_KCM_AGRIF_OASISMCT4: + requires: + # seb-wahl: workaround if used with OIFS which also uses XIOS which causes XIOS + # to be cloned and compiled twice, need to file an issue + #- xios-2.5_r1910 + - nemobasemodel-3.6foci + branch: esm-tools + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + archfile: ESMTOOLS_generic_oasis_intel_agrif + + # use tracer in AGRIF? + use_tracer_agrif: true + # use LIM2 in AGRIF? 
+ use_lim2_agrif: true + free_surface: linear + leapyear: true + + add_input_files: + # reference namelists + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + # grids and coefficients + coordinates: coordinates + subbasins: subbasins + coef-G70: coef-G70 + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + 1_sn_tem_levitus: 1_sn_tem_levitus + 1_sn_sal_levitus: 1_sn_sal_levitus + 1_ice_init: 1_ice_init + # reference namelists + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + 1_namelist_top_ref: 1_namelist_top_ref + # grids and coefficients + cn_batmeter: bathy_updated + 1_cn_batmeter: 1_bathy_meter + 1_coordinates: 1_coordinates + fixed_grids: fixed_grids + # reshape files for nest + 1_reshape_bicub: 1_reshape_bicub + 1_reshape_bilin: 1_reshape_bilin + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + 1_namelist_top_ref: 1_namelist_top_ref + 1_coordinates: 1_coordinates_ORCA05.nc + + add_config_files: + 1_namelist_cfg: 1_namelist_cfg + 1_namelist_ice_cfg: 1_namelist_ice_cfg + 1_namelist_top_cfg: 1_namelist_top_cfg + + add_namelist_changes: + 1_namelist_cfg: + namrun: + cn_exp: ${expid} + nn_it000: ${thisstep_nest} + nn_itend: ${newstep_nest} + nn_date0: ${initial_date!syear!smonth!sday} # ${ini_date} + cn_ocerst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_${parent_date!syear!smonth!sday}${global_tag} + cn_ocerst_indir: '${parent_restart_dir}/' + cn_ocerst_out: restart_${end_date_m1!syear!smonth!sday} + cn_ocerst_outdir: '${experiment_restart_out_dir}/' + nn_stock: ${newstep_nest} + nn_leapy: ${nn_leapy} + nn_rstctl: ${nn_rstctl} + ln_rstart: ${nemo.lresume} + #namcfg: + # jpidta: ${_nx_nest1} + # jpjdta: ${_ny_nest1} + # jpiglo: ${_nx_nest1} + # jpjglo: ${_ny_nest1} + namlbc: + rn_shlat: 2 + ln_vorlat: '.false.' 
+ namagrif: + #nn_cln_update: 3 + rn_sponge_tra: 600 + rn_sponge_dyn: 600 + namtsd: + ln_tsd_tradmp: ${ln_tsd_tradmp} + ln_tsd_init: ${ln_tsd_init} + namdom: + nn_closea: 1 + nn_msh: ${nn_msh} + rn_rdt: ${time_step_nest} + namsbc: + nn_fsbc: 1 + nn_ice: 2 + ln_echam: '.false.' + ln_cpl: '.true.' + namtra_ldf: + rn_aeiv_0: 0. + rn_aht_0: 120. + rn_aht_m: 120. + namdyn_ldf: + # Computed as -1.709e12 / 5^3 = -1.37e10 + # Note: -1.709e12 is applied at the largest ORCA05 cell, at equator + # but coeff is actually -1.267e12 where VIKING10 starts + # So ahm should be -1.267e12 / 5^3 = -1.013e10 + # We should scale this by 0.74. + rn_ahm_0_blp: -1.013e10 + rn_ahm_m_blp: -1.37e11 + rn_ahm_m_lap: 0. + nammpp: + jpni: ${jpni} + jpnj: ${jpnj} + jpnij: ${nproc} + #namnc4: + # ln_nc4zip: '.false.' + # nn_nchunks_i: 4 + # nn_nchunks_j: 4 + # nn_nchunks_k: 4 + nambbl: + rn_ahtbbl: 1000 + namctl: + ln_ctl: '.false.' + nn_timing: 0 + # with AGRIF always use linear free surface + # always set free_surface: linear with AGRIF + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' + namelist_cfg: + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' + + add_coupling_fields: + "[[agr1_t_fields-->FIELD]]": + grid: agr1 + "[[agr1_c_fields-->FIELD]]": + grid: agr1 + "[[agr1_r_fields-->FIELD]]": + grid: agr1r + "[[agr1_rc_fields-->FIELD]]": + grid: agrc + "[[agr2_t_fields-->FIELD]]": + grid: agr2 + + ORCA05_LIM2_KCM_AGRIF_OASISMCT5: + requires: + - nemobasemodel-3.6foci + branch: esm-tools destination: nemo-${nemo.version}/CONFIG/${nemo.version} git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git - + archfile: ESMTOOLS_generic_oasis_intel_agrif + # use tracer in AGRIF? use_tracer_agrif: true # use LIM2 in AGRIF? 
use_lim2_agrif: true free_surface: linear - + leapyear: true + add_input_files: # reference namelists namelist_ref: namelist_ref @@ -322,26 +1609,26 @@ choose_version: # grids and coefficients coordinates: coordinates subbasins: subbasins + coef-G70: coef-G70 # init and (if used) damping data sn_tem: sn_tem_levitus sn_sal: sn_sal_levitus ice_init: ice_init_kkg36f13h - 1_sn_tem_levitus: 1_sn_tem_levitus - 1_sn_sal_levitus: 1_sn_sal_levitus + 1_sn_tem_levitus: 1_sn_tem_levitus + 1_sn_sal_levitus: 1_sn_sal_levitus 1_ice_init: 1_ice_init # reference namelists 1_namelist_ref: 1_namelist_ref 1_namelist_ice_ref: 1_namelist_ice_ref 1_namelist_top_ref: 1_namelist_top_ref - # grids and coefficients - # batmeter: bathy_meter + # grids and coefficients cn_batmeter: bathy_updated 1_cn_batmeter: 1_bathy_meter 1_coordinates: 1_coordinates fixed_grids: fixed_grids # reshape files for nest - 1_reshape_bicub: 1_reshape_bicub - 1_reshape_bilin: 1_reshape_bilin + 1_reshape_bicub: 1_reshape_bicub + 1_reshape_bilin: 1_reshape_bilin input_in_work: ice_init: Ice_initialization.nc @@ -350,7 +1637,7 @@ choose_version: 1_namelist_ice_ref: 1_namelist_ice_ref 1_namelist_top_ref: 1_namelist_top_ref 1_coordinates: 1_coordinates_ORCA05.nc - + add_config_files: 1_namelist_cfg: 1_namelist_cfg 1_namelist_ice_cfg: 1_namelist_ice_cfg @@ -363,44 +1650,62 @@ choose_version: nn_it000: ${thisstep_nest} nn_itend: ${newstep_nest} nn_date0: ${initial_date!syear!smonth!sday} # ${ini_date} - cn_ocerst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_${parent_date!syear!smonth!sday}${global_tag} + cn_ocerst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_${parent_date!syear!smonth!sday}${global_tag} cn_ocerst_indir: '${parent_restart_dir}/' cn_ocerst_out: restart_${end_date_m1!syear!smonth!sday} cn_ocerst_outdir: '${experiment_restart_out_dir}/' nn_stock: ${newstep_nest} - nn_leapy: ${nn_leapy} - nn_rstctl: ${nn_rstctl} + nn_leapy: ${nn_leapy} + nn_rstctl: ${nn_rstctl} ln_rstart: ${nemo.lresume} - 
namtsd: - ln_tsd_tradmp: ${ln_tsd_tradmp} - ln_tsd_init: ${ln_tsd_init} + #namcfg: + # jpidta: ${_nx_nest1} + # jpjdta: ${_ny_nest1} + # jpiglo: ${_nx_nest1} + # jpjglo: ${_ny_nest1} + namlbc: + rn_shlat: 2 + ln_vorlat: '.false.' + namagrif: + #nn_cln_update: 3 + rn_sponge_tra: 600 + rn_sponge_dyn: 600 + namtsd: + ln_tsd_tradmp: ${ln_tsd_tradmp} + ln_tsd_init: ${ln_tsd_init} namdom: nn_closea: 1 - nn_msh: ${nn_msh} + nn_msh: ${nn_msh} rn_rdt: ${time_step_nest} namsbc: - nn_fsbc: ${nest_refinement} - ln_echam: '.true.' + nn_fsbc: 3 + nn_ice: 2 + ln_echam: '.false.' + ln_cpl: '.true.' + namtra_ldf: + rn_aeiv_0: 0. + rn_aht_0: 120. + rn_aht_m: 120. + namdyn_ldf: + rn_ahm_0_blp: -2.4e10 + rn_ahm_m_blp: -8.e9 + rn_ahm_m_lap: 0. nammpp: jpni: ${jpni} jpnj: ${jpnj} jpnij: ${nproc} - namsbc_echam: - sn_owndi: ['A_OTaux1_echam6_08', 3, 'A_OTaux1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Uwnd', ' '] - sn_owndj: ['A_OTauy1_echam6_09', 3, 'A_OTauy1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Vwnd', ' '] - sn_iwndi: ['A_ITaux1_echam6_10', 3, 'A_ITaux1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Uwnd', ' '] - sn_iwndj: ['A_ITauy1_echam6_11', 3, 'A_ITauy1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Vwnd', ' '] - sn_iqsr: ['A_QsrIce_echam6_12', 3, 'A_QsrIce', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] - sn_qsr: ['A_QsrMix_echam6_13', 3, 'A_QsrMix', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] - sn_iqns: ['A_QnsIce_echam6_14', 3, 'A_QnsIce', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', '', ' '] - sn_qns: ['A_QnsMix_echam6_15', 3, 'A_QnsMix', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', '', ' '] - sn_prec: ['ATotRain_echam6_16', 3, 'ATotRain', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] - sn_snow: ['ATotSnow_echam6_17', 3, 
'ATotSnow', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] - sn_ievp: ['AIceEvap_echam6_18', 3, 'AIceEvap', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] - sn_dqns: ['A_dQnsdT_echam6_19', 3, 'A_dQnsdT', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] - + #namnc4: + # ln_nc4zip: '.false.' + # nn_nchunks_i: 4 + # nn_nchunks_j: 4 + # nn_nchunks_k: 4 + nambbl: + rn_ahtbbl: 1000 + namctl: + ln_ctl: '.false.' + nn_timing: 0 # with AGRIF always use linear free surface - # always set free_surface: linear with AGRIF + # always set free_surface: linear with AGRIF namdyn_hpg: ln_hpg_zps: '.true.' ln_hpg_sco: '.false.' @@ -410,22 +1715,36 @@ choose_version: ln_hpg_zps: '.true.' ln_hpg_sco: '.false.' ln_dynhpg_imp: '.true.' + + add_coupling_fields: + "[[agr1_t_fields-->FIELD]]": + grid: agr1 + "[[agr1_c_fields-->FIELD]]": + grid: agr1 + "[[agr2_t_fields-->FIELD]]": + grid: agr2 - ORCA05_LIM2_KCM_AGRIF_OASISMCT4: + ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4: requires: # seb-wahl: workaround if used with OIFS which also uses XIOS which causes XIOS # to be cloned and compiled twice, need to file an issue #- xios-2.5_r1910 - nemobasemodel-3.6foci branch: esm-tools + git-repository: https://git.geomar.de/foci/src/nemo_config/ORCA05_LIM2_FOCI_AGRIF_MOPS.git destination: nemo-${nemo.version}/CONFIG/${nemo.version} - git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + archfile: ESMTOOLS_generic_oasis_intel_agrif # use tracer in AGRIF? use_tracer_agrif: true # use LIM2 in AGRIF? 
use_lim2_agrif: true free_surface: linear + leapyear: true + + # coupling fields + opac_mops_fields: [O_AtmCO2, CO2FLXOC] + opat_mops_fields: [CO2OCEAN, CO2TRA, FF_OCE] add_input_files: # reference namelists @@ -434,6 +1753,7 @@ choose_version: # grids and coefficients coordinates: coordinates subbasins: subbasins + coef-G70: coef-G70 # init and (if used) damping data sn_tem: sn_tem_levitus sn_sal: sn_sal_levitus @@ -488,7 +1808,7 @@ choose_version: # jpiglo: ${_nx_nest1} # jpjglo: ${_ny_nest1} namlbc: - rn_shlat: 2 + rn_shlat: 0 ln_vorlat: '.false.' namagrif: #nn_cln_update: 3 @@ -498,7 +1818,7 @@ choose_version: ln_tsd_tradmp: ${ln_tsd_tradmp} ln_tsd_init: ${ln_tsd_init} namdom: - nn_closea: 1 + nn_closea: 0 nn_msh: ${nn_msh} rn_rdt: ${time_step_nest} namsbc: @@ -539,8 +1859,142 @@ choose_version: ln_hpg_zps: '.true.' ln_hpg_sco: '.false.' ln_dynhpg_imp: '.true.' + # workaround for bug in f90nml library used by esm_runscripts, see + # https://github.com/esm-tools/esm_tools/issues/633 and + # https://github.com/marshallward/f90nml/issues/110 + namelist_top_cfg: + namtrc: + sn_tracer: "remove_from_namelist" + sn_tracer(1)%clsname: AGE + sn_tracer(2)%clsname: PO4 + sn_tracer(3)%clsname: DOP + sn_tracer(4)%clsname: O2 + sn_tracer(5)%clsname: PHY + sn_tracer(6)%clsname: ZOO + sn_tracer(7)%clsname: DET + sn_tracer(8)%clsname: DIN + sn_tracer(9)%clsname: DIC + sn_tracer(10)%clsname: ALK + sn_tracer(11)%clsname: IDEAL + + sn_tracer(1)%cllname: age + sn_tracer(2)%cllname: phosphate + sn_tracer(3)%cllname: dop + sn_tracer(4)%cllname: oxygen + sn_tracer(5)%cllname: phytoplankton + sn_tracer(6)%cllname: zooplankton + sn_tracer(7)%cllname: det + sn_tracer(8)%cllname: din + sn_tracer(9)%cllname: dic + sn_tracer(10)%cllname: alkalinity + sn_tracer(11)%cllname: ideal + + sn_tracer(1)%clunit: s + sn_tracer(2)%clunit: mmol/m3 + sn_tracer(3)%clunit: mmol/m3 + sn_tracer(4)%clunit: mmol/m3 + sn_tracer(5)%clunit: mmol/m3 + sn_tracer(6)%clunit: mmol/m3 + sn_tracer(7)%clunit: mmol/m3 + 
sn_tracer(8)%clunit: mmol/m3 + sn_tracer(9)%clunit: umol/kg + sn_tracer(10)%clunit: umol/kg + sn_tracer(11)%clunit: mmol/m3 + + sn_tracer(1)%llinit: false + sn_tracer(2)%llinit: true + sn_tracer(3)%llinit: false + sn_tracer(4)%llinit: true + sn_tracer(5)%llinit: false + sn_tracer(6)%llinit: false + sn_tracer(7)%llinit: false + sn_tracer(8)%llinit: true + sn_tracer(9)%llinit: true + sn_tracer(10)%llinit: true + sn_tracer(11)%llinit: true + sn_tracer(1)%llsave: true + sn_tracer(2)%llsave: true + sn_tracer(3)%llsave: true + sn_tracer(4)%llsave: true + sn_tracer(5)%llsave: true + sn_tracer(6)%llsave: true + sn_tracer(7)%llsave: true + sn_tracer(8)%llsave: true + sn_tracer(9)%llsave: true + sn_tracer(10)%llsave: true + sn_tracer(11)%llsave: true + + 1_namelist_top_cfg: + namtrc: + sn_tracer: "remove_from_namelist" + sn_tracer(1)%clsname: AGE + sn_tracer(2)%clsname: PO4 + sn_tracer(3)%clsname: DOP + sn_tracer(4)%clsname: O2 + sn_tracer(5)%clsname: PHY + sn_tracer(6)%clsname: ZOO + sn_tracer(7)%clsname: DET + sn_tracer(8)%clsname: DIN + sn_tracer(9)%clsname: DIC + sn_tracer(10)%clsname: ALK + sn_tracer(11)%clsname: IDEAL + + sn_tracer(1)%cllname: age + sn_tracer(2)%cllname: phosphate + sn_tracer(3)%cllname: dop + sn_tracer(4)%cllname: oxygen + sn_tracer(5)%cllname: phytoplankton + sn_tracer(6)%cllname: zooplankton + sn_tracer(7)%cllname: det + sn_tracer(8)%cllname: din + sn_tracer(9)%cllname: dic + sn_tracer(10)%cllname: alkalinity + sn_tracer(11)%cllname: ideal + + sn_tracer(1)%clunit: s + sn_tracer(2)%clunit: mmol/m3 + sn_tracer(3)%clunit: mmol/m3 + sn_tracer(4)%clunit: mmol/m3 + sn_tracer(5)%clunit: mmol/m3 + sn_tracer(6)%clunit: mmol/m3 + sn_tracer(7)%clunit: mmol/m3 + sn_tracer(8)%clunit: mmol/m3 + sn_tracer(9)%clunit: umol/kg + sn_tracer(10)%clunit: umol/kg + sn_tracer(11)%clunit: mmol/m3 + + sn_tracer(1)%llinit: false + sn_tracer(2)%llinit: true + sn_tracer(3)%llinit: false + sn_tracer(4)%llinit: true + sn_tracer(5)%llinit: false + sn_tracer(6)%llinit: false + 
sn_tracer(7)%llinit: false + sn_tracer(8)%llinit: true + sn_tracer(9)%llinit: true + sn_tracer(10)%llinit: true + sn_tracer(11)%llinit: true + sn_tracer(1)%llsave: true + sn_tracer(2)%llsave: true + sn_tracer(3)%llsave: true + sn_tracer(4)%llsave: true + sn_tracer(5)%llsave: true + sn_tracer(6)%llsave: true + sn_tracer(7)%llsave: true + sn_tracer(8)%llsave: true + sn_tracer(9)%llsave: true + sn_tracer(10)%llsave: true + sn_tracer(11)%llsave: true add_coupling_fields: + "[[opat_mops_fields-->FIELD]]": + grid: opat + "[[opac_mops_fields-->FIELD]]": + grid: opac + "[[agr1_t_mops_fields-->FIELD]]": + grid: agr1 + "[[agr1_c_mops_fields-->FIELD]]": + grid: agr1 "[[agr1_t_fields-->FIELD]]": grid: agr1 "[[agr1_c_fields-->FIELD]]": @@ -548,7 +2002,7 @@ choose_version: "[[agr1_r_fields-->FIELD]]": grid: agr1r "[[agr1_rc_fields-->FIELD]]": - grid: agr1 + grid: agrc "[[agr2_t_fields-->FIELD]]": grid: agr2 @@ -559,6 +2013,7 @@ choose_version: branch: master destination: nemo-${nemo.version}/CONFIG/${nemo.version} git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + leapyear: true add_input_files: # reference namelists @@ -572,10 +2027,12 @@ choose_version: # init and (if used) damping data sn_tem: sn_tem_levitus sn_sal: sn_sal_levitus + sn_sal_diag: sn_sal_levitus ice_init: ice_init_kkg36f13h input_in_work: ice_init: Ice_initialization.nc + sn_sal_diag: sali_ref_clim_monthly.nc namelist_ice_ref: namelist_ice_ref ORCA05_LIM2_KCM_AOW_autotools: @@ -584,6 +2041,7 @@ choose_version: branch: master_autotools destination: nemo-${nemo.version}/CONFIG/${nemo.version} git-repository: https://git.geomar.de/foci/src/nemo_config/ORCA05_LIM2_KCM_AOW.git + leapyear: true add_input_files: # reference namelists @@ -612,6 +2070,7 @@ choose_version: git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git free_surface: nonlinear + leapyear: true add_input_files: # reference namelists @@ -630,14 +2089,114 @@ choose_version: input_in_work: 
ice_init: Ice_initialization.nc namelist_ice_ref: namelist_ice_ref + + # Use for FOCI-OpenIFS v2.x + # No smagorinsky + # Add flag to support OASIS3-MCT >= 3.0 + ORCA05_LIM2_KCM_AOW_FS_OASISMCT4: + requires: + - nemobasemodel-3.6foci + branch: mct3_nosmag + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git - ORCA05_LIM2_KCM_AOW_OASISMCT4: + free_surface: nonlinear + leapyear: true + + add_input_files: + # reference namelists + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + # grids and coefficients + cn_batmeter: bathy_meter + coordinates: coordinates + subbasins: subbasins + coef-G70: coef-G70 + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + + ORCA05_LIM2_FOCI_MOPS_OASISMCT4: requires: - #- xios-2.0_r982 - nemobasemodel-3.6foci - branch: oasismct4 + branch: esm-tools-oasismct4 destination: nemo-${nemo.version}/CONFIG/${nemo.version} - git-repository: https://git.geomar.de/foci/src/nemo_config/ORCA05_LIM2_KCM_AOW.git + git-repository: https://git.geomar.de/foci/src/nemo_config/ORCA05_LIM2_FOCI_MOPS.git + + # tracers are used by default + free_surface: nonlinear + leapyear: true + + # workaround for bug in f90nml library used by esm_runscripts, see + # https://github.com/esm-tools/esm_tools/issues/633 and + # https://github.com/marshallward/f90nml/issues/110 + add_namelist_changes: + namelist_top_cfg: + namtrc: + sn_tracer: "remove_from_namelist" + sn_tracer(1)%clsname: AGE + sn_tracer(2)%clsname: PO4 + sn_tracer(3)%clsname: DOP + sn_tracer(4)%clsname: O2 + sn_tracer(5)%clsname: PHY + sn_tracer(6)%clsname: ZOO + sn_tracer(7)%clsname: DET + sn_tracer(8)%clsname: DIN + sn_tracer(9)%clsname: DIC + sn_tracer(10)%clsname: ALK + sn_tracer(11)%clsname: IDEAL + + sn_tracer(1)%cllname: age + sn_tracer(2)%cllname: 
phosphate + sn_tracer(3)%cllname: dop + sn_tracer(4)%cllname: oxygen + sn_tracer(5)%cllname: phytoplankton + sn_tracer(6)%cllname: zooplankton + sn_tracer(7)%cllname: det + sn_tracer(8)%cllname: din + sn_tracer(9)%cllname: dic + sn_tracer(10)%cllname: alkalinity + sn_tracer(11)%cllname: ideal + + sn_tracer(1)%clunit: s + sn_tracer(2)%clunit: mmol/m3 + sn_tracer(3)%clunit: mmol/m3 + sn_tracer(4)%clunit: mmol/m3 + sn_tracer(5)%clunit: mmol/m3 + sn_tracer(6)%clunit: mmol/m3 + sn_tracer(7)%clunit: mmol/m3 + sn_tracer(8)%clunit: mmol/m3 + sn_tracer(9)%clunit: umol/kg + sn_tracer(10)%clunit: umol/kg + sn_tracer(11)%clunit: mmol/m3 + + sn_tracer(1)%llinit: false + sn_tracer(2)%llinit: true + sn_tracer(3)%llinit: false + sn_tracer(4)%llinit: true + sn_tracer(5)%llinit: false + sn_tracer(6)%llinit: false + sn_tracer(7)%llinit: false + sn_tracer(8)%llinit: true + sn_tracer(9)%llinit: true + sn_tracer(10)%llinit: true + sn_tracer(11)%llinit: true + sn_tracer(1)%llsave: true + sn_tracer(2)%llsave: true + sn_tracer(3)%llsave: true + sn_tracer(4)%llsave: true + sn_tracer(5)%llsave: true + sn_tracer(6)%llsave: true + sn_tracer(7)%llsave: true + sn_tracer(8)%llsave: true + sn_tracer(9)%llsave: true + sn_tracer(10)%llsave: true + sn_tracer(11)%llsave: true add_input_files: # reference namelists @@ -657,14 +2216,66 @@ choose_version: ice_init: Ice_initialization.nc namelist_ice_ref: namelist_ice_ref - ORCA05_LIM2_KCM_AOW_FS_OASISMCT4: + add_coupling_fields: + "[[opat_mops_fields-->FIELD]]": + grid: opat + "[[opac_mops_fields-->FIELD]]": + grid: opac + + + # As for ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG + # here we use ORCA05_LIM2_KCM_AOW_FS_OASISMCT4 + # and set free_surface = linear + # and also change branch name. 
+ ORCA05_LIM2_KCM_AOW_OASISMCT4: requires: - nemobasemodel-3.6foci - branch: esm-tools + branch: mct3_nosmag_novvl destination: nemo-${nemo.version}/CONFIG/${nemo.version} - git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + git-repository: https://git.geomar.de/foci/src/nemo_config/ORCA05_LIM2_KCM_AOW_FS_OASISMCT4.git + + free_surface: linear + leapyear: true + + add_input_files: + # reference namelists + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + # grids and coefficients + cn_batmeter: bathy_meter + coordinates: coordinates + subbasins: subbasins + coef-G70: coef-G70 + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + + # Use for FOCI-OpenIFS v3.0 + # Note: This version is identical to ORCA05_LIM2_KCM_AOW_FS_OASISMCT4 + # and is the same repo, just a different branch. + # But for some strange reason we can't just change branch, but have to define + # a new version... + # + # In the future we should have one repo for all NEMO 3.6 code + # and then just change branches between model versions + # + ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG: + requires: + - nemobasemodel-3.6foci + # unlike the other versions, we hard code the name here + # so that we avoid the SMAG at the end. 
+ # That way, we use the same repo as ORCA05_LIM2_KCM_AOW_FS_OASISMCT4, but different branch + branch: esm-tools + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/ORCA05_LIM2_KCM_AOW_FS_OASISMCT4.git + free_surface: nonlinear + leapyear: true add_input_files: # reference namelists @@ -684,6 +2295,7 @@ choose_version: ice_init: Ice_initialization.nc namelist_ice_ref: namelist_ice_ref + #ORCA05_SI3_FOCI_FS_AOW: # requires: # - nemobasemodel-4.2.xfoci @@ -842,7 +2454,7 @@ choose_version: sn_humi: "huss-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" sn_prec: "prra_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" sn_snow: "prsn_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" - sn_rnf: "sorunoff_JRA55-do-1-4-0_gr_orca05_y@YEAR@.nc" + sn_rnf: "sorunoff_JRA55-do-1-5-0_gr_eorca025_y@YEAR@.nc" forcing_sources: # JRA55-do drowned forcing @@ -879,7 +2491,7 @@ choose_version: from: 1958 to: 1960 sn_rnf: - "${jra55_runoff_dir}/sorunoff_JRA55-do-1-4-0_gr_orca05_y@YEAR@.nc": + "${jra55_runoff_dir}/sorunoff_JRA55-do-1-5-0_gr_eorca025_y@YEAR@.nc": from: 1958 to: 1960 @@ -905,19 +2517,35 @@ jpni: 0 jpnj: 0 nproc: $(( ${jpni} * ${jpnj} )) +# first step is an Euler step +# this needs to be true for a cold start +# but should be false for restarts +nn_euler: 1 # use Euler step in NEMO 3.6 +ln_1st_euler: true # use Euler step in NEMO 4.2 + +# use_lim2 is kept default here +# but for NEMO 4, one should have use_lim2 = false and use_si3 = true use_lim2: true +use_si3: false use_tracer: true hosing: false lresume: false free_surface: linear -correct_neg_tracer_conc: true +correct_neg_tracer_conc: false # use damping? 
ln_tsd_tradmp: false # Nest settings -nest1: no +# Note: ESM-Tools only supports one nest at the moment +# name of first nest +nest1: false +# use tracer, LIM2 (NEMO 3.6), SI3 (NEMO 4.2) use_tracer_agrif: false use_lim2_agrif: false +use_si3_agrif: false +# initialise AGRIF temperature and salinity +# from parent? Only works in NEMO 4.2. +ln_init_chfrpar: true # calendar: leapyear: False @@ -926,9 +2554,10 @@ leapyear: False # values below will be set to false if lresume = true ln_tsd_init: true ln_limini: true -# in NEMO4.x limini is called ln_iceini -ln_iceini: ${ln_limini} ln_limini_agrif: false +# in NEMO4.x limini is called ln_iceini +ln_iceini: true +ln_iceini_agrif: false nn_msh: 1 ln_meshmask: true # will be set to 0 if lresume = true and run_number=1 @@ -937,7 +2566,7 @@ nn_rstctl: 2 # tracer stuff ln_rsttr: ${lresume} nn_rsttr : 2 -ln_trcdta: true +ln_trcdta: false # default directories pool_dir: ${computer.pool_directories.focipool} @@ -992,7 +2621,9 @@ coupling_freq_in_steps: 6 #jrastart: 1958 #jraend: 1960 - +# +# These changes are done for all configurations +# namelist_changes: namelist_cfg: namrun: @@ -1012,17 +2643,46 @@ namelist_changes: #ln_tsd_tradmp: ${ln_tsd_tradmp} ln_tsd_init: ${ln_tsd_init} namsbc: + # Note: This could actually be set to 1 as well + # It would mean calling ice model each step + # which is expensive. + # Something to be tested in the future. nn_fsbc: ${coupling_freq_in_steps} nammpp: jpni: ${jpni} jpnj: ${jpnj} + +# +# Settings for all configurations but +# separate for NEMO generations. 
+# choose_generation: "4.2": + + # Switching from LIM2 (NEMO 3.6) to SI3 (NEMO 4) + use_lim2: false + use_si3: true + + # coupling fields are the same as in NEMO 3.6 except for: + # 1) OIceFrac is now OIceFrc + # 2) O_AlbIce is added + # 3) O_SnwTck, O_IceTck are now OSnwTck, OIceTck + opat_fields: [O_AlbIce, OIceFrc, O_SSTSST, O_TepIce, OIceTck, OSnwTck, O_OCurx1, O_OCury1, O_OTaux1, O_OTauy1, O_ITaux1, O_ITauy1] + agr1_t_fields: [1_O_AlbIce, 1_OIceFrc, 1_O_SSTSST, 1_O_TepIce, 1_OIceTck, 1_OSnwTck, 1_O_OCurx1, 1_O_OCury1, 1_O_OTaux1, 1_O_OTauy1, 1_O_ITaux1, 1_O_ITauy1] + + # in NEMO 4, the periodic points, i.e. i=1, i=imax + # are not included in the coupling + # So we need to set P 0 in namecouple + # oasis_p: 2 is left as default for compatibility with NEMO 3.6 + oasis_p: 0 + add_namelist_changes: namelist_cfg: + namrun: + ln_1st_euler: ${ln_1st_euler} namdom: - rn_dt: ${time_step} + rn_Dt: ${time_step} ln_meshmask: ${ln_meshmask} namcfg: ln_closea: '.false.' @@ -1033,11 +2693,71 @@ choose_generation: nonlinear: add_namelist_changes: namelist_cfg: + namdom: + ln_linssh: '.false.' namdyn_hpg: + ln_hpg_zco: '.false.' + ln_hpg_zps: '.false.' ln_hpg_sco: '.true.' + + # Restart settings + choose_lresume: + + # If the run is a restart + true: + # Turn off initialisation + ln_iceini: false + ln_limini_agrif: false + ln_tsd_init: false + # dont write mesh_mask files + ln_meshmask: false + # take date from restart file + nn_rstctl: 2 + # do not initialise AGRIF from parent + # We need to use AGRIF restart files + ln_init_chfrpar: '.false.' + # Do not use Euler forward for first step + # This only works if you have restart files with two + # time levels stored + ln_1st_euler: false + + # Run is a restart but run number = 1, + # i.e. it is the first run of the experiment + # That means you are restarting (branching off) + # from another experiment. 
+ choose_general.run_number: + # For the first run + 1: + # Take date from nn_date0, not restart file + # (allows to restart at year 1850 from picontrol at year 3000) + nn_rstctl: 0 + nn_rsttr : 0 # only needed if tracers are used + # This assumes you have glued restart files into one global file. + # upon restart, nemo restart files are global files and have end on _global.nc (FOCI convention) + global_tag: "_global" + prevstep_formatted: "<--format(%08d)-- ${ini_restart_steps}" + prevstep_formatted_nest: "<--format(%08d)-- ${ini_restart_steps_nest}" + + # If not a restart, i.e. cold start + false: + add_outdata_sources: + mesh_mask: "*mesh_mask*.nc" + "3.6": + # Coupling fields are different for NEMO 3.6 and 4.2, so it has to go in this choose block + # see comment for 4.2 + choose_sst_grid_name: + "opat": + opat_fields: [OIceFrac, O_SSTSST, O_TepIce, O_IceTck, O_SnwTck, O_OCurx1, O_OCury1, O_OTaux1, O_OTauy1, O_ITaux1, O_ITauy1] + "opac": + opat_fields: [] + opac_fields: [O_QsrIce, O_QsrMix, O_QnsIce, O_QnsMix, OTotRain, OTotSnow, OIceEvap, OTotEvap, O_dQnsdT, OIceFrac, O_SSTSST, O_TepIce, O_IceTck, O_SnwTck, O_OCurx1, O_OCury1, O_OTaux1, O_OTauy1, O_ITaux1, O_ITauy1] + agr1_t_fields: [1_OIceFrac, 1_O_SSTSST, 1_O_TepIce, 1_O_IceTck, 1_O_SnwTck, 1_O_OCurx1, 1_O_OCury1, 1_O_OTaux1, 1_O_OTauy1, 1_O_ITaux1, 1_O_ITauy1] + add_namelist_changes: namelist_cfg: + namrun: + nn_euler: ${nn_euler} namdom: nn_closea: 1 nn_msh: ${nn_msh} @@ -1062,6 +2782,45 @@ choose_generation: ln_hpg_sco: '.true.' ln_dynhpg_imp: '.false.' + # Restart settings + choose_lresume: + + # If the run is a restart + true: + # Turn off initialisation + ln_limini: false + ln_limini_agrif: false + ln_tsd_init: false + # dont write mesh_mask files + nn_msh: 0 + ln_meshmask: false + # Set nn_euler = 0 to make the model start with leap frog + # Only works if restart files with two time levels (before and now) + # are stored. + nn_euler: 0 + + # Run is a restart but run number = 1, + # i.e. 
it is the first run of the experiment + # That means you are restarting (branching off) + # from another experiment. + choose_general.run_number: + # For the first run + 1: + # Take date from nn_date0, not restart file + # (allows to restart at year 1850 from picontrol at year 3000) + nn_rstctl: 0 + nn_rsttr : 0 # only needed if tracers are used + # This assumes you have glued restart files into one global file. + # upon restart, nemo restart files are global files and have end on _global.nc (FOCI convention) + global_tag: "_global" + prevstep_formatted: "<--format(%08d)-- ${ini_restart_steps}" + prevstep_formatted_nest: "<--format(%08d)-- ${ini_restart_steps_nest}" + + # If not a restart, i.e. cold start + false: + add_outdata_sources: + mesh_mask: "*mesh_mask*.nc" + choose_leapyear: False: nn_leapy: 0 @@ -1075,36 +2834,49 @@ choose_hosing: namdyn_sbc: ln_hosing: '.true.' -choose_lresume: - true: - ln_limini: false - ln_limini_agrif: false - ln_tsd_init: false - nn_msh: 0 - ln_meshmask: false - - choose_general.run_number: - 1: - nn_rstctl: 0 - nn_rsttr : 0 # only needed if tracers are used - # upon restart, nemo restart files are global files and have end on _global.nc (FOCI convention) - global_tag: "_global" - prevstep_formatted: "<--format(%08d)-- ${ini_restart_steps}" - prevstep_formatted_nest: "<--format(%08d)-- ${ini_restart_steps_nest}" - - false: - add_outdata_sources: - mesh_mask: "*mesh_mask*.nc" +#choose_lresume: +# true: +# ln_limini: false +# ln_limini_agrif: false +# ln_tsd_init: false +# nn_msh: 0 +# ln_meshmask: false +# +# choose_general.run_number: +# 1: +# nn_rstctl: 0 +# nn_rsttr : 0 # only needed if tracers are used +# # upon restart, nemo restart files are global files and have end on _global.nc (FOCI convention) +# global_tag: "_global" +# prevstep_formatted: "<--format(%08d)-- ${ini_restart_steps}" +# prevstep_formatted_nest: "<--format(%08d)-- ${ini_restart_steps_nest}" +# +# false: +# add_outdata_sources: +# mesh_mask: "*mesh_mask*.nc" # 
choices: # choose_resolution: + eORCA025: + _nx: 1440 + _ny: 1206 + time_step: 1200 ORCA05: + # Periodic points and folding points + # are not included on grid for NEMO 4.2 _nx: 722 _ny: 511 time_step: 1800 - + + eORCA05: + # Periodic points and folding points + # are not included on grid for NEMO 4.2 + _nx: 722 + _ny: 604 + time_step: 1800 + ORCA12: _nx: 4322 _ny: 3059 @@ -1175,15 +2947,29 @@ choose_nest1: _nx_nest1: 1569 _ny_nest1: 664 nest_refinement: 3 + NWPAC10: + _nx_nest1: 733 + _ny_nest1: 838 + nest_refinement: 4 WG10: _nx_nest1: 1414 _ny_nest1: 944 nest_refinement: 5 + WG10v4: + _nx_nest1: 1614 + _ny_nest1: 944 + nest_refinement: 5 + ORION10X: + _nx_nest1: 3564 + _ny_nest1: 884 + nest_refinement: 5 choose_levels: L75: add_namelist_changes: namelist_cfg: + namcfg: + jpkdta: 75 namdom: # very important to set hmin to -10 or lower rn_hmin: -10 @@ -1202,6 +2988,7 @@ choose_levels: choose_use_lim2: + # NOTE: LIM2 only available for NEMO v3 true: add_config_files: namelist_ice_cfg: namelist_ice_cfg @@ -1217,18 +3004,6 @@ choose_use_lim2: cn_icerst_outdir: '${experiment_restart_out_dir}/' namiceini: ln_limini: ${ln_limini} - "4.2": - add_namelist_changes: - namelist_ice_cfg: - nampar: - cn_icerst_in: ${parent_expid}_${prevstep_formatted}_restart_ice_${parent_date!syear!smonth!sday}${global_tag} - cn_icerst_indir: '${parent_restart_dir}/' - cn_icerst_out: restart_ice_${end_date_m1!syear!smonth!sday} - cn_icerst_outdir: '${experiment_restart_out_dir}/' - namini: - ln_iceini: ${ln_iceini} - # initialize sea ice based on SSTs - nn_iceini_file: 0 choose_use_lim2_agrif: true: @@ -1244,6 +3019,43 @@ choose_use_lim2: namiceini: ln_limini: ${ln_limini_agrif} +choose_use_si3: + # NOTE: SI3 only available for NEMO v4 + true: + add_config_files: + namelist_ice_cfg: namelist_ice_cfg + + choose_generation: + "4.2": + add_namelist_changes: + namelist_ice_cfg: + nampar: + cn_icerst_in: 
${parent_expid}_${prevstep_formatted}_restart_ice_${parent_date!syear!smonth!sday}${global_tag} + cn_icerst_indir: '${parent_restart_dir}/' + cn_icerst_out: restart_ice_${end_date_m1!syear!smonth!sday} + cn_icerst_outdir: '${experiment_restart_out_dir}/' + namini: + ln_iceini: ${ln_iceini} + # initialize sea ice based on SSTs + nn_iceini_file: 0 + + # Additional changes for 1st AGRIF child + choose_use_si3_agrif: + true: + add_config_files: + 1_namelist_ice_cfg: 1_namelist_ice_cfg + add_namelist_changes: + 1_namelist_ice_cfg: + nampar: + cn_icerst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_ice_${parent_date!syear!smonth!sday}${global_tag} + cn_icerst_indir: '${parent_restart_dir}/' + cn_icerst_out: restart_ice_${end_date_m1!syear!smonth!sday} + cn_icerst_outdir: '${experiment_restart_out_dir}/' + namini: + ln_iceini: ${ln_iceini_agrif} + # initialize sea ice based on SSTs + nn_iceini_file: 0 + choose_use_tracer: true: add_input_files: @@ -1293,6 +3105,7 @@ bin_sources: input_sources: # grids and coefficients + # TODO: why did Tronje put agrif_dir for bathy_meter and coordinates bathy_meter: ${input_dir}/bathy_meter.nc coordinates: ${input_dir}/coordinates.nc #subbasins: ${input_dir}/orca05_subbasins_3.6.nc @@ -1347,39 +3160,62 @@ input_sources: 1_namelist_top_ref: ${nemo.model_dir}/CONFIG/SHARED/namelist_top_ref # NEMO 4.2.x eORCA025 input files - bfr_coef: ${input_dir}/bfr_coef_eORCA025_r4.2.0.nc - # coordinates_eORCA025_r4.2.0__v1.0.nc - domain_cfg_ExclClosedSeas: ${input_dir}/domain_cfg_eORCA025_r4.2.0__ExclClosedSeas.nc - domain_cfg_InclClosedSeas: ${input_dir}/domain_cfg_eORCA025_r4.2.0__InclClosedSeas.nc + # grids and coefficients + coordinates_eORCA025: ${input_dir}/coordinates_eORCA025_r4.2.0__v1.0.nc + bfr_coef_eORCA025: ${input_dir}/bfr_coef_eORCA025_r4.2.0.nc + subbasins_eORCA025: ${input_dir}/subbasins_eORCA025_r4.2.0.nc + #domain_cfg_eORCA025: ${input_dir}/domain_cfg_eORCA025_r4.2.0__ExclClosedSeas.nc + domain_cfg_eORCA025: 
${input_dir}/domain_cfg_eORCA025.z75_r4.2.0-2.nc # TODO: this file still needs to be generated by Markus - domain_cfg_CaspianSea: ${input_dir}/domain_cfg_eORCA025_r4.2.0__CaspianSea.nc + domain_cfg_eORCA025_CaspianSea: ${input_dir}/domain_cfg_eORCA025_r4.2.0__CaspianSea.nc # we probably never need this one domain_cfg_UKmasks: ${input_dir}/domain_cfg_eORCA025_r4.2.0__UKmasks.nc + ghflux_v2.0: ${input_dir}/ghflux_v2.0.nc - reshape_ghflux2: ${input_dir}/reshape_ghflux2_eORCA025_r4.2.0_bilin.nc - reshape_jra55do_bicub: ${input_dir}/reshape_jra55do_eORCA025_r4.2.0_bicub.nc - reshape_jra55do_bilin: ${input_dir}/reshape_jra55do_eORCA025_r4.2.0_bilin.nc - subbasins_eORCA025: ${input_dir}/subbasins_eORCA025_r4.2.0.nc - sn_tem_woa13_omip_eORCA025: ${input_dir}/woa13_decav_ptemp_OMIPinit_eORCA025.L75_4.2.0_Kv1.0.0.nc - sn_sal_woa13_omip_eORCA025: ${input_dir}/woa13_decav_salt_OMIPinit_eORCA025.L75_4.2.0_Kv1.0.0.nc + reshape_ghflux2_eORCA025: ${input_dir}/reshape_ghflux2_eORCA025_r4.2.0_bilin.nc + reshape_jra55do_eORCA025_bicub: ${input_dir}/reshape_jra55do_eORCA025_r4.2.0_bicub.nc + reshape_jra55do_eORCA025_bilin: ${input_dir}/reshape_jra55do_eORCA025_r4.2.0_bilin.nc + data_tem_eORCA025: ${input_dir}/woa13_decav_ptemp_OMIPinit_eORCA025.L75_4.2.0_Kv1.0.5.nc + data_sal_eORCA025: ${input_dir}/woa13_decav_salt_OMIPinit_eORCA025.L75_4.2.0_Kv1.0.5.nc + runoff_eORCA025: ${input_dir}/runoff_b0.2.0_eORCA025_r4.2.0.nc + seaice_eORCA025: ${input_dir}/seaice_c3.0_v19802004.0_eORCA025_r4.2.0.nc # NEMO 4.2.x ORCA05 input files # grids and coefficients bathy_meter_orca05_nemo4: ${input_dir}/bathy_meter__4.2.0_ORCA05.L46.nc - coordinates_orca05_nemo4: ${input_dir}/bathy_meter__4.2.0_ORCA05.L46.nc + coordinates_orca05_nemo4: ${input_dir}/coordinates.nc + #coordinates_orca05_nemo4: ${input_dir}/bathy_meter__4.2.0_ORCA05.L46.nc bfr_coef_orca05_nemo4: ${input_dir}/bfr_coef__4.2.0_ORCA05.nc subbasins_orca05_nemo4: ${input_dir}/subbasins__4.2.0_ORCA05.nc - domain_cfg_orca05_nemo4: 
${input_dir}/domain_cfg__ORCA05_zps_noclo.nc - + # File from Joakim. Use same settings as ORCA05 in 3.6 + # (zps, no isf, Caspian included) + domain_cfg_orca05_nemo4: ${input_dir}/domain_cfg__ORCA05_zps_noCaspian.nc + reshape_jra_orca05_nemo4_bicub: ${input_dir}/reshape_jra_bicub__4.2.0_ORCA05_v1.0.0.nc reshape_jra_orca05_nemo4_bilin: ${input_dir}/reshape_jra_bilin__4.2.0_ORCA05_v1.0.0.nc - + # inital data data_tem_orca05_nemo4: ${input_dir}/Tpot_PHC2.1_WOA98__4.2.0_ORCA05.L46_Kv1.0.0.nc data_sal_orca05_nemo4: ${input_dir}/Salt_PHC2.1_WOA98__4.2.0_ORCA05.L46_Kv1.0.0.nc - sss_orca05_nemo4: ${input_dir}/SSS_PHC2.1_WOA98__4.2.0_ORCA05.L46_Kv1.0.0.nc - runoff_orca05_nemo4: ${input_dir}/runoff_12month__4.2.0_ORCA05_Kv1.0.0.nc - + seaice_orca05_nemo4: ${input_dir}/seaice_c3.0_v19802004.0_ORCA05_r4.2.0.nc + + # remapped ghflux to orca05 + ghflux_v2.0_orca05_nemo4: ${input_dir}/ghflux_v2.0_ORCA05.nc + + # NEMO 4.2.x input for AGRIF + 1_domain_cfg: ${agrif_dir}/1_domain_cfg.nc + 1_subbasins: ${agrif_dir}/1_subbasins.nc + + # eORCA05.L75 files + domain_cfg_eorca05_nemo4: ${input_dir}/eORCA05.L75_domain_cfg_cut.nc # was cut from 722x604 to 720x603 + #coordinates_orca05_nemo4: eORCA05.L75_mesh_mask.nc + bfr_coef_eorca05_nemo4: ${input_dir}/eORCA05.L75_bfr2d.nc + subbasins_eorca05_nemo4: ${input_dir}/subbasins.nc + data_tem_eorca05_nemo4: ${input_dir}/eORCA05.L75_81B0_WOA18_1m_votemper.nc #eORCA05.L75_GOU18_1m_votemper.nc + data_sal_eorca05_nemo4: ${input_dir}/eORCA05.L75_81B0_WOA18_1m_vosaline.nc #eORCA05.L75_GOU18_1m_vosaline.nc + + + #forcing_files: ############## config files / namelist files: @@ -1437,13 +3273,29 @@ log_sources: ########################## coupling stuff +sst_grid_name: "opat" opat_fields: [OIceFrac, O_SSTSST, O_TepIce, O_IceTck, O_SnwTck, O_OCurx1, O_OCury1, O_OTaux1, O_OTauy1, O_ITaux1, O_ITauy1] +opat_mops_fields: [CO2OCEAN, CO2TRA, FF_OCE] + opac_fields: [O_QsrIce, O_QsrMix, O_QnsIce, O_QnsMix, OTotRain, OTotSnow, OIceEvap, OTotEvap, O_dQnsdT] 
+opac_mops_fields: [O_AtmCO2, CO2FLXOC] + +# agr1_t_fields are added in a choose block higher up since they are different for NEMO 3.6 and 4.2 +agr1_t_mops_fields: [1_CO2OCEAN, 1_CO2TRA, 1_FF_OCE] -agr1_t_fields: [1_OIceFrac, 1_O_SSTSST, 1_O_TepIce, 1_O_IceTck, 1_O_SnwTck, 1_O_OCurx1, 1_O_OCury1, - 1_O_OTaux1, 1_O_OTauy1, 1_O_ITaux1, 1_O_ITauy1] agr1_c_fields: [1_O_QsrIce, 1_O_QsrMix, 1_O_QnsIce, 1_O_QnsMix, 1_OTotRain, 1_OTotSnow, 1_OTotEvap, 1_OIceEvap, 1_O_dQnsdT] +agr1_c_mops_fields: [1_O_AtmCO2, 1_CO2FLXOC] + agr2_t_fields: [1_O_AgrSpg] +# default needs to be set even if not used +agr1_rc_fields: [1_OCalving] + +# How many periodic points on the NEMO grid +# This is 2 for NEMO 3.6 (i=1, and i=imax) +# But 0 for NEMO 4, where the halo points are not included in coupling +# oasis_p is set to 2 as default here, +# but is set to 0 for if generation is 4.2, somewhere above +oasis_p: 2 coupling_fields: "[[opat_fields-->FIELD]]": @@ -1469,13 +3321,14 @@ choose_runoff_method: "[[agr1_r_fields-->FIELD]]": grid: agr1 "[[agr1_rc_fields-->FIELD]]": - grid: agr1 + grid: agrc # Old method based on remapping runoff to a pre-made # runoff mask on the ORCA05 grid (rnfo) or AGRIF (agr1r) "*": ornf_fields: [O_Runoff] agr1_r_fields: [1_O_Runoff] + agr1_rc_fields: [1_OCalving] add_coupling_fields: "[[ornf_fields-->FIELD]]": grid: rnfo @@ -1488,25 +3341,19 @@ grids: nx: ${_nx} ny: ${_ny} oasis_grid_type: "LR" - number_of_overlapping_points: 2 # oasis P-value + number_of_overlapping_points: ${oasis_p} # oasis P-value opac: name: opac nx: ${_nx} ny: ${_ny} oasis_grid_type: "LR" #??? 
not sure, doesn't matter - number_of_overlapping_points: 2 # oasis P-value + number_of_overlapping_points: ${oasis_p} # oasis P-value opaa: name: opaa nx: ${_nx} ny: ${_ny} oasis_grid_type: "LR" number_of_overlapping_points: 0 - #opar: - # name: opar - # nx: ${_nx} - # ny: ${_ny} - # oasis_grid_type: "LR" - # number_of_overlapping_points: 2 rnfo: name: rnfo nx: ${_nx} @@ -1531,4 +3378,12 @@ grids: ny: ${_ny_nest1} oasis_grid_type: "LR" number_of_overlapping_points: 0 + + # agrc: AGRIF grid but with a mask corresponding to calving + agrc: + name: agrc + nx: ${_nx_nest1} + ny: ${_ny_nest1} + oasis_grid_type: "LR" + number_of_overlapping_points: 0 diff --git a/configs/components/nemo/nemo_with_restart_handling.yaml b/configs/components/nemo/nemo_with_restart_handling.yaml new file mode 100644 index 000000000..e82c4bfd5 --- /dev/null +++ b/configs/components/nemo/nemo_with_restart_handling.yaml @@ -0,0 +1,1339 @@ +# NEMO YAML CONFIGURATION FILE +# + +# DEFAULT VALUES +model: nemo +generation: "3.6" +executable: oceanx +version: GYRE_XIOS +include_models: +- xios + +runoff_method: "old" + +clean_command: cp cfg.inc ../cfg.txt; cd ../ + ./makenemo -n ${nemo.version} -m ${archfile} clean; + cd .. + +comp_command: export NEMO_TOPLEVEL=${model_dir}/../../ ; cp cfg.inc ../cfg.txt ; cd ../ ; + ./makenemo -n ${version} -m ${archfile} -j 24; cp -p ${version}/BLD/bin/nemo.exe + ${version}/BLD/bin/oceanx ; + cd .. 
+ +archfile: ESMTOOLS_generic_oasis_intel + + +destination: ${version} +install_bins: BLD/bin/oceanx + +available_versions: +- ORCA05_LIM2_KCM_AOW +- ORCA05_LIM2_KCM_AOW_autotools +- ORCA05_LIM2_KCM_AOW_FS +- ORCA05_LIM2_FOCI_AGRIF_AOW +- ORCA05_LIM2_KCM_AGRIF_OASISMCT4 +- ORCA05_LIM2_KCM_AOW_FS_OASISMCT4 +- ORCA12_LIM2_KCM_AOW_FS_OASISMCT4 +- ORCA05_LIM2_NEMO_JRA55_test +- GYRE_XIOS +- GYRE_PISCES + +choose_version: + + GYRE_XIOS: + requires: + - xios-2.5_r1910_ogcm + - nemobasemodel-3.6ogcm_test + archfile: ESMTOOLS_generic_intel + resolution: R4 + + GYRE_PISCES: + + # this should work but it does not if you run + # the setting below are ignored + # if set on the root level of this file, and infinite loop + # occurs if esm_master comp-foci-default/nemo is executed + # the enviornment_changes thing seems to be a mess, + # see also comment in foci.yaml section echam: + # manually set compiler_mpi in glogin.yaml / blogin.yaml for now + #environment_changes: +# choose_computer.name: +# glogin: +# compiler_mpi: intel2019_impi2019_nemo4 +# iolibraries: geomar_libs +# blogin: +# compiler_mpi: intel2019_impi2019_nemo4 +# iolibraries: geomar_libs + requires: + - xios-trunk + - nemobasemodel-4.2.x + + # TODO: include arch repo as separate component + comp_command: export NEMO_TOPLEVEL=${model_dir} ; export XIOS_TOPLEVEL=${model_dir}/../xios; + test -d arch/GEOMAR || git clone https://git.geomar.de/foci/src/nemo_arch.git arch/GEOMAR; + ./makenemo -n ${version} -m ${archfile} -r ${version} -j 24 ; cp -p cfgs/${version}/BLD/bin/nemo.exe cfgs/${nemo.version}/BLD/bin/oceanx + clean_command: ./makenemo -n ${nemo.version} -m ${archfile} -r ${nemo.version} clean + archfile: ESMTOOLS_generic_intel + destination: nemo-${nemo.version} + install_bins: cfgs/${nemo.version}/BLD/bin/oceanx + + namelist_dir: ${nemo.model_dir}/cfgs/${nemo.version}/EXPREF/ + + use_tracer: true + + add_input_sources: + namelist_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ref + namelist_top_ref: 
${nemo.model_dir}/cfgs/SHARED/namelist_top_ref + namelist_pisces_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_pisces_ref + namelist_pisces_cfg: ${nemo.model_dir}/cfgs/${nemo.version}/EXPREF/namelist_pisces_cfg + add_input_files: + namelist_top_ref: namelist_top_ref + namelist_pisces_ref: namelist_pisces_ref + namelist_pisces_cfg: namelist_pisces_cfg + + generation: "4.2" + resolution: "R4" + + UNCOUPLED_MARKUS: + # uncoupled test setup from Markus + # will be renamed once the name of the config is available + # from Markus Scheinert + + # this should work but it does not if you run + # the setting below are ignored + # if set on the root level of this file, and infinite loop + # occurs if esm_master comp-foci-default/nemo is executed + # the enviornment_changes thing seems to be a mess, + # see also comment in foci.yaml section echam: + # manually set compiler_mpi in glogin.yaml / blogin.yaml for now + #environment_changes: +# choose_computer.name: +# glogin: +# compiler_mpi: intel2019_impi2019_nemo4 +# iolibraries: geomar_libs +# blogin: +# compiler_mpi: intel2019_impi2019_nemo4 +# iolibraries: geomar_libs + # TODO: add NEMO config once available from Markus + requires: + - xios-trunk + - nemobasemodel-4.2.0 + + # TODO: include arch repo as separate component + comp_command: export NEMO_TOPLEVEL=${model_dir} ; export XIOS_TOPLEVEL=${model_dir}/../xios; + test -d arch/GEOMAR || git clone https://git.geomar.de/foci/src/nemo_arch.git arch/GEOMAR; + ./makenemo -n ${version} -m ${archfile} -r ${version} -j 24 ; cp -p cfgs/${version}/BLD/bin/nemo.exe cfgs/${nemo.version}/BLD/bin/oceanx + clean_command: ./makenemo -n ${nemo.version} -m ${archfile} -r ${nemo.version} clean + archfile: ESMTOOLS_generic_intel + destination: nemo-${nemo.version} + install_bins: cfgs/${nemo.version}/BLD/bin/oceanx + + namelist_dir: ${nemo.model_dir}/cfgs/${nemo.version}/EXPREF/ + + generation: "4.2" + resolution: "R4" + use_tracer: true + + add_input_sources: + namelist_ref: 
${nemo.model_dir}/cfgs/SHARED/namelist_ref + namelist_top_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_top_ref + namelist_pisces_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_pisces_ref + namelist_pisces_cfg: ${nemo.model_dir}/cfgs/${nemo.version}/EXPREF/namelist_pisces_cfg + add_input_files: + namelist_top_ref: namelist_top_ref + namelist_pisces_ref: namelist_pisces_ref + namelist_pisces_cfg: namelist_pisces_cfg + + # NEMO 4.2.x input files + bfr_coef: bfr_coef + domain_cfg_ExclClosedSeas: domain_cfg_ExclClosedSeas + domain_cfg_InclClosedSeas: + # TODO: this file still needs to be generated by Markus + domain_cfg_CaspianSea: domain_cfg_CaspianSea + # we probably never need this one + domain_cfg_UKmasks: domain_cfg_UKmasks + ghflux_v2.0: ghflux_v2.0 + reshape_ghflux2: reshape_ghflux2 + reshape_jra55do_bicub: reshape_jra55do_bicub + reshape_jra55do_bilin: reshape_jra55do_bilin + subbasins_eORCA025: subbasins_eORCA025 + sn_tem_woa13_omip_eORCA025: sn_tem_woa13_omip_eORCA025 + sn_sal_woa13_omip_eORCA025: sn_sal_woa13_omip_eORCA025 + + # TODO: add correct links in work dir if required + # input_in_work: + + ORCA05_LIM2_FOCI_AGRIF_AOW: + requires: + # seb-wahl: comment xios below if used with OIFS which also uses XIOS which causes XIOS + # to be cloned and compiled twice, need to file an issue + - xios-2.0_r982 + # TODO: test with newer version of XIOS + # - xios-2.5_r1910 + - nemobasemodel-3.6foci_agrif + branch: master + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + + # use tracer in AGRIF? + use_tracer_agrif: true + # use LIM2 in AGRIF? 
+ use_lim2_agrif: true + free_surface: linear + + add_input_files: + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + 1_sn_tem_levitus: 1_sn_tem_levitus + 1_sn_sal_levitus: 1_sn_sal_levitus + 1_ice_init: 1_ice_init + # reference namelists + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + 1_namelist_top_ref: 1_namelist_top_ref + # grids and coefficients + # batmeter: bathy_meter + cn_batmeter: bathy_updated + 1_cn_batmeter: 1_bathy_meter + 1_coordinates: 1_coordinates + fixed_grids: fixed_grids + # reshape files for nest + 1_reshape_bicub: 1_reshape_bicub + 1_reshape_bilin: 1_reshape_bilin + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + 1_namelist_top_ref: 1_namelist_top_ref + 1_coordinates: 1_coordinates_ORCA05.nc + + add_config_files: + 1_namelist_cfg: 1_namelist_cfg + 1_namelist_ice_cfg: 1_namelist_ice_cfg + 1_namelist_top_cfg: 1_namelist_top_cfg + + add_restart_in_sources: + restart_in_nest: 1_${parent_expid}_${prevstep_formatted_nest}_restart*_${parent_date!syear!smonth!sday}_*.nc + + add_restart_out_sources: + restart_out_nest: 1_${expid}_${newstep_formatted_nest}_restart*_${end_date_m1!syear!smonth!sday}_*.nc + + add_namelist_changes: + 1_namelist_cfg: + namrun: + cn_exp: ${expid} + nn_it000: ${thisstep_nest} + nn_itend: ${newstep_nest} + nn_date0: ${initial_date!syear!smonth!sday} # ${ini_date} + cn_ocerst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_${parent_date!syear!smonth!sday}${global_tag} + cn_ocerst_indir: '${work_dir}/' + cn_ocerst_out: restart_${end_date_m1!syear!smonth!sday} + cn_ocerst_outdir: '${work_dir}/' + nn_stock: ${newstep_nest} + nn_leapy: ${nn_leapy} + nn_rstctl: ${nn_rstctl} + ln_rstart: ${nemo.lresume} + namtsd: + ln_tsd_tradmp: ${ln_tsd_tradmp} + ln_tsd_init: ${ln_tsd_init} + namdom: + nn_closea: 1 + nn_msh: 
${nn_msh} + rn_rdt: ${time_step_nest} + namsbc: + nn_fsbc: ${nest_refinement} + ln_echam: '.true.' + nammpp: + jpni: ${jpni} + jpnj: ${jpnj} + jpnij: ${nproc} + namsbc_echam: + sn_owndi: ['A_OTaux1_echam6_08', 3, 'A_OTaux1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Uwnd', ' '] + sn_owndj: ['A_OTauy1_echam6_09', 3, 'A_OTauy1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Vwnd', ' '] + sn_iwndi: ['A_ITaux1_echam6_10', 3, 'A_ITaux1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Uwnd', ' '] + sn_iwndj: ['A_ITauy1_echam6_11', 3, 'A_ITauy1', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', 'Vwnd', ' '] + sn_iqsr: ['A_QsrIce_echam6_12', 3, 'A_QsrIce', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_qsr: ['A_QsrMix_echam6_13', 3, 'A_QsrMix', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_iqns: ['A_QnsIce_echam6_14', 3, 'A_QnsIce', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', '', ' '] + sn_qns: ['A_QnsMix_echam6_15', 3, 'A_QnsMix', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bicub.nc', '', ' '] + sn_prec: ['ATotRain_echam6_16', 3, 'ATotRain', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_snow: ['ATotSnow_echam6_17', 3, 'ATotSnow', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_ievp: ['AIceEvap_echam6_18', 3, 'AIceEvap', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + sn_dqns: ['A_dQnsdT_echam6_19', 3, 'A_dQnsdT', .false., .false., 'instant', 'reshape_T63invert_${nest1}_bilin.nc', '', ' '] + + # with AGRIF always use linear free surface + # always set free_surface: linear with AGRIF + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' + namelist_cfg: + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' 
+ + ORCA05_LIM2_KCM_AGRIF_OASISMCT4: + requires: + # seb-wahl: workaround if used with OIFS which also uses XIOS which causes XIOS + # to be cloned and compiled twice, need to file an issue + #- xios-2.5_r1910 + - nemobasemodel-3.6foci + branch: esm-tools + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + + # use tracer in AGRIF? + use_tracer_agrif: true + # use LIM2 in AGRIF? + use_lim2_agrif: true + free_surface: linear + + add_input_files: + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + 1_sn_tem_levitus: 1_sn_tem_levitus + 1_sn_sal_levitus: 1_sn_sal_levitus + 1_ice_init: 1_ice_init + # reference namelists + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + 1_namelist_top_ref: 1_namelist_top_ref + # grids and coefficients + cn_batmeter: bathy_updated + 1_cn_batmeter: 1_bathy_meter + 1_coordinates: 1_coordinates + fixed_grids: fixed_grids + # reshape files for nest + 1_reshape_bicub: 1_reshape_bicub + 1_reshape_bilin: 1_reshape_bilin + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + 1_namelist_ref: 1_namelist_ref + 1_namelist_ice_ref: 1_namelist_ice_ref + 1_namelist_top_ref: 1_namelist_top_ref + 1_coordinates: 1_coordinates_ORCA05.nc + + add_config_files: + 1_namelist_cfg: 1_namelist_cfg + 1_namelist_ice_cfg: 1_namelist_ice_cfg + 1_namelist_top_cfg: 1_namelist_top_cfg + + add_restart_in_sources: + restart_in_nest: 1_${parent_expid}_${prevstep_formatted_nest}_restart*_${parent_date!syear!smonth!sday}_*.nc + + add_restart_out_sources: + restart_out_nest: 1_${expid}_${newstep_formatted_nest}_restart*_${end_date_m1!syear!smonth!sday}_*.nc + + add_namelist_changes: + 1_namelist_cfg: + namrun: + cn_exp: ${expid} + nn_it000: ${thisstep_nest} + nn_itend: ${newstep_nest} + nn_date0: ${initial_date!syear!smonth!sday} # ${ini_date} + cn_ocerst_in: 
${parent_expid}_${prevstep_formatted_nest}_restart_${parent_date!syear!smonth!sday}${global_tag} + cn_ocerst_indir: '${work_dir}/' + cn_ocerst_out: restart_${end_date_m1!syear!smonth!sday} + cn_ocerst_outdir: '${work_dir}/' + nn_stock: ${newstep_nest} + nn_leapy: ${nn_leapy} + nn_rstctl: ${nn_rstctl} + ln_rstart: ${nemo.lresume} + #namcfg: + # jpidta: ${_nx_nest1} + # jpjdta: ${_ny_nest1} + # jpiglo: ${_nx_nest1} + # jpjglo: ${_ny_nest1} + namlbc: + rn_shlat: 2 + ln_vorlat: '.false.' + namagrif: + #nn_cln_update: 3 + rn_sponge_tra: 600 + rn_sponge_dyn: 600 + namtsd: + ln_tsd_tradmp: ${ln_tsd_tradmp} + ln_tsd_init: ${ln_tsd_init} + namdom: + nn_closea: 1 + nn_msh: ${nn_msh} + rn_rdt: ${time_step_nest} + namsbc: + nn_fsbc: 3 + nn_ice: 2 + ln_echam: '.false.' + ln_cpl: '.true.' + namtra_ldf: + rn_aeiv_0: 0. + rn_aht_0: 120. + rn_aht_m: 120. + namdyn_ldf: + rn_ahm_0_blp: -2.4e10 + rn_ahm_m_blp: -8.e9 + rn_ahm_m_lap: 0. + nammpp: + jpni: ${jpni} + jpnj: ${jpnj} + jpnij: ${nproc} + #namnc4: + # ln_nc4zip: '.false.' + # nn_nchunks_i: 4 + # nn_nchunks_j: 4 + # nn_nchunks_k: 4 + nambbl: + rn_ahtbbl: 1000 + namctl: + ln_ctl: '.false.' + nn_timing: 0 + # with AGRIF always use linear free surface + # always set free_surface: linear with AGRIF + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' + namelist_cfg: + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' 
+ + add_coupling_fields: + "[[agr1_t_fields-->FIELD]]": + grid: agr1 + "[[agr1_c_fields-->FIELD]]": + grid: agr1 + "[[agr1_r_fields-->FIELD]]": + grid: agr1r + "[[agr1_rc_fields-->FIELD]]": + grid: agr1 + "[[agr2_t_fields-->FIELD]]": + grid: agr2 + + ORCA05_LIM2_KCM_AOW: + requires: + #- xios-2.0_r982 + - nemobasemodel-3.6foci + branch: master + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + + add_input_files: + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + cn_batmeter: bathy_meter + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + + ORCA05_LIM2_KCM_AOW_autotools: + requires: + - nemobasemodel-3.6foci_autotools + branch: master_autotools + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/ORCA05_LIM2_KCM_AOW.git + + add_input_files: + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + cn_batmeter: bathy_meter + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + + ORCA05_LIM2_KCM_AOW_FS: + requires: + #- xios-2.0_r982 + - nemobasemodel-3.6foci + branch: master + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + + free_surface: nonlinear + + add_input_files: + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + cn_batmeter: bathy_meter + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + + ORCA05_LIM2_KCM_AOW_OASISMCT4: + requires: + #- xios-2.0_r982 + - nemobasemodel-3.6foci + branch: oasismct4 + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: 
https://git.geomar.de/foci/src/nemo_config/ORCA05_LIM2_KCM_AOW.git + + add_input_files: + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + cn_batmeter: bathy_meter + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + + ORCA05_LIM2_KCM_AOW_FS_OASISMCT4: + requires: + - nemobasemodel-3.6foci + branch: esm-tools + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + + free_surface: nonlinear + + add_input_files: + # init and (if used) damping data + sn_tem: sn_tem_levitus + sn_sal: sn_sal_levitus + ice_init: ice_init_kkg36f13h + cn_batmeter: bathy_meter + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + + #ORCA05_SI3_FOCI_FS_AOW: + # requires: + # - nemobasemodel-4.2.xfoci + # branch: esm-tools + # destination: nemo-${nemo.version}/cfgs/${nemo.version} + # git-repository: https://github.com/joakimkjellsson/ORCA05_SI3_FOCI_FS_AOW + # namelist_dir: ${nemo.model_dir}/cfgs/${nemo.version}/EXPREF/ + # add_input_sources: + # namelist_ref: ${nemo.model_dir}/cfgs/SHARED/namelist_ref + # generation: "4.2" + + ORCA12_LIM2_KCM_AOW_FS_OASISMCT4: + requires: + - nemobasemodel-3.6foci + branch: esm-tools + destination: nemo-${nemo.version}/CONFIG/${nemo.version} + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + + free_surface: nonlinear + + levels: L75 + + add_input_files: + # init and (if used) damping data + sn_tem: sn_tem_EN4_ORCA12 + sn_sal: sn_sal_EN4_ORCA12 + ice_init: ice_init_orca12 + cn_batmeter: bathy_meter + + input_in_work: + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + + add_namelist_changes: + namelist_cfg: + namcfg: + jperio: 4 # T-fold instead of F-fold + namtra_adv: + ln_traadv_tvd: '.false.' + ln_traadv_tvd_zts: '.true.' + namtra_ldf: + ln_traldf_grif: '.true.' 
+ rn_aht_0: 125. + rn_aht_m: 125. + namdyn_adv: + ln_dynzad_zts: '.true.' + #namdyn_ldf: + # rn_ahm_0_blp: -2.4e10 + # rn_ahm_m_blp: -8.e9 + namsbc_rnf: + sn_cnf: ['rnf_cal_msk', 0, 'rnfmsk', .false., .true., 'yearly', '', '', ''] + cn_dir: './' + ln_rnf_mouth: '.true.' + rn_hrnf: 15. + rn_avt_rnf: 1e-3 + rn_rfact: 1.0 + + namelist_ice_cfg: + namicedyn: + ahi0: 200 # reduce horizontal eddy diffusivity coefficient for sea-ice [m2/s] + telast: 120 # timescale for elastic EVP waves + + # NEMO standalone setup + ORCA05_LIM2_NEMO_JRA55_test: + requires: + - xios-2.5_r1910_ogcm + - nemobasemodel-3.6ogcm_test + branch: master + archfile: ESMTOOLS_generic_intel + #destination: nemo-${nemo.version} + # clone destination is a workaround implemented into esm_master for the fact that + # "destination" is used as both the target directory for a "git clone ... destination" + # and the top level directory. + clone_destination: nemo-${nemo.version}/CONFIG/${nemo.version} + # + #original repository is the one below. seb-wahl made a copy in 02/2020 + #since not all FOCI testers have access to the original repository + #git-repository: https://git.geomar.de/cmip6-omip/GEOMAR05.CORE-cycle6.git + git-repository: https://git.geomar.de/foci/src/nemo_config/${nemo.version}.git + + comp_command: export NEMO_TOPLEVEL=${model_dir} ; + if ! 
test -f ARCH/arch-${archfile}.fcm ; then cp CONFIG/${version}/EXP00/arch-${archfile}.fcm ARCH/; fi; + cd CONFIG ; + cp ${version}/cfg.inc cfg.txt; + ./makenemo -n ${version} -m ${archfile} -j 24 ; + cp -p ${version}/BLD/bin/nemo.exe ${version}/BLD/bin/oceanx ; cd ../ ; + + clean_command: cd CONFIG ; ./makenemo -n ${nemo.version} -m ${archfile} -r ${nemo.version} clean ; cd ../ + + # override hardcoded path to runoff forcing + add_namelist_changes: + namelist_cfg: + namsbc_rnf: + cn_dir: "./" + + add_input_files: + # reference namelists + #namelist_ref: namelist_ref + #namelist_ice_ref: namelist_ice_ref + # TODO: do we need top namelist + #namelist_top_ref: namelist_top_ref + + # grids and coefficients + cn_batmeter: bathy_meter + #coordinates: coordinates + #subbasins: subbasins + #coef-G70: coef-G70 + reshape_jra_orca05_bicub: reshape_jra_orca05_bicub + reshape_jra_orca05_bilin: reshape_jra_orca05_bilin + + # init data + sn_tem: sn_tem_woa13_omip + sn_sal: sn_sal_woa13_omip + #ice_init: ice_init_orca05 + ice_init: ice_init_kkg36f13h + + # restoring data / mask + sn_sss: sn_sss_phc21_woa98 + cn_resto: cn_resto_medsea + + input_in_work: + # TODO: Jan Klaus Rieck used subbasins__3.6.0_ORCA05_Kv1.0.0.nc + # subbasins: orca05_subbasins_3.6.nc + # TODO: Jan Klaus Rieck used bfr_coef__3.6.0_ORCA05_Kv1.0.0.nc + coef-G70: bfr_coef.nc + reshape_jra_orca05_bicub: reshape_jra_orca05_bicub.nc + reshape_jra_orca05_bilin: reshape_jra_orca05_bilin.nc + + sn_tem: data_tem.nc + sn_sal: data_sal.nc + sn_sss: Levitus_p2.1_1m_01_12_S_correc_ORCA_R05_SSS_EB_time.nc + ice_init: Ice_initialization.nc + namelist_ice_ref: namelist_ice_ref + + forcing_files: + # forcing data + sn_wndi: sn_wndi + sn_wndj: sn_wndj + sn_qsr: sn_qsr + sn_qlw: sn_qlw + sn_tair: sn_tair + sn_humi: sn_humi + sn_prec: sn_prec + sn_snow: sn_snow + sn_rnf: sn_rnf + + forcing_in_work: + sn_wndi: "uas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" + sn_wndj: 
"vas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" + sn_qsr: "rsds-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" + sn_qlw: "rlds-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" + sn_tair: "tas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" + sn_humi: "huss-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" + sn_prec: "prra_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" + sn_snow: "prsn_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr.nc" + sn_rnf: "sorunoff_JRA55-do-1-4-0_gr_orca05_y@YEAR@.nc" + + forcing_sources: + # JRA55-do drowned forcing + sn_wndi: + "${jra55_forcing_dir}/uas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 1960 + sn_wndj: + "${jra55_forcing_dir}/vas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 1960 + sn_qsr: + "${jra55_forcing_dir}/rsds-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 1960 + sn_qlw: + "${jra55_forcing_dir}/rlds-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 1960 + sn_tair: + "${jra55_forcing_dir}/tas-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 1960 + sn_humi: + "${jra55_forcing_dir}/huss-drowned_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 1960 + sn_prec: + "${jra55_forcing_dir}/prra_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 1960 + sn_snow: + "${jra55_forcing_dir}/prsn_input4MIPs_atmosphericState_OMIP_MRI-JRA55-do-1-4-0_gr_y@YEAR@.nc": + from: 1958 + to: 1960 + sn_rnf: + "${jra55_runoff_dir}/sorunoff_JRA55-do-1-4-0_gr_orca05_y@YEAR@.nc": + from: 1958 + to: 1960 + +metadata: + Organization: Nucleus for European Modelling of the Ocean + Institute: IPSL + Description: + NEMO standing for 
Nucleus for European Modelling of the Ocean is a + state-of-the-art modelling framework for research activities and + forecasting services in ocean and climate sciences, developed in a + sustainable way by a European consortium. + Authors: Gurvan Madec and NEMO System Team (nemo_st@locean-ipsl.umpc.fr) + Publications: + NEMO ocean engine + License: + Please make sure you have a license to use NEMO. In case you are + unsure, please contact redmine... + +resolution: ORCA05 +levels: L46 +time_step: 1800 +jpni: 0 +jpnj: 0 +nproc: $(( ${jpni} * ${jpnj} )) + +use_lim2: true +use_tracer: true +hosing: false +lresume: false +free_surface: linear +correct_neg_tracer_conc: true +# use damping? +ln_tsd_tradmp: false + +# Nest settings +nest1: no +use_tracer_agrif: false +use_lim2_agrif: false + +# calendar: +leapyear: True +nn_leapy: 1 + +# Restart or initial run settings +# values below will be set to false if lresume = true +ln_tsd_init: true +ln_limini: true +ln_limini_agrif: false +nn_msh: 1 +ln_meshmask: true +# will be set to 0 if lresume = true and run_number=1 +# i.e. 
restart from another run +nn_rstctl: 2 +# tracer stuff +ln_rsttr: ${lresume} +nn_rsttr : 2 +ln_trcdta: true + +# default directories +pool_dir: ${computer.pool_directories.focipool} +input_dir: ${pool_dir}/NEMO_${resolution}/input/${resolution} +namelist_dir: ${nemo.model_dir}/CONFIG/${nemo.version}/EXP00 + +model_dir: ${general.model_dir}/nemo-${nemo.version} +setup_dir: ${general.model_dir} +bin_dir: ${setup_dir}/bin + +# AGRIF +agrif_dir: ${pool_dir}/AGRIF/agrif_${nest1} + +# forcing for uncoupled setups +jra55_forcing_dir: ${pool_dir}/NEMO_JRA55-do_drowned_test +jra55_runoff_dir: ${pool_dir}/NEMO_ORCA05_JRA_runoff_forcing + +# start_date_m1: $((${start_date} - ${time_step}seconds)) +end_date_m1: $((${next_date} - ${time_step}seconds)) +runtime: $((${next_date} - ${start_date})) +timestep_per_run: $(( ${runtime} / ${time_step} )) + +seconds_since_initial: $((${start_date} - ${initial_date})) +steps_since_initial: $(( ${seconds_since_initial} / ${time_step})) + +prevstep: ${steps_since_initial} +thisstep: $(( ${prevstep} + 1)) +newstep: $(( ${prevstep} + ${timestep_per_run} )) + +prevstep_formatted: "<--format(%08d)-- ${prevstep}" +thisstep_formatted: "<--format(%08d)-- ${thisstep}" +newstep_formatted: "<--format(%08d)-- ${newstep}" + +time_step_nest: $(( ${time_step} / ${nest_refinement} )) +prevstep_nest: $(( ${steps_since_initial} * ${nest_refinement} )) +thisstep_nest: $(( ${prevstep_nest} + 1 )) +timesteps_per_run_nest: $(( ${timestep_per_run} * ${nest_refinement} )) +newstep_nest: $(( ${prevstep_nest} + ${timesteps_per_run_nest} )) + +prevstep_formatted_nest: "<--format(%08d)-- ${prevstep_nest}" +thisstep_formatted_nest: "<--format(%08d)-- ${thisstep_nest}" +newstep_formatted_nest: "<--format(%08d)-- ${newstep_nest}" + +# generate settings for a restart from a different run +ini_restart_steps: 0 # this value will be/needs to be set in the runscript +ini_restart_steps_nest: $(( ${ini_restart_steps} * ${nest_refinement} )) +global_tag: "" + 
+coupling_freq_in_steps: 6 + +# TODO: link with model start and end +#jrastart: 1958 +#jraend: 1960 + + +namelist_changes: + namelist_cfg: + namrun: + cn_exp: ${expid} + nn_it000: ${thisstep} + nn_itend: ${newstep} + nn_date0: ${initial_date!syear!smonth!sday} # ${ini_date} + cn_ocerst_in: ${parent_expid}_${prevstep_formatted}_restart_${parent_date!syear!smonth!sday}${global_tag} #${restart_in} + cn_ocerst_indir: '${work_dir}/' + cn_ocerst_out: restart_${end_date_m1!syear!smonth!sday} + cn_ocerst_outdir: '${work_dir}/' + nn_stock: ${newstep} + nn_leapy: ${nn_leapy} + nn_rstctl: ${nn_rstctl} + ln_rstart: ${nemo.lresume} + namtsd: + #ln_tsd_tradmp: ${ln_tsd_tradmp} + ln_tsd_init: ${ln_tsd_init} + namsbc: + nn_fsbc: ${coupling_freq_in_steps} + nammpp: + jpni: ${jpni} + jpnj: ${jpnj} + +choose_generation: + "4.2": + add_namelist_changes: + namelist_cfg: + namdom: + rn_dt: ${time_step} + ln_meshmask: ${ln_meshmask} + namcfg: + ln_closea: '.false.' + namtsd: + ln_tsd_dmp: ${ln_tsd_tradmp} + "3.6": + add_namelist_changes: + namelist_cfg: + namdom: + nn_closea: 1 + nn_msh: ${nn_msh} + rn_rdt: ${time_step} + nammpp: + jpnij: ${nproc} + namtsd: + ln_tsd_tradmp: ${ln_tsd_tradmp} + + +choose_leapyear: + False: + nn_leapy: 1 + +choose_free_surface: + linear: + add_namelist_changes: + namelist_cfg: + namdyn_hpg: + ln_hpg_zps: '.true.' + ln_hpg_sco: '.false.' + ln_dynhpg_imp: '.true.' + nonlinear: + add_namelist_changes: + namelist_cfg: + namdyn_hpg: + ln_hpg_zps: '.false.' + ln_hpg_sco: '.true.' + ln_dynhpg_imp: '.false.' + +choose_hosing: + true: + add_namelist_changes: + namelist_cfg: + namdyn_sbc: + ln_hosing: '.true.' 
+ +choose_lresume: + true: + ln_limini: false + ln_limini_agrif: false + ln_tsd_init: false + nn_msh: 0 + ln_meshmask: false + + choose_general.run_number: + 1: + nn_rstctl: 0 + nn_rsttr : 0 # only needed if tracers are used + # upon restart, nemo restart files are global files and have end on _global.nc (FOCI convention) + global_tag: "_global" + prevstep_formatted: "<--format(%08d)-- ${ini_restart_steps}" + prevstep_formatted_nest: "<--format(%08d)-- ${ini_restart_steps_nest}" + + false: + add_outdata_sources: + mesh_mask: "*mesh_mask*.nc" + + +# choices: +# +choose_resolution: + ORCA05: + _nx: 722 + _ny: 511 + time_step: 1800 + + # default input files currently used in all COUPLED setups + # developed in FB1-ME + # standalone setups use slightly different files + input_files: + # reference namelists + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + namelist_top_ref: namelist_top_ref + # grids and coefficients + coordinates: coordinates + subbasins: subbasins + coef-G70: coef-G70 + add_input_sources: + subbasins: ${input_dir}/orca05_subbasins_3.6.nc + + ORCA12: + _nx: 4322 + _ny: 3059 + time_step: 300 + + input_files: + namelist_ref: namelist_ref + namelist_ice_ref: namelist_ice_ref + namelist_top_ref: namelist_top_ref + coordinates: coordinates + subbasins: subbasins + rnfmask: rnfmask + add_input_sources: + subbasins: ${input_dir}/ORCA0083_basinmsk_fullarctic.nc + rnfmask: ${input_dir}/rnf_cal_msk.nc + + R4: + free_surface: no_option + use_lim2: false + #use_tracer: false + coupling_freq_in_steps: 1 + _nx: 32 + _ny: 22 + time_step: 600 + choose_generation: + "3.6": + add_namelist_changes: + namelist_cfg: + namtsd: + ln_tsd_tradmp: '.false.' + ln_tsd_init: '.false.' + nameos: + ln_useCT: '.false.' + "4.2": + add_namelist_changes: + namelist_cfg: + namtsd: + ln_tsd_init: '.false.' + ln_tsd_dmp: '.false.' 
+ input_files: + namelist_ref: namelist_ref + config_files: + namelist_cfg: namelist_cfg + input_in_work: + namelist_ref: namelist_ref + config_in_work: + namelist_cfg: namelist_cfg + +_nx_nest1: 0 +_ny_nest1: 0 +nest_refinement: 3 +choose_nest1: + # nest_refinement MUST (!!!) match the last value + # in AGRIF_FixedGrids.in + # for backward compatibility with Joakims settings + viking10: + _nx_nest1: 884 + _ny_nest1: 869 + nest_refinement: 3 + VIKING10: + _nx_nest1: 884 + _ny_nest1: 869 + nest_refinement: 3 + INALT10X: + _nx_nest1: 1404 + _ny_nest1: 924 + nest_refinement: 3 + NPAC10: + _nx_nest1: 1569 + _ny_nest1: 664 + nest_refinement: 3 + WG10: + _nx_nest1: 1414 + _ny_nest1: 944 + nest_refinement: 5 + +choose_levels: + L75: + add_namelist_changes: + namelist_cfg: + namdom: + # very important to set hmin to -10 or lower + rn_hmin: -10 + rn_e3zps_min: 25.0 + rn_e3zps_rat: 0.2 + ppsur: -3958.951371276829 + ppa0: 103.9530096000000 + ppa1: 2.415951269000000 + ppkth: 15.35101370000000 + ppacr: 7.0 + ppdzmin: 999999.0 + ldbletanh: '.true.' 
+ ppa2: 100.760928500000 + ppkth2: 48.029893720000 + ppacr2: 13.000000000000 + + +choose_use_lim2: + true: + add_config_files: + namelist_ice_cfg: namelist_ice_cfg + + add_namelist_changes: + namelist_ice_cfg: + namicerun: + cn_icerst_in: ${parent_expid}_${prevstep_formatted}_restart_ice_${parent_date!syear!smonth!sday}${global_tag} + cn_icerst_indir: '${work_dir}' + cn_icerst_out: restart_ice_${end_date_m1!syear!smonth!sday} + cn_icerst_outdir: '${work_dir}' + namiceini: + ln_limini: ${ln_limini} + + choose_use_lim2_agrif: + true: + add_config_files: + 1_namelist_ice_cfg: 1_namelist_ice_cfg + add_namelist_changes: + 1_namelist_ice_cfg: + namicerun: + cn_icerst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_ice_${parent_date!syear!smonth!sday}${global_tag} + cn_icerst_indir: '${work_dir}' + cn_icerst_out: restart_ice_${end_date_m1!syear!smonth!sday} + cn_icerst_outdir: '${work_dir}' + namiceini: + ln_limini: ${ln_limini_agrif} + +choose_use_tracer: + true: + add_config_files: + namelist_top_cfg: namelist_top_cfg + add_namelist_changes: + namelist_top_cfg: + namtrc_run: + cn_trcrst_in: ${parent_expid}_${prevstep_formatted}_restart_trc_${parent_date!syear!smonth!sday}${global_tag} #${restart_in} + cn_trcrst_indir: '${work_dir}' + cn_trcrst_out: restart_trc_${end_date_m1!syear!smonth!sday} + cn_trcrst_outdir: '${work_dir}' + ln_rsttr: ${ln_rsttr} + nn_rsttr : ${nn_rsttr} + namtrc: + ln_trcdta: ${ln_trcdta} + namtrc_rad: + ln_trcrad: ${correct_neg_tracer_conc} + + choose_use_tracer_agrif: + true: + add_config_files: + 1_namelist_top_cfg: 1_namelist_top_cfg + add_namelist_changes: + 1_namelist_top_cfg: + namtrc_run: + cn_trcrst_in: ${parent_expid}_${prevstep_formatted_nest}_restart_trc_${parent_date!syear!smonth!sday}${global_tag} #${restart_in} + cn_trcrst_indir: '${work_dir}' + cn_trcrst_out: restart_trc_${end_date_m1!syear!smonth!sday} + cn_trcrst_outdir: '${work_dir}' + ln_rsttr: ${ln_rsttr} + nn_rsttr : ${nn_rsttr} + namtrc: + ln_trcdta: ${ln_trcdta} + 
namtrc_rad: + ln_trcrad: ${correct_neg_tracer_conc} + +bin_sources: + nemo: ${bin_dir}/oceanx + +############## input files: + +#input_files: + +#input_in_work: + +input_sources: + # grids and coefficients + bathy_meter: ${input_dir}/bathy_meter.nc + coordinates: ${input_dir}/coordinates.nc + #subbasins: ${input_dir}/orca05_subbasins_3.6.nc + coef-G70: ${input_dir}/orca05_bfr_coef-G70.nc + reshape_jra_orca05_bicub: ${input_dir}/reshape_jra_bicub__3.6.0_ORCA05_Kv1.0.0.nc + reshape_jra_orca05_bilin: ${input_dir}/reshape_jra_bilin__3.6.0_ORCA05_Kv1.0.0.nc + + # initial data + sn_tem_levitus: ${input_dir}/Levitus_p2.1_1m_01_12_Tpot_ORCA_R05.nc + sn_tem_woa13_omip: ${input_dir}/woa13_decav_ptemp_OMIPinit__3.6.0_ORCA05.L46_Kv1.0.0.nc + sn_tem_EN4_ORCA12: ${input_dir}/votemper_EN4_gridded_195001-ORCA12_DROWN.nc + sn_sal_levitus: ${input_dir}/Levitus_p2.1_1m_01_12_S_correc_ORCA_R05.nc + sn_sal_woa13_omip: ${input_dir}/woa13_decav_salt_OMIPinit__3.6.0_ORCA05.L46_Kv1.0.0.nc + sn_sal_EN4_ORCA12: ${input_dir}/vosaline_EN4_gridded_195001-ORCA12_DROWN.nc + ice_init_kkg36f13h: ${input_dir}/Ice_initialization_KKG36F13H-R.nc + ice_init_orca05: ${input_dir}/Ice_initialization__3.6.0_ORCA05_Kv1.0.0.nc + ice_init_orca12: ${input_dir}/Ice_initialization_ORCA12.nc + + # restoring data / mask + sn_sss_phc21_woa98: ${input_dir}/Levitus_p2.1_1m_01_12_S_correc_ORCA_R05_SSS_EB.nc + cn_resto_medsea: ${input_dir}/dmpmsk_MedSea_orca05.l46_RA-II.nc + + # namelists + # add namelist_ref as input source to avoid removal of comments which + # makes namelist unreadable + namelist_ref: ${nemo.model_dir}/CONFIG/SHARED/namelist_ref + namelist_ice_ref: ${nemo.model_dir}/CONFIG/SHARED/namelist_ice_lim2_ref + namelist_top_ref: ${nemo.model_dir}/CONFIG/SHARED/namelist_top_ref + + # AGRIF: grids and coefficients + bathy_updated: ${agrif_dir}/bathy_updated.nc + 1_bathy_meter: ${agrif_dir}/1_bathy_meter.nc + 1_coordinates: ${agrif_dir}/1_coordinates_ORCA05.nc + fixed_grids: ${agrif_dir}/AGRIF_FixedGrids.in + + 
# ini files + 1_sn_tem_levitus: ${agrif_dir}/1_Levitus_p2.1_1m_01_12_Tpot_ORCA_R05.nc + 1_sn_sal_levitus: ${agrif_dir}/1_Levitus_p2.1_1m_01_12_S_correc_ORCA_R05.nc + 1_ice_init: ${agrif_dir}/1_Ice_initialization.nc + + # reshape files for nest, only required with file based coupling + # TODO: replace T63 with proper variable + 1_reshape_bicub: ${agrif_dir}/1_reshape_T63invert_${nest1}_bicub.nc + 1_reshape_bilin: ${agrif_dir}/1_reshape_T63invert_${nest1}_bilin.nc + + # add namelist_ref as input source to avoid removal of comments which + # makes namelist unreadable + 1_namelist_ref: ${nemo.model_dir}/CONFIG/SHARED/namelist_ref + 1_namelist_ice_ref: ${nemo.model_dir}/CONFIG/SHARED/namelist_ice_lim2_ref + 1_namelist_top_ref: ${nemo.model_dir}/CONFIG/SHARED/namelist_top_ref + + # NEMO 4.2.x input files + bfr_coef: bfr_coef_eORCA025_r4.2.0.nc + # coordinates_eORCA025_r4.2.0__v1.0.nc + domain_cfg_ExclClosedSeas: domain_cfg_eORCA025_r4.2.0__ExclClosedSeas.nc + domain_cfg_InclClosedSeas: domain_cfg_eORCA025_r4.2.0__InclClosedSeas.nc + # TODO: this file still needs to be generated by Markus + domain_cfg_CaspianSea: domain_cfg_eORCA025_r4.2.0__CaspianSea.nc + # we probably never need this one + domain_cfg_UKmasks: domain_cfg_eORCA025_r4.2.0__UKmasks.nc + ghflux_v2.0: ghflux_v2.0.nc + reshape_ghflux2: reshape_ghflux2_eORCA025_r4.2.0_bilin.nc + reshape_jra55do_bicub: reshape_jra55do_eORCA025_r4.2.0_bicub.nc + reshape_jra55do_bilin: reshape_jra55do_eORCA025_r4.2.0_bilin.nc + subbasins_eORCA025: subbasins_eORCA025_r4.2.0.nc + sn_tem_woa13_omip_eORCA025: woa13_decav_ptemp_OMIPinit_eORCA025.L75_4.2.0_Kv1.0.0.nc + sn_sal_woa13_omip_eORCA025: woa13_decav_salt_OMIPinit_eORCA025.L75_4.2.0_Kv1.0.0.nc + +#forcing_files: + +############## config files / namelist files: + +config_files: + namelist_cfg: namelist_cfg + +config_sources: + namelist_cfg: ${namelist_dir}/namelist_cfg + namelist_top_cfg: ${namelist_dir}/namelist_top_cfg + namelist_ice_cfg: ${namelist_dir}/namelist_ice_cfg + 
1_namelist_cfg: ${namelist_dir}/1_namelist_cfg + 1_namelist_top_cfg: ${namelist_dir}/1_namelist_top_cfg + 1_namelist_ice_cfg: ${namelist_dir}/1_namelist_ice_cfg + +config_in_work: + namelist_cfg: namelist_cfg + namelist_ice_cfg: namelist_ice_cfg + namelist_top_cfg: namelist_top_cfg + 1_namelist_cfg: 1_namelist_cfg + 1_namelist_ice_cfg: 1_namelist_ice_cfg + 1_namelist_top_cfg: 1_namelist_top_cfg + +namelists: + - namelist_cfg + - namelist_top_cfg + - namelist_ice_cfg + - 1_namelist_cfg + - 1_namelist_top_cfg + - 1_namelist_ice_cfg + +############## restart files: +# +restart_in_sources: + restart_in: ${parent_expid}_${prevstep_formatted}_restart*_${parent_date!syear!smonth!sday}_*.nc + +restart_out_sources: + restart_out: ${expid}_${newstep_formatted}_restart*_${end_date_m1!syear!smonth!sday}_*.nc + +############## output files: + +outdata_sources: + 1h: "*${expid}_1h_${start_date!syear!smonth!sday}_${end_date_m1!syear!smonth!sday}_*.nc" + 3h: "*${expid}_3h_${start_date!syear!smonth!sday}_${end_date_m1!syear!smonth!sday}_*.nc" + 6h: "*${expid}_6h_${start_date!syear!smonth!sday}_${end_date_m1!syear!smonth!sday}_*.nc" + 1d: "*${expid}_1d_${start_date!syear!smonth!sday}_${end_date_m1!syear!smonth!sday}_*.nc" + 5d: "*${expid}_5d_${start_date!syear!smonth!sday}_${end_date_m1!syear!smonth!sday}_*.nc" + 1m: "*${expid}_1m_${start_date!syear!smonth!sday}_${end_date_m1!syear!smonth!sday}_*.nc" + +############## log files: + +log_sources: + tracer: tracer.stat + ocean: '*ocean.output' + namout: output.namelist.* + solver_stat: solver.stat + time_step: time.step + layout: layout.dat + +########################## coupling stuff + +opat_fields: [OIceFrac, O_SSTSST, O_TepIce, O_IceTck, O_SnwTck, O_OCurx1, O_OCury1, O_OTaux1, O_OTauy1, O_ITaux1, O_ITauy1] +opac_fields: [O_QsrIce, O_QsrMix, O_QnsIce, O_QnsMix, OTotRain, OTotSnow, OIceEvap, OTotEvap, O_dQnsdT] + +agr1_t_fields: [1_OIceFrac, 1_O_SSTSST, 1_O_TepIce, 1_O_IceTck, 1_O_SnwTck, 1_O_OCurx1, 1_O_OCury1, + 1_O_OTaux1, 
1_O_OTauy1, 1_O_ITaux1, 1_O_ITauy1] +agr1_c_fields: [1_O_QsrIce, 1_O_QsrMix, 1_O_QnsIce, 1_O_QnsMix, 1_OTotRain, 1_OTotSnow, 1_OTotEvap, 1_OIceEvap, 1_O_dQnsdT] +agr2_t_fields: [1_O_AgrSpg] + +coupling_fields: + "[[opat_fields-->FIELD]]": + grid: opat + "[[opac_fields-->FIELD]]": + grid: opac + +choose_runoff_method: + # EM21 developed by Eric Maisonnave in 2021 + # Runoff is split to river and calving. + # Also remapped from runoff mapper grid (rnfm) + # to the opac grid (for NEMO) and agr1 (for AGRIF) + "EM21": + ornf_fields: [O_Runoff] + ocal_fields: [OCalving] + agr1_r_fields: [1_O_Runoff] + agr1_rc_fields: [1_OCalving] + add_coupling_fields: + "[[ornf_fields-->FIELD]]": + grid: opac + "[[ocal_fields-->FIELD]]": + grid: opaa + "[[agr1_r_fields-->FIELD]]": + grid: agr1 + "[[agr1_rc_fields-->FIELD]]": + grid: agr1 + + # Old method based on remapping runoff to a pre-made + # runoff mask on the ORCA05 grid (rnfo) or AGRIF (agr1r) + "*": + ornf_fields: [O_Runoff] + agr1_r_fields: [1_O_Runoff] + add_coupling_fields: + "[[ornf_fields-->FIELD]]": + grid: rnfo + "[[agr1_r_fields-->FIELD]]": + grid: agr1r + +grids: + opat: + name: opat + nx: ${_nx} + ny: ${_ny} + oasis_grid_type: "LR" + number_of_overlapping_points: 2 # oasis P-value + opac: + name: opac + nx: ${_nx} + ny: ${_ny} + oasis_grid_type: "LR" #??? 
not sure, doesn't matter + number_of_overlapping_points: 2 # oasis P-value + opaa: + name: opaa + nx: ${_nx} + ny: ${_ny} + oasis_grid_type: "LR" + number_of_overlapping_points: 0 + #opar: + # name: opar + # nx: ${_nx} + # ny: ${_ny} + # oasis_grid_type: "LR" + # number_of_overlapping_points: 2 + rnfo: + name: rnfo + nx: ${_nx} + ny: ${_ny} + oasis_grid_type: "LR" + number_of_overlapping_points: 0 + agr1: + name: agr1 + nx: ${_nx_nest1} + ny: ${_ny_nest1} + oasis_grid_type: "LR" + number_of_overlapping_points: 0 + agr2: + name: agr2 + nx: ${_nx_nest1} + ny: ${_ny_nest1} + oasis_grid_type: "LR" + number_of_overlapping_points: 0 + agr1r: + name: agr1r + nx: ${_nx_nest1} + ny: ${_ny_nest1} + oasis_grid_type: "LR" + number_of_overlapping_points: 0 + diff --git a/configs/components/nemobasemodel/nemobasemodel.yaml b/configs/components/nemobasemodel/nemobasemodel.yaml index d85de365e..83fd4f9c1 100644 --- a/configs/components/nemobasemodel/nemobasemodel.yaml +++ b/configs/components/nemobasemodel/nemobasemodel.yaml @@ -3,6 +3,7 @@ available_versions: - 3.6ogcm_test - 3.6foci - 3.6foci_autotools +- 4.2.0 - 4.2.x choose_version: @@ -15,7 +16,12 @@ choose_version: # to use tag 4.2.0 as a basis for all our developments branch: 4.2.0 git-repository: https://forge.nemo-ocean.eu/nemo/nemo.git - + 4.2.2: + # 4.2.2 includes a bug fix for sea-ice drag. 
+ # apparently very important + branch: 4.2.2 + git-repository: https://forge.nemo-ocean.eu/nemo/nemo.git + 3.6.3.x: branch: release-3.6.3.x git-repository: https://git.geomar.de/NEMO/NEMOGCM.git diff --git a/configs/components/oasis3mct/oasis3mct.yaml b/configs/components/oasis3mct/oasis3mct.yaml index dd98ae986..4d62ba6d1 100644 --- a/configs/components/oasis3mct/oasis3mct.yaml +++ b/configs/components/oasis3mct/oasis3mct.yaml @@ -4,7 +4,7 @@ cf_name_dir: ${model_dir} time_step: 900 # changed by coupled setup anyway norestart: F use_lucia: False -mct_version: "2.8" # TODO: set mct_version = version (can't do at the moment as version is a string) +mct_version: "4.0" # TODO: set mct_version = version (can't do at the moment as version is a string) git-repository: https://gitlab.dkrz.de/modular_esm/oasis3-mct.git contact: "miguel.andres-martinez(at)awi.de, paul.gierz(at)awi.de" @@ -33,6 +33,7 @@ available_versions: - '4.0-awicm-3.0' - '4.0-awicm-3.1' - '5.0-smhi' +- '5.0-geomar' - '6.0-YAC' choose_version: '2.8-paleodyn': @@ -62,6 +63,23 @@ choose_version: 5.0-smhi: branch: main_dkrz git-repository: https://gitlab.dkrz.de/ec-earth/oasis3mct5-ece + 5.0-geomar: + # this version is taken directly from the CERFACS gitlab + # it compiles in the standard way, i.e. not with cmake + branch: master + git-repository: https://git.geomar.de/foci/src/oasis3-mct5.git + # compile with make -f TopMakefileOasis3 + # then copy all modules etc to a oasis/build/lib directory to match the older versions + comp_command: 'export ESM_OASIS_DIR=${model_dir} ; cd util/make_dir/ ; cp make.ESMTOOLS make.inc ; make -f TopMakefileOasis3 ; cd ../../ ; mv INSTALL_OASIS.ESMTOOLS/lib/libpsmile.MPI1.a INSTALL_OASIS.ESMTOOLS/lib/libpsmile.a ; mkdir -p build/lib/psmile/scrip ; mkdir -p build/lib/psmile/mct/mpeu ; cp INSTALL_OASIS.ESMTOOLS/build-static/lib/psmile.MPI1/*.mod build/lib/psmile/. ; cp INSTALL_OASIS.ESMTOOLS/build-static/lib/scrip/*mod build/lib/psmile/scrip/. 
; cp INSTALL_OASIS.ESMTOOLS/build-static/lib/mct/*mod build/lib/psmile/mct/. ; cp INSTALL_OASIS.ESMTOOLS/lib/libmct.a build/lib/psmile/mct/. ; cp INSTALL_OASIS.ESMTOOLS/lib/libmpeu.a build/lib/psmile/mct/. ; cp INSTALL_OASIS.ESMTOOLS/lib/libpsmile.a build/lib/psmile/. ; cp INSTALL_OASIS.ESMTOOLS/lib/libscrip.a build/lib/psmile/scrip/. ' + install_libs: + - INSTALL_OASIS.ESMTOOLS/lib/libpsmile.a + - INSTALL_OASIS.ESMTOOLS/lib/libmct.a + - INSTALL_OASIS.ESMTOOLS/lib/libmpeu.a + - INSTALL_OASIS.ESMTOOLS/lib/libscrip.a + # this is a quick fix for now + clean_command: 'rm -rf INSTALL_OASIS.ESMTOOLS' + # this is how we should do it but it does not work + #clean_command: 'export ESM_OASIS_DIR=${model_dir} ; cd util/make_dir/ ; make -f TopMakefileOasis3 realclean ; cd ../../ ; rm -rf build lib/libpsmile.a lib/libscrip.a lib/libmct.a lib/libmpeu.a' ec-earth: branch: ec-earth-version foci: diff --git a/configs/components/oifs/oifs.env.yaml b/configs/components/oifs/oifs.env.yaml index 73fdca41c..d36e86dc9 100644 --- a/configs/components/oifs/oifs.env.yaml +++ b/configs/components/oifs/oifs.env.yaml @@ -16,8 +16,6 @@ runtime_environment_changes: PATH: "$ECCODESROOT/bin:${PATH}" blogin: - add_module_actions: - - "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" add_export_vars: - 'OIFS_FFIXED=""' - 'GRIB_SAMPLES_PATH="$ECCODESROOT/share/eccodes/ifs_samples/grib1_mlgrib2/"' @@ -121,14 +119,21 @@ runtime_environment_changes: OMP_STACKSIZE: 128M nesh: - compiler_mpi: intel2020_impi2020 - add_module_actions: - - "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" + compiler_mpi: intel2023_impi2021 add_export_vars: - 'OIFS_FFIXED=""' - 'GRIB_SAMPLES_PATH="$ECCODESROOT/share/eccodes/ifs_samples/grib1_mlgrib2/"' - 'DR_HOOK_IGNORE_SIGNALS=${dr_hook_ignore_signals}' - + + olaf: + add_export_vars: + - 'OIFS_FFIXED=""' + - 'GRIB_SAMPLES_PATH="$ECCODESROOT/share/eccodes/ifs_samples/grib1_mlgrib2/"' + - 'DR_HOOK_IGNORE_SIGNALS=${dr_hook_ignore_signals}' + # OpenMP + - 
'OMP_SCHEDULE=STATIC' + - 'OMP_STACKSIZE=128M' + albedo: add_export_vars: - 'GRIB_SAMPLES_PATH="$ECCODESROOT/share/eccodes/ifs_samples/grib1_mlgrib2/"' @@ -139,22 +144,24 @@ compiletime_environment_changes: choose_computer.name: blogin: add_export_vars: - - "LAPACK_LIB='-mkl=sequential'" - - "LAPACK_LIB_DEFAULT='-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" + - "LAPACK_LIB='-qmkl=sequential'" + # The mkl_sequential flag is no long valid with Intel 2024 and beyond. + # Time to swtich to qmkl=sequential + - "LAPACK_LIB_DEFAULT='-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -qmkl=sequential'" # FFTW is included in MKL so we link to that - "OIFS_FFTW_DIR='-L$MKLROOT/lib/intel64'" - "OIFS_FFTW_INCLUDE='-I$OIFS_FFTW_DIR/include/'" - - "OIFS_FFTW_LIB='-L$OIFS_FFTW_DIR/lib/ -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" + - "OIFS_FFTW_LIB='-L$OIFS_FFTW_DIR/lib/ -lmkl_intel_lp64 -lmkl_core -qmkl=sequential'" # TODO: figure out whether those two are still needed - "ESM_NETCDF_C_DIR=$NETCDFROOT" - "ESM_NETCDF_F_DIR=$NETCDFFROOT" # grib api / eccodes - 'OIFS_GRIB_API_INCLUDE="-I$ECCODESROOT/include"' - - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -leccodes_f90 -leccodes"' + - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -L$ECCODESROOT/lib64 -leccodes_f90 -leccodes"' - 'OIFS_GRIB_INCLUDE="$OIFS_GRIB_API_INCLUDE"' - 'OIFS_GRIB_LIB="$OIFS_GRIB_API_LIB"' - 'OIFS_GRIB_API_BIN="$ECCODESROOT/bin"' - - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' + - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -qmkl=sequential"' # oasis - 'OIFS_OASIS_BASE=$(pwd)/oasis' - 'OIFS_OASIS_INCLUDE="-I$OIFS_OASIS_BASE/build/lib/psmile -I$OIFS_OASIS_BASE/build/lib/psmile/scrip -I$OIFS_OASIS_BASE/build/lib/psmile/mct -I$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu"' @@ -171,46 +178,168 @@ compiletime_environment_changes: - 'OIFS_FCDEFS="BLAS LITTLE LINUX INTEGER_IS_INT"' - 'OIFS_LFLAGS="$OIFS_MPI_LIB -qopenmp"' - 
'OIFS_CC=$CC' - - 'OIFS_CFLAGS="-qopenmp -fp-model precise -O1 -xCORE_AVX2 -g -traceback -qopt-report=0 -fpe0"' + # Intel icx (C compiler) no longer supports the -fpe0 flag. + # Also, the C standard is raised, so we need to use -std=gnu89 to enforce C89 + # since implicit declarations were removed in C99 and onward. + - 'OIFS_CFLAGS="-qopenmp -fp-model precise -O1 -xCORE_AVX2 -g -traceback -qopt-report=0 -std=gnu89"' - 'OIFS_CCDEFS="LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS"' glogin: - add_export_vars: - - - "LAPACK_LIB='-mkl=sequential'" - - "LAPACK_LIB_DEFAULT='-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" - # FFTW is included in MKL so we link to that - - "OIFS_FFTW_DIR='-L$MKLROOT/lib/intel64'" - - "OIFS_FFTW_INCLUDE='-I$OIFS_FFTW_DIR/include/'" - - "OIFS_FFTW_LIB='-L$OIFS_FFTW_DIR/lib/ -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" - # TODO: figure out whether those two are still needed - - "ESM_NETCDF_C_DIR=$NETCDFROOT" - - "ESM_NETCDF_F_DIR=$NETCDFFROOT" - # grib api / eccodes - - 'OIFS_GRIB_API_INCLUDE="-I$ECCODESROOT/include"' - - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -leccodes_f90 -leccodes"' - - 'OIFS_GRIB_INCLUDE="$OIFS_GRIB_API_INCLUDE"' - - 'OIFS_GRIB_LIB="$OIFS_GRIB_API_LIB"' - - 'OIFS_GRIB_API_BIN="$ECCODESROOT/bin"' - - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' - # oasis - - 'OIFS_OASIS_BASE=$(pwd)/oasis' - - 'OIFS_OASIS_INCLUDE="-I$OIFS_OASIS_BASE/build/lib/psmile -I$OIFS_OASIS_BASE/build/lib/psmile/scrip -I$OIFS_OASIS_BASE/build/lib/psmile/mct -I$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu"' - - 'OIFS_OASIS_LIB="-L$OIFS_OASIS_BASE/build/lib/psmile -L$OIFS_OASIS_BASE/build/lib/psmile/scrip -L$OIFS_OASIS_BASE/build/lib/psmile/mct -L$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu -lpsmile -lmct -lmpeu -lscrip"' - # netcdf - - 'OIFS_NETCDF_INCLUDE="-I$NETCDFROOT/include"' - - 'OIFS_NETCDF_LIB="-L$NETCDFROOT/lib -lnetcdf -lnetcdff"' - - 'OIFS_NETCDFF_INCLUDE="-I$NETCDFFROOT/include"' - - 
'OIFS_NETCDFF_LIB="-L$NETCDFFROOT/lib -lnetcdff"' - # compilers and compile switches - - 'OIFS_FC=$FC' - - 'OIFS_FFLAGS="-qopenmp -r8 -fp-model precise -align array32byte -O1 -xCORE_AVX2 -g -traceback -convert big_endian -fpe0"' - - 'OIFS_FFIXED=""' - - 'OIFS_FCDEFS="BLAS LITTLE LINUX INTEGER_IS_INT"' - - 'OIFS_LFLAGS="$OIFS_MPI_LIB -qopenmp"' - - 'OIFS_CC=$CC' - - 'OIFS_CFLAGS="-qopenmp -fp-model precise -O1 -xCORE_AVX2 -g -traceback -qopt-report=0 -fpe0"' - - 'OIFS_CCDEFS="LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS"' + choose_computer.compiler_mpi: + intel2021_impi2019: + add_export_vars: + - 'LAPACK_LIB="-qmkl=sequential"' + - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -qmkl=sequential"' + # FFTW is included in MKL so we link to that + - 'OIFS_FFTW_DIR="$MKLROOT/lib/intel64"' + - 'OIFS_FFTW_INCLUDE="-I$OIFS_FFTW_DIR/include/"' + - 'OIFS_FFTW_LIB="-L$OIFS_FFTW_DIR/lib/ -lmkl_intel_lp64 -lmkl_core -qmkl=sequential"' + # TODO: figure out whether those two are still needed + - "ESM_NETCDF_C_DIR=$NETCDFROOT" + - "ESM_NETCDF_F_DIR=$NETCDFFROOT" + # grib api / eccodes + - 'OIFS_GRIB_API_INCLUDE="-I$ECCODESROOT/include"' + - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -L$ECCODESROOT/lib64 -leccodes_f90 -leccodes"' + - 'OIFS_GRIB_INCLUDE="$OIFS_GRIB_API_INCLUDE"' + - 'OIFS_GRIB_LIB="$OIFS_GRIB_API_LIB"' + - 'OIFS_GRIB_API_BIN="$ECCODESROOT/bin"' + - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -qmkl=sequential"' + # oasis + - 'OIFS_OASIS_BASE=$(pwd)/oasis' + - 'OIFS_OASIS_INCLUDE="-I$OIFS_OASIS_BASE/build/lib/psmile -I$OIFS_OASIS_BASE/build/lib/psmile/scrip -I$OIFS_OASIS_BASE/build/lib/psmile/mct -I$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu"' + - 'OIFS_OASIS_LIB="-L$OIFS_OASIS_BASE/build/lib/psmile -L$OIFS_OASIS_BASE/build/lib/psmile/scrip -L$OIFS_OASIS_BASE/build/lib/psmile/mct -L$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu -lpsmile -lmct -lmpeu -lscrip"' + # netcdf + - 'OIFS_NETCDF_INCLUDE="-I$NETCDFROOT/include"' + - 
'OIFS_NETCDF_LIB="-L$NETCDFROOT/lib -lnetcdf -lnetcdff"' + - 'OIFS_NETCDFF_INCLUDE="-I$NETCDFFROOT/include"' + - 'OIFS_NETCDFF_LIB="-L$NETCDFFROOT/lib -lnetcdff"' + # + - 'OIFS_FC=$FC' + - 'OIFS_FFLAGS="-qopenmp -r8 -align array32byte -O3 -fp-model precise -g -traceback -xCORE_AVX2 -convert big_endian"' + #- 'OIFS_FFLAGS="-qopenmp -r8 -align array32byte -O3 -g -traceback -xCORE_AVX2 -convert big_endian"' + - 'OIFS_FFIXED=""' + - 'OIFS_FCDEFS="BLAS LITTLE LINUX INTEGER_IS_INT"' + - 'OIFS_LFLAGS="$OIFS_MPI_LIB -qopenmp"' + - 'OIFS_CC=$CC' + - 'OIFS_CFLAGS="-qopenmp -fp-model precise -O3 -g -traceback -xCORE_AVX2 -qopt-report=0 -std=gnu89"' + #- 'OIFS_CFLAGS="-qopenmp -O3 -g -traceback -xCORE_AVX2 -qopt-report=0 -std=gnu89"' + - 'OIFS_CCDEFS="LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS"' + + intel2023_impi2021: + add_export_vars: + - "LAPACK_LIB='-qmkl=sequential'" + - "LAPACK_LIB_DEFAULT='-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -qmkl=sequential'" + # FFTW is included in MKL so we link to that + - "OIFS_FFTW_DIR='$MKLROOT/lib/intel64'" + - "OIFS_FFTW_INCLUDE='-I$OIFS_FFTW_DIR/include/'" + - "OIFS_FFTW_LIB='-L$OIFS_FFTW_DIR/lib/ -lmkl_intel_lp64 -lmkl_core -qmkl=sequential'" + # TODO: figure out whether those two are still needed + - "ESM_NETCDF_C_DIR=$NETCDFROOT" + - "ESM_NETCDF_F_DIR=$NETCDFFROOT" + # grib api / eccodes + - 'OIFS_GRIB_API_INCLUDE="-I$ECCODESROOT/include"' + - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -L$ECCODESROOT/lib64 -leccodes_f90 -leccodes"' + - 'OIFS_GRIB_INCLUDE="$OIFS_GRIB_API_INCLUDE"' + - 'OIFS_GRIB_LIB="$OIFS_GRIB_API_LIB"' + - 'OIFS_GRIB_API_BIN="$ECCODESROOT/bin"' + - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -qmkl=sequential"' + # oasis + - 'OIFS_OASIS_BASE=$(pwd)/oasis' + - 'OIFS_OASIS_INCLUDE="-I$OIFS_OASIS_BASE/build/lib/psmile -I$OIFS_OASIS_BASE/build/lib/psmile/scrip -I$OIFS_OASIS_BASE/build/lib/psmile/mct -I$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu"' + - 
'OIFS_OASIS_LIB="-L$OIFS_OASIS_BASE/build/lib/psmile -L$OIFS_OASIS_BASE/build/lib/psmile/scrip -L$OIFS_OASIS_BASE/build/lib/psmile/mct -L$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu -lpsmile -lmct -lmpeu -lscrip"' + # netcdf + - 'OIFS_NETCDF_INCLUDE="-I$NETCDFROOT/include"' + - 'OIFS_NETCDF_LIB="-L$NETCDFROOT/lib -lnetcdf -lnetcdff"' + - 'OIFS_NETCDFF_INCLUDE="-I$NETCDFFROOT/include"' + - 'OIFS_NETCDFF_LIB="-L$NETCDFFROOT/lib -lnetcdff"' + # compilers and compile switches + # in case of crashes, you may activate -g -traceback -fpe0 for FFLAGS and CFLAGS + # but these slow down the model by 30%. + - 'OIFS_FC=$FC' + - 'OIFS_FFLAGS="-qopenmp -r8 -fp-model precise -align array32byte -O3 -g -traceback -xCORE_AVX2 -convert big_endian"' + - 'OIFS_FFIXED=""' + - 'OIFS_FCDEFS="BLAS LITTLE LINUX INTEGER_IS_INT"' + - 'OIFS_LFLAGS="$OIFS_MPI_LIB -qopenmp"' + - 'OIFS_CC=$CC' + - 'OIFS_CFLAGS="-qopenmp -fp-model precise -O3 -g -traceback -xCORE_AVX2 -qopt-report=0 -std=gnu89"' + - 'OIFS_CCDEFS="LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS"' + + intel2023_ompi416: + add_export_vars: + - "LAPACK_LIB='-qmkl=sequential'" + - "LAPACK_LIB_DEFAULT='-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" + # FFTW is included in MKL so we link to that + - "OIFS_FFTW_DIR='-L$MKLROOT/lib/intel64'" + - "OIFS_FFTW_INCLUDE='-I$OIFS_FFTW_DIR/include/'" + - "OIFS_FFTW_LIB='-L$OIFS_FFTW_DIR/lib/ -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" + # TODO: figure out whether those two are still needed + - "ESM_NETCDF_C_DIR=$NETCDFROOT" + - "ESM_NETCDF_F_DIR=$NETCDFFROOT" + # grib api / eccodes + - 'OIFS_GRIB_API_INCLUDE="-I$ECCODESROOT/include"' + - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -L$ECCODESROOT/lib64 -leccodes_f90 -leccodes"' + - 'OIFS_GRIB_INCLUDE="$OIFS_GRIB_API_INCLUDE"' + - 'OIFS_GRIB_LIB="$OIFS_GRIB_API_LIB"' + - 'OIFS_GRIB_API_BIN="$ECCODESROOT/bin"' + - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' + # oasis + - 
'OIFS_OASIS_BASE=$(pwd)/oasis' + - 'OIFS_OASIS_INCLUDE="-I$OIFS_OASIS_BASE/build/lib/psmile -I$OIFS_OASIS_BASE/build/lib/psmile/scrip -I$OIFS_OASIS_BASE/build/lib/psmile/mct -I$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu"' + - 'OIFS_OASIS_LIB="-L$OIFS_OASIS_BASE/build/lib/psmile -L$OIFS_OASIS_BASE/build/lib/psmile/scrip -L$OIFS_OASIS_BASE/build/lib/psmile/mct -L$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu -lpsmile -lmct -lmpeu -lscrip"' + # netcdf + - 'OIFS_NETCDF_INCLUDE="-I$NETCDFROOT/include"' + - 'OIFS_NETCDF_LIB="-L$NETCDFROOT/lib -lnetcdf -lnetcdff"' + - 'OIFS_NETCDFF_INCLUDE="-I$NETCDFFROOT/include"' + - 'OIFS_NETCDFF_LIB="-L$NETCDFFROOT/lib -lnetcdff"' + # compilers and compile switches + # in case of crashes, you may activate -g -traceback -fpe0 for FFLAGS and CFLAGS + # but these slow down the model by 30%. + - 'OIFS_FC=$FC' + - 'OIFS_FFLAGS="-qopenmp -r8 -fp-model precise -align array32byte -O3 -g -traceback -xCORE_AVX2 -convert big_endian"' + - 'OIFS_FFIXED=""' + - 'OIFS_FCDEFS="BLAS LITTLE LINUX INTEGER_IS_INT"' + - 'OIFS_LFLAGS="$OIFS_MPI_LIB -qopenmp"' + - 'OIFS_CC=$CC' + - 'OIFS_CFLAGS="-qopenmp -fp-model precise -O3 -g -traceback -xCORE_AVX2 -qopt-report=0 -std=gnu89"' + - 'OIFS_CCDEFS="LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS"' + + gcc11_ompi416: + add_export_vars: + - 'LAPACK_LIB="-mkl=sequential"' + - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' + # FFTW is a separate module + - 'OIFS_FFTW_DIR="-L$FFTWROOT/"' + - 'OIFS_FFTW_INCLUDE="-I$FFTWROOT/include/"' + - 'OIFS_FFTW_LIB="-L$FFTWROOT/lib/ -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' + # TODO: figure out whether those two are still needed + - "ESM_NETCDF_C_DIR=$NETCDFROOT" + - "ESM_NETCDF_F_DIR=$NETCDFFROOT" + # grib api / eccodes + - 'OIFS_GRIB_API_INCLUDE="-I$ECCODESROOT/include"' + - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -leccodes_f90 -leccodes"' + - 'OIFS_GRIB_INCLUDE="$OIFS_GRIB_API_INCLUDE"' + - 'OIFS_GRIB_LIB="$OIFS_GRIB_API_LIB"' + - 
'OIFS_GRIB_API_BIN="$ECCODESROOT/bin"' + # oasis + - 'OIFS_OASIS_BASE=$(pwd)/oasis' + - 'OIFS_OASIS_INCLUDE="-I$OIFS_OASIS_BASE/build/lib/psmile -I$OIFS_OASIS_BASE/build/lib/psmile/scrip -I$OIFS_OASIS_BASE/build/lib/psmile/mct -I$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu"' + - 'OIFS_OASIS_LIB="-L$OIFS_OASIS_BASE/build/lib/psmile -L$OIFS_OASIS_BASE/build/lib/psmile/scrip -L$OIFS_OASIS_BASE/build/lib/psmile/mct -L$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu -lpsmile -lmct -lmpeu -lscrip"' + # netcdf + - 'OIFS_NETCDF_INCLUDE="-I$NETCDFROOT/include"' + - 'OIFS_NETCDF_LIB="-L$NETCDFROOT/lib -lnetcdf -lnetcdff"' + - 'OIFS_NETCDFF_INCLUDE="-I$NETCDFFROOT/include"' + - 'OIFS_NETCDFF_LIB="-L$NETCDFFROOT/lib -lnetcdff"' + # compilers and compile switches + # in case of crashes, you may activate -g -traceback -fpe0 for FFLAGS and CFLAGS + # but these slow down the model by 30%. + - 'OIFS_FC=$FC' + - 'OIFS_FFLAGS="-fopenmp -ffree-line-length-none -m64 -O3 -g -fbacktrace -fconvert=big-endian -fallow-argument-mismatch -fallow-invalid-boz"' + - 'OIFS_FFIXED="-fdefault-real-8 -fdefault-double-8 -fallow-argument-mismatch "' + - 'OIFS_FCDEFS="BLAS LITTLE LINUX INTEGER_IS_INT"' + - 'OIFS_LFLAGS="$OIFS_MPI_LIB -fopenmp"' + - 'OIFS_CC=$CC' + - 'OIFS_CFLAGS="-fopenmp -O3 -g -fbacktrace -fallow-argument-mismatch -fallow-invalid-boz"' + - 'OIFS_CCDEFS="LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS"' juwels: add_export_vars: @@ -240,54 +369,6 @@ compiletime_environment_changes: MAIN_LDFLAGS: '"-openmp"' - mistral: - add_export_vars: - MKLROOT: '/sw/rhel6-x64/intel/intel-18.0.4/compilers_and_libraries_2018/linux/mkl/lib/intel64/' - LAPACK_LIB_DEFAULT[(1)]: '"-L$MKLROOT -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' - ESM_NETCDF_C_DIR: "$NETCDFROOT" - ESM_NETCDF_F_DIR: "$NETCDFFROOT" - GRIBAPIROOT: "${computer.GribApiRoot}" - GRIBROOT: "${computer.GribApiRoot}" - UDUNITS2_ROOT: "/sw/rhel6-x64/util/udunits-2.2.26-gcc64" - FFTW_ROOT: "/sw/rhel6-x64/numerics/fftw-3.3.7-openmp-gcc64" - PROJ4_ROOT: 
"/sw/rhel6-x64/graphics/proj4-4.9.3-gcc48" - #PETSC_DIR: "/sw/rhel6-x64/numerics/PETSc-3.12.2-impi2018-intel18/" - PATH[(2)]: "/sw/rhel6-x64/gcc/binutils-2.24-gccsys/bin:${PATH}" - LD_LIBRARY_PATH[(2)]: "$LD_LIBRARY_PATH:$GRIBAPIROOT/lib:$PROJ4_ROOT/lib:$FFTW_ROOT/lib:$SZIPROOT/lib" - - GRIB_SAMPLES_PATH: '"$GRIBAPIROOT/share/${computer.GribSamples}/ifs_samples/grib1_mlgrib2/"' - PATH[(3)]: '$PATH:/mnt/lustre01/sw/rhel6-x64/devtools/fcm-2017.10.0/bin/' - - OIFS_GRIB_API_INCLUDE: '"-I$GRIBAPIROOT/include"' - OIFS_GRIB_API_LIB: '"${computer.GribApiLib}"' - # OIFS_GRIB_API_LIB is used by OpenIFS CY40, OIFS_GRIB_LIB is used by CY43 - OIFS_GRIB_INCLUDE: '"$OIFS_GRIB_API_INCLUDE"' - OIFS_GRIB_LIB: '"$OIFS_GRIB_API_LIB"' - OIFS_GRIB_API_BIN: '"$GRIBAPIROOT/bin"' - OIFS_OASIS_BASE: '$(pwd)/oasis' - OIFS_OASIS_INCLUDE: '"-I$OIFS_OASIS_BASE/build/lib/psmile -I$OIFS_OASIS_BASE/build/lib/psmile/scrip -I$OIFS_OASIS_BASE/build/lib/psmile/mct -I$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu"' - OIFS_OASIS_LIB: '"-L$OIFS_OASIS_BASE/build/lib/psmile -L$OIFS_OASIS_BASE/build/lib/psmile/scrip -L$OIFS_OASIS_BASE/build/lib/psmile/mct -L$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu -lpsmile -lmct -lmpeu -lscrip"' - OIFS_NETCDF_INCLUDE: '"-I$NETCDFROOT/include"' - OIFS_NETCDF_LIB: '"-L$NETCDFROOT/lib -lnetcdf"' - OIFS_NETCDFF_INCLUDE: '"-I$NETCDFFROOT/include"' - OIFS_NETCDFF_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' - OIFS_FC: '$FC' - OIFS_FFLAGS: '"-r8 -fp-model precise -align array32byte -O3 -qopenmp -xCORE_AVX2 -g -traceback -convert big_endian -fpe0"' - #OIFS_FFLAGS: '"-r8 -fp-model precise -align array32byte -O0 -qopenmp -xCORE_AVX2 -g -traceback -convert big_endian -check all,noarg_temp_created,bounds -fpe0"' - OIFS_FFIXED: '""' - OIFS_FCDEFS: '"BLAS LITTLE LINUX INTEGER_IS_INT"' - OIFS_LFLAGS: '"$OIFS_MPI_LIB -qopenmp"' - OIFS_CC: '$CC' - OIFS_CFLAGS: '"-fp-model precise -O3 -qopenmp -xCORE_AVX2 -g -traceback -qopt-report=0 -fpe0"' - OIFS_CCDEFS: '"LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS"' 
- # Build OpenIFS with FFTW. Use Intel MKL interfaces - # WARNING: FFTW does not work yet. Wait for patch from ECMWF - # OIFS_FFTW: '"enable"' - OIFS_FFTW_DIR: '"$FFTW_ROOT"' - OIFS_FFTW_INCLUDE: '"-I$OIFS_FFTW_DIR/include/"' - OIFS_FFTW_LIB: '"-L$OIFS_FFTW_DIR/lib/ -lfftw3f"' - DR_HOOK_IGNORE_SIGNALS: '${dr_hook_ignore_signals}' - levante: add_module_actions: - "load libaec/1.0.5-intel-2021.5.0" @@ -422,7 +503,48 @@ compiletime_environment_changes: OIFS_CC: '$CC' OIFS_CFLAGS: '"-fp-model precise -O3 -xCORE_AVX2 -g -traceback -qopt-report=0 -fpe0 -qopenmp"' OIFS_CCDEFS: '"LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS _OPENMP"' + + olaf: + add_export_vars: + - "LAPACK_LIB='-qmkl=sequential'" + - "LAPACK_LIB_DEFAULT='-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" + # FFTW is included in MKL so we link to that + - "OIFS_FFTW_DIR='-L$MKLROOT/lib/intel64'" + - "OIFS_FFTW_INCLUDE='-I$OIFS_FFTW_DIR/include/'" + - "OIFS_FFTW_LIB='-L$OIFS_FFTW_DIR/lib/ -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" + # TODO: figure out whether those two are still needed + - "ESM_NETCDF_C_DIR=$NETCDFROOT" + - "ESM_NETCDF_F_DIR=$NETCDFFROOT" + # grib api / eccodes + - 'OIFS_GRIB_API_INCLUDE="-I$ECCODESROOT/include"' + - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -leccodes_f90 -leccodes"' + - 'OIFS_GRIB_INCLUDE="$OIFS_GRIB_API_INCLUDE"' + - 'OIFS_GRIB_LIB="$OIFS_GRIB_API_LIB"' + - 'OIFS_GRIB_API_BIN="$ECCODESROOT/bin"' + - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' + # oasis + - 'OIFS_OASIS_BASE=$(pwd)/oasis' + - 'OIFS_OASIS_INCLUDE="-I$OIFS_OASIS_BASE/build/lib/psmile -I$OIFS_OASIS_BASE/build/lib/psmile/scrip -I$OIFS_OASIS_BASE/build/lib/psmile/mct -I$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu"' + - 'OIFS_OASIS_LIB="-L$OIFS_OASIS_BASE/build/lib/psmile -L$OIFS_OASIS_BASE/build/lib/psmile/scrip -L$OIFS_OASIS_BASE/build/lib/psmile/mct -L$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu -lpsmile -lmct -lmpeu -lscrip"' + # netcdf + - 
'OIFS_NETCDF_INCLUDE="-I$NETCDFROOT/include"' + - 'OIFS_NETCDF_LIB="-L$NETCDFROOT/lib -lnetcdf -lnetcdff"' + - 'OIFS_NETCDFF_INCLUDE="-I$NETCDFFROOT/include"' + - 'OIFS_NETCDFF_LIB="-L$NETCDFFROOT/lib -lnetcdff"' + # compilers and compile switches + # in case of crashes, you may activate -g -traceback -fpe0 for FFLAGS and CFLAGS + # but these slow down the model by 30%. + - 'OIFS_FC=$FC' + - 'OIFS_FFLAGS="-qopenmp -r8 -fp-model precise -align array32byte -O1 -g -traceback -xCORE_AVX2 -convert big_endian"' + - 'OIFS_FFIXED=""' + - 'OIFS_FCDEFS="BLAS LITTLE LINUX INTEGER_IS_INT"' + - 'OIFS_LFLAGS="$OIFS_MPI_LIB -qopenmp"' + - 'OIFS_CC=$CC' + - 'OIFS_CFLAGS="-qopenmp -fp-model precise -O1 -g -traceback -xCORE_AVX2 -qopt-report=0"' + - 'OIFS_CCDEFS="LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS"' + + aleph: add_export_vars: # grib api / eccodes @@ -457,24 +579,24 @@ compiletime_environment_changes: OIFS_XIOS_LIB_NAME: '${computer.c++_lib}' nesh: - compiler_mpi: intel2020_impi2020 + compiler_mpi: intel2023_impi2021 add_export_vars: - - "LAPACK_LIB='-mkl=sequential'" - - "LAPACK_LIB_DEFAULT='-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" + - "LAPACK_LIB='-qmkl=sequential'" + - "LAPACK_LIB_DEFAULT='-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -qmkl=sequential'" # FFTW is included in MKL so we link to that - "OIFS_FFTW_DIR='-L$MKLROOT/lib/intel64'" - "OIFS_FFTW_INCLUDE='-I$OIFS_FFTW_DIR/include/'" - - "OIFS_FFTW_LIB='-L$OIFS_FFTW_DIR/lib/ -lmkl_intel_lp64 -lmkl_core -lmkl_sequential'" + - "OIFS_FFTW_LIB='-L$OIFS_FFTW_DIR/lib/ -lmkl_intel_lp64 -lmkl_core -qmkl=sequential'" # TODO: figure out whether those two are still needed - "ESM_NETCDF_C_DIR=$NETCDFROOT" - "ESM_NETCDF_F_DIR=$NETCDFFROOT" # grib api / eccodes - 'OIFS_GRIB_API_INCLUDE="-I$ECCODESROOT/include"' - - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -leccodes_f90 -leccodes"' + - 'OIFS_GRIB_API_LIB="-L$ECCODESROOT/lib -L$ECCODESROOT/lib64 -leccodes_f90 -leccodes"' - 
'OIFS_GRIB_INCLUDE="$OIFS_GRIB_API_INCLUDE"' - 'OIFS_GRIB_LIB="$OIFS_GRIB_API_LIB"' - 'OIFS_GRIB_API_BIN="$ECCODESROOT/bin"' - - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' + - 'LAPACK_LIB_DEFAULT="-L$MKLROOT/lib/intel64 -lmkl_intel_lp64 -lmkl_core -qmkl=sequential"' # oasis - 'OIFS_OASIS_BASE=$(pwd)/oasis' - 'OIFS_OASIS_INCLUDE="-I$OIFS_OASIS_BASE/build/lib/psmile -I$OIFS_OASIS_BASE/build/lib/psmile/scrip -I$OIFS_OASIS_BASE/build/lib/psmile/mct -I$OIFS_OASIS_BASE/build/lib/psmile/mct/mpeu"' @@ -486,12 +608,18 @@ compiletime_environment_changes: - 'OIFS_NETCDFF_LIB="-L$NETCDFFROOT/lib -lnetcdff"' # compilers and compile switches - 'OIFS_FC=$FC' - - 'OIFS_FFLAGS="-r8 -fp-model precise -align array32byte -O1 -xCORE_AVX2 -g -traceback -convert big_endian -fpe0"' + # use -O3 to achieve faster model. + # this is a bit dangerous though... + # Also, switch from AVX2 to -xhost to use highest instruction set + - 'OIFS_FFLAGS="-r8 -fp-model precise -align array32byte -O3 -xhost -convert big_endian -fpe0"' - 'OIFS_FFIXED=""' - 'OIFS_FCDEFS="BLAS LITTLE LINUX INTEGER_IS_INT"' - 'OIFS_LFLAGS=$OIFS_MPI_LIB' - 'OIFS_CC=$CC' - - 'OIFS_CFLAGS="-fp-model precise -O1 -xCORE_AVX2 -g -traceback -qopt-report=0 -fpe0"' + # Intel icx (C compiler) no longer supports the -fpe0 flag. + # Also, the C standard is raised, so we need to use -std=gnu89 to enforce C89 + # since implicit declarations were removed in C99 and onward. 
+ - 'OIFS_CFLAGS="-fp-model precise -O3 -xhost -qopt-report=0 -std=gnu89"' - 'OIFS_CCDEFS="LINUX LITTLE INTEGER_IS_INT _ABI64 BLAS"' environment_changes: diff --git a/configs/components/oifs/oifs.yaml b/configs/components/oifs/oifs.yaml index 0ca6da457..bdbd307f0 100644 --- a/configs/components/oifs/oifs.yaml +++ b/configs/components/oifs/oifs.yaml @@ -101,8 +101,14 @@ available_versions: - 43r3-awicm-3.2.1 - 43r3-awicm-frontiers-xios - 43r3-foci +- 43r3-foci21 +- 43r3-foci211 +- 43r3-foci22 +- 43r3-foci30 - 43r3-v1 +- 43r3-v2 - 48r1 + choose_version: 40r1: branch: foci_conserv @@ -253,11 +259,44 @@ choose_version: - xios-2.5_r1910 contact: "swahl(at)geomar.de" major_version: 43r3 + 43r3-foci22: + branch: foci22 + git-repository: https://gitlab.dkrz.de/ec-earth/oifs-43r3.git + comp_command: "export OIFS_TOPLEVEL_DIR=${model_dir}; export OIFS_XIOS=enable ; export OIFS_XIOS_DIR=${model_dir}/../xios ; export OIFS_XIOS_INCLUDE=-I/${model_dir}/../xios/inc/ ; cd make; ../fcm/bin/fcm make -v -j8 -f oifs.fcm ; chmod -R ${source_code_permissions} . ; git config core.fileMode false ; mv esm/oifs/bin/master.exe esm/oifs/bin/oifs" + install_bins: make/esm/oifs/bin/oifs + requires: + - oasis3mct-EM21 + - xios-2.5_r1910 + contact: "swahl(at)geomar.de" + major_version: 43r3 + 43r3-foci30: + branch: foci22 + git-repository: https://gitlab.dkrz.de/ec-earth/oifs-43r3.git + comp_command: "export OIFS_TOPLEVEL_DIR=${model_dir}; export OIFS_XIOS=enable ; export OIFS_XIOS_DIR=${model_dir}/../xios ; export OIFS_XIOS_INCLUDE=-I/${model_dir}/../xios/inc/ ; cd make; ../fcm/bin/fcm make -v -j8 -f oifs.fcm ; chmod -R ${source_code_permissions} . 
; git config core.fileMode false ; mv esm/oifs/bin/master.exe esm/oifs/bin/oifs" + install_bins: make/esm/oifs/bin/oifs + requires: + - oasis3mct-5.0-geomar + - xios-2.5_r1910 + contact: "swahl(at)geomar.de" + major_version: 43r3 + 43r3-foci40: + # CSW: new branch once created by Joakim + branch: foci40 + git-repository: https://gitlab.dkrz.de/ec-earth/oifs-43r3.git + comp_command: "export OIFS_TOPLEVEL_DIR=${model_dir}; export OIFS_XIOS=enable ; export OIFS_XIOS_DIR=${model_dir}/../xios ; export OIFS_XIOS_INCLUDE=-I/${model_dir}/../xios/inc/ ; cd make; ../fcm/bin/fcm make -v -j8 -f oifs.fcm ; git config core.fileMode false ; mv esm/oifs/bin/master.exe esm/oifs/bin/oifs" + install_bins: make/esm/oifs/bin/oifs + requires: + - oasis3mct-5.0-geomar + - xios-trunk_oasis + contact: "swahl(at)geomar.de" + major_version: 43r3 43r3-v1: branch: 43r3v1-foci git-repository: https://gitlab.dkrz.de/ec-earth/oifs-43r3.git comp_command: "export OIFS_TOPLEVEL_DIR=${model_dir}; export OIFS_XIOS=enable ; export OIFS_XIOS_DIR=${model_dir}/../xios ; export OIFS_XIOS_INCLUDE=-I/${model_dir}/../xios/inc/ ; cd make; ../fcm/bin/fcm make -v -j8 -f oifs.fcm ; chmod -R ${source_code_permissions} .; mv esm/oifs/bin/master.exe esm/oifs/bin/oifs" install_bins: make/esm/oifs/bin/oifs + add_include_models: + - oasis3mct requires: - oasis3mct-4.0 - xios-2.5_r1910_oifs @@ -265,11 +304,15 @@ choose_version: 43r3-v2: branch: master git-repository: https://gitlab.dkrz.de/ec-earth/oifs-43r3.git - comp_command: "export OIFS_TOPLEVEL_DIR=${model_dir}; export OIFS_XIOS=enable ; export OIFS_XIOS_DIR=${model_dir}/../xios ; export OIFS_XIOS_INCLUDE=-I/${model_dir}/../xios/inc/ ; cd make; ../fcm/bin/fcm make -v -j8 -f oifs.fcm ; chmod -R ${source_code_permissions} .; mv esm/oifs/bin/master.exe esm/oifs/bin/oifs" + comp_command: "export OIFS_TOPLEVEL_DIR=${model_dir}; export OIFS_XIOS=enable ; export OIFS_XIOS_DIR=${model_dir}/../xios ; export OIFS_XIOS_INCLUDE=-I/${model_dir}/../xios/inc/ ; cd make; 
../fcm/bin/fcm make -v -j8 -f oifs.fcm ; chmod -R ${source_code_permissions} .; git config core.fileMode false ; mv esm/oifs/bin/master.exe esm/oifs/bin/oifs" install_bins: make/esm/oifs/bin/oifs + add_include_models: + - oasis3mct requires: - - oasis3mct-4.0 - - xios-2.5_r1910_oifs + #- oasis3mct-4.0 + #- xios-2.5_r1910_oifs + - oasis3mct-5.0-geomar + - xios-trunk major_version: 43r3 48r1: branch: main @@ -297,6 +340,13 @@ o3_scheme: "default" scenario: "amip-prepifs" output: "default" +# We need to get a default nproc +# This is later modified in a +# choose statement below +# and most likely in the +# user runscript +nproc: 96 + wam: False wam_2w: True wam_step: 1 @@ -347,7 +397,11 @@ choose_resolution: TCO95: nx: 40320 ny: 1 - time_step: 3600 + # Joakim: Lower Tco95 time step to 1800s + # This has 2x computer cost but reduces + # biases by a lot, so it is worth it + # Savita et al. (2024) 10.5194/gmd-2023-101 + time_step: 1800 oasis_grid_name: "096" res_number: 95 res_number_tl: "95_4" @@ -448,6 +502,8 @@ choose_computer.partitions.compute.cores_per_node: nproc: 108 48: nproc: 96 + 72: + nproc: 144 96: nproc: 96 128: @@ -459,6 +515,11 @@ choose_computer.partitions.compute.cores_per_node: #====================================================================================== # Microphysics switch: 0 for off, 1 for light version and 2 for full +# sclct_switch <= 1 : Transform all cloud water below 600 hPa to liquid phase +# sclct_switch == 2 : Reduce ice deposition rate by 50% to retain supercooled water +# sclct_switch should be set to 2 to reduce warm bias in Southern Ocean +# Note: Only works for 43r3. 
+# See Forbes 2016 10.21957/s41h7q7l sclct_switch: 0 # Namelist modifications for truncation types @@ -518,7 +579,7 @@ file_movements: all_directions: copy waminfo_in: all_directions: copy - outdata: # TODO: ask GEOMAR if they are fine with this, otherwise we have to put it into awicm3.yaml + outdata: all_directions: move # TODO: expand comment @@ -993,7 +1054,13 @@ choose_major_version: ORBIY: "${orb_iyear}" add_outdata_files: + # output is handled by XIOS + # but we write the full model state at the last time step + # in case we want to create restart files ICMUA: ICMUA + ICMSH: ICMSH + ICMGG: ICMGG + further_reading: - oifs/oifs.env.yaml 48r1: @@ -1142,7 +1209,21 @@ choose_mip: fort.4: NAERAD: SSPNAME: "SSP5-8.5" - + "4xCO2": + # quadruple CO2 from 1850 levels + add_namelist_changes: + fort.4: + NAERAD: + LA4xCO2: ".true." + NCMIPFIXYR: 1850 + "1pctCO2": + # increase CO2 from 1850 by 1pct per year + # (quadruple by year 140) + add_namelist_changes: + fort.4: + NAERAD: + L1PCTCO2: ".true." + NCMIPFIXYR: 1850 #choose_o3_scheme: # "prescribed": # choose_mip: @@ -1297,18 +1378,63 @@ choose_output: # as general.run_number, but for branchoff experiments should be 0 so that the 3rd # preprocessing option is used, instead of the options 1 or 2. branchoff: false +ini_pseudo_initial_date: "${initial_date}" #${prev_run.general.start_date}" eternal_run_number: "$(( 0 if (${lresume} and ${general.run_number} == 1) or (${general.run_number} == 2 and ${prev_run.oifs.branchoff}) else ${general.run_number} - ))" + ))" + +# There are four kinds of runs: +# eternal_run_number=0 - Restart from another experiment. +# eternal_run_number=1 - Start from initial conditions +# eternal_run_number=2 - Regular restart +# eternal_run_number=3 - Also regular restart, but we trick OpenIFS to think +# the initial_date is the start_date of the previous run +# i.e. 
OpenIFS never knows it does more than 1 restart +# This avoids both a memory issue for long runs and +# overflow of some calendar variables in single precision. +# +# When we branch off from an existing experiment, we must set ini_pseudo_initial_date +# in the runscript to inform ESM-Tools what initial_date it should trick OpenIFS it is. +# For example, a piControl may start from 1850 and run to 2850. +# A historical run would then start from 2850 of the existing run, but we would set +# start_date for the new run to be 1850 and also trick OpenIFS to think initial_date is 1849. # The following choose defines a pseudo_initial_date for OIFS. This allows us to trick # OIFS into thinking that, from leg 3, it is always running a 2nd leg, avoiding the # memory issues mentioned before. This is done by declaring the pseudo_initial_date # as the last start date. choose_eternal_run_number: + 0: + # If we are branching off from an existing run + pseudo_initial_date: "${ini_pseudo_initial_date}" + start_ndays_source: "${prev_run.oifs.next_ndays}" + branchoff: true + preprocess_method: " + ${general.esm_function_dir}/components/oifs/change_icm_date.sh + ${thisrun_input_dir}/ + ${oifs.input_expid} + ${oifs.input_expid} + ${pseudo_initial_date!syear!smonth!sday} + ${start_date!syear!smonth!sday} + ${oifs.wam_number} + ${oifs.perturb} + ${oifs.nx} + ${oifs.ensemble_id}; + ${general.esm_function_dir}/components/oifs/change_rcf_date.sh + ${thisrun_restart_in_dir}/ + ${pseudo_initial_date!syear!smonth!sday} + ${oifs.time_step} + ${oifs.seconds_since_initial} + ${oifs.start_ndays} + ${oifs.wam}; + ${slice_icml} + ${append_icmcl} + echo 'Long OpenIFS run. 
Number for internal OpenIFS timekeeping: ${eternal_run_number}'" + 1: + # If this is the first leg for an experiment pseudo_initial_date: "${initial_date}" start_ndays_source: "${start_ndays}" preprocess_method: " @@ -1326,6 +1452,7 @@ choose_eternal_run_number: ${append_icmcl} echo 'Run number for internal OpenIFS timekeeping: ${eternal_run_number}'" 2: + # First restart pseudo_initial_date: "${prev_run.general.start_date}" start_ndays_source: "${start_ndays}" preprocess_method: "${general.esm_function_dir}/components/oifs/skip.sh; @@ -1333,6 +1460,7 @@ choose_eternal_run_number: ${append_icmcl} echo 'Run number for internal OpenIFS timekeeping: ${eternal_run_number}'" "*": + # Any restart that is not the first pseudo_initial_date: "${prev_run.general.start_date}" start_ndays_source: "${prev_run.oifs.next_ndays}" preprocess_method: " @@ -1357,9 +1485,6 @@ choose_eternal_run_number: ${append_icmcl} echo 'Long OpenIFS run. Number for internal OpenIFS timekeeping: ${eternal_run_number}'" # If this is run 1 or 2 of an experiment, lable it as branchoff - choose_eternal_run_number: - 0: - branchoff: true choose_general.standalone: @@ -1388,11 +1513,26 @@ preprocess: # the postprocessing is at the moment essentially the same as the preprocessing plugin # it just executes a shell script # not working as expected as there is no "wait" after submit in the compute recipe below -postprocess: - postprocess_shell: - method: "${general.esm_function_dir}/components/oifs/oifs-43r3-postprocess.sh ${work_dir} ECE3 ${start_date!syear!smonth!sday} ${end_date!syear!smonth!sday}" - type: shell +choose_oifs.post_processing: + True: + workflow: + next_run_triggered_by: tidy + subjobs: + oifs_postprocessing: + batch_or_shell: shell + order_in_cluster: concurrent + run_on_queue: ${computer.partitions.pp.name} + run_after: tidy + script_dir: ${general.esm_function_dir}/setups/focioifs/ + submit_to_batch_system: True + script: " + oifs_postprocessing.sh -m + -r ${general.expid} + -s 
${start_date} + -e ${end_date} + -p ${general.base_dir}" + nproc: 1 #====================================================================================== # RECIPE diff --git a/configs/components/oifs/oifs_grib_output_to_restart_new.sh b/configs/components/oifs/oifs_grib_output_to_restart_new.sh new file mode 100755 index 000000000..834f89f80 --- /dev/null +++ b/configs/components/oifs/oifs_grib_output_to_restart_new.sh @@ -0,0 +1,290 @@ +#!/bin/bash +# + +debug=$1 +expid=$2 #"ECE3" +expout=$3 #"ECE3" +indate=$4 #"185001" +outdate=$5 #"185001" +indir=$6 #"./" +outdir=$7 #"./" +machine=$8 #"glogin" + +#echo "$indir" + +print () { echo "$(date +'%F %T'):" "$@"; } + +# Read the command line arguments +#OPTIND=1 # Reset in case getopts has been used previously in the shell. +#while getopts "h:d:e:o:i:u:a:t:m:" opt; do +#while getopts ":hdeoiuatm" opt; do +# echo "opt $opt" +# case "$opt" in +# h|\?) +# echo " " +# echo " This script can be used to produce restart conditions from an OpenIFS output file " +# echo " " +# echo " Usage: " +# echo " ./oifs_grib_output_to_restart_new.sh " +# echo " -h = Displays this message " +# echo " -d = Activate extra info for debugging " +# echo " -e = EXP ID (four characters) for input data " +# echo " -o = EXP ID (four characters) for output data " +# echo " -i = Input dir " +# echo " -u = Output dir " +# echo " -a = Date of input data " +# echo " -t = Date of output date " +# echo " -m = Machine you are using " +# echo " " +# echo " Author/Contact: Joakim Kjellsson, GEOMAR, jkjellsson@geomar.de " +# echo " No support will be provided by the ESM-Tools development team w.r.t this script " +# echo " Please contact the author Joakim Kjellsson if you need help using this tool " +# echo " " +# echo " How to create restart files from OpenIFS output " +# echo " Step 1: Make sure all appropriate variables are written to the output " +# echo " Step 2: Run this script " +# echo " Step 3: Provide the ICMGGINIT, ICMSHINIT, and ICMUAINIT files 
produced to the new run " +# echo " Step 4: You can now use the produced ICM* files as initial conditions for " +# echo " OpenIFS to restart the model. " +# echo " Note: This does not produce a true restart, but its pretty darn close... " +# echo " " +# exit 0 +# ;; +# d) debug=1 # verbose mode +# echo "debug: $debug" +# ;; +# e) expid=$OPTARG +# ;; +# o) expout=$OPTARG +# ;; +# i) indir="$OPTARG" +# echo "optarg $OPTARG" +# echo "indir $indir" +# ;; +# u) outdir=$OPTARG +# ;; +# a) indate=$OPTARG +# ;; +# t) outdate=$OPTARG +# ;; +# m) machine=$OPTARG +# ;; +# esac +#done +#shift $((OPTIND-1)) +#[ "$1" = "--" ] && shift + +# update vars with command line options if set +#[[ -z $envfile ]] && envfile="$basedir/$EXP_ID/scripts/env.sh" +#export PBS_NP=${ncpus} + +# +# This script needs ecCodes or grib_api installed +# This works on blogin, but not glogin +# On glogin you can find grib binaries in +# /sw/dataformats/eccodes/2.25.0/skl/gcc.8.3.0/bin/ + +if [[ "x$machine" == "xglogin" ]] ; then + grib_dir="/sw/dataformats/eccodes/2.25.0/skl/gcc.8.3.0/bin/" + module load cdo +fi + +if [[ "x$debug" == "x1" ]] ; then + echo " Machine: $machine " + echo " Grib_dir: $grib_dir " + echo " Exp ID (in): $expid " + echo " Exp ID (out): $expout " + echo " In dir: $indir " + echo " Out dir: $outdir " + echo " In date: $indate " + echo " Out date: $outdate " +fi + +# Locate ecCodes binaries +grib_ls=$grib_dir/grib_ls +grib_copy=$grib_dir/grib_copy +grib_set=$grib_dir/grib_set +grib_filter=$grib_dir/grib_filter + +# Where do you want the resulting files to end up +mkdir -vp $outdir/$outdate/ + +# These files will be used +# Note: The ICMUA file is new in 43r3. 
Did not exist in 40r1 +icmgg_in="${indir}/ICMGG${expid}+${indate}" +icmsh_in="${indir}/ICMSH${expid}+${indate}" +icmua_in="${indir}/ICMUA${expid}+${indate}" + +if [[ "x$debug" == "x1" ]] ; then + echo " Will work on these files: " + echo $icmgg_in + echo $icmsh_in + echo $icmua_in +fi + +# Create a tmp dir +tmpdir="tmp" +if [[ "x$debug" == "x1" ]] ; then + echo " Removing the old tmp dir " + echo " and making a new one " +fi + +rm -rf ${tmpdir} +mkdir -vp ${tmpdir} + +# Copy input files +if [[ "x$debug" == "x1" ]] ; then + echo " Copy input files to tmp dir " +fi +cp -v ${indir}/ICMGG${expid}INIT ${tmpdir}/. +cp -v ${indir}/ICMSH${expid}INIT ${tmpdir}/. + +# Create a rules file for grib_filter +# This tells grib_filter to split the GRIB file +# into separate files for each variable (shortName) +# and level +cat > ${tmpdir}/gf1 < ${tmpdir}/gf2 < ${tmpdir}/gf3 <> ${tmpdir}/shinit.tmp + done +done + +# End with orography +if [[ "x$debug" == "x1" ]] ; then +echo " orography is last field in ${tmpdir}/shinit.tmp " +fi +# I think the last should be on surface, not hybrid +#cat ${tmpdir}/shinit.z.hybrid.1 >> ${tmpdir}/shinit.tmp +cat ${tmpdir}/shinit.z.surface.0 >> ${tmpdir}/shinit.tmp + +# Now surface physics (grid point) +for var in stl1 stl2 stl3 stl4 swvl1 swvl2 swvl3 swvl4 sd src skt ci \ + lmlt lmld lblt ltlt lshf lict licd tsn asn \ + rsn sst istl1 istl2 istl3 istl4 chnk lsm sr al aluvp alnip aluvd alnid \ + lai_lv lai_hv sdfor slt sdor isor anor slor lsrh cvh cvl tvh tvl cl dl +do + if [[ "x$debug" == "x1" ]] ; then + echo " Put $var in ${tmpdir}/gginit.tmp " + fi + cat ${tmpdir}/gginit.$var >> ${tmpdir}/gginit.tmp +done + +# Now q and o3 +for lev in {1..91} +do + for var in q o3 + do + if [[ "x$debug" == "x1" ]] ; then + echo " Put $var on hybrid level $lev in ${tmpdir}/gginiua.tmp " + fi + cat ${tmpdir}/gginiua.$var.hybrid.$lev >> ${tmpdir}/gginiua.tmp + done +done + +# Then cloud variables +for lev in {1..91} +do + for var in crwc cswc clwc ciwc cc + do + if [[ 
"x$debug" == "x1" ]] ; then + echo " Put $var on hybrid level $lev in ${tmpdir}/gginiua.tmp " + fi + cat ${tmpdir}/gginiua.$var.hybrid.$lev >> ${tmpdir}/gginiua.tmp + done +done + +# +# Move files +# +if [[ "x$debug" == "x1" ]] ; then + echo " Rename files and move them " +fi +#mv -v ${tmpdir}/gginit.tmp ${outdir}/${outdate}/ICMGG${expout}INIT +#mv -v ${tmpdir}/gginiua.tmp ${outdir}/${outdate}/ICMGG${expout}INIUA +#mv -v ${tmpdir}/shinit.tmp ${outdir}/${outdate}/ICMSH${expout}INIT +$grib_set -s dataDate=$outdate ${tmpdir}/gginit.tmp ${outdir}/${outdate}/ICMGG${expout}INIT +$grib_set -s dataDate=$outdate ${tmpdir}/gginiua.tmp ${outdir}/${outdate}/ICMGG${expout}INIUA +$grib_set -s dataDate=$outdate ${tmpdir}/shinit.tmp ${outdir}/${outdate}/ICMSH${expout}INIT + +if [[ "x$debug" == "x1" ]] ; then + echo " Make netcdf files on regular grid of restarts " +fi +cdo -f nc -setgridtype,regular ${outdir}/${outdate}/ICMGG${expout}INIT ${outdir}/${outdate}/ICMGG${expout}INIT.nc +cdo -f nc -setgridtype,regular ${outdir}/${outdate}/ICMGG${expout}INIUA ${outdir}/${outdate}/ICMGG${expout}INIUA.nc +cdo -f nc -sp2gp,cubic ${outdir}/${outdate}/ICMSH${expout}INIT ${outdir}/${outdate}/ICMSH${expout}INIT.nc + + +echo " ==== Can you feel that? We are done here... " + + diff --git a/configs/components/rnfmap/rnfmap.yaml b/configs/components/rnfmap/rnfmap.yaml index dfecd8bf4..9850916fc 100644 --- a/configs/components/rnfmap/rnfmap.yaml +++ b/configs/components/rnfmap/rnfmap.yaml @@ -17,6 +17,7 @@ description: | ## available_versions: - ec-earth +- agrif - awicm-3.0 - awicm-3.1 - awicm-frontiers @@ -51,7 +52,9 @@ choose_version: branch: runoff foci211: branch: foci211 - + agrif: + branch: agrif + clean_command: rm -rf bin; cd src; make clean; cd .. comp_command: rm -rf bin; mkdir bin; cd src; make ; cd .. 
; cp bin/rnfmap.exe ./bin/rnfma git-repository: https://gitlab.dkrz.de/ec-earth/runoff-mapper.git @@ -91,6 +94,7 @@ nproca: 1 nprocb: 1 runoff_method: "old" +with_agrif: False runtime_seconds: "$(( $((${end_date} + 1days)) - ${start_date} ))" @@ -116,6 +120,23 @@ coupling_fields: "[[runoff_fields_send-->FIELD]]": grid: opaf +# If we run FOCI-OpenIFS with AGRIF, +# then AGRIF needs to send its mask +# to rnfmap +# Rnfmap then computes runoff and calving fields +# for AGRIF +choose_with_agrif: + True: + runoff_agrif_fields_recv: [1_R_AgrSpg] + runoff_agrif_fields_send: [1_R_Runoff_oce] + calving_agrif_fields_send: [1_R_Calving_oce] + add_coupling_fields: + "[[runoff_agrif_fields_send-->FIELD]]": + grid: rnfm + "[[calving_agrif_fields_send-->FIELD]]": + grid: rnfs + "[[runoff_agrif_fields_recv-->FIELD]]": + grid: rnfm choose_runoff_method: 'EM21': runoff_fields_recv: [R_Runoff_atm] @@ -222,6 +243,14 @@ choose_computer.name: levante: fflags: -r8 -fp-model precise -align array32byte -O3 -march=core-avx2 -mtune=core-avx2 -g -traceback -convert big_endian -fpe0 cflags: -fp-model precise -O3 -march=core-avx2 -mtune=core-avx2 -g -traceback -qopt-report=0 -fpe0 + glogin: + choose_computer.compiler_mpi: + gcc11_ompi416: + fflags: -fdefault-real-8 -O3 -g -fbacktrace -fconvert=big-endian -fallow-argument-mismatch -fallow-invalid-boz + cflags: -fdefault-real-8 -O3 -g -fbacktrace -fconvert=big-endian -fallow-argument-mismatch -fallow-invalid-boz + '*': + fflags: -r8 -fp-model precise -align array32byte -O3 -g -traceback -convert big_endian -fpe0 + cflags: -fp-model precise -O3 -g -traceback -qopt-report=0 "*": fflags: -r8 -fp-model precise -align array32byte -O3 -xCORE_AVX2 -g -traceback -convert big_endian -fpe0 - cflags: -fp-model precise -O3 -xCORE_AVX2 -g -traceback -qopt-report=0 -fpe0 + cflags: -fp-model precise -O3 -xCORE_AVX2 -g -traceback -qopt-report=0 diff --git a/configs/components/xios/xios.yaml b/configs/components/xios/xios.yaml index 10d0c0f80..608cbcb0a 100644 
--- a/configs/components/xios/xios.yaml +++ b/configs/components/xios/xios.yaml @@ -3,8 +3,9 @@ # model: xios -version: 2.0r982 +version: 2.5_r1910 executable: xios.x +xml_dir: "" metadata: Institute: IPSL and CEA @@ -27,6 +28,7 @@ available_versions: - trunk_oasis - "2.5" - 2.5_r1910 +- 2.5_r2497 - 2.5_r1910_oifs - 2.5_r1910_ogcm - 2.5_smhi @@ -53,7 +55,7 @@ choose_version: contact: "swahl(at)geomar.de" trunk_oasis: git-repository: https://git.geomar.de/foci/src/xios.git - branch: xios_trunk + branch: xios_trunk archfile: ESMTOOLS_generic_oasis_intel use_oasis: --use_oasis oasis3_mct comp_command: export XIOS_TOPLEVEL=${model_dir}; ./make_xios --arch ${archfile} --netcdf_lib netcdf4_par ${use_oasis} --job 24 --prod; cp bin/xios_server.exe bin/xios.x @@ -116,10 +118,23 @@ choose_version: comp_command: export XIOS_TOPLEVEL=${model_dir}; ./make_xios --arch ${archfile} --netcdf_lib netcdf4_par ${use_oasis} --job 24 --prod; cp bin/xios_server.exe bin/xios.x # XIOS with oasis dependencies and oasis support 2.5_r1910: - archfile: ESMTOOLS_generic_oasis_intel + # Switch arch file if we use GCC on glogin + choose_computer.name: + glogin: + choose_computer.compiler_mpi: + gcc11_ompi416: + archfile: ESMTOOLS_generic_oasis_GNU + '*': + archfile: ESMTOOLS_generic_oasis_intel + '*': + archfile: ESMTOOLS_generic_oasis_intel use_oasis: --use_oasis oasis3_mct branch: 2.5_r1910 - # XIOS with oasis dependencies + 2.5_r2497: + archfile: ESMTOOLS_generic_oasis_intel + use_oasis: --use_oasis oasis3_mct + branch: 2.5_r2497 + # XIOS without oasis dependencies 2.5_r1910_oifs: archfile: ESMTOOLS_generic_intel use_oasis: '' @@ -184,22 +199,11 @@ choose_general.setup_name: ifs_xml: ifs_xml focioifs: xml_dir: "${computer.pool_directories.focipool}" - choose_general.version: - "*": - add_config_files: - context_ifs: context_ifs - ifs_xml: ifs_xml - domain_def: domain_def - field_def: field_def - file_def: file_def - agrif: - add_config_files: - context_ifs: context_ifs - ifs_xml: ifs_xml - 
domain_def: domain_def - field_def: field_def - file_def: file_def - file_def_agrif: file_def_agrif + add_config_files: + context_ifs: context_ifs + ifs_xml: ifs_xml + nemo_xml: nemo_xml + foci: choose_general.version: default: @@ -220,12 +224,24 @@ choose_general.setup_name: domain_def: domain_def field_def: field_def file_def: file_def + fs_oasismct4: + xml_dir: ${nemo.model_dir}/CONFIG/${nemo.version}/${nemo.reference_expid} + add_config_files: + domain_def: domain_def + field_def: field_def + file_def: file_def default_oasismct4: xml_dir: ${nemo.model_dir}/CONFIG/${nemo.version}/${nemo.reference_expid} add_config_files: domain_def: domain_def field_def: field_def file_def: file_def + mops_oasismct4: + xml_dir: ${nemo.model_dir}/CONFIG/${nemo.version}/${nemo.reference_expid} + add_config_files: + domain_def: domain_def + field_def: field_def + file_def: file_def agrif: xml_dir: ${nemo.model_dir}/CONFIG/${nemo.version}/${nemo.reference_expid} add_config_files: @@ -240,6 +256,13 @@ choose_general.setup_name: field_def: field_def file_def: file_def file_def_agrif: file_def_agrif + agrif_mops_oasismct4: + xml_dir: ${nemo.model_dir}/CONFIG/${nemo.version}/${nemo.reference_expid} + add_config_files: + domain_def: domain_def + field_def: field_def + file_def: file_def + file_def_agrif: file_def_agrif awicm3: xml_dir: "${general.pool_dir}" add_config_files: diff --git a/configs/couplings/nemo-ORCA05_LIM2_FOCI_AGRIF_AOW+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_FOCI_AGRIF_AOW+echam-6.3.05p2-foci.yaml b/configs/couplings/nemo-ORCA05_LIM2_FOCI_AGRIF+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_FOCI_AGRIF+echam-6.3.05p2-foci.yaml similarity index 80% rename from configs/couplings/nemo-ORCA05_LIM2_FOCI_AGRIF_AOW+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_FOCI_AGRIF_AOW+echam-6.3.05p2-foci.yaml rename to configs/couplings/nemo-ORCA05_LIM2_FOCI_AGRIF+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_FOCI_AGRIF+echam-6.3.05p2-foci.yaml index b5ff82446..9ccd070c1 100644 --- 
a/configs/couplings/nemo-ORCA05_LIM2_FOCI_AGRIF_AOW+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_FOCI_AGRIF_AOW+echam-6.3.05p2-foci.yaml +++ b/configs/couplings/nemo-ORCA05_LIM2_FOCI_AGRIF+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_FOCI_AGRIF+echam-6.3.05p2-foci.yaml @@ -1,6 +1,6 @@ components: - echam-6.3.05p2-foci -- nemo-ORCA05_LIM2_FOCI_AGRIF_AOW +- nemo-ORCA05_LIM2_FOCI_AGRIF - oasis3mct-foci coupling_changes: - sed -i '/ECHAM6_COUPLED/s/OFF/ON/g' echam-6.3.05p2-foci/CMakeLists.txt diff --git a/configs/couplings/nemo-ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4+echam-6.3.05p2-foci_oasismct4.yaml b/configs/couplings/nemo-ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4+echam-6.3.05p2-foci_oasismct4.yaml new file mode 100644 index 000000000..19342bfdb --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4+echam-6.3.05p2-foci_oasismct4.yaml @@ -0,0 +1,7 @@ +components: +- oasis3mct-5.0-geomar +- echam-6.3.05p2-foci_oasismct4 +- xios-2.5_r1910 +- nemo-ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4 +coupling_changes: +- sed -i '/ECHAM6_COUPLED/s/OFF/ON/g' echam-6.3.05p2-foci_oasismct4/CMakeLists.txt diff --git a/configs/couplings/nemo-ORCA05_LIM2_FOCI_MOPS+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_FOCI_MOPS+echam-6.3.05p2-foci_oasismct4.yaml b/configs/couplings/nemo-ORCA05_LIM2_FOCI_MOPS+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_FOCI_MOPS+echam-6.3.05p2-foci_oasismct4.yaml new file mode 100644 index 000000000..992812981 --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_FOCI_MOPS+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_FOCI_MOPS+echam-6.3.05p2-foci_oasismct4.yaml @@ -0,0 +1,8 @@ +components: +- oasis3mct-4.0-geomar +- echam-6.3.05p2-foci_oasismct4 +- xios-2.5_r2497 +#- nemo-ORCA05_LIM2_KCM_AOW_OASISMCT4 +- nemo-ORCA05_LIM2_FOCI_MOPS_OASISMCT4 
+coupling_changes: +- sed -i '/ECHAM6_COUPLED/s/OFF/ON/g' echam-6.3.05p2-foci_oasismct4/CMakeLists.txt diff --git a/configs/couplings/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+oifs43r3-foci21/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+oifs43r3-foci21.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+oifs43r3-foci21/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+oifs43r3-foci21.yaml index 115052be9..1a3588948 100644 --- a/configs/couplings/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+oifs43r3-foci21/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+oifs43r3-foci21.yaml +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+oifs43r3-foci21/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+oifs43r3-foci21.yaml @@ -1,9 +1,9 @@ components: -- oifs-43r3-foci21 +- oifs-43r3-foci211 - nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4 - oasis3mct-EM21 -- rnfmap-focioifs21 +- rnfmap-agrif coupling_changes: -- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci21/src/ifs/module/yommcc.F90 -- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci21/src/ifs/module/yommcc.F90 -- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci21/src/ifs/module/yommcc.F90 \ No newline at end of file +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci211/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci211/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci211/src/ifs/module/yommcc.F90 diff --git a/configs/couplings/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT5+oifs43r3-foci30/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT5+oifs43r3-foci30.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT5+oifs43r3-foci30/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT5+oifs43r3-foci30.yaml new file mode 100644 index 000000000..6595b1017 --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT5+oifs43r3-foci30/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT5+oifs43r3-foci30.yaml @@ -0,0 +1,9 @@ +components: +- oifs-43r3-foci30 +- 
nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4 +- oasis3mct-5.0-geomar +- rnfmap-agrif +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 diff --git a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci.yaml index a2ab1fe50..fe424f6a6 100644 --- a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci.yaml +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci.yaml @@ -1,7 +1,8 @@ components: - oasis3mct-foci - echam-6.3.05p2-foci -- xios-2.0_r982 +#- xios-2.0_r982 +- xios-2.5_r2497 - nemo-ORCA05_LIM2_KCM_AOW coupling_changes: - sed -i '/ECHAM6_COUPLED/s/OFF/ON/g' echam-6.3.05p2-foci/CMakeLists.txt diff --git a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+echam-6.3.05p2-foci_oasismct4.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+echam-6.3.05p2-foci_oasismct4.yaml new file mode 100644 index 000000000..62f060a12 --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+echam-6.3.05p2-foci_oasismct4.yaml @@ -0,0 +1,8 @@ +components: +- oasis3mct-4.0-geomar +- echam-6.3.05p2-foci_oasismct4 +#- xios-2.0_r982 +- xios-2.5_r1910 +- nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4 +coupling_changes: +- sed -i '/ECHAM6_COUPLED/s/OFF/ON/g' echam-6.3.05p2-foci_oasismct4/CMakeLists.txt diff --git 
a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci22/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci22.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci22/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci22.yaml new file mode 100644 index 000000000..b619bc76e --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci22/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci22.yaml @@ -0,0 +1,10 @@ +components: +- oifs-43r3-foci22 +- nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4 +- oasis3mct-5.0-geomar +- rnfmap-foci211 +- xios-2.5_r1910 +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci22/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci22/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci22/src/ifs/module/yommcc.F90 diff --git a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG+oifs43r3-foci22/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG+oifs43r3-foci22.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG+oifs43r3-foci22/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG+oifs43r3-foci22.yaml new file mode 100644 index 000000000..91d425fda --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG+oifs43r3-foci22/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG+oifs43r3-foci22.yaml @@ -0,0 +1,10 @@ +components: +- oifs-43r3-foci22 +- nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG +- oasis3mct-EM21 +- rnfmap-foci211 +- xios-2.5_r1910 +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci22/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci22/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci22/src/ifs/module/yommcc.F90 diff --git 
a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5+oifs43r3-foci22/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5+oifs43r3-foci22.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5+oifs43r3-foci22/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5+oifs43r3-foci22.yaml new file mode 100644 index 000000000..b619bc76e --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5+oifs43r3-foci22/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5+oifs43r3-foci22.yaml @@ -0,0 +1,10 @@ +components: +- oifs-43r3-foci22 +- nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4 +- oasis3mct-5.0-geomar +- rnfmap-foci211 +- xios-2.5_r1910 +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci22/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci22/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci22/src/ifs/module/yommcc.F90 diff --git a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5_SMAG+oifs43r3-foci30/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5_SMAG+oifs43r3-foci30.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5_SMAG+oifs43r3-foci30/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5_SMAG+oifs43r3-foci30.yaml new file mode 100644 index 000000000..828f651d7 --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5_SMAG+oifs43r3-foci30/nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5_SMAG+oifs43r3-foci30.yaml @@ -0,0 +1,10 @@ +components: +- oifs-43r3-foci30 +- nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG +- oasis3mct-5.0-geomar +- rnfmap-foci211 +- xios-2.5_r1910 +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 diff --git 
a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci_oasismct4.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT4+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT4+echam-6.3.05p2-foci_oasismct4.yaml similarity index 100% rename from configs/couplings/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci_oasismct4.yaml rename to configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT4+echam-6.3.05p2-foci_oasismct4/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT4+echam-6.3.05p2-foci_oasismct4.yaml diff --git a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+echam-6.3.05p2-foci_oasismct5/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+echam-6.3.05p2-foci_oasismct5.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+echam-6.3.05p2-foci_oasismct5/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+echam-6.3.05p2-foci_oasismct5.yaml new file mode 100644 index 000000000..47ed53d72 --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+echam-6.3.05p2-foci_oasismct5/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+echam-6.3.05p2-foci_oasismct5.yaml @@ -0,0 +1,7 @@ +components: +- oasis3mct-5.0-geomar +- echam-6.3.05p2-foci_oasismct4 +- xios-2.5_r1910 +- nemo-ORCA05_LIM2_KCM_AOW_OASISMCT4 +coupling_changes: +- sed -i '/ECHAM6_COUPLED/s/OFF/ON/g' echam-6.3.05p2-foci_oasismct4/CMakeLists.txt diff --git a/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+oifs43r3-foci30/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+oifs43r3-foci30.yaml b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+oifs43r3-foci30/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+oifs43r3-foci30.yaml new file mode 100644 index 000000000..e787e505c --- /dev/null +++ b/configs/couplings/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+oifs43r3-foci30/nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+oifs43r3-foci30.yaml @@ -0,0 +1,10 @@ +components: +- oifs-43r3-foci30 +- nemo-ORCA05_LIM2_KCM_AOW_OASISMCT4 +- oasis3mct-5.0-geomar +- rnfmap-foci211 +- 
xios-2.5_r1910 +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 diff --git a/configs/couplings/nemo-ORCA05_SI3_COUPLED_AGRIF+oifs43r3-foci40/nemo-ORCA05_SI3_COUPLED_AGRIF+oifs43r3-foci40.yaml b/configs/couplings/nemo-ORCA05_SI3_COUPLED_AGRIF+oifs43r3-foci40/nemo-ORCA05_SI3_COUPLED_AGRIF+oifs43r3-foci40.yaml new file mode 100644 index 000000000..1bc466271 --- /dev/null +++ b/configs/couplings/nemo-ORCA05_SI3_COUPLED_AGRIF+oifs43r3-foci40/nemo-ORCA05_SI3_COUPLED_AGRIF+oifs43r3-foci40.yaml @@ -0,0 +1,10 @@ +components: +- oifs-43r3-foci40 +- nemo-ORCA05_SI3_COUPLED_AGRIF +- oasis3mct-5.0-geomar +- rnfmap-agrif +#- xios-trunk_oasis +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci40/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci40/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci40/src/ifs/module/yommcc.F90 diff --git a/configs/couplings/nemo-ORCA05_Z46_SI3_COUPLED+oifs43r3-foci40/nemo-ORCA05_Z46_SI3_COUPLED+oifs43r3-foci40.yaml b/configs/couplings/nemo-ORCA05_Z46_SI3_COUPLED+oifs43r3-foci40/nemo-ORCA05_Z46_SI3_COUPLED+oifs43r3-foci40.yaml new file mode 100644 index 000000000..bbc08e3ea --- /dev/null +++ b/configs/couplings/nemo-ORCA05_Z46_SI3_COUPLED+oifs43r3-foci40/nemo-ORCA05_Z46_SI3_COUPLED+oifs43r3-foci40.yaml @@ -0,0 +1,10 @@ +components: +- oifs-43r3-foci40 +- nemo-ORCA05_Z46_SI3_COUPLED +- oasis3mct-5.0-geomar +- rnfmap-foci211 +#- xios-trunk_oasis +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci40/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci40/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' 
oifs-43r3-foci40/src/ifs/module/yommcc.F90 diff --git a/configs/couplings/nemo-eORCA025_Z75_SI3_COUPLED+oifs43r3-foci30/nemo-eORCA025_Z75_SI3_COUPLED+oifs43r3-foci30.yaml b/configs/couplings/nemo-eORCA025_Z75_SI3_COUPLED+oifs43r3-foci30/nemo-eORCA025_Z75_SI3_COUPLED+oifs43r3-foci30.yaml new file mode 100644 index 000000000..03c1e68fa --- /dev/null +++ b/configs/couplings/nemo-eORCA025_Z75_SI3_COUPLED+oifs43r3-foci30/nemo-eORCA025_Z75_SI3_COUPLED+oifs43r3-foci30.yaml @@ -0,0 +1,10 @@ +components: +- oifs-43r3-foci30 +- nemo-eORCA025_Z75_SI3_COUPLED +- oasis3mct-EM21 +- rnfmap-foci211 +#- xios-trunk_oasis +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci30/src/ifs/module/yommcc.F90 diff --git a/configs/couplings/nemo-eORCA05_Z75_SI3_COUPLED+oifs43r3-foci40/nemo-eORCA05_Z75_SI3_COUPLED+oifs43r3-foci40.yaml b/configs/couplings/nemo-eORCA05_Z75_SI3_COUPLED+oifs43r3-foci40/nemo-eORCA05_Z75_SI3_COUPLED+oifs43r3-foci40.yaml new file mode 100644 index 000000000..7707c154d --- /dev/null +++ b/configs/couplings/nemo-eORCA05_Z75_SI3_COUPLED+oifs43r3-foci40/nemo-eORCA05_Z75_SI3_COUPLED+oifs43r3-foci40.yaml @@ -0,0 +1,10 @@ +components: +- oifs-43r3-foci40 +- nemo-eORCA05_Z75_SI3_COUPLED +- oasis3mct-5.0-geomar +- rnfmap-foci211 +#- xios-trunk_oasis +coupling_changes: +- sed -i '/COUPLENEMOECE = /s/.TRUE./.FALSE./g' oifs-43r3-foci40/src/ifs/module/yommcc.F90 +- sed -i '/COUPLEFESOM2 = /s/.TRUE./.FALSE./g' oifs-43r3-foci40/src/ifs/module/yommcc.F90 +- sed -i '/COUPLENEMOFOCI = /s/.FALSE./.TRUE./g' oifs-43r3-foci40/src/ifs/module/yommcc.F90 diff --git a/configs/esm_software/esm_runscripts/defaults_geomar_test.yaml b/configs/esm_software/esm_runscripts/defaults_geomar_test.yaml new file mode 100644 index 000000000..f4ad48f17 --- /dev/null +++ 
b/configs/esm_software/esm_runscripts/defaults_geomar_test.yaml @@ -0,0 +1,60 @@ +#per_model_defaults: +# file_movements: +# default: +# all_directions: copy +# bin: +# init_to_exp: copy +# exp_to_run: copy +# run_to_work: copy +# work_to_run: copy +# +# GEOMAR defaults +per_model_defaults: + file_movements: + default: + all_directions: copy + log: + all_directions: copy + bin: + init_to_exp: copy + exp_to_run: copy + run_to_work: copy + work_to_run: copy + forcing: + all_directions: link + input: + all_directions: link + restart_in: + all_directions: link + restart_out: + all_directions: move + outdata: + all_directions: move + unknown: + all_directions: move + couple: + all_directions: move + config: + init_to_exp: copy + exp_to_run: link + run_to_work: link + work_to_run: link + +oasis: + file_movements: + restart_in: + # seb-wahl: need to copy restart files by default + # as oasis modifies the restart file !!!! + init_to_exp: copy + exp_to_run: copy + run_to_work: copy + work_to_run: link +hdmodel: + file_movements: + restart_in: + # seb-wahl: need to copy restart files by default + # as oasis modifies the restart file !!!! 
+ init_to_exp: copy + exp_to_run: copy + run_to_work: copy + work_to_run: link diff --git a/configs/machines/albedo.yaml b/configs/machines/albedo.yaml index fe4e194a8..04bf42387 100644 --- a/configs/machines/albedo.yaml +++ b/configs/machines/albedo.yaml @@ -171,7 +171,10 @@ export_vars: NETCDFROOT: "" NETCDFFROOT: "" ECCODESROOT: "" - + + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT + OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' # Message Passing Interface (MPI) environment variables: diff --git a/configs/machines/aleph.yaml b/configs/machines/aleph.yaml index 9970ffe78..12cbc0730 100644 --- a/configs/machines/aleph.yaml +++ b/configs/machines/aleph.yaml @@ -82,7 +82,11 @@ export_vars: NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_LIBRARIES: $NETCDFROOT/lib - + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDF_DIR + OASIS_NETCDFF: $NETCDF_DIR + PERL5LIB: /usr/lib64/perl5 #LAPACK_LIB: '"-lmkl_intel_lp64 -lmkl_core -mkl=sequential -lpthread -lm -ldl"' #LAPACK_LIB_DEFAULT: '"-L/global/AWIsoft/intel/2018/compilers_and_libraries_2018.5.274/linux/mkl/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' diff --git a/configs/machines/all_machines.yaml b/configs/machines/all_machines.yaml index 54ae7b53e..126eef512 100644 --- a/configs/machines/all_machines.yaml +++ b/configs/machines/all_machines.yaml @@ -38,9 +38,11 @@ blogin: post_nodes: 'blogin*' glogin: - login_nodes: 'glogin*' - compute_nodes: '^g[A-Za-z0-9]+\.usr\.hlrn\.de$' - post_nodes: 'glogin*' + login_nodes: 'glogin[1-9]' + compute_nodes: 'g[A-Za-z0-9]+' + #compute_nodes: '^g[A-Za-z0-9]+\.usr\.hlrn\.de$' + #compute_nodes: '^c[0-9][0-9][0-9][0-9]' + post_nodes: 'glogin[1-9]' aleph: login_nodes: 'elogin*' @@ -53,4 +55,12 @@ nesh: - 'neshcl*' - 'nesh-bigmem*' post_nodes: 'neshcl*' + +olaf: + login_nodes: 'olaf*' + compute_nodes: + - 'normal_cpu' + - 'long_cpu' + post_nodes: 'olaf*' + diff --git 
a/configs/machines/blogin.yaml b/configs/machines/blogin.yaml index 697308711..2d4b7c922 100644 --- a/configs/machines/blogin.yaml +++ b/configs/machines/blogin.yaml @@ -18,7 +18,7 @@ choose_use_hyperthreading: hyperthreading_flag: "--ntasks-per-core=1" True: hyperthreading_flag: "" - launcher_flags: "--mpi=pmi2 -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind}" + launcher_flags: "--mpi=pmix -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind}" add_export_vars: I_MPI_SLURM_EXT: 0 add_unset_vars: @@ -32,7 +32,7 @@ choose_use_hyperthreading: hyperthreading_flag: "--ntasks-per-core=1" True: hyperthreading_flag: "" - launcher_flags: "--mpi=pmi2 -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind}" + launcher_flags: "--mpi=pmix -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind}" add_export_vars: I_MPI_SLURM_EXT: 0 add_unset_vars: @@ -42,19 +42,21 @@ choose_use_hyperthreading: - "SLURM_ARBITRARY_NODELIST" accounting: true - batch_system: "slurm" - jobtype: compute sh_interpreter: "/bin/bash" - - -partition: standard96 +partition: cpu-clx:test choose_partition: standard96: partition_name: standard96 partition_cpn: 96 + 'standard96:test': + partition_name: 'standard96:test' + partition_cpn: 96 + 'cpu-clx:test': + partition_name: 'cpu-clx:test' + partition_cpn: 96 partitions: compute: @@ -66,7 +68,6 @@ partitions: logical_cpus_per_core: 2 - threads_per_core: 1 hetjob_flag: hetjob @@ -78,26 +79,11 @@ pool_dir: "/scratch/usr/hbkawi" # default settings for compiler, mpi and I/O libs # TODO: system_libs not yet properly configured as I (seb-wahl) don't use them -compiler_mpi: intel2019_impi2019 -#iolibraries: system_libs -# -# for FOCIOIFS use -# compiler_mpi: intel2019_ompi -# for FOCI use -# compiler_mpi: intel2019_impi2019 -# for both FOCI and FOCIOIFS use -iolibraries: geomar_libs +compiler_mpi: intel2024_impi2021 +iolibraries: system_libs # basic modules and export vars needed # for all compiler and I/O settings -module_actions: - - "purge" - - "load slurm" - - "load HLRNenv" - - "load 
sw.skl" - - "load cmake" - - "load cdo nco" - - "load git" export_vars: LC_ALL: en_US.UTF-8 @@ -106,113 +92,99 @@ export_vars: #SLURM_CPU_BIND: none choose_compiler_mpi: - intel2021_impi2021: - add_module_actions: - - "load intel/2021.2" - - "load impi/2021.2" - add_export_vars: - FC: mpiifort - F77: mpiifort - MPIFC: mpiifort - FCFLAGS: -free - CC: mpiicc - CXX: mpiicpc - MPIROOT: "\"$(mpiifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" - MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" - - intel2019_impi2019_nemo4: - add_module_actions: - - "load intel/19.0.5" - - "load impi/2019.5" - - "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" - - "load gcc/9.3.0" - add_export_vars: - FC: mpiifort - F77: mpiifort - MPIFC: mpiifort - FCFLAGS: -free - CC: mpiicc - CXX: mpiicpc - MPIROOT: "\"$(mpiifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" - MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" - - intel2019_impi2019: + intel2024_impi2021: add_module_actions: - - "load intel/19.0.5" - - "load impi/2019.5" + - "load intel/2024.2" + - "load impi/2021.13" + # TODO: check whether loading gcc is still required + #- "load gcc/13.3.0" add_export_vars: - FC: mpiifort - F77: mpiifort - MPIFC: mpiifort + FC: mpiifx + F77: mpiifx + MPIFC: mpiifx + CC: mpiicx + CXX: mpiicpx FCFLAGS: -free - CC: mpiicc - CXX: mpiicpc - MPIROOT: "\"$(mpiifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" - MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" - - intel2019_ompi: - add_module_actions: - - "load intel/19.0.5" - - "load openmpi/intel/3.1.6" - add_export_vars: - FC: mpifort - F77: mpifort - MPIFC: mpifort - FCFLAGS: -free - CC: mpicc - CXX: mpic++ - MPIROOT: "\"$(mpifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" - MPI_LIB: "\"$(mpifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" + MPIROOT: "\"$(mpiifx -show | perl -lne 'm{ -I(.*?)/include } and print 
$1')\"" + MPI_LIB: "\"$(mpiifx -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" choose_iolibraries: + # not yet configured for berlin as of August 2024 as e.g. netcdf is not yet available + # as a module system_libs: - # TODO: find the correct libraries and dependencies - add_module_actions: - - "load netcdf/intel/4.7.3" - # TODO: find the correct libraries and dependencies - add_export_vars: - NETCDF_DIR: /sw/dataformats/netcdf/intel.18/4.7.3/skl/ - LD_LIBRARY_PATH: $NETCDF_DIR/lib/:$LD_LIBRARY_PATH - NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDF_DIR/include - NETCDF_CXX_LIBRARIES: $NETCDF_DIR/lib + + choose_compiler_mpi: + intel2024_impi2021: + # Use system HDF5 and netCDF + # ecCodes will be installed, but is not a priority now + add_module_actions: + - "load hdf5-parallel/impi/intel/1.14.4" + - "load netcdf-parallel/impi/intel/4.9.2" + # We dont have ecCodes, so we need to compile it ourselves for now + add_export_vars: + IO_LIB_ROOT: /home/shkjocke/sw/HPC_libraries/intel2024.2_impi2021.13_systemnetcdf_20240801 + # Path to libaec + SZIPROOT: /sw/dataformats/aec/1.1.3/clx.el9/ + # HDF5 + HDF5ROOT: $HDF5_ROOT + # We need to find NETCDF + NETCDFROOT: "\"$(nc-config --prefix)\"" + NETCDFFROOT: $NETCDFROOT + # ecCodes + ECCODESROOT: $IO_LIB_ROOT + + HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include + NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include + NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include + NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include + OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT + + # ZIB considers it bad practice to add module paths + # to LD_LIBRARY_PATH + # So we do it ourselves + LD_LIBRARY_PATH: $IO_LIB_ROOT/lib:$IO_LIB_ROOT/lib64:$HDF5ROOT/lib:$NETCDFROOT/lib:$LD_LIBRARY_PATH geomar_libs: add_export_vars: IO_LIB_ROOT: "" PATH: $IO_LIB_ROOT/bin:$PATH - LD_LIBRARY_PATH: $IO_LIB_ROOT/lib:$LD_LIBRARY_PATH - + + # Set paths from Sebastians libs 
SZIPROOT: $IO_LIB_ROOT - HDF5ROOT: $IO_LIB_ROOT + # HDF5 now sees fit to not install libs in the PREFIX path (idiots) + HDF5ROOT: $IO_LIB_ROOT/HDF_Group/HDF5/1.14.4.3/ HDF5_ROOT: $HDF5ROOT NETCDFROOT: $IO_LIB_ROOT NETCDFFROOT: $IO_LIB_ROOT ECCODESROOT: $IO_LIB_ROOT + + # Set paths from system libs (eccodes is missing) + #HDF5ROOT: $HDF5_ROOT + #NETCDFROOT: "\"$(nc-config --prefix)\"" + #NETCDFFROOT: $NETCDFROOT HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT - choose_compiler_mpi: + LD_LIBRARY_PATH: $IO_LIB_ROOT/lib:$IO_LIB_ROOT/lib64:$HDF5ROOT/lib:$LD_LIBRARY_PATH - intel2019_impi2019_nemo4: - add_export_vars: - IO_LIB_ROOT: /home/shkifmsw/sw/HPC_libraries/intel2019.0.5_impi2019.5_20200811 - - intel2021_impi2021: - add_export_vars: - IO_LIB_ROOT: /home/shkjocke/sw/HPC_libraries/intel2021.2_impi2021.2_20211007 - - intel2019_impi2019: - add_export_vars: - IO_LIB_ROOT: /home/shkifmsw/sw/HPC_libraries/intel2019.0.5_impi2019.5_20200811 - - intel2019_ompi: + choose_compiler_mpi: + + intel2024_impi2021: add_export_vars: - IO_LIB_ROOT: /home/shkifmsw/sw/HPC_libraries/intel2019.0.5_ompi3.1.6_20201117 + IO_LIB_ROOT: /home/shkifmsw/sw/HPC_libraries/intel2024.2_impi2021.13_20240723/ # some yamls use computer.fc, etc to identify the compiler, so we need to add them fc: "$FC" @@ -221,7 +193,8 @@ mpifc: "$MPIFC" mpicc: "$MPICC" cxx: "$CXX" -launcher_flags: "--mpi=pmi2 -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind} --distribution=cyclic:cyclic --export=ALL" +# TODO: or stay with pmi2 ? 
+launcher_flags: "--mpi=pmix -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind} --distribution=cyclic:cyclic --export=ALL" further_reading: - batch_system/slurm.yaml diff --git a/configs/machines/glogin.yaml b/configs/machines/glogin.yaml index 012a8c6d4..c36f08ac2 100644 --- a/configs/machines/glogin.yaml +++ b/configs/machines/glogin.yaml @@ -1,11 +1,22 @@ -# BLOGIN YAML CONFIGURATION FILES +# GLOGIN YAML CONFIGURATION FILES +# +# This file describes the glogin (Emmy) HPC in phase 3 which came into operation July 2024. +# The machine file for the older phases of Emmy, see glogin_old +# +# Documentation: https://docs.hpc.gwdg.de/compute_partitions/cpu_partitions/index.html +# Hardware: Intel Cascade-Lake 9242. 48 cores per chip, 2 chips per node. 364 Gb memory per node. +# name: glogin account: None # set default for hyperthreading_flag use_hyperthreading: False -# seb-wahl: use old heterogeneous parallelization on HLRN4, the new approach does not work yet + +# need to check how heterogeneous parallelisation should be done on glogin now +# I think we can use mpirun, for example: +# mpirun OMP_NUM_THREADS=4 -np 168 ./oifs -e ECE3 : -np 432 ./oceanx +# but it needs to be tested taskset: true choose_use_hyperthreading: "1": @@ -40,7 +51,13 @@ choose_use_hyperthreading: - "SLURM_NTASKS" - "SLURM_NPROCS" - "SLURM_ARBITRARY_NODELIST" + # After suggestion from Timon (not Pumbaa) at HLRN + - "SLURM_JOB_NUM_NODES" + - "SLURM_NNODES" +# If you do not have a project, there may be a small +# amount of resources given to each user +# If so, the account name is usually the username accounting: true batch_system: "slurm" @@ -48,14 +65,41 @@ batch_system: "slurm" jobtype: compute sh_interpreter: "/bin/bash" - +# This is the default partition (96 cores, Intel Cascade Lake) partition: standard96 +# Describe partitions available +# There are more (48-core nodes etc) +# but I (Joakim) see no reason to use them +# The performance difference between Cascade Lake and Sapphire Rapids is to 
be tested choose_partition: + # 2 x 48-core Intel Cascade-Lake 9242. 2,3 GHz base clock. standard96: partition_name: standard96 partition_cpn: 96 + # 2 x 48-core Intel Cascade-Lake 9242 (Cent OS 7) + 'standard96:el7': + partition_name: 'standard96:el8' + partition_cpn: 96 + # 2 x 48-core Intel Cascade-Lake 9242 + 'standard96:el8': + partition_name: 'standard96:el8' + partition_cpn: 96 + # 2 x 48-core Intel Cascade-Lake 9242. For short tests (< 1hr walltime) + 'standard96:test': + partition_name: 'standard96:test' + partition_cpn: 96 + # 2 x Sapphire Rapids 8468. 514 Gb memory per node. 2,1 GHz base clock. + 'standard96s': + partition_name: 'standard96s' + partition_cpn: 96 + 'standard96s:test': + partition_name: 'standard96s' + partition_cpn: 96 +# Choose partition for different kinds of jobs +# compute: Simulations +# pp: post processing partitions: compute: name: ${computer.partition_name} @@ -64,71 +108,177 @@ partitions: name: ${computer.partition_name} cores_per_node: ${computer.partition_cpn} - +# Intel chips support hyper-threading which means +# each core presents two logical cores to the system. +# It is possible to run 2 threads per core. +# Hyper-threading has not been found to speed up OpenIFS +# in any way, so we usually use 1 thread per core. 
logical_cpus_per_core: 2 - threads_per_core: 1 -hetjob_flag: hetjob +hetjob_flag: packjob +# Set default pool directory +# TODO: clarify with AWI: why pool, pool_dir and focipool pool_directories: - pool: "/scratch/usr/hbkawi" - focipool: "/scratch/usr/shkifmsw/foci_input2" + # This currently points to the shk00018 project from GEOMAR + # but can of course be set by the user in the runscript + pool: ${computer.pool_dir} + # This is default pool dir for FOCI and FOCI-OpenIFS + focipool: ${computer.pool_dir} + +pool_dir: "/scratch/projects/shk00018" + +# +# Now set default compiler etc +# The user can choose a compiler set which is something like intel2023_impi2021 +# which indicates the C/Fortran compiler and the MPI implementation. +# For each compiler set, we have a pre-defined list of modules for netCDF etc. +# That way, the user just picks compiler and ESM-Tools solves all other modules +# -pool_dir: "/scratch/usr/hbkawi" +# Available compiler options +# +# * intel2023_impi2021 +# * intel2023_ompi416 +# * intel2021_impi2019 (this requires the user to build their on spack environment) -# default settings for compiler, mpi and I/O libs -# TODO: system_libs not yet properly configured as I (seb-wahl) don't use them -compiler_mpi: intel2019_impi2019 -# to compile nemo standalone, comment the line above and uncomment the one below -#compiler_mpi: intel2019_impi2019_nemo4 -#iolibraries: system_libs -# -# for FOCIOIFS use -# compiler_mpi: intel2019_ompi -# for FOCI use -# compiler_mpi: intel2019_impi2019 -# for both FOCI and FOCIOIFS use -iolibraries: geomar_libs - -# basic modules and export vars needed -# for all compiler and I/O settings -module_actions: - - "purge" - - "load slurm" - - "load HLRNenv" - - "load sw.skl" - - "load cmake" - - "load cdo nco" - - "load git" +# Default compiler +# NOTE: THE COMPILER DEFAULT IS OFTEN SET FOR EACH MODEL, E.G. 
configs/setups/focioifs.yaml +# SO THIS LINE MAY NOT ACTUALLY DO ANYTHING +compiler_mpi: intel2023_impi2021 + +# Do we use modules available on the system +# or install our own (geomar_libs) +iolibraries: system_libs export_vars: LC_ALL: en_US.UTF-8 - # Recommended by HLNR support when using an MPI binary and srun - # removed by seb-wahl as it slows down ECHAM6 by 50% - #SLURM_CPU_BIND: none + # Taken from the GWDG examples and recipes page + SLURM_CPU_BIND: none -choose_compiler_mpi: +# We need to use mpirun rather than srun +launcher: mpirun +launcher_flags: "" +#launcher_flags: "--mpi=pmi2 -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind} --distribution=cyclic:cyclic --export=ALL" + +# Start by clearing all loaded modules +module_actions: + - "purge" + - "load git-lfs" + - "load perl" + - "load perl-uri" + # cmake is used by OpenIFS and some versions of ECHAM and OASIS + - "load cmake/3.27.7" - intel2019_impi2019_nemo4: +choose_compiler_mpi: + + # build locally for myself using spack + # Anyone else can do this too using the following commands: + # module load spack # load spack module + # spack install netcdf-fortran@4.6.1%intel@2021.10.0+mpi ^intel-mpi%intel@2021.10.0 ^hdf5%intel@2021.10.0+hl+cxx+fortran~java+threadsafe+map + # spack install eccodes@2.34.0%intel@2021.10.0+aec+fortran+openmp+tools + # spack install intel-mkl@2020.4.304%intel@2021.10.0 threads=openmp + # This should give a build with Intel 2021.10.0, IMPI 2019, HDF5 and netcdf, ecCodes, MKL. + # Note: There is no CDO with this build so postprocessing might not work. + intel2021_impi2019: + + # Use spack libraries + iolibraries: spack_libs + + # Here we load compiler and MPI + # netCDF etc is done later add_module_actions: - - "load intel/19.0.5" - - "load impi/2019.5" - - "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" - - "load gcc/9.3.0" + - "load spack" + - "source $SPACK_ROOT/share/spack/setup-env.sh" + # we load system intel compilers and mpi. Needed for spack, but wont be used. 
+ - "load intel-oneapi-compilers/2023.2.1" + #- "load intel-oneapi-mpi/2021.10.0" + #- "load intel-oneapi-mkl/2023.2.0" + + add_spack_actions: + # This part needs to be changed to your personal spack build. + # first try. did not work + #- "load netcdf-fortran@4.6.1%intel@2021.10.0+mpi ^intel-mpi%intel@2021.10.0 ^hdf5%intel@2021.10.0+hl+cxx+fortran~java+threadsafe+map" + # using older netcdf-fortran. works + #- "load netcdf-fortran@4.5.3%intel@2021.10.0+mpi ^intel-mpi%intel@2021.10.0 ^hdf5@1.10.7%intel@2021.10.0+hl+cxx+fortran~threadsafe" + # load eccodes and mkl for intel 2021 + #- "load eccodes@2.34.0%intel@2021.10.0+aec+fortran+openmp+tools" + #- "load intel-mkl@2020.4.304%intel@2021.10.0 threads=openmp" + # load netcdf and hdf5 for intel 2023 and intel mpi 2021 + - "load netcdf-fortran@4.5.3%oneapi@2023.2.1+mpi ^intel-oneapi-mpi@2021.10.0%oneapi@2023.2.1 ^hdf5@1.10.7%oneapi@2023.2.1+hl+cxx+fortran~threadsafe" + - "load eccodes@2.34.0%oneapi@2023.2.1+aec+fortran+openmp+tools/2ls624l" + - "load intel-oneapi-mkl@2023.2.0%oneapi@2023.2.1 threads=openmp" + + add_export_vars: - FC: mpiifort - F77: mpiifort - MPIFC: mpiifort + FC: mpiifort + F77: mpiifort + MPIFC: mpiifort FCFLAGS: -free - CC: mpiicc + CC: mpiicc CXX: mpiicpc MPIROOT: "\"$(mpiifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" - - intel2019_impi2019: + + # This is the new, recommended system Intel compiler set + # Note: The old release_mt stuff should not be needed anymore. 
+ intel2023_impi2021: + + # Use system modules + iolibraries: system_libs + + # Here we load compiler and MPI + # netCDF etc is done later add_module_actions: - - "load intel/19.0.5" - - "load impi/2019.5" + - "load intel-oneapi-compilers/2023.2.1" + - "load intel-oneapi-mpi/2021.10.0" + # MKL needed for OpenIFS + - "load intel-oneapi-mkl/2023.2.0" + + # Note: Intel compilers now have new names: + # mpiicc (C) = mpiicx + # mpiicpc (C++) = mpiicpx + # mpiifort (Fortran) = mpiifx + # + # OASIS compiles with mpiifx etc, but + # XIOS and OpenIFS do not, so we + # use the old mpiifort etc and live with the warnings + add_export_vars: + # For now (Intel 2023) we can stick to mpiifort etc + FC: mpiifort + F77: mpiifort + MPIFC: mpiifort + FCFLAGS: -free + CC: mpiicc + CXX: mpiicpc + MPIROOT: "\"$(mpiifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" + MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" + + # It is possible to use the new compilers (mpiifx etc) + # But some changes are necessary. + # -std=gnu89 is required in OIFS_CFLAGS. Also -mkl_sequential must be -qmkl=sequential + # Only new xios_trunk will work (not rev 1910) and only with -std=c++11 + # The CPP for NEMO must be cpp -P. $CC will not work anymore. + # + #FC: mpiifx + #F77: mpiifx + #MPIFC: mpiifx + #FCFLAGS: -free + #CC: mpiicx + #CXX: mpiicpx + #MPIROOT: "\"$(mpiifx -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" + #MPI_LIB: "\"$(mpiifx -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" + + # tk = Tronje Kemena, GEOMAR. 
Settings TK used to get FOCI-MOPS initially going on + # glogin in 08/2024 after their system upgrade, kept for reference but the approach + # by Joakim (intel2023_impi2021) seems cleaner + intel2023_impi2021_tk: + add_module_actions: + - "load intel-oneapi-compilers/2023.2.1" + - "load intel-oneapi-mpi/2021.10.0" + #- "load intel/2022.2" + #- "load impi/2021.6" + #- "load gcc/9.3.0" add_export_vars: FC: mpiifort F77: mpiifort @@ -138,35 +288,323 @@ choose_compiler_mpi: CXX: mpiicpc MPIROOT: "\"$(mpiifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" - - intel2019_ompi: + + intel2023_ompi416: + + # Use system modules + iolibraries: system_libs + + # Here we load compiler and MPI + # netCDF etc is done later + add_module_actions: + - "load intel-oneapi-compilers/2023.2.1" + - "load openmpi/4.1.6" + # MKL needed for OpenIFS + - "load intel-oneapi-mkl/2023.2.0" + + # Note: OpenMPI compilers link to new Intel compilers, e.g. + # mpicc (C) = icx + # mpicxx (C++) = icx + # mpifort (Fortran) = ifx + # Some changes in arch files for XIOS and NEMO are necessary to compile + # and XIOS must be quite new. + add_export_vars: + FC: mpifort + F77: mpifort + MPIFC: mpifort + FCFLAGS: -free + CC: mpicc + CXX: mpicxx + MPIROOT: "\"$(mpifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" + MPI_LIB: "\"$(mpifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" + + # At first I (Joakim) could not get Intel 2023 + IMPI 2021 to work so I tried GNU + OpenMPI + # It turned out to be very hard to compile FOCI-OpenIFS with GNU, and I could never get it to run + # These settings can remain here, but I strongly recommend against using them. 
+ gcc11_ompi416: + + # Use system modules + iolibraries: system_libs + + # Here we load compiler and MPI + # netCDF etc is done later add_module_actions: - - "load intel/19.0.5" - - "load openmpi/intel/3.1.6" + - "load gcc/11.4.0" + - "load openmpi/4.1.6" + # MKL needed for OpenIFS + # Intel MKL works with GCC. Not specific to Intel Fortran + - "load intel-oneapi-mkl/2023.2.0" + # Load FFTW + # (yes it really does stand for Fastest Fourier Transform in the West) + - "load fftw/3.3.10" + # required for CMake + #- "load curl/8.4.0-5rlmgmu ncurses/6.4-u72r7qn zlib-ng/2.1.4-ftbye2s" + # git conflicts + #- "load git/2.42.0" + + # Note: Intel compilers now have new names: + # mpiicc (C) = mpiicx + # mpiicpc (C++) = mpiicpx + # mpiifort (Fortran) = mpiifx + # + # OASIS compiles with mpiifx etc, but + # XIOS and OpenIFS do not, so we + # use the old mpiifort etc and live with the warnings add_export_vars: - FC: mpifort - F77: mpifort - MPIFC: mpifort + FC: mpifort #mpiifx + F77: mpifort #mpiifx + MPIFC: mpifort #mpiifx FCFLAGS: -free - CC: mpicc - CXX: mpic++ + CC: mpicc #mpiicx + CXX: mpic++ #mpiicpx MPIROOT: "\"$(mpifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" MPI_LIB: "\"$(mpifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" + MKLROOT: $INTEL_ONEAPI_MKL_MODULE_INSTALL_PREFIX + FFTWROOT: $FFTW_MODULE_INSTALL_PREFIX choose_iolibraries: system_libs: - # TODO: find the correct libraries and dependencies - add_module_actions: - - "load netcdf/intel/4.7.3" - # TODO: find the correct libraries and dependencies - add_export_vars: - NETCDF_DIR: /sw/dataformats/netcdf/intel.18/4.7.3/skl/ - LD_LIBRARY_PATH: $NETCDF_DIR/lib/:$LD_LIBRARY_PATH - NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDF_DIR/include - NETCDF_CXX_LIBRARIES: $NETCDF_DIR/lib - + choose_compiler_mpi: + intel2023_impi2021: + add_module_actions: + # This took a long time to work out how to do + # I (Joakim) figured out that FOCI-OpenIFS can not run with + # netcdf-fortran 4.6.1, due to "floating 
invalid" during nf90_open, + # so a slightly older version is required. + # See: https://www.unidata.ucar.edu/support/help/MailArchives/netcdf/msg15037.html + # Therefore, GWDG support older HDF5 and netCDF for us. + + # Parallel HDF5 + - "load hdf5/1.10.7" + + # netcdf built with older HDF5 + - "load netcdf-c/4.9.2-hdf5-1.10" + + # bug in netcdf-fortran 4.6.1. avoid it + - "load netcdf-fortran/4.5.3-hdf5-1.10" + + # GWDG support suspected a problem with netcdf when not built with fp-model precise + # Here are some test modules with fp-model precise + # I could not find that it made any difference in FOCI-OpenIFS though + #- "load hdf5/1.10.7-precise-fp" + #- "load netcdf-c/4.9.2-hdf5-1.10-precise-fp" + #- "load netcdf-fortran/4.5.3-hdf5-1.10-precise-fp" + + # The ecCodes, CDO and NCO modules are built with modern + # netcdf, so they conflict when loaded. + # Instead, we can load them manually (below) by adding paths + # In the future, GWDG should re-build these libraries. + # eccodes + #- "load eccodes/2.34.0" + + # cdo and nco built with older netcdf + # post processing seems ok + - "load nco/5.1.6-hdf5-1.10" + - "load cdo/2.2.2-hdf5-1.10" + + add_export_vars: + # Run module show on the modules to see how + # each module changes or sets env variables + # Module load usually sets _INSTALL_PREFIX + # so we just use that to find the libraries and includes. 
+ HDF5_ROOT: $HDF5_MODULE_INSTALL_PREFIX + HDF5ROOT: $HDF5_ROOT + NETCDF_DIR: $NETCDF_C_MODULE_INSTALL_PREFIX + NETCDFROOT: $NETCDF_C_MODULE_INSTALL_PREFIX + NETCDFFROOT: $NETCDF_FORTRAN_MODULE_INSTALL_PREFIX + + # Path to ecCodes module + ECCODESROOT: /sw/rev/24.05/cascadelake_opa_rocky8/linux-rocky8-cascadelake/oneapi-2023.2.1/eccodes-2.34.0-cwlamwcpvlhsuejrpqjlr7z3pdbkkw56/ + # Path to CDO + CDOROOT: /sw/rev/24.05/cascadelake_opa_rocky8/linux-rocky8-cascadelake/oneapi-2023.2.1/cdo-2.2.2-hmzmwdifoec6niyoau7mobur43v7q52p/ + + PATH: $ECCODESROOT/bin:$CDOROOT/bin:$PATH + # This should be done when correct module is installed + #ECCODESROOT: $ECCODES_MODULE_INSTALL_PREFIX + + # Add everything to LD_LIBRARY_PATH + # Gottingen support recommended to also add LD_RUN_PATH + # Add both lib and lib64 for ecCodes since it varies + LD_LIBRARY_PATH: $ECCODESROOT/lib/:$ECCODESROOT/lib64/:$NETCDFROOT/lib/:$NETCDFFROOT/lib/:$HDF5ROOT/lib/:$LD_LIBRARY_PATH + + # For OASIS + NETCDF_CXX_LIBRARIES: $NETCDF_DIR/lib + HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include + NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include + NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include + NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include + OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT + + intel2023_impi2021_tk: + # TODO: find the correct libraries and dependencies + add_module_actions: + - "load intel-oneapi-compilers/2023.2.1" + - "load intel-oneapi-mpi/2021.10.0" + - "load netcdf-fortran/4.6.1-mpi" + # TODO: find the correct libraries and dependencies + add_export_vars: + NETCDF_DIR: "/sw/rev/24.05/cascadelake_opa_rocky8/linux-rocky8-cascadelake/gcc-11.4.0/netcdf-c-4.9.2-r45vhneis4kgtjbelerbeuywfo3iodp5" + NETCDFF_DIR: "/sw/rev/24.05/cascadelake_opa_rocky8/linux-rocky8-cascadelake/oneapi-2023.2.1/netcdf-fortran-4.6.1-hk2dv2ct67ossrcdh6lhxy6eyq2kuzaa" + HDF5ROOT: 
"/sw/rev/24.05/cascadelake_opa_rocky8/linux-rocky8-cascadelake/oneapi-2023.2.1/hdf5-1.14.3-6stsphcp6h4srcqryqp2vqt5e73fnb7v" + LD_LIBRARY_PATH: $NETCDFF_DIR/lib:$NETCDF_DIR/lib:$HDF5ROOT/lib:$LD_LIBRARY_PATH + NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDF_DIR/include + NETCDF_CXX_LIBRARIES: $NETCDF_DIR/lib + IO_LIB_ROOT: "/home/shkifmsw/sw/HPC_libraries/intel2022.2_impi2021.6_20230815" + PATH: $IO_LIB_ROOT/bin:$PATH + SZIPROOT: $IO_LIB_ROOT + HDF5_ROOT: $HDF5ROOT + NETCDFROOT: $NETCDF_DIR + NETCDFFROOT: $NETCDFF_DIR + ECCODESROOT: $IO_LIB_ROOT + + HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include + NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include + NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include + OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + OASIS_NETCDF: $NETCDF_DIR + OASIS_NETCDFF: $NETCDFF_DIR + MPEU_Fortran_INCLUDE_DIRECTORIES: "/home/shktkeme/esm/models/foci-agrif_mops_oasismct4/oasis/INSTALL_OASIS.ESMTOOLS/include" + MPEU_Fortran_LIBRARIES: "/home/shktkeme/esm/models/foci-agrif_mops_oasismct4/oasis/INSTALL_OASIS.ESMTOOLS/lib" + + + intel2023_ompi416: + # Modules to load + add_module_actions: + # Parallel HDF5 + - "load hdf5/1.14.3" + - "load netcdf-c/4.9.2" + # Dont load netcdf. Use path instead + - "load netcdf-fortran/4.6.1-mpi" + # ecCodes required by OpenIFS + # Some strange ELF issue with ecCodes module + # Will use own ecCodes below (this is not a permanent solution) + - "load eccodes/2.34.0" + # CDO loads eccodes/2.25.0 which conflicts with eccodes/2.34.0 + - "load cdo/2.2.2" + - "load nco/5.1.6" + + add_export_vars: + # Run module show on the modules to see how + # each module changes or sets env variables + # Module load usually sets _INSTALL_PREFIX + # so we just use that to find the libraries and includes. 
+ HDF5_ROOT: $HDF5_MODULE_INSTALL_PREFIX + HDF5ROOT: $HDF5_ROOT + NETCDF_DIR: $NETCDF_C_MODULE_INSTALL_PREFIX + NETCDFROOT: $NETCDF_C_MODULE_INSTALL_PREFIX + # Use path for netcdf-fortran instead + NETCDFFROOT: $NETCDF_FORTRAN_MODULE_INSTALL_PREFIX + #NETCDFFROOT: /sw/rev/24.05/sapphirerapids_opa_rocky8/linux-rocky8-sapphirerapids/gcc-11.4.0/netcdf-fortran-4.6.1-b4s43qtqze4kel6knhp7imr2yshypvjy/ + # we cant use ecCodes module due to some ELF error + # Will give path to my own ecCodes + ECCODESROOT: $ECCODES_MODULE_INSTALL_PREFIX + #ECCODESROOT: /sw/rev/24.05/cascadelake_opa_rocky8/linux-rocky8-cascadelake/oneapi-2023.2.1/eccodes-2.34.0-cwlamwcpvlhsuejrpqjlr7z3pdbkkw56/ + + # Add NETCDF to LD_LIBRARY_PATH + # Gottingen support recommended to also add LD_RUN_PATH + LD_LIBRARY_PATH: $ECCODESROOT/lib64/:$NETCDF_DIR/lib/:$LD_RUN_PATH:$LD_LIBRARY_PATH + NETCDF_CXX_LIBRARIES: $NETCDF_DIR/lib + + # For OASIS + HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include + NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include + NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include + NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include + OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT + + gcc11_ompi416: + # Modules to load + add_module_actions: + # Parallel HDF5 + - "load hdf5/1.14.3" + - "load netcdf-c/4.9.2" + - "load netcdf-fortran/4.6.1-mpi" + # ecCodes required by OpenIFS + - "load eccodes/2.34.0" + # cdo and nco required for pre- and post processing of OpenIFS and NEMO + - "load cdo/2.2.2" + - "load nco/5.1.6" + + add_export_vars: + # Run module show on the modules to see how + # each module changes or sets env variables + # Module load usually sets _INSTALL_PREFIX + # so we just use that to find the libraries and includes. 
+ HDF5_ROOT: $HDF5_MODULE_INSTALL_PREFIX + HDF5ROOT: $HDF5_ROOT + NETCDF_DIR: $NETCDF_C_MODULE_INSTALL_PREFIX + NETCDFROOT: $NETCDF_C_MODULE_INSTALL_PREFIX + NETCDFFROOT: $NETCDF_FORTRAN_MODULE_INSTALL_PREFIX + ECCODESROOT: $ECCODES_MODULE_INSTALL_PREFIX + # Add NETCDF to LD_LIBRARY_PATH + # Gottingen support recommended to also add LD_RUN_PATH + LD_LIBRARY_PATH: $NETCDF_DIR/lib/:$LD_RUN_PATH:$LD_LIBRARY_PATH + NETCDF_CXX_LIBRARIES: $NETCDF_DIR/lib + + # For OASIS + HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include + NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include + NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include + NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include + OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT + + spack_libs: + choose_compiler_mpi: + intel2021_impi2019: + add_export_vars: + # using intel 2021 + intel mpi 2019 + #SPACK_ROOT: "/home/shkjocke/.spack/install/linux-rocky8-cascadelake/intel-2021.10.0/" + #MKLROOT: $SPACK_ROOT/intel-mkl-2020.4.304-osfsndi25x7ompvdhkuc3e7oy6w7x22y/mkl/ + #I_MPI_ROOT: $SPACK_ROOT/intel-mpi-2019.10.317-vh3d4dgpdnc5ijnbpi27qlc2e65s6gs7/impi/2019.10.317/ + #SZIPROOT: $SPACK_ROOT/libaec-1.0.6-s3yiohe2h2ndywnrwc6lzj5wwc4znojo/ + #HDF5ROOT: $SPACK_ROOT/hdf5-1.10.7-24p3eg5v3tbihcedtbvwapzjftechyyd + #HDF5_ROOT: $HDF5ROOT + #NETCDFROOT: $SPACK_ROOT/netcdf-c-4.9.2-jgl7ozmpjq7milfey4hmkq2qevhglvsc + #NETCDFFROOT: $SPACK_ROOT/netcdf-fortran-4.5.3-nmr2cb375x4woufnpgc2kbzldgdqvssi + #ECCODESROOT: $SPACK_ROOT/eccodes-2.34.0-x4itugitwwo7cbxoxmsj4gprctnlui5i + + # intel 2023 + impi 2021 + SPACK_ROOT: "/home/shkjocke/.spack/install/linux-rocky8-cascadelake/oneapi-2023.2.1/" + MKLROOT: $SPACK_ROOT/intel-oneapi-mkl-2023.2.0-h5ucstnjeb3alppgni63w4jpi6mguwsy/mkl/2023.2.0/ + I_MPI_ROOT: $SPACK_ROOT/intel-oneapi-mpi-2021.10.0-lmq35q4ue5xziuhtz6hc25xvw7gwov33/impi/2021.10.0/ + SZIPROOT: 
$SPACK_ROOT/libaec-1.0.6-vqv6cuzfvcxou7crktob7zjbxwfm2yhc/ + HDF5ROOT: $SPACK_ROOT/hdf5-1.10.7-amszjwv3rqdfl6nk3jrt42mc7i7kykyf/ + HDF5_ROOT: $HDF5ROOT + NETCDFROOT: $SPACK_ROOT/netcdf-c-4.9.2-hfyf5eu4hji45jx23rwdjvcejgm4awpa + NETCDFFROOT: $SPACK_ROOT/netcdf-fortran-4.5.3-atv5woc3ewjyylewbylm3o2zixb6rv33 + ECCODESROOT: $SPACK_ROOT/eccodes-2.34.0-2ls624lxsfralpzotgntfwpblyli7ahw + + # and we need to add stuff to LD_LIBRARY_PATH manually + LD_LIBRARY_PATH: $SZIPROOT/lib:$HDF5ROOT/lib:$NETCDFROOT/lib:$NETCDFFROOT/lib:$ECCODESROOT/lib64:$LD_LIBRARY_PATH + PATH: $ECCODESROOT/bin:$PATH + + HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include + NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include + NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include + NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include + OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT + + geomar_libs: + # This option is never used! + # It is kept here as a skeleton in case we want to use it in the future add_export_vars: IO_LIB_ROOT: "" PATH: $IO_LIB_ROOT/bin:$PATH @@ -184,6 +622,10 @@ choose_iolibraries: NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $IO_LIB_ROOT + OASIS_NETCDFF: $IO_LIB_ROOT choose_compiler_mpi: @@ -191,17 +633,6 @@ choose_iolibraries: add_export_vars: IO_LIB_ROOT: /home/shkifmsw/sw/HPC_libraries/intel2019.0.5_impi2019.5_20200811 - intel2021_impi2021: - add_export_vars: - IO_LIB_ROOT: /home/shkjocke/sw/HPC_libraries/intel2021.2_impi2021.2_20211007 - - intel2019_impi2019: - add_export_vars: - IO_LIB_ROOT: /home/shkifmsw/sw/HPC_libraries/intel2019.0.5_impi2019.5_20200811 - - intel2019_ompi: - add_export_vars: - IO_LIB_ROOT: /home/shkifmsw/sw/HPC_libraries/intel2019.0.5_ompi3.1.6_20201117 # some yamls use computer.fc, etc to identify the 
compiler, so we need to add them fc: "$FC" @@ -210,7 +641,5 @@ mpifc: "$MPIFC" mpicc: "$MPICC" cxx: "$CXX" -launcher_flags: "--mpi=pmi2 -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind} --distribution=cyclic:cyclic --export=ALL" - further_reading: - batch_system/slurm.yaml diff --git a/configs/machines/juwels.yaml b/configs/machines/juwels.yaml index d55531dd6..8f2c1b706 100644 --- a/configs/machines/juwels.yaml +++ b/configs/machines/juwels.yaml @@ -306,6 +306,10 @@ choose_iolibraries: NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' PATH: $IO_LIB_ROOT/bin:$PATH + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT choose_compiler_mpi: intel2022_ompi2022: @@ -334,6 +338,10 @@ choose_iolibraries: NETCDFROOT: $IO_LIB_ROOT NETCDFFROOT: $IO_LIB_ROOT ECCODESROOT: $IO_LIB_ROOT + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include diff --git a/configs/machines/levante.yaml b/configs/machines/levante.yaml index 8c9f67494..2e6ee3b71 100644 --- a/configs/machines/levante.yaml +++ b/configs/machines/levante.yaml @@ -219,6 +219,9 @@ choose_iolibraries: MPI_DIR: /sw/spack-levante/openmpi-4.1.2-yfwe6t mpi_DIR: /sw/spack-levante/openmpi-4.1.2-yfwe6t LD_LIBRARY_PATH: $MPI_HOME/lib:$TBBMALLOC_DIR:$LD_LIBRARY_PATH + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDF_PATH + OASIS_NETCDFF: $NETCDF_ROOT system_libs: add_module_actions: @@ -238,6 +241,9 @@ choose_iolibraries: NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_LIBRARIES: $NETCDFROOT/lib + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT # flags required for ECHAM6 (and possibly other models) # compilation with cmake if parastationMPI or openMPI is used # contact seb-wahl for details @@ 
-301,6 +307,10 @@ choose_iolibraries: OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' # kh 07.03.22 currently required to use Intel MPI LD_LIBRARY_PATH[(2)]: '$I_MPI_ROOT/libfabric/lib:$LD_LIBRARY_PATH' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT geomar_libs: add_export_vars: @@ -320,6 +330,11 @@ choose_iolibraries: NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT + choose_compiler_mpi: intel2022_impi2021: add_export_vars: diff --git a/configs/machines/nesh.yaml b/configs/machines/nesh.yaml index 05560fe85..2a85bb206 100644 --- a/configs/machines/nesh.yaml +++ b/configs/machines/nesh.yaml @@ -1,4 +1,14 @@ -# NESH YAML CONFIGURATION FILES +# Configuration file for NESH, HPC at Kiel University, Germany +# +# Reference: https://www.hiperf.rz.uni-kiel.de/nesh/ +# +# NESH underwent significant upgrade of hardware and software late 2023 +# The yaml file for the old machine is kept as nesh_old +# +# Hardware: +# nesh-srp[100-339]: 2x Intel Xeon Gold 6426Y (Sapphire Rapids), 32 cores (2.5-4.1GHz), 256GB main memory +# nesh-clk[344-623]: 2x Intel Xeon Gold 6226R (Cascade Lake), 32 cores (2.9-3.9GHz), 192GB main memory +# name: nesh account: None @@ -15,6 +25,7 @@ choose_use_hyperthreading: False: hyperthreading_flag: "--ntasks-per-core=1" +# There is no accounting on NESH accounting: false batch_system: "slurm" @@ -22,125 +33,147 @@ batch_system: "slurm" jobtype: compute sh_interpreter: "/usr/bin/bash" -choose_jobtype: - tidy_and_resubmit: - partition: cluster - post: - partition: cluster - compute: - partition: cluster - +# Set default partition on NESH +partition: base +# Set partition name and core per node choose_partition: - cluster: - cores_per_node: 32 - + base: + partition_name: base + partition_cpn: 32 + highmem: + 
partition_name: highmem + partition_cpn: 32 + +# Define all partitions available +partitions: + compute: + name: ${computer.partition_name} + cores_per_node: ${computer.partition_cpn} + pp: + name: ${computer.partition_name} + cores_per_node: ${computer.partition_cpn} + +# Intel chips support hyperthreading where each core +# presents two logical cores to the system. +# It is thus possible to run 2 threads per core +# However, this seems to not lead to any speedup, so +# we will stick to one thread per core. logical_cpus_per_core: 2 threads_per_core: 1 +# Sebastian's work directory as default pool pool_directories: - pool: "/gxfs_work1/geomar/smomw235/foci_input2/" - focipool: "/gxfs_work1/geomar/smomw235/foci_input2/" + pool: "/gxfs_work/geomar/smomw235/foci_input2/" + focipool: "/gxfs_work/geomar/smomw235/foci_input2/" pool_dir: "/not/available/on/nesh/" -# TODO: system_libs not yet properly configured as I (seb-wahl) don't use them -compiler_mpi: intel2019_impi2019 -iolibraries: geomar_libs +# Set default compiler and MPI +# NOTE: Many models set a default compiler_mpi in e.g. configs/setups/focioifs.yaml +# so the default set below may not actually do anything...
+compiler_mpi: intel2023_impi2021 +iolibraries: system_libs -# basic modules and export vars needed -# for all compiler and I/O settings +# Module and export commands to run for all compilers module_actions: - - "load cmake" + # clear all currently loaded modules + - "purge" + # everything else depends on compiler environment + #- "load cmake" + #- "load git git-lfs" export_vars: LC_ALL: en_US.UTF-8 additional_flags: - --mem=72000 - - --constraint="cascade" +# Now choose the compiler_mpi +# Note: We must set the environment first, oneapi or gcc choose_compiler_mpi: - - intel2020_impi2020: + + intel2023_impi2021: + add_module_actions: - - "load intel/20.0.4" - - "load intelmpi/20.0.4" + # Load Intel compilers, MKL and Intel MPI + - "load oneapi2023-env/2023.2.0" + - "load oneapi/2023.2.0" + - "load oneapi-mpi/2021.10.0" + - "load oneapi-mkl/2023.1.0" + # Load git-lfs + - "load git-lfs/3.4.0" + add_export_vars: - FC: mpiifort - F77: mpiifort - MPIFC: mpiifort - FCFLAGS: -free - CC: mpiicc - CXX: mpiicpc - MPIROOT: "\"$(mpiifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" - MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" - I_MPI_PMI_LIBRARY: libpmi.so - I_MPI_FABRICS: shm:ofi - I_MPI_LARGE_SCALE_THRESHOLD: 8192 - I_MPI_DYNAMIC_CONNECTION: 1 + #FC: mpiifort + #F77: mpiifort + #MPIFC: mpiifort + #CC: mpiicc + #CXX: mpiicpc + #MPIROOT: "\"$(mpiifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" + #MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" + + FC: mpiifx + F77: mpiifx + MPIFC: mpiifx + CC: mpiicx + CXX: mpiicx + MPIROOT: "\"$(mpiifx -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" + MPI_LIB: "\"$(mpiifx -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" - intel2019_impi2019: - add_module_actions: - - "load intel/19.0.4" - - "load intelmpi/19.0.4" - add_export_vars: - FC: mpiifort - F77: mpiifort - MPIFC: mpiifort FCFLAGS: -free - CC: mpiicc - CXX: mpiicpc - MPIROOT: "\"$(mpiifort 
-show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" - MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" I_MPI_PMI_LIBRARY: libpmi.so I_MPI_FABRICS: shm:ofi - #FI_PROVIDER: verbs - #FI_VERBS_IFACE: ib I_MPI_LARGE_SCALE_THRESHOLD: 8192 I_MPI_DYNAMIC_CONNECTION: 1 - #I_MPI_SLURM_EXT: 0 - - intel2019_ompi: - add_module_actions: - - "load intel/19.0.4" - - "load openmpi-intel19/3.1.6" - add_export_vars: - FC: mpifort - F77: mpifort - MPIFC: mpifort - FCFLAGS: -free - CC: mpicc - CXX: mpic++ - MPIROOT: "\"$(mpifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" - MPI_LIB: "\"$(mpifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" - - intel2020_ompi: - add_module_actions: - - "load intel/20.0.4" - - "load openmpi-intel20/3.1.6" - add_export_vars: - FC: mpifort - F77: mpifort - MPIFC: mpifort - FCFLAGS: -free - CC: mpicc - CXX: mpic++ - MPIROOT: "\"$(mpifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" - MPI_LIB: "\"$(mpifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" - + +# Now we choose whether to use the modules available on the system (system_libs) +# or modules compiled by ourselves (geomar_libs etc) choose_iolibraries: system_libs: - # TODO: find the correct libraries and dependencies + add_module_actions: - - "load netcdf" + # CMake required to build some models + - "load cmake/3.27.4" + # HDF5 and netCDF required by most models + - "load hdf5/1.14.1-2-with-with-oneapi-mpi-2021.10.0" + - "load netcdf-c/4.9.2-with-oneapi-mpi-2021.10.0" + - "load netcdf-fortran/4.6.0-with-oneapi-mpi-2021.10.0" + # ecCodes required by OpenIFS + #- "load eccodes/2.25.0" + - "load eccodes/2.34.1" + # CDO and NCO required for some pre and post processing + - "load cdo/1.9.9" + - "load nco/5.1.5" + # TODO: find the correct libraries and dependencies add_export_vars: - NETCDF_DIR: /sw/dataformats/netcdf/intel.18/4.7.3/skl/ - LD_LIBRARY_PATH: $NETCDF_DIR/lib/:$LD_LIBRARY_PATH - + # + HDF5_ROOT: 
"/gxfs_home/sw/spack/spack0.20.1/usr/opt/spack/linux-rocky8-x86_64/oneapi-2023.2.0/hdf5-1.14.1-2-gniprdhdkijaxy6i3khsln6lrqjucyjw/" + HDF5ROOT: $HDF5_ROOT + NETCDF_DIR: "/gxfs_home/sw/spack/spack0.20.1/usr/opt/spack/linux-rocky8-x86_64/oneapi-2023.2.0/netcdf-c-4.9.2-jehsuqkvfvcqhspfczokywh3bxpzdb7o/" + NETCDFROOT: $NETCDF_DIR + NETCDFFROOT: "/gxfs_home/sw/spack/spack0.20.1/usr/opt/spack/linux-rocky8-x86_64/oneapi-2023.2.0/netcdf-fortran-4.6.0-dq6omuhdb5wvodcyxzgh4p54jqclsp6q/" + #ECCODESROOT: "/gxfs_home/sw/spack/spack0.20.1/usr/opt/spack/linux-rocky8-x86_64/oneapi-2023.2.0/eccodes-2.25.0-6phun47jsw5l2ztchbjipsg6gtxsa4z2/" + ECCODESROOT: "/gxfs_home/sw/spack/spack0.20.1/usr/opt/spack/linux-rocky8-x86_64/oneapi-2023.2.0/eccodes-2.34.1-yk4grts4gppymo2cg5jrqydkrkb3we43/" + NETCDF_CXX_LIBRARIES: "/gxfs_home/sw/spack/spack0.20.1/usr/opt/spack/linux-rocky8-x86_64/oneapi-2023.2.0/netcdf-cxx4-4.3.1-n6bh6c2tp5yrnd5xotosfnk5vu3yxoga/lib" + # Loading modules adds them to LD_LIBRARY_PATH + #LD_LIBRARY_PATH: $NETCDF_DIR/lib/:$LD_LIBRARY_PATH + + # For OASIS + HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include + NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include + NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include + NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include + OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDF_DIR + OASIS_NETCDFF: $NETCDF_DIR + + # This option is currently not configured. 
But leave it here for reference geomar_libs: choose_compiler_mpi: intel2020_impi2020: @@ -161,62 +194,10 @@ choose_iolibraries: NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' - intel2019_impi2019: - add_export_vars: - IO_LIB_ROOT: ~smomw235/sw/HPC_libraries/intel2019.0.4_impi2019.0.4_20210122 - PATH: $IO_LIB_ROOT/bin:$PATH - LD_LIBRARY_PATH: $IO_LIB_ROOT/lib:$LD_LIBRARY_PATH - - SZIPROOT: $IO_LIB_ROOT - HDF5ROOT: $IO_LIB_ROOT - HDF5_ROOT: $HDF5ROOT - NETCDFROOT: $IO_LIB_ROOT - NETCDFFROOT: $IO_LIB_ROOT - ECCODESROOT: $IO_LIB_ROOT - - HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include - NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include - NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include - NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include - OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' - - intel2019_ompi: - add_export_vars: - IO_LIB_ROOT: ~smomw235/sw/HPC_libraries/intel2019.0.4_ompi3.1.6_20210322 - PATH: $IO_LIB_ROOT/bin:$PATH - LD_LIBRARY_PATH: $IO_LIB_ROOT/lib:$LD_LIBRARY_PATH - - SZIPROOT: $IO_LIB_ROOT - HDF5ROOT: $IO_LIB_ROOT - HDF5_ROOT: $HDF5ROOT - NETCDFROOT: $IO_LIB_ROOT - NETCDFFROOT: $IO_LIB_ROOT - ECCODESROOT: $IO_LIB_ROOT - - HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include - NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include - NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include - NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include - OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' - - intel2020_ompi: - add_export_vars: - IO_LIB_ROOT: ~smomw235/sw/HPC_libraries/intel2020.0.4_ompi3.1.6_20210322 - PATH: $IO_LIB_ROOT/bin:$PATH - LD_LIBRARY_PATH: $IO_LIB_ROOT/lib:$LD_LIBRARY_PATH - - SZIPROOT: $IO_LIB_ROOT - HDF5ROOT: $IO_LIB_ROOT - HDF5_ROOT: $HDF5ROOT - NETCDFROOT: $IO_LIB_ROOT - NETCDFFROOT: $IO_LIB_ROOT - ECCODESROOT: $IO_LIB_ROOT - - HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include - NETCDF_Fortran_INCLUDE_DIRECTORIES: 
$NETCDFFROOT/include - NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include - NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include - OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDFROOT + OASIS_NETCDFF: $NETCDFFROOT # some yamls use computer.fc, etc to identify the compiler, so we need to add them fc: "$FC" diff --git a/configs/machines/olaf.yaml b/configs/machines/olaf.yaml new file mode 100644 index 000000000..aa6f99657 --- /dev/null +++ b/configs/machines/olaf.yaml @@ -0,0 +1,167 @@ +# Olaf (IBS, Korea) configuration file + +name: olaf +account: iccp + +# hyperthreading false by default +use_hyperthreading: False + +# NEED TO EXPLORE HOW TO DO HETEROGENEOUS PARALLELIZATION IN THE FUTURE +# for now use taskset +taskset: true + +# hyperthreading options +choose_use_hyperthreading: + "1": + hyperthreading_flag: "" + True: + hyperthreading_flag: "" + "0": + choose_heterogeneous_parallelization: + False: + hyperthreading_flag: "--ntasks-per-core=1" + True: + hyperthreading_flag: "" + launcher_flags: "--mpi=pmi2 -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind}" + add_export_vars: + I_MPI_SLURM_EXT: 0 + add_unset_vars: + - "SLURM_DISTRIBUTION" + - "SLURM_NTASKS" + - "SLURM_NPROCS" + - "SLURM_ARBITRARY_NODELIST" + False: + choose_heterogeneous_parallelization: + False: + hyperthreading_flag: "--ntasks-per-core=1" + True: + hyperthreading_flag: "" + launcher_flags: "--mpi=pmi2 -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind}" + add_export_vars: + I_MPI_SLURM_EXT: 0 + add_unset_vars: + - "SLURM_DISTRIBUTION" + - "SLURM_NTASKS" + - "SLURM_NPROCS" + - "SLURM_ARBITRARY_NODELIST" + +accounting: true + +batch_system: "slurm" + +# Available: +# * intel2021_impi2021 +compiler_mpi: intel2021_impi2021 + +jobtype: compute +sh_interpreter: "/bin/bash" + +# Olaf compute nodes: +# 2x Intel Xeon 8360Y (2.6GHz, 36 Cores) +# Normal: Max 3 days +# Long: Max 14 days + +partition: normal_cpu + +choose_partition: + 'normal_cpu': + 
partition_name: 'normal_cpu' + partition_cpn: 72 + 'long_cpu': + partition_name: 'long_cpu' + partition_cpn: 72 + +partitions: + compute: + name: ${computer.partition_name} + cores_per_node: ${computer.partition_cpn} + pp: + name: ${computer.partition_name} + cores_per_node: ${computer.partition_cpn} + +logical_cpus_per_core: 2 + +threads_per_core: 1 +hetjob_flag: packjob + +pool_directories: + pool: "/scratch/usr/hbkawi" + focipool: "/proj/internal_group/iccp/jkjellsson/foci_input2/" + +# we install our own libraries using +# https://git.geomar.de/HPC/libraries +iolibraries: geomar_libs + +# purge all modules first +# and load git (with lfs) and cmake +module_actions: + - "purge" + - "load git-lfs" + - "load cmake/3.28.1" + +# Each user should set these in .bashrc +export_vars: + LC_ALL: en_US.UTF-8 + LANG: en_US.UTF-8 + +# Compiler specific settings +choose_compiler_mpi: + + intel2021_impi2021: + add_module_actions: + - "load intel/2021.3.0" + # impi/2021.3.0 causes MPI_IProbe seg fault in XIOS + # switching to 2021.7.1 solved it. 
+ - "load impi/2021.7.1" + #- "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" + #- "load gcc/9.3.0" + add_export_vars: + FC: mpiifort + F77: mpiifort + MPIFC: mpiifort + FCFLAGS: -free + CC: mpiicc + CXX: mpiicpc + MPIROOT: "\"$(mpiifort -show | perl -lne 'm{ -I(.*?)/include } and print $1')\"" + MPI_LIB: "\"$(mpiifort -show |sed -e 's/^[^ ]*//' -e 's/-[I][^ ]*//g')\"" + +choose_iolibraries: + geomar_libs: + add_export_vars: + IO_LIB_ROOT: "" + PATH: $IO_LIB_ROOT/bin:$PATH + LD_LIBRARY_PATH: $IO_LIB_ROOT/lib:$LD_LIBRARY_PATH + + SZIPROOT: $IO_LIB_ROOT + HDF5ROOT: $IO_LIB_ROOT + HDF5_ROOT: $HDF5ROOT + NETCDFROOT: $IO_LIB_ROOT + NETCDFFROOT: $IO_LIB_ROOT + ECCODESROOT: $IO_LIB_ROOT + + HDF5_C_INCLUDE_DIRECTORIES: $HDF5_ROOT/include + NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFFROOT/include + NETCDF_C_INCLUDE_DIRECTORIES: $NETCDFROOT/include + NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include + OASIS3MCT_FC_LIB: '"-L$NETCDFFROOT/lib -lnetcdff"' + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $IO_LIB_ROOT + OASIS_NETCDFF: $IO_LIB_ROOT + + choose_compiler_mpi: + intel2021_impi2021: + add_export_vars: + IO_LIB_ROOT: /proj/internal_group/iccp/sw/HPC_libraries/intel2021.0.3_impi2021.0.3_20240313/ + +# some yamls use computer.fc, etc to identify the compiler, so we need to add them +fc: "$FC" +cc: "$CC" +mpifc: "$MPIFC" +mpicc: "$MPICC" +cxx: "$CXX" + +launcher_flags: "--mpi=pmi2 -l --kill-on-bad-exit=1 --cpu_bind=${cpu_bind} --distribution=cyclic:cyclic --export=ALL" + +further_reading: + - batch_system/slurm.yaml diff --git a/configs/machines/ollie.yaml b/configs/machines/ollie.yaml index a0c9305ef..79b9529cd 100644 --- a/configs/machines/ollie.yaml +++ b/configs/machines/ollie.yaml @@ -104,7 +104,11 @@ export_vars: NETCDF_Fortran_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_INCLUDE_DIRECTORIES: $NETCDFROOT/include NETCDF_CXX_LIBRARIES: $NETCDFROOT/lib - + + # For OASIS3-MCT5 from CERFACS + OASIS_NETCDF: $NETCDF_DIR + OASIS_NETCDFF: $NETCDF_DIR + 
PERL5LIB: /usr/lib64/perl5 LAPACK_LIB: '"-lmkl_intel_lp64 -lmkl_core -mkl=sequential -lpthread -lm -ldl"' LAPACK_LIB_DEFAULT: '"-L/global/AWIsoft/intel/2018/compilers_and_libraries_2018.5.274/linux/mkl/lib/intel64 -lmkl_intel_lp64 -lmkl_core -lmkl_sequential"' diff --git a/configs/setups/awicm/awicm-2.0.yaml b/configs/setups/awicm/awicm-2.0.yaml index e36bd75f2..4b8edfb51 100644 --- a/configs/setups/awicm/awicm-2.0.yaml +++ b/configs/setups/awicm/awicm-2.0.yaml @@ -112,6 +112,7 @@ fesom: oasis3mct: model_dir: ${general.model_dir}/oasis + mct_version: 2.8 process_ordering: - fesom diff --git a/configs/setups/awicm/awicm.yaml b/configs/setups/awicm/awicm.yaml index 3752e1129..c82386eb2 100644 --- a/configs/setups/awicm/awicm.yaml +++ b/configs/setups/awicm/awicm.yaml @@ -166,6 +166,7 @@ recom: oasis3mct: model_dir: ${general.model_dir}/oasis + mct_version: 2.8 process_ordering: - fesom diff --git a/configs/setups/awiesm/awiesm-2.2.yaml b/configs/setups/awiesm/awiesm-2.2.yaml index 4f5549871..57cc4660c 100644 --- a/configs/setups/awiesm/awiesm-2.2.yaml +++ b/configs/setups/awiesm/awiesm-2.2.yaml @@ -184,6 +184,7 @@ fesom: oasis3mct: model_dir: ${general.model_dir}/oasis + mct_version: 2.8 process_ordering: - fesom diff --git a/configs/setups/awiesm/awiesm.yaml b/configs/setups/awiesm/awiesm.yaml index 431667d25..c6808bcd5 100644 --- a/configs/setups/awiesm/awiesm.yaml +++ b/configs/setups/awiesm/awiesm.yaml @@ -507,6 +507,7 @@ recom: oasis3mct: model_dir: ${general.model_dir}/oasis + mct_version: 2.8 process_ordering: - fesom diff --git a/configs/setups/foci/echam_monitoring.ipynb b/configs/setups/foci/echam_monitoring.ipynb index 6c29f4d27..db89aa81f 100644 --- a/configs/setups/foci/echam_monitoring.ipynb +++ b/configs/setups/foci/echam_monitoring.ipynb @@ -23,7 +23,7 @@ "# this is the parameters cell for papermill\n", "# Via papermill (https://papermill.readthedocs.io/en/latest/) the settings below can be overwritten via command line arguments. 
\n", "# This allows to run this notebook in batch mode. The cell below is tagged as parameter cell. \n", - "expid = \"mon3y\"\n", + "expid = \"FOCI2.2-SW220\"\n", "iniyear = 1850\n", "exproot = \"/home/shkifmsw/esm/esm-experiments\"\n", "obsroot = \"/home/shkifmsw/foci_input2/OBS_MONITORING/T63/\"" @@ -32,7 +32,7 @@ { "cell_type": "code", "execution_count": null, - "id": "58bde0c7-1cce-4431-8f7b-29cbadf91789", + "id": "5dd9fc88-74bd-40e0-83c6-b61f63bbea5a", "metadata": {}, "outputs": [], "source": [ @@ -117,7 +117,7 @@ "#import shutil\n", "from cdo import *\n", "# TODO: in interactive mode, set path to cdo below, comment to run in batch mode on any machine\n", - "os.environ['CDO'] = '/home/shkifmsw/miniconda3/envs/jupyter_mon/bin/cdo'\n", + "os.environ['CDO'] = os.path.expanduser('~')+'/miniforge/envs/jupyter_mon/bin/cdo'\n", "cdo = Cdo()" ] }, @@ -319,6 +319,16 @@ " " ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "570d2d23-9b7e-44c0-8731-058c771c6713", + "metadata": {}, + "outputs": [], + "source": [ + "f\"{monitoring['echammonroot']}/{monitoring['expid']}_{ftypes}_yearfldmean.nc\"" + ] + }, { "cell_type": "markdown", "id": "1dee3f8f-0c6d-4572-a7d5-53ff6825ca1b", @@ -380,13 +390,21 @@ " gl.xformatter = LONGITUDE_FORMATTER\n", " gl.yformatter = LATITUDE_FORMATTER" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cc3c89b6-9aef-4dc7-b2ee-40836e3ed1e9", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "jupyter_mon", + "display_name": "Python [conda env:jupyter_mon]", "language": "python", - "name": "jupyter_mon" + "name": "conda-env-jupyter_mon-py" }, "language_info": { "codemirror_mode": { @@ -398,7 +416,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.12" + "version": "3.12.7" }, "toc-autonumbering": false, "toc-showcode": false, diff --git a/configs/setups/foci/echam_monitoring.sh 
b/configs/setups/foci/echam_monitoring.sh index 95d24b238..ccf18c23d 100755 --- a/configs/setups/foci/echam_monitoring.sh +++ b/configs/setups/foci/echam_monitoring.sh @@ -11,7 +11,7 @@ basedir="$HOME/esm/esm-experiments/" obsroot="$HOME/foci_input2/OBS_MONITORING/T63/" expid="test" iniyear=1850 -condapath="$HOME/miniconda3/" +condapath="$HOME/miniforge3/" # #------- DO NOT EDIT BELOW THIS LINE UNLESS YOU KNOW WHAT YOU ARE DOING ------# # @@ -26,7 +26,7 @@ while getopts "h?:r:i:p:c:o:" opt; do echo " -o path to obs data (basedir, default is $obsroot)" echo " -r experiment / run id (run, default is $expid)" echo " -i initial year (initial year, default is $iniyear)" - echo " -c root path to conda env (condapath, default is \$HOME/miniconda3/)" + echo " -c root path to conda env (condapath, default is \$HOME/miniforge3/)" echo exit 0 ;; @@ -50,12 +50,12 @@ echo "Doing ECHAM6 monitoring in $basedir for $expid from year $iniyear onwards" echo "Using conda environment from $condapath" echo if ! 
source $condapath/bin/activate jupyter_mon ; then - echo + echo echo "source $condapath/bin/activate jupyter_mon failed" echo "install with" echo " conda env create -n jupyter_mon --file $(dirname $0)/jupyter_mon.yaml" echo " source $condapath/bin/activate jupyter_mon" - echo ' python -m ipykernel install --user --name jupyter_mon --display-name "jupyter_mon"' + echo ' python -m ipykernel install --user --name jupyter_mon --display-name "jupyter_mon"' echo exit 1 else @@ -63,7 +63,7 @@ else fi cd $(dirname $0) -papermill echam_monitoring.ipynb echam_monitoring_${expid}.ipynb -p expid $expid -p iniyear $iniyear -p exproot $basedir -p obsroot $obsroot +papermill echam_monitoring.ipynb echam_monitoring_${expid}.ipynb -k jupyter_mon -p expid $expid -p iniyear $iniyear -p exproot $basedir -p obsroot $obsroot jupyter-nbconvert --no-input --to html echam_monitoring_${expid}.ipynb mv -v *.html $basedir/$expid/mon/echam/ diff --git a/configs/setups/foci/foci.yaml b/configs/setups/foci/foci.yaml index 988a1f490..061da3f6e 100644 --- a/configs/setups/foci/foci.yaml +++ b/configs/setups/foci/foci.yaml @@ -33,35 +33,73 @@ general: postprocessing: false post_time: "00:05:00" compute_time: "01:30:00" - + environment_changes: + choose_computer.name: + glogin: + add_export_vars: + - 'ECHAM6_THREADS=1' + - 'NEMO_THREADS=1' + - 'I_MPI_DEBUG=1' available_versions: - default - default_autotools - fs + - fs_oasismct4 - default_oasismct4 + - default_oasismct5 + - mops_oasismct4 - agrif - agrif_oasismct4 + - agrif_mops_oasismct4 choose_version: + # default version, uses OASIS3MCT2.8 as MPIESM default: couplings: - nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci + # same as above but compilation of ECHAM6 with autotools as in + # old mkexp based environment. 
Included as test setup when I (Sebastian) + tried (without success) to debug why FOCI within ESM-Tools is ~20% slower default_autotools: couplings: - nemo-ORCA05_LIM2_KCM_AOW_autotools+echam-6.3.05p2-foci_autotools + # same as default but using the nonlinear free surface (key_vvl) fs: couplings: - nemo-ORCA05_LIM2_KCM_AOW_FS+echam-6.3.05p2-foci + # same as above but coupler updated to OASISMCT4. IMPORTANT: uses different coupling + # settings and interpolation methods + fs_oasismct4: + couplings: + - nemo-ORCA05_LIM2_KCM_AOW_OASISMCT4+echam-6.3.05p2-foci_oasismct4 + # same as default but coupler updated to OASISMCT4. IMPORTANT: uses different coupling + # settings and interpolation methods default_oasismct4: couplings: - - nemo-ORCA05_LIM2_KCM_AOW+echam-6.3.05p2-foci_oasismct4 + - nemo-ORCA05_LIM2_KCM_AOW_OASISMCT4+echam-6.3.05p2-foci_oasismct4 + # exactly the same as default_oasismct4 but uses OASISMCT5 instead of OASISMCT4 + default_oasismct5: + couplings: + - nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+echam-6.3.05p2-foci_oasismct5 + # includes MOPS in the ocean, uses OASISMCT4.
IMPORTANT: uses different coupling + # settings and interpolation methods w.r.t mops with OASISMCT2.8 (only available in old + # mkexp based setup of FOCI) + mops_oasismct4: + couplings: + - nemo-ORCA05_LIM2_FOCI_MOPS+echam-6.3.05p2-foci_oasismct4 + # FOCI with AGRIF using old file based coupling based on OASISMCT2.8, no AOW tracer agrif: couplings: - - nemo-ORCA05_LIM2_FOCI_AGRIF_AOW+echam-6.3.05p2-foci + - nemo-ORCA05_LIM2_FOCI_AGRIF+echam-6.3.05p2-foci + # FOCI using direct coupling via OASISMCT4, IMPORTANT: uses different coupling + # settings and interpolation methods agrif_oasismct4: couplings: - nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+echam-6.3.05p2-foci_oasismct4 - #add_include_models: - #- xios + # FOCI with MOPS using direct coupling via OASISMCT4, IMPORTANT: uses different coupling + # settings and interpolation methods + agrif_mops_oasismct4: + couplings: + - nemo-ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4+echam-6.3.05p2-foci_oasismct4 ######################################################################################### ########### necessary changes to submodels compared to standalone setups ################ @@ -72,7 +110,7 @@ echam: # compiletime and runtime env changes have to be placed within one of the components (does not # matter which one) and then they are valid for all components. 
compiletime_environment_changes: - iolibraries: geomar_libs + iolibraries: system_libs choose_computer.name: levante: #compiler_mpi: intel2022_impi2021 @@ -82,14 +120,14 @@ echam: #compiler_mpi: intel2019_impi2019 compiler_mpi: intel2022_psmpi2022 blogin: - compiler_mpi: intel2019_impi2019 + compiler_mpi: intel2024_impi2021 glogin: - compiler_mpi: intel2019_impi2019 + compiler_mpi: intel2023_impi2021 nesh: compiler_mpi: intel2019_impi2019 runtime_environment_changes: - iolibraries: geomar_libs + iolibraries: system_libs choose_computer.name: levante: #compiler_mpi: intel2022_impi2021 @@ -101,13 +139,17 @@ echam: #add_module_actions: # - "source $EBROOTIMPI/bin/mpivars.sh -ofi_internal=0 release_mt" blogin: - compiler_mpi: intel2019_impi2019 - add_module_actions: - - "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" + #compiler_mpi: intel2019_impi2019 + #add_module_actions: + # - "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" + compiler_mpi: intel2019_impi2019_nemo4 + #compiler_mpi: intel2021_impi2021 glogin: - compiler_mpi: intel2019_impi2019 + compiler_mpi: intel2023_impi2021 + #compiler_mpi: intel2019_impi2019_nemo4 add_module_actions: - - "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" + - "source $I_MPI_ROOT/../../setvars.sh" + #- "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" nesh: compiler_mpi: intel2019_impi2019 add_module_actions: @@ -116,16 +158,64 @@ echam: choose_general.version: default: version: "6.3.05p2-foci" + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] default_autotools: version: "6.3.05p2-foci" + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] fs: version: "6.3.05p2-foci" + foci_fields: [AIceFrac, A_SSTSST, 
A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] + fs_oasismct4: + version: "6.3.05p2-foci_oasismct4" + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] default_oasismct4: version: "6.3.05p2-foci_oasismct4" + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] + default_oasismct5: + version: "6.3.05p2-foci_oasismct4" + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] + + mops_oasismct4: + version: "6.3.05p2-foci_oasismct4" + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT, A_CO2OCE, CO2TRAAT, FFOCE_A, CO2CONAT, CO2FLXAT] + add_namelist_changes: + namelist.echam: + runctl: + lcouple_co2: true + submodelctl: + lco2: true + radctl: + ico2: 1 + + add_restartstreams: + - tracer + + add_streams: + - tracer agrif: version: "6.3.05p2-foci" + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] agrif_oasismct4: version: "6.3.05p2-foci_oasismct4" + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] + + agrif_mops_oasismct4: + version: 
"6.3.05p2-foci_oasismct4" + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT, A_CO2OCE, CO2TRAAT, FFOCE_A, CO2CONAT, CO2FLXAT] + add_namelist_changes: + namelist.echam: + runctl: + lcouple_co2: true + submodelctl: + lco2: true + radctl: + ico2: 1 + + add_restartstreams: + - tracer + + add_streams: + - tracer pool_dir: "${computer.pool_directories.focipool}/ECHAM6" greenhouse_dir: "${pool_dir}/input" @@ -135,6 +225,15 @@ echam: nproca: 24 nprocb: 24 + add_compile_infos: + add_choose_version: + 6.3.05p2-foci_oasismct4: + add_export_vars: + MPEU_Fortran_INCLUDE_DIRECTORIES: "${oasis3mct.model_dir}/INSTALL_OASIS.ESMTOOLS/include" + MPEU_Fortran_LIBRARIES: "${oasis3mct.model_dir}/INSTALL_OASIS.ESMTOOLS/lib" + #MPEU_Fortran_INCLUDE_DIRECTORIES: "/home/shktkeme/esm/models/foci-agrif_mops_oasismct4/oasis/INSTALL_OASIS.ESMTOOLS/include" + #MPEU_Fortran_LIBRARIES: "/home/shktkeme/esm/models/foci-agrif_mops_oasismct4/oasis/INSTALL_OASIS.ESMTOOLS/lib" + # do we want to link files or copy. For ECHAM6 we can't link the restart file # as hdrestart.nc is named the same for input and output. Another crazy habbit of ECHAM6 file_movements: @@ -311,9 +410,13 @@ echam: message: "high wind speed was found during your run..." 
file: "${general.work_dir}/atmout" frequency: 90 - - foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] + # due to a bug in esm_master all POSSIBLE coupling fields have to be listed here + # esm_master shouldn't bother about coupling fields but does for some unknown reason + # Sebastian Wahl 05/2023 + #foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT] + + foci_fields: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QsrMix, A_QnsIce, A_QnsMix, ATotRain, ATotSnow, AIceEvap, A_dQnsdT, A_CO2OCE, CO2TRAAT, FFOCE_A, CO2CONAT, CO2FLXAT] coupling_fields: "[[foci_fields-->FIELD]]": grid: atmo @@ -483,10 +586,37 @@ nemo: version: "ORCA05_LIM2_KCM_AOW_autotools" fs: version: "ORCA05_LIM2_KCM_AOW_FS" - agrif: + fs_oasismct4: + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" + # the default coupling settings all branches of ORCA05_LIM2_KCM_AOW_FS_OASISMCT4 + # are the ones used with FOCIOIFS + # to be able to use the same branches as FOCIOIFS, we just need to change two + # coupling settings in namsbc_cpl. 
With this we avoid using a separate branch + # just for the two settings below + default_oasismct4: + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" + add_namelist_changes: + namelist_cfg: + namsbc_cpl: + sn_rcv_emp: ['kcm_lim_2','no','','',''] + sn_rcv_rnf: ['none','no','','',''] + default_oasismct5: + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" + add_namelist_changes: + namelist_cfg: + namsbc_cpl: + sn_rcv_emp: ['kcm_lim_2','no','','',''] + sn_rcv_rnf: ['none','no','','',''] + agrif_aow: version: "ORCA05_LIM2_FOCI_AGRIF_AOW" + agrif: + version: "ORCA05_LIM2_FOCI_AGRIF" agrif_oasismct4: version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + agrif_mops_oasismct4: + version: "ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4" + mops_oasismct4: + version: "ORCA05_LIM2_FOCI_MOPS_OASISMCT4" model_dir: ${general.model_dir}/nemo-${nemo.version} setup_dir: ${general.model_dir} @@ -598,6 +728,11 @@ oasis3mct: - 'OIceEvap <--conserv2-- AIceEvap' - 'O_dQnsdT <--conserv2-- A_dQnsdT' + add_input_files: + rmp_a2n_B: rmp_a2n_B + rmp_a2n_C: rmp_a2n_C + rmp_n2a_C: rmp_n2a_C + default_autotools: version: "foci" export_mode_a2o: EXPORTED @@ -625,6 +760,11 @@ oasis3mct: - 'OIceEvap <--conserv2-- AIceEvap' - 'O_dQnsdT <--conserv2-- A_dQnsdT' + add_input_files: + rmp_a2n_B: rmp_a2n_B + rmp_a2n_C: rmp_a2n_C + rmp_n2a_C: rmp_n2a_C + fs: version: "foci" export_mode_a2o: EXPORTED @@ -652,6 +792,11 @@ oasis3mct: - 'OIceEvap <--conserv2-- AIceEvap' - 'O_dQnsdT <--conserv2-- A_dQnsdT' + add_input_files: + rmp_a2n_B: rmp_a2n_B + rmp_a2n_C: rmp_a2n_C + rmp_n2a_C: rmp_n2a_C + agrif: version: "foci" export_mode_a2o: EXPNEST @@ -679,6 +824,11 @@ oasis3mct: - 'OIceEvap <--conserv2-- AIceEvap' - 'O_dQnsdT <--conserv2-- A_dQnsdT' + add_input_files: + rmp_a2n_B: rmp_a2n_B + rmp_a2n_C: rmp_a2n_C + rmp_n2a_C: rmp_n2a_C + default_oasismct4: norestart: "T" @@ -697,26 +847,99 @@ oasis3mct: - 'A_SnwTck <--gauswgt_average-- O_SnwTck' - 'A_OCurx1 <--gauswgt_average-- O_OCurx1' - 'A_OCury1 <--gauswgt_average-- O_OCury1' - #- 'AIceFrac 
<--bilinear_average-- OIceFrac' - #- 'A_SSTSST <--bilinear_average-- O_SSTSST' - #- 'A_TepIce <--bilinear_average-- O_TepIce' - #- 'A_IceTck <--bilinear_average-- O_IceTck' - #- 'A_SnwTck <--bilinear_average-- O_SnwTck' - #- 'A_OCurx1 <--bilinear_average-- O_OCurx1' - #- 'A_OCury1 <--bilinear_average-- O_OCury1' flxatmos: - - 'O_OTaux1 <--gauswgt-- A_OTaux1' - - 'O_OTauy1 <--gauswgt-- A_OTauy1' - - 'O_ITaux1 <--gauswgt-- A_ITaux1' - - 'O_ITauy1 <--gauswgt-- A_ITauy1' - - 'O_QsrIce <--gauswgt-- A_QsrIce' - - 'O_QsrMix <--gauswgt_glbpos-- A_QsrMix' - - 'O_QnsIce <--gauswgt-- A_QnsIce' - - 'O_QnsMix <--gauswgt_glbpos-- A_QnsMix' - - 'OTotRain <--gauswgt_global-- ATotRain' - - 'OTotSnow <--gauswgt-- ATotSnow' - - 'OIceEvap <--gauswgt-- AIceEvap' - - 'O_dQnsdT <--gauswgt-- A_dQnsdT' + - 'O_OTaux1 <--a2ot_gauswgt-- A_OTaux1' + - 'O_OTauy1 <--a2ot_gauswgt-- A_OTauy1' + - 'O_ITaux1 <--a2ot_gauswgt-- A_ITaux1' + - 'O_ITauy1 <--a2ot_gauswgt-- A_ITauy1' + - 'O_QsrIce <--a2oc_gauswgt-- A_QsrIce' + - 'O_QsrMix <--a2oc_gauswgt_glbpos-- A_QsrMix' + - 'O_QnsIce <--a2oc_gauswgt-- A_QnsIce' + - 'O_QnsMix <--a2oc_gauswgt_glbpos-- A_QnsMix' + - 'OTotRain <--a2oc_gauswgt_global-- ATotRain' + - 'OTotSnow <--a2oc_gauswgt-- ATotSnow' + - 'OIceEvap <--a2oc_gauswgt-- AIceEvap' + - 'O_dQnsdT <--a2oc_gauswgt-- A_dQnsdT' + + add_input_files: + rmp_ot2a: rmp_ot2a + rmp_a2oc: rmp_a2oc + rmp_a2ot: rmp_a2ot + + default_oasismct5: + + norestart: "T" + mct_version: "5.0" + version: "5.0-geomar" + + export_mode_a2o: EXPORTED + + coupling_target_fields: + + sstocean: + - 'AIceFrac <--gauswgt_average-- OIceFrac' + - 'A_SSTSST <--gauswgt_average-- O_SSTSST' + - 'A_TepIce <--gauswgt_average-- O_TepIce' + - 'A_IceTck <--gauswgt_average-- O_IceTck' + - 'A_SnwTck <--gauswgt_average-- O_SnwTck' + - 'A_OCurx1 <--gauswgt_average-- O_OCurx1' + - 'A_OCury1 <--gauswgt_average-- O_OCury1' + flxatmos: + - 'O_OTaux1 <--a2ot_gauswgt-- A_OTaux1' + - 'O_OTauy1 <--a2ot_gauswgt-- A_OTauy1' + - 'O_ITaux1 <--a2ot_gauswgt-- 
A_ITaux1' + - 'O_ITauy1 <--a2ot_gauswgt-- A_ITauy1' + - 'O_QsrIce <--a2oc_gauswgt-- A_QsrIce' + - 'O_QsrMix <--a2oc_gauswgt_glbpos-- A_QsrMix' + - 'O_QnsIce <--a2oc_gauswgt-- A_QnsIce' + - 'O_QnsMix <--a2oc_gauswgt_glbpos-- A_QnsMix' + - 'OTotRain <--a2oc_gauswgt_global-- ATotRain' + - 'OTotSnow <--a2oc_gauswgt-- ATotSnow' + - 'OIceEvap <--a2oc_gauswgt-- AIceEvap' + - 'O_dQnsdT <--a2oc_gauswgt-- A_dQnsdT' + + add_input_files: + rmp_ot2a: rmp_ot2a + rmp_a2oc: rmp_a2oc + rmp_a2ot: rmp_a2ot + + mops_oasismct4: + + norestart: "T" + mct_version: "4.0" + version: "4.0-geomar" + + export_mode_a2o: EXPORTED + + coupling_target_fields: + + sstocean: + - 'AIceFrac <--gauswgt_average-- OIceFrac' + - 'A_SSTSST <--gauswgt_average-- O_SSTSST' + - 'A_TepIce <--gauswgt_average-- O_TepIce' + - 'A_IceTck <--gauswgt_average-- O_IceTck' + - 'A_SnwTck <--gauswgt_average-- O_SnwTck' + - 'A_OCurx1 <--gauswgt_average-- O_OCurx1' + - 'A_OCury1 <--gauswgt_average-- O_OCury1' + - 'A_CO2OCE <--gauswgt_average-- CO2OCEAN' + - 'CO2TRAAT <--gauswgt_average-- CO2TRA' + - 'FFOCE_A <--gauswgt_average-- FF_OCE' + + flxatmos: + - 'O_OTaux1 <--a2ot_gauswgt-- A_OTaux1' + - 'O_OTauy1 <--a2ot_gauswgt-- A_OTauy1' + - 'O_ITaux1 <--a2ot_gauswgt-- A_ITaux1' + - 'O_ITauy1 <--a2ot_gauswgt-- A_ITauy1' + - 'O_QsrIce <--a2oc_gauswgt-- A_QsrIce' + - 'O_QsrMix <--a2oc_gauswgt_glbpos-- A_QsrMix' + - 'O_QnsIce <--a2oc_gauswgt-- A_QnsIce' + - 'O_QnsMix <--a2oc_gauswgt_glbpos-- A_QnsMix' + - 'OTotRain <--a2oc_gauswgt_global-- ATotRain' + - 'OTotSnow <--a2oc_gauswgt-- ATotSnow' + - 'OIceEvap <--a2oc_gauswgt-- AIceEvap' + - 'O_dQnsdT <--a2oc_gauswgt-- A_dQnsdT' + - 'O_AtmCO2 <--a2oc_gauswgt-- CO2CONAT' + - 'CO2FLXOC <--a2oc_gauswgt_global-- CO2FLXAT' agrif_oasismct4: @@ -741,26 +964,20 @@ oasis3mct: - 'A_SnwTck <--gauswgt_average-- O_SnwTck' - 'A_OCurx1 <--gauswgt_average-- O_OCurx1' - 'A_OCury1 <--gauswgt_average-- O_OCury1' - #- 'AIceFrac <--bilinear_average-- OIceFrac' - #- 'A_SSTSST <--bilinear_average-- O_SSTSST' - 
#- 'A_TepIce <--bilinear_average-- O_TepIce' - #- 'A_IceTck <--bilinear_average-- O_IceTck' - #- 'A_SnwTck <--bilinear_average-- O_SnwTck' - #- 'A_OCurx1 <--bilinear_average-- O_OCurx1' - #- 'A_OCury1 <--bilinear_average-- O_OCury1' flxatmos: - - 'O_OTaux1 <--gauswgt-- A_OTaux1' - - 'O_OTauy1 <--gauswgt-- A_OTauy1' - - 'O_ITaux1 <--gauswgt-- A_ITaux1' - - 'O_ITauy1 <--gauswgt-- A_ITauy1' - - 'O_QsrIce <--gauswgt-- A_QsrIce' - - 'O_QsrMix <--gauswgt_glbpos-- A_QsrMix' - - 'O_QnsIce <--gauswgt-- A_QnsIce' - - 'O_QnsMix <--gauswgt_glbpos-- A_QnsMix' - - 'OTotRain <--gauswgt_global-- ATotRain' - - 'OTotSnow <--gauswgt-- ATotSnow' - - 'OIceEvap <--gauswgt-- AIceEvap' - - 'O_dQnsdT <--gauswgt-- A_dQnsdT' + - 'O_OTaux1 <--a2ot_gauswgt-- A_OTaux1' + - 'O_OTauy1 <--a2ot_gauswgt-- A_OTauy1' + - 'O_ITaux1 <--a2ot_gauswgt-- A_ITaux1' + - 'O_ITauy1 <--a2ot_gauswgt-- A_ITauy1' + - 'O_QsrIce <--a2oc_gauswgt-- A_QsrIce' + - 'O_QsrMix <--a2oc_gauswgt_glbpos-- A_QsrMix' + - 'O_QnsIce <--a2oc_gauswgt-- A_QnsIce' + - 'O_QnsMix <--a2oc_gauswgt_glbpos-- A_QnsMix' + - 'OTotRain <--a2oc_gauswgt_global-- ATotRain' + - 'OTotSnow <--a2oc_gauswgt-- ATotSnow' + - 'OIceEvap <--a2oc_gauswgt-- AIceEvap' + - 'O_dQnsdT <--a2oc_gauswgt-- A_dQnsdT' + # TODO: test if the naming of the remapping file works flxatmos_1: - '1_O_OTaux1 <--gauswgt-- A_OTaux1' - '1_O_OTauy1 <--gauswgt-- A_OTauy1' @@ -774,6 +991,108 @@ oasis3mct: - '1_OTotSnow <--gauswgt-- ATotSnow' - '1_OIceEvap <--gauswgt-- AIceEvap' - '1_O_dQnsdT <--gauswgt-- A_dQnsdT' + + add_input_files: + rmp_ot2a: rmp_ot2a + rmp_a2oc: rmp_a2oc + rmp_a2ot: rmp_a2ot + rmp_a2agr1: rmp_a2agr1 + + fs_oasismct4: + + norestart: "T" + mct_version: "4.0" + version: "4.0-geomar" + + export_mode_a2o: EXPORTED + + coupling_target_fields: + + sstocean: + - 'AIceFrac <--gauswgt_average-- OIceFrac' + - 'A_SSTSST <--gauswgt_average-- O_SSTSST' + - 'A_TepIce <--gauswgt_average-- O_TepIce' + - 'A_IceTck <--gauswgt_average-- O_IceTck' + - 'A_SnwTck <--gauswgt_average-- 
O_SnwTck' + - 'A_OCurx1 <--gauswgt_average-- O_OCurx1' + - 'A_OCury1 <--gauswgt_average-- O_OCury1' + flxatmos: + - 'O_OTaux1 <--a2ot_gauswgt-- A_OTaux1' + - 'O_OTauy1 <--a2ot_gauswgt-- A_OTauy1' + - 'O_ITaux1 <--a2ot_gauswgt-- A_ITaux1' + - 'O_ITauy1 <--a2ot_gauswgt-- A_ITauy1' + - 'O_QsrIce <--a2oc_gauswgt-- A_QsrIce' + - 'O_QsrMix <--a2oc_gauswgt_glbpos-- A_QsrMix' + - 'O_QnsIce <--a2oc_gauswgt-- A_QnsIce' + - 'O_QnsMix <--a2oc_gauswgt_glbpos-- A_QnsMix' + - 'OTotRain <--a2oc_gauswgt_global-- ATotRain' + - 'OTotSnow <--a2oc_gauswgt-- ATotSnow' + - 'OIceEvap <--a2oc_gauswgt-- AIceEvap' + - 'O_dQnsdT <--a2oc_gauswgt-- A_dQnsdT' + + add_input_files: + rmp_ot2a: rmp_ot2a + rmp_a2oc: rmp_a2oc + rmp_a2ot: rmp_a2ot + + agrif_mops_oasismct4: + + norestart: "T" + mct_version: "4.0" + version: "4.0-geomar" + + export_mode_a2o: EXPORTED + + add_restart_in_files: + flxatmos1: flxatmos1 + add_restart_out_files: + flxatmos1: flxatmos1 + + coupling_target_fields: + + sstocean: + - 'AIceFrac <--gauswgt_average-- OIceFrac' + - 'A_SSTSST <--gauswgt_average-- O_SSTSST' + - 'A_TepIce <--gauswgt_average-- O_TepIce' + - 'A_IceTck <--gauswgt_average-- O_IceTck' + - 'A_SnwTck <--gauswgt_average-- O_SnwTck' + - 'A_OCurx1 <--gauswgt_average-- O_OCurx1' + - 'A_OCury1 <--gauswgt_average-- O_OCury1' + - 'A_CO2OCE <--gauswgt_average-- CO2OCEAN' + - 'CO2TRAAT <--gauswgt_average-- CO2TRA' + - 'FFOCE_A <--gauswgt_average-- FF_OCE' + + flxatmos: + - 'O_OTaux1 <--a2ot_gauswgt-- A_OTaux1' + - 'O_OTauy1 <--a2ot_gauswgt-- A_OTauy1' + - 'O_ITaux1 <--a2ot_gauswgt-- A_ITaux1' + - 'O_ITauy1 <--a2ot_gauswgt-- A_ITauy1' + - 'O_QsrIce <--a2oc_gauswgt-- A_QsrIce' + - 'O_QsrMix <--a2oc_gauswgt_glbpos-- A_QsrMix' + - 'O_QnsIce <--a2oc_gauswgt-- A_QnsIce' + - 'O_QnsMix <--a2oc_gauswgt_glbpos-- A_QnsMix' + - 'OTotRain <--a2oc_gauswgt_global-- ATotRain' + - 'OTotSnow <--a2oc_gauswgt-- ATotSnow' + - 'OIceEvap <--a2oc_gauswgt-- AIceEvap' + - 'O_dQnsdT <--a2oc_gauswgt-- A_dQnsdT' + - 'O_AtmCO2 <--a2oc_gauswgt-- 
CO2CONAT' + - 'CO2FLXOC <--a2oc_gauswgt_global-- CO2FLXAT' + + flxatmos_1: + - '1_O_OTaux1 <--gauswgt-- A_OTaux1' + - '1_O_OTauy1 <--gauswgt-- A_OTauy1' + - '1_O_ITaux1 <--gauswgt-- A_ITaux1' + - '1_O_ITauy1 <--gauswgt-- A_ITauy1' + - '1_O_QsrIce <--gauswgt-- A_QsrIce' + - '1_O_QsrMix <--gauswgt-- A_QsrMix' + - '1_O_QnsIce <--gauswgt-- A_QnsIce' + - '1_O_QnsMix <--gauswgt-- A_QnsMix' + - '1_OTotRain <--gauswgt-- ATotRain' + - '1_OTotSnow <--gauswgt-- ATotSnow' + - '1_OIceEvap <--gauswgt-- AIceEvap' + - '1_O_dQnsdT <--gauswgt-- A_dQnsdT' + - '1_O_AtmCO2 <--gauswgt-- CO2CONAT' + - '1_CO2FLXOC <--gauswgt-- CO2FLXAT' coupling_directions: 'opat->atmo': @@ -888,60 +1207,87 @@ oasis3mct: remapping: - gauswgt: search_bin: latitude - nb_of_neighbours: 10 - weight: 0.15 - #- mapping: - # #mapname: rmp_atmo_to_agr1_GAUSWGT_${nemo.resolution}.nc - # map_regrid_on: src + nb_of_neighbours: 9 + weight: 2.0 + - mapping: + mapname: rmp_opat_to_atmo_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + map_regrid_on: dst + # TODO: implement the remapping in a clean way + # 1. make remapping weights restart again + # 2. 
for default_oasismct4 and agrif_oasismct4 implement the mapping: correctly gauswgt: time_transformation: instant remapping: - gauswgt: search_bin: latitude - nb_of_neighbours: 10 - weight: 0.15 - #- mapping: - # #mapname: rmp_atmo_to_agr1_GAUSWGT_${nemo.resolution}.nc - # map_regrid_on: src + nb_of_neighbours: 9 + weight: 2.0 + - mapping: + mapname: rmp_atmo_to_opat_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + map_regrid_on: src + + a2ot_gauswgt: + time_transformation: instant + remapping: + - gauswgt: + search_bin: latitude + nb_of_neighbours: 9 + weight: 2.0 + - mapping: + mapname: rmp_atmo_to_opat_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + map_regrid_on: src - gauswgt_global: + a2oc_gauswgt: time_transformation: instant remapping: - gauswgt: search_bin: latitude - nb_of_neighbours: 10 - weight: 0.15 - #- mapping: - # #mapname: rmp_atmo_to_agr1_GAUSWGT_${nemo.resolution}.nc - # map_regrid_on: src + nb_of_neighbours: 9 + weight: 2.0 + - mapping: + mapname: rmp_atmo_to_opac_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + map_regrid_on: src + + a2oc_gauswgt_global: + time_transformation: instant + remapping: + - gauswgt: + search_bin: latitude + nb_of_neighbours: 9 + weight: 2.0 + - mapping: + mapname: rmp_atmo_to_opac_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + map_regrid_on: src postprocessing: conserv: method: global - gauswgt_glbpos: + a2oc_gauswgt_glbpos: time_transformation: instant remapping: - gauswgt: search_bin: latitude - nb_of_neighbours: 10 - weight: 0.15 - #- mapping: - # #mapname: rmp_atmo_to_agr1_GAUSWGT_${nemo.resolution}.nc - # map_regrid_on: src + nb_of_neighbours: 9 + weight: 2.0 + - mapping: + mapname: rmp_atmo_to_opac_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + map_regrid_on: src postprocessing: conserv: method: glbpos input_files: - rmp_a2n_B: rmp_a2n_B - rmp_a2n_C: rmp_a2n_C - rmp_n2a_C: rmp_n2a_C areas: areas masks: masks grids: grids input_in_work: + rmp_ot2a: 
rmp_opat_to_atmo_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + rmp_a2oc: rmp_atmo_to_opac_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + rmp_a2ot: rmp_atmo_to_opat_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + rmp_a2agr1: rmp_atmo_to_${nemo.nest1}_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + rmp_a2n_B: rmp_atmo_to_opat_BICUBIC_D_${echam.resolution}_${nemo.resolution}.nc rmp_a2n_C: rmp_atmo_to_opac_CONSERV_FRACNNEI_D_${echam.resolution}_${nemo.resolution}.nc rmp_n2a_C: rmp_opat_to_atmo_CONSERV_FRACNNEI_${echam.resolution}_${nemo.resolution}.nc @@ -949,10 +1295,17 @@ oasis3mct: masks: masks.nc grids: grids.nc + # GAUSWGT_192 refers to a namcouple setting of GAUSWGT D SCALAR LATITUDE 1 9 2 input_sources: + rmp_ot2a: ${input_dir}/GAUSWGT_192/rmp_opat_to_atmo_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + rmp_a2oc: ${input_dir}/GAUSWGT_192/rmp_atmo_to_opac_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + rmp_a2ot: ${input_dir}/GAUSWGT_192/rmp_atmo_to_opat_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + rmp_a2agr1: ${input_dir}/GAUSWGT_192/rmp_atmo_to_${nemo.nest1}_GAUSWGT_${echam.resolution}_${nemo.resolution}.nc + rmp_a2n_B: ${input_dir}/rmp_atmo_to_opat_BICUBIC_D_${echam.resolution}_${nemo.resolution}.nc rmp_a2n_C: ${input_dir}/rmp_atmo_to_opac_CONSERV_FRACNNEI_D_${echam.resolution}_${nemo.resolution}.nc rmp_n2a_C: ${input_dir}/rmp_opat_to_atmo_CONSERV_FRACNNEI_${echam.resolution}_${nemo.resolution}.nc + areas: ${input_dir}/areas_${echam.resolution}_${nemo.resolution}_frac.nc masks: ${input_dir}/masks_${echam.resolution}_${nemo.resolution}_frac.nc grids: ${input_dir}/grids_${echam.resolution}_${nemo.resolution}_frac.nc diff --git a/configs/setups/foci/nemo_monitoring.sh b/configs/setups/foci/nemo_monitoring.sh index b20956738..43c954146 100755 --- a/configs/setups/foci/nemo_monitoring.sh +++ b/configs/setups/foci/nemo_monitoring.sh @@ -4,11 +4,19 @@ ################################################################################# # 
default settings for the variables that can be changed via the command line # +# seb-wahl: The monitoring needs some rework to work on the updated HRLN cluster +# - test support of apptainer +# - remove dependency on Willi's 10 year old python2 based code, do the AMOC etc +# calculations with XIOS online of based on xorca etc. +echo +echo "`date`: The NEMO monitoring needs some re-design before it can be run" +echo "`date`: on $(hostname) again. Sebastian Wahl 2024-11-26" +echo +exit 0 basedir=~/esm/esm-experiments/ # change via -p EXP_ID="test_experiment" # change via -r -envfile="$basedir/$EXP_ID/scripts/env.sh" # change via -x -ncpus=24 -use_singularity=true +ncpus=48 +use_apptainer=true # #------- DO NOT EDIT BELOW THIS LINE UNLESS YOU KNOW WHAT YOU ARE DOING ------# # @@ -48,7 +56,7 @@ shift $((OPTIND-1)) [ "$1" = "--" ] && shift # update vars with command line options if set -envfile="$basedir/$EXP_ID/scripts/env.sh" +[[ -z $envfile ]] && envfile="$basedir/$EXP_ID/scripts/env.sh" export PBS_NP=${ncpus} echo @@ -103,12 +111,12 @@ sw_bind="" if [[ "$(hostname)" =~ "nesh" ]] ; then echo "`date` NOTE: This code runs on $(hostname)" # need to do this as /gxfs_work1/gxfs_home_interim/sw is a soft link to - # /gxfs_work1/gxfs_home_interim/sw which singularity does not like as the + # /gxfs_work1/gxfs_home_interim/sw which apptainer does not like as the # soft link can't be resolved in the container sw_bind="--bind /gxfs_home/sw:/gxfs_work1/gxfs_home_interim/sw" - shome_bind="--bind /home/smomw235:/home/smomw235" + shome_bind="--bind /gxfs_home/geomar/smomw235:/gxfs_home/geomar/smomw235" foci_input2="/gxfs_work1/geomar/smomw235/foci_input2" - # only used if use_singularity=false + # only used if use_apptainer=false MINICONDA_HOME=~smomw235/miniconda3 module load nco elif [[ "$(hostname)" =~ blogin* ]] || [[ "$(hostname)" =~ glogin* ]] || \ @@ -121,7 +129,7 @@ elif [[ "$(hostname)" =~ blogin* ]] || [[ "$(hostname)" =~ glogin* ]] || \ # required shared libs are 
installed in our conda environment export LD_LIBRARY_PATH=/opt/conda/envs/monitoring/lib:$LD_LIBRARY_PATH foci_input2="/scratch/usr/shkifmsw/foci_input2" - # only used if use_singularity=false + # only used if use_apptainer=false MINICONDA_HOME=~shkifmsw/miniconda3 module load nco else @@ -142,9 +150,9 @@ exclude_freq_from_diag_and_plots = 1d,5d,1m,730h,5y,10y,20y simple_mode = True EOF -if $use_singularity ; then - module load singularity - # run monitoring from the singularity container +if $use_apptainer ; then + module load apptainer + # run monitoring from the apptainer container # TODO: currently the .sif files is expected in the cwd, this is not the best solution ln -sfv ${foci_input2}/SINGULARITY/mkexp-monitoring.sif . @@ -153,7 +161,7 @@ if $use_singularity ; then SINGULARITYENV_LD_LIBRARY_PATH=$LD_LIBRARY_PATH \ SINGULARITYENV_APPEND_PATH=$(dirname $(which ncrcat)) \ SINGULARITYENV_PYTHONPATH=/usr/local/Monitoring \ - singularity exec --bind $WORK:$WORK --bind $HOME:$HOME \ + apptainer exec --bind $WORK:$WORK --bind $HOME:$HOME \ $sw_bind $input_bind $shome_bind --bind ${IO_LIB_ROOT}/bin:/usr/local/bin \ mkexp-monitoring.sif python \ /usr/local/Monitoring/scripts/monitoring_parallel.py \ @@ -185,11 +193,11 @@ FLORIDA_BAHAMAS_transports DRAKE_transports AUS_AA_transports ITF_transports MOZAMBIQUE_CHANNEL_transports SOUTH_AFR_transports KERGUELEN_transports CAMPBELL_transports AFR_AUSTR_transports AUSTR_AM_transports AM_AFR_transports DAVIS_transports -icediags moc psi speed +icediags moc mocsig psi speed section_23W section_ACT section_DAVIS section_OSNAP section_STAtlOMZ section_WoceA1E section_WoceA1W section_WoceA24N section_WoceS04A -amoc_max_25.000N amoc_max_36.000N amoc_max_45.000N" +amoc_max_25.000N amoc_max_36.000N amoc_max_45.000N amoc_max_26.500N" frequency='1y' datadir=${MONITORING_PATH}/derived_data/${EXP_ID} diff --git a/configs/setups/foci/nemo_postprocessing.sh b/configs/setups/foci/nemo_postprocessing.sh index 9cbeb62ac..70a86dee8 100755 
--- a/configs/setups/foci/nemo_postprocessing.sh +++ b/configs/setups/foci/nemo_postprocessing.sh @@ -21,7 +21,11 @@ module load nco || module load NCO OCEAN_CHECK_NETCDF4=false # set to false to skip netcdf4 conversion, time consuming but reduces file size by at least 50% OCEAN_CONVERT_NETCDF4=true -OCEAN_FILE_TAGS="grid_T grid_U grid_V icemod ptrc_T" +# In NEMO 3.6 we had grid_T, grid_U etc +# In NEMO 4 we also use diaptr2D, diaptr3D and grid_U_vsum +# It should be fine to add them here. The script will search for them +# if they exist they will be used, if not they will be skipped +OCEAN_FILE_TAGS="grid_T grid_U grid_V grid_W icemod ptrc_T diaptr2D diaptr3D grid_U_vsum" # Other settings max_jobs=20 @@ -180,7 +184,12 @@ endyear=$(date --date="$enddate" "+%Y") # simulation that ran in multiyear intervals. if [[ -z $increment ]] ; then if [[ $startyear == $endyear ]] ; then - increment=$((endmonth - startmonth + 1)) + # freq is 'y' for a full single year + if [[ "$startmonth" == "01" ]] && [[ "$endmonth" == "12" ]] ; then + increment=1 + else + increment=$((${endmonth#0} - ${startmonth#0} + 1)) + fi else increment=$((endyear - startyear + 1)) fi @@ -226,7 +235,7 @@ if ${OCEAN_CONVERT_NETCDF4} ; then else currdate1=$nextdate currdate2=$(date --date="$currdate1 + ${increment} year - 1 day" "+%Y%m%d") - nextdate=$(date --date="$currdate2 + ${increment} year" "+%Y%m%d") + nextdate=$(date --date="$currdate1 + ${increment} year" "+%Y%m%d") fi for filetag in $filetags @@ -236,7 +245,8 @@ if ${OCEAN_CONVERT_NETCDF4} ; then input=${s}_${currdate1}_${currdate2}_${filetag}.nc3 output=${s}_${currdate1}_${currdate2}_${filetag}.nc # !!! output files will have the same name as the old input file !!! 
- if [[ -f $output ]] ; then + echo " Looking for $output " + if [[ -f $output ]] ; then mv $output $input # If too many jobs run at the same time, wait @@ -310,7 +320,7 @@ do else currdate1=$nextdate currdate2=$(date --date="$currdate1 + ${increment} year - 1 day" "+%Y%m%d") - nextdate=$(date --date="$currdate2 + ${increment} year" "+%Y%m%d") + nextdate=$(date --date="$currdate1 + ${increment} year" "+%Y%m%d") fi for filetag in $filetags @@ -384,7 +394,7 @@ do else currdate1=$nextdate currdate2=$(date --date="$currdate1 + 1 year - 1 day" "+%Y%m%d") - nextdate=$(date --date="$currdate2 + 1 year" "+%Y%m%d") + nextdate=$(date --date="$currdate1 + 1 year" "+%Y%m%d") fi # output=${EXP_ID}_1y_${currdate1}_${currdate2}_${filetag}.nc diff --git a/configs/setups/focioifs/focioifs.yaml b/configs/setups/focioifs/focioifs.yaml index de3902290..2783d15a3 100644 --- a/configs/setups/focioifs/focioifs.yaml +++ b/configs/setups/focioifs/focioifs.yaml @@ -50,43 +50,138 @@ general: - xios - oasis3mct - rnfmap - requires: - - nemobasemodel-3.6foci + #requires: + #- nemobasemodel-3.6foci available_versions: - - agrif + - 'agrif' + - 'agrif-3.0' + - 'agrif-4.0' - '2.0' - '2.1' - '2.1-O12' - '2.1.1' + - '2.2' + - '3.0' + - '3.0.1' + - '4.0' + - '4.1' + - '4.1.1' + + # Version descriptions: + # + # 2.0 43r3 + NEMO 3.6 + # 2.1 - new runoff method + # 2.1.1 - split runoff and calving + # + # 3.0 NEMO visc + diff changes. MCT5. 
+ # 3.0.1 - no Smag (as used in 3.0-agrif) + # + # 4.0 Using NEMO 4 + # 4.0.1 - with ECWAM (with backported Charnock cap) + # 4.1 - with eORCA05.L75 + # 4.1.1 - with ECWAM (and backported Charnock cap) + # agrif-4.0 + # + # 2.1 also comes with O12 (ORCA12) + # 3.0 comes with AGRIF + # choose_version: - '2.1': + # Backport Charnock cap from cy47 + # From Jean Bidlot + '4.1.1': runoff_method: "EM21" - calving_method: "old" + calving_method: "JS" couplings: - - nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci21 + - nemo-eORCA05_Z75_SI3_COUPLED+oifs43r3-foci40 + + # Introduce ECWAM wave model + '4.1': + runoff_method: "EM21" + calving_method: "JS" + couplings: + - nemo-eORCA05_Z75_SI3_COUPLED+oifs43r3-foci40 + + # NEMO 4.2.2 and AGRIF + 'agrif-4.0': + runoff_method: "EM21" + calving_method: "JS" + couplings: + - nemo-ORCA05_SI3_COUPLED_AGRIF+oifs43r3-foci40 + + # Using NEMO 4.2.2 + '4.0': + runoff_method: "EM21" + calving_method: "JS" + couplings: + - nemo-ORCA05_Z46_SI3_COUPLED+oifs43r3-foci40 + + # Turn off Smagorinsky, turn off vvl + # Twin to agrif-3.0 + '3.0.1': + runoff_method: "EM21" + calving_method: "JS" + couplings: + - nemo-ORCA05_LIM2_KCM_AOW_OASISMCT5+oifs43r3-foci30 + + # Same as 2.2 but renamed 3.0 + # Used for CMIP6 DECK runs + '3.0': + runoff_method: "EM21" + calving_method: "JS" + couplings: + - nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT5_SMAG+oifs43r3-foci30 + + # Smagorinsky + more tuning + '2.2': + runoff_method: "EM21" + calving_method: "JS" + couplings: + - nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG+oifs43r3-foci22 + + # Introduce calving similar to AWI-CM3 '2.1.1': runoff_method: "EM21" calving_method: "JS" couplings: - nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci211 + + # 2.1 with ORCA12 '2.1-O12': runoff_method: "EM21" calving_method: "old" couplings: - nemo-ORCA12_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci21 + + # Introduce new runoff remapping from Eric M + '2.1': + runoff_method: "EM21" + calving_method: "old" + couplings: + - 
nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci21 + + # First version with 43r3 '2.0': runoff_method: "old" calving_method: "old" couplings: - nemo-ORCA05_LIM2_KCM_AOW_FS_OASISMCT4+oifs43r3-foci - agrif: + + # Older version using AGRIF + 'agrif': runoff_method: "EM21" - calving_method: "old" + calving_method: "JS" couplings: - nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4+oifs43r3-foci21 + + # using AGRIF and OASIS3-MCT5. Identical to 3.0.1 + 'agrif-3.0': + runoff_method: "EM21" + calving_method: "JS" + couplings: + - nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT5+oifs43r3-foci30 - version: "2.0" + version: "3.0" scenario: "PI-CTRL" resolution: ${oifs.resolution}_${nemo.resolution} postprocessing: false @@ -134,13 +229,18 @@ oifs: compiler_mpi: intel2022_openmpi iolibraries: system_libs blogin: - compiler_mpi: intel2019_impi2019 + compiler_mpi: intel2024_impi2021 glogin: - compiler_mpi: intel2019_impi2019 + compiler_mpi: intel2023_impi2021 + #compiler_mpi: intel2023_ompi416 + iolibraries: system_libs + #compiler_mpi: intel2021_impi2019 + #iolibraries: spack_libs juwels: compiler_mpi: intel2020_psmpi2020 nesh: - compiler_mpi: intel2020_impi2020 + compiler_mpi: intel2023_impi2021 + iolibraries: system_libs runtime_environment_changes: iolibraries: geomar_libs @@ -155,18 +255,35 @@ oifs: compiler_mpi: intel2022_openmpi iolibraries: system_libs blogin: - compiler_mpi: intel2019_impi2019 + compiler_mpi: intel2024_impi2021 glogin: - compiler_mpi: intel2019_impi2019 + #compiler_mpi: intel2023_ompi416 + compiler_mpi: intel2023_impi2021 + iolibraries: system_libs + #compiler_mpi: intel2021_impi2019 + #iolibraries: spack_libs juwels: compiler_mpi: intel2020_psmpi2020 nesh: - compiler_mpi: intel2020_impi2020 - add_module_actions: - - "source $I_MPI_ROOT/intel64/bin/mpivars.sh release_mt" + compiler_mpi: intel2023_impi2021 + iolibraries: system_libs version: "43r3" + # This is 0 (off) by default in oifs.yaml + # but should always be 2 for coupled runs + # See comment in oifs.yaml for 
explanation + sclct_switch: 2 + + # By default (in oifs.yaml) the Cariolle scheme is used + # But to be CMIP6 compliant we should use the O3 + # prescribed by O3 + o3_scheme: cmip6 + + # From 43r3v2 we have the option to use + # a new solar spectrum + solarspectrum: True + pool_dir: ${computer.pool_directories.focipool} model_dir: ${general.model_dir}/oifs-${oifs.version} setup_dir: ${general.model_dir} @@ -190,23 +307,17 @@ oifs: # 575 would also be ok (575 = 23 * 25) nproc: 287 - # By default we turn off ECWAM wave model - wam: False - - namelist_changes: - fort.4: - NAMFOCICFG: - # Turn on coupling - # TODO: Add variable for AGRIF coupling - FOCI_CPL_NEMO_LIM: ".true." - # For coupled setups we can use ocean currents use_ocean_currents: False choose_use_ocean_currents: True: add_namelist_changes: fort.4: + NAMMCC: + # If false then u,v=0 + LNEMOLIMCUR: ".true." NAEPHY: + # Send u,v to surf scheme LECURR: ".true." False: add_namelist_changes: @@ -218,14 +329,41 @@ oifs: choose_resolution: TCO95: nproc: 287 + add_namelist_changes: + fort.4: + NAMCLDP: + # Increase diffusion rate for clouds + # see eq 7.47 in IFS 43r3 documentation, part IV + # This was done to avoid strong cold bias in Tco95 + # which was caused by excessive cloud cover + RCLDIFF: 5.e-6 # default 3e-6 + NAMMCC: + # Lower ocean albedo to increase global surface temp + RALBSEAD_NML: 0.045 TCO199: nproc: 575 + TCO319: + add_namelist_changes: + fort.4: + NAMCLDP: + # Same tuning as for Tco95, but I think this is sensitive + # to time step, so I have no good reason to set this for + # Tco319 where time step is 900s. 
+ # For now, increase RCLDIFF, but this should be investigated + RCLDIFF: 5.e-6 # default 3e-6 + NAMMCC: + # Lower ocean albedo to increase global surface temp + RALBSEAD_NML: 0.045 TCO399: nproc: 862 choose_general.resolution: TCO95_ORCA05: nproc: 279 + TCO95_eORCA05: + nproc: 288 + TCO95_eORCA025: + nproc: 279 TCO95_ORCA12: nproc: 287 @@ -234,7 +372,12 @@ oifs: # L grid is wet points including lakes # R grid is dry points foci_fields_a: [A_QsrMix, A_QnsMix, ATotRain, ATotEvap, ATotSnow, AIceEvap] - foci_fields_l: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, + choose_nemo.generation: + "3.6": + foci_fields_l: [AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, + A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QnsIce, A_dQnsdT] + "4.2": + foci_fields_l: [A_AlbIce, AIceFrac, A_SSTSST, A_TepIce, A_IceTck, A_SnwTck, A_OCurx1, A_OCury1, A_OTaux1, A_OTauy1, A_ITaux1, A_ITauy1, A_QsrIce, A_QnsIce, A_dQnsdT] #foci_fields_r: [A_Runoff, A_Calving] @@ -249,18 +392,174 @@ oifs: # add coupling fields for AGRIF choose_general.version: - agrif: + "4.1.1": + # Turn on wave model + wam: True + # Namelist changes + # Flags for WAM are controlled by the variable above + add_namelist_changes: + fort.4: + NAMFOCICFG: + FOCI_CPL_NEMO_LIM: false + # this will set LNEMOLIMTEMP etc in OpenIFS + FOCI_CPL_NEMO_SI3: true + NAERAD: + NAERANT_SCALE: 1 # turn off aerosol scaling + NAMMCC: + LNEMOLIMALB: ".true." + LNEMOLIMTEMP: ".true." + wam_namelist: + NALINE: + # turn on cap on Charnock + LLCAPCHNK: true + + "4.1": + # Turn on wave model + wam: True + # Namelist changes + # Flags for WAM are controlled by the variable above + add_namelist_changes: + fort.4: + NAMFOCICFG: + FOCI_CPL_NEMO_LIM: false + # this will set LNEMOLIMTEMP etc in OpenIFS + FOCI_CPL_NEMO_SI3: true + NAERAD: + NAERANT_SCALE: 1 # turn off aerosol scaling + NAMMCC: + LNEMOLIMALB: ".true." + LNEMOLIMTEMP: ".true." 
+ wam_namelist: + NALINE: + # turn off cap on Charnock + LLCAPCHNK: false + "agrif-4.0": + wam: False + add_namelist_changes: + fort.4: + NAMFOCICFG: + FOCI_CPL_NEMO_LIM: false + FOCI_CPL_NEMO_SI3: true + NAERAD: + NAERANT_SCALE: 1 # turn off aerosol scaling + NAMMCC: + # coupling of albedo is true by default + # but better be sure + LNEMOLIMALB: ".true." + # coupling of ice temp is NOT on by default + # Also, be warned that this may be hard-coded to .false. + # Run grep LNEMOLIMTEMP NODE.001_01 to double-check + LNEMOLIMTEMP: ".true." + "4.0": + wam: False + add_namelist_changes: + fort.4: + NAMFOCICFG: + FOCI_CPL_NEMO_LIM: false + FOCI_CPL_NEMO_SI3: true + NAERAD: + NAERANT_SCALE: 1 # turn off aerosol scaling + NAMMCC: + # coupling of albedo is true by default + # but better be sure + LNEMOLIMALB: ".true." + # coupling of ice temp is NOT on by default + # Also, be warned that this may be hard-coded to .false. + # Run grep LNEMOLIMTEMP NODE.001_01 to double-check + LNEMOLIMTEMP: ".true." + "3.0.1": + wam: False + add_namelist_changes: + fort.4: + NAMFOCICFG: + # Turn on coupling + # TODO: Add variable for AGRIF coupling + FOCI_CPL_NEMO_LIM: ".true." + NAERAD: + NAERANT_SCALE: 1 # turn off aerosol scaling + NAMMCC: + # Lower Arctic sea ice albedo by 20% + # Note: The ice temperature from LIM2 is not correctly + # coupled to the ice temperature in OpenIFS (ISTL1) + # This may be the cause for the excessive Arctic sea ice cover + # in EC-Earth and FOCI-OpenIFS. + # A very ugly solution: Make ice darker to reduce bias + # Note: The coupling was fixed for OpenIFS and SI3, + # so the fix here is only for LIM2. + RALBSCALE_AR: 0.8 + "3.0": + wam: False + add_namelist_changes: + fort.4: + NAMFOCICFG: + # Turn on coupling + # TODO: Add variable for AGRIF coupling + FOCI_CPL_NEMO_LIM: ".true." 
+ NAERAD: + NAERANT_SCALE: 1 # turn off aerosol scaling + NAMMCC: + RALBSCALE_AR: 0.8 # see comment for v3.0.1 + "3.0-agrif": + wam: False + add_namelist_changes: + fort.4: + NAMFOCICFG: + # Turn on coupling + # TODO: Add variable for AGRIF coupling + FOCI_CPL_NEMO_LIM: ".true." + NAERAD: + NAERANT_SCALE: 1 # turn off aerosol scaling + NAMMCC: + RALBSCALE_AR: 0.8 # see comment for v3.0.1 + # Note: 2.2 is idential to 3.0 + # but is kept here for back-compatibility + "2.2": + wam: False + add_namelist_changes: + fort.4: + NAMFOCICFG: + FOCI_CPL_NEMO_LIM: ".true." + NAERAD: + NAERANT_SCALE: 1 + NAMMCC: + RALBSCALE_AR: 0.8 + "*": + wam: False + solarspectrum: False + o3_scheme: default + add_namelist_changes: + fort.4: + NAMFOCICFG: + # Turn on coupling + # TODO: Add variable for AGRIF coupling + FOCI_CPL_NEMO_LIM: ".true." + NAMMCC: + RALBSCALE_AR: 0.8 # see comment for v3.0.1 + + # If we run with 1 nest, then add additional coupling fields + choose_with_nest1: + 1: + # Coupling fields are different for NEMO 3.6 and 4.2, so we need to put it in a choose block + # In 4.2 we add AlbIce. The OIceFrc changes name... 
+ add_choose_nemo.generation: + "3.6": + fociagrif_fields_l: [M01_AIceFrac, M01_A_SSTSST, M01_A_TepIce, M01_A_IceTck, + M01_A_SnwTck, M01_A_OCurx1, M01_A_OCury1, M01_A_OTaux1, + M01_A_OTauy1, M01_A_ITaux1, M01_A_ITauy1, M01_A_QsrIce, + M01_A_QnsIce, M01_ATotSnow, M01_AIceEvap, M01_A_dQnsdT] + "4.2": + fociagrif_fields_l: [M01_A_AlbIce, M01_AIceFrac, M01_A_SSTSST, M01_A_TepIce, M01_A_IceTck, + M01_A_SnwTck, M01_A_OCurx1, M01_A_OCury1, M01_A_OTaux1, + M01_A_OTauy1, M01_A_ITaux1, M01_A_ITauy1, M01_A_QsrIce, + M01_A_QnsIce, M01_ATotSnow, M01_AIceEvap, M01_A_dQnsdT] fociagrif_fields_a: [M01_A_QsrMix, M01_A_QnsMix, M01_ATotRain] - fociagrif_fields_l: [M01_AIceFrac, M01_A_SSTSST, M01_A_TepIce, M01_A_IceTck, - M01_A_SnwTck, M01_A_OCurx1, M01_A_OCury1, M01_A_OTaux1, - M01_A_OTauy1, M01_A_ITaux1, M01_A_ITauy1, M01_A_QsrIce, - M01_A_QnsIce, M01_ATotSnow, M01_AIceEvap, M01_A_dQnsdT] fociagrif_fields_n: [M01_A_AgrSpg] add_namelist_changes: fort.4: NAMFOCICFG: # Turns on coupling to AGRIF # Each coupling field is duplicated once + #FOCI_CPL_NEMO_LIM: ".true." 
FOCI_CPL_NB_OCE_ZOOM: 1 add_coupling_fields: "[[fociagrif_fields_a-->FIELD]]": @@ -304,15 +603,86 @@ oifs: nproc: 1 nemo: - - # For 2.1-O12 we choose ORCA12 config for NEMO + # Joakim uses nproca / nprocb sometimes instead of jpni / jpnj + nproca: ${jpni} + nprocb: ${jpnj} + + # See nemo.yaml for description of versions choose_general.version: + "4.1.1": + version: "ORCA05_Z46_SI3_COUPLED" + resolution: ORCA05 + "4.1": + version: "ORCA05_Z46_SI3_COUPLED" + resolution: ORCA05 + "agrif-4.0": + version: "ORCA05_SI3_COUPLED_AGRIF" + resolution: ORCA05 + "4.0": + version: "ORCA05_Z46_SI3_COUPLED" + resolution: ORCA05 + "3.0.1": + version: "ORCA05_LIM2_KCM_AOW_OASISMCT4" + resolution: ORCA05 + # After consulting with Gurvan + add_namelist_changes: + namelist_cfg: + namdyn_ldf: + # Increased viscosity + # (old value of 6e11 was very low) + rn_ahm_0_blp: -1.709e12 + namtra_ldf: + # Reduce the effect of GM + rn_aeiv_scale: 0.5 + # This does not work. We cant change branch + # in this choose block. + # In the future, we should have one version for all + # NEMO 3.6 configs and just change branches. 
+ #branch: foci30 + "3.0": + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG" + resolution: ORCA05 + # After consulting with Gurvan + add_namelist_changes: + namelist_cfg: + namtra_ldf: + # Reduce the effect of GM + # This signficantly increases ACC transport + rn_aeiv_scale: 0.5 + "2.2": + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4_SMAG" + resolution: ORCA05 + # After consulting with Gurvan + add_namelist_changes: + namelist_cfg: + namtra_ldf: + # Reduce the effect of GM + # This signficantly increases ACC transport + rn_aeiv_scale: 0.5 "2.1-O12": version: "ORCA12_LIM2_KCM_AOW_FS_OASISMCT4" resolution: ORCA12 # dont this this does anything, but keep it for safety default: version: "3.6foci" + "agrif-3.0": + version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + resolution: ORCA05 + add_namelist_changes: + namelist_cfg: + namdyn_ldf: + rn_ahm_0_blp: -1.709e12 + namtra_ldf: + rn_aeiv_scale: 0.5 + "agrif": + version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + resolution: ORCA05 + add_namelist_changes: + namelist_cfg: + namdyn_ldf: + rn_ahm_0_blp: -1.709e12 + namtra_ldf: + rn_aeiv_scale: 0.5 "*": version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" resolution: ORCA05 @@ -327,10 +697,17 @@ nemo: work_to_run: link choose_resolution: + # TODO: check settings with Joakim + eORCA025: + nproca: 48 + nprocb: 48 + loccunif_nb: 16 ORCA05: nproca: 24 nprocb: 20 loccunif_nb: 4 + eORCA05: + loccunif_nb: 4 # Joakim: Need to find good choice for ORCA12 ORCA12: nproca: 24 @@ -341,6 +718,9 @@ nemo: choose_nemo.nest1: viking10: loccunif_nb_nest: 16 + ORION10: + loccunif_nb_nest: 16 + loccunif_nb_nest: 12 namelist_changes: @@ -372,13 +752,13 @@ nemo: namsbc_cpl: sn_rcv_emp: ['conservative', 'no', '', '', ''] sn_rcv_rnf: ['coupled', 'no', '', '', ''] - # Commented out calving since its only for Antarctica - # and its not needed in VIKING10 - #sn_rcv_cal: ['coupled', 'no', '', '', ''] + # Runoff and calving is multiplied by agrif mask + # so calving = 0 for VIKING10, but not for e.g. 
ORION + sn_rcv_cal: ['coupled', 'no', '', '', ''] coupling_freq: "$(( ${time_step} / ${nemo.time_step} ))" - + # number of neighbours for runoff remapping (if runoff_method = EM21) # 1 is ok for ORCA1, so 4 should be ok for ORCA05 # Small number means lots of runoff in each few grid cells which could be unstable @@ -401,6 +781,12 @@ nemo: ## coupling_freq_in_steps: $((${oasis3mct.coupling_time_step} / ${nemo.time_step})) + choose_oasis3mct.o2a_remap_method: + "conserv": + sst_grid_name: "opac" + "gauswgt": + sst_grid_name: "opat" + # Postprocessing choose_general.postprocessing: True: @@ -437,24 +823,64 @@ rnfmap: time_step: ${oasis3mct.coupling_time_step} choose_general.version: + "4.1.1": + version: foci211 + runoff_method: "JK22" + "4.1": + version: foci211 + runoff_method: "JK22" + "agrif-4.0": + version: agrif + runoff_method: "JK22" + with_agrif: True + # Set runoff mapper to send runoff to AGRIF as well + add_namelist_changes: + namelist.runoffmapper: + namrnfmap: + AgrifzoomNb: 1 + "4.0": + version: foci211 + runoff_method: "JK22" + "3.0.1": + version: foci211 + runoff_method: "JK22" + "3.0": + version: foci211 + runoff_method: "JK22" "2.1.1": version: foci211 runoff_method: "JK22" + "2.2": + version: foci211 + runoff_method: "JK22" "2.1": version: focioifs21 runoff_method: "EM21" "2.1-O12": version: focioifs21 - runoff_method: "EM21" + runoff_method: "EM21" + "2.0": + version: focioifs + runoff_method: "default" + "agrif": + version: agrif + runoff_method: "JK22" #"EM21" + with_agrif: True + add_namelist_changes: + namelist.runoffmapper: + namrnfmap: + AgrifzoomNb: 1 + "agrif-3.0": + version: agrif + runoff_method: "JK22" #"EM21" + with_agrif: True + add_namelist_changes: + namelist.runoffmapper: + namrnfmap: + AgrifzoomNb: 1 "*": version: focioifs1 runoff_method: "old" - - #choose_general.runoff_method: - # "EM21": - # runoff_method: "EM21" - # "*": - # runoff_method: "old" choose_general.calving_method: 'JS': @@ -514,8 +940,11 @@ oasis3mct: r2o_lag: 0 
#"${nemo.time_step}" # adding lag here causes deadlock a2o_seq: 1 o2a_seq: 1 - - coupling_time_step: 10800 + + # Coupling time step was 10800 (3hr) for FOCI1, but this is not a good choice + # We should use 3600 (1hr) instead. + # The shorter the better + coupling_time_step: 3600 #10800 export_mode: EXPORTED choose_general.resolution: @@ -523,22 +952,45 @@ oasis3mct: # Default values (probably ok if grids similar) nb_of_neighbours_a2o: 9 weight_a2o: 2.0 + TCO95_eORCA025: + # Default values (probably ok if grids similar) + nb_of_neighbours_a2o: 9 + weight_a2o: 2.0 TCO95_ORCA12: # Suggested by Eric Maisonnave for Tco95 -> VIKING10 nb_of_neighbours_a2o: 15 weight_a2o: 0.1 + TCO319_ORCA12: + # We take nb 25 and variance 0.1 from TL799 VIKING10 + # see: https://cerfacs.fr/wp-content/uploads/2019/11/GlobC-TR-Maisonnave-odus_report_4-1.pdf + # Error goes down until nb 25, then constant + # Error also goes up for variance above 0.1 + nb_of_neighbours_a2o: 25 + weight_a2o: 0.1 + + # The default remapping methods are: + # atm->oce GAUSWGT + # oce->atm GAUSWGT + # cal->oce BILINEAR + # + # Alternative: + # atm->oce CONSERV (does not work) + # atm->oce BILINCUB (bilinear for all except bicubic for wind) + # oce->atm BILINEAR (bilinear for all) + # oce->atm CONSERV (does not work) + # cal->oce GAUSWGT (seems ok) + a2o_remap_method: "gauswgt" + o2a_remap_method: "gauswgt" + c2o_remap_method: "default" + + # Alternative: CONSERV (does not work) + agr2a_remap_method: "default" + coupling_target_fields: # Coupling ocean fields to atm - sstocean: - - 'AIceFrac:A_SSTSST:A_TepIce:A_IceTck:A_SnwTck:A_OCurx1:A_OCury1 <--o2agauswgt-- OIceFrac:O_SSTSST:O_TepIce:O_IceTck:O_SnwTck:O_OCurx1:O_OCury1' - #- 'AIceFrac <--o2agauswgt-- OIceFrac' - #- 'A_SSTSST <--o2agauswgt-- O_SSTSST' - #- 'A_TepIce <--o2agauswgt-- O_TepIce' - #- 'A_IceTck <--o2agauswgt-- O_IceTck' - #- 'A_SnwTck <--o2agauswgt-- O_SnwTck' - #- 'A_OCurx1 <--o2agauswgt-- O_OCurx1' - #- 'A_OCury1 <--o2agauswgt-- O_OCury1' + # This 
is done in choose below as the coupling fields depend + # on the NEMO version used. # Couple HTESSEL runoff to river routing: # This is done in a choose_general.version block below @@ -548,44 +1000,73 @@ oasis3mct: # Couple atm fluxes to ocean (non-conserving) flxatmos: - - 'O_QsrIce:O_QnsIce:O_dQnsdT <--a2ogauswgtnc-- A_QsrIce:A_QnsIce:A_dQnsdT' - #- 'O_QsrIce <--a2ogauswgtnc-- A_QsrIce' - #- 'O_QnsIce <--a2ogauswgtnc-- A_QnsIce' - #- 'OTotSnow <--a2ogauswgtnc-- ATotSnow' - #- 'OIceEvap <--a2ogauswgtnc-- AIceEvap' - #- 'O_dQnsdT <--a2ogauswgtnc-- A_dQnsdT' + - 'O_QsrIce:O_QnsIce:O_dQnsdT <--a2o_nc-- A_QsrIce:A_QnsIce:A_dQnsdT' # Coupled atm stress to ocean (non-conserving) atmtau: - - 'O_OTaux1:O_OTauy1:O_ITaux1:O_ITauy1 <--a2ogauswgtnc-- A_OTaux1:A_OTauy1:A_ITaux1:A_ITauy1' - #- 'O_OTaux1 <--a2ogauswgtnc-- A_OTaux1' - #- 'O_OTauy1 <--a2ogauswgtnc-- A_OTauy1' - #- 'O_ITaux1 <--a2ogauswgtnc-- A_ITaux1' - #- 'O_ITauy1 <--a2ogauswgtnc-- A_ITauy1' + - 'O_OTaux1:O_OTauy1:O_ITaux1:O_ITauy1 <--aw2o_nc-- A_OTaux1:A_OTauy1:A_ITaux1:A_ITauy1' # Couple atm fluxes to ocean (conserving) atmflx: - - 'O_QsrMix:O_QnsMix:OTotRain:OTotSnow:OIceEvap:OTotEvap <--a2ogauswgtcn-- A_QsrMix:A_QnsMix:ATotRain:ATotSnow:AIceEvap:ATotEvap' - #- 'O_QsrMix <--a2ogauswgtcn-- A_QsrMix' - #- 'O_QnsMix <--a2ogauswgtcn-- A_QnsMix' - #- 'OTotRain <--a2ogauswgtcn-- ATotRain' - #- 'OTotSnow <--a2ogauswgtcn-- ATotSnow' - #- 'OIceEvap <--a2ogauswgtcn-- AIceEvap' - #- 'OTotEvap <--a2ogauswgtcn-- ATotEvap' + - 'O_QsrMix:O_QnsMix:OTotRain:OTotSnow:OIceEvap:OTotEvap <--a2o_cn-- A_QsrMix:A_QnsMix:ATotRain:ATotSnow:AIceEvap:ATotEvap' + + # Coupling fields are different between 3.6 and 4.2 + # In 3.6, we dont have ice albedo. Ice fraction is called OIceFrac + # In 4.2, we have ice albedo (O_AlbIce). 
Ice fraction is called OIceFrc + choose_nemo.generation: + "3.6": + add_coupling_target_fields: + sstocean: + - 'AIceFrac:A_SSTSST:A_TepIce:A_IceTck:A_SnwTck:A_OCurx1:A_OCury1 <--o2a_nc-- OIceFrac:O_SSTSST:O_TepIce:O_IceTck:O_SnwTck:O_OCurx1:O_OCury1' + "4.2": + add_coupling_target_fields: + sstocean: + - 'A_AlbIce:AIceFrac:A_SSTSST:A_TepIce:A_IceTck:A_SnwTck:A_OCurx1:A_OCury1 <--o2a_nc-- O_AlbIce:OIceFrc:O_SSTSST:O_TepIce:OIceTck:OSnwTck:O_OCurx1:O_OCury1' - choose_general.version: - '2.1.1': - add_coupling_target_fields: - # Couple HTESSEL runoff to river routing - rnfatm: - - 'R_Runoff_atm:R_Calving_atm <--a2rgauswgt-- A_Runoff:A_Calving' - - agrif: + #choose_general.version: + choose_nemo.nest1: + # Couple HTESSEL runoff to river routing + # For versions < 2.2, we only couple runoff + # For versions >=2.2, we couple runoff and calving + # This requires a new branch of rnfmap, the JS calving_method, and this part + # Note: For nests, this is a different beast (see below) + # And before you complain, yes, I know this is not nicely coded. 
+ false: + choose_general.version: + "2.0": + add_coupling_target_fields: + rnfatm: + - 'R_Runoff_atm <--a2rgauswgt-- A_Runoff' + "2.1": + add_coupling_target_fields: + rnfatm: + - 'R_Runoff_atm <--a2rgauswgt-- A_Runoff' + "2.1-O12": + add_coupling_target_fields: + rnfatm: + - 'R_Runoff_atm <--a2rgauswgt-- A_Runoff' + "*": + add_coupling_target_fields: + rnfatm: + - 'R_Runoff_atm:R_Calving_atm <--a2rgauswgt-- A_Runoff:A_Calving' + #'2.1.1': + # add_coupling_target_fields: + # # Couple HTESSEL runoff to river routing + # rnfatm: + # - 'R_Runoff_atm:R_Calving_atm <--a2rgauswgt-- A_Runoff:A_Calving' + #'2.2': + # add_coupling_target_fields: + # # Couple HTESSEL runoff to river routing + # rnfatm: + # - 'R_Runoff_atm:R_Calving_atm <--a2rgauswgt-- A_Runoff:A_Calving' + + "*": add_restart_in_files: - rmp_a2agr_L1: rmp_a2agr_L1 + #rmp_a2agr_L1: rmp_a2agr_L1 rmp_a2agr_A1: rmp_a2agr_A1 rmp_agr2a_1L: rmp_agr2a_1L rmp_agr2a_2L: rmp_agr2a_2L + rmp_agr2r_R1: rmp_agr2r_R1 rmp_r2agr_R1: rmp_r2agr_R1 rmp_c2agr_R1: rmp_c2agr_R1 @@ -597,16 +1078,17 @@ oasis3mct: agrifspg: agrifspg add_restart_out_files: - rmp_a2agr_L1: rmp_a2agr_L1 + #rmp_a2agr_L1: rmp_a2agr_L1 rmp_a2agr_A1: rmp_a2agr_A1 rmp_agr2a_1L: rmp_agr2a_1L rmp_agr2a_2L: rmp_agr2a_2L + rmp_agr2r_R1: rmp_agr2r_R1 rmp_r2agr_R1: rmp_r2agr_R1 rmp_c2agr_R1: rmp_c2agr_R1 sstocean1: sstocean1 flxatmos1: flxatmos1 - #rnrunoff1: rnrunoff1 + rnrunoff1: rnrunoff1 atmtau1: atmtau1 atmflx1: atmflx1 agrifspg: agrifspg @@ -614,89 +1096,36 @@ oasis3mct: add_coupling_target_fields: # Couple HTESSEL runoff to river routing rnfatm: - - 'R_Runoff_atm <--a2rgauswgt-- A_Runoff' - sstocean_1: - #- 'M01_AIceFrac <--agr2agauswgt-- 1_OIceFrac' - #- 'M01_A_SSTSST <--agr2agauswgt-- 1_O_SSTSST' - #- 'M01_A_TepIce <--agr2agauswgt-- 1_O_TepIce' - #- 'M01_A_IceTck <--agr2agauswgt-- 1_O_IceTck' - #- 'M01_A_SnwTck <--agr2agauswgt-- 1_O_SnwTck' - #- 'M01_A_OCurx1 <--agr2agauswgt-- 1_O_OCurx1' - #- 'M01_A_OCury1 <--agr2agauswgt-- 1_O_OCury1' - #- 'M01_AIceFrac 
<--agr2adistwgt-- 1_OIceFrac' - #- 'M01_A_SSTSST <--agr2adistwgt-- 1_O_SSTSST' - #- 'M01_A_TepIce <--agr2adistwgt-- 1_O_TepIce' - #- 'M01_A_IceTck <--agr2adistwgt-- 1_O_IceTck' - #- 'M01_A_SnwTck <--agr2adistwgt-- 1_O_SnwTck' - #- 'M01_A_OCurx1 <--agr2adistwgt-- 1_O_OCurx1' - #- 'M01_A_OCury1 <--agr2adistwgt-- 1_O_OCury1' - - 'M01_AIceFrac:M01_A_SSTSST:M01_A_TepIce:M01_A_IceTck:M01_A_SnwTck:M01_A_OCurx1:M01_A_OCury1 <--agr2abilin-- 1_OIceFrac:1_O_SSTSST:1_O_TepIce:1_O_IceTck:1_O_SnwTck:1_O_OCurx1:1_O_OCury1' - #- 'M01_AIceFrac <--agr2abilin-- 1_OIceFrac' - #- 'M01_A_SSTSST <--agr2abilin-- 1_O_SSTSST' - #- 'M01_A_TepIce <--agr2abilin-- 1_O_TepIce' - #- 'M01_A_IceTck <--agr2abilin-- 1_O_IceTck' - #- 'M01_A_SnwTck <--agr2abilin-- 1_O_SnwTck' - #- 'M01_A_OCurx1 <--agr2abilin-- 1_O_OCurx1' - #- 'M01_A_OCury1 <--agr2abilin-- 1_O_OCury1' + - 'R_Runoff_atm:R_Calving_atm <--a2rgauswgt-- A_Runoff:A_Calving' + #- 'R_Runoff_atm <--a2rgauswgt-- A_Runoff' atmflx_1: - #- '1_O_QsrMix <--a2agrgauswgtcn-- A_QsrMix' - #- '1_O_QnsMix <--a2agrgauswgtcn-- A_QnsMix' - #- '1_OTotRain <--a2agrgauswgtcn-- ATotRain' - #- '1_O_QsrMix <--a2agrdistwgtcn-- A_QsrMix' - #- '1_O_QnsMix <--a2agrdistwgtcn-- A_QnsMix' - #- '1_OTotRain <--a2agrdistwgtcn-- ATotRain' - - '1_O_QsrMix:1_O_QnsMix:1_OTotRain:1_OTotSnow:1_OTotEvap:1_OIceEvap <--a2agrgauswgtcn-- A_QsrMix:A_QnsMix:ATotRain:ATotSnow:ATotEvap:AIceEvap' - #- '1_O_QsrMix <--a2agrbilincn-- A_QsrMix' - #- '1_O_QnsMix <--a2agrbilincn-- A_QnsMix' - #- '1_OTotRain <--a2agrbilincn-- ATotRain' - #- '1_OTotSnow <--a2agrbilincn-- ATotSnow' - #- '1_OTotEvap <--a2agrbilincn-- ATotEvap' - #- '1_OIceEvap <--a2agrbilincn-- AIceEvap' + - '1_O_QsrMix:1_O_QnsMix:1_OTotRain:1_OTotSnow:1_OTotEvap:1_OIceEvap <--a2agr_cn-- A_QsrMix:A_QnsMix:ATotRain:ATotSnow:ATotEvap:AIceEvap' atmtau_1: - #- '1_O_OTaux1 <--a2agrgauswgtnc-- A_OTaux1' - #- '1_O_OTauy1 <--a2agrgauswgtnc-- A_OTauy1' - #- '1_O_ITaux1 <--a2agrgauswgtnc-- A_ITaux1' - #- '1_O_ITauy1 <--a2agrgauswgtnc-- 
A_ITauy1' - #- '1_O_OTaux1 <--a2agrdistwgt-- A_OTaux1' - #- '1_O_OTauy1 <--a2agrdistwgt-- A_OTauy1' - #- '1_O_ITaux1 <--a2agrdistwgt-- A_ITaux1' - #- '1_O_ITauy1 <--a2agrdistwgt-- A_ITauy1' - - '1_O_OTaux1:1_O_OTauy1:1_O_ITaux1:1_O_ITauy1 <--a2agrgauswgtcn-- A_OTaux1:A_OTauy1:A_ITaux1:A_ITauy1' - #- '1_O_OTaux1 <--a2agrbilin-- A_OTaux1' - #- '1_O_OTauy1 <--a2agrbilin-- A_OTauy1' - #- '1_O_ITaux1 <--a2agrbilin-- A_ITaux1' - #- '1_O_ITauy1 <--a2agrbilin-- A_ITauy1' + - '1_O_OTaux1:1_O_OTauy1:1_O_ITaux1:1_O_ITauy1 <--a2agr_cn-- A_OTaux1:A_OTauy1:A_ITaux1:A_ITauy1' flxatmos_1: - #- '1_O_QsrIce <--a2agrgauswgtnc-- A_QsrIce' - #- '1_O_QnsIce <--a2agrgauswgtnc-- A_QnsIce' - #- '1_OTotSnow <--a2agrgauswgtnc-- ATotSnow' - #- '1_OIceEvap <--a2agrgauswgtnc-- AIceEvap' - #- '1_O_dQnsdT <--a2agrgauswgtnc-- A_dQnsdT' - #- '1_O_QsrIce <--a2agrdistwgt-- A_QsrIce' - #- '1_O_QnsIce <--a2agrdistwgt-- A_QnsIce' - #- '1_OTotSnow <--a2agrdistwgt-- ATotSnow' - #- '1_OIceEvap <--a2agrdistwgt-- AIceEvap' - #- '1_O_dQnsdT <--a2agrdistwgt-- A_dQnsdT' - - '1_O_QsrIce:1_O_QnsIce:1_O_dQnsdT <--a2agrgauswgtcn-- A_QsrIce:A_QnsIce:A_dQnsdT' - #- '1_O_QsrIce <--a2agrbilin-- A_QsrIce' - #- '1_O_QnsIce <--a2agrbilin-- A_QnsIce' - #- '1_OTotSnow <--a2agrbilin-- ATotSnow' - #- '1_OIceEvap <--a2agrbilin-- AIceEvap' - #- '1_O_dQnsdT <--a2agrbilin-- A_dQnsdT' + - '1_O_QsrIce:1_O_QnsIce:1_O_dQnsdT <--a2agr_cn-- A_QsrIce:A_QnsIce:A_dQnsdT' rnrunoff_1: - #- '1_O_Runoff <--r2agrbilinear-- R_Runoff_oce' - #- '1_OCalving <--r2agrbilinear-- R_Calving_oce' - - '1_O_Runoff <--r2agrloccunif-- R_Runoff_oce' - #- '1_OCalving <--r2agrzero-- R_Calving_oce' + - '1_O_Runoff <--r2agrloccunif-- 1_R_Runoff_oce' + - '1_OCalving <--c2agrzero-- 1_R_Calving_oce' agrifspg: - #- 'M01_A_AgrSpg <--agr22agauswgt-- 1_O_AgrSpg' - 'M01_A_AgrSpg <--agr22adistwgt-- 1_O_AgrSpg' - #- 'M01_A_AgrSpg <--agr22abilin-- 1_O_AgrSpg' - '*': - # Couple HTESSEL runoff to river routing - add_coupling_target_fields: - rnfatm: - - 'R_Runoff_atm 
<--a2rgauswgt-- A_Runoff' + - '1_R_AgrSpg <--agr22rdistwgt-- 1_O_AgrSpg' + + # Coupling fields are different for 3.6 and 4.2, so we need to put this part inside a choose block + add_choose_nemo.generation: + "3.6": + add_coupling_target_fields: + sstocean_1: + - 'M01_AIceFrac:M01_A_SSTSST:M01_A_TepIce:M01_A_IceTck:M01_A_SnwTck:M01_A_OCurx1:M01_A_OCury1 <--agr2a_nc-- 1_OIceFrac:1_O_SSTSST:1_O_TepIce:1_O_IceTck:1_O_SnwTck:1_O_OCurx1:1_O_OCury1' + "4.2": + add_coupling_target_fields: + sstocean_1: + - 'M01_AIceFrac:M01_A_SSTSST:M01_A_TepIce:M01_A_IceTck:M01_A_SnwTck:M01_A_OCurx1:M01_A_OCury1:M01_A_AlbIce <--agr2a_nc-- 1_OIceFrc:1_O_SSTSST:1_O_TepIce:1_OIceTck:1_OSnwTck:1_O_OCurx1:1_O_OCury1:1_O_AlbIce' + #'*': + # # Couple HTESSEL runoff to river routing + # add_coupling_target_fields: + # rnfatm: + # - 'R_Runoff_atm <--a2rgauswgt-- A_Runoff' # new behaviour: locally conservative remapping of runoff # and split runoff and Antarctic calving @@ -706,7 +1135,7 @@ oasis3mct: add_coupling_target_fields: rnrunoff: - 'O_Runoff <--r2oloccunif-- R_Runoff_oce' - - 'OCalving <--c2obilincn-- R_Calving_oce' + - 'OCalving <--c2o_cn-- R_Calving_oce' add_restart_in_files: rmp_c2o_RC: rmp_c2o_RC rmp_r2o_RC: rmp_r2o_RC @@ -743,6 +1172,10 @@ oasis3mct: 'opat->atml': lag: ${o2a_lag} seq: 3 + # NEMO to OIFS wet points excluding peridic NEMO points (i=1, i=imax) + 'opac->atml': + lag: ${o2a_lag} + seq: 3 # OIFS wet points including lakes to NEMO including cyclic points 'atml->opat': lag: ${a2o_lag} @@ -779,25 +1212,15 @@ oasis3mct: 'rnfm->agr1': lag: ${r2o_lag} seq: 3 - 'rnfs->agr1': + 'rnfs->agrc': lag: ${r2o_lag} seq: 3 - + # AGRIF mask to runoff mapper + 'agr2->rnfm': + lag: ${o2a_lag} + seq: 2 coupling_methods: - # NEMO to OpenIFS (Lgrid) - # GAUSWGT remapping. 
No conservation post processing - o2agauswgt: - time_transformation: average - remapping: - - gauswgt: - search_bin: latitude - nb_of_neighbours: 9 - weight: 2.0 - - mapping: - mapname: rmp_opat_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc - map_regrid_on: dst - # OpenIFS (HTESSEL, Rgrid) to runoff mapper # GAUSWGT remapping. GLBPOS conservation. a2rgauswgt: @@ -884,47 +1307,18 @@ oasis3mct: postprocessing: conserv: method: global - c2obilincn: - time_transformation: average - remapping: - - bilinear: - search_bin: latitude - nb_of_search_bins: 40 - - mapping: - mapname: rmp_rnfs_to_opaa_ZERO_${nemo.resolution}.nc - map_regrid_on: dst - postprocessing: - conserv: - method: global - - # OpenIFS (Lgrid) to NEMO - # GAUSWGT remapping. No conservation post processing. - a2ogauswgtnc: - time_transformation: average - remapping: - - gauswgt: - search_bin: latitude - nb_of_neighbours: ${nb_of_neighbours_a2o} - weight: ${weight_a2o} - - mapping: - mapname: rmp_${oifs.oasis_grid_name_l}_to_opat_GAUSWGT_${nemo.resolution}.nc - map_regrid_on: src - - # OpenIFS (Agrid) to NEMO - # GAUSWGT remapping. GLBPOS conservation. - a2ogauswgtcn: - time_transformation: average - remapping: - - gauswgt: - search_bin: latitude - nb_of_neighbours: ${nb_of_neighbours_a2o} - weight: ${weight_a2o} - - mapping: - mapname: rmp_${oifs.oasis_grid_name_a}_to_opac_GAUSWGT_${nemo.resolution}.nc - map_regrid_on: src - postprocessing: - conserv: - method: glbpos + #c2obilincn: + # time_transformation: average + # remapping: + # - bilinear: + # search_bin: latitude + # nb_of_search_bins: 100 + # - mapping: + # mapname: rmp_rnfs_to_opaa_ZERO_${nemo.resolution}.nc + # map_regrid_on: dst + # postprocessing: + # conserv: + # method: global # OpenIFS (Lgrid) to AGRIF # GAUSWGT remapping. No conservation post processing. 
@@ -962,7 +1356,7 @@ oasis3mct: nb_of_neighbours: 25 weight: 2.0 - mapping: - mapname: rmp_agr1_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc + mapname: rmp_agr1_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.nest1}.nc map_regrid_on: dst # AGRIF mask to OpenIFS (Lgrid) @@ -1020,7 +1414,7 @@ oasis3mct: search_bin: latitude nb_of_neighbours: 25 - mapping: - mapname: rmp_agr1_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.resolution}.nc + mapname: rmp_agr1_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.nest1}.nc map_regrid_on: dst agr2abilin: time_transformation: average @@ -1029,7 +1423,7 @@ oasis3mct: search_bin: latitude nb_of_search_bins: 1 - mapping: - mapname: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.resolution}.nc + mapname: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.nest1}.nc map_regrid_on: dst agr22adistwgt: time_transformation: average @@ -1038,7 +1432,7 @@ oasis3mct: search_bin: latitude nb_of_neighbours: 4 - mapping: - mapname: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.resolution}.nc + mapname: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.nest1}.nc map_regrid_on: dst agr22abilin: time_transformation: average @@ -1049,6 +1443,16 @@ oasis3mct: - mapping: mapname: rmp_agr2_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.resolution}.nc map_regrid_on: dst + agr22rdistwgt: + time_transformation: average + remapping: + - distwgt: + search_bin: latitude + nb_of_neighbours: 4 + - mapping: + mapname: rmp_agr2_to_rnfm_DISTWGT.nc + map_regrid_on: dst + # Runoff to AGRIF r2agrbilinear: time_transformation: average @@ -1070,19 +1474,317 @@ oasis3mct: - mapping: mapname: rmp_rnfm_to_agr1_LOCCUNIF_${nemo.loccunif_nb_nest}_${nemo.nest1}.nc map_regrid_on: dst + + # + # Here we set the remapping method using switches + # a2o_remap_method: gauswgt, conserv, bilincub + # + # Note: gauswgt is default and is very stable + # conserv does not work. 
Some OASIS bug that we dont understand + # bilincub is bilinear for all fields but bicubic for wind stress + # + # + choose_a2o_remap_method: + "gauswgt": + rmp_a2o_nc_file: rmp_${oifs.oasis_grid_name_l}_to_opat_GAUSWGT_${nemo.resolution}.nc + rmp_aw2o_nc_file: rmp_${oifs.oasis_grid_name_l}_to_opat_GAUSWGT_${nemo.resolution}.nc + rmp_a2o_cn_file: rmp_${oifs.oasis_grid_name_a}_to_opac_GAUSWGT_${nemo.resolution}.nc + add_coupling_methods: + # OpenIFS (Lgrid) to NEMO + # GAUSWGT remapping. No conservation post processing. + a2o_nc: + time_transformation: average + remapping: + - gauswgt: + search_bin: latitude + nb_of_neighbours: ${nb_of_neighbours_a2o} + weight: ${weight_a2o} + - mapping: + mapname: ${rmp_a2o_nc_file} + map_regrid_on: src + + # OpenIFS (Lgrid) to NEMO + # GAUSWGT remapping. No conservation post processing. + aw2o_nc: + time_transformation: average + remapping: + - gauswgt: + search_bin: latitude + nb_of_neighbours: ${nb_of_neighbours_a2o} + weight: ${weight_a2o} + - mapping: + mapname: ${rmp_aw2o_nc_file} + map_regrid_on: src + + # OpenIFS (Agrid) to NEMO + # GAUSWGT remapping. GLBPOS conservation. 
+ a2o_cn: + time_transformation: average + remapping: + - gauswgt: + search_bin: latitude + nb_of_neighbours: ${nb_of_neighbours_a2o} + weight: ${weight_a2o} + - mapping: + mapname: ${rmp_a2o_cn_file} + map_regrid_on: src + postprocessing: + conserv: + method: glbpos - # Requires setting remap_matrix=0 in rmp file (see above comment for r2ozero) - r2agrzero: - time_transformation: average - remapping: - - mapping: - mapname: rmp_rnfs_to_agr1_ZERO.nc - map_regrid_on: src - postprocessing: - conserv: - method: global + "bilincub": + rmp_a2o_nc_file: rmp_${oifs.oasis_grid_name_l}_to_opat_BILINEAR_${nemo.resolution}.nc + rmp_aw2o_nc_file: rmp_${oifs.oasis_grid_name_l}_to_opat_BICUBIC_${nemo.resolution}.nc + rmp_a2o_cn_file: rmp_${oifs.oasis_grid_name_a}_to_opat_BILINEAR_${nemo.resolution}.nc + add_coupling_methods: + # OpenIFS (Lgrid) to NEMO + # BILINEAR remapping. No conservation post processing. + a2o_nc: + time_transformation: average + remapping: + - bilinear: + search_bin: latitude + nb_of_search_bins: 1 + - mapping: + mapname: ${rmp_a2o_nc_file} + map_regrid_on: src + + # OpenIFS (Lgrid) to NEMO + # BILINEAR remapping. No conservation post processing. + aw2o_nc: + time_transformation: average + remapping: + - bicubic: + search_bin: latitude + nb_of_search_bins: 1 + - mapping: + mapname: ${rmp_aw2o_nc_file} + map_regrid_on: src + + # OpenIFS (Agrid) to NEMO + # BILINEAR remapping. GLBPOS conservation. 
+ a2o_cn: + time_transformation: average + remapping: + - bilinear: + search_bin: latitude + nb_of_search_bins: 1 + - mapping: + mapname: ${rmp_a2o_cn_file} + map_regrid_on: src + postprocessing: + conserv: + method: glbpos - #add_input_files: + "conserv": + rmp_a2o_nc_file: rmp_${oifs.oasis_grid_name_l}_to_opac_CONSERV_${nemo.resolution}.nc + rmp_aw2o_nc_file: rmp_${oifs.oasis_grid_name_l}_to_opac_CONSERV_${nemo.resolution}.nc + rmp_a2o_cn_file: rmp_${oifs.oasis_grid_name_a}_to_opac_CONSERV_${nemo.resolution}.nc + add_coupling_methods: + # OpenIFS Lgrid to NEMO + # 1st order conservative + a2o_nc: + time_transformation: average + remapping: + - conserv: + search_bin: latitude + nb_of_search_bins: 500 + normalization: fracnnei + order: first + - mapping: + mapname: ${rmp_a2o_nc_file} + + # OpenIFS Lgrid to NEMO + # 1st order conservative + aw2o_nc: + time_transformation: average + remapping: + - conserv: + search_bin: latitude + nb_of_search_bins: 500 + normalization: fracnnei + order: first + - mapping: + mapname: ${rmp_aw2o_nc_file} + + # OpenIFS Agrid to NEMO + # 1st order conservative + # and GLBPOS global conservation + a2o_cn: + time_transformation: average + remapping: + - conserv: + search_bin: latitude + nb_of_search_bins: 500 + normalization: fracnnei + order: first + - mapping: + mapname: ${rmp_a2o_cn_file} + postprocessing: + conserv: + method: glbpos + choose_o2a_remap_method: + "gauswgt": + rmp_o2a_file: rmp_opat_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc + add_coupling_methods: + # NEMO to OpenIFS (Lgrid) + # GAUSWGT remapping. No conservation post processing + o2a_nc: + time_transformation: average + remapping: + - gauswgt: + search_bin: latitude + nb_of_neighbours: 9 + weight: 2.0 + - mapping: + mapname: ${rmp_o2a_file} + map_regrid_on: dst + "bilinear": + rmp_o2a_file: rmp_opat_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.resolution}.nc + add_coupling_methods: + # NEMO to OpenIFS (Lgrid) + # BILINEAR remapping. 
+ o2a_nc: + time_transformation: average + remapping: + - bilinear: + search_bin: latitude + nb_of_search_bins: 1 + - mapping: + mapname: ${rmp_o2a_file} + map_regrid_on: src + "conserv": + rmp_o2a_file: rmp_opac_to_${oifs.oasis_grid_name_l}_CONSERV_${nemo.resolution}.nc + add_coupling_methods: + # NEMO to OpenIFS (Lgrid) + # GAUSWGT remapping. No conservation post processing + o2a_nc: + time_transformation: average + remapping: + - conserv: + search_bin: latitude + nb_of_search_bins: 500 + normalization: fracnnei + order: first + - mapping: + mapname: ${rmp_o2a_file} + map_regrid_on: dst + + + choose_agr2a_remap_method: + "default": + add_coupling_methods: + # AGRIF to OpenIFS (Agrid) for SST, ice fraction etc + # Bilinear remapping + agr2a_nc: + time_transformation: average + remapping: + - bilinear: + search_bin: latitude + nb_of_search_bins: 1 + - mapping: + mapname: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.nest1}.nc + map_regrid_on: dst + a2agr_cn: + time_transformation: average + remapping: + - gauswgt: + search_bin: latitude + nb_of_neighbours: 15 + weight: 0.1 + - mapping: + mapname: rmp_${oifs.oasis_grid_name_a}_to_agr1_GAUSWGT_${nemo.nest1}.nc + map_regrid_on: src + "conserv": + add_coupling_methods: + # AGRIF to OpenIFS (Agrid) for SST, ice fraction + # 1st order conservative + agr2a_nc: + time_transformation: average + remapping: + - conserv: + search_bin: latitude + nb_of_search_bins: 500 + normalization: fracnnei + order: first + - mapping: + mapname: rmp_agr1_to_${oifs.oasis_grid_name_l}_CONSERV_${nemo.nest1}.nc + a2agr_cn: + time_transformation: average + remapping: + - conserv: + search_bin: latitude + nb_of_search_bins: 500 + normalization: fracnnei + order: first + - mapping: + mapname: rmp_${oifs.oasis_grid_name_a}_to_agr1_CONSERV_${nemo.nest1}.nc + + choose_c2o_remap_method: + "default": + add_coupling_methods: + c2o_cn: + time_transformation: average + remapping: + - bilinear: + search_bin: latitude + nb_of_search_bins: 100 + - 
mapping: + mapname: rmp_rnfs_to_opaa_ZERO_${nemo.resolution}.nc + map_regrid_on: dst + postprocessing: + conserv: + method: global + + # Requires setting remap_matrix=0 in rmp file (see above comment for r2ozero) + c2agrzero: + time_transformation: average + remapping: + - bilinear: + search_bin: latitude + nb_of_search_bins: 1000 + #- distwgt: + # search_bin: latitude + # nb_of_neighbours: 4 + - mapping: + mapname: rmp_rnfs_to_agrc_ZERO.nc + map_regrid_on: dst + postprocessing: + conserv: + method: global + + "gauswgt": + add_coupling_methods: + # BILINEAR sometimes needs a very high NB + # Better to use GAUSWGT? + c2o_cn: + time_transformation: average + remapping: + - gauswgt: + search_bin: latitude + nb_of_neighbours: 9 + weight: 2.0 + - mapping: + mapname: rmp_rnfs_to_opaa_ZERO_${nemo.resolution}.nc + map_regrid_on: dst + postprocessing: + conserv: + method: global + + c2agrzero: + time_transformation: average + remapping: + - gauswgt: + search_bin: latitude + nb_of_neighbours: 9 + weight: 2.0 + - mapping: + mapname: rmp_rnfs_to_agrc_ZERO.nc + map_regrid_on: dst + postprocessing: + conserv: + method: global + input_files: areas: areas masks: masks @@ -1096,9 +1798,6 @@ oasis3mct: #add_input_sources: input_sources: - #areas: ${pool_dir}/grids_areas_masks.nc - #masks: ${pool_dir}/grids_areas_masks.nc - #grids: ${pool_dir}/grids_areas_masks.nc areas: ${pool_dir}/areas.nc masks: ${pool_dir}/masks.nc grids: ${pool_dir}/grids.nc @@ -1109,6 +1808,7 @@ oasis3mct: rmp_a2r_RR: rmp_a2r_RR rmp_r2f_RF: rmp_r2f_RF rmp_a2o_LT: rmp_a2o_LT + rmp_aw2o_LT: rmp_aw2o_LT rmp_a2o_AC: rmp_a2o_AC #rmp_c2o_RC: rmp_c2o_RC #rmp_r2o_RC: rmp_r2o_RC @@ -1122,23 +1822,23 @@ oasis3mct: #add_restart_out_in_work: restart_out_in_work: - rmp_o2a_TL: rmp_opat_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc + rmp_o2a_TL: ${rmp_o2a_file} rmp_a2r_RR: rmp_${oifs.oasis_grid_name_r}_to_rnfa_GAUSWGT.nc rmp_r2f_RF: rmp_rnfo_to_rnfo_BILINEAR_${nemo.resolution}.nc rmp_r2o_RC: 
rmp_rnfm_to_opac_LOCCUNIF_${nemo.loccunif_nb}_${nemo.resolution}.nc - rmp_a2o_LT: rmp_${oifs.oasis_grid_name_l}_to_opat_GAUSWGT_${nemo.resolution}.nc - rmp_a2o_AC: rmp_${oifs.oasis_grid_name_a}_to_opac_GAUSWGT_${nemo.resolution}.nc - #rmp_c2o_RC: rmp_rnfs_to_opaa_BILINEAR_${nemo.resolution}.nc + rmp_a2o_LT: ${rmp_a2o_nc_file} + rmp_aw2o_LT: ${rmp_aw2o_nc_file} + rmp_a2o_AC: ${rmp_a2o_cn_file} rmp_c2o_RC: rmp_rnfs_to_opaa_ZERO_${nemo.resolution}.nc - + # These four are for AGRIF-OpenIFS remapping rmp_a2agr_L1: rmp_${oifs.oasis_grid_name_l}_to_agr1_GAUSWGT_${nemo.nest1}.nc rmp_a2agr_A1: rmp_${oifs.oasis_grid_name_a}_to_agr1_GAUSWGT_${nemo.nest1}.nc - rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.resolution}.nc - rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.resolution}.nc - #rmp_r2agr_R1: rmp_rnfo_to_agr1r_BILINEAR_${nemo.resolution}.nc + rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.nest1}.nc + rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.nest1}.nc + rmp_agr2r_R1: rmp_agr2_to_rnfm_DISTWGT.nc rmp_r2agr_R1: rmp_rnfm_to_agr1_LOCCUNIF_${nemo.loccunif_nb_nest}_${nemo.nest1}.nc - rmp_c2agr_R1: rmp_rnfm_to_agr1_ZERO.nc + rmp_c2agr_R1: rmp_rnfs_to_agrc_ZERO.nc sstocean: sstocean flxatmos: flxatmos @@ -1155,28 +1855,22 @@ oasis3mct: agrifspg: agrifspg restart_out_sources: - rmp_o2a_TL: rmp_opat_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc + rmp_o2a_TL: ${rmp_o2a_file} rmp_a2r_RR: rmp_${oifs.oasis_grid_name_r}_to_rnfa_GAUSWGT.nc - #rmp_r2f_RF: rmp_rnfo_to_rnfo_GAUSWGT_${nemo.resolution}.nc rmp_r2f_RF: rmp_rnfo_to_rnfo_BILINEAR_${nemo.resolution}.nc rmp_r2o_RC: rmp_rnfm_to_opac_LOCCUNIF_${nemo.loccunif_nb}_${nemo.resolution}.nc - #rmp_c2o_RC: rmp_rnfs_to_opaa_BILINEAR_${nemo.resolution}.nc rmp_c2o_RC: rmp_rnfs_to_opaa_ZERO_${nemo.resolution}.nc - rmp_a2o_LT: rmp_${oifs.oasis_grid_name_l}_to_opat_GAUSWGT_${nemo.resolution}.nc - rmp_a2o_AC: 
rmp_${oifs.oasis_grid_name_a}_to_opac_GAUSWGT_${nemo.resolution}.nc + rmp_a2o_LT: ${rmp_a2o_nc_file} + rmp_aw2o_LT: ${rmp_aw2o_nc_file} + rmp_a2o_AC: ${rmp_a2o_cn_file} rmp_a2agr_L1: rmp_${oifs.oasis_grid_name_l}_to_agr1_GAUSWGT_${nemo.nest1}.nc rmp_a2agr_A1: rmp_${oifs.oasis_grid_name_a}_to_agr1_GAUSWGT_${nemo.nest1}.nc - rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.resolution}.nc - rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.resolution}.nc - #rmp_r2agr_R1: rmp_rnfo_to_agr1r_BILINEAR_${nemo.resolution}.nc + rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.nest1}.nc + rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.nest1}.nc + rmp_agr2r_R1: rmp_agr2_to_rnfm_DISTWGT.nc rmp_r2agr_R1: rmp_rnfm_to_agr1_LOCCUNIF_${nemo.loccunif_nb_nest}_${nemo.nest1}.nc - rmp_c2agr_R1: rmp_rnfm_to_agr1_ZERO.nc - - #rmp_a2agr_L1: rmp_${oifs.oasis_grid_name_l}_to_agr1_GAUSWGT_${nemo.resolution}.nc - #rmp_a2agr_A1: rmp_${oifs.oasis_grid_name_a}_to_agr1_GAUSWGT_${nemo.resolution}.nc - #rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc - #rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc + rmp_c2agr_R1: rmp_rnfs_to_agrc_ZERO.nc sstocean: sstocean flxatmos: flxatmos @@ -1209,28 +1903,22 @@ oasis3mct: atmflx: atmflx restart_in_in_work: - rmp_o2a_TL: rmp_opat_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc + rmp_o2a_TL: ${rmp_o2a_file} rmp_a2r_RR: rmp_${oifs.oasis_grid_name_r}_to_rnfa_GAUSWGT.nc - #rmp_r2f_RF: rmp_rnfo_to_rnfo_GAUSWGT_${nemo.resolution}.nc rmp_r2f_RF: rmp_rnfo_to_rnfo_BILINEAR_${nemo.resolution}.nc - rmp_r2o_RC: rmp_rnfm_to_opac_LOCCUNIF_${nemo.loccunif_nb}_${nemo.resolution}.nc - #rmp_c2o_RC: rmp_rnfs_to_opaa_BILINEAR_${nemo.resolution}.nc + rmp_r2o_RC: rmp_rnfm_to_opac_LOCCUNIF_${nemo.loccunif_nb}_${nemo.resolution}.nc rmp_c2o_RC: rmp_rnfs_to_opaa_ZERO_${nemo.resolution}.nc - rmp_a2o_LT: 
rmp_${oifs.oasis_grid_name_l}_to_opat_GAUSWGT_${nemo.resolution}.nc - rmp_a2o_AC: rmp_${oifs.oasis_grid_name_a}_to_opac_GAUSWGT_${nemo.resolution}.nc + rmp_a2o_LT: ${rmp_a2o_nc_file} + rmp_aw2o_LT: ${rmp_aw2o_nc_file} + rmp_a2o_AC: ${rmp_a2o_cn_file} rmp_a2agr_L1: rmp_${oifs.oasis_grid_name_l}_to_agr1_GAUSWGT_${nemo.nest1}.nc rmp_a2agr_A1: rmp_${oifs.oasis_grid_name_a}_to_agr1_GAUSWGT_${nemo.nest1}.nc - rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.resolution}.nc - rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.resolution}.nc - #rmp_r2agr_R1: rmp_rnfo_to_agr1r_BILINEAR_${nemo.resolution}.nc + rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.nest1}.nc + rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.nest1}.nc + rmp_agr2r_R1: rmp_agr2_to_rnfm_DISTWGT.nc rmp_r2agr_R1: rmp_rnfm_to_agr1_LOCCUNIF_${nemo.loccunif_nb_nest}_${nemo.nest1}.nc - rmp_c2agr_R1: rmp_rnfm_to_agr1_ZERO.nc - - #rmp_a2agr_L1: rmp_${oifs.oasis_grid_name_l}_to_agr1_GAUSWGT_${nemo.resolution}.nc - #rmp_a2agr_A1: rmp_${oifs.oasis_grid_name_a}_to_agr1_GAUSWGT_${nemo.resolution}.nc - #rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc - #rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc + rmp_c2agr_R1: rmp_rnfs_to_agrc_ZERO.nc sstocean: sstocean flxatmos: flxatmos @@ -1247,28 +1935,22 @@ oasis3mct: agrifspg: agrifspg restart_in_sources: - rmp_o2a_TL: rmp_opat_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc + rmp_o2a_TL: ${rmp_o2a_file} rmp_a2r_RR: rmp_${oifs.oasis_grid_name_r}_to_rnfa_GAUSWGT.nc - #rmp_r2f_RF: rmp_rnfo_to_rnfo_GAUSWGT_${nemo.resolution}.nc rmp_r2f_RF: rmp_rnfo_to_rnfo_BILINEAR_${nemo.resolution}.nc rmp_r2o_RC: rmp_rnfm_to_opac_LOCCUNIF_${nemo.loccunif_nb}_${nemo.resolution}.nc - #rmp_c2o_RC: rmp_rnfs_to_opaa_BILINEAR_${nemo.resolution}.nc rmp_c2o_RC: rmp_rnfs_to_opaa_ZERO_${nemo.resolution}.nc - rmp_a2o_LT: 
rmp_${oifs.oasis_grid_name_l}_to_opat_GAUSWGT_${nemo.resolution}.nc - rmp_a2o_AC: rmp_${oifs.oasis_grid_name_a}_to_opac_GAUSWGT_${nemo.resolution}.nc + rmp_a2o_LT: ${rmp_a2o_nc_file} + rmp_aw2o_LT: ${rmp_aw2o_nc_file} + rmp_a2o_AC: ${rmp_a2o_cn_file} rmp_a2agr_L1: rmp_${oifs.oasis_grid_name_l}_to_agr1_GAUSWGT_${nemo.nest1}.nc rmp_a2agr_A1: rmp_${oifs.oasis_grid_name_a}_to_agr1_GAUSWGT_${nemo.nest1}.nc - rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.resolution}.nc - rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.resolution}.nc - #rmp_r2agr_R1: rmp_rnfo_to_agr1r_BILINEAR_${nemo.resolution}.nc + rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_BILINEAR_${nemo.nest1}.nc + rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_DISTWGT_${nemo.nest1}.nc + rmp_agr2r_R1: rmp_agr2_to_rnfm_DISTWGT.nc rmp_r2agr_R1: rmp_rnfm_to_agr1_LOCCUNIF_${nemo.loccunif_nb_nest}_${nemo.nest1}.nc - rmp_c2agr_R1: rmp_rnfm_to_agr1_ZERO.nc - - #rmp_a2agr_L1: rmp_${oifs.oasis_grid_name_l}_to_agr1_GAUSWGT_${nemo.resolution}.nc - #rmp_a2agr_A1: rmp_${oifs.oasis_grid_name_a}_to_agr1_GAUSWGT_${nemo.resolution}.nc - #rmp_agr2a_1L: rmp_agr1_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc - #rmp_agr2a_2L: rmp_agr2_to_${oifs.oasis_grid_name_l}_GAUSWGT_${nemo.resolution}.nc + rmp_c2agr_R1: rmp_rnfs_to_agrc_ZERO.nc sstocean: sstocean${this_oasis_date_stamp} flxatmos: flxatmos${this_oasis_date_stamp} @@ -1285,6 +1967,8 @@ oasis3mct: agrifspg: agrifspg${this_oasis_date_stamp} # Set up CPU layout +# A bit strange to set default depending on cores per node +# Would be better to set defaults for each machine choose_partitions.compute.cores_per_node: 24: choose_resolution: @@ -1352,3 +2036,23 @@ choose_partitions.compute.cores_per_node: nproc: 40 rnfmap: nproc: 1 + 96: + choose_resolution: + T95_ORCA05: + oifs: + # This is a good choice + # Few tasks and more threads is good for OpenIFS + #nproc: 168 + #omp_num_threads: 4 + # But since multi-threading does 
not always work + # the default will be MPI only + nproc: 288 + nemo: + nproca: 36 + nprocb: 24 + nproc: 864 + xios: + nproc: 48 + rnfmap: + nproc: 1 + diff --git a/configs/setups/focioifs/oifs_postprocessing.sh b/configs/setups/focioifs/oifs_postprocessing.sh index c8f5576cb..371ed7ff6 100755 --- a/configs/setups/focioifs/oifs_postprocessing.sh +++ b/configs/setups/focioifs/oifs_postprocessing.sh @@ -23,7 +23,7 @@ module load nco || module load NCO ATM_CHECK_NETCDF4=false # set to false to skip netcdf4 conversion, time consuming but reduces file size by at least 50% ATM_CONVERT_NETCDF4=true -ATM_FILE_TAGS="regular_sfc regular_pv regular_pl regular_ml reduced_sfc reduced_pv reduced_pl reduced_ml" +ATM_FILE_TAGS="regular_sfc regular_pv regular_pl regular_pl_zoom regular_ml regular_th reduced_sfc reduced_pv reduced_pl reduced_ml regular_th" # Other settings max_jobs=20 @@ -79,8 +79,10 @@ echo echo "Doing postprocessing in $basedir for $EXP_ID from $startdate to $enddate" echo "Using an environment from $envfile" echo +echo " Start and end given: $startdate $enddate " startdate=$(date --date "$startdate" "+%Y%m%d") enddate=$(date --date "$enddate" "+%Y%m%d") +echo " Start and end formatted: $startdate $enddate " if [[ ${#startdate} -ne 8 ]] || [[ ${#enddate} -ne 8 ]]; then echo echo " Please provide start and end date in yyyymmdd format e.g." @@ -177,25 +179,38 @@ startmonth=$(date --date="$startdate" "+%m") endyear=$(date --date="$enddate" "+%Y") endmonth=$(date --date="$enddate" "+%m") +echo " Start year,month: $startyear $startmonth " +echo " End year, month: $endyear $endmonth " + [[ "$startmonth" == "01" ]] && [[ "$endmonth" == "12" ]] && freq="y" -# calculate increment if not set, set to 1 to postprocess multiple years of +# calculate increment if not set, set to 1 to postprocess multiple years of # simulation that ran in multiyear intervals. 
if [[ -z $increment ]] ; then if [[ $startyear == $endyear ]] ; then - increment=$((endmonth - startmonth + 1)) - else + # freq is 'y' for a full single year + if [[ "$startmonth" == "01" ]] && [[ "$endmonth" == "12" ]] ; then + increment=1 + else + # remove leading 0 if it exists. Its fine for 01, 02 etc + # but 08 would be interpreted as octal number... + increment=$((${endmonth#0} - ${startmonth#0} + 1)) + fi + else increment=$((endyear - startyear + 1)) fi fi +echo " Increment: $increment " +echo " Freq $freq " + # Temporary directory id=$$ post_dir=$DATA_DIR/${id}_$startdate-$enddate [[ -d $post_dir ]] && print "Hey: previous job failed or still running; removing temp dir" rm -r $post_dir -mkdir $post_dir +mkdir -v $post_dir # # Convert OpenIFS/XIOS netcdf3 output to netcdf4 using the chunking algorithm @@ -231,7 +246,10 @@ if ${ATM_CONVERT_NETCDF4} ; then currdate2=$(date --date="$currdate1 + ${increment} year - 1 day" "+%Y%m%d") nextdate=$(date --date="$currdate2 + ${increment} year" "+%Y%m%d") fi - + + echo " Looking for files at $currdate1 " + echo " covering period $currdate1 $currdate2 " + for filetag in $filetags do for s in $steps @@ -338,16 +356,31 @@ mkdir ym # TODO: only works for yearly restart intervals at the moment # can be improved, see nemo_postprocessing.sh if ${ATM_CONVERT_NETCDF4} ; then - for ((year=startyear; year<=endyear; ++year)) - do - for filetag in $filetags + + nextdate=$startdate + while [[ $nextdate -lt $enddate ]] + do + # treat special case of 18930401, see echam_postprocessing.sh + if [[ $freq == "m" ]] ; then + currdate1=$nextdate + currdate2=$(date --date="$currdate1 + ${increment} month - 1 day" "+%Y%m%d") + nextdate=$(date --date="$currdate1 + ${increment} month" "+%Y%m%d") + else + currdate1=$nextdate + currdate2=$(date --date="$currdate1 + ${increment} year - 1 day" "+%Y%m%d") + nextdate=$(date --date="$currdate2 + ${increment} year" "+%Y%m%d") + fi + + for filetag in $filetags do - for s in $steps + # first we try to 
compute annual means from monthly files + for s in 1m 5d 1d do # !!! output files will have the same name as the old input file !!! - input=${EXP_ID}_${s}_${year}0101_${year}1231_${filetag}.nc - output=${EXP_ID}_1y_${year}0101_${year}1231_${filetag}.nc - + input=${EXP_ID}_${s}_${currdate1}_${currdate2}_${filetag}.nc + output=${EXP_ID}_1y_${currdate1}_${currdate2}_${filetag}.nc + + # if we already compute annual means using 1m, then doing it for 5d or 1d wont happen if [[ "$freq" == "y" ]] && [[ -f $input ]] && [[ ! -f ym/$output ]] && [[ ! -f ym/${output}3 ]]; then touch ym/$output diff --git a/runscripts/foci/SOCHIC/foci-agrif-1950_annual_restart_0.25xahm0_notracer_i2021.yaml b/runscripts/foci/SOCHIC/foci-agrif-1950_annual_restart_0.25xahm0_notracer_i2021.yaml new file mode 100755 index 000000000..b1971abf8 --- /dev/null +++ b/runscripts/foci/SOCHIC/foci-agrif-1950_annual_restart_0.25xahm0_notracer_i2021.yaml @@ -0,0 +1,80 @@ +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "agrif" + homedir: !ENV ${HOME} + + compute_time: "12:00:00" + initial_date: "1951-01-01T00:00:00" # Initial exp. 
date + final_date: "1952-01-01T00:00:00" # Final date of the experiment + postprocessing: true + nmonth: 12 + clean_runs: 2 + + lresume: True + ini_parent_exp_id: "WG10_MO040" + ini_parent_date: "19501231" + ini_nemo_restart_steps: 17520 + ini_parent_dir: "/scratch/usr/shkifmsw/tmp/${ini_parent_exp_id}/" + #ini_parent_dir: "/scratch/usr/shkmalod/esm-experiments/${ini_parent_exp_id}/restart/" + + # machine specific setup + account: shk00018 + base_dir: ${homedir}/esm/esm-experiments/ + +foci: + model_dir: ${general.homedir}/esm/models/foci-agrif/ + +echam: + scenario: "1950" + resolution: "T63" + restart_rate: ${general.nmonth} + restart_unit: "months" + nproca: 16 + nprocb: 12 + ini_parent_dir: "${general.ini_parent_dir}/echam/" +nemo: +# + time_step: 1200 + global_tag: "" + nest1: WG10 + #nest: NPAC10 + #agrif_dir: /path/to/my/input_files/AGRIF/agrif_${nest} + #nest: VIKING10 + #nest: INALT10x + resolution: "ORCA05" + version: "ORCA05_LIM2_FOCI_AGRIF" + restart_rate: ${general.nday} + restart_unit: "days" + jpni: 36 + jpnj: 24 + nproc: 864 + # jpni and jpnj are 0 by default, i.e. automatic distribution on CPUs + # + # if an initial run does not work, or if you experience instabilities in NEMO or the NEST + # the following settings may help to get past those instabilities + namelist_changes: + namelist_cfg: + namctl: + nn_timing: 1 + # rn_aht_0 change + namtra_ldf: + rn_aht_0: 599 + ## geothermal heating globally + #nambbc: + # ln_trabbc: true + # nn_geoflx: 1 + # rn_geoflx_cst: 86.4e-3 # Constant value of geothermal heat flux [W/m2], this is the default + 1_namelist_cfg: + ## horizontal bilaplacian eddy viscosity [m4/s] reduced in nest c.f. 
default agrif + namdyn_ldf: + rn_ahm_0_blp: -6000000000.0 + ## geothermal heating in nest + # nambbc: + # ln_trabbc: true + # nn_geoflx: 1 + # rn_geoflx_cst: 86.4e-3 # Constant value of geothermal heat flux [W/m2], this is the default + +xios: + nproc: 36 diff --git a/runscripts/foci/SOCHIC/foci-agrif-1950_annual_restart_0.25xahm0_notracer_malin.yaml b/runscripts/foci/SOCHIC/foci-agrif-1950_annual_restart_0.25xahm0_notracer_malin.yaml new file mode 100755 index 000000000..756e79432 --- /dev/null +++ b/runscripts/foci/SOCHIC/foci-agrif-1950_annual_restart_0.25xahm0_notracer_malin.yaml @@ -0,0 +1,130 @@ +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "agrif" + homedir: !ENV ${HOME} + + compute_time: "06:30:00" + initial_date: "1951-01-01T00:00:00" # Initial exp. date + final_date: "1955-01-01T00:00:00" # Final date of the experiment + postprocessing: true + nmonth: 12 + clean_runs: 2 + + lresume: True + ini_parent_exp_id: "WG10_MO040" + ini_parent_date: "19501231" + ini_nemo_restart_steps: 17520 + ini_parent_dir: "/scratch/usr/shkifmsw/tmp/${ini_parent_exp_id}/" + #ini_parent_dir: "/scratch/usr/shkmalod/esm-experiments/${ini_parent_exp_id}/restart/" + + # machine specific setup + account: shk00018 + base_dir: ${homedir}/esm/esm-experiments/ + +foci: + model_dir: ${general.homedir}/esm/models/foci-agrif/ + +oasis3mct: + coupling_time_step: 3600 + +echam: + scenario: "1950" + resolution: "T63" + restart_rate: ${general.nmonth} + restart_unit: "months" + nproca: 16 + nprocb: 12 + namelist_changes: + namelist.echam: + runctl: + nproma: 96 + putocean: [ 8, 'steps', 'exact', -450 ] + getocean: [ 8, 'steps', 'exact', 0 ] + +nemo: +# + global_tag: "" + nest1: WG10v4 + #nest: NPAC10 + #nest: VIKING10 + #nest: INALT10x + agrif_dir: /scratch/usr/shkifmsw/foci_input2/AGRIF/agrif_${nest1} + #agrif_dir: /path/to/my/input_files/AGRIF/agrif_${nest} + resolution: "ORCA05" + version: "ORCA05_LIM2_FOCI_AGRIF" + restart_rate: ${general.nday} + restart_unit: 
"days" + jpni: 36 + jpnj: 24 + nproc: 864 + #jpni: 48 + #jpnj: 36 + #nproc: 1728 + # the brute-force approach to the hostfile_srun missing / empty file + #pre_run_commands: "cp -f ~/MO053_hostfile ${work_dir}/hostfile_srun" + #add_input_sources: + # manual_hostfile: ${general.homedir}/MO053_hostfile + #add_input_files: + # manual_hostfile: manual_hostfile + #add_input_in_work: + # manual_hostfile: hostfile_srun + # jpni and jpnj are 0 by default, i.e. automatic distribution on CPUs + # + # if an initial run does not work, or if you experience instabilities in NEMO or the NEST + # the following settings may help to get past those instabilities + add_namelist_changes: + namelist_cfg: + # output initial state + namrun: + nn_istate: 1 + ## horizontal bilaplacian eddy viscosity [m4/s] + ## number based on desired characteristic velocity (here, 12 cm/s) + namdyn_ldf: + rn_ahm_0_blp: -1.709e12 + # eddy diffusivity + namtra_ldf: + #iso-neutral diffusion + rn_aht_0: 605 + #switched back to 600 from 300 after 50 years + # scaling of "eddy-induced" velocities in G&M scheme + rn_aeiv_scale: 0.5 + namelist_top_cfg: + # eddy diffusivity seen by tracers + namtrc_ldf: + rn_ahtrc_0: 600 + # Note! rn_ahtrc_0 should match rn_aht_0 above + #namelist_ice_cfg: + # ice thickness for lateral accretion in the Northern (Southern) Hemisphere + #namicethd: + # hiccrit: [0.3, 1.0] + # ice dynamic: 1st bulk-rheology parameter + #namicedyn: + # pstar: 15000 + 1_namelist_cfg: + # error34_007: test if file based coupling + # setting is ignored!!!! + #namsbc: + # ln_echam: false + ## horizontal bilaplacian eddy viscosity [m4/s] reduced in nest c.f. default agrif + namdyn_ldf: + rn_ahm_0_blp: -1.37e10 + # eddy diffusivity in nest - set similar to ORCA12! + namtra_ldf: + rn_aht_0: 120 + 1_namelist_top_cfg: + # eddy diffusivity seen by tracers in nest + namtrc_ldf: + rn_ahtrc_0: 120 + # Note! 
rn_ahtrc_0 should match rn_aht_0 above + #1_namelist_ice_cfg: + # ice thickness for lateral accretion in the Northern (Southern) Hemisphere + #namicethd: + # hiccrit: [0.3, 1.0] + # ice dynamic: 1st bulk-rheology parameter + #namicedyn: + # pstar: 15000 + +xios: + nproc: 36 diff --git a/runscripts/foci/SOCHIC/foci-agrif-1950_annual_restart_aht0_ahm0_OCE2ATM_eiv_oasismct4_restart.yaml b/runscripts/foci/SOCHIC/foci-agrif-1950_annual_restart_aht0_ahm0_OCE2ATM_eiv_oasismct4_restart.yaml new file mode 100755 index 000000000..105113cb1 --- /dev/null +++ b/runscripts/foci/SOCHIC/foci-agrif-1950_annual_restart_aht0_ahm0_OCE2ATM_eiv_oasismct4_restart.yaml @@ -0,0 +1,134 @@ +#computer: +# additional_flags: "--qos=preempt" +# +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "agrif_oasismct4" + homedir: !ENV ${HOME} + + compute_time: "07:00:00" + initial_date: "1950-01-01T00:00:00" # Initial exp. date + final_date: "2150-01-01T00:00:00" # Final date of the experiment + postprocessing: True + nmonth: 12 + clean_runs: 2 + + # machine specific setup + account: shk00057 + base_dir: ${homedir}/esm/esm-experiments/ + + lresume: true + ini_parent_exp_id: "WG10_MO053" + ini_parent_date: "20491231" + ini_nemo_restart_steps: 1753200 + ini_parent_dir: "/scratch/usr/shkmalod/esm-experiments/${ini_parent_exp_id}/restart/" + +foci: + #model_dir: ${general.homedir}/esm/models/tmp/foci-agrif_oasismct4/ + model_dir: ${general.homedir}/esm/models/foci-agrif_oasismct4/ + +oasis3mct: + input_dir: "/scratch/usr/shkifmsw/foci_input2/OASIS3_ECHAM6T63_ORCA05/input/T63_ORCA05_${nemo.nest1}/" + coupling_time_step: 3600 + export_mode: EXPOUT + +echam: + scenario: "1950" + resolution: "T63" + restart_rate: ${general.nmonth} + restart_unit: "months" + nproca: 16 + nprocb: 12 + namelist_changes: + namelist.echam: + runctl: + nproma: 96 + putocean: [ 8, 'steps', 'exact', -450 ] + getocean: [ 8, 'steps', 'exact', 0 ] + +nemo: + nest1: WG10v4 + #nest: NPAC10 + #nest: VIKING10 + 
#nest: INALT10x + agrif_dir: /scratch/usr/shkifmsw/foci_input2/AGRIF/agrif_${nest1} + #agrif_dir: /path/to/my/input_files/AGRIF/agrif_${nest} + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + restart_rate: ${general.nday} + restart_unit: "days" + #jpni: 48 + #jpnj: 36 + #nproc: 1728 + jpni: 36 + jpnj: 24 + nproc: 864 + # the brute-force approach to the hostfile_srun missing / empty file + #pre_run_commands: "cp -f ~/FOCI2.2-SW230_hostfile ${work_dir}/hostfile_srun" + #add_input_sources: + # manual_hostfile: ${general.homedir}/MO053_hostfile + #add_input_files: + # manual_hostfile: manual_hostfile + #add_input_in_work: + # manual_hostfile: hostfile_srun + # jpni and jpnj are 0 by default, i.e. automatic distribution on CPUs + # + # if an initial run does not work, or if you experience instabilities in NEMO or the NEST + # the following settings may help to get past those instabilities + add_namelist_changes: + namelist_cfg: + # output initial state + namrun: + nn_istate: 1 + ## horizontal bilaplacian eddy viscosity [m4/s] + ## number based on desired characteristic velocity (here, 12 cm/s) + namdyn_ldf: + rn_ahm_0_blp: -1.709e12 + # eddy diffusivity + namtra_ldf: + #iso-neutral diffusion + rn_aht_0: 605 + #switched back to 600 from 300 after 50 years + # scaling of "eddy-induced" velocities in G&M scheme + rn_aeiv_scale: 0.5 + namelist_top_cfg: + # eddy diffusivity seen by tracers + namtrc_ldf: + rn_ahtrc_0: 600 + # Note! rn_ahtrc_0 should match rn_aht_0 above + #namelist_ice_cfg: + # ice thickness for lateral accretion in the Northern (Southern) Hemisphere + #namicethd: + # hiccrit: [0.3, 1.0] + # ice dynamic: 1st bulk-rheology parameter + #namicedyn: + # pstar: 15000 + 1_namelist_cfg: + # output initial state + namrun: + nn_istate: 1 + ## horizontal bilaplacian eddy viscosity [m4/s] + ## number based on desired characteristic velocity (here, 12 cm/s) + namdyn_ldf: + rn_ahm_0_blp: -1.37e10 + # eddy diffusivity in nest - set similar to ORCA12! 
+ namtra_ldf: + rn_aht_0: 120 + 1_namelist_top_cfg: + # eddy diffusivity seen by tracers in nest + namtrc_ldf: + rn_ahtrc_0: 120 + # Note! rn_ahtrc_0 should match rn_aht_0 above + #1_namelist_ice_cfg: + # ice thickness for lateral accretion in the Northern (Southern) Hemisphere + #namicethd: + # hiccrit: [0.3, 1.0] + # ice dynamic: 1st bulk-rheology parameter + #namicedyn: + # pstar: 15000 + +xios: + nproc: 36 + #xml_dir: /home/shkifmsw/esm/models/tmp/foci-agrif_oasismct4/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4/CONFIG/ORCA05_LIM2_KCM_AGRIF_OASISMCT4/EXP00 diff --git a/runscripts/foci/SOCHIC/foci-agrif-SW232.yaml b/runscripts/foci/SOCHIC/foci-agrif-SW232.yaml new file mode 100755 index 000000000..29dce1a6a --- /dev/null +++ b/runscripts/foci/SOCHIC/foci-agrif-SW232.yaml @@ -0,0 +1,162 @@ +#computer: +# additional_flags: "--qos=preempt" +# +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "agrif_oasismct4" + homedir: !ENV ${HOME} + + compute_time: "01:00:00" + #initial_date: "2049-01-06T00:00:00" # Initial exp. date + #final_date: "2049-02-01T00:00:00" # Final date of the experiment + #nday: 26 + #initial_date: "2049-04-01T00:00:00" # Initial exp. date + #final_date: "2050-01-01T00:00:00" # Final date of the experiment + #nmonth: 1 + initial_date: "2050-01-01T00:00:00" # Initial exp. 
date + final_date: "2055-01-01T00:00:00" # Final date of the experiment + nmonth: 12 + postprocessing: True + clean_runs: 2 + + # machine specific setup + account: shk00057 + base_dir: ${homedir}/esm/esm-experiments/ + + # TODO: BUG: settings are NOT used if ini_parent_exp_id and current expid are the same + lresume: true + #ini_parent_exp_id: "WG10_1day_300s" + #ini_parent_date: "20490105" + #ini_nemo_restart_steps: 1440 + #ini_parent_dir: "/scratch/usr/shkifmsw/esm-experiments/${ini_parent_exp_id}/restart/" + # + #ini_parent_exp_id: "WG10-SW232-2m-ts1200" + #ini_parent_date: "20490331" + #ini_nemo_restart_steps: 4248 + #ini_parent_dir: "/scratch/usr/shkifmsw/esm-experiments/${ini_parent_exp_id}/restart/" + # + ini_parent_exp_id: "WG10-SW232-1m-1200s" + ini_parent_date: "20491231" + ini_nemo_restart_steps: 19800 + #ini_parent_dir: "/scratch/usr/shkifmsw/esm-experiments/${ini_parent_exp_id}/restart/" + ini_parent_dir: "/scratch/usr/shkifmsw/foci_input2/FOCI_RESTART/${ini_parent_exp_id}/" + # + #ini_parent_exp_id: "WG10-SW232-9m-ts1200" + #ini_parent_date: "20491231" + # TODO: BUG: settings are NOT used if ini_parent_exp_id and current expid are the same + #ini_nemo_restart_steps: 99000 + #ini_parent_dir: "/scratch/usr/shkifmsw/esm-experiments/${ini_parent_exp_id}/restart/" + +computer: + partition: "standard96:test" + +foci: + #model_dir: ${general.homedir}/esm/models/tmp/foci-agrif_oasismct4/ + model_dir: ${general.homedir}/esm/models/foci-agrif_oasismct4/ + +oasis3mct: + input_dir: "/scratch/usr/shkifmsw/foci_input2/OASIS3_ECHAM6T63_ORCA05/input/T63_ORCA05_${nemo.nest1}/" + coupling_time_step: 3600 + #export_mode: EXPOUT + #export_mode_a2o: EXPOUT + +echam: + scenario: "1950" + resolution: "T63" + #restart_rate: ${general.nday} + #restart_unit: "days" + restart_rate: ${general.nmonth} + restart_unit: "months" + nproca: 16 + nprocb: 12 + namelist_changes: + namelist.echam: + runctl: + nproma: 96 + putocean: [ 8, 'steps', 'exact', -450 ] + getocean: [ 8, 'steps', 
'exact', 0 ] + +nemo: + time_step: 1200 + nest1: WG10v4 + agrif_dir: /scratch/usr/shkifmsw/foci_input2/AGRIF/agrif_${nest1} + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + jpni: 36 + jpnj: 24 + nproc: 864 + # the brute-force approach to the hostfile_srun missing / empty file + #pre_run_commands: "cp -f ~/FOCI2.2-SW230_hostfile ${work_dir}/hostfile_srun" + #add_input_sources: + # manual_hostfile: ${general.homedir}/MO053_hostfile + #add_input_files: + # manual_hostfile: manual_hostfile + #add_input_in_work: + # manual_hostfile: hostfile_srun + # jpni and jpnj are 0 by default, i.e. automatic distribution on CPUs + # + # if an initial run does not work, or if you experience instabilities in NEMO or the NEST + # the following settings may help to get past those instabilities + add_namelist_changes: + namelist_cfg: + # output initial state + namrun: + nn_istate: 1 + ## horizontal bilaplacian eddy viscosity [m4/s] + ## number based on desired characteristic velocity (here, 12 cm/s) + namdyn_ldf: + rn_ahm_0_blp: -1.709e12 + # eddy diffusivity + namtra_ldf: + #iso-neutral diffusion + rn_aht_0: 605 + #switched back to 600 from 300 after 50 years + # scaling of "eddy-induced" velocities in G&M scheme + rn_aeiv_scale: 0.5 + namelist_top_cfg: + # eddy diffusivity seen by tracers + namtrc_ldf: + rn_ahtrc_0: 600 + # Note! rn_ahtrc_0 should match rn_aht_0 above + #namelist_ice_cfg: + # ice thickness for lateral accretion in the Northern (Southern) Hemisphere + #namicethd: + # hiccrit: [0.3, 1.0] + # ice dynamic: 1st bulk-rheology parameter + #namicedyn: + # pstar: 15000 + 1_namelist_cfg: + # output initial state + namrun: + nn_istate: 1 + ## horizontal bilaplacian eddy viscosity [m4/s] + ## number based on desired characteristic velocity (here, 12 cm/s) + namdyn_ldf: + rn_ahm_0_blp: -1.37e10 + # eddy diffusivity in nest - set similar to ORCA12! 
+ namtra_ldf: + rn_aht_0: 120 + 1_namelist_top_cfg: + # eddy diffusivity seen by tracers in nest + namtrc_ldf: + rn_ahtrc_0: 120 + # Note! rn_ahtrc_0 should match rn_aht_0 above + #1_namelist_ice_cfg: + # ice thickness for lateral accretion in the Northern (Southern) Hemisphere + #namicethd: + # hiccrit: [0.3, 1.0] + # ice dynamic: 1st bulk-rheology parameter + #namicedyn: + # pstar: 15000 + # + #choose_lresume: + # true: + # choose_general.run_number: + # 1: + # global_tag: "" + +xios: + nproc: 36 + #xml_dir: /home/shkifmsw/esm/models/tmp/foci-agrif_oasismct4/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4/CONFIG/ORCA05_LIM2_KCM_AGRIF_OASISMCT4/EXP00 diff --git a/runscripts/foci/SOCHIC/foci-agrif_WG10_oasismct4_newrestarts.yaml b/runscripts/foci/SOCHIC/foci-agrif_WG10_oasismct4_newrestarts.yaml new file mode 100755 index 000000000..788c285c4 --- /dev/null +++ b/runscripts/foci/SOCHIC/foci-agrif_WG10_oasismct4_newrestarts.yaml @@ -0,0 +1,136 @@ +#computer: +# additional_flags: "--qos=preempt" +# +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "agrif_oasismct4" + homedir: !ENV ${HOME} + + partition: "standard96:test" + compute_time: "00:15:00" + initial_date: "2049-01-01T00:00:00" # Initial exp. 
date + final_date: "2049-01-06T00:00:00" # Final date of the experiment + postprocessing: False + nday: 1 + + # machine specific setup + account: shk00057 + base_dir: ${homedir}/esm/esm-experiments/ + + lresume: true + ini_parent_exp_id: "WG10_MO053" + ini_parent_date: "20491231" + ini_nemo_restart_steps: 1753200 + ini_parent_dir: "/scratch/usr/shkmalod/esm-experiments/${ini_parent_exp_id}/restart/" + +foci: + #model_dir: ${general.homedir}/esm/models/tmp/foci-agrif_oasismct4/ + model_dir: ${general.homedir}/esm/models/foci-agrif_oasismct4/ + +oasis3mct: + input_dir: "/scratch/usr/shkifmsw/foci_input2/OASIS3_ECHAM6T63_ORCA05/input/T63_ORCA05_${nemo.nest1}/" + coupling_time_step: 3600 + #export_mode: EXPOUT + #export_mode_a2o: EXPOUT + +echam: + scenario: "1950" + resolution: "T63" + restart_rate: ${general.nday} + restart_unit: "days" + nproca: 16 + nprocb: 12 + namelist_changes: + namelist.echam: + runctl: + nproma: 96 + putocean: [ 8, 'steps', 'exact', -450 ] + getocean: [ 8, 'steps', 'exact', 0 ] + +nemo: + time_step: 300 + nest1: WG10v4 + #nest: NPAC10 + #nest: VIKING10 + #nest: INALT10x + agrif_dir: /scratch/usr/shkifmsw/foci_input2/AGRIF/agrif_${nest1} + #agrif_dir: /path/to/my/input_files/AGRIF/agrif_${nest} + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + restart_rate: ${general.nday} + restart_unit: "days" + #jpni: 48 + #jpnj: 36 + #nproc: 1728 + jpni: 36 + jpnj: 24 + nproc: 864 + # the brute-force approach to the hostfile_srun missing / empty file + #pre_run_commands: "cp -f ~/FOCI2.2-SW230_hostfile ${work_dir}/hostfile_srun" + #add_input_sources: + # manual_hostfile: ${general.homedir}/MO053_hostfile + #add_input_files: + # manual_hostfile: manual_hostfile + #add_input_in_work: + # manual_hostfile: hostfile_srun + # jpni and jpnj are 0 by default, i.e. 
automatic distribution on CPUs + # + # if an initial run does not work, or if you experience instabilities in NEMO or the NEST + # the following settings may help to get past those instabilities + add_namelist_changes: + namelist_cfg: + # output initial state + namrun: + nn_istate: 1 + ## horizontal bilaplacian eddy viscosity [m4/s] + ## number based on desired characteristic velocity (here, 12 cm/s) + namdyn_ldf: + rn_ahm_0_blp: -1.709e12 + # eddy diffusivity + namtra_ldf: + #iso-neutral diffusion + rn_aht_0: 605 + #switched back to 600 from 300 after 50 years + # scaling of "eddy-induced" velocities in G&M scheme + rn_aeiv_scale: 0.5 + namelist_top_cfg: + # eddy diffusivity seen by tracers + namtrc_ldf: + rn_ahtrc_0: 600 + # Note! rn_ahtrc_0 should match rn_aht_0 above + #namelist_ice_cfg: + # ice thickness for lateral accretion in the Northern (Southern) Hemisphere + #namicethd: + # hiccrit: [0.3, 1.0] + # ice dynamic: 1st bulk-rheology parameter + #namicedyn: + # pstar: 15000 + 1_namelist_cfg: + # output initial state + namrun: + nn_istate: 1 + ## horizontal bilaplacian eddy viscosity [m4/s] + ## number based on desired characteristic velocity (here, 12 cm/s) + namdyn_ldf: + rn_ahm_0_blp: -1.37e10 + # eddy diffusivity in nest - set similar to ORCA12! + namtra_ldf: + rn_aht_0: 120 + 1_namelist_top_cfg: + # eddy diffusivity seen by tracers in nest + namtrc_ldf: + rn_ahtrc_0: 120 + # Note! 
rn_ahtrc_0 should match rn_aht_0 above + #1_namelist_ice_cfg: + # ice thickness for lateral accretion in the Northern (Southern) Hemisphere + #namicethd: + # hiccrit: [0.3, 1.0] + # ice dynamic: 1st bulk-rheology parameter + #namicedyn: + # pstar: 15000 + +xios: + nproc: 36 + #xml_dir: /home/shkifmsw/esm/models/tmp/foci-agrif_oasismct4/nemo-ORCA05_LIM2_KCM_AGRIF_OASISMCT4/CONFIG/ORCA05_LIM2_KCM_AGRIF_OASISMCT4/EXP00 diff --git a/runscripts/foci/exp-foci2.2/FOCI2.2-SW220.yaml b/runscripts/foci/exp-foci2.2/FOCI2.2-SW220.yaml new file mode 100755 index 000000000..0b9df61a9 --- /dev/null +++ b/runscripts/foci/exp-foci2.2/FOCI2.2-SW220.yaml @@ -0,0 +1,68 @@ +# nesh only +# It is currently under investigation whether the extra +# #SBATCH flags below improve model performance +# Sebastian Wahl 05/2021 +# uncomment below if you run on nesh at CAU +#computer: +# additional_flags: +# - --mem=72000 +# - --constraint="cascade" + +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "fs_oasismct4" + homedir: !ENV ${HOME} + + compute_time: "01:30:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "2700-01-01T00:00:00" # Final date of the experiment + postprocessing: true + scenario: "PI-CTRL" + nmonth: 12 + clean_runs: 2 + + # machine specific setup + # nesh: comment account + account: shk00018 + base_dir: ${homedir}/esm/esm-experiments/ + +foci: + model_dir: ${general.homedir}/esm/models/foci-fs_oasismct4/ + +echam: + resolution: "T63" + restart_rate: ${general.nmonth} + restart_unit: "months" + nproca: 24 + nprocb: 24 + # parallel I/O, off by default + #nprocio: 6 + #namelist_changes: + # namelist.echam: + # parctl: + # iomode: 2 + # nprocio: ${nprocio} + +nemo: + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" + jpni: 24 + jpnj: 24 + nproca: 24 + nprocb: 24 + nproc: 576 + add_namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: 0.5 + namdyn_ldf: + rn_cmsmag_2: 4 + namsbc_cpl: + sn_rcv_rnf: ['none', 'no', '', '', ''] + sn_rcv_emp: ['kcm_lim_2', 'no', '', '', ''] + +xios: + nproc: 12 diff --git a/runscripts/foci/exp-foci2.2/FOCI2.2-SW221.yaml b/runscripts/foci/exp-foci2.2/FOCI2.2-SW221.yaml new file mode 100755 index 000000000..36b11a99b --- /dev/null +++ b/runscripts/foci/exp-foci2.2/FOCI2.2-SW221.yaml @@ -0,0 +1,83 @@ +# nesh only +# It is currently under investigation whether the extra +# #SBATCH flags below improve model performance +# Sebastian Wahl 05/2021 +# uncomment below if you run on nesh at CAU +#computer: +# additional_flags: +# - --mem=72000 +# - --constraint="cascade" + +general: + + # machine specific setup + # nesh: comment account + account: shk00057 + base_dir: ${homedir}/esm/esm-experiments/ + + use_venv: False + verbose: False + setup_name: "foci" + version: "fs_oasismct4" + homedir: !ENV ${HOME} + + compute_time: "01:30:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "2000-01-01T00:00:00" # Final date of the experiment + postprocessing: true + scenario: "PI-CTRL" + nmonth: 12 + clean_runs: 2 + + lresume: True + ini_parent_exp_id: "FOCI2.2-SW220" + ini_parent_date: "26991231" + ini_nemo_restart_steps: 14901888 + ini_parent_dir: "${base_dir}/${ini_parent_exp_id}/restart" + +foci: + model_dir: ${general.homedir}/esm/models/foci-fs_oasismct4/ + +echam: + resolution: "T63" + lresume: ${general.lresume} + restart_rate: ${general.nmonth} + restart_unit: "months" + nproca: 24 + nprocb: 24 + # parallel I/O, off by default + #nprocio: 6 + #namelist_changes: + # namelist.echam: + # parctl: + # iomode: 2 + # nprocio: ${nprocio} + +nemo: + pre_run_commands: "cp -f ~/FOCI2.2-SW221_hostfile ${work_dir}/hostfile_srun" + global_tag: "" + choose_lresume: + true: + choose_general.run_number: + 1: + global_tag: "" + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" + jpni: 24 + jpnj: 24 + nproca: 24 + nprocb: 24 + nproc: 576 + add_namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: 0.5 + namdyn_ldf: + rn_cmsmag_2: 4 + namsbc_cpl: + sn_rcv_rnf: ['none', 'no', '', '', ''] + sn_rcv_emp: ['kcm_lim_2', 'no', '', '', ''] + +xios: + nproc: 12 diff --git a/runscripts/foci/exp-foci2.2/FOCI2.2-SW222.yaml b/runscripts/foci/exp-foci2.2/FOCI2.2-SW222.yaml new file mode 100755 index 000000000..14273f598 --- /dev/null +++ b/runscripts/foci/exp-foci2.2/FOCI2.2-SW222.yaml @@ -0,0 +1,86 @@ +# nesh only +# It is currently under investigation whether the extra +# #SBATCH flags below improve model performance +# Sebastian Wahl 05/2021 +# uncomment below if you run on nesh at CAU +#computer: +# additional_flags: +# - --mem=72000 +# - --constraint="cascade" + +general: + + # machine specific setup + # nesh: comment account + account: shk00057 + base_dir: ${homedir}/esm/esm-experiments/ + + use_venv: False + verbose: False + setup_name: "foci" + version: "fs_oasismct4" + homedir: !ENV ${HOME} + + 
compute_time: "01:30:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. date + final_date: "2000-01-01T00:00:00" # Final date of the experiment + postprocessing: true + scenario: "1percCO2" + nmonth: 12 + clean_runs: 2 + + lresume: True + ini_parent_exp_id: "FOCI2.2-SW220" + ini_parent_date: "26991231" + ini_nemo_restart_steps: 14901888 + ini_parent_dir: "${base_dir}/${ini_parent_exp_id}/restart" + +foci: + model_dir: ${general.homedir}/esm/models/foci-fs_oasismct4/ + +echam: + resolution: "T63" + lresume: ${general.lresume} + restart_rate: ${general.nmonth} + restart_unit: "months" + scenario: ${general.scenario} + nproca: 24 + nprocb: 24 + # parallel I/O, off by default + #nprocio: 6 + namelist_changes: + namelist.echam: + runctl: + nproma: 8 + # parctl: + # iomode: 2 + # nprocio: ${nprocio} + +nemo: + pre_run_commands: "cp -f ~/FOCI2.2-SW221_hostfile ${work_dir}/hostfile_srun" + global_tag: "" + choose_lresume: + true: + choose_general.run_number: + 1: + global_tag: "" + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" + jpni: 24 + jpnj: 24 + nproca: 24 + nprocb: 24 + nproc: 576 + add_namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: 0.5 + namdyn_ldf: + rn_cmsmag_2: 4 + namsbc_cpl: + sn_rcv_rnf: ['none', 'no', '', '', ''] + sn_rcv_emp: ['kcm_lim_2', 'no', '', '', ''] + +xios: + nproc: 12 diff --git a/runscripts/foci/exp-foci2.2/FOCI2.2-SW223.yaml b/runscripts/foci/exp-foci2.2/FOCI2.2-SW223.yaml new file mode 100755 index 000000000..99fbf457f --- /dev/null +++ b/runscripts/foci/exp-foci2.2/FOCI2.2-SW223.yaml @@ -0,0 +1,88 @@ +# nesh only +# It is currently under investigation whether the extra +# #SBATCH flags below improve model performance +# Sebastian Wahl 05/2021 +# uncomment below if you run on nesh at CAU +#computer: +# additional_flags: +# - --mem=72000 +# - --constraint="cascade" + +general: + + # machine specific setup + # nesh: comment account + account: shk00057 + base_dir: 
${homedir}/esm/esm-experiments/ + + use_venv: False + verbose: False + setup_name: "foci" + version: "fs_oasismct4" + homedir: !ENV ${HOME} + + compute_time: "01:30:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. date + final_date: "2000-01-01T00:00:00" # Final date of the experiment + postprocessing: true + scenario: "PI-CTRL" + nmonth: 12 + clean_runs: 2 + + lresume: True + ini_parent_exp_id: "FOCI2.2-SW220" + ini_parent_date: "26991231" + ini_nemo_restart_steps: 14901888 + ini_parent_dir: "${base_dir}/${ini_parent_exp_id}/restart" + +foci: + model_dir: ${general.homedir}/esm/models/foci-fs_oasismct4/ + +echam: + resolution: "T63" + lresume: ${general.lresume} + restart_rate: ${general.nmonth} + restart_unit: "months" + scenario: ${general.scenario} + nproca: 24 + nprocb: 24 + # parallel I/O, off by default + #nprocio: 6 + namelist_changes: + namelist.echam: + runctl: + nproma: 8 + radctl: + co2vmr: 0.001137267944336 + # parctl: + # iomode: 2 + # nprocio: ${nprocio} + +nemo: + pre_run_commands: "cp -f ~/FOCI2.2-SW221_hostfile ${work_dir}/hostfile_srun" + global_tag: "" + choose_lresume: + true: + choose_general.run_number: + 1: + global_tag: "" + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" + jpni: 24 + jpnj: 24 + nproca: 24 + nprocb: 24 + nproc: 576 + add_namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: 0.5 + namdyn_ldf: + rn_cmsmag_2: 4 + namsbc_cpl: + sn_rcv_rnf: ['none', 'no', '', '', ''] + sn_rcv_emp: ['kcm_lim_2', 'no', '', '', ''] + +xios: + nproc: 12 diff --git a/runscripts/foci/exp-foci2.2/FOCI2.2-SW224.yaml b/runscripts/foci/exp-foci2.2/FOCI2.2-SW224.yaml new file mode 100755 index 000000000..8daf2b0d1 --- /dev/null +++ b/runscripts/foci/exp-foci2.2/FOCI2.2-SW224.yaml @@ -0,0 +1,86 @@ +# nesh only +# It is currently under investigation whether the extra +# #SBATCH flags below improve model performance +# Sebastian Wahl 05/2021 +# uncomment below if you run on nesh at CAU +#computer: +# 
additional_flags: +# - --mem=72000 +# - --constraint="cascade" + +general: + + # machine specific setup + # nesh: comment account + account: shk00057 + base_dir: ${homedir}/esm/esm-experiments/ + + use_venv: False + verbose: False + setup_name: "foci" + version: "fs_oasismct4" + homedir: !ENV ${HOME} + + compute_time: "01:30:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. date + final_date: "2014-01-01T00:00:00" # Final date of the experiment + postprocessing: true + scenario: "cmip6hist" + nmonth: 12 + clean_runs: 2 + + lresume: True + ini_parent_exp_id: "FOCI2.2-SW220" + ini_parent_date: "26991231" + ini_nemo_restart_steps: 14901888 + ini_parent_dir: "${base_dir}/${ini_parent_exp_id}/restart" + +foci: + model_dir: ${general.homedir}/esm/models/foci-fs_oasismct4/ + +echam: + resolution: "T63" + lresume: ${general.lresume} + restart_rate: ${general.nmonth} + restart_unit: "months" + scenario: ${general.scenario} + nproca: 24 + nprocb: 24 + # parallel I/O, off by default + #nprocio: 6 + namelist_changes: + namelist.echam: + runctl: + nproma: 8 + # parctl: + # iomode: 2 + # nprocio: ${nprocio} + +nemo: + pre_run_commands: "cp -f ~/FOCI2.2-SW221_hostfile ${work_dir}/hostfile_srun" + global_tag: "" + choose_lresume: + true: + choose_general.run_number: + 1: + global_tag: "" + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" + jpni: 24 + jpnj: 24 + nproca: 24 + nprocb: 24 + nproc: 576 + add_namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: 0.5 + namdyn_ldf: + rn_cmsmag_2: 4 + namsbc_cpl: + sn_rcv_rnf: ['none', 'no', '', '', ''] + sn_rcv_emp: ['kcm_lim_2', 'no', '', '', ''] + +xios: + nproc: 12 diff --git a/runscripts/foci/foci-agrif-mops-restart-pictl_run.yaml b/runscripts/foci/foci-agrif-mops-restart-pictl_run.yaml new file mode 100755 index 000000000..7156d0f8f --- /dev/null +++ b/runscripts/foci/foci-agrif-mops-restart-pictl_run.yaml @@ -0,0 +1,141 @@ +computer: + pool_dir: "/scratch/usr/shkifmsw/foci_input2/" 
+general: + use_venv: False + verbose: True + setup_name: "foci" + version: "agrif_mops_oasismct4" + homedir: !ENV ${HOME} + + # operational settings: yearly restarts + compute_time: "00:20:00" + initial_date: "1900-01-01T00:00:00" # Initial exp. date + final_date: "1940-01-01T00:00:00" # Final date of the experiment + nmonth: 12 + postprocessing: 1 + clean_runs: 2 + # test settings, daily restarts + #compute_time: "00:03:00" + #initial_date: "1900-01-01T00:00:00" # Initial exp. date + #final_date: "1900-01-04T00:00:00" # Final date of the experiment + #nday: 1 + #postprocessing: 0 + + # machine specific setup + account: shktkeme + base_dir: ${homedir}/esm/esm-experiments/ + + lresume: True + # test data provided by Tronje Kemena + ini_parent_exp_id: "FOCI2.0-TK105_FAM_ESM_piControl_2129TK012" + ini_parent_date: "18991231" + ini_nemo_restart_steps: 876576 + #ini_parent_dir: "/scratch/usr/shktkeme/foci_input/FOCI-AGRIF-MOPS-OASISMCT4/${ini_parent_exp_id}/" + ini_parent_dir: "/scratch/usr/shkifmsw/foci_input2/FOCI_RESTART/${ini_parent_exp_id}/" + +foci: + model_dir: ${general.homedir}/esm/models/foci-agrif_mops_oasismct4/ + +# set input_dir for oasis grids, masks and areas +# only required as long as we generate the remapping files on the fly +oasis3mct: + input_dir: "/scratch/usr/shkifmsw/foci_input2/OASIS3_ECHAM6T63_ORCA05/input/T63_ORCA05_${nemo.nest1}/" + # test settings, write out coupling files + # export_mode_a2o: "EXPOUT" + #export_mode_a2o: "EXPOUT" + #export_mode: "EXPOUT" + debug_level: 0 + +echam: + # all ssp* scenarios use historical forcing before 2015, i.e. the setting + # scenario: "cmip6hist" and # scenario: "ssp???" 
produce the same results + # until year 2014 + # available scenarios: PI-CTRL, 1percCO2, ssp126, ssp245, ssp370, ssp585, ssp585os + # technically tested: PI-CTRL, cmip6hist, ssp370 and ssp585 + # seb-wahl, 2021-08-11 + scenario: "PI-CTRL" + resolution: "T63" + # operational settings: yearly restarts + restart_rate: ${general.nmonth} + restart_unit: "months" + # test settings, daily restarts + #restart_rate: ${general.nday} + #restart_unit: "days" + nproca: 12 + nprocb: 16 + nprocio: 0 + namelist_changes: + namelist.echam: + parctl: + iomode: 0 + nprocio: ${nprocio} + radctl: + iaero: 3 + io3: 4 + isolrad: 6 + ich4: 3 + in2o: 3 + ico2: 1 + co2vmr: 284.3169860840e-06 + ch4vmr: 808.2490234375e-09 + n2ovmr: 273.0210571289e-09 + yr_perp: 1850 + +nemo: + # set your own agrif input directory + agrif_dir: /scratch/usr/shktkeme/foci_input/AGRIF/agrif_VIKING10/host_bathy_meter_nest_smoothed_for_TK011 + nest1: VIKING10 + resolution: "ORCA05" + # overwrite default time step of 1800s + time_step: 1800 + version: "ORCA05_LIM2_FOCI_AGRIF_MOPS_OASISMCT4" + jpni: 36 + jpnj: 24 + nproc: 864 + # jpni and jpnj are 0 by default, i.e. automatic distribution on CPUs + # alkalinity masks + namelist_changes: + namelist_top_cfg: + namtrc_rad: + ln_trcrad: .false. + 1_namelist_top_cfg: + namtrc_rad: + ln_trcrad: .false. 
+ namelist_cfg: + namzgr: + cn_batmeter: "bathy_meter.nc" + + 1_namelist_cfg: + namsbc_cpl: + sn_snd_co2: "coupled" + sn_rcv_co2: "coupled" + namlbc: + rn_shlat: 0.0 + namnc4: + nn_nchunks_k: 46 + #nn_rstctl does not work in 1_namelist_cfg + #namelist_changes: + # namelist_cfg: + # namsbc: + # ln_alk: true + #forcing_dir: /scratch/usr/shktkeme/foci_input/alk_mask/CoastEU/ + #forcing_sources: + # alkalinity: ${forcing_dir}/alkalinity_mask_y@YEAR@.nc + #forcing_files: + # alkalinity: alkalinity + add_input_files: + batmeter: bathy_meter + cn_batmeter: bathy_meter + 1_batmeter: 1_bathy_meter + 1_cn_batmeter: 1_bathy_meter + input_sources: + coordinates: ${agrif_dir}/coordinates.nc + bathy_meter: ${agrif_dir}/bathy_meter.nc + 1_bathy_meter: ${agrif_dir}/1_bathy_meter.nc + 1_coordinates: ${agrif_dir}/1_coordinates_ORCA05.nc + fixed_grids: ${agrif_dir}/AGRIF_FixedGrids.in + cordinates: ${agrif_dir}/coordinates_ORCA05.nc + + +xios: + nproc: 48 diff --git a/runscripts/foci/foci-agrif-oasismct4-initial-piCtl.yaml b/runscripts/foci/foci-agrif-oasismct4-initial-piCtl.yaml index 6c0ac674c..12aa90737 100755 --- a/runscripts/foci/foci-agrif-oasismct4-initial-piCtl.yaml +++ b/runscripts/foci/foci-agrif-oasismct4-initial-piCtl.yaml @@ -11,7 +11,7 @@ general: # operational settings: yearly restarts compute_time: "06:35:00" initial_date: "1850-01-01T00:00:00" - final_date: "1852-01-01T00:00:00" + final_date: "1862-01-01T00:00:00" postprocessing: 0 nmonth: 12 # test settings: 5 daily restarts @@ -28,9 +28,9 @@ foci: model_dir: ${general.homedir}/esm/models/foci-agrif_oasismct4/ # set input_dir for oasis grids, masks and areas -# until everything is finalized +# only required as long as we generate the remapping files on the fly oasis3mct: - input_dir: "/scratch/usr/shkifmsw/foci_input2/OASIS3_ECHAM6T63_ORCA05/input/T63_ORCA05_VIKING10/" + input_dir: "/scratch/usr/shkifmsw/foci_input2/OASIS3_ECHAM6T63_ORCA05/input/T63_ORCA05_${nemo.nest1}/" # test settings, write out coupling files # 
export_mode_a2o: "EXPOUT" @@ -50,8 +50,6 @@ nemo: # set your own agrif input directory #agrif_dir: /path/to/my/input_files/AGRIF/agrif_${nest} nest1: VIKING10 - #nest1: INALT10x - #nest1: NPAC10 resolution: "ORCA05" version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" jpni: 36 diff --git a/runscripts/foci/foci-fs-initial-piCtl_daily_restart_lowcpu.yaml b/runscripts/foci/foci-fs-initial-piCtl_daily_restart_lowcpu.yaml new file mode 100755 index 000000000..2029de868 --- /dev/null +++ b/runscripts/foci/foci-fs-initial-piCtl_daily_restart_lowcpu.yaml @@ -0,0 +1,51 @@ +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "fs" + homedir: !ENV ${HOME} + + compute_time: "00:15:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. date + final_date: "1850-01-04T00:00:00" # Final date of the experiment + postprocessing: 0 + nday: 1 + + # machine specific setup + account: shk00018 + base_dir: ${homedir}/esm/esm-experiments/ + +foci: + model_dir: ${general.homedir}/esm/models/foci-fs/ + +oasis3mct: + coupling_time_step: 10800 + +echam: + scenario: "PI-CTRL" + resolution: "T63" + restart_rate: ${general.nday} + restart_unit: "days" + nproca: 8 + nprocb: 4 + namelist_changes: + namelist.echam: + runctl: + nproma: 96 + putocean: [ 24, 'steps', 'exact', -450 ] + getocean: [ 24, 'steps', 'exact', 0 ] + +nemo: + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AOW_FS" + nproc: 24 + # jpni and jpnj are 0 by default, i.e. 
automatic distribution on CPUs + # + # if an initial run does not work, or if you experience instabilities in NEMO or the NEST + # the following settings may help to get past those instabilities + #namelist_changes: + # namelist_cfg: + # namtra_ldf: + # rn_aht_0: 599 +xios: + nproc: 8 diff --git a/runscripts/foci/foci-fs_oasismct4_initial-piCtl_yearly_restart.yaml b/runscripts/foci/foci-fs_oasismct4_initial-piCtl_yearly_restart.yaml new file mode 100755 index 000000000..5663e361c --- /dev/null +++ b/runscripts/foci/foci-fs_oasismct4_initial-piCtl_yearly_restart.yaml @@ -0,0 +1,68 @@ +# nesh only +# It is currently under investigation whether the extra +# #SBATCH flags below improve model performance +# Sebastian Wahl 05/2021 +# uncomment below if you run on nesh at CAU +#computer: +# additional_flags: +# - --mem=72000 +# - --constraint="cascade" + +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "fs_oasismct4" + homedir: !ENV ${HOME} + + compute_time: "01:30:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "1852-01-01T00:00:00" # Final date of the experiment + postprocessing: true + scenario: "PI-CTRL" + nmonth: 12 + clean_runs: 2 + + # machine specific setup + # nesh: comment account + account: shk00018 + base_dir: ${homedir}/esm/esm-experiments/ + +foci: + model_dir: ${general.homedir}/esm/models/foci-fs_oasismct4/ + +echam: + resolution: "T63" + restart_rate: ${general.nmonth} + restart_unit: "months" + nproca: 24 + nprocb: 24 + # parallel I/O, off by default + #nprocio: 6 + #namelist_changes: + # namelist.echam: + # parctl: + # iomode: 2 + # nprocio: ${nprocio} + +nemo: + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AOW_FS_OASISMCT4" + jpni: 24 + jpnj: 24 + nproca: 24 + nprocb: 24 + nproc: 576 + add_namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: 0.5 + namdyn_ldf: + rn_cmsmag_2: 4 + namsbc_cpl: + sn_rcv_rnf: ['none', 'no', '', '', ''] + sn_rcv_emp: ['kcm_lim_2', 'no', '', '', ''] + +xios: + nproc: 12 diff --git a/runscripts/foci/foci-initial-piCtl_daily_restart_lowcpu.yaml b/runscripts/foci/foci-initial-piCtl_daily_restart_lowcpu.yaml index 773c2cc49..6023c6310 100755 --- a/runscripts/foci/foci-initial-piCtl_daily_restart_lowcpu.yaml +++ b/runscripts/foci/foci-initial-piCtl_daily_restart_lowcpu.yaml @@ -1,3 +1,5 @@ +computer: + pool_dir: "/scratch/usr/shkifmsw/foci_input2/" general: use_venv: False verbose: False @@ -12,7 +14,7 @@ general: nday: 1 # machine specific setup - account: shk00018 + account: shk00060 base_dir: ${homedir}/esm/esm-experiments/ foci: diff --git a/runscripts/foci/foci-initial-piCtl_yearly_restart.yaml b/runscripts/foci/foci-initial-piCtl_yearly_restart.yaml index b1df852cc..c0e2e2b50 100755 --- a/runscripts/foci/foci-initial-piCtl_yearly_restart.yaml +++ b/runscripts/foci/foci-initial-piCtl_yearly_restart.yaml @@ -1,13 +1,5 @@ -# nesh only -# It is currently under investigation whether the extra -# #SBATCH flags below improve model performance -# Sebastian Wahl 05/2021 -# uncomment 
below if you run on nesh at CAU -#computer: -# additional_flags: -# - --mem=72000 -# - --constraint="cascade" - +computer: + pool_dir: "/scratch/usr/shkifmsw/foci_input2/" general: use_venv: False verbose: False @@ -15,7 +7,7 @@ general: version: "default" homedir: !ENV ${HOME} - compute_time: "01:30:00" + compute_time: "00:59:00" initial_date: "1850-01-01T00:00:00" # Initial exp. date final_date: "1852-01-01T00:00:00" # Final date of the experiment postprocessing: true @@ -25,7 +17,7 @@ general: # machine specific setup # nesh: comment account - account: shk00018 + account: shk00060 base_dir: ${homedir}/esm/esm-experiments/ foci: diff --git a/runscripts/foci/foci-mops-restart-scenario_run.yaml b/runscripts/foci/foci-mops-restart-scenario_run.yaml new file mode 100755 index 000000000..bc3a10b88 --- /dev/null +++ b/runscripts/foci/foci-mops-restart-scenario_run.yaml @@ -0,0 +1,95 @@ +computer: + pool_dir: "/scratch/usr/shkifmsw/foci_input2/" +general: + use_venv: False + verbose: True + setup_name: "foci" + version: "mops_oasismct4" + homedir: !ENV ${HOME} + + # operational settings: yearly restarts + #compute_time: "00:15:00" + #initial_date: "2015-01-01T00:00:00" # Initial exp. date + #final_date: "2016-01-01T00:00:00" # Final date of the experiment + #nmonth: 12 + #clean_runs: 2 + # test settings, daily restarts + compute_time: "00:15:00" + initial_date: "2015-01-01T00:00:00" # Initial exp. 
date + final_date: "2015-01-04T00:00:00" # Final date of the experiment + nday: 1 + + postprocessing: False + # machine specific setup + account: shk00060 + base_dir: ${homedir}/esm/esm-experiments/ + + lresume: True + ini_parent_exp_id: "FOCI1.20.0-CC105_RCP_ESM_spinup2099" + ini_parent_date: "20141231" + ini_nemo_restart_steps: 2410600 + #ini_parent_dir: "/scratch/usr/shktkeme/foci_input/FOCI-MOPS/${ini_parent_exp_id}" + ini_parent_dir: "/scratch/usr/shkifmsw/foci_input2/FOCI_RESTART/${ini_parent_exp_id}" + + #oasis3mct: + # export_mode_a2o: "EXPOUT" + # export_mode: "EXPOUT" + # debug_level: 0 + +foci: + model_dir: ${general.homedir}/esm/models/foci-mops_oasismct4/ + +echam: + # all ssp* scenarios use historical forcing before 2015, i.e. the setting + # scenario: "cmip6hist" and # scenario: "ssp???" produce the same results + # until year 2014 + # available scenarios: 1percCO2, ssp126, ssp245, ssp370, ssp585, ssp585os + # technically tested: cmip6hist, ssp370 and ssp585 + # seb-wahl, 2021-08-11 + scenario: "ssp370" + resolution: "T63" + # operational settings: yearly restarts + #restart_rate: ${general.nmonth} + #restart_unit: "months" + # test settings, daily restarts + restart_rate: ${general.nday} + restart_unit: "days" + nproca: 24 + nprocb: 24 + nprocio: 12 + # sets the namelist parameters, and links the correct + # carbon_emission.nc file + with_lco2_emis: True + namelist_changes: + namelist.echam: + parctl: + iomode: 2 + nprocio: ${nprocio} + #co2ctl: + #lco2_emis: true + #lco2_2perc: true + submodelctl: + lmethox: true +nemo: + resolution: "ORCA05" + # overwrite default time step of 1800s + time_step: 2160 + version: "ORCA05_LIM2_FOCI_MOPS_OASISMCT4" + jpni: 28 + jpnj: 24 + nproc: 672 + # jpni and jpnj are 0 by default, i.e. automatic distribution on CPUs + # alkalinity masks + namelist_changes: + namelist_top_cfg: + namtrc_rad: + ln_trcrad: .false. 
+ #ln_alk: false + #forcing_dir: /scratch/usr/shktkeme/foci_input/alk_mask/CoastEU/ + #forcing_sources: + #alkalinity: ${forcing_dir}/alkalinity_mask_y@YEAR@.nc + #forcing_files: + #alkalinity: alkalinity + +xios: + nproc: 12 diff --git a/runscripts/foci/foci-mops-ssp-scenario-restart-2014-ESM.yaml b/runscripts/foci/foci-mops-ssp-scenario-restart-2014-ESM.yaml new file mode 100755 index 000000000..b9eca9858 --- /dev/null +++ b/runscripts/foci/foci-mops-ssp-scenario-restart-2014-ESM.yaml @@ -0,0 +1,112 @@ +computer: + pool_dir: "/scratch/usr/shkifmsw/foci_input2/" +general: + use_venv: False + verbose: True + setup_name: "foci" + version: "mops_oasismct4" + homedir: !ENV ${HOME} + + # operational settings: yearly restarts + compute_time: "01:00:00" + initial_date: "2015-01-01T00:00:00" # Initial exp. date + final_date: "2016-01-01T00:00:00" # Final date of the experiment + nmonth: 12 + clean_runs: 2 + # test settings, daily restarts + # compute_time: "00:15:00" + # initial_date: "2015-01-01T00:00:00" # Initial exp. date + # final_date: "2015-01-04T00:00:00" # Final date of the experiment + # nday: 1 + + postprocessing: True + # machine specific setup + account: shktkeme + base_dir: ${homedir}/esm/esm-experiments/ + partition: standard96:test + + lresume: True + ini_parent_exp_id: "FOCI1.20.0-CC105_RCP_ESM_spinup2099" + ini_parent_date: "20141231" + ini_nemo_restart_steps: 2410600 + ini_parent_dir: "/scratch/usr/shktkeme/foci_input/FOCI-MOPS/${ini_parent_exp_id}" + #ini_parent_dir: "/scratch/usr/shkifmsw/foci_input2/FOCI_RESTART/${ini_parent_exp_id}" + + #oasis3mct: + # export_mode_a2o: "EXPOUT" + # export_mode: "EXPOUT" + # debug_level: 0 + +foci: + model_dir: ${general.homedir}/esm/models/foci-mops_oasismct4/ + +echam: + # all ssp* scenarios use historical forcing before 2015, i.e. the setting + # scenario: "cmip6hist" and # scenario: "ssp???" 
produce the same results + # until year 2014 + # available scenarios: 1percCO2, ssp126, ssp245, ssp370, ssp585, ssp585os + # technically tested: cmip6hist, ssp370 and ssp585 + # seb-wahl, 2021-08-11 + scenario: "ssp370" + resolution: "T63" + # operational settings: yearly restarts + restart_rate: ${general.nmonth} + restart_unit: "months" + # test settings, daily restarts + #restart_rate: ${general.nday} + #restart_unit: "days" + nproca: 24 + nprocb: 24 + nprocio: 12 + # sets the namelist parameters, and links the correct + # carbon_emission.nc file + with_lco2_emis: True + namelist_changes: + namelist.echam: + runctl: + lcouple_co2: True + default_output: False + ltdiag: True + radctl: + iaero: 8 + ighg: 1 + io3: 4 + isolrad: 1 + ico2: 1 + ich4: 4 + in2o: 4 + icfc: 4 + submodelctl: + lco2: True + lmethox: True + parctl: + iomode: 2 + nprocio: ${nprocio} + co2ctl: + lco2_emis: True + lco2_2perc: True +nemo: + resolution: "ORCA05" + # overwrite default time step of 1800s + time_step: 2160 + version: "ORCA05_LIM2_FOCI_MOPS_OASISMCT4" + jpni: 28 + jpnj: 24 + nproc: 672 + # jpni and jpnj are 0 by default, i.e. 
automatic distribution on CPUs + # alkalinity masks + namelist_changes: + namelist_top_cfg: + namtrc_rad: + ln_trcrad: False + namtrc: + ln_trcdta: False + #ln_alk: false + #forcing_dir: /scratch/usr/shktkeme/foci_input/alk_mask/CoastEU/ + #forcing_sources: + #alkalinity: ${forcing_dir}/alkalinity_mask_y@YEAR@.nc + #forcing_files: + #alkalinity: alkalinity + +xios: + nproc: 12 diff --git a/runscripts/foci/special_examples/foci-agrif-1950_annual_restart.yaml b/runscripts/foci/special_examples/foci-agrif-1950_annual_restart.yaml new file mode 100755 index 000000000..f3eac7173 --- /dev/null +++ b/runscripts/foci/special_examples/foci-agrif-1950_annual_restart.yaml @@ -0,0 +1,67 @@ +#computer: +# additional_flags: "--qos=preempt" +# +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "agrif" + homedir: !ENV ${HOME} + + compute_time: "06:00:00" + initial_date: "1950-01-01T00:00:00" # Initial exp. date + final_date: "2200-01-01T00:00:00" # Final date of the experiment + postprocessing: 0 + nmonth: 12 + clean_runs: 2 + + # machine specific setup + account: shk00018 + base_dir: ${homedir}/esm/esm-experiments/ + +foci: + model_dir: ${general.homedir}/esm/models/foci-agrif/ + +echam: + scenario: "1950" + resolution: "T63" + restart_rate: ${general.nmonth} + restart_unit: "months" + nproca: 16 + nprocb: 12 + +nemo: + nest1: WG10 + #nest: NPAC10 + #agrif_dir: /path/to/my/input_files/AGRIF/agrif_${nest} + #nest: VIKING10 + #nest: INALT10x + resolution: "ORCA05" + version: "ORCA05_LIM2_FOCI_AGRIF_AOW" + restart_rate: ${general.nday} + restart_unit: "days" + jpni: 36 + jpnj: 24 + nproc: 864 + # jpni and jpnj are 0 by default, i.e. 
automatic distribution on CPUs + # + # if an initial run does not work, or if you experience instabilities in NEMO or the NEST + # the following settings may help to get past those instabilities + namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 599 + ## geothermal heating globally + #nambbc: + # ln_trabbc: true + # nn_geoflx: 1 + # rn_geoflx_cst: 86.4e-3 # Constant value of geothermal heat flux [W/m2], this is the default + ## geothermal heating in nest + #1_namelist_cfg: + # nambbc: + # ln_trabbc: true + # nn_geoflx: 1 + # rn_geoflx_cst: 86.4e-3 # Constant value of geothermal heat flux [W/m2], this is the default + +xios: + nproc: 36 diff --git a/runscripts/foci/special_examples/foci-fs1hcpl-initial-piCtl_daily_restart_highcpu.yaml b/runscripts/foci/special_examples/foci-fs1hcpl-initial-piCtl_daily_restart_highcpu.yaml new file mode 100755 index 000000000..ce2f2e522 --- /dev/null +++ b/runscripts/foci/special_examples/foci-fs1hcpl-initial-piCtl_daily_restart_highcpu.yaml @@ -0,0 +1,55 @@ +general: + use_venv: False + verbose: False + setup_name: "foci" + version: "fs" + homedir: !ENV ${HOME} + + compute_time: "00:15:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. date + final_date: "1850-01-04T00:00:00" # Final date of the experiment + postprocessing: 0 + nday: 1 + + # machine specific setup + account: shk00018 + base_dir: ${homedir}/esm/esm-experiments/ + +foci: + model_dir: ${general.homedir}/esm/models/foci-fs/ + +oasis3mct: + coupling_time_step: 3600 + export_mode: EXPOUT + export_mode_a2o: EXPOUT + +echam: + scenario: "PI-CTRL" + resolution: "T63" + restart_rate: ${general.nday} + restart_unit: "days" + nproca: 24 + nprocb: 24 + namelist_changes: + namelist.echam: + runctl: + nproma: 96 + putocean: [ 8, 'steps', 'exact', -450 ] + getocean: [ 8, 'steps', 'exact', 0 ] + +nemo: + resolution: "ORCA05" + version: "ORCA05_LIM2_KCM_AOW_FS" + jpni: 24 + jpnj: 24 + nproc: 576 + # jpni and jpnj are 0 by default, i.e. 
automatic distribution on CPUs + # + # if an initial run does not work, or if you experience instabilities in NEMO or the NEST + # the following settings may help to get past those instabilities + #namelist_changes: + # namelist_cfg: + # namtra_ldf: + # rn_aht_0: 599 +xios: + nproc: 8 diff --git a/runscripts/focioifs/focioifs-piCtl-initial-blogin.yaml b/runscripts/focioifs/focioifs-piCtl-initial-blogin.yaml index 3b6223ef8..82d7b7c74 100755 --- a/runscripts/focioifs/focioifs-piCtl-initial-blogin.yaml +++ b/runscripts/focioifs/focioifs-piCtl-initial-blogin.yaml @@ -7,12 +7,12 @@ general: version: "2.1" resolution: "TCO95_ORCA05" - compute_time: "12:00:00" + compute_time: "01:00:00" initial_date: "1850-01-01T00:00:00" # Initial exp. date - final_date: "2300-01-01T00:00:00" # Final date of the experiment + final_date: "1850-02-01T00:00:00" # Final date of the experiment - nyear: 10 - nmonth: 0 + nyear: 0 + nmonth: 1 nday: 0 restart_rate: 1 restart_unit: days @@ -22,7 +22,8 @@ general: # machine specific setup account: shk00018 - base_dir: ${general.homedir}/esm/esm-experiments/ + #base_dir: ${general.homedir}/esm/esm-experiments/ + base_dir: /scratch/usr/shkjocke/esm-speed3/ focioifs: model_dir: ${general.homedir}/esm/models/focioifs-2.1 @@ -64,7 +65,7 @@ oasis3mct: norestart: F use_lucia: True #export_mode: EXPOUT - debug_level: 50 + #debug_level: 50 pool_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/" ini_parent_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/" oasis_date_stamp: "" diff --git a/runscripts/focioifs/focioifs-piCtl-test_manycpu.yaml b/runscripts/focioifs/focioifs-piCtl-test_manycpu.yaml index 01c5ffc8b..ed288df45 100755 --- a/runscripts/focioifs/focioifs-piCtl-test_manycpu.yaml +++ b/runscripts/focioifs/focioifs-piCtl-test_manycpu.yaml @@ -12,20 +12,20 @@ general: setup_name: "focioifs" resolution: "TCO95_ORCA05" - compute_time: "15:00:00" + compute_time: "00:30:00" initial_date: 
"1850-01-01T00:00:00" # Initial exp. date final_date: "2500-01-01T00:00:00" # Final date of the experiment - nyear: 10 - nmonth: 0 + nyear: 0 + nmonth: 1 nday: 0 restart_rate: 1 - restart_unit: days + restart_unit: months hours: 0 # machine specific setup - #account: shk00018 - base_dir: ${homedir}/esm/esm-slask/ + account: shkifmsw + base_dir: ${homedir}/esm/esm-experiments/ computer: additional_flags: @@ -69,8 +69,8 @@ oasis3mct: use_lucia: True #export_mode: EXPOUT #debug_level: 50 - pool_dir: "/gxfs_work1/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/" - ini_parent_dir: "/gxfs_work1/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/" + #pool_dir: "/gxfs_work1/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/" + #ini_parent_dir: "/gxfs_work1/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/" oasis_date_stamp: "" nemo: @@ -92,5 +92,5 @@ rnfmap: xios: with_model: focioifs #xml_dir: "${oifs.pool_dir}/OASIS3_OPENIFS43R3-TCO95_ORCA05/output_3h+6hrLev_5dUVTS" - xml_dir: "/gxfs_work1/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_spinup/" + #xml_dir: "/gxfs_work1/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_spinup/" nproc: 16 diff --git a/runscripts/focioifs/focioifs21-piCtl-initial-glogin.yaml b/runscripts/focioifs/focioifs21-piCtl-initial-glogin.yaml index fec78f4b2..4bed22894 100755 --- a/runscripts/focioifs/focioifs21-piCtl-initial-glogin.yaml +++ b/runscripts/focioifs/focioifs21-piCtl-initial-glogin.yaml @@ -9,11 +9,11 @@ general: compute_time: "01:00:00" initial_date: "1950-01-01T00:00:00" # Initial exp. 
date - final_date: "1951-01-01T00:00:00" # Final date of the experiment + final_date: "1950-01-06T00:00:00" # Final date of the experiment - nyear: 1 + nyear: 0 nmonth: 0 - nday: 0 + nday: 5 restart_rate: 1 restart_unit: days hours: 0 @@ -88,7 +88,10 @@ nemo: sn_tem_levitus: /scratch/projects/shk00018/ORCA05/votemper_EN4_gridded_195001-ORCA05_DROWN.nc sn_sal_levitus: /scratch/projects/shk00018/ORCA05/vosaline_EN4_gridded_195001-ORCA05_DROWN.nc add_namelist_changes: - namelist_cfg: + namelist_cfg: + namrun: + cn_ocerst_indir: ${parent_restart_dir} + cn_ocerst_outdir: ${parent_restart_dir} namtsd: sn_tem: ['votemper_EN4_gridded_195001-ORCA05_DROWN.nc', -12, 'votemper', .true., .true., 'yearly', ' ', ' ', ' '] sn_sal: ['vosaline_EN4_gridded_195001-ORCA05_DROWN.nc', -12, 'vosaline', .true., .true., 'yearly', '', ' ', ' '] @@ -101,6 +104,15 @@ nemo: rn_aht_0: 600 namsbc_rnf: rn_rfact: 1.0 + + namelist_ice_cfg: + namicerun: + cn_icerst_indir: ${parent_restart_dir} + cn_icerst_outdir: ${parent_restart_dir} + namelist_top_cfg: + namtrc_run: + cn_trcrst_indir: ${parent_restart_dir} + cn_trcrst_outdir: ${parent_restart_dir} rnfmap: nproc: 1 diff --git a/runscripts/focioifs/focioifs21-piCtl-restart-blogin.yaml b/runscripts/focioifs/focioifs21-piCtl-restart-blogin.yaml index 5ccb49ab7..48c00f740 100755 --- a/runscripts/focioifs/focioifs21-piCtl-restart-blogin.yaml +++ b/runscripts/focioifs/focioifs21-piCtl-restart-blogin.yaml @@ -9,7 +9,7 @@ general: compute_time: "12:00:00" initial_date: "4000-01-01T00:00:00" # Initial exp. 
date - final_date: "4100-01-01T00:00:00" # Final date of the experiment + final_date: "4300-01-01T00:00:00" # Final date of the experiment nyear: 10 nmonth: 0 @@ -75,6 +75,11 @@ oifs: use_ocean_currents: 0 sclct_switch: 2 initial_date_cold: "4000-01-01" + + #add_namelist_changes: + # fort.4: + # NAMMCC: + # RALBSEAD_SCALE: 0.6667 oasis3mct: lresume: ${general.lresume} diff --git a/runscripts/focioifs/focioifs211-piCtl-restart-glogin-directOutput.yaml b/runscripts/focioifs/focioifs211-piCtl-restart-glogin-directOutput.yaml new file mode 100755 index 000000000..5bef02684 --- /dev/null +++ b/runscripts/focioifs/focioifs211-piCtl-restart-glogin-directOutput.yaml @@ -0,0 +1,141 @@ +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + resolution: "TCO95_ORCA05" + version: "2.1.1" + + compute_time: "12:00:00" + initial_date: "4000-01-01T00:00:00" # Initial exp. date + final_date: "4100-01-01T00:00:00" # Final date of the experiment + + nyear: 10 + nmonth: 0 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + account: shk00018 + base_dir: /scratch/usr/shkjocke/esm-experiments/ + + lresume: True + ini_parent_exp_id: "FOCI_GJK006" + ini_string_parent_date: "4000-01-01" + ini_nemo_restart_steps: 20161344 + ini_parent_dir: "/scratch/usr/shkjocke/esm-experiments/${ini_parent_exp_id}/restart/" + oasis_date_stamp: "_39900101-39991231" + # We will not restart OpenIFS, techinically speaking + # with the eternal restart feature, the complete yaml of the previous run needs to be available: + prev_run_config_file: "${ini_parent_dir}/../../config/${ini_parent_exp_id}_finished_config.yaml${oasis_date_stamp}" + + # This activates post processing for OpenIFS and NEMO + postprocessing: 1 + +computer: + additional_flags: '--qos=preempt' + +focioifs: + model_dir: ${general.homedir}/esm/models/focioifs-2.1.1/ + +oifs: + lresume: True + # This would be for a traditional restart + # but not used here + prev_run_config_file: 
"${general.prev_run_config_file}" + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/oifs/" + ini_parent_date: "${prev_date}" + + version: "43r3" + with_xios: True + scenario: "piControl" + mip: "cmip6" + pool_dir: /scratch/projects/shk00018/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco95/" + #input_dir: "/scratch/projects/shk00018/focioifs_restarts/FOCI_GJK006/restart/oifs/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" + ifsdata_dir: "${pool_dir}/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + # This would use initial conditions from ERA-Interim 2008-01-01 + prepifs_expid: hagw + prepifs_startdate: 20080101 + + # This uses initial conditions from FOCI_GJK006 4000-01-02 + #prepifs_expid: ECE3 + #prepifs_startdate: 40000101 + + resolution: TCO95 + levels: L91 + nproc: 287 + omp_num_threads: 1 + use_ocean_currents: 0 + sclct_switch: 2 + initial_date_cold: "4000-01-01" + + #add_namelist_changes: + # fort.4: + # NAMMCC: + # RALBSEAD_NML: 0.045 + +oasis3mct: + lresume: ${general.lresume} + pool_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/" + # This would be for a cold start (SST = 0C,etc) + #ini_parent_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/" + # Use FOCI_GJK006 4000-01-01 + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "/scratch/projects/shk00018/focioifs_restarts/${ini_parent_exp_id}/restart/oasis3mct/" + ini_parent_date: "${prev_date}" + + norestart: F + use_lucia: True + export_mode: EXPORTED + debug_level: 5 + # in FOCIOIFS all oasis input files are treated as restarts. 
+ # in FOCI all remapping files are handled as inputs. Both approaches work. + #ini_parent_dir: "${general.ini_parent_dir}/oasis3mct" + oasis_date_stamp: "${general.oasis_date_stamp}" + #oasis_date_stamp: "" + +nemo: + lresume: ${general.lresume} + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/nemo/" + ini_parent_date: "${prev_date}" + + resolution: "ORCA05" + jpni: 24 + jpnj: 18 + nproc: 432 + add_namelist_changes: + namelist_cfg: + namrun: + cn_ocerst_indir: ${parent_restart_dir} + cn_ocerst_outdir: ${experiment_restart_out_dir} + namtra_ldf: + rn_aht_0: 600 + namsbc_rnf: + rn_rfact: 0.962 + namelist_ice_cfg: + namicerun: + cn_icerst_indir: ${parent_restart_dir} + cn_icerst_outdir: ${experiment_restart_out_dir} + namelist_top_cfg: + namtrc_run: + cn_trcrst_indir: ${parent_restart_dir} + cn_trcrst_outdir: ${experiment_restart_out_dir} + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_spinup/" + nproc: 48 diff --git a/runscripts/focioifs/focioifs211-piCtl-restart-glogin.yaml b/runscripts/focioifs/focioifs211-piCtl-restart-glogin.yaml index 5bef02684..b0f8ab8ac 100755 --- a/runscripts/focioifs/focioifs211-piCtl-restart-glogin.yaml +++ b/runscripts/focioifs/focioifs211-piCtl-restart-glogin.yaml @@ -24,7 +24,7 @@ general: ini_parent_exp_id: "FOCI_GJK006" ini_string_parent_date: "4000-01-01" ini_nemo_restart_steps: 20161344 - ini_parent_dir: "/scratch/usr/shkjocke/esm-experiments/${ini_parent_exp_id}/restart/" + ini_parent_dir: "/scratch/projects/shk00018/focioifs_restarts/${ini_parent_exp_id}/restart/" oasis_date_stamp: "_39900101-39991231" # We will not restart OpenIFS, techinically speaking # with the eternal restart feature, the complete yaml of the previous run needs to be available: @@ -33,8 +33,9 @@ general: # This activates post processing for OpenIFS and NEMO postprocessing: 1 -computer: - additional_flags: 
'--qos=preempt' +#computer: +# #partition_name: "standard96:eoptimized" +# additional_flags: '--qos=preempt' focioifs: model_dir: ${general.homedir}/esm/models/focioifs-2.1.1/ @@ -54,7 +55,6 @@ oifs: mip: "cmip6" pool_dir: /scratch/projects/shk00018/foci_input2/ input_dir: "${pool_dir}/openifs_cy43_tco95/" - #input_dir: "/scratch/projects/shk00018/focioifs_restarts/FOCI_GJK006/restart/oifs/" rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" @@ -79,10 +79,16 @@ oifs: sclct_switch: 2 initial_date_cold: "4000-01-01" - #add_namelist_changes: - # fort.4: - # NAMMCC: - # RALBSEAD_NML: 0.045 + add_namelist_changes: + fort.4: + NAMCLDP: + # diffusion coeff for cloud edges + RCLDIFF: 1.e-6 # default 3e-6 + # for convection, RCLDIFF=RCLDIFF*RCLDIFF_CONVI + RCLDIFF_CONVI: 7.0 # default 7.0 + #NAMMCC: + # RALBSEAD_NML: 0.045 + # RALBSCALE_AR: 0.8 oasis3mct: lresume: ${general.lresume} @@ -91,7 +97,7 @@ oasis3mct: #ini_parent_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/" # Use FOCI_GJK006 4000-01-01 ini_parent_exp_id: "${general.ini_parent_exp_id}" - ini_parent_dir: "/scratch/projects/shk00018/focioifs_restarts/${ini_parent_exp_id}/restart/oasis3mct/" + ini_parent_dir: "${general.ini_parent_dir}/oasis3mct/" ini_parent_date: "${prev_date}" norestart: F @@ -100,7 +106,7 @@ oasis3mct: debug_level: 5 # in FOCIOIFS all oasis input files are treated as restarts. # in FOCI all remapping files are handled as inputs. Both approaches work. 
- #ini_parent_dir: "${general.ini_parent_dir}/oasis3mct" + #ini_parent_dir: "${general.ini_parent_dir}/oasis3mct/" oasis_date_stamp: "${general.oasis_date_stamp}" #oasis_date_stamp: "" @@ -117,20 +123,21 @@ nemo: add_namelist_changes: namelist_cfg: namrun: - cn_ocerst_indir: ${parent_restart_dir} - cn_ocerst_outdir: ${experiment_restart_out_dir} + #cn_ocerst_indir: ${parent_restart_dir} + #cn_ocerst_outdir: ${experiment_restart_out_dir} + ln_single_prec_send: ".true." namtra_ldf: rn_aht_0: 600 namsbc_rnf: rn_rfact: 0.962 - namelist_ice_cfg: - namicerun: - cn_icerst_indir: ${parent_restart_dir} - cn_icerst_outdir: ${experiment_restart_out_dir} - namelist_top_cfg: - namtrc_run: - cn_trcrst_indir: ${parent_restart_dir} - cn_trcrst_outdir: ${experiment_restart_out_dir} + #namelist_ice_cfg: + # namicerun: + # cn_icerst_indir: ${parent_restart_dir} + # cn_icerst_outdir: ${experiment_restart_out_dir} + #namelist_top_cfg: + # namtrc_run: + # cn_trcrst_indir: ${parent_restart_dir} + # cn_trcrst_outdir: ${experiment_restart_out_dir} rnfmap: nproc: 1 diff --git a/runscripts/focioifs/focioifs22-4xCO2-restart-glogin.yaml b/runscripts/focioifs/focioifs22-4xCO2-restart-glogin.yaml new file mode 100644 index 000000000..e3ed46d8c --- /dev/null +++ b/runscripts/focioifs/focioifs22-4xCO2-restart-glogin.yaml @@ -0,0 +1,143 @@ +# example to use the a small number of nodes / CPUs that +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + version: "2.1.1" + resolution: "TCO95_ORCA05" + + compute_time: "12:00:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "2000-01-01T00:00:00" # Final date of the experiment + + nyear: 10 + nmonth: 0 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + + # machine specific setup + account: shk00018 + base_dir: ${general.homedir}/esm/esm-experiments/ + + postprocessing: 1 + + lresume: True + # Name of the run you are starting from + ini_parent_exp_id: "FOCI_GJK029" + # Where are you starting from in the above run + ini_string_parent_date: "2550-01-01" + # Time step in NEMO from restarts in parent run + # NOTE: ESM-Tools looks for a global NEMO restart file + # You must run nocs_combine on restarts first. + ini_nemo_restart_steps: 12272160 + ini_parent_dir: "${homedir}/esm/esm-experiments/${ini_parent_exp_id}/" + # Time stamp from OASIS for restarts in parent run + oasis_date_stamp: "_25400101-25491231" + +focioifs: + model_dir: ${general.homedir}/esm/models/focioifs-2.1.1/ + +oifs: + lresume: True + version: "43r3" + with_xios: True + scenario: "4xCO2" + mip: "cmip6" + input_dir: "${pool_dir}/OPENIFS43R3-TCO95/" + rtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/rtables/" + vtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/vtables/" + clim_dir: "${pool_dir}/OPENIFS43R3-TCO95/95_4/" + ifsdata_dir: "/scratch/projects/shk00018/foci_input2/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + prepifs_expid: hagw + prepifs_startdate: 20080101 + resolution: TCO95 + levels: L91 + time_step: 1800 + nproc: 168 + omp_num_threads: 4 + use_ocean_currents: 0 + + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_date: "25491231" + # where are your restart files for OpenIFS + ini_parent_dir: "${general.ini_parent_dir}/restart/oifs/" + ini_string_parent_date: "1850-01-01" + # Config file of last restart + prev_run_config_file: 
"${ini_parent_dir}/../../config/${ini_parent_exp_id}_finished_config.yaml${general.oasis_date_stamp}" + # are you branching off from another run + branchoff: true + # where do you want the model to think its starting from? + # for annual restarts, this should be 1 year before general.initial_date + # E.g. for historical, initial_date=1850-01-01, with annual restart, then this is 1849-01-01 + pseudo_initial_date: "1840-01-01" + + # use CMIP6 ozone + o3_scheme: cmip6 + + # new solar spectrum + solarspectrum: True + + # correction for Southern Ocean clouds + sclct_switch: 2 + + add_namelist_changes: + fort.4: + NAERAD: + NAERANT_SCALE: 1 # turn on aerosol scaling + NAMCLDP: + # diffusion coeff for cloud edges + RCLDIFF: 5.e-6 # default 3e-6 + # for convection, RCLDIFF=RCLDIFF*RCLDIFF_CONVI + #RCLDIFF_CONVI: 7 # default 7.0 + NAMMCC: + RALBSEAD_NML: 0.045 + RALBSCALE_AR: 0.8 + +oasis3mct: + lresume: True + norestart: F + use_lucia: True + debug_level: 1 + #export_mode: EXPOUT + pool_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05_viking10/ + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/restart/oasis3mct/" + oasis_date_stamp: "" #"_19990101-19991231" + +nemo: + lresume: ${general.lresume} + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/restart/nemo/" + ini_parent_date: "25491231" + resolution: "ORCA05" + jpni: 24 + jpnj: 24 + nproca: 24 + nprocb: 24 + nproc: 576 + + add_namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: 0.5 + namdyn_ldf: + rn_cmsmag_2: 4 + namsbc_rnf: + rn_rfact: 0.984 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_prod/" + nproc: 48 diff --git a/runscripts/focioifs/focioifs22-hist-restart-glogin.yaml b/runscripts/focioifs/focioifs22-hist-restart-glogin.yaml new file mode 100644 index 
000000000..f115b5489 --- /dev/null +++ b/runscripts/focioifs/focioifs22-hist-restart-glogin.yaml @@ -0,0 +1,143 @@ +# example to use the a small number of nodes / CPUs that +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + version: "2.1.1" + resolution: "TCO95_ORCA05" + + compute_time: "12:00:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. date + final_date: "2015-01-01T00:00:00" # Final date of the experiment + + nyear: 5 + nmonth: 0 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + + # machine specific setup + account: shk00018 + base_dir: ${general.homedir}/esm/esm-experiments/ + + postprocessing: 1 + + lresume: True + # Name of the run you are starting from + ini_parent_exp_id: "FOCI_GJK029" + # Where are you starting from in the above run + ini_string_parent_date: "2550-01-01" + # Time step in NEMO from restarts in parent run + # NOTE: ESM-Tools looks for a global NEMO restart file + # You must run nocs_combine on restarts first. 
+ ini_nemo_restart_steps: 12272160 + ini_parent_dir: "${homedir}/esm/esm-experiments/${ini_parent_exp_id}/" + # Time stamp from OASIS for restarts in parent run + oasis_date_stamp: "_25400101-25491231" + +focioifs: + model_dir: ${general.homedir}/esm/models/focioifs-2.1.1/ + +oifs: + lresume: True + version: "43r3" + with_xios: True + scenario: "historical" + mip: "cmip6" + input_dir: "${pool_dir}/OPENIFS43R3-TCO95/" + rtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/rtables/" + vtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/vtables/" + clim_dir: "${pool_dir}/OPENIFS43R3-TCO95/95_4/" + ifsdata_dir: "/scratch/projects/shk00018/foci_input2/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + prepifs_expid: hagw + prepifs_startdate: 20080101 + resolution: TCO95 + levels: L91 + time_step: 1800 + nproc: 168 + omp_num_threads: 4 + use_ocean_currents: 0 + + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_date: "25491231" + # where are your restart files for OpenIFS + ini_parent_dir: "${general.ini_parent_dir}/restart/oifs/" + ini_string_parent_date: "1850-01-01" + # Config file of last restart + prev_run_config_file: "${ini_parent_dir}/../../config/${ini_parent_exp_id}_finished_config.yaml${general.oasis_date_stamp}" + # are you branching off from another run + branchoff: true + # where do you want the model to think its starting from? + # for annual restarts, this should be 1 year before general.initial_date + # E.g. 
for historical, initial_date=1850-01-01, with annual restart, then this is 1849-01-01 + pseudo_initial_date: "1845-01-01" + + # use CMIP6 ozone + o3_scheme: cmip6 + + # new solar spectrum + solarspectrum: True + + # correction for Southern Ocean clouds + sclct_switch: 2 + + add_namelist_changes: + fort.4: + NAERAD: + NAERANT_SCALE: 1 # turn on aerosol scaling + NAMCLDP: + # diffusion coeff for cloud edges + RCLDIFF: 5.e-6 # default 3e-6 + # for convection, RCLDIFF=RCLDIFF*RCLDIFF_CONVI + #RCLDIFF_CONVI: 7 # default 7.0 + NAMMCC: + RALBSEAD_NML: 0.045 + RALBSCALE_AR: 0.8 + +oasis3mct: + lresume: True + norestart: F + use_lucia: True + debug_level: 1 + #export_mode: EXPOUT + pool_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05_viking10/ + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/restart/oasis3mct/" + oasis_date_stamp: "" #"_19990101-19991231" + +nemo: + lresume: ${general.lresume} + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/restart/nemo/" + ini_parent_date: "25491231" + resolution: "ORCA05" + jpni: 24 + jpnj: 24 + nproca: 24 + nprocb: 24 + nproc: 576 + + add_namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: 0.5 + namdyn_ldf: + rn_cmsmag_2: 4 + namsbc_rnf: + rn_rfact: 0.984 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_prod/" + nproc: 48 diff --git a/runscripts/focioifs/focioifs22-piCtl-restart-glogin.yaml b/runscripts/focioifs/focioifs22-piCtl-restart-glogin.yaml new file mode 100755 index 000000000..74bdc483f --- /dev/null +++ b/runscripts/focioifs/focioifs22-piCtl-restart-glogin.yaml @@ -0,0 +1,174 @@ +# example to use the a small number of nodes / CPUs that +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + version: "2.2" + resolution: "TCO95_ORCA05" + + 
compute_time: "1:00:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. date + final_date: "1850-02-01T00:00:00" # Final date of the experiment + + nyear: 0 + nmonth: 1 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + + # machine specific setup + account: shk00018 + base_dir: ${general.homedir}/esm/esm-experiments/ + + postprocessing: 1 + + lresume: True + # Name of the run you are starting from + ini_parent_exp_id: "FOCI_GJK029" + # Where are you starting from in the above run + #ini_restart_date: "2550-01-01" + ini_string_parent_date: "2550-01-01" + # Time step in NEMO from restarts in parent run + # NOTE: ESM-Tools looks for a global NEMO restart file + # You must run nocs_combine on restarts first. + ini_nemo_restart_steps: 12272160 + ini_parent_dir: "${homedir}/esm/esm-experiments/${ini_parent_exp_id}/" + # Time stamp from OASIS for restarts in parent run + oasis_date_stamp: "_25400101-25491231" + +focioifs: + #model_dir: ${general.homedir}/esm/models/focioifs-2.2/ #focioifs-2.1.1/ + model_dir: ${general.homedir}/esm/models/focioifs-2.2/ +computer: + partition: "standard96:test" + #compiler_mpi: intel2019_impi2019 + +oifs: + #runtime_environment_changes: + # choose_computer.name: + # "glogin": + # compiler_mpi: intel2019_impi2019_nemo4 + + + lresume: True + version: "43r3" + with_xios: True + scenario: "piControl" + mip: "cmip6" + input_dir: "${pool_dir}/OPENIFS43R3-TCO95/" + rtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/rtables/" + vtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/vtables/" + clim_dir: "${pool_dir}/OPENIFS43R3-TCO95/95_4/" + ifsdata_dir: "/scratch/projects/shk00018/foci_input2/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + prepifs_expid: hagw + prepifs_startdate: 20080101 + resolution: TCO95 + levels: L91 + time_step: 1800 + 
nproc: 168 + omp_num_threads: 4 + use_ocean_currents: 0 + + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_date: "25491231" + # where are your restart files for OpenIFS + ini_parent_dir: "${general.ini_parent_dir}/restart/oifs/" + ini_string_parent_date: "1850-01-01" + # Config file of last restart + prev_run_config_file: "${ini_parent_dir}/../../config/${ini_parent_exp_id}_finished_config.yaml${general.oasis_date_stamp}" + # are you branching off from another run + branchoff: true + # where do you want the model to think its starting from? + # for annual restarts, this should be 1 year before general.initial_date + # E.g. for historical, initial_date=1850-01-01, with annual restart, then this is 1849-01-01 + ini_pseudo_initial_date: "1840-01-01" + + # use CMIP6 ozone + o3_scheme: cmip6 + + # new solar spectrum + solarspectrum: True + + # correction for Southern Ocean clouds + sclct_switch: 2 + + # perturb i.c. to make ensembles + perturb: 0 # set to 1 to add perturbation + ensemble_id: 1 + + # namelist changes to OpenIFS + add_namelist_changes: + fort.4: + NAERAD: + NAERANT_SCALE: 1 # turn on aerosol scaling + NAMCLDP: + # diffusion coeff for cloud edges + RCLDIFF: 5.e-6 # default 3e-6 + # for convection, RCLDIFF=RCLDIFF*RCLDIFF_CONVI + #RCLDIFF_CONVI: 7 # default 7.0 + NAMMCC: + RALBSEAD_NML: 0.06 #0.045 + RALBSCALE_AR: 0.8 + # couple ice temp from LIM2 + #LNEMOLIMTEMP: ".true." + #NAEPHY: + # # turn of OpenIFS sea-ice thermodynamics + # LESICE: ".true." + # GGAUSSB controls magnitude of parameterized GWD + # GGAUSSB = -0.95 gives more realistic QBO period. 
+ #NAMGWWMS: + # GGAUSSB: -0.95 + +oasis3mct: + lresume: True + #a2o_remap_method: "conserv" + #o2a_remap_method: "conserv" + norestart: F + use_lucia: True + debug_level: 1 + mct_version: "5.0" + #export_mode: EXPOUT + coupling_time_step: 3600 + pool_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/ + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/restart/oasis3mct/" + ini_restart_date: "2549-12-31" + oasis_date_stamp: "" #"_19990101-19991231" + +nemo: + lresume: ${general.lresume} + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/restart/nemo/" + ini_parent_date: "25491231" + resolution: "ORCA05" + jpni: 24 + jpnj: 24 + #nproca: 24 + #nprocb: 18 + nproc: 576 + + add_namelist_changes: + namelist_cfg: + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: 0.5 + #namdyn_ldf: + # rn_cmsmag_2: 4 + namsbc_rnf: + rn_rfact: 0.984 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_spinup/" + nproc: 4 + omp_num_threads: 48 diff --git a/runscripts/focioifs/focioifs3-piCtl-initial-glogin.yaml b/runscripts/focioifs/focioifs3-piCtl-initial-glogin.yaml new file mode 100755 index 000000000..962436ece --- /dev/null +++ b/runscripts/focioifs/focioifs3-piCtl-initial-glogin.yaml @@ -0,0 +1,96 @@ +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + resolution: "TCO95_ORCA05" + version: "3.0" + + compute_time: "04:00:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "1852-01-01T00:00:00" # Final date of the experiment + + nyear: 1 + nmonth: 0 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + account: shk00018 + #base_dir: /scratch/usr/shkjocke/esm-impi2021-spack/ + base_dir: /scratch/usr/shkjocke/esm-phase3/ + + # This activates post processing for OpenIFS and NEMO + postprocessing: 1 + +computer: + partition_name: "standard96" + #additional_flags: '--qos=preempt' + launcher: mpirun + launcher_flags: "" + +focioifs: + #model_dir: /scratch/usr/shkjocke/models_impi2021_spack/focioifs-3.0 + model_dir: /scratch/usr/shkjocke/models_p3/focioifs-3.0 + +oifs: + lresume: False + # This would be for a traditional restart + # but not used here + #prev_run_config_file: "${general.prev_run_config_file}" + #ini_parent_exp_id: "${general.ini_parent_exp_id}" + #ini_parent_dir: "${general.ini_parent_dir}/oifs/" + #ini_parent_date: "${prev_date}" + + version: "43r3" + with_xios: True + scenario: "piControl" + mip: "cmip6" + o3_scheme: cmip6 + pool_dir: /scratch/projects/shk00018/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco95/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" + ifsdata_dir: "${pool_dir}/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + # This would use initial conditions from ERA-Interim 2008-01-01 + prepifs_expid: hagw + prepifs_startdate: 20080101 + + resolution: TCO95 + levels: L91 + nproc: 288 + omp_num_threads: 1 + use_ocean_currents: 0 + +oasis3mct: + lresume: True + pool_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/" + ini_parent_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/" + norestart: F + # For a new 
run, it might be good to set + # use_lucia to True to check timing + use_lucia: False + #export_mode: EXPOUT + debug_level: 2 + +nemo: + input_dir: /scratch/usr/shkifmsw/foci_input2/NEMO_ORCA05/input/ORCA05/ + + resolution: "ORCA05" + jpni: 24 + jpnj: 18 + nproc: 432 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_prod/" + nproc: 48 diff --git a/runscripts/focioifs/focioifs-piCtl-initial-nesh.yaml b/runscripts/focioifs/focioifs3-piCtl-initial-nesh.yaml similarity index 57% rename from runscripts/focioifs/focioifs-piCtl-initial-nesh.yaml rename to runscripts/focioifs/focioifs3-piCtl-initial-nesh.yaml index fd9781ff5..c3312b013 100755 --- a/runscripts/focioifs/focioifs-piCtl-initial-nesh.yaml +++ b/runscripts/focioifs/focioifs3-piCtl-initial-nesh.yaml @@ -8,53 +8,52 @@ general: use_venv: False verbose: False homedir: !ENV ${HOME} + workdir: !ENV ${WORK} setup_name: "focioifs" resolution: "TCO95_ORCA05" - compute_time: "15:00:00" + compute_time: "01:00:00" initial_date: "1850-01-01T00:00:00" # Initial exp. 
date - final_date: "2200-01-01T00:00:00" # Final date of the experiment + final_date: "1850-02-01T00:00:00" # Final date of the experiment - nyear: 10 - nmonth: 0 + nyear: 0 + nmonth: 1 nday: 0 - restart_rate: 1 - restart_unit: days hours: 0 - # machine specific setup + # we dont need compute budgets on NESH #account: shk00018 - base_dir: ${homedir}/esm/esm-slask/ + base_dir: ${workdir}/esm-slask/ computer: additional_flags: - --mem=72000 - - --constraint=cascade + #- --constraint=cascade focioifs: - model_dir: ${general.homedir}/esm/models/focioifs-2.0 + model_dir: /gxfs_work/geomar/smomw352/models_oneapi/focioifs-3.0 oifs: version: "43r3" with_xios: True scenario: "piControl" mip: "cmip6" - input_dir: "${pool_dir}/OPENIFS43R3-TCO95/" - rtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/rtables/" - vtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/vtables/" - clim_dir: "${pool_dir}/OPENIFS43R3-TCO95/95_4/" - ifsdata_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/ifsdata/" - cmip5_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip5_ghg/" - cmip6_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip6-data/" - icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + pool_dir: "/gxfs_work/geomar/smomw352/foci_input2/" + input_dir: "${pool_dir}/openifs_cy43_tco95/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" + ifsdata_dir: "${pool_dir}/openifs_cy43_general/ifsdata/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/openifs_cy43_tco95/2008010100/" icmcl_file: "ICMCLhagwINIT" prepifs_expid: hagw prepifs_startdate: 20080101 resolution: TCO95 levels: L91 lresume: 0 - nproc: 143 + nproc: 144 use_ocean_currents: 0 perturb: 0 sclct_switch: 2 @@ -76,8 +75,8 @@ oasis3mct: use_lucia: True #export_mode: EXPOUT #debug_level: 50 - pool_dir: "/gxfs_work1/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/" - ini_parent_dir: 
"/gxfs_work1/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/" + pool_dir: "/gxfs_work/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/" + ini_parent_dir: "/gxfs_work/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/" oasis_date_stamp: "" nemo: @@ -85,13 +84,14 @@ nemo: jpni: 16 jpnj: 16 nproc: 256 - + input_dir: "/gxfs_work/geomar/smomw352/foci_input2/nemo_orca05/input/ORCA05/" + add_namelist_changes: namelist_cfg: - namtra_ldf: - rn_aht_0: 599 + #namtra_ldf: + # rn_aht_0: 599 namsbc_rnf: - rn_rfact: 1.067 + rn_rfact: 0.987 rnfmap: nproc: 1 @@ -99,5 +99,5 @@ rnfmap: xios: with_model: focioifs #xml_dir: "${oifs.pool_dir}/OASIS3_OPENIFS43R3-TCO95_ORCA05/output_3h+6hrLev_5dUVTS" - xml_dir: "/gxfs_work1/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_spinup/" - nproc: 16 + xml_dir: "/gxfs_work/geomar/smomw352/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_spinup/" + nproc: 4 diff --git a/runscripts/focioifs/focioifs3.0.1-piCtrl-agrif-glogin.yaml b/runscripts/focioifs/focioifs3.0.1-piCtrl-agrif-glogin.yaml new file mode 100755 index 000000000..0b7ad7dba --- /dev/null +++ b/runscripts/focioifs/focioifs3.0.1-piCtrl-agrif-glogin.yaml @@ -0,0 +1,121 @@ +# +# Runscript to run FOCI-OpenIFS with VIKING10 +# on glogin +# +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + version: "agrif-3.0" + resolution: "TCO95_ORCA05" + + compute_time: "06:00:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "1852-01-01T00:00:00" # Final date of the experiment + + nyear: 1 + nmonth: 0 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + + # machine specific setup + account: shk00018 + base_dir: ${general.homedir}/esm/esm-experiments/ + + postprocessing: 1 + +computer: + partition: "standard96" + +focioifs: + model_dir: /scratch/usr/shkjocke/model_test/focioifs-agrif-3.0/ + +oifs: + version: "43r3" + with_xios: True + scenario: "piControl" + mip: "cmip6" + input_dir: "${pool_dir}/OPENIFS43R3-TCO95/" + rtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/rtables/" + vtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/vtables/" + clim_dir: "${pool_dir}/OPENIFS43R3-TCO95/95_4/" + ifsdata_dir: "/scratch/projects/shk00018/foci_input2/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + prepifs_expid: hagw + prepifs_startdate: 20080101 + resolution: TCO95 + levels: L91 + time_step: 1800 + lresume: 0 + nproc: 96 + omp_num_threads: 1 + use_ocean_currents: 1 + + # Tell OpenIFS to expect + # coupling fields from nest + with_nest1: true + +oasis3mct: + coupling_time_step: 3600 + mct_version: "5.0" + norestart: F + use_lucia: True + debug_level: 1 + #export_mode: EXPOUT + pool_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05_viking10/ + ini_parent_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05_viking10/ + oasis_date_stamp: "" + +nemo: + # Note for future runs: + # We currently use a 5x grid refinement in VIKING10 and 3x time step + # It would make more sense to also refine time step by 5x so that Courant number is reasonable + # This can be done by simply changing the 3 to a 5 in the 2nd line of AGRIF_FixedGrids.in + # So far, we stick with 3x to keep consistency with existing runs. 
+ agrif_dir: /scratch/projects/shk00018/foci_input2/agrif_viking10_AS/ + nest_refinement: 5 + resolution: "ORCA05" + time_step: 1800 + version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + # This gives subdomains of 15x15 grid cells which is near the limit in NEMO + # More nproc should not be used + jpni: 36 + jpnj: 24 + nproc: 864 + nest1: viking10 + + add_namelist_changes: + 1_namelist_cfg: + # Viscosity in ORCA05 is -1.709e12 + # If grid refinement is factor 5, then new visc should be + # computed as -1.709e12 / 5^3 + # + # Note: -1.709e12 is applied at the largest ORCA05 cell, at equator + # but coeff is actually -1.267e12 where VIKING10 starts + # So: If largest cell in ORCA05 (at eq) is dx_max, + # and largest ORCA05 grid cell size in nest region is dx_nest_max + # then you need to compute the following: + # rn_ahm_0_blp = -1.709e12 * (dx_nest_max / dx_max) / (refinement_factor^3) + # + # For VIKING10: -1.013e10 + # + # Similar considerations should be done for other nests + namldf_dyn: + rn_ahm_0_blp: -1.013e10 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05_viking10/output_for_prod/ + # This should make 4 XIOS cores spread over 2 nodes (on HLRN standard96( + nproc: 24 + diff --git a/runscripts/focioifs/focioifs4-piCtl-initial-glogin.yaml b/runscripts/focioifs/focioifs4-piCtl-initial-glogin.yaml new file mode 100755 index 000000000..735d52869 --- /dev/null +++ b/runscripts/focioifs/focioifs4-piCtl-initial-glogin.yaml @@ -0,0 +1,173 @@ +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + resolution: "TCO95_ORCA05" + version: "4.0" + + compute_time: "12:00:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "1850-02-01T00:00:00" # Final date of the experiment + + nyear: 0 + nmonth: 1 + nday: 0 + #restart_rate: 1 + #restart_unit: days + hours: 0 + account: shk00018 + base_dir: /scratch/usr/shkjocke/esm-phase3/ + + # This activates post processing for OpenIFS and NEMO + postprocessing: 1 + + lresume: False + # Name of the run you are starting from + ini_parent_exp_id: "foci3-test12" + # Where are you starting from in the above run + ini_parent_date: "18491231" + ini_string_parent_date: "1850-01-01" + # Time step in NEMO from restarts in parent run + # NOTE: ESM-Tools looks for a global NEMO restart file + # You must run nocs_combine on restarts first. + #ini_nemo_restart_steps: 262800 + #ini_parent_dir: "/scratch/usr/shkjocke/esm-nemo4/${ini_parent_exp_id}/" + # Time stamp from OASIS for restarts in parent run + oasis_date_stamp: "_18400101-18491231" + # Previous run config file + # (should be general enough) + prev_run_config_file: "/scratch/usr/shkjocke/esm-nemo4/${ini_parent_exp_id}/config/${ini_parent_exp_id}_finished_config.yaml${oasis_date_stamp}" + +computer: + partition_name: "standard96" + #additional_flags: '--qos=preempt' + #taskset: false + #hetjob_flag: hetjob + +focioifs: + model_dir: /scratch/usr/shkjocke/models_p3/focioifs-4.0/ + +oifs: + + lresume: ${general.lresume} + # This would be for a traditional restart + # but not used here + prev_run_config_file: "${general.prev_run_config_file}" + #ini_parent_exp_id: "${general.ini_parent_exp_id}" + #ini_parent_dir: "${general.ini_parent_dir}/restart/oifs/18591231/" + #ini_parent_date: "${prev_date}" + + version: "43r3" + time_step: 1800 + with_xios: True + scenario: "piControl" + mip: "cmip6" + o3_scheme: cmip6 + pool_dir: /scratch/projects/shk00018/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco95/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" + ifsdata_dir: 
"${pool_dir}/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + # This would use initial conditions from ERA-Interim 2008-01-01 + prepifs_expid: hagw + prepifs_startdate: 20080101 + + # This uses initial conditions from FOCI_GJK006 4000-01-02 + #prepifs_expid: ECE3 + #prepifs_startdate: 40000101 + + resolution: TCO95 + levels: L91 + nproc: 288 + omp_num_threads: 1 + use_ocean_currents: 1 + sclct_switch: 1 + #initial_date_cold: "4000-01-01" + solarspectrum: True + + ini_pseudo_initial_date: "1859-01-01" + + add_namelist_changes: + fort.4: + NAMCUMF: + # Conversion rate from cloud water to rain + # See eq 6.38 in IFS doc 43r3 (variable c00) + RPRCON: 1.4e-3 # default 1.4e-3 + NAMCLDP: + # Vi in section 7.2.4i in IFS doc 43r3 + RVICE: 0.13 # default 0.13 + +oasis3mct: + lresume: True + pool_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05/" + # This would be for a cold start (SST = 0C,etc) + ini_parent_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05/" # "${general.ini_parent_dir}/restart/oasis3mct/" + + a2o_remap_method: "bilincub" + o2a_remap_method: "bilinear" + norestart: F + use_lucia: False + #export_mode: EXPOUT + debug_level: 1 + mct_version: "5.0" + # in FOCIOIFS all oasis input files are treated as restarts. + # in FOCI all remapping files are handled as inputs. Both approaches work. 
+ #ini_parent_dir: "${general.ini_parent_dir}/oasis3mct/" + #oasis_date_stamp: "${general.oasis_date_stamp}" + #oasis_date_stamp: "" + +nemo: + lresume: ${general.lresume} + #ini_parent_exp_id: "${general.ini_parent_exp_id}" + #ini_parent_dir: "${general.ini_parent_dir}/nemo/" + #ini_parent_date: "${prev_date}" + input_dir: "/scratch/projects/shk00018/foci_input2/NEMO4_ORCA05_input/" + + resolution: "ORCA05" + #version: "ORCA05_Z46_SI3_COUPLED" + # 64x51 = 3264 + # 48x48 = 2304 + # 36x36 = 1296 + # 36x24 = 864 + # 24x24 = 576 + _nx: 720 + _ny: 510 + jpni: 24 + jpnj: 18 + #nproca: ${nemo.jpni} + #nprocb: ${nemo.jpnj} + nproc: 432 + add_namelist_changes: + namelist_cfg: + namctl: + ln_timing: ".false." + namrun: + nn_istate: 0 + namdyn_ldf: + # Use time-varying viscosity set by deformation rate + # (Smagorinsky) + nn_ahm_ijk_t: 32 + namtra_ldf: + # Diffusion set by local baroclinic growth rate + # Recommended by CdL to bump up by 50% + nn_aht_ijk_t: 20 + rn_aht_scale: 1.5 + namtra_eiv: + # Reduce eddy-ind diffusivity to help ACC + rn_aeiv_scale: 0.5 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05/output_for_diag/" + nproc: 20 + omp_num_threads: 1 diff --git a/runscripts/focioifs/focioifs4-piCtl-initial-levante.yaml b/runscripts/focioifs/focioifs4-piCtl-initial-levante.yaml new file mode 100755 index 000000000..7730b68f7 --- /dev/null +++ b/runscripts/focioifs/focioifs4-piCtl-initial-levante.yaml @@ -0,0 +1,179 @@ +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + resolution: "TCO95_ORCA05" + version: "4.0" + + compute_time: "12:00:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "1850-02-01T00:00:00" # Final date of the experiment + + nyear: 0 + nmonth: 1 + nday: 0 + #restart_rate: 1 + #restart_unit: days + hours: 0 + account: bb1460 + base_dir: /work/bb1460/b350071/esm-experiments + + # This activates post processing for OpenIFS and NEMO + postprocessing: True + + lresume: False + # Name of the run you are starting from + ini_parent_exp_id: "foci3-test12" + # Where are you starting from in the above run + ini_parent_date: "18491231" + ini_string_parent_date: "1850-01-01" + # Time step in NEMO from restarts in parent run + # NOTE: ESM-Tools looks for a global NEMO restart file + # You must run nocs_combine on restarts first. + #ini_nemo_restart_steps: 262800 + #ini_parent_dir: "/scratch/usr/shkjocke/esm-nemo4/${ini_parent_exp_id}/" + # Time stamp from OASIS for restarts in parent run + oasis_date_stamp: "_18400101-18491231" + # Previous run config file + # (should be general enough) + prev_run_config_file: "/scratch/usr/shkjocke/esm-nemo4/${ini_parent_exp_id}/config/${ini_parent_exp_id}_finished_config.yaml${oasis_date_stamp}" + +#computer: + #partition_name: "standard96" + #additional_flags: '--qos=preempt' + #taskset: false + #hetjob_flag: hetjob + +focioifs: + model_dir: ${general.homedir}/esm/models/focioifs-4.0/ + +oifs: + + lresume: ${general.lresume} + # This would be for a traditional restart + # but not used here + prev_run_config_file: "${general.prev_run_config_file}" + #ini_parent_exp_id: "${general.ini_parent_exp_id}" + #ini_parent_dir: "${general.ini_parent_dir}/restart/oifs/18591231/" + #ini_parent_date: "${prev_date}" + + version: "43r3" + time_step: 1800 + with_xios: True + scenario: "piControl" + mip: "cmip6" + o3_scheme: cmip6 + # TODO: clone https://git.geomar.de/foci/foci_input2/openifs_cy43_general.git + # https://git.geomar.de/foci/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05.git + pool_dir: /work/bb11460/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco95/" + rtables_dir: 
"${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" + ifsdata_dir: "${pool_dir}/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + # This would use initial conditions from ERA-Interim 2008-01-01 + prepifs_expid: hagw + prepifs_startdate: 20080101 + + # This uses initial conditions from FOCI_GJK006 4000-01-02 + #prepifs_expid: ECE3 + #prepifs_startdate: 40000101 + + resolution: TCO95 + levels: L91 + nproc: 288 + omp_num_threads: 1 + use_ocean_currents: 1 + sclct_switch: 1 + #initial_date_cold: "4000-01-01" + solarspectrum: True + + ini_pseudo_initial_date: "1859-01-01" + + add_namelist_changes: + fort.4: + NAMCUMF: + # Conversion rate from cloud water to rain + # See eq 6.38 in IFS doc 43r3 (variable c00) + RPRCON: 1.4e-3 # default 1.4e-3 + NAMCLDP: + # Vi in section 7.2.4i in IFS doc 43r3 + RVICE: 0.13 # default 0.13 + +oasis3mct: + lresume: True + # TODO: clone https://git.geomar.de/foci/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05.git to /work/bb1460/foci_input2 + pool_dir: "/work/bb1460/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05/" + # This would be for a cold start (SST = 0C,etc) + ini_parent_dir: "${pool_dir}" + # for a real restart you would use + # "${general.ini_parent_dir}/restart/oasis3mct/" + + a2o_remap_method: "bilincub" + o2a_remap_method: "bilinear" + norestart: F + use_lucia: False + #export_mode: EXPOUT + debug_level: 1 + mct_version: "5.0" + # in FOCIOIFS all oasis input files are treated as restarts. + # in FOCI all remapping files are handled as inputs. Both approaches work. 
+ #ini_parent_dir: "${general.ini_parent_dir}/oasis3mct/" + #oasis_date_stamp: "${general.oasis_date_stamp}" + #oasis_date_stamp: "" + +nemo: + # TODO: clone https://git.geomar.de/foci/foci_input2/nemo4_orca05.git + lresume: ${general.lresume} + #ini_parent_exp_id: "${general.ini_parent_exp_id}" + #ini_parent_dir: "${general.ini_parent_dir}/nemo/" + #ini_parent_date: "${prev_date}" + input_dir: "/work/bb1460/foci_input2/NEMO4_ORCA05_input/" + + resolution: "ORCA05" + #version: "ORCA05_Z46_SI3_COUPLED" + # 64x51 = 3264 + # 48x48 = 2304 + # 36x36 = 1296 + # 36x24 = 864 + # 24x24 = 576 + _nx: 720 + _ny: 510 + jpni: 24 + jpnj: 18 + #nproca: ${nemo.jpni} + #nprocb: ${nemo.jpnj} + nproc: 432 + add_namelist_changes: + namelist_cfg: + namctl: + ln_timing: ".false." + namrun: + nn_istate: 0 + namdyn_ldf: + # Use time-varying viscosity set by deformation rate + # (Smagorinsky) + nn_ahm_ijk_t: 32 + namtra_ldf: + # Diffusion set by local baroclinic growth rate + # Recommended by CdL to bump up by 50% + nn_aht_ijk_t: 20 + rn_aht_scale: 1.5 + namtra_eiv: + # Reduce eddy-ind diffusivity to help ACC + rn_aeiv_scale: 0.5 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/work/bb1460/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05/output_for_diag/" + nproc: 20 + omp_num_threads: 1 diff --git a/runscripts/focioifs/focioifs4-piCtl-initial-olaf.yaml b/runscripts/focioifs/focioifs4-piCtl-initial-olaf.yaml new file mode 100755 index 000000000..b60f394c3 --- /dev/null +++ b/runscripts/focioifs/focioifs4-piCtl-initial-olaf.yaml @@ -0,0 +1,155 @@ +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + resolution: "TCO95_ORCA05" + version: "4.0" + + compute_time: "01:00:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "1850-02-01T00:00:00" # Final date of the experiment + + nyear: 0 + nmonth: 1 + nday: 0 + #restart_rate: 1 + #restart_unit: days + hours: 0 + account: iccp + base_dir: /proj/home/ibs/iccp/jkjellsson/esm-experiments/ + + # This activates post processing for OpenIFS and NEMO + postprocessing: 1 + + lresume: False + # Name of the run you are starting from + ini_parent_exp_id: "foci3-test12" + # Where are you starting from in the above run + ini_parent_date: "18491231" + ini_string_parent_date: "1850-01-01" + # Time step in NEMO from restarts in parent run + # NOTE: ESM-Tools looks for a global NEMO restart file + # You must run nocs_combine on restarts first. + #ini_nemo_restart_steps: 262800 + #ini_parent_dir: "/scratch/usr/shkjocke/esm-nemo4/${ini_parent_exp_id}/" + # Time stamp from OASIS for restarts in parent run + oasis_date_stamp: "_18400101-18491231" + # Previous run config file + # (should be general enough) + prev_run_config_file: "/scratch/usr/shkjocke/esm-nemo4/${ini_parent_exp_id}/config/${ini_parent_exp_id}_finished_config.yaml${oasis_date_stamp}" + + #computer: + #partition_name: "" + #additional_flags: '--qos=preempt' + +focioifs: + model_dir: /proj/home/ibs/iccp/jkjellsson/esm/models/focioifs-4.0/ + +oifs: + + lresume: ${general.lresume} + # This would be for a traditional restart + # but not used here + prev_run_config_file: "${general.prev_run_config_file}" + #ini_parent_exp_id: "${general.ini_parent_exp_id}" + #ini_parent_dir: "${general.ini_parent_dir}/restart/oifs/18591231/" + #ini_parent_date: "${prev_date}" + + version: "43r3" + time_step: 1800 + with_xios: True + scenario: "piControl" + mip: "cmip6" + o3_scheme: cmip6 + pool_dir: /proj/internal_group/iccp/jkjellsson/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco95/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" + ifsdata_dir: 
"${pool_dir}/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/openifs_cy43_tco95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + # This would use initial conditions from ERA-Interim 2008-01-01 + prepifs_expid: hagw + prepifs_startdate: 20080101 + + resolution: TCO95 + levels: L91 + nproc: 288 + omp_num_threads: 1 + use_ocean_currents: 0 + sclct_switch: 2 + #initial_date_cold: "4000-01-01" + solarspectrum: True + + ini_pseudo_initial_date: "1859-01-01" + +oasis3mct: + lresume: True + pool_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05/" + # This would be for a cold start (SST = 0C,etc) + ini_parent_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05/" # "${general.ini_parent_dir}/restart/oasis3mct/" + + a2o_remap_method: "bilincub" + o2a_remap_method: "bilinear" + norestart: F + use_lucia: True + #export_mode: EXPOUT + debug_level: 1 + mct_version: "5.0" + # in FOCIOIFS all oasis input files are treated as restarts. + # in FOCI all remapping files are handled as inputs. Both approaches work. + #ini_parent_dir: "${general.ini_parent_dir}/oasis3mct/" + #oasis_date_stamp: "${general.oasis_date_stamp}" + #oasis_date_stamp: "" + +nemo: + lresume: ${general.lresume} + #ini_parent_exp_id: "${general.ini_parent_exp_id}" + #ini_parent_dir: "${general.ini_parent_dir}/nemo/" + #ini_parent_date: "${prev_date}" + input_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/nemo4_orca05/" + + resolution: "ORCA05" + version: "ORCA05_Z46_SI3_COUPLED" + # 64x51 = 3264 + # 48x48 = 2304 + # 36x36 = 1296 + # 36x24 = 864 + # 24x24 = 576 + _nx: 720 + _ny: 510 + jpni: 24 + jpnj: 18 + #nproca: ${nemo.jpni} + #nprocb: ${nemo.jpnj} + nproc: 432 + add_namelist_changes: + namelist_cfg: + namctl: + ln_timing: ".false." 
+ namrun: + nn_istate: 0 + namdyn_ldf: + # Use time-varying viscosity set by deformation rate + # (Smagorinsky) + nn_ahm_ijk_t: 32 + namtra_ldf: + # Diffusion set by local baroclinic growth rate + nn_aht_ijk_t: 20 + rn_aht_scale: 1.5 + namtra_eiv: + rn_aeiv_scale: 0.5 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05/output_for_diag/" + nproc: 20 + omp_num_threads: 1 diff --git a/runscripts/focioifs/focioifs4-piCtrl-agrif-glogin.yaml b/runscripts/focioifs/focioifs4-piCtrl-agrif-glogin.yaml new file mode 100755 index 000000000..a79138e6c --- /dev/null +++ b/runscripts/focioifs/focioifs4-piCtrl-agrif-glogin.yaml @@ -0,0 +1,130 @@ +# +# Runscript to run FOCI-OpenIFS with NWPAC10 +# on glogin +# +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + version: "agrif-4.0" + resolution: "TCO95_ORCA05" + + # How much walltime to request + compute_time: "01:00:00" + + # Time period to simulate + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "1850-02-01T00:00:00" # Final date of the experiment + + # How often to restart + nyear: 0 + nmonth: 1 + nday: 0 + hours: 0 + + # machine specific setup + account: shk00018 + base_dir: ${general.homedir}/esm/esm-experiments/ + + # Activate NEMO and OpenIFS post processing + postprocessing: 1 + +computer: + partition: "standard96" + +focioifs: + model_dir: /scratch/usr/shkjocke/model_test/focioifs-agrif-4.0/ + +oifs: + runtime_environment_changes: + choose_computer.name: + "glogin": + compiler_mpi: intel2019_impi2019_nemo4 + #compiler_mpi: intel2022_impi2021 + + version: "43r3" + with_xios: True + scenario: "piControl" + mip: "cmip6" + input_dir: "${pool_dir}/OPENIFS43R3-TCO95/" + rtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/rtables/" + vtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/vtables/" + clim_dir: "${pool_dir}/OPENIFS43R3-TCO95/95_4/" + ifsdata_dir: "/scratch/projects/shk00018/foci_input2/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + prepifs_expid: hagw + prepifs_startdate: 20080101 + resolution: TCO95 + levels: L91 + time_step: 1800 + lresume: 0 + nproc: 96 + omp_num_threads: 1 + + # tau = rho * Cd (u10m - use_ocean_current * u_current) + use_ocean_currents: 1 + + # Tell OpenIFS to expect + # coupling fields from nest + with_nest1: true + +oasis3mct: + coupling_time_step: 3600 + mct_version: "5.0" + norestart: F + use_lucia: True + debug_level: 1 + export_mode: EXPORTED + pool_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05_nwpac10b/ + ini_parent_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05_nwpac10b/ + oasis_date_stamp: "" + c2o_remap_method: "gauswgt" + +nemo: + # Note for future runs: + # We currently use a 5x grid refinement in VIKING10 and 3x time step + # It would 
make more sense to also refine time step by 5x so that Courant number is reasonable + # This can be done by simply changing the 3 to a 5 in the 2nd line of AGRIF_FixedGrids.in + # So far, we stick with 3x to keep consistency with existing runs. + input_dir: "/scratch/projects/shk00018/foci_input2/NEMO4_ORCA05_input/" + agrif_dir: "/scratch/projects/shk00018/foci_input2/agrif_nwpac10/" + nest_refinement: 3 + resolution: "ORCA05" + time_step: 1800 + version: "ORCA05_SI3_COUPLED_AGRIF" + # Remove periodic points from namcouple (720 instead of 722) + _nx: 720 + _ny: 510 + # This gives subdomains of 15x15 grid cells which is near the limit in NEMO + # More nproc should not be used + jpni: 48 + jpnj: 36 + nproc: 1728 + nest1: NWPAC10 + + #add_namelist_changes: + # namelist_cfg: + # namdyn_hpg: + # ln_hpg_sco: '.false.' + # ln_hpg_zco: '.false.' + # ln_hpg_zps: '.true.' + # #1_namelist_cfg: + # # namdyn_hpg: + # # ln_hpg_sco: '.false.' + # # ln_hpg_zco: '.false.' + # # ln_hpg_zps: '.true.' + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05_nwpac10/output_for_diag/ + # This should make 4 XIOS cores spread over 2 nodes (on HLRN standard96( + nproc: 24 + diff --git a/runscripts/focioifs/focioifs4-piCtrl-agrif-olaf.yaml b/runscripts/focioifs/focioifs4-piCtrl-agrif-olaf.yaml new file mode 100755 index 000000000..1abe2c3fa --- /dev/null +++ b/runscripts/focioifs/focioifs4-piCtrl-agrif-olaf.yaml @@ -0,0 +1,124 @@ +# +# Runscript to run FOCI-OpenIFS with NWPAC10 +# on Olaf (IBS, Korea) +# +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + projdir: !ENV ${PROJ} + + setup_name: "focioifs" + version: "agrif-4.0" + resolution: "TCO95_ORCA05" + + # How much walltime to request + compute_time: "01:00:00" + + # Time period to simulate + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "1850-02-01T00:00:00" # Final date of the experiment + + # How often to restart + nyear: 0 + nmonth: 1 + nday: 0 + hours: 0 + + account: iccp + base_dir: ${general.projdir}/esm-experiments/ + + # Activate NEMO and OpenIFS post processing + postprocessing: 1 + +computer: + partition: "normal_cpu" + +focioifs: + model_dir: /proj/home/ibs/iccp/jkjellsson/esm/models/focioifs-agrif-4.0/ + +oifs: + version: "43r3" + with_xios: True + scenario: "piControl" + mip: "cmip6" + pool_dir: /proj/internal_group/iccp/jkjellsson/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco95/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" + ifsdata_dir: "${pool_dir}/openifs_cy43_general/ifsdata/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/openifs_cy43_tco95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + prepifs_expid: hagw + prepifs_startdate: 20080101 + resolution: TCO95 + levels: L91 + time_step: 1800 + lresume: 0 + nproc: 96 + omp_num_threads: 1 + + # tau = rho * Cd (u10m - use_ocean_current * u_current) + use_ocean_currents: 1 + + # Tell OpenIFS to expect + # coupling fields from nest + with_nest1: true + +oasis3mct: + coupling_time_step: 3600 + mct_version: "5.0" + norestart: F + use_lucia: True + debug_level: 1 + export_mode: EXPORTED + pool_dir: /proj/internal_group/iccp/jkjellsson/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05_nwpac10/ + ini_parent_dir: /proj/internal_group/iccp/jkjellsson/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05_nwpac10/ + oasis_date_stamp: "" + c2o_remap_method: "gauswgt" + +nemo: + # Note for future runs: + # We currently use a 5x grid refinement in VIKING10 and 3x time step + # It would make more sense to also refine time step by 5x so that Courant number is reasonable + # This can be done by simply changing the 3 to a 5 in the 2nd line of AGRIF_FixedGrids.in + # 
So far, we stick with 3x to keep consistency with existing runs. + input_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/nemo4_orca05/" + agrif_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/agrif_nwpac10/" + nest_refinement: 3 + resolution: "ORCA05" + time_step: 1800 + version: "ORCA05_SI3_COUPLED_AGRIF" + # Remove periodic points from namcouple (720 instead of 722) + _nx: 720 + _ny: 510 + # This gives subdomains of 15x15 grid cells which is near the limit in NEMO + # More nproc should not be used + jpni: 25 + jpnj: 17 + nproc: 347 + nest1: NWPAC10 + + #add_namelist_changes: + # namelist_cfg: + # namdyn_hpg: + # ln_hpg_sco: '.false.' + # ln_hpg_zco: '.false.' + # ln_hpg_zps: '.true.' + # #1_namelist_cfg: + # # namdyn_hpg: + # # ln_hpg_sco: '.false.' + # # ln_hpg_zco: '.false.' + # # ln_hpg_zps: '.true.' + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: /proj/internal_group/iccp/jkjellsson/foci_input2/oasis3_openifs43r3-tco95_nemo4-orca05_nwpac10/output_for_diag/ + # This should make 4 XIOS cores spread over 2 nodes (on HLRN standard96( + nproc: 20 + diff --git a/runscripts/focioifs/focioifs41-piCtl-initial-olaf.yaml b/runscripts/focioifs/focioifs41-piCtl-initial-olaf.yaml new file mode 100755 index 000000000..5bcb29abb --- /dev/null +++ b/runscripts/focioifs/focioifs41-piCtl-initial-olaf.yaml @@ -0,0 +1,159 @@ +# Runscript to run FOCI-OpenIFS 4.1 on Olaf +# OpenIFS 43r3 + NEMO 4.2.2 +# Tco95_L91 + eORCA05_L75 + +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + resolution: "TCO95_eORCA05" + version: "4.0" + + compute_time: "01:00:00" + initial_date: "1850-01-01T00:00:00" # Initial exp. 
date + final_date: "1850-02-01T00:00:00" # Final date of the experiment + + nyear: 0 + nmonth: 1 + nday: 0 + #restart_rate: 1 + #restart_unit: days + hours: 0 + account: iccp + base_dir: /proj/home/ibs/iccp/jkjellsson/esm-experiments/ + + # This activates post processing for OpenIFS and NEMO + postprocessing: 1 + + lresume: False + # Name of the run you are starting from + ini_parent_exp_id: "foci3-test12" + # Where are you starting from in the above run + ini_parent_date: "18491231" + ini_string_parent_date: "1850-01-01" + # Time step in NEMO from restarts in parent run + # NOTE: ESM-Tools looks for a global NEMO restart file + # You must run nocs_combine on restarts first. + #ini_nemo_restart_steps: 262800 + #ini_parent_dir: "/scratch/usr/shkjocke/esm-nemo4/${ini_parent_exp_id}/" + # Time stamp from OASIS for restarts in parent run + oasis_date_stamp: "_18400101-18491231" + # Previous run config file + # (should be general enough) + prev_run_config_file: "/scratch/usr/shkjocke/esm-nemo4/${ini_parent_exp_id}/config/${ini_parent_exp_id}_finished_config.yaml${oasis_date_stamp}" + + #computer: + #partition_name: "" + #additional_flags: '--qos=preempt' + +focioifs: + model_dir: /proj/internal_group/iccp/jkjellsson/models/focioifs-4.1/ + +oifs: + + lresume: ${general.lresume} + # This would be for a traditional restart + # but not used here + prev_run_config_file: "${general.prev_run_config_file}" + #ini_parent_exp_id: "${general.ini_parent_exp_id}" + #ini_parent_dir: "${general.ini_parent_dir}/restart/oifs/18591231/" + #ini_parent_date: "${prev_date}" + + version: "43r3" + time_step: 1800 + with_xios: True + scenario: "piControl" + mip: "cmip6" + o3_scheme: cmip6 + pool_dir: /proj/internal_group/iccp/jkjellsson/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco95/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" + ifsdata_dir: 
"${pool_dir}/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/openifs_cy43_tco95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + # This would use initial conditions from ERA-Interim 2008-01-01 + prepifs_expid: hagw + prepifs_startdate: 20080101 + + resolution: TCO95 + levels: L91 + nproc: 288 + omp_num_threads: 1 + use_ocean_currents: 0 + sclct_switch: 2 + #initial_date_cold: "4000-01-01" + solarspectrum: True + + ini_pseudo_initial_date: "1859-01-01" + +oasis3mct: + lresume: True + pool_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/oasis3_openifs43r3-tco95_nemo4-eorca05/" + # This would be for a cold start (SST = 0C,etc) + ini_parent_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/oasis3_openifs43r3-tco95_nemo4-eorca05/" + + a2o_remap_method: "bilincub" + o2a_remap_method: "bilinear" + norestart: F + use_lucia: True + #export_mode: EXPOUT + debug_level: 1 + mct_version: "5.0" + # in FOCIOIFS all oasis input files are treated as restarts. + # in FOCI all remapping files are handled as inputs. Both approaches work. + #ini_parent_dir: "${general.ini_parent_dir}/oasis3mct/" + #oasis_date_stamp: "${general.oasis_date_stamp}" + #oasis_date_stamp: "" + +nemo: + lresume: ${general.lresume} + #ini_parent_exp_id: "${general.ini_parent_exp_id}" + #ini_parent_dir: "${general.ini_parent_dir}/nemo/" + #ini_parent_date: "${prev_date}" + input_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/NEMO4_eORCA05/L75/" + + resolution: "eORCA05" + version: "eORCA05_Z75_SI3_COUPLED" + # 64x51 = 3264 + # 48x48 = 2304 + # 36x36 = 1296 + # 36x24 = 864 + # 24x24 = 576 + _nx: 720 + _ny: 603 + jpni: 24 + jpnj: 18 + #nproca: ${nemo.jpni} + #nprocb: ${nemo.jpnj} + nproc: 432 + add_namelist_changes: + namelist_cfg: + namctl: + ln_timing: ".false." 
+ namrun: + nn_istate: 0 + namdyn_ldf: + # Use time-varying viscosity set by deformation rate + # (Smagorinsky) + nn_ahm_ijk_t: 32 + namtra_ldf: + # Diffusion set by local baroclinic growth rate + nn_aht_ijk_t: 20 + rn_aht_scale: 1.5 + namtra_eiv: + rn_aeiv_scale: 0.5 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/proj/internal_group/iccp/jkjellsson/foci_input2/oasis3_openifs43r3-tco95_nemo4-eorca05/output_for_diag/" + nproc: 8 + omp_num_threads: 1 diff --git a/runscripts/focioifs/highresmip/focioifs3.0.1-sp1950-agrif-HR-glogin-phase3.yaml b/runscripts/focioifs/highresmip/focioifs3.0.1-sp1950-agrif-HR-glogin-phase3.yaml new file mode 100755 index 000000000..63d7eb647 --- /dev/null +++ b/runscripts/focioifs/highresmip/focioifs3.0.1-sp1950-agrif-HR-glogin-phase3.yaml @@ -0,0 +1,205 @@ +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + workdir: !ENV ${WORK} + + setup_name: "focioifs" + version: "agrif-3.0" + resolution: "TCO319_ORCA05" + + compute_time: "12:00:00" + initial_date: "2039-01-01T00:00:00" # Initial exp. date + final_date: "2050-01-01T00:00:00" # Final date of the experiment + + nyear: 1 + nmonth: 0 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + + # machine specific setup + account: shk00057 + base_dir: ${general.homedir}/esm/esm-experiments/ + + postprocessing: 1 + + lresume: True + # Name of the run you are starting from + ini_parent_exp_id: "FOCI_GJK038" + # Where are you starting from in the above run + #ini_parent_date: "39991231" + ini_string_parent_date: "2039-01-01" + # Time step in NEMO from restarts in parent run + # NOTE: ESM-Tools looks for a global NEMO restart file + # You must run nocs_combine on restarts first. 
+ ini_nemo_restart_steps: 1560336 # 2038-12-31 + ini_parent_dir: "${homedir}/esm/esm-experiments/${ini_parent_exp_id}/" + # Time stamp from OASIS for restarts in parent run + oasis_date_stamp: "_20380101-20381231" + +computer: + partition: "standard96" + +focioifs: + model_dir: ${general.workdir}/models_p3/focioifs-agrif-3.0/ + #nemo: + # remove_choose_general.version.agrif-3.0.add_namelist_changes.namelist_cfg.namtra_ldf: + # - rn_aeiv_scale + # choose_general.version: + # "agrif-3.0": + # remove_add_namelist_changes.namelist_cfg.namtra_ldf: + # - rn_aeiv_scale + # remove_namelist_changes.namelist_cfg.namtra_ldf: + # - rn_aeiv_scale + +oifs: + + lresume: ${general.lresume} + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_date: "${prev_date}" + + # When did OpenIFS start in the parent run + ini_parent_dir: "${general.ini_parent_dir}/restart/oifs/" + ini_string_parent_date: "2039-01-01" + # Config file of last restart + prev_run_config_file: "${ini_parent_dir}/../../config/${ini_parent_exp_id}_finished_config.yaml${general.oasis_date_stamp}" + # Number of days in OpenIFS restarts (usually 7305 unless leap year) + start_ndays_source: 730 + # Start date of last leg (for annual restarts, one year before the start date) + pseudo_initial_date: "2038-01-01" + + version: "43r3" + with_xios: True + scenario: "historical" + mip: "cmip6" + pool_dir: "/scratch/projects/shk00018/foci_input2/" + input_dir: "${pool_dir}/openifs_cy43_tco319/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco319/319_4/" + ifsdata_dir: "/scratch/projects/shk00018/foci_input2/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/openifs_cy43_tco319/1950010100/" + icmcl_file: "ICMCLaazgINIT" + prepifs_expid: aazg + prepifs_startdate: 19500101 + 
resolution: TCO319 + levels: L137 + time_step: 900 + nproc: 576 + omp_num_threads: 4 + use_ocean_currents: 1 + + # use CMIP6 ozone + o3_scheme: cmip6 + + # new solar spectrum + solarspectrum: True + + with_nest1: true + add_namelist_changes: + fort.4: + NAERAD: + NAERANT_SCALE: 1 # turn on aerosol scaling + NCMIPFIXYR: 1950 # Fix GHG to 1950 + LSOLAR1950: ".true." # solar cycle for 1950 as HrMIP + NAMCLDP: + # diffusion coeff for cloud edges + RCLDIFF: 5.e-6 # default 3e-6 + # for convection, RCLDIFF=RCLDIFF*RCLDIFF_CONVI + #RCLDIFF_CONVI: 7 # default 7.0 + NAMMCC: + RALBSEAD_NML: 0.045 + RALBSCALE_AR: 0.8 + +oasis3mct: + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/restart/oasis3mct/" + + norestart: F + use_lucia: True + c2o_remap_method: "gauswgt" + coupling_time_step: 3600 + mct_version: "5.0" + debug_level: 0 + export_mode: EXPORTED + pool_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca05_viking10/ + oasis_date_stamp: "" + +nemo: + lresume: ${general.lresume} + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/restart/nemo/" + ini_parent_date: ${prev_date} + + agrif_dir: /scratch/projects/shk00018/foci_input2/agrif_viking10_jk/ + input_dir: /scratch/usr/shkifmsw/foci_input2/NEMO_ORCA05/input/ORCA05/ + resolution: "ORCA05" + #time_step: 600 + version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + jpni: 36 + jpnj: 24 + nproca: 36 + nprocb: 24 + nproc: 864 + nest1: viking10 + + add_namelist_changes: + namelist_cfg: + #namsbc: + # nn_fsbc: 1 + namsbc_rnf: + rn_rfact: 1.00 + # Malins visc and diff changes + namdyn_ldf: + rn_ahm_0_blp: -1.709e12 + namtra_ldf: + rn_aht_0: 600 + rn_aeiv_scale: "remove_from_namelist" + 1_namelist_cfg: + #namsbc: + # nn_fsbc: 1 + namlbc: + rn_shlat: 0 + namsbc_rnf: + rn_rfact: 1.0 + namdyn_ldf: + rn_ahm_0_blp: -1.37e10 + namtra_ldf: + rn_aht_0: 120 + + 
#remove_nemo.choose_general.version.agrif-3.0.add_namelist_changes.namelist_cfg.namtra_ldf: + # - rn_aeiv_scale + #remove_nemo.namelist_changes.namelist_cfg.namtra_ldf: + # - rn_aeiv_scale + + #remove_choose_general.version.agrif-3.0.namelist_changes.namelist_cfg.namtra_ldf: + # - rn_aeiv_scale + + #choose_general.version: + # "agrif-3.0": + # remove_add_namelist_changes.namelist_cfg.namtra_ldf: + # - rn_aeiv_scale + # #remove_namelist_changes.namelist_cfg.namtra_ldf: + # # - rn_aeiv_scale + + #remove_namelist_changes: + # namelist_cfg: + # namtra_ldf: + # rn_aeiv_scale: 0.5 + #remove_namelist_changes.namelist_cfg.namtra_ldf: + # - rn_aeiv_scale + #remove_add_namelist_changes.namelist_cfg.namtra_ldf: + # - rn_aeiv_scale + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca05_viking10/output_for_prod/ + nproc: 6 + omp_num_threads: 48 diff --git a/runscripts/focioifs/highresmip/focioifs3.0.1-sp1950-agrif-HR-glogin.yaml b/runscripts/focioifs/highresmip/focioifs3.0.1-sp1950-agrif-HR-glogin.yaml new file mode 100755 index 000000000..f7540b1da --- /dev/null +++ b/runscripts/focioifs/highresmip/focioifs3.0.1-sp1950-agrif-HR-glogin.yaml @@ -0,0 +1,141 @@ +# example to use the a small number of nodes / CPUs that +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + version: "agrif" + resolution: "TCO319_ORCA05" + + compute_time: "12:00:00" + initial_date: "1950-01-01T00:00:00" # Initial exp. 
date + final_date: "2050-01-01T00:00:00" # Final date of the experiment + + nyear: 1 + nmonth: 0 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + + # machine specific setup + account: shk00057 + base_dir: ${general.homedir}/esm/esm-experiments/ + + postprocessing: 1 + +computer: + partition: "standard96" + +focioifs: + model_dir: ${general.homedir}/esm/models/focioifs-agrif-3.0/ + version: "agrif-3.0" + +oifs: + runtime_environment_changes: + choose_computer.name: + "glogin": + compiler_mpi: intel2019_impi2019 + + version: "43r3" + with_xios: True + scenario: "historical" + mip: "cmip6" + pool_dir: "/scratch/projects/shk00018/foci_input2/" + input_dir: "${pool_dir}/openifs_cy43_tco319/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco319/319_4/" + ifsdata_dir: "/scratch/projects/shk00018/foci_input2/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/openifs_cy43_tco319/1950010100/" + icmcl_file: "ICMCLaazgINIT" + prepifs_expid: aazg + prepifs_startdate: 19500101 + resolution: TCO319 + levels: L137 + time_step: 900 + lresume: 0 + nproc: 576 + omp_num_threads: 4 + use_ocean_currents: 1 + + # use CMIP6 ozone + o3_scheme: cmip6 + + # new solar spectrum + solarspectrum: True + + with_nest1: true + add_namelist_changes: + fort.4: + NAERAD: + NAERANT_SCALE: 1 # turn on aerosol scaling + NCMIPFIXYR: 1950 # Fix GHG to 1950 + LSOLAR1950: ".true." 
# solar cycle for 1950 as HrMIP + NAMCLDP: + # diffusion coeff for cloud edges + RCLDIFF: 5.e-6 # default 3e-6 + # for convection, RCLDIFF=RCLDIFF*RCLDIFF_CONVI + #RCLDIFF_CONVI: 7 # default 7.0 + NAMMCC: + RALBSEAD_NML: 0.045 + RALBSCALE_AR: 0.8 + +oasis3mct: + norestart: F + use_lucia: True + c2o_remap_method: "gauswgt" + coupling_time_step: 3600 + mct_version: "5.0" + debug_level: 0 + export_mode: EXPORTED + pool_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca05_viking10/ + ini_parent_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca05_viking10/ + oasis_date_stamp: "" + +nemo: + agrif_dir: /scratch/projects/shk00018/foci_input2/agrif_viking10_jk/ + resolution: "ORCA05" + #time_step: 600 + version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + jpni: 36 + jpnj: 24 + nproca: 36 + nprocb: 24 + nproc: 864 + nest1: viking10 + + add_namelist_changes: + namelist_cfg: + #namsbc: + # nn_fsbc: 1 + namsbc_rnf: + rn_rfact: 1.00 + # Malins visc and diff changes + namdyn_ldf: + rn_ahm_0_blp: -1.709e12 + namtra_ldf: + rn_aht_0: 600 + 1_namelist_cfg: + #namsbc: + # nn_fsbc: 1 + namlbc: + rn_shlat: 0 + namsbc_rnf: + rn_rfact: 1.0 + namdyn_ldf: + rn_ahm_0_blp: -1.37e10 + namtra_ldf: + rn_aht_0: 120 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca05_viking10/output_for_prod/ + nproc: 6 + omp_num_threads: 48 diff --git a/runscripts/focioifs/highresmip/focioifs3.0.1-sp1950-agrif-glogin.yaml b/runscripts/focioifs/highresmip/focioifs3.0.1-sp1950-agrif-glogin.yaml new file mode 100755 index 000000000..49197663b --- /dev/null +++ b/runscripts/focioifs/highresmip/focioifs3.0.1-sp1950-agrif-glogin.yaml @@ -0,0 +1,158 @@ +# example to use the a small number of nodes / CPUs that +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + version: "agrif-3.0" + resolution: "TCO95_ORCA05" + + compute_time: "04:00:00" + 
initial_date: "1950-01-01T00:00:00" # Initial exp. date + final_date: "2050-01-01T00:00:00" # Final date of the experiment + + nyear: 1 + nmonth: 0 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + + # machine specific setup + account: shk00018 + base_dir: ${general.homedir}/esm/esm-experiments/ + + postprocessing: 1 + +computer: + partition: "standard96" + +focioifs: + model_dir: ${general.homedir}/esm/models/focioifs-agrif-3.0/ + #version: "agrif-3.0" + +oifs: + #runtime_environment_changes: + # choose_computer.name: + # "glogin": + # compiler_mpi: intel2019_impi2019 + + version: "43r3" + with_xios: True + scenario: "historical" + mip: "cmip6" + input_dir: "${pool_dir}/OPENIFS43R3-TCO95/" + rtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/rtables/" + vtables_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/vtables/" + clim_dir: "${pool_dir}/OPENIFS43R3-TCO95/95_4/" + ifsdata_dir: "/scratch/projects/shk00018/foci_input2/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/OPENIFS_CY43_GENERAL/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + prepifs_expid: hagw + prepifs_startdate: 20080101 + resolution: TCO95 + levels: L91 + time_step: 1800 + lresume: 0 + nproc: 144 + omp_num_threads: 2 + use_ocean_currents: 1 + + # use CMIP6 ozone + o3_scheme: cmip6 + + # new solar spectrum + solarspectrum: True + + with_nest1: true + add_namelist_changes: + fort.4: + NAERAD: + NAERANT_SCALE: 1 # turn on aerosol scaling + NCMIPFIXYR: 1950 # Fix GHG to 1950 + LSOLAR1950: ".true." 
# solar cycle for 1950 as HrMIP + NAMCLDP: + # diffusion coeff for cloud edges + RCLDIFF: 5.e-6 # default 3e-6 + # for convection, RCLDIFF=RCLDIFF*RCLDIFF_CONVI + #RCLDIFF_CONVI: 7 # default 7.0 + NAMMCC: + RALBSEAD_NML: 0.045 + RALBSCALE_AR: 0.8 + +oasis3mct: + #o2a_remap_method: "conserv" + #a2o_remap_method: "conserv" + #agr2a_remap_method: "conserv" + c2o_remap_method: "gauswgt" + coupling_time_step: 3600 + mct_version: "5.0" + norestart: F + use_lucia: True + debug_level: 1 + #export_mode: EXPOUT + pool_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05_viking10/ + ini_parent_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05_viking10/ + oasis_date_stamp: "" + +nemo: + # Note for future runs: + # We currently use a 5x grid refinement in VIKING10 and 3x time step + # It would make more sense to also refine time step by 5x so that Courant number is reasonable + # This can be done by simply changing the 3 to a 5 in the 2nd line of AGRIF_FixedGrids.in + # So far, we stick with 3x to keep consistency with existing runs. 
+ agrif_dir: /scratch/projects/shk00018/foci_input2/agrif_viking10_jk/ + resolution: "ORCA05" + #time_step: 600 + version: "ORCA05_LIM2_KCM_AGRIF_OASISMCT4" + # This gives subdomains of 15x15 grid cells which is near the limit in NEMO + # More nproc should not be used + jpni: 48 + jpnj: 36 + nproc: 1728 + nest1: viking10 + + add_namelist_changes: + namelist_cfg: + # It may be necessary to set fsbc=1 to avoid + # breaking CFL for LIM2 + #namsbc: + # nn_fsbc: 1 + namsbc_rnf: + rn_rfact: 1.00 + # Malins visc and diff changes + namdyn_ldf: + rn_ahm_0_blp: -1.709e12 + namtra_ldf: + rn_aht_0: 600 + 1_namelist_cfg: + #namsbc: + # nn_fsbc: 1 + namlbc: + rn_shlat: 0 + namsbc_rnf: + rn_rfact: 1.0 + # Computed as -1.709e12 / 5^3 + # Note: -1.709e12 is applied at the largest ORCA05 cell, at equator + # but coeff is actually -1.267e12 where VIKING10 starts + # So ahm should be -1.267e12 / 5^3 = -1.013e10 + # We should scale this by 0.74. + namdyn_ldf: + rn_ahm_0_blp: -1.37e10 + # Computed as 600 / 5 + # Similarly as above, this should be 89 + namtra_ldf: + rn_aht_0: 120 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: /scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05_viking10/output_for_prod/ + # This should make 4 XIOS cores spread over 2 nodes (on HLRN standard96) + nproc: 4 + omp_num_threads: 48 diff --git a/runscripts/focioifs/highresmip/focioifs3.0.1-spinup1950-initial-glogin.yaml b/runscripts/focioifs/highresmip/focioifs3.0.1-spinup1950-initial-glogin.yaml new file mode 100755 index 000000000..241bca09b --- /dev/null +++ b/runscripts/focioifs/highresmip/focioifs3.0.1-spinup1950-initial-glogin.yaml @@ -0,0 +1,135 @@ +# +# Runscript for HighResMIP simulations with FOCI-OpenIFS 3.0.1 (no AGRIF) +# Run on glogin +# +# We use +# +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + resolution: "TCO95_ORCA05" + version: "3.0.1" + + compute_time: "04:00:00" + initial_date: "1950-01-01T00:00:00" # 
Initial exp. date + final_date: "2050-01-01T00:00:00" # Final date of the experiment + + nyear: 1 + nmonth: 0 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + account: shk00018 + base_dir: /scratch/usr/shkjocke/esm-experiments/ + + # This activates post processing for OpenIFS and NEMO + postprocessing: 1 + + lresume: False + +computer: + partition: "standard96" + #additional_flags: '--qos=preempt' + +focioifs: + # locally modified model version + # normally, v2.2. uses Smagorinsky, but we turn this off for AGRIF + # so for comparison, we should turn it off here as well + # Its an easy fix. Simply go to the cpp file for NEMO, remove key_dynldf_smag + # and replace key_dynldf_c3d by key_dynldf_c2d. Then clean and recompile + model_dir: /scratch/usr/shkjocke/model_test/focioifs-3.0.1/ + +oifs: + lresume: ${general.lresume} + version: "43r3" + with_xios: True + scenario: "historical" + mip: "cmip6" + pool_dir: /scratch/projects/shk00018/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco95/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco95/95_4/" + ifsdata_dir: "${pool_dir}/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/OPENIFS43R3-TCO95/2008010100/" + icmcl_file: "ICMCLhagwINIT" + prepifs_expid: hagw + prepifs_startdate: 20080101 + resolution: TCO95 + levels: L91 + nproc: 288 # 360x4 / 96 = 15 nodes + time_step: 1800 + omp_num_threads: 1 + use_ocean_currents: 1 + + # accidentally had 0 for Tco319-VIKING10 + # so we have to stick with 0 here as well, + # even if it will cause big problems in Southern Ocean + sclct_switch: 0 + + o3_scheme: cmip6 # Use CMIP6 prescribed ozone + solarspectrum: True # Use new Coddington spectrum + + add_namelist_changes: + fort.4: + NAERAD: + # Solar rad as HighResMIP + 
LSOLAR1950: ".true." + # Fix GHG at 1950 + NCMIPFIXYR: 1950 + # Aerosol scaling + NAERANT_SCALE: 1 + #NAMCLDP: + # # diffusion coeff for cloud edges + # RCLDIFF: 5.e-6 # default 3e-6 + # # for convection, RCLDIFF=RCLDIFF*RCLDIFF_CONVI + # #RCLDIFF_CONVI: 7.0 # default 7.0 + #NAMMCC: + # RALBSEAD_NML: 0.045 + # RALBSCALE_AR: 0.8 + +oasis3mct: + lresume: True + coupling_time_step: 3600 + pool_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/" + ini_parent_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/" + mct_version: "5.0" + norestart: F + use_lucia: True + #export_mode: EXPORTED + debug_level: 0 + +nemo: + lresume: ${general.lresume} + resolution: "ORCA05" + jpni: 24 + jpnj: 18 + nproc: 432 + add_namelist_changes: + namelist_cfg: + #namrun: + # ln_single_prec_send: ".true." + # We should scale GM by 0.5, but I forgot to do this for the + # Tco319-VIKING10 run. So to make fair comparisons we should + # not do it for Tco95-ORCA05 either + # But for future runs, definitely set this to 0.5 + # (it should be done by default in focioifs.yaml anyway) + namtra_ldf: + rn_aeiv_scale: 1 + namsbc_rnf: + rn_rfact: 0.984 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco95_orca05/output_for_prod/" + nproc: 20 + #omp_num_threads: 48 diff --git a/runscripts/focioifs/orca12_spinup/focioifs-piCtl-orca12-HRA-day31-restart-blogin.yaml b/runscripts/focioifs/orca12_spinup/focioifs-piCtl-orca12-HRA-day31-restart-blogin.yaml new file mode 100755 index 000000000..862ac87ff --- /dev/null +++ b/runscripts/focioifs/orca12_spinup/focioifs-piCtl-orca12-HRA-day31-restart-blogin.yaml @@ -0,0 +1,143 @@ +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: "focioifs" + version: "2.1-O12" + resolution: "TCO319_ORCA12" + + compute_time: "12:00:00" + initial_date: "1950-03-01T00:00:00" # Initial exp. 
date + final_date: "1951-01-01T00:00:00" # Final date of the experiment + + nyear: 0 + nmonth: 1 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + + postprocessing: True + + # machine specific setup + account: shk00018 + base_dir: ${general.homedir}/esm/esm-experiments/ + + lresume: True + ini_parent_exp_id: "FOCI_BJK015" + ini_string_parent_date: "1950-02-01" + ini_nemo_restart_steps: 3168 + ini_parent_dir: "/scratch/usr/shkjocke/esm-experiments/${ini_parent_exp_id}/restart/" + oasis_date_stamp: "_19500121-19500131" + # with the eternal restart feature, the complete yaml of the previous run needs to be available: + prev_run_config_file: "${ini_parent_dir}/../../config/${ini_parent_exp_id}_finished_config.yaml" #${oasis_date_stamp}" + +focioifs: + model_dir: ${general.homedir}/esm/models/focioifs-2.1-O12 + +oifs: + # 2023-07-20: Intel license issue forced us to upgrade to 2022 version + # but here we force ORCA12 to stick with 2019 + # It does not matter that we set it for oifs, + # the change will propagate to other components + runtime_environment_changes: + choose_computer.name: + glogin: + compiler_mpi: "intel2019_impi2019_nemo4" + + lresume: ${general.lresume} + prev_run_config_file: "${general.prev_run_config_file}" + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/oifs/" + ini_parent_date: "${prev_date}" + + version: "43r3" + with_xios: True + scenario: "historical" + mip: "cmip6" + pool_dir: /scratch/projects/shk00018/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco319/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco319/319_4/" + ifsdata_dir: "${pool_dir}/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/openifs_cy43_tco319/2008010100/" + 
icmcl_file: "ICMCLaazgINIT" + prepifs_expid: aazg + prepifs_startdate: 19500101 + resolution: TCO319 + levels: L137 + time_step: 900 + nproc: 288 + omp_num_threads: 4 + use_ocean_currents: 0 + perturb: 0 + sclct_switch: 2 + + add_namelist_changes: + fort.4: + NAERAD: + NCMIPFIXYR: 1950 + LSOLAR1950: ".true." + +oasis3mct: + lresume: ${general.lresume} + pool_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca12/" + ini_parent_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca12/" + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_date: "${prev_date}" + + #norestart: T + use_lucia: True + #export_mode: EXPOUT + debug_level: 5 + coupling_time_step: 3600 + #oasis_date_stamp: "" + +nemo: + lresume: ${general.lresume} + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/nemo/" + ini_parent_date: "${prev_date}" + + resolution: "ORCA12" + jpni: 80 + jpnj: 48 + nproc: 2744 + time_step: 300 #120 #60 #300 + input_dir: "/scratch/projects/shk00018/ORCA12/" + add_namelist_changes: + namelist_cfg: + namrun: + ln_single_prec_send: ".true." + namsbc: + nn_fsbc: 1 + namsbc_rnf: + rn_rfact: 1.0 + namtra_ldf: + rn_aht_0: 125 + rn_aht_m: 125 + namdyn_ldf: + rn_ahm_0_blp: -1.25e10 + rn_ahm_m_blp: -1e11 + namtrd: + ln_dyn_trd: '.true.' + ln_tra_trd: '.true.' 
+ namelist_ice_cfg: + namicedyn: + pstar: 1.5e4 + ahi0: 200 + telast: 120 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca12/output_5dUVTS_1dPLev/" + #xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca12/output_1dUVTS_1dPLev/" + nproc: 480 + #omp_num_threads: 12 diff --git a/runscripts/focioifs/orca12_spinup/focioifs-piCtl-orca12-HRA-phase3-restart-glogin.yaml b/runscripts/focioifs/orca12_spinup/focioifs-piCtl-orca12-HRA-phase3-restart-glogin.yaml new file mode 100755 index 000000000..d6050d764 --- /dev/null +++ b/runscripts/focioifs/orca12_spinup/focioifs-piCtl-orca12-HRA-phase3-restart-glogin.yaml @@ -0,0 +1,139 @@ +general: + use_venv: False + verbose: False + homedir: !ENV ${HOME} + workdir: !ENV ${WORK} + + setup_name: "focioifs" + version: "2.1-O12" + resolution: "TCO319_ORCA12" + + compute_time: "12:00:00" + initial_date: "1960-08-01T00:00:00" # Initial exp. date + final_date: "1970-01-01T00:00:00" # Final date of the experiment + + nyear: 0 + nmonth: 1 + nday: 0 + restart_rate: 1 + restart_unit: days + hours: 0 + + postprocessing: True + + # machine specific setup + account: shk00018 + base_dir: ${general.homedir}/esm/esm-experiments/ + + lresume: True + ini_parent_exp_id: "FOCI_GJK035" + ini_string_parent_date: "1960-08-01" + ini_nemo_restart_steps: 1095264 + ini_parent_dir: "/scratch/usr/shkjocke/esm-experiments/${ini_parent_exp_id}/restart/" + oasis_date_stamp: "_19600701-19600731" + # with the eternal restart feature, the complete yaml of the previous run needs to be available: + prev_run_config_file: "${ini_parent_dir}/../../config/${ini_parent_exp_id}_finished_config.yaml" #${oasis_date_stamp}" + +computer: + partition: "standard96" + +focioifs: + model_dir: ${general.workdir}/models_p3/focioifs-2.1-O12 + +oifs: + lresume: ${general.lresume} + prev_run_config_file: "${general.prev_run_config_file}" + ini_parent_exp_id: 
"${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/oifs/" + ini_parent_date: "${prev_date}" + ini_pseudo_initial_date: "1960-07-01" + + version: "43r3" + with_xios: True + scenario: "historical" + mip: "cmip6" + pool_dir: /scratch/projects/shk00018/foci_input2/ + input_dir: "${pool_dir}/openifs_cy43_tco319/" + rtables_dir: "${pool_dir}/openifs_cy43_general/rtables/" + vtables_dir: "${pool_dir}/openifs_cy43_general/vtables/" + clim_dir: "${pool_dir}/openifs_cy43_tco319/319_4/" + ifsdata_dir: "${pool_dir}/openifs_cy43_general/ifsdata/" + cmip5_data_dir: "${pool_dir}/openifs_cy43_general/cmip5_ghg/" + cmip6_data_dir: "${pool_dir}/openifs_cy43_general/cmip6-data/" + icmcl_dir: "${pool_dir}/openifs_cy43_tco319/2008010100/" + icmcl_file: "ICMCLaazgINIT" + prepifs_expid: aazg + prepifs_startdate: 19500101 + resolution: TCO319 + levels: L137 + time_step: 900 + nproc: 288 + omp_num_threads: 4 + use_ocean_currents: 0 + perturb: 0 + sclct_switch: 2 + + add_namelist_changes: + fort.4: + NAERAD: + NCMIPFIXYR: 1950 + LSOLAR1950: ".true." + +oasis3mct: + lresume: ${general.lresume} + pool_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca12/" + ini_parent_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca12/" + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_date: "${prev_date}" + + #norestart: T + use_lucia: True + #export_mode: EXPOUT + debug_level: 0 + coupling_time_step: 3600 + #oasis_date_stamp: "" + +nemo: + lresume: ${general.lresume} + ini_parent_exp_id: "${general.ini_parent_exp_id}" + ini_parent_dir: "${general.ini_parent_dir}/nemo/" + ini_parent_date: "${prev_date}" + + resolution: "ORCA12" + jpni: 80 + jpnj: 48 + nproc: 2744 + time_step: 300 #120 #60 #300 + input_dir: "/scratch/projects/shk00018/ORCA12/" + add_namelist_changes: + namelist_cfg: + namrun: + ln_single_prec_send: ".true." 
+ namsbc: + nn_fsbc: 1 + namsbc_rnf: + rn_rfact: 1.0 + namtra_ldf: + rn_aht_0: 125 + rn_aht_m: 125 + namdyn_ldf: + rn_ahm_0_blp: -1.25e10 + rn_ahm_m_blp: -1e11 + namtrd: + ln_dyn_trd: '.true.' + ln_tra_trd: '.true.' + namelist_ice_cfg: + namicedyn: + pstar: 1.5e4 + ahi0: 200 + telast: 120 + +rnfmap: + nproc: 1 + +xios: + with_model: focioifs + xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca12/output_5dUVTS_1dPLev/" + #xml_dir: "/scratch/projects/shk00018/foci_input2/oasis3_openifs43r3-tco319_orca12/output_1dUVTS_1dPLev/" + nproc: 480 + #omp_num_threads: 12 diff --git a/runscripts/nemo/nemo-eORCA025.z75.SI3.yaml b/runscripts/nemo/nemo-eORCA025.z75.SI3.yaml new file mode 100755 index 000000000..7e16d780d --- /dev/null +++ b/runscripts/nemo/nemo-eORCA025.z75.SI3.yaml @@ -0,0 +1,25 @@ +general: + use_venv: false + homedir: !ENV ${HOME} + setup_name: nemo + compute_time: "02:15:00" + account: shkifmsw + initial_date: "1980-01-01T00:00:00" # Initial exp. date + final_date: "1983-01-01T00:00:00" # Final date of the experiment + leapyear: False + nyear: 1 + base_dir: ${homedir}/esm/esm-experiments/ + model_dir: ${homedir}/esm/models/nemo-ORCA025.z75.SI3 + +nemo: + version: eORCA025.z75.SI3 + resolution: "eORCA025" + leapyear: False + nproc: 480 + jpni: 24 + jpnj: 20 + # jpni and jpnj are 0 by default, i.e. automatic distribution on CPUs + +xios: + model_dir: ${general.homedir}/esm/models/nemo-ORCA025.z75.SI3/xios + nproc: 24 diff --git a/runscripts/nemo/nemo4-eORCA025_Z75_SI3_JRA55.yaml b/runscripts/nemo/nemo4-eORCA025_Z75_SI3_JRA55.yaml new file mode 100755 index 000000000..4ac089228 --- /dev/null +++ b/runscripts/nemo/nemo4-eORCA025_Z75_SI3_JRA55.yaml @@ -0,0 +1,28 @@ +computer: + partition: "standard96" +general: + use_venv: false + homedir: !ENV ${HOME} + setup_name: nemo + compute_time: "04:59:00" + account: shkifmsw + initial_date: "1980-01-01T00:00:00" # Initial exp. 
date + final_date: "1983-01-01T00:00:00" # Final date of the experiment + leapyear: False + nyear: 1 + base_dir: ${homedir}/esm/esm-experiments/ + model_dir: ${homedir}/esm/models/nemo-eORCA025_Z75_SI3_JRA55 + +nemo: + version: eORCA025_Z75_SI3_JRA55 + resolution: "eORCA025" + leapyear: False + nproc: 1416 + # jpni and jpnj are 0 by default, i.e. automatic distribution on CPUs + jpni: 0 + jpnj: 0 + #time_step: 600 + +xios: + model_dir: ${general.homedir}/esm/models/nemo-eORCA025.z75.SI3/xios + nproc: 24 diff --git a/runscripts/oifs/oifs-43r3-climMEMODEL-nesh.yaml b/runscripts/oifs/oifs-43r3-climMEMODEL-nesh.yaml new file mode 100644 index 000000000..4c3775c4f --- /dev/null +++ b/runscripts/oifs/oifs-43r3-climMEMODEL-nesh.yaml @@ -0,0 +1,179 @@ +general: + # Some general settings + use_venv: False + verbose: False + homedir: !ENV ${HOME} + + setup_name: oifs + + # Wall time for the job + # Should be longer than you think + compute_time: "00:45:00" + # What date do we start from + initial_date: "1979-01-01T00:00:00" + # When do we finish + final_date: "1979-07-01T00:00:00" + + # How often do we restart + # nyear:0, nmonth: 0, nday:5 means we restart each 5 days + # nyear:0, nmonth: 1, nday:0 means we restart each month + nyear: 0 + nmonth: 1 + nday: 0 + + # Where is your model code (you dont need to change it) + model_dir: ${homedir}/esm/models/oifs-43r3-v2/ + # Where do you want to store model experiments + # This should be on WORK or a link to WORK + base_dir: ${homedir}/esm/esm-experiments/ + +oifs: + # General settings we dont need to change + version: 43r3 + with_xios: True + # model time step + time_step: 1800 + + # where to find input data + input_dir: /gxfs_work1/geomar/smomw352/foci_input2/openifs_cy43_tco95/ + rtables_dir: ${pool_dir}/OPENIFS_CY43_GENERAL/rtables/ + vtables_dir: ${pool_dir}/OPENIFS_CY43_GENERAL/vtables/ + clim_dir: ${pool_dir}/OPENIFS43R3-TCO95/95_4/ + ifsdata_dir: /gxfs_work1/geomar/smomw352/foci_input2/openifs_cy43_general/ifsdata/ + 
cmip6_data_dir: /gxfs_work1/geomar/smomw352/foci_input2/openifs_cy43_general/cmip6-data/ + icmcl_dir: ${pool_dir}/OPENIFS43R3-TCO95-AMIP/ + icmcl_file: ICMCLhagwINIT_AMIP-1-1-6_19790101_20171231 + prepifs_expid: hagw + prepifs_startdate: 20080101 + + # how many MPI tasks to parallelise on + nproc: 128 + # how many OpenMP threads to use for each task + # WARNING: More than 1 not tested on NESH + # + # cores = nproc x omp_num_threads + omp_num_threads: 1 + + # run post processing after each job + # * compress output to netCDF4 classic zip + # * compute annual mean (if job is at least 1 year) + post_processing: True + + # activate PEXTRA to run model diagnostics, e.g. + # temperature tendency due to dynamics, physics etc + # Useful for some studies, but very expensive + pextra: 0 + + # how to restart the model automatically + # after each job + # Dont use anything else than eternal, unless you are crazy + restart_type: eternal + + # Which MIP are we following. Probably CMIP6 + mip: cmip6 + # What scenario should we take CO2, solar constant + # etc from? + scenario: historical + # How should we prescribe O3 concentrations + o3_scheme: cmip6 + # What resolution are we using + # (Note: If you change this, you also need to change + # the input data above) + resolution: TCO95 + levels: L91 + # Are you restarting or starting from scratch + lresume: 0 + # Should we perturb initial SSTs + # (useful to make ensembles) + # Perturbation is done by adding 0.1K random noise + # to skin temperature at first time step + perturb: 1 + # If perturb: 1, then set a unique ensemble ID here + ensemble_id: 1 + # Re-distribute SW radiation to reduce stratospheric bias + # See Coddington (2016) https://doi.org/10.1175/BAMS-D-14-00265.1 + solarspectrum: 1 + + # Here we can change parameters for the model + add_namelist_changes: + fort.4: + NAERAD: + # This scales anthropogenic aerosols + # over time so that aerosol conc increases + # over e.g. south-east Asia and decreases + # over Europe. 
+ NAERANT_SCALE: 1 + # Greenhouse-gas concentrations are taken + # for the year of the run. + # But you can force the model to take + # gas conc from a specific year here. + # For example, 1850 or 2100. + NCMIPFIXYR: 0 + # You can force the model to quadruple CO2 + # concentration from NCMIPFIXYR above. + LA4xCO2: ".false." + NAMCLDP: + # Diffusion coeff for cloud edges + # This has been tuned to account for the + # excessive cloud cover in IFS + # (eq. 7.47, IFS 43r3 doc, part 4) + RCLDIFF: 5.e-6 # default 3e-6 + # Diffuson for convective clouds + # RCLDIFF=RCLDIFF*RCLDIFF_CONVI + RCLDIFF_CONVI: 7 + # Fall speed of ice particles + # (sec 7.2.4i, IFS 43r3 doc, part 4) + RVICE: 0.13 + # Critical liquid water concentration needed + # to form rain droplets over sea + # Increasing it makes it harder to form precipitation + # (eq 7.53, 7.54, IFS 43r3 doc, part 4) + RCLCRIT_SEA: 2.5E-4 + NAMCUMF: + # Conversion rate of cloud liquid water to precip + RPRCON: 1.4E-3 + # Entrainment rate in convective clouds + # (eq 6.7, IFS 43r3 doc, part 4) + ENTRORG: 1.75E-3 + # Detrainment in penetrative convection + # (eq 6.8, IFS 43r3 doc, part 4) + DETRPEN: 0.75E-4 + # Entrainment rate + ENTRDD: 3.0E-4 + NAMMCC: + # Albedo of ocean surface + # Probably not relevant for AMIP + RALBSEAD_NML: 0.045 + # Scale factor for Arctic sea-ice albedo + # Arctic ice thermodynamics are wrong as 1.5m ice + # thickness is assumed. 
Some tuning is needed + # to correct Arctic ice in coupled runs + RALBSCALE_AR: 0.8 + NAMGWWMS: + # Scale factor for gravity-wave launch flux + # Launch flux at equator is A from + # Scinocca (2003 eq 25) + # This should be reduced for high-res + # Tco319 used -0.95 (95pct reduction) + # Reducing will weaken QBO etc + GGAUSSB: -0.25 + NAMORB: + # Set orbital parameters for Earth + # Settings (for a 1995 orbit) are: + # ORBOBLIQ (obliquity) = 23.4441 + # ORBECCEN (eccentricity) = 0.016715 + # ORBMVELP (moving vernal equinox long) = 102.7 + # Comment these lines out to use OpenIFS default + LCORBMD: '.false.' + ORBMODE: 'fixed_parameters' + ORBECCEN: 0.016715 + ORBOBLIQ: 22. #23.4441 + ORBMVELP: 102.7 + + +xios: + # Choose settings for model output + # This already includes some common variables. If you are looking for something + # specific, you can make your own settings file + xml_dir: /gxfs_work1/geomar/smomw352/foci_input2/openifs_cy43_tco95/output_for_prod/ + nproc: 16 diff --git a/runscripts/oifs/oifs-43r3-tco95-amip-blogin_tuning.yaml b/runscripts/oifs/oifs-43r3-tco95-amip-blogin_tuning.yaml index f07c78bc2..f68c75d55 100644 --- a/runscripts/oifs/oifs-43r3-tco95-amip-blogin_tuning.yaml +++ b/runscripts/oifs/oifs-43r3-tco95-amip-blogin_tuning.yaml @@ -3,11 +3,11 @@ general: verbose: False setup_name: oifs - compute_time: "02:00:00" + compute_time: "00:30:00" initial_date: "1979-01-01T00:00:00" - final_date: "1980-01-01T00:00:00" - nyear: 1 - nmonth: 0 + final_date: "1979-02-01T00:00:00" + nyear: 0 + nmonth: 1 nday: 0 restart_rate: 1 restart_unit: days @@ -18,11 +18,11 @@ general: # machine specific setup account: shk00018 model_dir: /home/shkjocke/esm/models/oifs-43r3-v1/ - base_dir: /scratch/usr/shkjocke/esm-tuning-v2/ + base_dir: /scratch/usr/shkjocke/esm-speed3/ oifs: version: 43r3 - with_xios: True + with_xios: True pool_dir: /scratch/projects/shk00018/foci_input2/ input_dir: ${pool_dir}/openifs_cy43_tco95/ rtables_dir: 
${pool_dir}/openifs_cy43_general/rtables/ @@ -39,8 +39,9 @@ oifs: resolution: TCO95 levels: L91 lresume: 0 - post_processing: 0 - nproc: 480 + post_processing: 1 + nproc: 288 + omp_num_threads: 2 time_step: 1800 perturb: 1 ensemble_id: 1 @@ -61,5 +62,5 @@ oifs: xios: # 3hr surface + 6hr model lev + 1d pressure lev + 1d PV + 1m surf and pressure level - xml_dir: /scratch/usr/shkjocke/foci_input2/OPENIFS43R3-TCO95/output_1m_sfc_pl/ + xml_dir: /scratch/projects/shk00018/foci_input2/openifs_cy43_tco95/output_1m+1d/ nproc: 96 diff --git a/setup.cfg b/setup.cfg index 66c1ec847..0c1b30e0f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 6.52.0 +current_version = 6.53.0 commit = True tag = True diff --git a/setup.py b/setup.py index 5a4cfae6d..c14462a39 100644 --- a/setup.py +++ b/setup.py @@ -117,6 +117,6 @@ test_suite="tests", tests_require=test_requirements, url="https://github.com/esm-tools/esm_tools", - version="6.52.0", + version="6.53.0", zip_safe=False, ) diff --git a/src/esm_archiving/__init__.py b/src/esm_archiving/__init__.py index 14fd90c35..f9ec395db 100644 --- a/src/esm_archiving/__init__.py +++ b/src/esm_archiving/__init__.py @@ -4,7 +4,7 @@ __author__ = """Paul Gierz""" __email__ = "pgierz@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .esm_archiving import (archive_mistral, check_tar_lists, delete_original_data, determine_datestamp_location, diff --git a/src/esm_calendar/__init__.py b/src/esm_calendar/__init__.py index 91951dd84..31a86a2e1 100644 --- a/src/esm_calendar/__init__.py +++ b/src/esm_calendar/__init__.py @@ -2,6 +2,6 @@ __author__ = """Dirk Barbi""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .esm_calendar import * diff --git a/src/esm_cleanup/__init__.py b/src/esm_cleanup/__init__.py index c2dab9a7c..a464b250e 100644 --- a/src/esm_cleanup/__init__.py +++ b/src/esm_cleanup/__init__.py @@ -2,4 +2,4 @@ __author__ = """Dirk Barbi""" __email__ = 
"dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" diff --git a/src/esm_database/__init__.py b/src/esm_database/__init__.py index ee16e313e..ae5fe5614 100644 --- a/src/esm_database/__init__.py +++ b/src/esm_database/__init__.py @@ -2,4 +2,4 @@ __author__ = """Dirk Barbi""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" diff --git a/src/esm_environment/__init__.py b/src/esm_environment/__init__.py index 04c1c628a..c9c16cb54 100644 --- a/src/esm_environment/__init__.py +++ b/src/esm_environment/__init__.py @@ -2,6 +2,6 @@ __author__ = """Dirk Barbi""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .esm_environment import * diff --git a/src/esm_master/__init__.py b/src/esm_master/__init__.py index 40ab86240..b99923c6f 100644 --- a/src/esm_master/__init__.py +++ b/src/esm_master/__init__.py @@ -2,7 +2,7 @@ __author__ = """Dirk Barbi""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from . 
import database diff --git a/src/esm_master/esm_master.py b/src/esm_master/esm_master.py index 92b501b63..d76b585f4 100644 --- a/src/esm_master/esm_master.py +++ b/src/esm_master/esm_master.py @@ -93,9 +93,12 @@ def main_flow(parsed_args, target): user_task.generate_task_script() # Print config - model_nested_dirs = complete_config["general"]["model_dir"].split("/") - model_name = model_nested_dirs.pop(-1) - finished_config_path = f'{"/".join(model_nested_dirs)}/{model_name}-finished_config.yaml' + current_path = os.getcwd() + model_dir_rel_pwd = complete_config["general"]["model_dir"].replace( + f"{current_path}/", "" + ) + model_name = model_dir_rel_pwd.split("/")[0] + finished_config_path = f"{current_path}/{model_name}-finished_config.yaml" yaml_dump(complete_config, config_file_path=finished_config_path) if parsed_args.get("check", False): diff --git a/src/esm_motd/__init__.py b/src/esm_motd/__init__.py index 7c4767663..bd1994afe 100644 --- a/src/esm_motd/__init__.py +++ b/src/esm_motd/__init__.py @@ -2,6 +2,6 @@ __author__ = """Dirk Barbi""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .esm_motd import * diff --git a/src/esm_parser/__init__.py b/src/esm_parser/__init__.py index 0d0f63edd..649309865 100644 --- a/src/esm_parser/__init__.py +++ b/src/esm_parser/__init__.py @@ -2,7 +2,7 @@ __author__ = """Dirk Barbi""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .dict_to_yaml import * diff --git a/src/esm_parser/dict_to_yaml.py b/src/esm_parser/dict_to_yaml.py index 4c1ed317b..27f3fecd3 100644 --- a/src/esm_parser/dict_to_yaml.py +++ b/src/esm_parser/dict_to_yaml.py @@ -101,8 +101,9 @@ def dictwithprov_representer(dumper, dictwithprov): ) if "oasis3mct" in config: + from esm_runscripts import oasis my_yaml.representer.add_representer( - esm_runscripts.oasis.oasis, oasis_representer + oasis.oasis, oasis_representer ) # Avoid saving ``prev_run`` information in the config file diff 
--git a/src/esm_plugin_manager/__init__.py b/src/esm_plugin_manager/__init__.py index d93758f73..fc34839d6 100644 --- a/src/esm_plugin_manager/__init__.py +++ b/src/esm_plugin_manager/__init__.py @@ -2,6 +2,6 @@ __author__ = """Dirk Barbi, Paul Gierz, Sebastian Wahl""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .esm_plugin_manager import * diff --git a/src/esm_profile/__init__.py b/src/esm_profile/__init__.py index b9b2750d2..82c58737e 100644 --- a/src/esm_profile/__init__.py +++ b/src/esm_profile/__init__.py @@ -2,6 +2,6 @@ __author__ = """Dirk Barbi""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .esm_profile import * diff --git a/src/esm_runscripts/__init__.py b/src/esm_runscripts/__init__.py index 3535b3e2b..61f5d3cca 100644 --- a/src/esm_runscripts/__init__.py +++ b/src/esm_runscripts/__init__.py @@ -2,7 +2,7 @@ __author__ = """Dirk Barbi""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .batch_system import * from .chunky_parts import * diff --git a/src/esm_tests/__init__.py b/src/esm_tests/__init__.py index 55275b2ac..a8fe56d2b 100644 --- a/src/esm_tests/__init__.py +++ b/src/esm_tests/__init__.py @@ -2,7 +2,7 @@ __author__ = """Miguel Andres-Martinez""" __email__ = "miguel.andres-martinez@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .initialization import * from .read_shipped_data import * diff --git a/src/esm_tools/__init__.py b/src/esm_tools/__init__.py index 10ddbfa32..16e5354e2 100644 --- a/src/esm_tools/__init__.py +++ b/src/esm_tools/__init__.py @@ -23,7 +23,7 @@ __author__ = """Dirk Barbi, Paul Gierz""" __email__ = "dirk.barbi@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" import functools import inspect diff --git a/src/esm_utilities/__init__.py b/src/esm_utilities/__init__.py index 03539812e..4bdbb8917 100644 --- a/src/esm_utilities/__init__.py +++ b/src/esm_utilities/__init__.py @@ -2,6 +2,6 @@ __author__ 
= """Paul Gierz""" __email__ = "pgierz@awi.de" -__version__ = "6.52.0" +__version__ = "6.53.0" from .utils import *