diff --git a/CMakeLists.txt b/CMakeLists.txt index ef52a34..37edb4d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -2,9 +2,12 @@ cmake_minimum_required(VERSION 3.12) enable_language( Fortran ) add_subdirectory(../iso_c_fortran_bmi ${CMAKE_BINARY_DIR}/iso_c_bmi) +set( CMAKE_VERBOSE_MAKEFILE on ) + list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/../../cmake/") # Uncomment this and rebuild artifacts to enable debugging #set(CMAKE_BUILD_TYPE Debug) + add_compile_options( "-Wall" "$<$:-g;-fbacktrace;-fbounds-check>" @@ -63,7 +66,8 @@ set(MODEL_SOURCE_DIR src/) set(BMI_SOURCE_DIR src/bmi/) file(GLOB BMI_SOURCE ${BMI_SOURCE_DIR}*.f90) -file( GLOB MODEL_SOURCES ${MODEL_SOURCE_DIR}/snow19/*.f ${MODEL_SOURCE_DIR}/driver/*.f90 ${MODEL_SOURCE_DIR}/share/*.f90) +#file( GLOB MODEL_SOURCES ${MODEL_SOURCE_DIR}/snow19/*.f ${MODEL_SOURCE_DIR}/driver/*.f90 ${MODEL_SOURCE_DIR}/share/*.f90) +file( GLOB MODEL_SOURCES ${MODEL_SOURCE_DIR}/snow19/*.f ${MODEL_SOURCE_DIR}/share/*.f90 ${MODEL_SOURCE_DIR}/driver/*.f90 ${CMAKE_CURRENT_LIST_DIR}/../datetime-fortran-1.7.0/src/*.f90) if(WIN32) add_library(snow17bmi ${BMI_SOURCE} ) @@ -72,16 +76,19 @@ else() endif() #target_include_directories(snow17bmi PUBLIC ${netCDF_INCLUDE_DIR} ${netCDF_MOD_PATH}) # netcdf not needed for this code -target_include_directories( snow17bmi PUBLIC ) +#target_include_directories( snow17bmi PUBLIC ${FDATETIME_INCLUDE_DIRS} ) #We know we are building this for NGEN support, so define the preprocessor directive in the compile options #Not sure why the global target_compile_options(snow17bmi PUBLIC -cpp -ffree-line-length-none) +#target_compile_options(snow17bmi PUBLIC -O3 -fdefault-real-8 -fno-align-commons -ffree-line-length-none -cpp -fcheck=all) ##### NETCDF ##### #target_link_libraries( snow17bmi iso_c_bmi ${netCDF_C_LIB} ${netCDF_FORTRAN_LIB}) ##### END NETCDF ##### +#target_link_libraries( snow17bmi PUBLIC iso_c_bmi fortran_stdlib::fortran_stdlib ${FDATETIME_LIBRARIES} ) target_link_libraries( 
snow17bmi PUBLIC iso_c_bmi ) + set_target_properties(snow17bmi PROPERTIES VERSION ${PROJECT_VERSION}) include(GNUInstallDirs) diff --git a/INSTALL.md b/INSTALL.md index 3115705..0c35ea8 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -22,4 +22,30 @@ cd ../output/ # to view output data ``` Note that the ex1/ example is part of the repo. The unpacked directory and test contents will not be included in a push back to the online repo. To the program for other purposes, create new input files and output directories outside of the `snow17/` repository directory, and link to the executable in `snow17/bin/`. +## Logger + +The Error Warning and Trapping Systems (EWTS) has been added to this module using a logging schema. All write statements have been converted to `write_log` statements, which saves the output to a log file based on the log level. + +When running within the ngen framework, the log file and log level are handled programmatically. When running standalone, logging is defaulted to DISABLED. + +**Running Standalone** + +In order to generate log messages when running standalone, the `NGEN_EWTS_LOGGING` environment variable must be set to `ENABLED`. This is the only required environment variable. Other optional logger environment variables exist for specifying the log file full pathname and setting the log level. If the user only enables logging, the log level will be set to INFO and the filename will be created based on the user and module names. All logger setup details are written to the console when the module is run. +``` +# Case Sensitive +export NGEN_EWTS_LOGGING=ENABLED +export NGEN_LOG_FILE_PATH= +export SNOW17_LOGLEVEL= +``` +**Log Levels** +| Level | Description | Typical Use | +|---------|-----------------------------------------------------|-----------------------------------------------| +| DEBUG | Detailed diagnostic info for development/troubleshooting. | Variable values, function entry/exit. 
| +| FATAL | Critical failure that aborts or makes app unrecoverable. | Crashes, memory errors, invalid state. | +| INFO | General events confirming expected operations. | Startup/shutdown, configs, task completions. | +| SEVERE | Significant problem; app may continue in degraded state. | Failed services, corrupted configs, data loss.| +| WARNING | Potential issue that doesn’t stop execution. | Deprecated APIs, missing files, repeatable errors. | + +Default log level is INFO. The log level is hierarchical. Setting it to INFO, will log INFO, WARNING, SEVERE and FATAL +messages. diff --git a/LICENSE b/LICENSE index d645695..01fbb8d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,3 +1,25 @@ +Copyright 2025 Raytheon Company + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +Licensed under: https://opensource.org/license/bsd-2-clause + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +All rights reserved. Based on Government sponsored work under contract GS-35F-204GA. + +----------------- + +“Software code created by U.S. Government employees is not subject to copyright +in the United States (17 U.S.C. §105). The United States/Department of Commerce +reserve all rights to seek and obtain copyright protection in countries other +than the United States for Software authored in its entirety by the Department +of Commerce. To this end, the Department of Commerce hereby grants to Recipient +a royalty-free, nonexclusive license to use, copy, and create derivative works +of the Software outside of the United States.” Apache License Version 2.0, January 2004 diff --git a/build/Makefile b/build/Makefile index 16479e6..85d535e 100644 --- a/build/Makefile +++ b/build/Makefile @@ -1,146 +1,159 @@ #======================================================================== # Makefile to compile the Snow-17 and driver code for multi-zone model #======================================================================== - + #======================================================================== # PART 1: Define directory paths and compiler - + # Define main/top directory (parent of /build/) # Note: the relative path of "../" drops into snow17 parent directory F_MASTER_DIR = ../ - +DATETIME_FORTRAN_INSTALLATION_DIR=../../../extern + # Location of the compiled modules # MOD_PATH = $(F_MASTER_DIR)/build/ -- not used - + # Define directories driver_dir = 
$(F_MASTER_DIR)/src/driver/ snow19_dir = $(F_MASTER_DIR)/src/snow19/ share_dir = $(F_MASTER_DIR)/src/share/ bmi_dir = $(F_MASTER_DIR)/src/bmi/ exe_dir = $(F_MASTER_DIR)/bin/ - +fdatetime_dir = $(DATETIME_FORTRAN_INSTALLATION_DIR)/datetime-fortran-1.7.0/src/ + # Define the executable and path EXE = $(exe_dir)/snow17.exe - + # Define your Fortran compiler #FC = pgf90 #FC = ifort FC = gfortran - + #======================================================================== # PART 2: Define the libraries and flags (can be an include file) - + # --- Define the libraries and path to include files ifeq "$(FC)" "pgf90" LOCL_PATH = /usr/local - LIB = -L$(LOCL_PATH)/lib + LIB = -L$(LOCL_PATH)/lib INC = -I ./ endif - + ifeq "$(FC)" "gfortran" LOCL_PATH = /usr/local - LIB = -L$(LOCL_PATH)/lib + LIB = -L$(LOCL_PATH)/lib INC = -I ../ endif - + # --- Define flags ifeq "$(FC)" "gfortran" FLAGS_DEBUG = -static -Wall -g -ffree-line-length-none -cpp -fcheck=all - FLAGS = -O3 -fdefault-real-8 -fno-align-commons -ffree-line-length-none -cpp -fcheck=all - FLAGS2 = -O3 -fdefault-real-8 -fno-align-commons -ffree-line-length-none -cpp -fcheck=all + #FLAGS = -O3 -fdefault-real-8 -fno-align-commons -ffree-line-length-none -cpp -fcheck=all + FLAGS = -O3 -fno-align-commons -ffree-line-length-none -cpp -fcheck=all + #FLAGS2 = -O3 -fdefault-real-8 -fno-align-commons -ffree-line-length-none -cpp -fcheck=all + FLAGS2 = -O3 -fno-align-commons -ffree-line-length-none -cpp -fcheck=all endif - + ifeq "$(FC)" "ifort" FLAGS_PROF = -static -debug -warn all -check all -FR -O0 -auto -WB -traceback -g -fltconsistency -fpe0 FLAGS = -O3 -warn all -check all - FLAGS2 = -O3 - FLAGS77 = -O3 -f77rtl + FLAGS2 = -O3 + FLAGS77 = -O3 -f77rtl endif - + ifeq "$(FC)" "pgf90" FLAGS_PROF = -Bstatic -Mbackslash -g -Mchkptr -Mchkstk -Mpgicoff -Minform=inform -Ktrap=divz,inv -Mprof=lines,time # FLAGS = -Mfreeform -O3 -Mbackslash -g -Mchkptr -Mchkstk -Mpgicoff -Minform=inform -Ktrap=divz,inv FLAGS = -O3 -r8 -Kieee FLAGS2 = -O3 
-r8 endif - - + + #======================================================================== # PART 3: Assemble all of the various subroutines - + # --- snow-19 code (fortran 90, different options) snow19 = \ - zero19.f \ - rout19.f \ - aesc19.f \ - melt19.f \ - SNEW.f \ - SNOWT.f \ - SNOWPACK.f \ - adjc19.f \ - aeco19.f \ - updt19.f \ - SNDEPTH.f \ - PACK19.f \ - exsnow19.f + zero19.f \ + rout19.f \ + aesc19.f \ + melt19.f \ + SNEW.f \ + SNOWT.f \ + SNOWPACK.f \ + adjc19.f \ + aeco19.f \ + updt19.f \ + SNDEPTH.f \ + PACK19.f \ + exsnow19.f model_snow19 = $(patsubst %, $(snow19_dir)/%, $(snow19)) - - + + # -- share utilities and code used by the driver in running the model run_util = \ - nrtype.f90 \ - constants.f90 \ - namelistModule.f90 \ + snowLogger.f90 \ + nrtype.f90 \ + constants.f90 \ + namelistModule.f90 \ parametersType.f90 \ forcingType.f90 \ dateTimeUtilsModule.f90 \ runInfoType.f90 \ modelVarType.f90 \ ioModule.f90 \ - runSnow17.f90 + runSnow17.f90 model_run = $(patsubst %, $(share_dir)/%, $(run_util)) - + # --- BMI functions used by the driver to run the model bmi_functions = bmi.f90 bmi_snow17.f90 snow17_bmi = $(patsubst %, $(bmi_dir)/%, $(bmi_functions)) + +fdatetime_code = datetime_module.f90 +fdatetime = $(patsubst %, $(fdatetime_dir)/%, $(fdatetime_code)) # --- snow17 driver driver_snow17 = driver_bmi.f90 snow17_drv = $(patsubst %, $(driver_dir)/%, $(driver_snow17)) - - + + # --- prepare to stitch it all together # check just using the original names ... this does nothing? 
run_code = $(model_run) model = $(model_snow19) bmi = $(snow17_bmi) - - + + #======================================================================== -# PART 4: Compile it - -all: model link clean +# PART 4: Compile it + +all: model link clean debug: model_debug link -devel: model link - +devel: model link + check: echo test echo $(FC) - + # compile model code model: - $(FC) $(FLAGS2) -c $(model) $(INC) + #$(FC) $(FLAGS2) -c $(model) $(INC) $(STDLIB_CFLAGS) $(DATETIME_F_CFLAGS) + $(FC) $(FLAGS2) -c $(model) $(fdatetime) $(INC) + #$(FC) $(FLAGS) -c $(run_code) $(bmi) $(snow17_drv) $(INC) $(STDLIB_CFLAGS) $(DATETIME_F_CFLAGS) $(FC) $(FLAGS) -c $(run_code) $(bmi) $(snow17_drv) $(INC) - + model_debug: - $(FC) $(FLAGS2) -g -fcheck=all -c $(model) $(INC) - $(FC) $(FLAGS) -g -fcheck=all -c $(run_code) $(bmi) $(snow17_drv) $(INC) - + #$(FC) $(FLAGS2) -g -fcheck=all -c $(model) $(INC) $(STDLIB_CFLAGS) $(DATETIME_F_CFLAGS) + $(FC) $(FLAGS2) -g -fcheck=all -c $(model) $(fdatetime) $(INC) + #$(FC) $(FLAGS) -g -fcheck=all -c $(run_code) $(bmi) $(snow17_drv) $(INC) $(STDLIB_CFLAGS) $(DATETIME_F_CFLAGS) + $(FC) $(FLAGS) -g -fcheck=all -c $(run_code) $(bmi) $(snow17_drv) $(INC) + # link routines link: -# $(FC) -fPIC -Bstatic_pgi -rpath,/usr/local/netcdf4-pgi/lib *.o -I./ $(LIBNETCDF) -o $(EXE) - $(FC) -fPIC -I./ $(LIB) -o $(EXE) *.o - +# $(FC) -fPIC -Bstatic_pgi -rpath,/usr/local/netcdf4-pgi/lib *.o -I./ $(LIBNETCDF) -o $(EXE) + #$(FC) -fPIC -I./ $(LIB) -o $(EXE) *.o $(STDLIB_LIBS) $(DATETIME_F_LIBS) + $(FC) -fPIC -I./ $(LIB) -o $(EXE) *.o + # Remove object files clean: rm -f *.o diff --git a/src/bmi/bmi_snow17.f90 b/src/bmi/bmi_snow17.f90 index ec9b054..beeae15 100644 --- a/src/bmi/bmi_snow17.f90 +++ b/src/bmi/bmi_snow17.f90 @@ -7,7 +7,7 @@ module bmi_snow17_module #else use bmif_2_0 #endif - + use snow_log_module use runModule use, intrinsic :: iso_c_binding, only: c_ptr, c_loc, c_f_pointer implicit none @@ -175,6 +175,7 @@ function snow17_initialize(this, config_file) result 
(bmi_status) !else !call initialize_from_defaults(this%model) end if + call write_log("Initialization Done!", LOG_LEVEL_INFO) bmi_status = BMI_SUCCESS end function snow17_initialize @@ -263,6 +264,7 @@ function snow17_update_until(this, time) result (bmi_status) ! check to see if desired time to advance to is earlier than current time (can't go backwards) if (time < this%model%runinfo%curr_datetime) then bmi_status = BMI_FAILURE + call write_log("check to see if desired time to advance to is earlier than current time", LOG_LEVEL_SEVERE) return end if ! otherwise try to advance to end time @@ -287,8 +289,19 @@ function snow17_var_grid(this, name, grid) result (bmi_status) 'precip_scf', 'sneqv', 'snowh', 'raim') ! output vars grid = 0 bmi_status = BMI_SUCCESS + case('scf', 'mfmax', 'mfmin', 'uadj', 'si', & ! parameters + 'pxtemp', 'nmf', 'tipm', 'mbase', 'plwhc', & + 'daygm', 'adc','elev', 'latitude', & + 'hru_area', 'hru_id', 'total_area') + grid = 0 + bmi_status = BMI_SUCCESS + case('adc1', 'adc2', 'adc3', 'adc4', 'adc5', & ! parameters + 'adc6', 'adc7', 'adc8', 'adc9', 'adc10', 'adc11') + grid = 0 + bmi_status = BMI_SUCCESS case default grid = -1 + call write_log("snow17_var_grid - " // name // " not found.", LOG_LEVEL_SEVERE) bmi_status = BMI_FAILURE end select end function snow17_var_grid @@ -310,6 +323,7 @@ function snow17_grid_type(this, grid, type) result (bmi_status) ! 
bmi_status = BMI_SUCCESS case default type = "-" + call write_log("snow17_grid_type - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) bmi_status = BMI_FAILURE end select end function snow17_grid_type @@ -332,6 +346,7 @@ function snow17_grid_rank(this, grid, rank) result (bmi_status) case default rank = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_rank - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_grid_rank @@ -351,6 +366,7 @@ function snow17_grid_shape(this, grid, shape) result (bmi_status) case default shape(:) = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_shape - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_grid_shape @@ -372,6 +388,7 @@ function snow17_grid_size(this, grid, size) result (bmi_status) case default size = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_size - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_grid_size @@ -391,6 +408,7 @@ function snow17_grid_spacing(this, grid, spacing) result (bmi_status) case default spacing(:) = -1.d0 bmi_status = BMI_FAILURE + call write_log("snow17_grid_spacing - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_grid_spacing ! 
@@ -410,6 +428,7 @@ function snow17_grid_origin(this, grid, origin) result (bmi_status) case default origin(:) = -1.d0 bmi_status = BMI_FAILURE + call write_log("snow17_grid_origin - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_grid_origin @@ -427,6 +446,7 @@ function snow17_grid_x(this, grid, x) result (bmi_status) case default x(:) = -1.d0 bmi_status = BMI_FAILURE + call write_log("snow17_grid_x - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_grid_x @@ -444,6 +464,7 @@ function snow17_grid_y(this, grid, y) result (bmi_status) case default y(:) = -1.d0 bmi_status = BMI_FAILURE + call write_log("snow17_grid_y - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_grid_y @@ -461,6 +482,7 @@ function snow17_grid_z(this, grid, z) result (bmi_status) case default z(:) = -1.d0 bmi_status = BMI_FAILURE + call write_log("snow17_grid_z - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_grid_z @@ -477,6 +499,7 @@ function snow17_grid_node_count(this, grid, count) result(bmi_status) case default count = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_node_count - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_grid_node_count @@ -489,6 +512,7 @@ function snow17_grid_edge_count(this, grid, count) result(bmi_status) count = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_edge_count - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end function snow17_grid_edge_count ! Get the number of faces in an unstructured grid. @@ -500,6 +524,7 @@ function snow17_grid_face_count(this, grid, count) result(bmi_status) count = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_face_count - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end function snow17_grid_face_count ! Get the edge-node connectivity. 
@@ -511,6 +536,7 @@ function snow17_grid_edge_nodes(this, grid, edge_nodes) result(bmi_status) edge_nodes(:) = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_edge_nodes - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end function snow17_grid_edge_nodes ! Get the face-edge connectivity. @@ -522,6 +548,7 @@ function snow17_grid_face_edges(this, grid, face_edges) result(bmi_status) face_edges(:) = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_face_edges - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end function snow17_grid_face_edges ! Get the face-node connectivity. @@ -533,6 +560,7 @@ function snow17_grid_face_nodes(this, grid, face_nodes) result(bmi_status) face_nodes(:) = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_face_nodes - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end function snow17_grid_face_nodes ! Get the number of nodes for each face. @@ -544,6 +572,7 @@ function snow17_grid_nodes_per_face(this, grid, nodes_per_face) result(bmi_statu nodes_per_face(:) = -1 bmi_status = BMI_FAILURE + call write_log("snow17_grid_nodes_per_face - " // itoa(grid) // " not found.", LOG_LEVEL_SEVERE) end function snow17_grid_nodes_per_face ! The data type of the variable, as a string. @@ -558,9 +587,23 @@ function snow17_var_type(this, name, type) result (bmi_status) 'precip_scf', 'sneqv', 'snowh', 'raim') ! output vars type = "real" bmi_status = BMI_SUCCESS + case('scf', 'mfmax', 'mfmin', 'uadj', 'si', & ! parameters + 'pxtemp', 'nmf', 'tipm', 'mbase', 'plwhc', & + 'daygm', 'adc','elev', 'latitude', & + 'hru_area', 'total_area') + type = "real" + bmi_status = BMI_SUCCESS + case('adc1', 'adc2', 'adc3', 'adc4', 'adc5', & ! 
parameters + 'adc6', 'adc7', 'adc8', 'adc9', 'adc10', 'adc11') + type = "real" + bmi_status = BMI_SUCCESS + case('hru_id') + type = "character" + bmi_status = BMI_SUCCESS case default type = "-" bmi_status = BMI_FAILURE + call write_log("snow17_var_type - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_var_type @@ -590,9 +633,22 @@ function snow17_var_units(this, name, units) result (bmi_status) case("raim") units = "mm/s" bmi_status = BMI_SUCCESS + case("elev") + units = "m" + bmi_status = BMI_SUCCESS + case("hru_area", "total_area") + units = "km**2" + bmi_status = BMI_SUCCESS + case("adc", "scf", "mfmax", "mfmin", "uadj", "si", "pxtemp", "nmf", "tipm", "mbase", "plwhc", "daygm") + units = "unitless" + bmi_status = BMI_SUCCESS + case('adc1', 'adc2', 'adc3', 'adc4', 'adc5', 'adc6', 'adc7', 'adc8', 'adc9', 'adc10', 'adc11') + units = "unitless" + bmi_status = BMI_SUCCESS case default units = "-" bmi_status = BMI_FAILURE + call write_log("snow17_var_units - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_var_units @@ -606,7 +662,9 @@ function snow17_var_itemsize(this, name, size) result (bmi_status) ! note: the combined variables are used assuming ngen is interacting with the ! catchment-averaged result if snowbands are used + select case(name) + case("precip") size = sizeof(this%model%forcing%precip(1)) ! 
'sizeof' in gcc & ifort bmi_status = BMI_SUCCESS @@ -625,9 +683,91 @@ function snow17_var_itemsize(this, name, size) result (bmi_status) case("raim") size = sizeof(this%model%modelvar%raim_comb) bmi_status = BMI_SUCCESS + case("hru_id") + size = sizeof(this%model%parameters%hru_id) + bmi_status = BMI_SUCCESS + case("hru_area") + size = sizeof(this%model%parameters%hru_area(1)) + bmi_status = BMI_SUCCESS + case("latitude") + size = sizeof(this%model%parameters%latitude(1)) + bmi_status = BMI_SUCCESS + case("elev") + size = sizeof(this%model%parameters%elev(1)) + bmi_status = BMI_SUCCESS + case("scf") + size = sizeof(this%model%parameters%scf(1)) + bmi_status = BMI_SUCCESS + case("mfmax") + size = sizeof(this%model%parameters%mfmax(1)) + bmi_status = BMI_SUCCESS + case("mfmin") + size = sizeof(this%model%parameters%mfmin(1)) + bmi_status = BMI_SUCCESS + case("uadj") + size = sizeof(this%model%parameters%uadj(1)) + bmi_status = BMI_SUCCESS + case("si") + size = sizeof(this%model%parameters%si(1)) + bmi_status = BMI_SUCCESS + case("pxtemp") + size = sizeof(this%model%parameters%pxtemp(1)) + bmi_status = BMI_SUCCESS + case("nmf") + size = sizeof(this%model%parameters%nmf(1)) + bmi_status = BMI_SUCCESS + case("tipm") + size = sizeof(this%model%parameters%tipm(1)) + bmi_status = BMI_SUCCESS + case("mbase") + size = sizeof(this%model%parameters%mbase(1)) + bmi_status = BMI_SUCCESS + case("plwhc") + size = sizeof(this%model%parameters%plwhc(1)) + bmi_status = BMI_SUCCESS + case("daygm") + size = sizeof(this%model%parameters%daygm(1)) + bmi_status = BMI_SUCCESS + case("total_area") + size = sizeof(this%model%parameters%total_area) + bmi_status = BMI_SUCCESS + case("adc1") + size = sizeof(this%model%parameters%adc(1,:)) + bmi_status = BMI_SUCCESS + case("adc2") + size = sizeof(this%model%parameters%adc(2,:)) + bmi_status = BMI_SUCCESS + case("adc3") + size = sizeof(this%model%parameters%adc(3,:)) + bmi_status = BMI_SUCCESS + case("adc4") + size = 
sizeof(this%model%parameters%adc(4,:)) + bmi_status = BMI_SUCCESS + case("adc5") + size = sizeof(this%model%parameters%adc(5,:)) + bmi_status = BMI_SUCCESS + case("adc6") + size = sizeof(this%model%parameters%adc(6,:)) + bmi_status = BMI_SUCCESS + case("adc7") + size = sizeof(this%model%parameters%adc(7,:)) + bmi_status = BMI_SUCCESS + case("adc8") + size = sizeof(this%model%parameters%adc(8,:)) + bmi_status = BMI_SUCCESS + case("adc9") + size = sizeof(this%model%parameters%adc(9,:)) + bmi_status = BMI_SUCCESS + case("adc10") + size = sizeof(this%model%parameters%adc(10,:)) + bmi_status = BMI_SUCCESS + case("adc11") + size = sizeof(this%model%parameters%adc(11,:)) + bmi_status = BMI_SUCCESS case default size = -1 bmi_status = BMI_FAILURE + call write_log("snow17_var_itemsize - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_var_itemsize @@ -638,7 +778,6 @@ function snow17_var_nbytes(this, name, nbytes) result (bmi_status) integer, intent(out) :: nbytes integer :: bmi_status integer :: s1, s2, s3, grid, grid_size, item_size - s1 = this%get_var_grid(name, grid) s2 = this%get_grid_size(grid, grid_size) s3 = this%get_var_itemsize(name, item_size) @@ -649,6 +788,7 @@ function snow17_var_nbytes(this, name, nbytes) result (bmi_status) else nbytes = -1 bmi_status = BMI_FAILURE + call write_log("snow17_var_nbytes - " // name // " not found.", LOG_LEVEL_SEVERE) end if end function snow17_var_nbytes @@ -681,6 +821,7 @@ function snow17_get_int(this, name, dest) result (bmi_status) case default dest(:) = -1 bmi_status = BMI_FAILURE + call write_log("snow17_get_int - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_get_int @@ -690,6 +831,7 @@ function snow17_get_float(this, name, dest) result (bmi_status) character (len=*), intent(in) :: name real, intent(inout) :: dest(:) integer :: bmi_status + character(len=256) :: msg ! 
for logging messages select case(name) case("precip") @@ -709,10 +851,110 @@ function snow17_get_float(this, name, dest) result (bmi_status) bmi_status = BMI_SUCCESS case("raim") dest(1) = this%model%modelvar%raim_comb + + ! handle very small negative raim values that can occur due to round-off error or floating-point artifacts + if (dest(1) < 0.0 .and. dest(1) > -1.0e-6) then + dest(1) = 0.0 + write(msg, '(A,ES12.5,A)') "snow17_get_float - 'raim' is negligibly negative (", & + this%model%modelvar%raim_comb, " mm/s), set to 0.0" + call write_log(msg, LOG_LEVEL_INFO) + bmi_status = BMI_SUCCESS + + ! Throw an error if it’s truly negative + else if (this%model%modelvar%raim(1) <= -1.0e-6) then + write(msg,'(A,ES12.5,A)') "snow17_get_float - 'raim' is invalid (", this%model%modelvar%raim(1), & + " mm/s), must be non-negative." + call write_log(msg, LOG_LEVEL_SEVERE) + bmi_status = BMI_FAILURE + else + bmi_status = BMI_SUCCESS + end if + + !case("hru_id") + ! dest = [this%model%parameters%hru_id] + ! 
bmi_status = BMI_SUCCESS + case("hru_area") + dest = [this%model%parameters%hru_area] + bmi_status = BMI_SUCCESS + case("latitude") + dest = [this%model%parameters%latitude] + bmi_status = BMI_SUCCESS + case("elev") + dest = [this%model%parameters%elev] + bmi_status = BMI_SUCCESS + case("scf") + dest = [this%model%parameters%scf] + bmi_status = BMI_SUCCESS + case("mfmax") + dest = [this%model%parameters%mfmax] + bmi_status = BMI_SUCCESS + case("mfmin") + dest = [this%model%parameters%mfmin] + bmi_status = BMI_SUCCESS + case("uadj") + dest = [this%model%parameters%uadj] + bmi_status = BMI_SUCCESS + case("si") + dest = [this%model%parameters%si] + bmi_status = BMI_SUCCESS + case("pxtemp") + dest = [this%model%parameters%pxtemp] + bmi_status = BMI_SUCCESS + case("nmf") + dest = [this%model%parameters%nmf] + bmi_status = BMI_SUCCESS + case("tipm") + dest = [this%model%parameters%tipm] + bmi_status = BMI_SUCCESS + case("mbase") + dest = [this%model%parameters%mbase] + bmi_status = BMI_SUCCESS + case("plwhc") + dest = [this%model%parameters%plwhc] + bmi_status = BMI_SUCCESS + case("daygm") + dest = [this%model%parameters%daygm] + bmi_status = BMI_SUCCESS + case("adc") + dest = [this%model%parameters%adc] + bmi_status = BMI_SUCCESS + !case("adc2") + ! dest = [this%model%parameters%adc2] + ! bmi_status = BMI_SUCCESS + !case("adc3") + ! dest = [this%model%parameters%adc3] + ! bmi_status = BMI_SUCCESS + !case("adc4") + ! dest = [this%model%parameters%adc4] + ! bmi_status = BMI_SUCCESS + !case("adc5") + ! dest = [this%model%parameters%adc5] + ! bmi_status = BMI_SUCCESS + !case("adc6") + ! dest = [this%model%parameters%adc6] + ! bmi_status = BMI_SUCCESS + !case("adc7") + ! dest = [this%model%parameters%adc7] + ! bmi_status = BMI_SUCCESS + !case("adc8") + ! dest = [this%model%parameters%adc8] + ! bmi_status = BMI_SUCCESS + !case("adc9") + ! dest = [this%model%parameters%adc9] + ! bmi_status = BMI_SUCCESS + !case("adc10") + ! dest = [this%model%parameters%adc10] + ! 
bmi_status = BMI_SUCCESS + !case("adc11") + ! dest = [this%model%parameters%adc11] + ! bmi_status = BMI_SUCCESS + case("total_area") + dest(1) = this%model%parameters%total_area bmi_status = BMI_SUCCESS case default dest(:) = -1.0 bmi_status = BMI_FAILURE + call write_log("snow17_get_float - " // name // " not found.", LOG_LEVEL_SEVERE) end select ! NOTE, if vars are gridded, then use: ! dest = reshape(this%model%temperature, [this%model%n_x*this%model%n_y]) @@ -731,6 +973,7 @@ function snow17_get_double(this, name, dest) result (bmi_status) case default dest(:) = -1.d0 bmi_status = BMI_FAILURE + call write_log("snow17_get_double - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_get_double @@ -750,6 +993,7 @@ function snow17_get_ptr_int(this, name, dest_ptr) result (bmi_status) select case(name) case default bmi_status = BMI_FAILURE + call write_log("snow17_get_ptr_int - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_get_ptr_int @@ -765,6 +1009,7 @@ function snow17_get_ptr_float(this, name, dest_ptr) result (bmi_status) select case(name) case default bmi_status = BMI_FAILURE + call write_log("snow17_get_ptr_float - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_get_ptr_float @@ -782,6 +1027,7 @@ function snow17_get_ptr_double(this, name, dest_ptr) result (bmi_status) select case(name) case default bmi_status = BMI_FAILURE + call write_log("snow17_get_ptr_double - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_get_ptr_double @@ -800,6 +1046,7 @@ function snow17_get_at_indices_int(this, name, dest, inds) & select case(name) case default bmi_status = BMI_FAILURE + call write_log("snow17_get_at_indices_int - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_get_at_indices_int @@ -818,6 +1065,7 @@ function snow17_get_at_indices_float(this, name, dest, inds) & select case(name) case default bmi_status = BMI_FAILURE + call 
write_log("snow17_get_at_indices_float - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_get_at_indices_float @@ -836,6 +1084,7 @@ function snow17_get_at_indices_double(this, name, dest, inds) & select case(name) case default bmi_status = BMI_FAILURE + call write_log("snow17_get_at_indices_double - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_get_at_indices_double @@ -854,6 +1103,7 @@ function snow17_set_int(this, name, src) result (bmi_status) ! bmi_status = BMI_SUCCESS case default bmi_status = BMI_FAILURE + call write_log("snow17_set_int - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_set_int @@ -862,7 +1112,7 @@ function snow17_set_float(this, name, src) result (bmi_status) class (bmi_snow17), intent(inout) :: this character (len=*), intent(in) :: name real, intent(in) :: src(:) - integer :: bmi_status + integer :: bmi_status ! NOTE: if run in a vector (snowband mode), this code will need revising ! to set the basin average (ie, restart capability) @@ -886,8 +1136,90 @@ function snow17_set_float(this, name, src) result (bmi_status) case("raim") this%model%modelvar%raim(1) = src(1) bmi_status = BMI_SUCCESS + !case("hru_id") + ! this%model%parameters%hru_id = src(1) + ! 
bmi_status = BMI_SUCCESS + case("scf") + this%model%parameters%scf(:) = src(:) + bmi_status = BMI_SUCCESS + case("mfmax") + this%model%parameters%mfmax(:) = src(:) + bmi_status = BMI_SUCCESS + case("mfmin") + this%model%parameters%mfmin(:) = src(:) + bmi_status = BMI_SUCCESS + case("uadj") + this%model%parameters%uadj(:) = src(:) + bmi_status = BMI_SUCCESS + case("si") + this%model%parameters%si(:) = src(:) + bmi_status = BMI_SUCCESS + case("pxtemp") + this%model%parameters%pxtemp(:) = src(:) + bmi_status = BMI_SUCCESS + case("nmf") + this%model%parameters%nmf(:) = src(:) + bmi_status = BMI_SUCCESS + case("tipm") + this%model%parameters%tipm(:) = src(:) + bmi_status = BMI_SUCCESS + case("mbase") + this%model%parameters%mbase(:) = src(:) + bmi_status = BMI_SUCCESS + case("plwhc") + this%model%parameters%plwhc(:) = src(:) + bmi_status = BMI_SUCCESS + case("daygm") + this%model%parameters%daygm(:) = src(:) + bmi_status = BMI_SUCCESS + case("adc1") + this%model%parameters%adc(1,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc2") + this%model%parameters%adc(2,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc3") + this%model%parameters%adc(3,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc4") + this%model%parameters%adc(4,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc5") + this%model%parameters%adc(5,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc6") + this%model%parameters%adc(6,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc7") + this%model%parameters%adc(7,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc8") + this%model%parameters%adc(8,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc9") + this%model%parameters%adc(9,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc10") + this%model%parameters%adc(10,:) = src(:) + bmi_status = BMI_SUCCESS + case("adc11") + this%model%parameters%adc(11,:) = src(:) + bmi_status = BMI_SUCCESS + case("elev") + this%model%parameters%elev(:) = src(:) + bmi_status = BMI_SUCCESS + case("latitude") + 
this%model%parameters%latitude(:) = src(:) + bmi_status = BMI_SUCCESS + case("hru_area") + this%model%parameters%hru_area(:) = src(:) + bmi_status = BMI_SUCCESS + case("total_area") + this%model%parameters%total_area = src(1) + bmi_status = BMI_SUCCESS case default bmi_status = BMI_FAILURE + call write_log("snow17_set_float - " // name // " not found.", LOG_LEVEL_SEVERE) end select ! NOTE, if vars are gridded, then use: ! this%model%temperature = reshape(src, [this%model%n_y, this%model%n_x]) @@ -905,6 +1237,7 @@ function snow17_set_double(this, name, src) result (bmi_status) select case(name) case default bmi_status = BMI_FAILURE + call write_log("snow17_set_double - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_set_double @@ -923,6 +1256,7 @@ function snow17_set_at_indices_int(this, name, inds, src) & select case(name) case default bmi_status = BMI_FAILURE + call write_log("snow17_set_at_indices_int - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_set_at_indices_int @@ -941,6 +1275,7 @@ function snow17_set_at_indices_float(this, name, inds, src) & select case(name) case default bmi_status = BMI_FAILURE + call write_log("snow17_set_at_indices_float - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_set_at_indices_float @@ -959,6 +1294,7 @@ function snow17_set_at_indices_double(this, name, inds, src) & select case(name) case default bmi_status = BMI_FAILURE + call write_log("snow17_set_at_indices_double - " // name // " not found.", LOG_LEVEL_SEVERE) end select end function snow17_set_at_indices_double @@ -1004,6 +1340,7 @@ function register_bmi(this) result(bmi_status) bind(C, name="register_bmi") if( .not. associated( bmi_box ) .or. .not.
associated( bmi_box%ptr ) ) then bmi_status = BMI_FAILURE + call write_log("register_bmi - Can't associate the wrapper pointer to the created model instance", LOG_LEVEL_SEVERE) else !Return the pointer to box this = c_loc(bmi_box) diff --git a/src/driver/driver_bmi.f90 b/src/driver/driver_bmi.f90 index 181a314..8c8ad86 100644 --- a/src/driver/driver_bmi.f90 +++ b/src/driver/driver_bmi.f90 @@ -17,6 +17,7 @@ program multi_driver use bmi_snow17_module use bmif_2_0 use dateTimeUtilsModule + use snow_log_module implicit none @@ -42,12 +43,12 @@ program multi_driver ! Initialize ! Call the initialize_from_file() subroutine in src/RunSnow17.f90 !--------------------------------------------------------------------- - print*, "Initializing run ..." + call write_log("driver - Initializing run ...", LOG_LEVEL_INFO) call get_command_argument(1, namelist_file, status=status) !if( .not. ( present(namelist_file) ) ) then if( status /= 0 ) then namelist_file = "namelist.input" - print*, 'No namelist filename supplied -- attempting to read default file called namelist.input' + call write_log('No namelist filename supplied -- attempting to read default file called namelist.input', LOG_LEVEL_INFO) endif status = m%initialize(namelist_file) @@ -62,9 +63,8 @@ program multi_driver ! create optional screen output for run times call unix_to_datehr(current_time, start_datehr) call unix_to_datehr(end_time, end_datehr) - print*,'----'; - print*, 'Running model => start: ', start_datehr, ' end: ', end_datehr, ' timesteps: ', int((end_time - current_time)/dt) - print*,'----' + call write_log('Running model => start: '// start_datehr // ' end: ' // end_datehr, LOG_LEVEL_INFO) + call write_log(' timesteps: ' // itoa(int((end_time - current_time)/dt)), LOG_LEVEL_INFO) ! loop through timesteps and update model while current time <= end time ( do while (current_time .le. end_time) @@ -76,8 +76,8 @@ program multi_driver ! Finalize the model run !
All model finalization code in ../src/RunSnow17.f90 !--------------------------------------------------------------------- - print*, "Finalizing..." + call write_log("Finalizing...", LOG_LEVEL_INFO) status = m%finalize() - print*, "DONE" + call write_log("DONE", LOG_LEVEL_INFO) end program multi_driver diff --git a/src/share/dateTimeUtilsModule.f90 b/src/share/dateTimeUtilsModule.f90 index add68d6..d4a3133 100644 --- a/src/share/dateTimeUtilsModule.f90 +++ b/src/share/dateTimeUtilsModule.f90 @@ -2,6 +2,7 @@ ! copied from the GMET code at https://github.com/NCAR/GMET/ module dateTimeUtilsModule + use snow_log_module implicit none public @@ -88,7 +89,7 @@ subroutine parse (str, delims, args, nargs) if (len_trim(str) == 0) exit nargs = nargs + 1 if(nargs .gt. size(args)) then - print *,'Number of predictors larger than expected, check nPredict' + call write_log('Number of predictors larger than expected, check nPredict',LOG_LEVEL_FATAL) stop end if call split (str, delims, args(nargs)) @@ -887,8 +888,9 @@ double precision function date_to_unix (date) if (error /= 0) then date_to_unix = -9999.99 - print*, 'error in date_to_unix -- date, year, month, day, hour, min, sec, error:' - print*, date, year, month, day, hour, min, sec, error + call write_log('error in date_to_unix -- date, year, month, day, hour, min, sec, error:'//date//', ' & + //itoa(year)//', '//itoa(month)//', '//itoa(day)//', '//itoa(hour)//', '//itoa(min)//', ' & + //itoa(sec)//', '//itoa(error), LOG_LEVEL_FATAL) stop !return end if @@ -1005,11 +1007,20 @@ subroutine get_utime_list (start_datetime, end_datetime, dt, times) !local integer :: t, ntimes real*8 :: utime + character(50) :: str_mod + character(50) :: str_end + character(50) :: str_start + character(50) :: str_dt if(abs(mod(end_datetime - start_datetime, dt)) > 1e-5) then - print*, 'start and end datetimes are not an even multiple of dt -- check dates in namelist' - print*, 'end_datetime, start_datetime, dt, mod:', end_datetime, start_datetime, 
dt, mod(end_datetime-start_datetime, dt) - stop + write(str_mod, '(f20.10)' ) mod(end_datetime-start_datetime, dt) + write(str_end, '(f20.10)' ) end_datetime + write(str_start, '(f20.10)' ) start_datetime + write(str_dt, '(f20.10)' ) dt + + call write_log('start and end datetimes are not an even multiple of dt -- check dates in namelist', LOG_LEVEL_FATAL) + call write_log('end_datetime, start_datetime, dt, mod:'//trim(str_end)//trim(str_start)//trim(str_dt)//trim(str_mod), LOG_LEVEL_FATAL) + stop end if ntimes = int((end_datetime - start_datetime)/dt) + 1 diff --git a/src/share/ioModule.f90 b/src/share/ioModule.f90 index d62250a..61ec208 100644 --- a/src/share/ioModule.f90 +++ b/src/share/ioModule.f90 @@ -1,13 +1,15 @@ module ioModule - + use snow_log_module use dateTimeUtilsModule use parametersType use runInfoType use forcingType use modelVarType - + implicit none - + + character(len=128) :: log_msg + contains subroutine read_snow17_parameters(this, param_file_name, runinfo) @@ -32,12 +34,17 @@ subroutine read_snow17_parameters(this, param_file_name, runinfo) ios = 0 ! open parameter file - open(unit=51,file=trim(param_file_name),status='old') - - print*, 'Reading Snow17 parameters' + open(unit=51,file=trim(param_file_name),status='old', IOSTAT=ios) + if (ios /= 0) then + call write_log('Error opening file: ' // param_file_name, LOG_LEVEL_FATAL) + stop + endif + + call write_log("Reading Snow17 parameters", LOG_LEVEL_INFO) ! --- now loop through parameter file and assign parameters n_params_read = 0 + ios = 0 do while(ios .eq. 
0) read(unit=51,FMT='(A)',IOSTAT=ios) readline @@ -134,7 +141,7 @@ subroutine read_snow17_parameters(this, param_file_name, runinfo) read(readline, *, iostat=ios) this%adc(11,:) n_params_read = n_params_read + 1 case default - print *, 'Parameter ',param,' not recognized in snow file' + call write_log('Parameter ' // param // ' not recognized in snow file', LOG_LEVEL_SEVERE) end select end if @@ -144,7 +151,7 @@ subroutine read_snow17_parameters(this, param_file_name, runinfo) ! quick check on completeness if(n_params_read /= 26) then - print *, 'Read ', n_params_read , ' SNOW17 params, but need 26. Quitting.'; stop + call write_log("Read " // itoa(n_params_read) // " SNOW17 params, but need 26. Quitting.", LOG_LEVEL_FATAL) end if ! calculate derived parameters @@ -174,7 +181,8 @@ SUBROUTINE init_forcing_files(namelist, runinfo, parameters) real :: pcp, tav ! --- code ------------------------------------------------------------------ - print*, 'Initializing forcing files' + call write_log("Initializing forcing files", LOG_LEVEL_INFO) + ios = 0 found_start = 0 do nh=1, runinfo%n_hrus @@ -185,6 +193,8 @@ SUBROUTINE init_forcing_files(namelist, runinfo, parameters) ! Check if the specified file exists inquire(file = trim(filename), exist = lexist) if (.not. lexist) then + call write_log('Problem !! File ' // trim(filename) // ' does not exist. Quitting.', LOG_LEVEL_FATAL) + call write_log('Check the forcing file specified as a command-line argument.', LOG_LEVEL_FATAL) write(*,'(/," ***** Problem *****")') write(*,'(" ***** File ''", A, "'' does not exist.")') trim(filename) write(*,'(" ***** Check the forcing file specified as a command-line argument",/)') @@ -194,6 +204,7 @@ SUBROUTINE init_forcing_files(namelist, runinfo, parameters) !
Open the forcing file open(runinfo%forcing_fileunits(nh), file = trim(filename), form = 'formatted', action = 'read', iostat = ierr) if (ierr /= 0) then + call write_log("Problem opening file " //trim(filename) //" Error Exiting", LOG_LEVEL_FATAL) write(*,'("Problem opening file ''", A, "''")') trim(filename) stop ": ERROR EXIT" endif @@ -216,7 +227,7 @@ SUBROUTINE init_forcing_files(namelist, runinfo, parameters) end do if(nh .eq. 1) then - print*, ' -- skipped ', skipcount ,' initial records in forcing files' + call write_log('Skipped ' // itoa(skipcount) // ' initial records in forcing files', LOG_LEVEL_INFO) endif ! backspace the file to the previous record @@ -226,7 +237,8 @@ SUBROUTINE init_forcing_files(namelist, runinfo, parameters) ! error out if start of any forcing file is not found if (found_start /= runinfo%n_hrus) then - print*, 'ERROR: found the starting date in only', found_start, ' out of', runinfo%n_hrus, ' forcing files. Quitting.'; stop + call write_log('Found the starting date in only ' // itoa(found_start )// ' out of ' // itoa( runinfo%n_hrus) // ' forcing files. Quitting', LOG_LEVEL_FATAL) + stop endif END SUBROUTINE init_forcing_files @@ -275,7 +287,8 @@ subroutine read_areal_forcing(namelist, parameters, runinfo, forcing) !
read one record from already open files and check success read (UNIT=runinfo%forcing_fileunits(nh), FMT=*, IOSTAT=ierr) yr, mnth, dy, hr, forcing%precip(nh), forcing%tair(nh) if(ierr /= 0) then - print*, 'ERROR: failed to read forcing from file', trim(namelist%forcing_root) // trim(parameters%hru_id(nh)) + log_msg = 'Failed to read forcing from file ' // trim(namelist%forcing_root) // trim(parameters%hru_id(nh)) + call write_log(log_msg // ' STOPPING', LOG_LEVEL_FATAL) STOP end if @@ -284,7 +297,8 @@ subroutine read_areal_forcing(namelist, parameters, runinfo, forcing) !print*, 'Read forcing datehr ', forcing_datehr if(forcing_datehr /= runinfo%curr_datehr) then - print*, 'ERROR: forcing datehr: ',forcing_datehr, ' does not match curr_datehr of run :', runinfo%curr_datehr + log_msg = 'Forcing datehr: ' // forcing_datehr // ' does not match curr_datehr of run :' // runinfo%curr_datehr + call write_log(log_msg // " STOPPING", LOG_LEVEL_FATAL) STOP end if @@ -308,12 +322,13 @@ SUBROUTINE init_output_files(namelist, runinfo, parameters) ! --- code ------------------------------------------------------------------ - print*, 'Initializing output files' + call write_log("Initializing output files", LOG_LEVEL_INFO) ! Open the main basin-average output file and write header filename = trim(namelist%output_root) // trim(namelist%main_id) // '.txt' open(runinfo%output_fileunits(1), file = trim(filename), form = 'formatted', action = 'write', status='replace', iostat = ierr) if (ierr /= 0) then + call write_log("Problem opening file " // trim(filename) //". EXITING", LOG_LEVEL_FATAL) write(*,'("Problem opening file ''", A, "''")') trim(filename) stop ": ERROR EXIT" endif @@ -330,6 +345,7 @@ SUBROUTINE init_output_files(namelist, runinfo, parameters) !
Open the output files open(runinfo%output_fileunits(nh+1), file = trim(filename), form = 'formatted', action = 'write', status='replace', iostat = ierr) if (ierr /= 0) then + call write_log("Problem opening file " // trim(filename) //". EXITING", LOG_LEVEL_SEVERE) write(*,'("Problem opening file ''", A, "''")') trim(filename) stop ": ERROR EXIT" endif @@ -358,8 +374,7 @@ SUBROUTINE init_new_state_files(namelist, runinfo, parameters) ! --- code ------------------------------------------------------------------ - print*, 'Initializing new restart files' - + call write_log('Initializing new restart files', LOG_LEVEL_INFO) ! if user setting is to write out state files, open one for each snowband and write header row if (namelist%write_states == 1) then @@ -371,6 +386,7 @@ SUBROUTINE init_new_state_files(namelist, runinfo, parameters) ! Open the output files open(runinfo%state_fileunits(nh), file = trim(filename), form = 'formatted', action = 'write', status='replace', iostat = ierr) if (ierr /= 0) then + call write_log("Problem opening file " // trim(filename) //". EXITING", LOG_LEVEL_FATAL) write(*,'("Problem opening file ''", A, "''")') trim(filename) stop ": ERROR EXIT" endif @@ -400,7 +416,8 @@ subroutine write_snow17_statefile(runinfo, namelist, modelvar, n_curr_hru) write(runinfo%state_fileunits(n_curr_hru), 41, iostat=ierr) runinfo%curr_yr, runinfo%curr_mo, runinfo%curr_dy, runinfo%curr_hr, & modelvar%tprev(n_curr_hru), modelvar%cs(:,n_curr_hru) if(ierr /= 0) then - print*, 'ERROR writing state file information for sub-unit ', n_curr_hru; stop + call write_log("Error writing state file information for sub-unit " // itoa(n_curr_hru) // ". STOPPING", LOG_LEVEL_FATAL) + stop endif return @@ -431,14 +448,14 @@ subroutine read_snow17_statefiles (modelvar, namelist, parameters, runinfo) integer :: states_found ! counter to match hrus ! ---- code ----- - print*, 'Reading restart files' + call write_log('Reading restart files', LOG_LEVEL_INFO) ! 
starting statefiles match format of statefile outputs (date then variables) ! statefile read looks for matching date timestep before run start because states are written at end of timestep prev_datetime = (runinfo%start_datetime - runinfo%dt) ! decrement unix model run time in seconds by DT call unix_to_datehr (dble(prev_datetime), state_datehr) ! create statefile datestring to match - print*, ' -- state datehr: ', state_datehr - + call write_log(" -- state datehr: " // trim(state_datehr), LOG_LEVEL_INFO) + ! loop over hrus and read and store initial state values states_found = 0 ! set counter do hru=1, runinfo%n_hrus @@ -446,12 +463,19 @@ subroutine read_snow17_statefiles (modelvar, namelist, parameters, runinfo) ! make state filename state_filename = trim(namelist%snow_state_in_root) // trim(parameters%hru_id(hru)) // '.txt' open(unit=95,FILE=trim(state_filename), FORM='formatted', status='old') + open(unit=95,FILE=trim(state_filename), FORM='formatted', status='old', IOSTAT=ios) + if (ios /= 0) then + write(*,'("Problem opening file ''", A, "''")') trim(state_filename) + call write_log("Problem opening file " //trim(state_filename), LOG_LEVEL_SEVERE) + endif + !print*, ' -- reading snow state file: ', trim(state_filename) ! format for input is an unknown number of rows with 20 data columns (1 tprev, 19 for cs) ! the first column is the datestring; neg ios means end of file; pos means something wrong ! skip header row + ios = 0 read(95, *, IOSTAT=ios) ! read each row and check to see if the date matches the initial state date @@ -472,9 +496,9 @@ subroutine read_snow17_statefiles (modelvar, namelist, parameters, runinfo) ! check to make sure enough states on correct dates were found if (states_found /= runinfo%n_hrus) then - print*, 'ERROR: matching state not found in snow17 restart file. Looking for state date: ', state_datehr - print*, ' -- last state read was on: ', statefile_datehr - print*, 'Stopping. 
Check inputs!'; stop + call write_log("Matching state not found in snow17 restart file. Looking for state date: " // trim(state_datehr), LOG_LEVEL_FATAL) + call write_log("last state read was on: " // trim(statefile_datehr) // ". STOPPING. CHECK INPUTS!", LOG_LEVEL_FATAL) + stop endif return @@ -514,7 +538,8 @@ SUBROUTINE write_snow17_output(namelist, runinfo, parameters, forcing, modelvar, forcing%precip_scf(n_curr_hru)*runinfo%dt, & modelvar%sneqv(n_curr_hru)*1000., modelvar%snowh(n_curr_hru), modelvar%raim(n_curr_hru)*runinfo%dt if(ierr /= 0) then - print*, 'ERROR writing output information for sub-unit ', n_curr_hru; stop + call write_log('ERROR writing output information for sub-unit ' // itoa(n_curr_hru) // '. STOPPING.', LOG_LEVEL_FATAL) + stop endif end if ! IF case for writing HRU-specific output to file (not including states) @@ -527,7 +552,8 @@ SUBROUTINE write_snow17_output(namelist, runinfo, parameters, forcing, modelvar, forcing%tair_comb, forcing%precip_comb*runinfo%dt, forcing%precip_scf_comb*runinfo%dt, & modelvar%sneqv_comb*1000.0, modelvar%snowh_comb, modelvar%raim_comb*runinfo%dt if(ierr /= 0) then - print*, 'ERROR writing output information for basin average'; stop + call write_log("ERROR writing output information for basin average. STOPPING.", LOG_LEVEL_FATAL) + stop endif endif diff --git a/src/share/namelistModule.f90 b/src/share/namelistModule.f90 index 5c41137..b2c582a 100644 --- a/src/share/namelistModule.f90 +++ b/src/share/namelistModule.f90 @@ -1,4 +1,5 @@ module defNamelist + implicit none ! variable definitions @@ -26,6 +27,7 @@ module defNamelist end module defNamelist module namelistModule +use snow_log_module implicit none type, public :: namelist_type @@ -56,21 +58,26 @@ module namelistModule subroutine readNamelist(this, namelist_file) use defNamelist implicit none - + class(namelist_type) :: this ! Optional namelist_file path/filename to read !
if not given, the program looks for 'namelist.input' in run directory as a default character(len=*), intent (in), optional :: namelist_file - - print*, 'Reading namelist' + integer :: ios + ios = 0 + + call write_log("readNameList : reading namelist", LOG_LEVEL_INFO) ! -- open and read namelist file - open(33, file=namelist_file, form="formatted") + open(33, file=namelist_file, form="formatted", IOSTAT=ios) + if (ios /= 0) then + call write_log('Error opening namelist file ' // namelist_file, LOG_LEVEL_FATAL) + end if + read(33, SNOW17_CONTROL) close(33) - print*, ' -- simulating basin ', main_id, ' with ', n_hrus, ' snowbands' - + call write_log('readNameList -- simulating basin ' // main_id // ' with ' // itoa(n_hrus) // ' snowbands', LOG_LEVEL_INFO) ! -- transfer to namelist datatype this%main_id = main_id this%n_hrus = n_hrus @@ -89,9 +96,10 @@ subroutine readNamelist(this, namelist_file) ! -- namelist entry checks -- if (this%warm_start_run .eq. 1 .and. this%write_states .eq. 1) then this%write_states = 0 - print*, ' -- WARNING: cannot read and write state files at the same time. Setting write_states option to 0 and continuing' + call write_log("readNameList - cannot read and write state files at the same time.", LOG_LEVEL_WARNING) + call write_log("Setting write_states option to 0 and continuing", LOG_LEVEL_WARNING) endif end subroutine readNamelist -end module namelistModule \ No newline at end of file +end module namelistModule diff --git a/src/share/snowLogger.f90 b/src/share/snowLogger.f90 new file mode 100644 index 0000000..6b9f202 --- /dev/null +++ b/src/share/snowLogger.f90 @@ -0,0 +1,339 @@ +module snow_log_module + implicit none + + public :: write_log, is_logger_enabled, get_log_level, itoa, rtoa + public :: LOG_LEVEL_DEBUG, LOG_LEVEL_INFO, LOG_LEVEL_WARNING, LOG_LEVEL_SEVERE, LOG_LEVEL_FATAL + + ! 
Log levels (made public so other modules can use them) + integer, parameter :: LOG_LEVEL_DEBUG = 1 + integer, parameter :: LOG_LEVEL_INFO = 2 + integer, parameter :: LOG_LEVEL_WARNING = 3 + integer, parameter :: LOG_LEVEL_SEVERE = 4 + integer, parameter :: LOG_LEVEL_FATAL = 5 + + integer :: log_level = LOG_LEVEL_INFO + + character(len=1024) :: log_file_path + integer :: log_unit = 12 + logical :: logging_enabled = .false. + logical :: opened_once = .false. + logical :: logger_initialized = .false. ! Flag to track if the logger has been initialized + + ! To use Logger while running this module stand-alone, set + ! the following environment variables, case-sensitive: + ! NGEN_EWTS_LOGGING=ENABLED + ! NGEN_LOG_FILE_PATH= + ! SNOW17_LOGLEVEL= + ! + ! Constants + character(len=16), parameter :: MODULE_NAME = "Snow-17" + character(len=14), parameter :: LOG_DIR_NGENCERF = "/ngencerf/data" + character(len=8), parameter :: LOG_DIR_DEFAULT = "run-logs" + character(len=3), parameter :: LOG_FILE_EXT = "log" + character(len=1), parameter :: DS = "/" + character(len=17), parameter :: EV_EWTS_LOGGING = "NGEN_EWTS_LOGGING" + character(len=18), parameter :: EV_NGEN_LOGFILEPATH = "NGEN_LOG_FILE_PATH" + character(len=19), parameter :: EV_MODULE_LOGFILEPATH = "SNOW17_LOGFILEPATH" + character(len=16), parameter :: EV_MODULE_LOGLEVEL = "SNOW17_LOGLEVEL" + character(len=7), parameter :: log_module_name = "SNOW17" + integer, parameter :: LOG_MODULE_NAME_LEN = 8 ! // Width of module name for log entries + integer, parameter :: LOG_ENTRY_LEVEL_LEN = 7 ! // Width of log level for log entries + +contains + + subroutine initialize_logger() + character(len=256) :: log_env, log_str + integer :: save_log_level + + logger_initialized = .true. + call get_env_var(EV_EWTS_LOGGING, log_env) + if (trim(log_env) == "ENABLED") then + logging_enabled = .true. + print *,trim(MODULE_NAME)," Logging ", trim(log_env) + call flush(6) + else + logging_enabled = .false.
+ print *,trim(MODULE_NAME)," Logging NOT enabled. EV_EWTS_LOGGING=", trim(log_env) + call flush(6) + return + end if + ! Here because return was not executed above + + ! Set log level from environment variable (if exists) + call get_env_var(EV_MODULE_LOGLEVEL, log_env) + log_str = trim(log_env) + if (log_str == "DEBUG" ) then + log_level = LOG_LEVEL_DEBUG + else if (log_str == "INFO" ) then + log_level = LOG_LEVEL_INFO + else if (log_str == "WARNING" ) then + log_level = LOG_LEVEL_WARNING + else if (log_str == "SEVERE" ) then + log_level = LOG_LEVEL_SEVERE + else if (log_str == "FATAL" ) then + log_level = LOG_LEVEL_FATAL + else + log_str = "INFO (" // trim(EV_MODULE_LOGLEVEL) // " = '" // trim(log_str) // "' INVALID Log Level) Defaulted to INFO" + log_level = LOG_LEVEL_INFO ! Default level + end if + + ! Get the log file path by calling set_log_file_path + call set_log_file_path() + + print *, trim(MODULE_NAME)," Log level: ", log_str + call flush(6) + end subroutine initialize_logger + + function fit_string(str, target_len) result(fixed_str) + implicit none + character(len=*), intent(in) :: str + integer, intent(in) :: target_len + character(len=target_len) :: fixed_str + integer :: copy_len + + ! Determine how many characters to copy (min of source or target length) + copy_len = min(len_trim(str), target_len) + + ! Copy the appropriate number of characters, right pad with spaces automatically + fixed_str = ' ' ! Initialize to spaces + fixed_str(1:copy_len) = str(1:copy_len) + end function fit_string + + subroutine write_log(message, msg_level) + character(len=*), intent(in) :: message + integer, intent (in) :: msg_level + character(len=40) :: timestamp, log_level_str + character(len=LOG_MODULE_NAME_LEN) :: fixed_tag + character(len=LOG_ENTRY_LEVEL_LEN) :: fixed_lvl + character(len=1100) :: log_msg + + if (.not. logger_initialized) then + call initialize_logger() + end if + + if (logging_enabled .and. (msg_level >= log_level)) then + ! 
Convert log level to a string + if (msg_level == LOG_LEVEL_DEBUG ) then + log_level_str = "DEBUG" + elseif (msg_level == LOG_LEVEL_INFO ) then + log_level_str = "INFO" + elseif (msg_level == LOG_LEVEL_WARNING ) then + log_level_str = "WARNING" + elseif (msg_level == LOG_LEVEL_SEVERE ) then + log_level_str = "SEVERE" + elseif (msg_level == LOG_LEVEL_FATAL ) then + log_level_str = "FATAL" + else + ! If the level is unknown, ignore logging + return + end if + + ! Log the message + call create_timestamp(.false., .true., .true., timestamp) + fixed_tag = fit_string(log_module_name, LOG_MODULE_NAME_LEN) + fixed_lvl = fit_string(log_level_str,LOG_ENTRY_LEVEL_LEN) + log_msg = trim(timestamp) // " " // fixed_tag // " " // fixed_lvl // " " // trim(message) + if (log_file_ready(.true.)) then + write(log_unit, '(A)') trim(log_msg) + else + print *, log_msg + call flush(6) + end if + close(log_unit) ! Since this is a shared file with other modules, need to close this acces after each write + end if + end subroutine write_log + + subroutine create_timestamp(date_only, iso, append_ms, timestamp) + logical, intent(in) :: date_only + logical, intent(in) :: iso + logical, intent(in) :: append_ms + character(len=*), intent(out) :: timestamp + + integer :: values(8) + character(len=32) :: ts_base, ms_str + + call date_and_time(values=values) + + if (date_only) then + write(ts_base, '(I4.4,I2.2,I2.2)') & + values(1), values(2), values(3) + else if (iso) then + write(ts_base, '(I4.4,"-",I2.2,"-",I2.2,"T",I2.2,":",I2.2,":",I2.2)') & + values(1), values(2), values(3), values(5), values(6), values(7) + else + write(ts_base, '(I4.4,I2.2,I2.2,"T",I2.2,I2.2,I2.2)') & + values(1), values(2), values(3), values(5), values(6), values(7) + end if + + if (append_ms) then + write(ms_str, '(".",I3.3)') values(8) + timestamp = trim(ts_base) // trim(ms_str) + else + timestamp = trim(ts_base) + end if + end subroutine create_timestamp + + ! 
Check if a directory exists + logical function directory_exists(path) + character(len=*), intent(in) :: path + integer :: status + character(len=522) :: cmd + + cmd = "test -d " // path + call execute_command_line(cmd, exitstat=status) + directory_exists = (status == 0) + end function directory_exists + + ! Create directory if it doesn't exist + logical function create_directory(path) + character(len=*), intent(in) :: path + character(len=522) :: cmd + integer :: status + + if (.not. directory_exists(path)) then + cmd = "mkdir -p " // path + call execute_command_line(cmd, exitstat=status) + create_directory = (status == 0) + else + create_directory = .true. + end if + end function create_directory + + ! Set the log file path based on environment variables or defaults + subroutine set_log_file_path() + character(len=256) :: env_var + logical :: append_entries + logical :: module_log_env_exists + character(len=512) :: log_file_dir + character(len=40) :: timestamp + integer :: save_log_level + + append_entries = .true. + module_log_env_exists = .false. + ! Check if module log path environment exists + call get_env_var(EV_MODULE_LOGFILEPATH, env_var) + if (trim(adjustl(env_var)) /= "") then + log_file_path = trim(env_var) + module_log_env_exists = .true. + else + ! log file path not set yet + call get_env_var(EV_NGEN_LOGFILEPATH, env_var) + if (trim(adjustl(env_var)) /= "") then + log_file_path = trim(env_var) + else + ! ngen log path does not exist. Create alternate log + append_entries = .false. + ! Determine parent dir + if (directory_exists(LOG_DIR_NGENCERF)) then + log_file_dir = trim(LOG_DIR_NGENCERF) // DS // trim(LOG_DIR_DEFAULT) + else + call get_env_var("HOME", env_var) + if (trim(adjustl(env_var)) /= "") then + log_file_dir = trim(env_var) // DS // trim(LOG_DIR_DEFAULT) + else + log_file_dir = "~" // DS // trim(LOG_DIR_DEFAULT) + end if + end if + ! Ensure parent log dir exists + if (create_directory(log_file_dir)) then + ! 
Get dir for this log + call get_env_var("USER", env_var) + if (trim(adjustl(env_var)) /= "") then + log_file_dir = trim(log_file_dir) // DS // trim(env_var) + else + ! Get a date only timestamp + call create_timestamp(.true., .false., .false., timestamp) + log_file_dir = trim(log_file_dir) // DS // trim(timestamp) + end if + if (create_directory(log_file_dir)) then + ! Set log file name with T