From ddd01667c8604fe49b61ee533295b13e859e9019 Mon Sep 17 00:00:00 2001
From: Kaitlin Naughten
Date: Tue, 11 Jul 2017 18:23:52 +1000
Subject: [PATCH] Intercomparison ocean figures

---
 file_guide.txt               |  28 +++++
 mip_dpt_calc.py              |  97 ++++++++++++++++
 mip_hi_seasonal.py           |   2 +-
 mip_ts_distribution.py       | 219 +++++++++++++++++++++++++++++++++++
 seasonal_climatology_roms.py | 185 +++++++++++++++--------------
 5 files changed, 436 insertions(+), 95 deletions(-)
 create mode 100644 mip_dpt_calc.py
 create mode 100644 mip_ts_distribution.py

diff --git a/file_guide.txt b/file_guide.txt
index 90761e9..1ed36c0 100644
--- a/file_guide.txt
+++ b/file_guide.txt
@@ -1320,6 +1320,34 @@ mip_mld_seasonal.py: Make a 4x2 plot showing seasonally averaged mixed layer
                      well as the ROMS grid file and the FESOM mesh directory.
                      It will output a png file.
 
+mip_dpt_calc.py: Calculate the mean Drake Passage transport over 2002-2016, as
+                 well as the linear trend, for MetROMS, low-res FESOM, and
+                 high-res FESOM. Print the results to the screen.
+                 To run: First run timeseries_dpt.py for MetROMS, and the
+                         equivalent script timeseries_dpt.py in the fesomtools
+                         repository for both FESOM simulations. Then open
+                         python or ipython and type "run mip_dpt_calc.py". The
+                         script will prompt you for the paths to the 3 log
+                         files.
+
+mip_ts_distribution.py: Make a 2x1 plot of T/S distributions on the continental
+                        shelf (defined by regions south of 60S with bathymetry
+                        shallower than 1500 m, plus all ice shelf cavities) in
+                        MetROMS (left) and FESOM (right). Include the surface
+                        freezing point and density contours.
+                        To run: First clone my "fesomtools" GitHub repository
+                                and replace '/short/y99/kaa561/fesomtools'
+                                (near the top of the file) with the path to the
+                                cloned repository on your system. Next, make
+                                time-averaged files of temperature and salinity
+                                for each model over the same period (I used
+                                2002-2016). Then open python or ipython and
+                                type "run mip_ts_distribution.py". The script
+                                will prompt you for the paths to the ROMS grid
+                                file, the ROMS time-averaged file, the FESOM
+                                mesh directory, and the FESOM time-averaged
+                                file. It will output a png.
+
 
 ***UTILITY FUNCTIONS***
 
diff --git a/mip_dpt_calc.py b/mip_dpt_calc.py
new file mode 100644
index 0000000..64e34d9
--- /dev/null
+++ b/mip_dpt_calc.py
@@ -0,0 +1,97 @@
+from numpy import *
+from scipy.stats import linregress
+
+# Calculate the mean Drake Passage transport over 2002-2016, as well as the
+# linear trend, for MetROMS, low-res FESOM, and high-res FESOM. Print the
+# results to the screen.
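+#
+# Example call (the logfile names here are hypothetical placeholders for the
+# actual output of timeseries_dpt.py on your system):
+#   mip_dpt_calc('dpt_metroms.log', 'dpt_fesom_low.log', 'dpt_fesom_high.log')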
+# Input: +# roms_log = logfile from timeseries_dpt.py for MetROMS +# fesom_log_low, fesom_log_high = logfiles from timeseries_dpt.py in the +# fesomtools repository, for FESOM low-res and +# high-res respectively +def mip_dpt_calc (roms_log, fesom_log_low, fesom_log_high): + + # Averaging period (days) + days_per_output = 5 + # Year simulation starts + year_start = 1992 + # Year calculation starts + calc_start = 2002 + + # Read ROMS timeseries + roms_time = [] + roms_dpt = [] + f = open(roms_log, 'r') + # Skip first line (header for time array) + f.readline() + for line in f: + try: + roms_time.append(float(line)) + except(ValueError): + # Reached the header for the next variable + break + for line in f: + roms_dpt.append(float(line)) + f.close() + # Add start year to ROMS time array + roms_time = array(roms_time) + year_start + roms_dpt = array(roms_dpt) + + # Read FESOM low-res timeseries + fesom_dpt_low = [] + f = open(fesom_log_low, 'r') + # Skip header + f.readline() + for line in f: + fesom_dpt_low.append(float(line)) + f.close() + # Read FESOM high-res timeseries + fesom_dpt_high = [] + f = open(fesom_log_high, 'r') + f.readline() + for line in f: + fesom_dpt_high.append(float(line)) + f.close() + # Make FESOM time array (note that FESOM neglects leap years in its output) + fesom_time = arange(len(fesom_dpt_low))*days_per_output/365. + year_start + fesom_dpt_low = array(fesom_dpt_low) + fesom_dpt_high = array(fesom_dpt_high) + + # Find range of time indices to consider + # ROMS + t_start_roms = nonzero(roms_time >= calc_start)[0][0] + t_end_roms = len(roms_time) + # FESOM + t_start_fesom = (calc_start-year_start)*365/days_per_output + t_end_fesom = len(fesom_time) + # Slice off the indices we don't care about + roms_time = roms_time[t_start_roms:t_end_roms] + roms_dpt = roms_dpt[t_start_roms:t_end_roms] + fesom_time = fesom_time[t_start_fesom:t_end_fesom] + fesom_dpt_low = fesom_dpt_low[t_start_fesom:t_end_fesom] + fesom_dpt_high = fesom_dpt_high[t_start_fesom:t_end_fesom] + + # Calculate and print averages + print 'Average Drake Passage Transport' + print 'MetROMS: ' + str(mean(roms_dpt)) + print 'FESOM low-res: ' + str(mean(fesom_dpt_low)) + print 'FESOM high-res: ' + str(mean(fesom_dpt_high)) + + # Calculate and print trends + # Also print p-values so we can see if it's statistically significant + print 'Trends in Drake Passage Transport' + slope, intercept, r_value, p_value, std_err = linregress(roms_time, roms_dpt) + print 'MetROMS: ' + str(slope) + ' Sv/y, p=' + str(p_value) + slope, intercept, r_value, p_value, std_err = linregress(fesom_time, fesom_dpt_low) + print 'FESOM low-res: ' + str(slope) + ' Sv/y, p=' + str(p_value) + slope, intercept, r_value, p_value, std_err = linregress(fesom_time, fesom_dpt_high) + print 'FESOM high-res: ' + str(slope) + ' Sv/y, p=' + str(p_value) + + +# Command-line interface +if __name__ == "__main__": + + roms_log = raw_input("Path to ROMS logfile from timeseries_dpt.py: ") + fesom_log_low = raw_input("Path to FESOM low-res logfile from timeseries_dpt.py: ") + fesom_log_high = raw_input("Path to FESOM high-res logfile from timeseries_dpt.py: ") + mip_dpt_calc(roms_log, fesom_log_low, fesom_log_high) diff --git a/mip_hi_seasonal.py b/mip_hi_seasonal.py index 6d466a9..f71e911 100644 --- a/mip_hi_seasonal.py +++ b/mip_hi_seasonal.py @@ -108,7 +108,7 @@ def mip_hi_seasonal (cice_seasonal_file, fesom_mesh_path, fesom_seasonal_file): cbaxes = fig.add_axes([0.35, 0.04, 0.3, 0.02]) cbar = colorbar(img, orientation='horizontal', 
ticks=arange(bounds[0],bounds[1]+0.5,0.5), cax=cbaxes, extend='max')
     cbar.ax.tick_params(labelsize=20)
-    suptitle('Sea ice effective thickness (m)', fontsize=30)
+    suptitle('Sea ice effective thickness (m), 1992-2016 average', fontsize=30)
     subplots_adjust(wspace=0.025,hspace=0.025)
     fig.savefig('hi_seasonal.png')
 
diff --git a/mip_ts_distribution.py b/mip_ts_distribution.py
new file mode 100644
index 0000000..1fda29c
--- /dev/null
+++ b/mip_ts_distribution.py
@@ -0,0 +1,219 @@
+from netCDF4 import Dataset
+from numpy import *
+from matplotlib.pyplot import *
+from cartesian_grid_3d import *
+# Import FESOM scripts (have to modify path first)
+import sys
+sys.path.insert(0, '/short/y99/kaa561/fesomtools')
+from fesom_grid import *
+# This will use the FESOM version of unesco.py for both MetROMS and FESOM;
+# luckily it's identical.
+from unesco import *
+
+# Make a 2x1 plot of T/S distributions on the continental shelf (defined by
+# regions south of 60S with bathymetry shallower than 1500 m, plus all ice shelf
+# cavities) in MetROMS (left) and FESOM (right). Include the surface freezing
+# point and density contours.
+# Input:
+# roms_grid = path to ROMS grid file
+# roms_file = path to time-averaged ROMS file containing temperature and
+#             salinity (I used 2002-2016 average)
+# fesom_mesh_path = path to FESOM mesh directory (I used high-res)
+# fesom_file = path to time-averaged FESOM file containing temperature and
+#              salinity, over the same period as roms_file
+def mip_ts_distribution (roms_grid, roms_file, fesom_mesh_path, fesom_file):
+
+    # Bounds on latitude and bathymetry for continental shelf
+    lat0 = -60
+    h0 = 1500
+    # Number of temperature and salinity bins
+    num_bins = 1000
+    # Bounds on temperature and salinity bins (pre-computed, change if needed)
+    min_salt = 32.3
+    max_salt = 35.2
+    min_temp = -3.1
+    max_temp = 1.8
+    # Bounds to actually plot
+    min_salt_plot = 33.25
+    max_salt_plot = 35.0
+    min_temp_plot = -3
+    max_temp_plot = 1.5
+    # FESOM grid generation parameters
+    circumpolar = False
+    cross_180 = False
+    # ROMS vertical grid parameters
+    theta_s = 7.0
+    theta_b = 2.0
+    hc = 250
+    N = 31
+
+    print 'Setting up bins'
+    # Calculate boundaries of temperature bins
+    temp_bins = linspace(min_temp, max_temp, num=num_bins)
+    # Calculate centres of temperature bins (for plotting)
+    temp_centres = 0.5*(temp_bins[:-1] + temp_bins[1:])
+    # Repeat for salinity
+    salt_bins = linspace(min_salt, max_salt, num=num_bins)
+    salt_centres = 0.5*(salt_bins[:-1] + salt_bins[1:])
+    # Set up 2D arrays of temperature bins x salinity bins to increment with
+    # volume of water masses
+    ts_vals_roms = zeros([size(temp_centres), size(salt_centres)])
+    ts_vals_fesom = zeros([size(temp_centres), size(salt_centres)])
+    # Calculate surface freezing point as a function of salinity as seen by
+    # each sea ice model
+    freezing_pt_roms = salt_centres/(-18.48 + 18.48/1e3*salt_centres)
+    freezing_pt_fesom = -0.0575*salt_centres + 1.7105e-3*sqrt(salt_centres**3) - 2.155e-4*salt_centres**2
+    # Get 2D versions of the temperature and salinity bins
+    salt_2d, temp_2d = meshgrid(salt_centres, temp_centres)
+    # Calculate potential density of each combination of temperature and
+    # salinity bins
+    density = unesco(temp_2d, salt_2d, zeros(shape(temp_centres)))-1000
+    # Density contours to plot
+    density_lev = arange(26.8, 28.2, 0.2)
+
+    print 'Processing ROMS'
+    # Read ROMS grid variables we need
+    id = Dataset(roms_grid, 'r')
+    roms_lon = id.variables['lon_rho'][:,:]
+    roms_lat = id.variables['lat_rho'][:,:]
+    roms_h = 
id.variables['h'][:,:] + roms_zice = id.variables['zice'][:,:] + id.close() + num_lat = size(roms_lat, 0) + num_lon = size(roms_lon, 1) + # Get integrands on 3D grid + dx, dy, dz, z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N) + # Get volume integrand + dV = dx*dy*dz + # Read ROMS output + id = Dataset(roms_file, 'r') + roms_temp = id.variables['temp'][0,:,:,:] + roms_salt = id.variables['salt'][0,:,:,:] + id.close() + # Loop over 2D grid boxes + for j in range(num_lat): + for i in range(num_lon): + # Check for land mask + if roms_temp[0,j,i] is ma.masked: + continue + # Check if we're in the region of interest + if (roms_lat[j,i] < lat0 and roms_h[j,i] < h0) or (roms_zice[j,i] != 0): + # Loop downward + for k in range(N): + # Figure out which bins this falls into + temp_index = nonzero(temp_bins > roms_temp[k,j,i])[0][0] - 1 + salt_index = nonzero(salt_bins > roms_salt[k,j,i])[0][0] - 1 + # Increment bins with volume + ts_vals_roms[temp_index, salt_index] += dV[k,j,i] + # Mask bins with zero volume + ts_vals_roms = ma.masked_where(ts_vals_roms==0, ts_vals_roms) + + print 'Processing FESOM' + # Make FESOM grid elements + elements = fesom_grid(fesom_mesh_path, circumpolar, cross_180) + # Read temperature and salinity at each 3D node + id = Dataset(fesom_file, 'r') + fesom_temp = id.variables['temp'][0,:] + fesom_salt = id.variables['salt'][0,:] + id.close() + # Loop over elements + for elm in elements: + # Find bathymetry at each node + node_bathy = array([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth]) + # See if we're in the region of interest + if (all(elm.lat < lat0) and all(node_bathy < h0)) or (elm.cavity): + # Get area of 2D triangle + area = elm.area() + nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]] + # Loop downward + while True: + if nodes[0].below is None or nodes[1].below is None or nodes[2].below is None: + # We've reached the bottom + break + # Calculate average temperature, salinity, and layer thickness + # over this 3D triangular prism + temp_vals = [] + salt_vals = [] + dz = [] + for i in range(3): + # Average temperature over 6 nodes + temp_vals.append(fesom_temp[nodes[i].id]) + temp_vals.append(fesom_temp[nodes[i].below.id]) + # Average salinity over 6 nodes + salt_vals.append(fesom_salt[nodes[i].id]) + salt_vals.append(fesom_salt[nodes[i].below.id]) + # Average dz over 3 vertical edges + dz.append(abs(nodes[i].depth - nodes[i].below.depth)) + # Get ready for next repetition of loop + nodes[i] = nodes[i].below + temp_elm = mean(array(temp_vals)) + salt_elm = mean(array(salt_vals)) + # Calculate volume of 3D triangular prism + volume = area*mean(array(dz)) + # Figure out which bins this falls into + temp_index = nonzero(temp_bins > temp_elm)[0][0] - 1 + salt_index = nonzero(salt_bins > salt_elm)[0][0] - 1 + # Increment bins with volume + ts_vals_fesom[temp_index, salt_index] += volume + # Mask bins with zero volume + ts_vals_fesom = ma.masked_where(ts_vals_fesom==0, ts_vals_fesom) + + # Find bounds on log of volume + min_val = min(amin(log(ts_vals_roms)), amin(log(ts_vals_fesom))) + max_val = max(amax(log(ts_vals_roms)), amax(log(ts_vals_fesom))) + # Set labels for density contours + manual_locations = [(33.4, 0.5), (33.6, 0.75), (33.9, 1.0), (34.2, 1.0), (34.45, 1.3), (34.75, 1.3), (34.9, 0.5)] + + print "Plotting" + fig = figure(figsize=(20,12)) + # ROMS + ax = fig.add_subplot(1, 2, 1) + pcolor(salt_centres, temp_centres, log(ts_vals_roms), vmin=min_val, vmax=max_val, 
cmap='jet') + # Add surface freezing point line + plot(salt_centres, freezing_pt_roms, color='black', linestyle='dashed') + # Add density contours + cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted') + clabel(cs, inline=1, fontsize=14, color=(0.6,0.6,0.6), fmt='%1.1f', manual=manual_locations) + xlim([min_salt_plot, max_salt_plot]) + ylim([min_temp_plot, max_temp_plot]) + ax.tick_params(axis='x', labelsize=16) + ax.tick_params(axis='y', labelsize=16) + xlabel('Salinity (psu)', fontsize=22) + ylabel(r'Temperature ($^{\circ}$C)', fontsize=22) + title('MetROMS', fontsize=28) + # FESOM + ax = fig.add_subplot(1, 2, 2) + img = pcolor(salt_centres, temp_centres, log(ts_vals_fesom), vmin=min_val, vmax=max_val, cmap='jet') + plot(salt_centres, freezing_pt_fesom, color='black', linestyle='dashed') + cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted') + clabel(cs, inline=1, fontsize=14, color=(0.6,0.6,0.6), fmt='%1.1f', manual=manual_locations) + xlim([min_salt_plot, max_salt_plot]) + ylim([min_temp_plot, max_temp_plot]) + ax.tick_params(axis='x', labelsize=16) + ax.tick_params(axis='y', labelsize=16) + xlabel('Salinity (psu)', fontsize=22) + title('FESOM (high-res)', fontsize=28) + # Add a colourbar on the right + cbaxes = fig.add_axes([0.93, 0.2, 0.02, 0.6]) + cbar = colorbar(img, cax=cbaxes) + cbar.ax.tick_params(labelsize=18) + # Add the main title + suptitle('Continental shelf water masses (log of volume), 2002-2016 average', fontsize=30) + subplots_adjust(wspace=0.1) + #fig.show() + fig.savefig('ts_distribution.png') + + +# Command-line interface +if __name__ == "__main__": + + roms_grid = raw_input("Path to ROMS grid file: ") + roms_file = raw_input("Path to time-averaged ROMS file containing temperature and salinity: ") + fesom_mesh_path = raw_input("Path to FESOM mesh directory: ") + fesom_file = raw_input("Path to time-averaged FESOM file containing temperature and salinity: ") + mip_ts_distribution(roms_grid, roms_file, fesom_mesh_path, fesom_file) + + + + + diff --git a/seasonal_climatology_roms.py b/seasonal_climatology_roms.py index 8ed5d91..65e8453 100644 --- a/seasonal_climatology_roms.py +++ b/seasonal_climatology_roms.py @@ -11,7 +11,8 @@ # process files ocean_avg_0001.nc through # ocean_avg_0102.nc. 
# out_file = path to desired output file -def seasonal_climatology_roms (directory, start_index, end_index, out_file): +# start_year = optional integer containing the first year to consider +def seasonal_climatology_roms (directory, start_index, end_index, out_file, start_year=1992): # Starting and ending months (1-based) for each season start_month = [12, 3, 6, 9] @@ -36,8 +37,6 @@ def seasonal_climatology_roms (directory, start_index, end_index, out_file): seasonal_salt[:,:,:,:] = 0.0 # Also integrate number of days in each season ndays = zeros(4) - # Total number of days for a final check - total_days = 0 # Loop over files for index in range(start_index, end_index+1): @@ -52,108 +51,106 @@ def seasonal_climatology_roms (directory, start_index, end_index, out_file): time_id = id.variables['ocean_time'] time = num2date(time_id[:], units=time_id.units, calendar=time_id.calendar.lower()) id.close() - total_days += size(time)*5 print 'Integrating climatology' # Loop over timesteps for t in range(size(time)): - print '...time index ' + str(t+1) + ' of ' + str(size(time)) - # 5-day averages marked with middle day's date - year = time[t].year - month = time[t].month - day = time[t].day - # Get the season of this middle day - if month in [12, 1, 2]: - # DJF - season = 0 - elif month in [3, 4, 5]: - # MAM - season = 1 - elif month in [6, 7, 8]: - # JJA - season = 2 - elif month in [9, 10, 11]: - # SON - season = 3 - # Check for leap years - leap_year = False - if mod(year, 4) == 0: - leap_year = True - if mod(year, 100) == 0: - leap_year = False - if mod(year, 400) == 0: - leap_year = True - # Update last day in February - if leap_year: - end_day[0] = 29 - else: - end_day[0] = 28 - if month == start_month[season]: - # We are in the first month of the season - if day-2 < start_day[season]: - # Partially spills over into the previous season - prev_season = mod(season-1, 4) - # How many days does it spill over by? 
- spill_days = start_day[season]-day+2 - # Should be either 1 or 2 - if spill_days not in [1,2]: - print 'Problem: spill_days is ' + str(spill_days) - print 'Timestep ' + str(t+1) - print 'Year ' + str(year) - print 'Month ' + str(month+1) - print 'Day ' + str(day) - return - # Split between previous season and this season - seasonal_temp[prev_season,:,:,:] += temp[t,:,:,:]*spill_days - seasonal_salt[prev_season,:,:,:] += salt[t,:,:,:]*spill_days - ndays[prev_season] += spill_days - seasonal_temp[season,:,:,:] += temp[t,:,:,:]*(5-spill_days) - seasonal_salt[season,:,:,:] += salt[t,:,:,:]*(5-spill_days) - ndays[season] += 5-spill_days + # Make sure we are past start_year + if time[t].year >= start_year: + print '...time index ' + str(t+1) + ' of ' + str(size(time)) + # 5-day averages marked with middle day's date + year = time[t].year + month = time[t].month + day = time[t].day + # Get the season of this middle day + if month in [12, 1, 2]: + # DJF + season = 0 + elif month in [3, 4, 5]: + # MAM + season = 1 + elif month in [6, 7, 8]: + # JJA + season = 2 + elif month in [9, 10, 11]: + # SON + season = 3 + # Check for leap years + leap_year = False + if mod(year, 4) == 0: + leap_year = True + if mod(year, 100) == 0: + leap_year = False + if mod(year, 400) == 0: + leap_year = True + # Update last day in February + if leap_year: + end_day[0] = 29 else: - # Entirely within the season - seasonal_temp[season,:,:,:] += temp[t,:,:,:]*5 - seasonal_salt[season,:,:,:] += salt[t,:,:,:]*5 - ndays[season] += 5 - elif month == end_month[season]: - # We are in the last month of the season - if day+2 > end_day[season]: - # Partially spills over into the next season - next_season = mod(season+1, 4) - # How many days does it spill over by? - spill_days = day+2-end_day[season] - # Should be either 1 or 2 - if spill_days not in [1,2]: - print 'Problem: spill_days is ' + str(spill_days) - print 'Timestep ' + str(t+1) - print 'Year ' + str(year) - print 'Month ' + str(month+1) - print 'Day ' + str(day) - return - # Split between this season and next season - seasonal_temp[next_season,:,:,:] += temp[t,:,:,:]*spill_days - seasonal_salt[next_season,:,:,:] += salt[t,:,:,:]*spill_days - ndays[next_season] += spill_days - seasonal_temp[season,:,:,:] += temp[t,:,:,:]*(5-spill_days) - seasonal_salt[season,:,:,:] += salt[t,:,:,:]*(5-spill_days) - ndays[season] += 5-spill_days + end_day[0] = 28 + if month == start_month[season]: + # We are in the first month of the season + if day-2 < start_day[season]: + # Partially spills over into the previous season + prev_season = mod(season-1, 4) + # How many days does it spill over by? 
+ spill_days = start_day[season]-day+2 + # Should be either 1 or 2 + if spill_days not in [1,2]: + print 'Problem: spill_days is ' + str(spill_days) + print 'Timestep ' + str(t+1) + print 'Year ' + str(year) + print 'Month ' + str(month+1) + print 'Day ' + str(day) + return + # Split between previous season and this season + seasonal_temp[prev_season,:,:,:] += temp[t,:,:,:]*spill_days + seasonal_salt[prev_season,:,:,:] += salt[t,:,:,:]*spill_days + ndays[prev_season] += spill_days + seasonal_temp[season,:,:,:] += temp[t,:,:,:]*(5-spill_days) + seasonal_salt[season,:,:,:] += salt[t,:,:,:]*(5-spill_days) + ndays[season] += 5-spill_days + else: + # Entirely within the season + seasonal_temp[season,:,:,:] += temp[t,:,:,:]*5 + seasonal_salt[season,:,:,:] += salt[t,:,:,:]*5 + ndays[season] += 5 + elif month == end_month[season]: + # We are in the last month of the season + if day+2 > end_day[season]: + # Partially spills over into the next season + next_season = mod(season+1, 4) + # How many days does it spill over by? + spill_days = day+2-end_day[season] + # Should be either 1 or 2 + if spill_days not in [1,2]: + print 'Problem: spill_days is ' + str(spill_days) + print 'Timestep ' + str(t+1) + print 'Year ' + str(year) + print 'Month ' + str(month+1) + print 'Day ' + str(day) + return + # Split between this season and next season + seasonal_temp[next_season,:,:,:] += temp[t,:,:,:]*spill_days + seasonal_salt[next_season,:,:,:] += salt[t,:,:,:]*spill_days + ndays[next_season] += spill_days + seasonal_temp[season,:,:,:] += temp[t,:,:,:]*(5-spill_days) + seasonal_salt[season,:,:,:] += salt[t,:,:,:]*(5-spill_days) + ndays[season] += 5-spill_days + else: + # Entirely within the season + seasonal_temp[season,:,:,:] += temp[t,:,:,:]*5 + seasonal_salt[season,:,:,:] += salt[t,:,:,:]*5 + ndays[season] += 5 else: - # Entirely within the season + # We are in the middle month of the season + # The 5 days in this index are entirely within the season seasonal_temp[season,:,:,:] += temp[t,:,:,:]*5 seasonal_salt[season,:,:,:] += salt[t,:,:,:]*5 - ndays[season] += 5 - else: - # We are in the middle month of the season - # The 5 days in this index are entirely within the season - seasonal_temp[season,:,:,:] += temp[t,:,:,:]*5 - seasonal_salt[season,:,:,:] += salt[t,:,:,:]*5 - ndays[season] += 5 + ndays[season] += 5 # Convert from sums to averages for season in range(4): seasonal_temp[season,:,:,:] = seasonal_temp[season,:,:,:]/ndays[season] seasonal_salt[season,:,:,:] = seasonal_salt[season,:,:,:]/ndays[season] - if sum(ndays) != total_days: - print 'Problem: files have ' + str(total_days) + ' days, but climatology has ' + str(sum(ndays)) - return # Write to file print 'Writing ' + out_file
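
The two new scripts and the updated seasonal_climatology_roms.py can also be driven non-interactively by importing them instead of answering the raw_input prompts. The sketch below is illustrative only: every path and filename in it is a hypothetical placeholder, and mip_ts_distribution additionally assumes the fesomtools path near the top of that file already points to your own clone.

    # Example batch driver (illustrative; all paths are hypothetical placeholders)
    from mip_dpt_calc import mip_dpt_calc
    from mip_ts_distribution import mip_ts_distribution
    from seasonal_climatology_roms import seasonal_climatology_roms

    # Mean and trend of Drake Passage transport over 2002-2016
    mip_dpt_calc('dpt_metroms.log', 'dpt_fesom_low.log', 'dpt_fesom_high.log')

    # T/S distribution on the continental shelf, from 2002-2016 averaged files
    mip_ts_distribution('roms_grid.nc', 'roms_avg_2002_2016.nc',
                        '/path/to/fesom/mesh/', 'fesom_avg_2002_2016.nc')

    # Seasonal climatology of ROMS temperature and salinity, skipping output
    # before 2002 via the new start_year argument
    seasonal_climatology_roms('/path/to/roms/output/', 1, 102,
                              'roms_seasonal_climatology.nc', start_year=2002)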