From f0ea7fb5988f1a4fdec5822a316510909f11d9ea Mon Sep 17 00:00:00 2001 From: Kaitlin Naughten Date: Wed, 11 Jul 2018 22:46:04 +1000 Subject: [PATCH] Bug fix --- calc_ice_prod.py | 93 ++++++++++ circumpolar_plot.py | 46 ++--- ismr_plot.py | 2 +- mip_circumpolar_drift.py | 320 +++++++++++++++++++++++++++++++++ mip_dpt_calc_annual.py | 162 +++++++++++++++++ mip_iceshelf_figures.py | 16 +- mip_sfc_stress.py | 215 ++++++++++++++++++++++ mip_std_massloss.py | 290 ++++++++++++++++++++++++++++++ mip_tamura_binning.py | 234 ++++++++++++++++++++++++ mip_tamura_circumpolar_plot.py | 151 ++++++++++++++++ mip_tamura_plot.py | 90 ++++++++++ mip_ts_distribution_ecco2.py | 186 +++++++++++++++++++ slope_current.py | 312 ++++++++++++++++++++++++++++++++ total_iceshelf_area.py | 4 +- 14 files changed, 2074 insertions(+), 47 deletions(-) create mode 100644 calc_ice_prod.py create mode 100644 mip_circumpolar_drift.py create mode 100644 mip_dpt_calc_annual.py create mode 100644 mip_sfc_stress.py create mode 100644 mip_std_massloss.py create mode 100644 mip_tamura_binning.py create mode 100644 mip_tamura_circumpolar_plot.py create mode 100644 mip_tamura_plot.py create mode 100644 mip_ts_distribution_ecco2.py create mode 100644 slope_current.py diff --git a/calc_ice_prod.py b/calc_ice_prod.py new file mode 100644 index 0000000..50f79e2 --- /dev/null +++ b/calc_ice_prod.py @@ -0,0 +1,93 @@ +from netCDF4 import Dataset, num2date +from numpy import * + +def calc_ice_prod (in_file, start_year, end_year, out_file): + + # cm/day to m/day conversion + cm_to_m = 1e-2 + + # Read the grid + id = Dataset(in_file, 'r') + lon = id.variables['TLON'][:,:] + lat = id.variables['TLAT'][:,:] + num_lon = size(lon,1) + num_lat = size(lat,0) + # Set up array to hold output data + ice_prod = ma.empty([num_lat, num_lon]) + ice_prod[:,:] = 0.0 + + # Read the time values + time_id = id.variables['time'] + cice_time = num2date(time_id[:], units=time_id.units, calendar='standard') + num_time = time_id.size + # Find the first timestep and how many days in it we care about + if start_year == 1992: + # Starting at the beginning of the simulation + start_t = 0 + start_days = 5 + else: + for t in range(num_time): + if cice_time[t].year == start_year and cice_time[t].month-1 == 0 and cice_time[t].day in range(2,6+1): + start_t = t + start_days = cice_time[t].day-1 + break + # Find the last timestep and how many days in it we care about + if end_year == 2016: + # Ending at the end of the simulation + end_t = num_time-1 + end_days = 5 + else: + for t in range(num_time): + if cice_time[t].year == end_year+1 and cice_time[t].month-1 == 0 and cice_time[t].day in range(1,5+1): + end_t = t + end_days = 6-cice_time[t].day + break + print 'Starting at index ' + str(start_t) + ' (' + str(cice_time[start_t].year) + '-' + str(cice_time[start_t].month) + '-' + str(cice_time[start_t].day) + '), ' + str(start_days) + ' days' + print 'Ending at index ' + str(end_t) + ' (' + str(cice_time[end_t].year) + '-' + str(cice_time[end_t].month) + '-' + str(cice_time[end_t].day) + '), ' + str(end_days) + ' days' + + # Integrate the start days + thdgr_start = id.variables['frazil'][start_t,:,:] + id.variables['congel'][start_t,:,:] - id.variables['meltt'][start_t,:,:] - id.variables['meltb'][start_t,:,:] - id.variables['meltl'][start_t,:,:] + index = thdgr_start < 0 + thdgr_start[index] = 0 + ice_prod += thdgr_start*cm_to_m*start_days + # Integrate the middle days + thdgr_mid = id.variables['frazil'][start_t+1:end_t,:,:] + id.variables['congel'][start_t+1:end_t,:,:] - 
id.variables['meltt'][start_t+1:end_t,:,:] - id.variables['meltb'][start_t+1:end_t,:,:] - id.variables['meltl'][start_t+1:end_t,:,:] + index = thdgr_mid < 0 + thdgr_mid[index] = 0 + ice_prod += sum(thdgr_mid, axis=0)*cm_to_m*5 + # Integrate the end days + thdgr_end = id.variables['frazil'][end_t,:,:] + id.variables['congel'][end_t,:,:] - id.variables['meltt'][end_t,:,:] - id.variables['meltb'][end_t,:,:] - id.variables['meltl'][end_t,:,:] + index = thdgr_end < 0 + thdgr_end[index] = 0 + ice_prod += thdgr_end*cm_to_m*end_days + # Get annual average in m/y + ice_prod = ice_prod/(end_year-start_year+1) + + # Write to file + id = Dataset(out_file, 'w') + id.createDimension('ni', size(lon,1)) + id.createDimension('nj', size(lon,0)) + id.createDimension('time', 4) + id.createVariable('TLON', 'f8', ('nj', 'ni')) + id.variables['TLON'][:,:] = lon + id.createVariable('TLAT', 'f8', ('nj', 'ni')) + id.variables['TLAT'][:,:] = lat + id.createVariable('ice_prod', 'f8', ('nj', 'ni')) + id.variables['ice_prod'].units = 'm/y' + id.variables['ice_prod'][:,:] = ice_prod + id.close() + + +# Command-line interface +if __name__ == "__main__": + + in_file = raw_input("Path to CICE iceh_tot.nc file for entire simulation: ") + start_year = int(raw_input("First year to process: ")) + end_year = int(raw_input("End year to process: ")) + out_file = raw_input("Path to desired output file: ") + calc_ice_prod (in_file, start_year, end_year, out_file) + + + + + diff --git a/circumpolar_plot.py b/circumpolar_plot.py index 438e668..2135d58 100644 --- a/circumpolar_plot.py +++ b/circumpolar_plot.py @@ -264,51 +264,25 @@ def average_btw_depths (data_3d, z_3d, dz_3d, z_bounds): # both depth bounds are too deep (i.e. in the seafloor) data[j,i] = ma.masked else: - # Initialise integral and depth_range (vertical distance we're integrating over) to 0 - integral = 0.0 - depth_range = 0.0 + # Find the first level above z_deep if any(z_col < z_deep): # There exist ocean cells below z_deep - # Linearly interpolate to z_deep - k_above_deep = nonzero(z_col > z_deep)[0][0] - k_below_deep = k_above_deep - 1 - coeff1 = (z_col[k_below_deep] - z_deep)/(z_col[k_below_deep] - z_col[k_above_deep]) - coeff2 = 1 - coeff1 - data_deep = coeff1*data_col[k_above_deep] + coeff2*data_col[k_below_deep] - # Now integrate between z_deep and z_col[k_above_deep] - dz_curr = z_col[k_above_deep] - z_deep - integral += 0.5*(data_deep + data_col[k_above_deep])*dz_curr # Trapezoidal rule - depth_range += dz_curr - # Save index of k_above_deep; we will start normal (non-interpolated) integration here - k_start = k_above_deep + k_start = nonzero(z_col > z_deep)[0][0] else: - # z_deep is deeper than the seafloor at this location - # Start normal (non-interpolated) integration at the seafloor - k_start = 0 + # z_deep is deeper than the seafloor at this location, + # so start at the seafloor + k_start = 0 + # Find the first level above z_shallow if any(z_col > z_shallow): # There exist ocean cells above z_shallow - # Linearly interpolate to z_shallow - k_above_shallow = nonzero(z_col > z_shallow)[0][0] - k_below_shallow = k_above_shallow - 1 - coeff1 = (z_col[k_below_shallow] - z_shallow)/(z_col[k_below_shallow] - z_col[k_above_shallow]) - coeff2 = 1 - coeff1 - data_shallow = coeff1*data_col[k_above_shallow] + coeff2*data_col[k_below_shallow] - # Now integrate between z_col[k_below_shallow] and z_shallow - dz_curr = z_shallow - z_col[k_below_shallow] - integral += 0.5*(data_col[k_below_shallow] + data_shallow)*dz_curr # Trapezoidal rule - depth_range += dz_curr - # 
Save index of k_above_shallow; we will stop normal (non-interpolated) integration one level below it - k_end = k_above_shallow + k_end = nonzero(z_col > z_shallow)[0][0] else: - # z_shallow is shallower than the sea surface (or ice shelf draft) at this location - # Continue normal (non-interpolated) integration all the way to the surface/ice shelf + # z_shallow is shallower than the ice shelf draft at this location + # Continue normal (non-interpolated) integration all the way to the ice draft k_end = size(z_col) # Now integrate between k_start and k_end if k_start < k_end: - integral += sum(data_col[k_start:k_end]*dz_col[k_start:k_end]) - depth_range += sum(dz_col[k_start:k_end]) - # Divide integral by depth_range to get the vertical average - data[j,i] = integral/depth_range + data[j,i] = sum(data_col[k_start:k_end]*dz_col[k_start:k_end])/sum(dz_col[k_start:k_end]) return data diff --git a/ismr_plot.py b/ismr_plot.py index f0124b5..ee9f15a 100644 --- a/ismr_plot.py +++ b/ismr_plot.py @@ -35,7 +35,7 @@ def ismr_plot (file_path, save=False, fig_name=None): zice = id.variables['zice'][:-15,:-1] # Read the last year of ice shelf melt rates (assume 5-day averages here), # average over time, and convert from m/s to m/y - ismr = mean(id.variables['m'][-73:,:-15,:-1], axis=0)*60*60*24*365.25 + ismr = id.variables['m'][0,:-15,:-1]*60*60*24*365.25 #mean(id.variables['m'][-73:,:-15,:-1], axis=0)*60*60*24*365.25 id.close() # Mask the open ocean and land out of the melt rates ismr = ma.masked_where(zice==0, ismr) diff --git a/mip_circumpolar_drift.py b/mip_circumpolar_drift.py new file mode 100644 index 0000000..d061297 --- /dev/null +++ b/mip_circumpolar_drift.py @@ -0,0 +1,320 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from matplotlib.patches import Polygon +from matplotlib.collections import PatchCollection +from scipy.interpolate import RegularGridInterpolator +from cartesian_grid_3d import * +from circumpolar_plot import average_btw_depths +# Import FESOM scripts (have to modify path first) +import sys +sys.path.insert(0, '/short/y99/kaa561/fesomtools') +from fesom_grid import * +from unrotate_grid import * + +# Needs python/2.7.6 + +def mip_circumpolar_drift (): + + # File paths + # ECCO2 initial conditions file for temperature + ecco2_ini_file = '/short/m68/kaa561/metroms_iceshelf/data/originals/ECCO2/THETA.1440x720x50.199201.nc' + # ROMS grid file + roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + # ROMS January 2016 mean temp + roms_end_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/temp_salt_jan2016.nc' + # FESOM mesh paths + fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/' + fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/' + # FESOM January 2016 mean temp + fesom_end_file_lr = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/temp_salt_jan2016.nc' + fesom_end_file_hr = '/short/y99/kaa561/FESOM/intercomparison_highres/output/temp_salt_jan2016.nc' + # Depth bounds to average between + shallow_bound = 300 + deep_bound = 1000 + # ROMS grid parameters + theta_s = 7.0 + theta_b = 2.0 + hc = 250 + N = 31 + deg2rad = pi/180 + # Bound for colour scale + colour_bound = 3 + # Northern boundary for plot + nbdry = -50+90 + + print 'Processing ECCO2' + id = Dataset(ecco2_ini_file, 'r') + ecco_lon_tmp = id.variables['LONGITUDE_T'][:] + ecco_lat = id.variables['LATITUDE_T'][:] + ecco_depth = id.variables['DEPTH_T'][:] # Depth is positive + ecco_temp_3d_tmp = 
id.variables['THETA'][0,:,:,:] + id.close() + # Wrap periodic boundary + ecco_lon = zeros(size(ecco_lon_tmp)+2) + ecco_lon[0] = ecco_lon_tmp[-1]-360 + ecco_lon[1:-1] = ecco_lon_tmp + ecco_lon[-1] = ecco_lon_tmp[0]+360 + ecco_temp_3d = ma.array(zeros((size(ecco_depth), size(ecco_lat), size(ecco_lon)))) + ecco_temp_3d[:,:,0] = ecco_temp_3d_tmp[:,:,-1] + ecco_temp_3d[:,:,1:-1] = ecco_temp_3d_tmp + ecco_temp_3d[:,:,-1] = ecco_temp_3d_tmp[:,:,0] + # Calculate dz + ecco_depth_edges = zeros(size(ecco_depth)+1) + ecco_depth_edges[1:-1] = 0.5*(ecco_depth[:-1] + ecco_depth[1:]) + # Surface is zero + # Extrapolate for bottom + ecco_depth_edges[-1] = 2*ecco_depth[-1] - ecco_depth_edges[-2] + ecco_dz = ecco_depth_edges[1:] - ecco_depth_edges[:-1] + # Average between bounds + # Find the first level below shallow_bound + k_start = nonzero(ecco_depth > shallow_bound)[0][0] + # Find the first level below deep_bound + # Don't worry about regions where this hits the seafloor, as they will + # get masked out in the final plot + k_end = nonzero(ecco_depth > deep_bound)[0][0] + # Integrate between + ecco_temp = sum(ecco_temp_3d[k_start:k_end,:,:]*ecco_dz[k_start:k_end,None,None], axis=0)/sum(ecco_dz[k_start:k_end]) + # Fill land mask with zeros + index = ecco_temp.mask + ecco_temp = ecco_temp.data + ecco_temp[index] = 0.0 + # Prepare interpolation function + interp_function = RegularGridInterpolator((ecco_lat, ecco_lon), ecco_temp) + + print 'Processing MetROMS' + # Read grid + id = Dataset(roms_grid, 'r') + roms_h = id.variables['h'][:,:] + roms_zice = id.variables['zice'][:,:] + roms_mask = id.variables['mask_rho'][:,:] + roms_lon = id.variables['lon_rho'][:,:] + roms_lat = id.variables['lat_rho'][:,:] + num_lon = size(roms_lon,1) + num_lat = size(roms_lat,0) + id.close() + # Interpolate ECCO2 depth-averaged values to the ROMS grid + roms_temp_ini = interp_function((roms_lat, roms_lon)) + # Apply ROMS land mask + roms_temp_ini = ma.masked_where(roms_mask==0, roms_temp_ini) + # Read Jan 2016 values + id = Dataset(roms_end_file, 'r') + roms_temp_3d_end = id.variables['temp'][0,:,:,:] + id.close() + # Get z and dz + roms_dx, roms_dy, roms_dz, roms_z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N) + # Vertically average between given depths + roms_temp_end = average_btw_depths(roms_temp_3d_end, roms_z, roms_dz, [-1*shallow_bound, -1*deep_bound]) + # Mask regions shallower than 1000 m + roms_temp_ini = ma.masked_where(roms_h < deep_bound, roms_temp_ini) + roms_temp_end = ma.masked_where(roms_h < deep_bound, roms_temp_end) + # Mask ice shelf cavities + roms_temp_ini = ma.masked_where(roms_zice < 0, roms_temp_ini) + roms_temp_end = ma.masked_where(roms_zice < 0, roms_temp_end) + # Get difference + roms_temp_drift = roms_temp_end - roms_temp_ini + # Convert to spherical coordinates + roms_x = -(roms_lat+90)*cos(roms_lon*deg2rad+pi/2) + roms_y = (roms_lat+90)*sin(roms_lon*deg2rad+pi/2) + + print 'Processing low-res FESOM' + print '...Building mesh' + elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True) + # Read rotated lat and lon for each 2D node + f = open(fesom_mesh_path_lr + 'nod2d.out', 'r') + f.readline() + rlon_lr = [] + rlat_lr = [] + for line in f: + tmp = line.split() + lon_tmp = float(tmp[1]) + if lon_tmp < -180: + lon_tmp += 360 + elif lon_tmp > 180: + lon_tmp -= 360 + rlon_lr.append(lon_tmp) + rlat_lr.append(float(tmp[2])) + f.close() + rlon_lr = array(rlon_lr) + rlat_lr = array(rlat_lr) + # Unrotate grid + fesom_lon_lr, fesom_lat_lr = unrotate_grid(rlon_lr, 
rlat_lr) + # Get longitude in the range (0, 360) to match ECCO + index = fesom_lon_lr < 0 + fesom_lon_lr[index] = fesom_lon_lr[index] + 360 + print '...Interpolating ECCO2' + fesom_temp_nodes_ini_lr = interp_function((fesom_lat_lr, fesom_lon_lr)) + # Read January 2016 temp + id = Dataset(fesom_end_file_lr, 'r') + fesom_temp_3d_nodes_end_lr = id.variables['temp'][0,:] + id.close() + print '...Looping over elements' + fesom_temp_ini_lr = [] + fesom_temp_end_lr = [] + patches_lr = [] + for elm in elements_lr: + # Make sure we're not in an ice shelf cavity, or shallower than deep_bound + if not elm.cavity: + if all(array([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth]) > deep_bound): + # Add a new patch + coord = transpose(vstack((elm.x, elm.y))) + patches_lr.append(Polygon(coord, True, linewidth=0.)) + # Average initial temp over element + fesom_temp_ini_lr.append(mean([fesom_temp_nodes_ini_lr[elm.nodes[0].id], fesom_temp_nodes_ini_lr[elm.nodes[1].id], fesom_temp_nodes_ini_lr[elm.nodes[2].id]])) + # Vertically integrate final temp for this element + fesom_temp_end_lr.append(fesom_element_average_btw_depths(elm, shallow_bound, deep_bound, fesom_temp_3d_nodes_end_lr)) + fesom_temp_ini_lr = array(fesom_temp_ini_lr) + fesom_temp_end_lr = array(fesom_temp_end_lr) + # Get difference + fesom_temp_drift_lr = fesom_temp_end_lr - fesom_temp_ini_lr + + print 'Processing high-res FESOM' + print '...Building mesh' + elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True) + f = open(fesom_mesh_path_hr + 'nod2d.out', 'r') + f.readline() + rlon_hr = [] + rlat_hr = [] + for line in f: + tmp = line.split() + lon_tmp = float(tmp[1]) + if lon_tmp < -180: + lon_tmp += 360 + elif lon_tmp > 180: + lon_tmp -= 360 + rlon_hr.append(lon_tmp) + rlat_hr.append(float(tmp[2])) + f.close() + rlon_hr = array(rlon_hr) + rlat_hr = array(rlat_hr) + fesom_lon_hr, fesom_lat_hr = unrotate_grid(rlon_hr, rlat_hr) + index = fesom_lon_hr < 0 + fesom_lon_hr[index] = fesom_lon_hr[index] + 360 + print '...Interpolating ECCO2' + fesom_temp_nodes_ini_hr = interp_function((fesom_lat_hr, fesom_lon_hr)) + id = Dataset(fesom_end_file_hr, 'r') + fesom_temp_3d_nodes_end_hr = id.variables['temp'][0,:] + id.close() + print '...Looping over elements' + fesom_temp_ini_hr = [] + fesom_temp_end_hr = [] + patches_hr = [] + for elm in elements_hr: + if not elm.cavity: + if all(array([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth]) > deep_bound): + coord = transpose(vstack((elm.x, elm.y))) + patches_hr.append(Polygon(coord, True, linewidth=0.)) + fesom_temp_ini_hr.append(mean([fesom_temp_nodes_ini_hr[elm.nodes[0].id], fesom_temp_nodes_ini_hr[elm.nodes[1].id], fesom_temp_nodes_ini_hr[elm.nodes[2].id]])) + fesom_temp_end_hr.append(fesom_element_average_btw_depths(elm, shallow_bound, deep_bound, fesom_temp_3d_nodes_end_hr)) + fesom_temp_ini_hr = array(fesom_temp_ini_hr) + fesom_temp_end_hr = array(fesom_temp_end_hr) + fesom_temp_drift_hr = fesom_temp_end_hr - fesom_temp_ini_hr + + print 'Plotting' + fig = figure(figsize=(19,8)) + fig.patch.set_facecolor('white') + gs = GridSpec(1,3) + gs.update(left=0.05, right=0.95, bottom=0.1, top=0.85, wspace=0.05) + # ROMS + ax = subplot(gs[0,0], aspect='equal') + ax.pcolor(roms_x, roms_y, roms_temp_drift, vmin=-colour_bound, vmax=colour_bound, cmap='RdBu_r') + xlim([-nbdry, nbdry]) + ylim([-nbdry, nbdry]) + title('a) MetROMS', fontsize=28) + ax.set_xticks([]) + ax.set_yticks([]) + # FESOM (low-res) + 
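+ # A note on the plotting approach (not in the original source): the FESOM mesh is unstructured, so each element-averaged drift value is drawn on its own triangular Polygon patch via a PatchCollection, rather than with pcolor as on the regular ROMS grid +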
ax = subplot(gs[0,1], aspect='equal') + img = PatchCollection(patches_lr, cmap='RdBu_r') + img.set_array(fesom_temp_drift_lr) + img.set_clim(vmin=-colour_bound, vmax=colour_bound) + img.set_edgecolor('face') + ax.add_collection(img) + xlim([-nbdry, nbdry]) + ylim([-nbdry, nbdry]) + title('b) FESOM (low-res)', fontsize=28) + ax.set_xticks([]) + ax.set_yticks([]) + # FESOM (high-res) + ax = subplot(gs[0,2], aspect='equal') + img = PatchCollection(patches_hr, cmap='RdBu_r') + img.set_array(fesom_temp_drift_hr) + img.set_clim(vmin=-colour_bound, vmax=colour_bound) + img.set_edgecolor('face') + ax.add_collection(img) + xlim([-nbdry, nbdry]) + ylim([-nbdry, nbdry]) + title('c) FESOM (high-res)', fontsize=28) + ax.set_xticks([]) + ax.set_yticks([]) + # Add a horizontal colourbar on the bottom + cbaxes = fig.add_axes([0.3, 0.05, 0.4, 0.04]) + cbar = colorbar(img, orientation='horizontal', cax=cbaxes, ticks=arange(-colour_bound, colour_bound+1, 1), extend='both') + cbar.ax.tick_params(labelsize=20) + # Main title + suptitle(r'Change in temperature from initial conditions ($^{\circ}$C), '+str(shallow_bound)+'-'+str(deep_bound)+' m average', fontsize=34) + fig.show() + fig.savefig('circumpolar_temp_drift.png') + + +# Element is assumed to be not in an ice shelf cavity, with bathymetry deeper +# than deep_bound +def fesom_element_average_btw_depths (elm, shallow_bound, deep_bound, data): + + area = elm.area() + nodes = [elm.nodes[0], elm.nodes[1], elm.nodes[2]] + # Find the first 3D element which is entirely below shallow_bound + while True: + if nodes[0].depth > shallow_bound and nodes[1].depth > shallow_bound and nodes[2].depth > shallow_bound: + # Save these nodes + first_nodes = copy(nodes) + break + else: + for i in range(3): + nodes[i] = nodes[i].below + # Integrate downward until one of the next nodes is deeper than deep_bound + integral = 0.0 + volume = 0.0 + while True: + if nodes[0].below.depth > deep_bound or nodes[1].below.depth > deep_bound or nodes[2].below.depth > deep_bound: + # Save these nodes + last_nodes = copy(nodes) + break + else: + # Calculate mean of data at six corners of this triangular prism, + # and mean depths at three edges + values_tmp = [] + dz_tmp = [] + for i in range(3): + values_tmp.append(data[nodes[i].id]) + values_tmp.append(data[nodes[i].below.id]) + dz_tmp.append(abs(nodes[i].depth - nodes[i].below.depth)) + # Get ready for next iteration of while loop + nodes[i] = nodes[i].below + # Integrand is mean of data at corners * area of upper face * mean of depths at edges + integral += mean(array(values_tmp))*area*mean(array(dz_tmp)) + volume += mean(array(dz_tmp))*area + # Now integrate from shallow_bound to first_nodes by linearly interpolating + # each node to shallow_bound + values_tmp = [] + dz_tmp = [] + for i in range(3): + values_tmp.append(data[first_nodes[i].id]) + id1, id2, coeff1, coeff2 = elm.nodes[i].find_depth(shallow_bound) + values_tmp.append(coeff1*data[id1] + coeff2*data[id2]) + dz_tmp.append(abs(first_nodes[i].depth - shallow_bound)) + integral += mean(array(values_tmp))*area*mean(array(dz_tmp)) + volume += mean(array(dz_tmp))*area + # Now integrate from last_nodes to deep_bound by linearly interpolating + # each node to deep_bound + values_tmp = [] + dz_tmp = [] + for i in range(3): + values_tmp.append(data[last_nodes[i].id]) + id1, id2, coeff1, coeff2 = elm.nodes[i].find_depth(deep_bound) + values_tmp.append(coeff1*data[id1] + coeff2*data[id2]) + dz_tmp.append(abs(deep_bound - last_nodes[i].depth)) + integral += 
mean(array(values_tmp))*area*mean(array(dz_tmp)) + volume += mean(array(dz_tmp))*area + # All done; divide integral by volume to get the average + return integral/volume diff --git a/mip_dpt_calc_annual.py b/mip_dpt_calc_annual.py new file mode 100644 index 0000000..beb5e00 --- /dev/null +++ b/mip_dpt_calc_annual.py @@ -0,0 +1,162 @@ +from numpy import * +from scipy.stats import linregress + +# Calculate the mean Drake Passage transport over 2002-2016, as well as the +# linear trend and standard deviation of annual averages, for MetROMS, low-res +# FESOM, and high-res FESOM. Print the results to the screen. +# Input: +# roms_log = logfile from timeseries_dpt.py for MetROMS +# fesom_log_low, fesom_log_high = logfiles from timeseries_dpt.py in the +# fesomtools repository, for FESOM low-res and +# high-res respectively +def mip_dpt_calc_annual (roms_log, fesom_log_low, fesom_log_high): + + # Averaging period (days) + days_per_output = 5 + # Years of simulation + year_start = 1992 + year_end = 2016 + # Year calculation starts + calc_start = 2002 + + num_years = year_end-year_start+1 + num_years_calc = year_end-calc_start+1 + + # Read ROMS timeseries + roms_time = [] + roms_dpt = [] + f = open(roms_log, 'r') + # Skip first line (header for time array) + f.readline() + for line in f: + try: + roms_time.append(float(line)) + except(ValueError): + # Reached the header for the next variable + break + for line in f: + roms_dpt.append(float(line)) + f.close() + # Add start year to ROMS time array + roms_time = array(roms_time) + year_start + roms_dpt = array(roms_dpt) + + # Read FESOM low-res timeseries + fesom_dpt_low = [] + f = open(fesom_log_low, 'r') + # Skip header + f.readline() + for line in f: + fesom_dpt_low.append(float(line)) + f.close() + # Read FESOM high-res timeseries + fesom_dpt_high = [] + f = open(fesom_log_high, 'r') + f.readline() + for line in f: + fesom_dpt_high.append(float(line)) + f.close() + # Make FESOM time array (note that FESOM neglects leap years in its output) + fesom_time = arange(len(fesom_dpt_low))*days_per_output/365. 
+ year_start + fesom_dpt_low = array(fesom_dpt_low) + fesom_dpt_high = array(fesom_dpt_high) + + # Annually average + roms_dpt_avg = roms_annual_avg(roms_time, roms_dpt, year_start) + fesom_dpt_low_avg = fesom_annual_avg(fesom_dpt_low, days_per_output) + fesom_dpt_high_avg = fesom_annual_avg(fesom_dpt_high, days_per_output) + # Slice off the years we don't care about + roms_dpt_avg = roms_dpt_avg[calc_start-year_start:] + fesom_dpt_low_avg = fesom_dpt_low_avg[calc_start-year_start:] + fesom_dpt_high_avg = fesom_dpt_high_avg[calc_start-year_start:] + + # Calculate and print averages and standard deviations + print 'Average Drake Passage Transport' + print 'MetROMS: ' + str(mean(roms_dpt_avg)) + ' +/- ' + str(std(roms_dpt_avg)) + print 'FESOM low-res: ' + str(mean(fesom_dpt_low_avg)) + ' +/- ' + str(std(fesom_dpt_low_avg)) + print 'FESOM high-res: ' + str(mean(fesom_dpt_high_avg)) + ' +/- ' + str(std(fesom_dpt_high_avg)) + + # Calculate and print trends + # Also print p-values so we can see if it's statistically significant + annual_time = arange(calc_start, year_end+1) + print 'Trends in Drake Passage Transport' + slope, intercept, r_value, p_value, std_err = linregress(annual_time, roms_dpt_avg) + print 'MetROMS: ' + str(slope) + ' Sv/y, p=' + str(p_value) + slope, intercept, r_value, p_value, std_err = linregress(annual_time, fesom_dpt_low_avg) + print 'FESOM low-res: ' + str(slope) + ' Sv/y, p=' + str(p_value) + slope, intercept, r_value, p_value, std_err = linregress(annual_time, fesom_dpt_high_avg) + print 'FESOM high-res: ' + str(slope) + ' Sv/y, p=' + str(p_value) + + +# Annually average the given ROMS data. Note that ROMS takes leap years into +# account for its output, i.e. the 5-day averaging period holds true throughout +# the entire simulation with no days skipped. +# Input: +# time = 1D array of ROMS time values, in years, with year_start added (eg +# 1992.001, 1993.50) assuming year length of 365.25 days +# data = 1D array of ROMS data corresponding to time array +# year_start = integer containing the first year to process +# Output: data_avg = 1D array of annually averaged data +def roms_annual_avg (time, data, year_start): + + data_avg = [] + year = year_start + # Loop over years until we run out of data + while True: + # Find the first index after the beginning of this year + tmp1 = nonzero(time >= year)[0] + if len(tmp1)==0: + # No such index, we have run out of data + break + t_start = tmp1[0] + # Find the first index after the beginning of next year + tmp2 = nonzero(time >= year+1)[0] + if len(tmp2)==0: + # No such index, but we might not have run out of data + # eg simulation that ends on 31 December 2016 + t_end = len(time) + else: + t_end = tmp2[0] + if t_end - t_start < 73: + # This is a partial year, don't count it + break + # Calculate mean between these bounds + data_avg.append(mean(array(data[t_start:t_end]))) + # Increment year + year += 1 + return data_avg + + +# Annually average the given FESOM data. Note that FESOM neglects leap years +# in its output, i.e. the 5-day averaging period will skip 1 day every 4 years, +# so every year has exactly 365/5 = 73 batches of output. 
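+# For example, with days_per_output = 5, the 25 simulated years 1992-2016 give exactly 25*73 = 1825 values here, whereas the ROMS series above includes leap days and so has slightly more records over the same period.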
+# Input: +# data = 1D array of FESOM data +# days_per_output = averaging period for data +# Output: data_avg = 1D array of annually averaged data +def fesom_annual_avg (data, days_per_output): + + peryear = 365/days_per_output + data_avg = [] + year = 0 + # Loop over years + while True: + if len(data) < peryear*(year+1): + # Run out of data + # FESOM output comes in annual files so we don't have to worry about + # partial years like in ROMS + break + # Calculate mean for this year + data_avg.append(mean(array(data[peryear*year:peryear*(year+1)]))) + # Increment year + year += 1 + return data_avg + + +# Command-line interface +if __name__ == "__main__": + + roms_log = raw_input("Path to ROMS logfile from timeseries_dpt.py: ") + fesom_log_low = raw_input("Path to FESOM low-res logfile from timeseries_dpt.py: ") + fesom_log_high = raw_input("Path to FESOM high-res logfile from timeseries_dpt.py: ") + mip_dpt_calc_annual(roms_log, fesom_log_low, fesom_log_high) diff --git a/mip_iceshelf_figures.py b/mip_iceshelf_figures.py index 105e6eb..d77c5a3 100644 --- a/mip_iceshelf_figures.py +++ b/mip_iceshelf_figures.py @@ -1544,7 +1544,7 @@ def plot_zonal_ts (lon0, lat_min, lat_max, lat_ticks, lat_labels, depth_min, dep plot_draft(x_min_tmp, x_max_tmp, y_min_tmp, y_max_tmp, gs_e, cbaxes_tmp, cbar_ticks, 'e') suptitle('Filchner-Ronne Ice Shelf', fontsize=30) fig.show() -fig.savefig('filchner_ronne.png') +fig.savefig('filchner_ronne.png', dpi=300) # Eastern Weddell x_min_tmp = -8 @@ -1586,7 +1586,7 @@ def plot_zonal_ts (lon0, lat_min, lat_max, lat_ticks, lat_labels, depth_min, dep plot_zonal_ts(-1, lat_min, lat_max, lat_ticks, lat_labels, depth_min, depth_max, depth_ticks, depth_labels, gs_c, gs_d, cbaxes_tmp1, cbaxes_tmp2, cbar_ticks1, cbar_ticks2, 'c', 'd', loc_string='Fimbul Ice Shelf') suptitle('Eastern Weddell Region', fontsize=30) fig.show() -fig.savefig('eweddell.png') +fig.savefig('eweddell.png', dpi=300) # Amery x_min_tmp = 15.25 @@ -1628,7 +1628,7 @@ def plot_zonal_ts (lon0, lat_min, lat_max, lat_ticks, lat_labels, depth_min, dep plot_draft(x_min_tmp, x_max_tmp, y_min_tmp, y_max_tmp, gs_e, cbaxes_tmp, cbar_ticks, 'e') suptitle('Amery Ice Shelf', fontsize=30) fig.show() -fig.savefig('amery.png') +fig.savefig('amery.png', dpi=300) # Australian sector x_min_tmp = 12 @@ -1671,7 +1671,7 @@ def plot_zonal_ts (lon0, lat_min, lat_max, lat_ticks, lat_labels, depth_min, dep plot_velavg(x_min_totten, x_max_totten, y_min_totten, y_max_totten, gs_d, cbaxes_tmp, cbar_ticks, x_centres, y_centres, roms_ubin, roms_vbin, fesom_ubin_lr, fesom_vbin_lr, fesom_ubin_hr, fesom_vbin_hr, 'd', loc_string='Totten Ice Shelf', arrow_scale=0.7, arrow_headwidth=8, arrow_headlength=9) suptitle('Australian Sector', fontsize=30) fig.show() -fig.savefig('australian.png') +fig.savefig('australian.png', dpi=300) # Ross x_min_tmp = -9.5 @@ -1713,7 +1713,7 @@ def plot_zonal_ts (lon0, lat_min, lat_max, lat_ticks, lat_labels, depth_min, dep plot_zonal_ts(180, lat_min, lat_max, lat_ticks, lat_labels, depth_min, depth_max, depth_ticks, depth_labels, gs_c, gs_d, cbaxes_tmp1, cbaxes_tmp2, cbar_ticks1, cbar_ticks2, 'c', 'd') suptitle('Ross Sea', fontsize=30) fig.show() -fig.savefig('ross.png') +fig.savefig('ross.png', dpi=300) # Amundsen Sea x_min_tmp = -17.5 @@ -1755,7 +1755,7 @@ def plot_zonal_ts (lon0, lat_min, lat_max, lat_ticks, lat_labels, depth_min, dep plot_velavg(x_min_pig, x_max_pig, y_min_pig, y_max_pig, gs_d, cbaxes_tmp, cbar_ticks, x_centres, y_centres, roms_ubin, roms_vbin, fesom_ubin_lr, fesom_vbin_lr, fesom_ubin_hr, 
fesom_vbin_hr, 'd', loc_string='Pine Island Glacier Ice Shelf', arrow_scale=0.5, arrow_headwidth=8, arrow_headlength=9) suptitle('Amundsen Sea', fontsize=30) fig.show() -fig.savefig('amundsen.png') +fig.savefig('amundsen.png', dpi=300) # Bellingshausen Sea x_min_tmp = -20.25 @@ -1797,7 +1797,7 @@ def plot_zonal_ts (lon0, lat_min, lat_max, lat_ticks, lat_labels, depth_min, dep plot_velavg(x_min_gvi, x_max_gvi, y_min_gvi, y_max_gvi, gs_d, cbaxes_tmp, cbar_ticks, x_centres, y_centres, roms_ubin, roms_vbin, fesom_ubin_lr, fesom_vbin_lr, fesom_ubin_hr, fesom_vbin_hr, 'd', loc_string='George VI Ice Shelf', arrow_scale=0.4, arrow_headwidth=8, arrow_headlength=9) suptitle('Bellingshausen Sea', fontsize=30) fig.show() -fig.savefig('bellingshausen.png') +fig.savefig('bellingshausen.png', dpi=300) # Larsen x_min_tmp = -22.5 @@ -1828,7 +1828,7 @@ def plot_zonal_ts (lon0, lat_min, lat_max, lat_ticks, lat_labels, depth_min, dep plot_bwtemp(x_min_tmp, x_max_tmp, y_min_tmp, y_max_tmp, gs_c, cbaxes_tmp, cbar_ticks, 'c', bathy_contour=1500) suptitle('Larsen Ice Shelves', fontsize=30) fig.show() -fig.savefig('larsen.png') +fig.savefig('larsen.png', dpi=300) diff --git a/mip_sfc_stress.py b/mip_sfc_stress.py new file mode 100644 index 0000000..f389985 --- /dev/null +++ b/mip_sfc_stress.py @@ -0,0 +1,215 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from matplotlib.collections import PatchCollection +from rotate_vector_roms import * +# Import FESOM scripts (have to modify path first) +import sys +sys.path.insert(0, '/short/y99/kaa561/fesomtools') +from patches import * +from unrotate_vector import * + +def mip_sfc_stress (): + + # File paths + roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + roms_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/stress_firstyear.nc' # Already averaged over first year + fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/' + fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/' + fesom_file_lr = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/MK44005.1992.forcing.diag.nc' + fesom_file_hr = '/short/y99/kaa561/FESOM/intercomparison_highres/output/MK44005.1992.forcing.diag.nc' + # Degrees to radians conversion factor + deg2rad = pi/180.0 + # Northern boundaries for plots + nbdry_acc = -30+90 + nbdry_shelf = -64+90 + # Bounds for colour scale + colour_bound_acc = 0.25 + colour_bound_shelf = 0.25 + + print 'Processing ROMS' + # Read grid + id = Dataset(roms_grid, 'r') + roms_lat = id.variables['lat_rho'][:,:] + roms_lon = id.variables['lon_rho'][:,:] + angle = id.variables['angle'][:,:] + zice = id.variables['zice'][:,:] + id.close() + # Read surface stress + id = Dataset(roms_file, 'r') + sustr_tmp = id.variables['sustr'][0,:,:] + svstr_tmp = id.variables['svstr'][0,:,:] + id.close() + # Unrotate + sustr, svstr = rotate_vector_roms(sustr_tmp, svstr_tmp, angle) + # Get magnitude + roms_stress = sqrt(sustr**2 + svstr**2) + # Mask cavities + roms_stress = ma.masked_where(zice<0, roms_stress) + # Calculate polar projection + roms_x = -(roms_lat+90)*cos(roms_lon*deg2rad+pi/2) + roms_y = (roms_lat+90)*sin(roms_lon*deg2rad+pi/2) + + print 'Processing low-res FESOM' + # Build mesh and patches + elements_lr, patches_lr = make_patches(fesom_mesh_path_lr, circumpolar=True, mask_cavities=True) + # Read rotated lat and lon + f = open(fesom_mesh_path_lr + 'nod2d.out', 'r') + f.readline() + rlon_lr = [] + rlat_lr = [] + for line in f: + tmp = line.split() + lon_tmp = 
float(tmp[1]) + if lon_tmp < -180: + lon_tmp += 360 + elif lon_tmp > 180: + lon_tmp -= 360 + rlon_lr.append(lon_tmp) + rlat_lr.append(float(tmp[2])) + f.close() + rlon_lr = array(rlon_lr) + rlat_lr = array(rlat_lr) + # Read surface stress + id = Dataset(fesom_file_lr, 'r') + stress_x_tmp = mean(id.variables['stress_x'][:,:], axis=0) + stress_y_tmp = mean(id.variables['stress_y'][:,:], axis=0) + id.close() + # Unrotate + stress_x_lr, stress_y_lr = unrotate_vector(rlon_lr, rlat_lr, stress_x_tmp, stress_y_tmp) + # Get magnitude + fesom_stress_lr_nodes = sqrt(stress_x_lr**2 + stress_y_lr**2) + # Average over elements + fesom_stress_lr = [] + for elm in elements_lr: + if not elm.cavity: + fesom_stress_lr.append(mean([fesom_stress_lr_nodes[elm.nodes[0].id], fesom_stress_lr_nodes[elm.nodes[1].id], fesom_stress_lr_nodes[elm.nodes[2].id]])) + + print 'Processing high-res FESOM' + elements_hr, patches_hr = make_patches(fesom_mesh_path_hr, circumpolar=True, mask_cavities=True) + f = open(fesom_mesh_path_hr + 'nod2d.out', 'r') + f.readline() + rlon_hr = [] + rlat_hr = [] + for line in f: + tmp = line.split() + lon_tmp = float(tmp[1]) + if lon_tmp < -180: + lon_tmp += 360 + elif lon_tmp > 180: + lon_tmp -= 360 + rlon_hr.append(lon_tmp) + rlat_hr.append(float(tmp[2])) + f.close() + rlon_hr = array(rlon_hr) + rlat_hr = array(rlat_hr) + id = Dataset(fesom_file_hr, 'r') + stress_x_tmp = mean(id.variables['stress_x'][:,:], axis=0) + stress_y_tmp = mean(id.variables['stress_y'][:,:], axis=0) + id.close() + stress_x_hr, stress_y_hr = unrotate_vector(rlon_hr, rlat_hr, stress_x_tmp, stress_y_tmp) + fesom_stress_hr_nodes = sqrt(stress_x_hr**2 + stress_y_hr**2) + fesom_stress_hr = [] + for elm in elements_hr: + if not elm.cavity: + fesom_stress_hr.append(mean([fesom_stress_hr_nodes[elm.nodes[0].id], fesom_stress_hr_nodes[elm.nodes[1].id], fesom_stress_hr_nodes[elm.nodes[2].id]])) + + print 'Plotting' + + # ACC + fig = figure(figsize=(19,8)) + fig.patch.set_facecolor('white') + gs = GridSpec(1,3) + gs.update(left=0.05, right=0.95, bottom=0.1, top=0.85, wspace=0.05) + # ROMS + ax = subplot(gs[0,0], aspect='equal') + ax.pcolor(roms_x, roms_y, roms_stress, vmin=0, vmax=colour_bound_acc, cmap='jet') + xlim([-nbdry_acc, nbdry_acc]) + ylim([-nbdry_acc, nbdry_acc]) + title('a) MetROMS', fontsize=28) + ax.set_xticks([]) + ax.set_yticks([]) + # FESOM (low-res) + ax = subplot(gs[0,1], aspect='equal') + img = PatchCollection(patches_lr, cmap='jet') + img.set_array(array(fesom_stress_lr)) + img.set_clim(vmin=0, vmax=colour_bound_acc) + img.set_edgecolor('face') + ax.add_collection(img) + xlim([-nbdry_acc, nbdry_acc]) + ylim([-nbdry_acc, nbdry_acc]) + title('b) FESOM (low-res)', fontsize=28) + ax.set_xticks([]) + ax.set_yticks([]) + # FESOM (high-res) + ax = subplot(gs[0,2], aspect='equal') + img = PatchCollection(patches_hr, cmap='jet') + img.set_array(array(fesom_stress_hr)) + img.set_clim(vmin=0, vmax=colour_bound_acc) + img.set_edgecolor('face') + ax.add_collection(img) + xlim([-nbdry_acc, nbdry_acc]) + ylim([-nbdry_acc, nbdry_acc]) + title('c) FESOM (high-res)', fontsize=28) + ax.set_xticks([]) + ax.set_yticks([]) + # Add a horizontal colourbar on the bottom + cbaxes = fig.add_axes([0.3, 0.05, 0.4, 0.04]) + cbar = colorbar(img, orientation='horizontal', cax=cbaxes, extend='max', ticks=arange(0, colour_bound_acc+0.05, 0.05)) + cbar.ax.tick_params(labelsize=20) + # Main title + suptitle(r'Ocean surface stress (N/m$^2$), 1992 mean', fontsize=34) + fig.show() + fig.savefig('sfc_stress_acc.png') + + # Continental shelf + fig = 
figure(figsize=(19,8)) + fig.patch.set_facecolor('white') + gs = GridSpec(1,3) + gs.update(left=0.05, right=0.95, bottom=0.1, top=0.85, wspace=0.05) + # ROMS + ax = subplot(gs[0,0], aspect='equal') + ax.pcolor(roms_x, roms_y, roms_stress, vmin=0, vmax=colour_bound_shelf, cmap='jet') + xlim([-nbdry_shelf, nbdry_shelf]) + ylim([-nbdry_shelf, nbdry_shelf]) + title('a) MetROMS', fontsize=28) + ax.set_xticks([]) + ax.set_yticks([]) + # FESOM (low-res) + ax = subplot(gs[0,1], aspect='equal') + img = PatchCollection(patches_lr, cmap='jet') + img.set_array(array(fesom_stress_lr)) + img.set_clim(vmin=0, vmax=colour_bound_shelf) + img.set_edgecolor('face') + ax.add_collection(img) + xlim([-nbdry_shelf, nbdry_shelf]) + ylim([-nbdry_shelf, nbdry_shelf]) + title('b) FESOM (low-res)', fontsize=28) + ax.set_xticks([]) + ax.set_yticks([]) + # FESOM (high-res) + ax = subplot(gs[0,2], aspect='equal') + img = PatchCollection(patches_hr, cmap='jet') + img.set_array(array(fesom_stress_hr)) + img.set_clim(vmin=0, vmax=colour_bound_shelf) + img.set_edgecolor('face') + ax.add_collection(img) + xlim([-nbdry_shelf, nbdry_shelf]) + ylim([-nbdry_shelf, nbdry_shelf]) + title('c) FESOM (high-res)', fontsize=28) + ax.set_xticks([]) + ax.set_yticks([]) + # Add a horizontal colourbar on the bottom + cbaxes = fig.add_axes([0.3, 0.05, 0.4, 0.04]) + cbar = colorbar(img, orientation='horizontal', cax=cbaxes, extend='max', ticks=arange(0, colour_bound_shelf+0.05, 0.05)) + cbar.ax.tick_params(labelsize=20) + # Main title + suptitle(r'Ocean surface stress (N/m$^2$), 1992 mean', fontsize=34) + fig.show() + fig.savefig('sfc_stress_shelf.png') + + +# Command-line interface +if __name__ == "__main__": + + mip_sfc_stress() diff --git a/mip_std_massloss.py b/mip_std_massloss.py new file mode 100644 index 0000000..3a62eb6 --- /dev/null +++ b/mip_std_massloss.py @@ -0,0 +1,290 @@ +from numpy import * + +def mip_std_massloss (roms_logfile, roms_logfile_bs, fesom_logfile_lr, fesom_logfile_bs_lr, fesom_logfile_hr, fesom_logfile_bs_hr): + + year_start = 1992 + year_end = 2016 + # First year to consider + calc_start = 2002 + # Days per output in FESOM + days_per_output = 5 + # Name of each ice shelf + names = ['Total Mass Loss', 'Larsen D Ice Shelf', 'Larsen C Ice Shelf', 'Wilkins & George VI & Stange & Bach Ice Shelves', 'Ronne-Filchner Ice Shelf', 'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf', 'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf', 'Nickerson Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf', 'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf', 'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf', 'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf', 'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves', 'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf'] + num_shelves = len(names) + # Some Bellingshausen ice shelves were split up later + names_bs = ['Wilkins Ice Shelf', 'Stange Ice Shelf', 'George VI Ice Shelf'] + num_shelves_bs = len(names_bs) + + num_years = year_end-year_start+1 + num_years_calc = year_end-calc_start+1 + + # Read ROMS logfile + roms_time = [] + f = open(roms_logfile, 'r') + # Skip the first line (header for time array) + f.readline() + for line in f: + try: + roms_time.append(float(line)) + except(ValueError): + # Reached the header for the next variable + break + # Add start year to time array + roms_time = array(roms_time) + year_start + # Set up array for mass loss values at each ice shelf + roms_massloss = empty([num_shelves, len(roms_time)]) + 
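+ # The logfiles are assumed to alternate text headers with blocks of values, eg: + # Time (years): + # 0.0137 + # 0.0274 + # Total Mass Loss: + # 1234.5 + # 1250.2 + # (header text illustrative). One block follows per ice shelf in the order of the names list above; the float() cast fails on each header line, which is how the loops below detect the start of the next block +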
index = 0 + # Loop over ice shelves + while index < num_shelves: + t = 0 + for line in f: + try: + roms_massloss[index, t] = float(line) + t += 1 + except(ValueError): + # Reached the header for the next ice shelf + break + index += 1 + f.close() + # Repeat for Bellingshausen + f = open(roms_logfile_bs, 'r') + f.readline() + # Skip the time values (should be the same) + for line in f: + try: + tmp = float(line) + except(ValueError): + # Reached the header for the next variable + break + roms_massloss_bs = empty([num_shelves_bs, len(roms_time)]) + index = 0 + while index < num_shelves_bs: + t = 0 + for line in f: + try: + roms_massloss_bs[index, t] = float(line) + t += 1 + except(ValueError): + break + index += 1 + f.close() + + # Read FESOM logfiles + # Low-res + tmp = [] + f = open(fesom_logfile_lr, 'r') + # Skip the first line (header) + f.readline() + # Read total mass loss + num_time = 0 + for line in f: + try: + tmp.append(float(line)) + num_time += 1 + except(ValueError): + # Reached the header for the next variable + break + # Set up array for mass loss values at each ice shelf + fesom_massloss_lr = empty([num_shelves, num_time]) + # Save the total values in the first index + fesom_massloss_lr[0,:] = array(tmp) + # Loop over ice shelves + index = 1 + while index < num_shelves: + t = 0 + for line in f: + try: + fesom_massloss_lr[index,t] = float(line) + t += 1 + except(ValueError): + # Reached the header for the next ice shelf + break + index += 1 + f.close() + # Repeat for Bellingshausen + f = open(fesom_logfile_bs_lr, 'r') + f.readline() + fesom_massloss_bs_lr = empty([num_shelves_bs, num_time]) + index = 0 + while index < num_shelves_bs: + t = 0 + for line in f: + try: + fesom_massloss_bs_lr[index,t] = float(line) + t += 1 + except(ValueError): + break + index += 1 + f.close() + + # High-res + tmp = [] + f = open(fesom_logfile_hr, 'r') + f.readline() + num_time = 0 + for line in f: + try: + tmp.append(float(line)) + num_time += 1 + except(ValueError): + break + fesom_massloss_hr = empty([num_shelves, num_time]) + fesom_massloss_hr[0,:] = array(tmp) + index = 1 + while index < num_shelves: + t = 0 + for line in f: + try: + fesom_massloss_hr[index,t] = float(line) + t += 1 + except(ValueError): + break + index += 1 + f.close() + f = open(fesom_logfile_bs_hr, 'r') + f.readline() + fesom_massloss_bs_hr = empty([num_shelves_bs, num_time]) + index = 0 + while index < num_shelves_bs: + t = 0 + for line in f: + try: + fesom_massloss_bs_hr[index,t] = float(line) + t += 1 + except(ValueError): + break + index += 1 + f.close() + + # Annually average + # ROMS + roms_massloss_avg = empty([num_shelves, num_years]) + for index in range(num_shelves): + roms_massloss_avg[index,:] = roms_annual_avg(roms_time, roms_massloss[index,:], year_start) + roms_massloss_bs_avg = empty([num_shelves_bs, num_years]) + for index in range(num_shelves_bs): + roms_massloss_bs_avg[index,:] = roms_annual_avg(roms_time, roms_massloss_bs[index,:], year_start) + # Low-res FESOM + fesom_massloss_lr_avg = empty([num_shelves, num_years]) + for index in range(num_shelves): + fesom_massloss_lr_avg[index,:] = fesom_annual_avg(fesom_massloss_lr[index,:], days_per_output) + fesom_massloss_bs_lr_avg = empty([num_shelves_bs, num_years]) + for index in range(num_shelves_bs): + fesom_massloss_bs_lr_avg[index,:] = fesom_annual_avg(fesom_massloss_bs_lr[index,:], days_per_output) + # High-res FESOM + fesom_massloss_hr_avg = empty([num_shelves, num_years]) + for index in range(num_shelves): + fesom_massloss_hr_avg[index,:] = 
fesom_annual_avg(fesom_massloss_hr[index,:], days_per_output) + fesom_massloss_bs_hr_avg = empty([num_shelves_bs, num_years]) + for index in range(num_shelves_bs): + fesom_massloss_bs_hr_avg[index,:] = fesom_annual_avg(fesom_massloss_bs_hr[index,:], days_per_output) + # Slice off the years we don't care about + roms_massloss_avg = roms_massloss_avg[:,calc_start-year_start:] + roms_massloss_bs_avg = roms_massloss_bs_avg[:,calc_start-year_start:] + fesom_massloss_lr_avg = fesom_massloss_lr_avg[:,calc_start-year_start:] + fesom_massloss_bs_lr_avg = fesom_massloss_bs_lr_avg[:,calc_start-year_start:] + fesom_massloss_hr_avg = fesom_massloss_hr_avg[:,calc_start-year_start:] + fesom_massloss_bs_hr_avg = fesom_massloss_bs_hr_avg[:,calc_start-year_start:] + + # Loop over ice shelves + for index in range(num_shelves): + print names[index] + # Calculate standard deviation as percent of annual mean + print 'MetROMS: ' + str(std(roms_massloss_avg[index])/mean(roms_massloss_avg[index])*100) + '%' + print 'FESOM low-res: ' + str(std(fesom_massloss_lr_avg[index])/mean(fesom_massloss_lr_avg[index])*100) + '%' + print 'FESOM high-res: ' + str(std(fesom_massloss_hr_avg[index])/mean(fesom_massloss_hr_avg[index])*100) + '%' + #print 'MetROMS: ' + str((median(roms_massloss_avg[index])-mean(roms_massloss_avg[index]))/mean(roms_massloss_avg[index])*100) + #print 'FESOM low-res: ' + str((median(fesom_massloss_lr_avg[index])-mean(fesom_massloss_lr_avg[index]))/mean(fesom_massloss_lr_avg[index])*100) + #print 'FESOM high-res: ' + str((median(fesom_massloss_hr_avg[index])-mean(fesom_massloss_hr_avg[index]))/mean(fesom_massloss_hr_avg[index])*100) + #print 'MetROMS: mean ' + str(mean(roms_massloss_avg[index])) + ', median ' + str(median(roms_massloss_avg[index])) + #print 'FESOM low-res: mean ' + str(mean(fesom_massloss_lr_avg[index])) + ', median ' + str(median(fesom_massloss_lr_avg[index])) + #print 'FESOM high-res: mean ' + str(mean(fesom_massloss_hr_avg[index])) + ', median ' + str(median(fesom_massloss_hr_avg[index])) + + # Repeat for Bellingshausen ice shelves + for index in range(num_shelves_bs): + print names_bs[index] + print 'MetROMS: ' + str(std(roms_massloss_bs_avg[index])/mean(roms_massloss_bs_avg[index])*100) + '%' + print 'FESOM low-res: ' + str(std(fesom_massloss_bs_lr_avg[index])/mean(fesom_massloss_bs_lr_avg[index])*100) + '%' + print 'FESOM high-res: ' + str(std(fesom_massloss_bs_hr_avg[index])/mean(fesom_massloss_bs_hr_avg[index])*100) + '%' + #print 'MetROMS: ' + str((median(roms_massloss_bs_avg[index])-mean(roms_massloss_bs_avg[index]))/mean(roms_massloss_bs_avg[index])*100) + #print 'FESOM low-res: ' + str((median(fesom_massloss_bs_lr_avg[index])-mean(fesom_massloss_bs_lr_avg[index]))/mean(fesom_massloss_bs_lr_avg[index])*100) + #print 'FESOM high-res: ' + str((median(fesom_massloss_bs_hr_avg[index])-mean(fesom_massloss_bs_hr_avg[index]))/mean(fesom_massloss_bs_hr_avg[index])*100) + #print 'MetROMS: mean ' + str(mean(roms_massloss_bs_avg[index])) + ', median ' + str(median(roms_massloss_bs_avg[index])) + #print 'FESOM low-res: mean ' + str(mean(fesom_massloss_bs_lr_avg[index])) + ', median ' + str(median(fesom_massloss_bs_lr_avg[index])) + #print 'FESOM high-res: mean ' + str(mean(fesom_massloss_bs_hr_avg[index])) + ', median ' + str(median(fesom_massloss_bs_hr_avg[index])) + + +# Annually average the given ROMS data. Note that ROMS takes leap years into +# account for its output, i.e. the 5-day averaging period holds true throughout +# the entire simulation with no days skipped. 
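+# For example, a record timestamped 1992.9986 falls in 1992 and one timestamped 1993.0014 falls in 1993; any year with fewer than 73 records is treated as partial and averaging stops there.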
+# Input: +# time = 1D array of ROMS time values, in years, with year_start added (eg +# 1992.001, 1993.50) assuming year length of 365.25 days +# data = 1D array of ROMS data corresponding to time array +# year_start = integer containing the first year to process +# Output: data_avg = 1D array of annually averaged data +def roms_annual_avg (time, data, year_start): + + data_avg = [] + year = year_start + # Loop over years until we run out of data + while True: + # Find the first index after the beginning of this year + tmp1 = nonzero(time >= year)[0] + if len(tmp1)==0: + # No such index, we have run out of data + break + t_start = tmp1[0] + # Find the first index after the beginning of next year + tmp2 = nonzero(time >= year+1)[0] + if len(tmp2)==0: + # No such index, but we might not have run out of data + # eg simulation that ends on 31 December 2016 + t_end = len(time) + else: + t_end = tmp2[0] + if t_end - t_start < 73: + # This is a partial year, don't count it + break + # Calculate mean between these bounds + data_avg.append(mean(array(data[t_start:t_end]))) + # Increment year + year += 1 + return data_avg + + +# Annually average the given FESOM data. Note that FESOM neglects leap years +# in its output, i.e. the 5-day averaging period will skip 1 day every 4 years, +# so every year has exactly 365/5 = 73 batches of output. +# Input: +# data = 1D array of FESOM data +# days_per_output = averaging period for data +# Output: data_avg = 1D array of annually averaged data +def fesom_annual_avg (data, days_per_output): + + peryear = 365/days_per_output + data_avg = [] + year = 0 + # Loop over years + while True: + if len(data) < peryear*(year+1): + # Run out of data + # FESOM output comes in annual files so we don't have to worry about + # partial years like in ROMS + break + # Calculate mean for this year + data_avg.append(mean(array(data[peryear*year:peryear*(year+1)]))) + # Increment year + year += 1 + return data_avg + + +# Command-line interface +if __name__ == "__main__": + + roms_logfile = raw_input("Path to ROMS logfile from timeseries_massloss.py: ") + roms_logfile_bs = raw_input("Path to ROMS logfile from timeseries_massloss_bellingshausen.py: ") + fesom_logfile_lr = raw_input("Path to FESOM low-res logfile from timeseries_massloss.py: ") + fesom_logfile_bs_lr = raw_input("Path to FESOM low-res logfile from timeseries_massloss_bellingshausen.py: ") + fesom_logfile_hr = raw_input("Path to FESOM high-res logfile from timeseries_massloss.py: ") + fesom_logfile_bs_hr = raw_input("Path to FESOM high-res logfile from timeseries_massloss_bellingshausen.py: ") + mip_std_massloss(roms_logfile, roms_logfile_bs, fesom_logfile_lr, fesom_logfile_bs_lr, fesom_logfile_hr, fesom_logfile_bs_hr) diff --git a/mip_tamura_binning.py b/mip_tamura_binning.py new file mode 100644 index 0000000..bbd5782 --- /dev/null +++ b/mip_tamura_binning.py @@ -0,0 +1,234 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from scipy.interpolate import griddata +from cartesian_grid_2d import * +import sys +sys.path.insert(0, '/short/y99/kaa561/fesomtools') +from fesom_grid import * + +def mip_seaice_tamura (): + + # File paths + # ROMS grid (just for bathymetry) + roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + # FESOM mesh paths + fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/' + fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/' + # CICE 1992-2013 mean ice production (precomputed in calc_ice_prod.py) + cice_file = 
'/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/ice_prod_1992_2013.nc' + # FESOM 1992-2013 mean ice production (precomputed in calc_annual_ice_prod.py in fesomtools) + fesom_lr_file = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/ice_prod_1992_2013.nc' + fesom_hr_file = '/short/y99/kaa561/FESOM/intercomparison_highres/output/ice_prod_1992_2013.nc' + # Tamura's 1992-2013 mean ice production (precomputed on desktop with Matlab) + tamura_file = '/short/m68/kaa561/tamura_1992_2013_monthly_climatology.nc' + # Output ASCII file + output_file = 'seaice_prod_bins.log' + # Size of longitude bin + dlon_bin = 1.0 + # Definition of continental shelf: everywhere south of lat0 with + # bathymetry shallower than h0 + lat0 = -60 + h0 = 1500 + # Radius of the Earth in metres + r = 6.371e6 + # Degrees to radians conversion factor + deg2rad = pi/180.0 + + # Set up longitude bins + bin_edges = arange(-180, 180+dlon_bin, dlon_bin) + bin_centres = 0.5*(bin_edges[:-1] + bin_edges[1:]) + num_bins = len(bin_centres) + + print 'Processing MetROMS' + # Read CICE grid + id = Dataset(cice_file, 'r') + cice_lon = id.variables['TLON'][:,:] + cice_lat = id.variables['TLAT'][:,:] + # Read sea ice production + cice_data = id.variables['ice_prod'][:,:] + id.close() + # Get area integrands + dx, dy = cartesian_grid_2d(cice_lon, cice_lat) + dA = dx*dy + # Make sure longitude is in the range [-180, 180] + index = cice_lon > 180 + cice_lon[index] = cice_lon[index] - 360 + # Read bathymetry (ROMS grid file) and trim to CICE grid + id = Dataset(roms_grid, 'r') + cice_bathy = id.variables['h'][1:-1,1:-1] + id.close() + # Set up integral + cice_data_bins = zeros(num_bins) + # Loop over all cells + num_lon = size(cice_lon,1) + num_lat = size(cice_lat,0) + for j in range(num_lat): + for i in range(num_lon): + # Check for land mask or ice shelves + if cice_data[j,i] is ma.masked: + continue + # Check for continental shelf + if cice_lat[j,i] < lat0 and cice_bathy[j,i] < h0: + # Find the right bin + bin_index = nonzero(bin_edges > cice_lon[j,i])[0][0] - 1 + # Integrate (m^3/y) + cice_data_bins[bin_index] += cice_data[j,i]*dA[j,i] + # Convert to 10^9 m^3/y + cice_data_bins *= 1e-9 + + print 'Processing low-res FESOM' + # Build mesh + elements_lr = fesom_grid(fesom_mesh_path_lr, circumpolar=True, cross_180=False) + # Read sea ice production + id = Dataset(fesom_lr_file, 'r') + fesom_data_lr = id.variables['ice_prod'][:] + id.close() + # Set up integral + fesom_data_bins_lr = zeros(num_bins) + # Loop over elements + for elm in elements_lr: + # Exclude ice shelf cavities + if not elm.cavity: + # Check for continental shelf in 2 steps + if all(elm.lat < lat0): + elm_bathy = mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth]) + if elm_bathy < h0: + # Get element-averaged sea ice production + elm_data = mean([fesom_data_lr[elm.nodes[0].id], fesom_data_lr[elm.nodes[1].id], fesom_data_lr[elm.nodes[2].id]]) + # Find the right bin + elm_lon = mean(elm.lon) + if elm_lon < -180: + elm_lon += 360 + elif elm_lon > 180: + elm_lon -= 360 + bin_index = nonzero(bin_edges > elm_lon)[0][0] - 1 + # Integrate (m^3/y) + fesom_data_bins_lr[bin_index] += elm_data*elm.area() + # Convert to 10^9 m^3/y + fesom_data_bins_lr *= 1e-9 + + print 'Processing high-res FESOM' + elements_hr = fesom_grid(fesom_mesh_path_hr, circumpolar=True, cross_180=False) + id = Dataset(fesom_hr_file, 'r') + fesom_data_hr = id.variables['ice_prod'][:] + id.close() + fesom_data_bins_hr = zeros(num_bins) + for 
elm in elements_hr: + if not elm.cavity: + if all(elm.lat < lat0): + elm_bathy = mean([elm.nodes[0].find_bottom().depth, elm.nodes[1].find_bottom().depth, elm.nodes[2].find_bottom().depth]) + if elm_bathy < h0: + elm_data = mean([fesom_data_hr[elm.nodes[0].id], fesom_data_hr[elm.nodes[1].id], fesom_data_hr[elm.nodes[2].id]]) + elm_lon = mean(elm.lon) + if elm_lon < -180: + elm_lon += 360 + elif elm_lon > 180: + elm_lon -= 360 + bin_index = nonzero(bin_edges > elm_lon)[0][0] - 1 + fesom_data_bins_hr[bin_index] += elm_data*elm.area() + fesom_data_bins_hr *= 1e-9 + + print 'Processing Tamura obs' + id = Dataset(tamura_file, 'r') + # Read grid and data + tamura_lon = id.variables['longitude'][:,:] + tamura_lat = id.variables['latitude'][:,:] + # Read sea ice formation + tamura_data = id.variables['ice_prod'][:,:] + id.close() + # Interpolate to a regular grid so we can easily integrate over area + dlon_reg = 0.2 + dlat_reg = 0.1 + lon_reg_edges = arange(-180, 180+dlon_reg, dlon_reg) + lon_reg = 0.5*(lon_reg_edges[:-1] + lon_reg_edges[1:]) + lat_reg_edges = arange(-80, -60+dlat_reg, dlat_reg) + lat_reg = 0.5*(lat_reg_edges[:-1] + lat_reg_edges[1:]) + lon_reg_2d, lat_reg_2d = meshgrid(lon_reg, lat_reg) + dx_reg = r*cos(lat_reg_2d*deg2rad)*dlon_reg*deg2rad + dy_reg = r*dlat_reg*deg2rad + dA_reg = dx_reg*dy_reg + # Be careful with the periodic boundary here + num_pts = size(tamura_lon) + num_wrap1 = count_nonzero(tamura_lon < -179) + num_wrap2 = count_nonzero(tamura_lon > 179) + points = empty([num_pts+num_wrap1+num_wrap2,2]) + values = empty(num_pts+num_wrap1+num_wrap2) + points[:num_pts,0] = ravel(tamura_lon) + points[:num_pts,1] = ravel(tamura_lat) + values[:num_pts] = ravel(tamura_data) + # Wrap the periodic boundary on both sides + index = tamura_lon < -179 + points[num_pts:num_pts+num_wrap1,0] = tamura_lon[index] + 360 + points[num_pts:num_pts+num_wrap1,1] = tamura_lat[index] + values[num_pts:num_pts+num_wrap1] = tamura_data[index] + index = tamura_lon > 179 + points[num_pts+num_wrap1:,0] = tamura_lon[index] - 360 + points[num_pts+num_wrap1:,1] = tamura_lat[index] + values[num_pts+num_wrap1:] = tamura_data[index] + values = ma.masked_where(isnan(values), values) + xi = empty([size(lon_reg_2d),2]) + xi[:,0] = ravel(lon_reg_2d) + xi[:,1] = ravel(lat_reg_2d) + result = griddata(points, values, xi) + tamura_data_reg = reshape(result, shape(lon_reg_2d)) + # Now, regrid the MetROMS bathymetry to this regular grid + num_pts = size(cice_lon) + num_wrap1 = count_nonzero(cice_lon < -179) + num_wrap2 = count_nonzero(cice_lon > 179) + points = empty([num_pts+num_wrap1+num_wrap2,2]) + values = empty(num_pts+num_wrap1+num_wrap2) + points[:num_pts,0] = ravel(cice_lon) + points[:num_pts,1] = ravel(cice_lat) + values[:num_pts] = ravel(cice_bathy) + index = cice_lon < -179 + points[num_pts:num_pts+num_wrap1,0] = cice_lon[index] + 360 + points[num_pts:num_pts+num_wrap1,1] = cice_lat[index] + values[num_pts:num_pts+num_wrap1] = cice_bathy[index] + index = cice_lon > 179 + points[num_pts+num_wrap1:,0] = cice_lon[index] - 360 + points[num_pts+num_wrap1:,1] = cice_lat[index] + values[num_pts+num_wrap1:] = cice_bathy[index] + values = ma.masked_where(isnan(values), values) + xi = empty([size(lon_reg_2d),2]) + xi[:,0] = ravel(lon_reg_2d) + xi[:,1] = ravel(lat_reg_2d) + result = griddata(points, values, xi) + bathy_reg = reshape(result, shape(lon_reg_2d)) + # Mask everything but the continental shelf from dA_reg + dA_reg = ma.masked_where(lat_reg_2d > lat0, dA_reg) + dA_reg = ma.masked_where(bathy_reg > h0, dA_reg) + 
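+ # Masked cells drop out of the masked-array sums below, so the longitude-bin integral only counts continental-shelf points, mirroring the lat0/h0 test applied to the CICE and FESOM fields above +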
+    # Mask out land and ice shelf points (NaNs from the interpolation) in tamura_data_reg
+    tamura_data_reg = ma.masked_where(isnan(tamura_data_reg), tamura_data_reg)
+    # Set up array to hold binned integrals
+    tamura_data_bins = zeros(num_bins)
+    # Loop over longitude only
+    for i in range(len(lon_reg)):
+        # Find the right bin
+        bin_index = nonzero(bin_edges > lon_reg[i])[0][0] - 1
+        # Integrate (m^3/y)
+        tamura_data_bins[bin_index] += sum(tamura_data_reg[:,i]*dA_reg[:,i])
+    # Convert to 10^9 m^3/y
+    tamura_data_bins *= 1e-9
+
+    # Write data to ASCII file
+    print 'Writing to file'
+    f = open(output_file, 'w')
+    f.write('Longitude:\n')
+    for val in bin_centres:
+        f.write(str(val) + '\n')
+    f.write('MetROMS sea ice production (10^9 m^3/y):\n')
+    for val in cice_data_bins:
+        f.write(str(val) + '\n')
+    f.write('FESOM (low-res) sea ice production (10^9 m^3/y):\n')
+    for val in fesom_data_bins_lr:
+        f.write(str(val) + '\n')
+    f.write('FESOM (high-res) sea ice production (10^9 m^3/y):\n')
+    for val in fesom_data_bins_hr:
+        f.write(str(val) + '\n')
+    f.write('Tamura sea ice production (10^9 m^3/y):\n')
+    for val in tamura_data_bins:
+        f.write(str(val) + '\n')
+    f.close()
+
diff --git a/mip_tamura_circumpolar_plot.py b/mip_tamura_circumpolar_plot.py
new file mode 100644
index 0000000..73efef2
--- /dev/null
+++ b/mip_tamura_circumpolar_plot.py
@@ -0,0 +1,151 @@
+from numpy import *
+from netCDF4 import Dataset
+from matplotlib.pyplot import *
+from matplotlib.collections import PatchCollection
+#from matplotlib.colors import *
+# Import FESOM scripts (have to modify path first)
+import sys
+sys.path.insert(0, '/short/y99/kaa561/fesomtools')
+from patches import *
+
+def mip_tamura_circumpolar_plot ():
+
+    # File paths
+    # FESOM mesh paths
+    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
+    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
+    # CICE 1992-2013 mean ice production file (precomputed in calc_ice_prod.py)
+    cice_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/ice_prod_1992_2013.nc'
+    # FESOM 1992-2013 mean ice production files (precomputed in calc_annual_ice_prod.py in fesomtools)
+    fesom_lr_file = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/ice_prod_1992_2013.nc'
+    fesom_hr_file = '/short/y99/kaa561/FESOM/intercomparison_highres/output/ice_prod_1992_2013.nc'
+    # Tamura's 1992-2013 mean ice production (precomputed on desktop with Matlab)
+    tamura_file = '/short/m68/kaa561/tamura_1992_2013_monthly_climatology.nc'
+    # FESOM plotting parameters
+    circumpolar = True
+    mask_cavities = True
+    # Degrees to radians conversion factor
+    deg2rad = pi/180.0
+    # Northern boundary for plot (64S), expressed as a radius in polar coordinates
+    nbdry = -64 + 90
+    # Maximum value to plot (m/y)
+    plot_bound = 30
+
+    print 'Processing Tamura obs'
+    id = Dataset(tamura_file, 'r')
+    tamura_lon = id.variables['longitude'][:,:]
+    tamura_lat = id.variables['latitude'][:,:]
+    # Read precomputed sea ice formation (already in m/y)
+    tamura_data = id.variables['ice_prod'][:,:]
+    id.close()
+    # Polar coordinates for plotting
+    tamura_x = -(tamura_lat+90)*cos(tamura_lon*deg2rad+pi/2)
+    tamura_y = (tamura_lat+90)*sin(tamura_lon*deg2rad+pi/2)
+
+    print 'Processing MetROMS'
+    id = Dataset(cice_file, 'r')
+    cice_lon_tmp = id.variables['TLON'][:,:]
+    cice_lat_tmp = id.variables['TLAT'][:,:]
+    # Read precomputed sea ice formation (already in m/y)
+    cice_data_tmp = id.variables['ice_prod'][:,:]
+    id.close()
+    # Wrap the periodic boundary by 1 cell
+    cice_lon = ma.empty([size(cice_lon_tmp,0), size(cice_lon_tmp,1)+1])
+    cice_lat = ma.empty([size(cice_lat_tmp,0),
size(cice_lat_tmp,1)+1]) + cice_data = ma.empty([size(cice_data_tmp,0), size(cice_data_tmp,1)+1]) + cice_lon[:,:-1] = cice_lon_tmp + cice_lon[:,-1] = cice_lon_tmp[:,0] + cice_lat[:,:-1] = cice_lat_tmp + cice_lat[:,-1] = cice_lat_tmp[:,0] + cice_data[:,:-1] = cice_data_tmp + cice_data[:,-1] = cice_data_tmp[:,0] + # Polar coordinates for plotting + cice_x = -(cice_lat+90)*cos(cice_lon*deg2rad+pi/2) + cice_y = (cice_lat+90)*sin(cice_lon*deg2rad+pi/2) + + print 'Processing low-res FESOM' + # Build mesh + elements_lr, patches_lr = make_patches(fesom_mesh_path_lr, circumpolar, mask_cavities) + # Read precomputed sea ice formation (already in m/y) + id = Dataset(fesom_lr_file, 'r') + fesom_data_nodes_lr = id.variables['ice_prod'][:] + id.close() + # Find element-averages + fesom_data_lr = [] + for elm in elements_lr: + if not elm.cavity: + fesom_data_lr.append(mean(array([fesom_data_nodes_lr[elm.nodes[0].id], fesom_data_nodes_lr[elm.nodes[1].id], fesom_data_nodes_lr[elm.nodes[2].id]]))) + fesom_data_lr = array(fesom_data_lr) + + print 'Processing high-res FESOM' + elements_hr, patches_hr = make_patches(fesom_mesh_path_hr, circumpolar, mask_cavities) + id = Dataset(fesom_hr_file, 'r') + fesom_data_nodes_hr = id.variables['ice_prod'][:] + id.close() + fesom_data_hr = [] + for elm in elements_hr: + if not elm.cavity: + fesom_data_hr.append(mean(array([fesom_data_nodes_hr[elm.nodes[0].id], fesom_data_nodes_hr[elm.nodes[1].id], fesom_data_nodes_hr[elm.nodes[2].id]]))) + fesom_data_hr = array(fesom_data_hr) + + #bounds = linspace(0, plot_bound**(1.0/3), num=100)**3 + #norm = BoundaryNorm(boundaries=bounds, ncolors=256) + + print 'Plotting' + fig = figure(figsize=(10,12)) + gs = GridSpec(2,2) + gs.update(left=0.05, right=0.95, bottom=0.09, top=0.9, wspace=0.02, hspace=0.1) + # Tamura obs + ax = subplot(gs[0,0], aspect='equal') + img = pcolor(tamura_x, tamura_y, tamura_data, vmin=0, vmax=plot_bound, cmap='jet') + xlim([-nbdry, nbdry]) + ylim([-nbdry, nbdry]) + ax.set_xticks([]) + ax.set_yticks([]) + title('Observations', fontsize=24) + # MetROMS + ax = subplot(gs[0,1], aspect='equal') + img = pcolor(cice_x, cice_y, cice_data, vmin=0, vmax=plot_bound, cmap='jet') #, norm=norm) + xlim([-nbdry, nbdry]) + ylim([-nbdry, nbdry]) + ax.set_xticks([]) + ax.set_yticks([]) + title('MetROMS', fontsize=24) + # FESOM low-res + ax = subplot(gs[1,0], aspect='equal') + img = PatchCollection(patches_lr, cmap='jet') #, norm=norm) + img.set_array(fesom_data_lr) + img.set_clim(0, plot_bound) + img.set_edgecolor('face') + ax.add_collection(img) + xlim([-nbdry, nbdry]) + ylim([-nbdry, nbdry]) + ax.set_xticks([]) + ax.set_yticks([]) + title('FESOM (low-res)', fontsize=24) + # FESOM high-res + ax = subplot(gs[1,1], aspect='equal') + img = PatchCollection(patches_hr, cmap='jet') #, norm=norm) + img.set_array(fesom_data_hr) + img.set_clim(0, plot_bound) + img.set_edgecolor('face') + ax.add_collection(img) + xlim([-nbdry, nbdry]) + ylim([-nbdry, nbdry]) + ax.set_xticks([]) + ax.set_yticks([]) + title('FESOM (high-res)', fontsize=24) + # Colourbar below + cbaxes = fig.add_axes([0.3, 0.03, 0.4, 0.03]) + cbar = colorbar(img, cax=cbaxes, extend='max', orientation='horizontal') + cbar.ax.tick_params(labelsize=16) + # Main title + suptitle('Sea ice production (m/y), 1992-2013 mean', fontsize=28) + fig.show() + fig.savefig('seaice_tamura.png') + + +# Command-line interface +if __name__ == "__main__": + + mip_tamura_circumpolar_plot() diff --git a/mip_tamura_plot.py b/mip_tamura_plot.py new file mode 100644 index 0000000..ba32aab --- 
/dev/null
+++ b/mip_tamura_plot.py
@@ -0,0 +1,90 @@
+from numpy import *
+from matplotlib.pyplot import *
+
+def mip_tamura_plot (log_file):
+
+    # Longitude to split the plot
+    lon_split = -65
+
+    # Read log file
+    f = open(log_file, 'r')
+    # Skip first line (header for longitude array)
+    f.readline()
+    lon = []
+    for line in f:
+        try:
+            lon.append(float(line))
+        except ValueError:
+            # Reached the header for the next variable
+            break
+    metroms_data = []
+    for line in f:
+        try:
+            metroms_data.append(float(line))
+        except ValueError:
+            break
+    fesom_data_lr = []
+    for line in f:
+        try:
+            fesom_data_lr.append(float(line))
+        except ValueError:
+            break
+    fesom_data_hr = []
+    for line in f:
+        try:
+            fesom_data_hr.append(float(line))
+        except ValueError:
+            break
+    tamura_data = []
+    for line in f:
+        tamura_data.append(float(line))
+    f.close()
+    lon = array(lon)
+    metroms_data = array(metroms_data)
+    fesom_data_lr = array(fesom_data_lr)
+    fesom_data_hr = array(fesom_data_hr)
+    tamura_data = array(tamura_data)
+
+    # Split at 65W so the x-axis runs from 65W eastward around to 65W
+    split_index = nonzero(lon > lon_split)[0][0]
+    lon = concatenate((lon[split_index:], lon[:split_index]+360))
+    metroms_data = concatenate((metroms_data[split_index:], metroms_data[:split_index]))
+    fesom_data_lr = concatenate((fesom_data_lr[split_index:], fesom_data_lr[:split_index]))
+    fesom_data_hr = concatenate((fesom_data_hr[split_index:], fesom_data_hr[:split_index]))
+    tamura_data = concatenate((tamura_data[split_index:], tamura_data[:split_index]))
+
+    # Plot
+    fig = figure(figsize=(13,6))
+    gs = GridSpec(1,1)
+    gs.update(left=0.07, right=0.97, bottom=0.1, top=0.9)
+    ax = subplot(gs[0,0])
+    plot(lon, metroms_data, color='blue', label='MetROMS')
+    plot(lon, fesom_data_lr, color='green', label='FESOM (low-res)')
+    plot(lon, fesom_data_hr, color='magenta', label='FESOM (high-res)')
+    plot(lon, tamura_data, color=(0.5,0.5,0.5), label='Observations (Tamura)')
+    grid(True)
+    legend()
+    xlim([lon[0], lon[-1]])
+    ax.set_xticks(arange(-60, -90+360+30, 30))
+    ax.set_xticklabels([r'60$^{\circ}$W', r'30$^{\circ}$W', r'0$^{\circ}$', r'30$^{\circ}$E', r'60$^{\circ}$E', r'90$^{\circ}$E', r'120$^{\circ}$E', r'150$^{\circ}$E', r'180$^{\circ}$', r'150$^{\circ}$W', r'120$^{\circ}$W', r'90$^{\circ}$W'])
+    xlabel('Longitude', fontsize=16)
+    ylim([0,100])
+    ylabel(r'10$^9$ m$^3$/y', fontsize=16)
+    title('Sea ice production on continental shelf, 1992-2013 mean', fontsize=20)
+    text(-40, 60, 'Weddell\nSea', fontsize=16, ha='center', va='center')
+    text(60, 70, 'Prydz\nBay', fontsize=16, ha='center', va='center')
+    text(115, 58, 'Australian Sector', fontsize=16, ha='center', va='center')
+    text(180, 73, 'Ross\nSea', fontsize=16, ha='center', va='center')
+    text(-105+360, 50, 'Amundsen\nSea', fontsize=16, ha='center', va='center')
+    fig.show()
+    fig.savefig('tamura_plot.png')
+
+    # Percent differences in total production relative to Tamura
+    # (negative values mean the model is lower than the observations)
+    print 'MetROMS total sea ice production differs from Tamura by ' + str((mean(metroms_data)-mean(tamura_data))/mean(tamura_data)*100) + '%'
+    print 'FESOM low-res total sea ice production differs from Tamura by ' + str((mean(fesom_data_lr)-mean(tamura_data))/mean(tamura_data)*100) + '%'
+    print 'FESOM high-res total sea ice production differs from Tamura by ' + str((mean(fesom_data_hr)-mean(tamura_data))/mean(tamura_data)*100) + '%'
+
+# Command-line interface
+if __name__ == "__main__":
+
+    log_file = raw_input("Logfile from mip_tamura_binning.py: ")
+    mip_tamura_plot(log_file)
+
diff --git a/mip_ts_distribution_ecco2.py b/mip_ts_distribution_ecco2.py
new file mode 100644
index 0000000..1a5a13e
--- /dev/null
+++ b/mip_ts_distribution_ecco2.py
@@ -0,0 +1,186
@@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from matplotlib.colors import * +from cartesian_grid_3d import * +from unesco import * + +def mip_ts_distribution_ecco2 (): + + # Beginning of ECCO2 filenames + temp_file_head = '/short/m68/kaa561/metroms_iceshelf/data/originals/ECCO2/THETA.1440x720x50.1992' + salt_file_head = '/short/m68/kaa561/metroms_iceshelf/data/originals/ECCO2/SALT.1440x720x50.1992' + # Northern boundary of water masses to consider + nbdry = -65 + # Number of temperature and salinity bins + num_bins_temp = 1000 + num_bins_salt = 2000 + # Bounds on temperature and salinity bins (pre-computed, change if needed) + min_salt = 32.3 + max_salt = 40.1 + min_temp = -3.1 + max_temp = 3.8 + # Bounds to actually plot + min_salt_plot = 33.25 + max_salt_plot = 35.1 + min_temp_plot = -3 + max_temp_plot = 3.8 + # Radius of the Earth in metres + r = 6.371e6 + # Degrees to radians conversion factor + deg2rad = pi/180.0 + + print 'Setting up bins' + # Calculate boundaries of temperature bins + temp_bins = linspace(min_temp, max_temp, num=num_bins_temp) + # Calculate centres of temperature bins (for plotting) + temp_centres = 0.5*(temp_bins[:-1] + temp_bins[1:]) + # Repeat for salinity + salt_bins = linspace(min_salt, max_salt, num=num_bins_salt) + salt_centres = 0.5*(salt_bins[:-1] + salt_bins[1:]) + # Set up 2D array of temperature bins x salinity bins to hold average + # depth of water masses, weighted by volume + ts_vals = zeros([size(temp_centres), size(salt_centres)]) + # Also array to integrate volume + volume = zeros([size(temp_centres), size(salt_centres)]) + # Calculate surface freezing point as a function of salinity as seen by + # CICE + freezing_pt = salt_centres/(-18.48 + 18.48/1e3*salt_centres) + # Get 2D versions of the temperature and salinity bins + salt_2d, temp_2d = meshgrid(salt_centres, temp_centres) + # Calculate potential density of each combination of temperature and + # salinity bins + density = unesco(temp_2d, salt_2d, zeros(shape(temp_2d)))-1000 + # Density contours to plot + density_lev = arange(26.6, 28.4, 0.2) + + print 'Reading grid' + # Read grid from first file + id = Dataset(temp_file_head + '01.nc', 'r') + lon = id.variables['LONGITUDE_T'][:] + lat = id.variables['LATITUDE_T'][:] + z = id.variables['DEPTH_T'][:] + id.close() + num_lon = size(lon) + num_lat = size(lat) + num_depth = size(z) + # Calculate integrands + # Interpolate to get longitude at the edges of each cell + lon_edges = zeros(num_lon+1) + lon_edges[1:-1] = 0.5*(lon[:-1] + lon[1:]) + lon_edges[0] = 0.5*(lon[0] + lon[-1] - 360) + lon_edges[-1] = 0.5*(lon[0] + 360 + lon[-1]) + dlon = lon_edges[1:] - lon_edges[:-1] + # Similarly for latitude; linearly extrapolate for edges (which don't matter) + lat_edges = zeros(num_lat+1) + lat_edges[1:-1] = 0.5*(lat[:-1] + lat[1:]) + lat_edges[0] = 2*lat[0] - lat_edges[1] + lat_edges[-1] = 2*lat[-1] - lat_edges[-2] + dlat = lat_edges[1:] - lat_edges[:-1] + # Make 2D versions + lon_2d, lat_2d = meshgrid(lon, lat) + dlon_2d, dlat_2d = meshgrid(dlon, dlat) + # Convert to Cartesian space + dx_2d = r*cos(lat_2d*deg2rad)*dlon_2d*deg2rad + dy_2d = r*dlat_2d*deg2rad + # We have z at the midpoint of each cell, now find it on the top and + # bottom edges of each cell + z_edges = zeros(num_depth+1) + z_edges[1:-1] = 0.5*(z[:-1] + z[1:]) + # At the surface, z=0 + # At bottom, extrapolate + z_edges[-1] = 2*z[-1] - z_edges[-2] + # Now find dz + dz_1d = z_edges[1:] - z_edges[:-1] + # Tile each array to be 3D + dx_3d = tile(dx_2d, 
(num_depth,1,1))
+    dy_3d = tile(dy_2d, (num_depth,1,1))
+    dz_3d = transpose(tile(dz_1d, (num_lon,num_lat,1)))
+    # Get volume integrand
+    dV = dx_3d*dy_3d*dz_3d
+
+    print 'Reading data'
+    # Annual average over 1992
+    temp = ma.empty([num_depth, num_lat, num_lon])
+    salt = ma.empty([num_depth, num_lat, num_lon])
+    temp[:,:,:] = 0.0
+    salt[:,:,:] = 0.0
+    for month in range(12):
+        if month+1 < 10:
+            month_string = '0' + str(month+1)
+        else:
+            month_string = str(month+1)
+        id = Dataset(temp_file_head + month_string + '.nc', 'r')
+        temp[:,:,:] += id.variables['THETA'][0,:,:,:]
+        id.close()
+        id = Dataset(salt_file_head + month_string + '.nc', 'r')
+        salt[:,:,:] += id.variables['SALT'][0,:,:,:]
+        id.close()
+    # Convert from sums to averages
+    temp /= 12.0
+    salt /= 12.0
+
+    print 'Binning temperature and salinity'
+    # Loop over grid boxes
+    # Find the first latitude index north of 65S; stop there
+    j_max = nonzero(lat > nbdry)[0][0]
+    for k in range(num_depth):
+        for j in range(j_max):
+            for i in range(num_lon):
+                if temp[k,j,i] is ma.masked:
+                    # Land
+                    continue
+                # Figure out which bins this falls into
+                temp_index = nonzero(temp_bins > temp[k,j,i])[0][0] - 1
+                salt_index = nonzero(salt_bins > salt[k,j,i])[0][0] - 1
+                # Integrate depth*dV in this bin
+                ts_vals[temp_index, salt_index] += z[k]*dV[k,j,i]
+                volume[temp_index, salt_index] += dV[k,j,i]
+    # Mask bins with zero volume
+    ts_vals = ma.masked_where(volume==0, ts_vals)
+    volume = ma.masked_where(volume==0, volume)
+    # Convert depths from integrals to volume-averages
+    ts_vals /= volume
+
+    # Find the maximum depth for plotting
+    max_depth = amax(ts_vals)
+    # Make a nonlinear scale
+    bounds = linspace(0, max_depth**(1.0/2.5), num=100)**2.5
+    norm = BoundaryNorm(boundaries=bounds, ncolors=256)
+    # Set labels for density contours
+    manual_locations = [(33.4, 3.0), (33.65, 3.0), (33.9, 3.0), (34.2, 3.0), (34.45, 3.5), (34.65, 3.25), (34.9, 3.0), (35, 1.5)]
+
+    print 'Plotting'
+    fig = figure(figsize=(9,9))
+    ax = fig.add_subplot(1, 1, 1)
+    img = pcolor(salt_centres, temp_centres, ts_vals, norm=norm, vmin=0, vmax=max_depth, cmap='jet')
+    # Add surface freezing point line
+    plot(salt_centres, freezing_pt, color='black', linestyle='dashed')
+    # Add density contours
+    cs = contour(salt_centres, temp_centres, density, density_lev, colors=(0.6,0.6,0.6), linestyles='dotted')
+    clabel(cs, inline=1, fontsize=14, color=(0.6,0.6,0.6), fmt='%1.1f', manual=manual_locations)
+    xlim([min_salt_plot, max_salt_plot])
+    ylim([min_temp_plot, max_temp_plot])
+    ax.tick_params(axis='x', labelsize=14)
+    ax.tick_params(axis='y', labelsize=14)
+    xlabel('Salinity (psu)', fontsize=16)
+    ylabel(r'Temperature ($^{\circ}$C)', fontsize=16)
+    title('Water masses south of 65$^{\circ}$S: depth (m)\n1992 annual average, ECCO2', fontsize=20)
+    # Add a colourbar on the right
+    cbaxes = fig.add_axes([0.91, 0.3, 0.02, 0.4])
+    cbar = colorbar(img, cax=cbaxes, ticks=[0,50,100,200,500,1000,2000,4000])
+    cbar.ax.tick_params(labelsize=14)
+    fig.show()
+    fig.savefig('ts_distribution_ecco2.png')
+
+
+# Command-line interface
+if __name__ == "__main__":
+
+    mip_ts_distribution_ecco2()
+
+
diff --git a/slope_current.py b/slope_current.py
new file mode 100644
index 0000000..95be22a
--- /dev/null
+++ b/slope_current.py
@@ -0,0 +1,312 @@
+from netCDF4 import Dataset
+from numpy import *
+from matplotlib.pyplot import *
+from cartesian_grid_3d import *
+from rotate_vector_roms import *
+import sys
+sys.path.insert(0, '/short/y99/kaa561/fesomtools')
+from unrotate_grid import *
+from unrotate_vector import *
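+
+# Bin the maximum vertically averaged ocean speed over the continental
+# shelf and slope (south of lat0, shallower than h0, excluding ice shelf
+# cavities) into 1-degree longitude bins, for MetROMS and both FESOM
+# meshes, as a simple proxy for the Antarctic Slope Current.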
+
+def slope_current ():
+
+    # File paths
+    roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc'
+    roms_file = '/short/m68/kaa561/metroms_iceshelf/tmproms/run/intercomparison/2002_2016_avg.nc'
+    fesom_mesh_path_lr = '/short/y99/kaa561/FESOM/mesh/meshA/'
+    fesom_mesh_path_hr = '/short/y99/kaa561/FESOM/mesh/meshB/'
+    fesom_file_lr = '/short/y99/kaa561/FESOM/intercomparison_lowres/output/oce_2002_2016_avg.nc'
+    fesom_file_hr = '/short/y99/kaa561/FESOM/intercomparison_highres/output/oce_2002_2016_avg.nc'
+    # ROMS vertical grid parameters
+    theta_s = 7.0
+    theta_b = 2.0
+    hc = 250
+    N = 31
+    # FESOM mesh parameters
+    circumpolar = False
+    cross_180 = False
+    # Spacing of longitude bins
+    dlon = 1
+    # Parameters for continental shelf selection
+    lat0 = -64    # Maximum latitude to consider
+    h0 = 2500    # Deepest depth to consider
+
+    # Set up longitude bins
+    # Start with edges
+    lon_bins = arange(-180, 180+dlon, dlon)
+    # Centres for plotting
+    lon_centres = 0.5*(lon_bins[:-1] + lon_bins[1:])
+    num_bins = size(lon_centres)
+    # Set up arrays to store maximum barotropic speed in each bin
+    current_roms = zeros(num_bins)
+    current_fesom_lr = zeros(num_bins)
+    current_fesom_hr = zeros(num_bins)
+
+    print 'Processing MetROMS'
+
+    print 'Reading grid'
+    id = Dataset(roms_grid, 'r')
+    roms_lon = id.variables['lon_rho'][:,:]
+    roms_lat = id.variables['lat_rho'][:,:]
+    roms_h = id.variables['h'][:,:]
+    roms_zice = id.variables['zice'][:,:]
+    roms_angle = id.variables['angle'][:,:]
+    id.close()
+    print 'Reading data'
+    # Read full 3D u and v
+    id = Dataset(roms_file, 'r')
+    u_3d_tmp = id.variables['u'][0,:,:,:]
+    v_3d_tmp = id.variables['v'][0,:,:,:]
+    id.close()
+    print 'Vertically averaging velocity'
+    # Get integrands on 3D grid; we only care about dz
+    dx, dy, dz, z = cartesian_grid_3d(roms_lon, roms_lat, roms_h, roms_zice, theta_s, theta_b, hc, N)
+    # Unrotate each vertical level
+    u_3d = ma.empty(shape(dz))
+    v_3d = ma.empty(shape(dz))
+    num_lat_u = size(u_3d_tmp,1)
+    num_lon_u = size(u_3d_tmp,2)
+    num_lat_v = size(v_3d_tmp,1)
+    num_lon_v = size(v_3d_tmp,2)
+    for k in range(N):
+        u_k, v_k = rotate_vector_roms(u_3d_tmp[k,:,:], v_3d_tmp[k,:,:], roms_angle)
+        u_3d[k,:,:] = u_k
+        v_3d[k,:,:] = v_k
+    # Vertically average u and v
+    roms_u = sum(u_3d*dz, axis=0)/sum(dz, axis=0)
+    roms_v = sum(v_3d*dz, axis=0)/sum(dz, axis=0)
+    # Calculate speed
+    roms_speed = sqrt(roms_u**2 + roms_v**2)
+    print 'Selecting slope current'
+    # First make sure longitude is between -180 and 180
+    index = roms_lon > 180
+    roms_lon[index] = roms_lon[index] - 360
+    for j in range(size(roms_speed,0)):
+        for i in range(size(roms_speed,1)):
+            # Check if we care about this point
+            if roms_lat[j,i] <= lat0 and roms_h[j,i] <= h0 and roms_zice[j,i] == 0:
+                # Find longitude bin
+                lon_index = nonzero(lon_bins > roms_lon[j,i])[0][0] - 1
+                # Update slope current speed in this bin if needed
+                if roms_speed[j,i] > current_roms[lon_index]:
+                    current_roms[lon_index] = roms_speed[j,i]
+
+    print 'Processing low-res FESOM'
+
+    print 'Building mesh'
+    # We only care about nodes, not elements, so don't need to use the
+    # fesom_grid function.
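+    # Mesh file formats assumed by the parsing below: cavity_flag_nod2d.out
+    # has one 0/1 flag per line, one per 2D surface node; nod3d.out has one
+    # line per 3D node (id, lon, lat, depth, flag), with the 2D surface
+    # nodes listed first; aux3d.out starts with the maximum number of
+    # vertical layers, then lists, for each 2D node in turn, the 1-based
+    # IDs of the 3D nodes directly below it, padded with -999 past the
+    # seafloor.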
+    # Read cavity flag for each 2D surface node
+    fesom_cavity_lr = []
+    f = open(fesom_mesh_path_lr + 'cavity_flag_nod2d.out', 'r')
+    for line in f:
+        tmp = int(line)
+        if tmp == 1:
+            fesom_cavity_lr.append(True)
+        elif tmp == 0:
+            fesom_cavity_lr.append(False)
+        else:
+            print 'Problem'
+    f.close()
+    # Save the number of 2D nodes
+    fesom_n2d_lr = len(fesom_cavity_lr)
+    # Read rotated lat and lon for each node, also depth
+    f = open(fesom_mesh_path_lr + 'nod3d.out', 'r')
+    f.readline()
+    rlon_lr = []
+    rlat_lr = []
+    node_depth_lr = []
+    for line in f:
+        tmp = line.split()
+        lon_tmp = float(tmp[1])
+        lat_tmp = float(tmp[2])
+        node_depth_tmp = -1*float(tmp[3])
+        if lon_tmp < -180:
+            lon_tmp += 360
+        elif lon_tmp > 180:
+            lon_tmp -= 360
+        rlon_lr.append(lon_tmp)
+        rlat_lr.append(lat_tmp)
+        node_depth_lr.append(node_depth_tmp)
+    f.close()
+    # For lat and lon, only care about the 2D nodes (the first
+    # fesom_n2d_lr indices)
+    rlon_lr = array(rlon_lr[0:fesom_n2d_lr])
+    rlat_lr = array(rlat_lr[0:fesom_n2d_lr])
+    node_depth_lr = array(node_depth_lr)
+    # Unrotate longitude
+    fesom_lon_lr, fesom_lat_lr = unrotate_grid(rlon_lr, rlat_lr)
+    # Read lists of which nodes are directly below which
+    f = open(fesom_mesh_path_lr + 'aux3d.out', 'r')
+    max_num_layers_lr = int(f.readline())
+    node_columns_lr = zeros([fesom_n2d_lr, max_num_layers_lr])
+    for n in range(fesom_n2d_lr):
+        for k in range(max_num_layers_lr):
+            node_columns_lr[n,k] = int(f.readline())
+    node_columns_lr = node_columns_lr.astype(int)
+    f.close()
+    # Now figure out the bottom depth of each 2D node
+    bottom_depth_lr = zeros(fesom_n2d_lr)
+    for n in range(fesom_n2d_lr):
+        node_id = node_columns_lr[n,0] - 1
+        for k in range(1, max_num_layers_lr):
+            if node_columns_lr[n,k] == -999:
+                # Reached the bottom
+                break
+            node_id = node_columns_lr[n,k] - 1
+        # Save the depth of the last valid node in the column
+        bottom_depth_lr[n] = node_depth_lr[node_id]
+    print 'Reading data'
+    # Read full 3D field for both u and v
+    id = Dataset(fesom_file_lr, 'r')
+    node_ur_3d_lr = id.variables['u'][0,:]
+    node_vr_3d_lr = id.variables['v'][0,:]
+    id.close()
+    print 'Vertically averaging velocity'
+    # Vertically average
+    node_ur_lr = zeros(fesom_n2d_lr)
+    node_vr_lr = zeros(fesom_n2d_lr)
+    for n in range(fesom_n2d_lr):
+        # Integrate udz, vdz, and dz over this water column
+        udz_col = 0
+        vdz_col = 0
+        dz_col = 0
+        for k in range(max_num_layers_lr-1):
+            if node_columns_lr[n,k+1] == -999:
+                # Reached the bottom
+                break
+            # Trapezoidal rule
+            top_id = node_columns_lr[n,k]
+            bot_id = node_columns_lr[n,k+1]
+            dz_tmp = node_depth_lr[bot_id-1] - node_depth_lr[top_id-1]
+            udz_col += 0.5*(node_ur_3d_lr[top_id-1]+node_ur_3d_lr[bot_id-1])*dz_tmp
+            vdz_col += 0.5*(node_vr_3d_lr[top_id-1]+node_vr_3d_lr[bot_id-1])*dz_tmp
+            dz_col += dz_tmp
+        # Convert from integrals to averages
+        node_ur_lr[n] = udz_col/dz_col
+        node_vr_lr[n] = vdz_col/dz_col
+    # Unrotate
+    node_u_lr, node_v_lr = unrotate_vector(rlon_lr, rlat_lr, node_ur_lr, node_vr_lr)
+    # Calculate speed
+    node_speed_lr = sqrt(node_u_lr**2 + node_v_lr**2)
+    print 'Selecting slope current'
+    for n in range(fesom_n2d_lr):
+        # Check if we care about this node
+        if fesom_lat_lr[n] <= lat0 and bottom_depth_lr[n] <= h0 and not fesom_cavity_lr[n]:
+            # Find longitude bin
+            lon_index = nonzero(lon_bins > fesom_lon_lr[n])[0][0] - 1
+            # Update slope current speed in this bin if needed
+            if node_speed_lr[n] > current_fesom_lr[lon_index]:
+                current_fesom_lr[lon_index] = node_speed_lr[n]
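+    # Note: bins containing no qualifying shelf nodes keep their initial
+    # value of zero speed. An optional sanity check (not in the original
+    # script) is to count the filled bins, e.g.
+    # print str(count_nonzero(current_fesom_lr)) + ' of ' + str(num_bins) + ' bins filled'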
+
+    print 'Processing high-res FESOM'
+
+    print 'Building mesh'
+    fesom_cavity_hr = []
+    f = open(fesom_mesh_path_hr + 'cavity_flag_nod2d.out', 'r')
+    for line in f:
+        tmp = int(line)
+        if tmp == 1:
+            fesom_cavity_hr.append(True)
+        elif tmp == 0:
+            fesom_cavity_hr.append(False)
+        else:
+            print 'Problem'
+    f.close()
+    fesom_n2d_hr = len(fesom_cavity_hr)
+    f = open(fesom_mesh_path_hr + 'nod3d.out', 'r')
+    f.readline()
+    rlon_hr = []
+    rlat_hr = []
+    node_depth_hr = []
+    for line in f:
+        tmp = line.split()
+        lon_tmp = float(tmp[1])
+        lat_tmp = float(tmp[2])
+        node_depth_tmp = -1*float(tmp[3])
+        if lon_tmp < -180:
+            lon_tmp += 360
+        elif lon_tmp > 180:
+            lon_tmp -= 360
+        rlon_hr.append(lon_tmp)
+        rlat_hr.append(lat_tmp)
+        node_depth_hr.append(node_depth_tmp)
+    f.close()
+    rlon_hr = array(rlon_hr[0:fesom_n2d_hr])
+    rlat_hr = array(rlat_hr[0:fesom_n2d_hr])
+    node_depth_hr = array(node_depth_hr)
+    fesom_lon_hr, fesom_lat_hr = unrotate_grid(rlon_hr, rlat_hr)
+    f = open(fesom_mesh_path_hr + 'aux3d.out', 'r')
+    max_num_layers_hr = int(f.readline())
+    node_columns_hr = zeros([fesom_n2d_hr, max_num_layers_hr])
+    for n in range(fesom_n2d_hr):
+        for k in range(max_num_layers_hr):
+            node_columns_hr[n,k] = int(f.readline())
+    node_columns_hr = node_columns_hr.astype(int)
+    f.close()
+    bottom_depth_hr = zeros(fesom_n2d_hr)
+    for n in range(fesom_n2d_hr):
+        node_id = node_columns_hr[n,0] - 1
+        for k in range(1, max_num_layers_hr):
+            if node_columns_hr[n,k] == -999:
+                break
+            node_id = node_columns_hr[n,k] - 1
+        # Save the depth of the last valid node in the column
+        bottom_depth_hr[n] = node_depth_hr[node_id]
+    print 'Reading data'
+    id = Dataset(fesom_file_hr, 'r')
+    node_ur_3d_hr = id.variables['u'][0,:]
+    node_vr_3d_hr = id.variables['v'][0,:]
+    id.close()
+    print 'Vertically averaging velocity'
+    node_ur_hr = zeros(fesom_n2d_hr)
+    node_vr_hr = zeros(fesom_n2d_hr)
+    for n in range(fesom_n2d_hr):
+        udz_col = 0
+        vdz_col = 0
+        dz_col = 0
+        for k in range(max_num_layers_hr-1):
+            if node_columns_hr[n,k+1] == -999:
+                break
+            top_id = node_columns_hr[n,k]
+            bot_id = node_columns_hr[n,k+1]
+            dz_tmp = node_depth_hr[bot_id-1] - node_depth_hr[top_id-1]
+            udz_col += 0.5*(node_ur_3d_hr[top_id-1]+node_ur_3d_hr[bot_id-1])*dz_tmp
+            vdz_col += 0.5*(node_vr_3d_hr[top_id-1]+node_vr_3d_hr[bot_id-1])*dz_tmp
+            dz_col += dz_tmp
+        node_ur_hr[n] = udz_col/dz_col
+        node_vr_hr[n] = vdz_col/dz_col
+    node_u_hr, node_v_hr = unrotate_vector(rlon_hr, rlat_hr, node_ur_hr, node_vr_hr)
+    node_speed_hr = sqrt(node_u_hr**2 + node_v_hr**2)
+    print 'Selecting slope current'
+    for n in range(fesom_n2d_hr):
+        if fesom_lat_hr[n] <= lat0 and bottom_depth_hr[n] <= h0 and not fesom_cavity_hr[n]:
+            lon_index = nonzero(lon_bins > fesom_lon_hr[n])[0][0] - 1
+            if node_speed_hr[n] > current_fesom_hr[lon_index]:
+                current_fesom_hr[lon_index] = node_speed_hr[n]
+
+    print 'Plotting'
+    fig = figure(figsize=(12,8))
+    plot(lon_centres, current_roms, color='blue', label='MetROMS')
+    plot(lon_centres, current_fesom_lr, color='green', label='FESOM low-res')
+    plot(lon_centres, current_fesom_hr, color='magenta', label='FESOM high-res')
+    grid(True)
+    title('Slope current speed', fontsize=20)
+    xlabel('Longitude', fontsize=14)
+    ylabel('m/s', fontsize=14)
+    xlim([-180, 180])
+    legend()
+    fig.savefig('slope_current.png')
+
+    print 'Mean slope current in MetROMS: ' + str(mean(current_roms)) + ' m/s'
+    print 'Mean slope current in low-res FESOM: ' + str(mean(current_fesom_lr)) + ' m/s'
+    print 'Mean slope current in high-res FESOM: ' + str(mean(current_fesom_hr)) + ' m/s'
+
+
+# Command-line interface
+if __name__ == "__main__":
+
+    slope_current()
+
+
diff --git a/total_iceshelf_area.py b/total_iceshelf_area.py
index 103f6fc..59a4458 100644
--- a/total_iceshelf_area.py
+++ 
b/total_iceshelf_area.py @@ -9,12 +9,12 @@ def total_iceshelf_area (roms_grid_file, fesom_mesh_path_lr, fesom_mesh_path_hr): - id = Dataset(file_path, 'r') + id = Dataset(roms_grid_file, 'r') lon = id.variables['lon_rho'][:-15,1:-1] lat = id.variables['lat_rho'][:-15,1:-1] zice = id.variables['zice'][:-15,1:-1] id.close() - dx, dy = cartesian_grid(lon, lat) + dx, dy = cartesian_grid_2d(lon, lat) dA = ma.masked_where(zice==0, dx*dy) print 'MetROMS: ' + str(sum(dA)) + ' m^2'