From dbf9882a470d46ba92752be9799528e9fcc5eafe Mon Sep 17 00:00:00 2001 From: Kaitlin Alexander Date: Thu, 15 Sep 2016 10:30:46 +1000 Subject: [PATCH] Fixed vector rotation issue, fixed some grid issues, and a bunch of new figure scripts. --- add_iceberg_melt.py | 6 +- add_tide_period.py | 30 +++ aice_animation.py | 6 +- bw_diff_plot.py | 99 ++++++++++ bwsalt_plot.py | 2 +- cice_grid.py | 29 ++- cice_strx.py | 74 ++++++++ cice_stry.py | 74 ++++++++ cice_transport.py | 47 +++++ circumpolar_cice_plot.py | 29 ++- circumpolar_plot.py | 108 ++++++----- dpt_2d.py | 31 ++-- dpt_2d_int.py | 28 +-- dpt_timeseries.py | 51 +++--- eraint_rotate_winds.py | 41 +++++ file_guide.txt | 253 ++++++++++++++++---------- grid_res.py | 69 +++++++ ismr_map.py | 12 +- make_zonal_slices.sh | 13 ++ massloss_map.py | 12 +- mld_plot.py | 80 ++++++++ nbdry_grid_hack.py | 30 ++- overturning_plot.py | 35 ++-- plot_kinetic_energy.py | 2 +- remove_cavities.py | 48 +++++ romscice_ini_ecco.py | 130 +++++++++---- rotate_vector_cice.py | 15 ++ rotate_vector_roms.py | 31 ++++ sose_roms_seasonal.py | 2 +- spinup_plots.py | 139 +++++++++----- massloss.py => timeseries_massloss.py | 88 ++++----- uv_vectorplot.py | 33 ++-- uvp_grid_fix.py | 92 ++++++++++ zonal_plot.py | 90 +++++---- 34 files changed, 1386 insertions(+), 443 deletions(-) create mode 100644 add_tide_period.py create mode 100644 bw_diff_plot.py create mode 100644 cice_strx.py create mode 100644 cice_stry.py create mode 100644 cice_transport.py create mode 100644 eraint_rotate_winds.py create mode 100644 grid_res.py create mode 100755 make_zonal_slices.sh create mode 100644 mld_plot.py create mode 100644 remove_cavities.py create mode 100644 rotate_vector_cice.py create mode 100644 rotate_vector_roms.py rename massloss.py => timeseries_massloss.py (63%) create mode 100644 uvp_grid_fix.py diff --git a/add_iceberg_melt.py b/add_iceberg_melt.py index 57b9b7c..5bf5537 100644 --- a/add_iceberg_melt.py +++ b/add_iceberg_melt.py @@ -12,10 +12,10 @@ def add_iceberg_melt (file): # Naming conventions for iceberg files - iceberg_head = '/short/m68/kaa561/ROMS-CICE-MCT/data/MartinAdcroft2010_iceberg_meltfluxes/icebergs.1861-1960.' + iceberg_head = '/short/m68/kaa561/ROMS-CICE-MCT/data/originals/MartinAdcroft2010_iceberg_meltfluxes/icebergs.1861-1960.' iceberg_tail = '.melt.nc' # Iceberg grid file - iceberg_grid = '/short/m68/kaa561/ROMS-CICE-MCT/data/MartinAdcroft2010_iceberg_meltfluxes/icebergs.static.nc' + iceberg_grid = '/short/m68/kaa561/ROMS-CICE-MCT/data/originals/MartinAdcroft2010_iceberg_meltfluxes/icebergs.static.nc' # ROMS grid file roms_grid ='/short/m68/kaa561/ROMS-CICE-MCT/apps/common/grid/circ30S_quarterdegree_10m.nc' # Density of freshwater @@ -58,7 +58,7 @@ def add_iceberg_melt (file): melt_roms = melt_roms/rho_w*seconds_per_12h # Add to precipitation field for this month id = Dataset(file, 'a') - id.variables['rain'][month,:,:] = id.variables['rain'][month,:,:] - melt_roms + id.variables['rain'][month,:,:] = id.variables['rain'][month,:,:] + melt_roms id.close() diff --git a/add_tide_period.py b/add_tide_period.py new file mode 100644 index 0000000..7c47048 --- /dev/null +++ b/add_tide_period.py @@ -0,0 +1,30 @@ +from netCDF4 import Dataset +from numpy import * + +# Given a ROMS tide file with tidal potential amplitude and phase angle +# (created using Kate's potential tide scripts), add the tidal period in seconds +# for each component. 
+def add_tide_period (): + + # Tide file to add periods to + # Order is q1, o1, p1, k1, n2, m2, s2, k2 + tide_file = '../ROMS-CICE-MCT/data/pot_tides.nc' + # Periods in seconds + # Order is m2, s2, n2, k2, k1, o1, p1, q1, mf, mm + period = array([44714.165191868, 43200.0012869521, 86164.0770050671, 92949.6357005365, 45570.0535117177, 86637.1997716528, 43082.0503185947, 96726.0857029666, 2380715.86358729, 1180295.54554976]) + # Put them in the right order + period_reorder = array([period[7], period[5], period[6], period[4], period[2], period[0], period[1], period[3]]) + + id = Dataset(tide_file, 'a') + id.createVariable('tide_period', 'f8', ('tide_period')) + id.variables['tide_period'].long_name = 'tide angular period' + id.variables['tide_period'].units = 'seconds' + id.variables['tide_period'][:] = period_reorder + id.close() + + +# Command-line interface +if __name__ == "__main__": + + add_tide_period() + diff --git a/aice_animation.py b/aice_animation.py index a3572be..f47f879 100644 --- a/aice_animation.py +++ b/aice_animation.py @@ -13,9 +13,9 @@ # Directory containing CICE output files directory = '/short/y99/kaa561/roms_spinup_newest/cice/' # Number of time indices in each file -num_ts = [504] +num_ts = [270, 252, 252, 252, 252] # File number to start with for the animation (1-based) -start_file = 1 +start_file = 5 # Degrees to radians conversion factor deg2rad = pi/180 # Names of each month for making titles @@ -65,6 +65,6 @@ def animate(i): return img # Animate once every time index from start_file to the last file -anim = FuncAnimation(fig, func=animate, frames=range(431,504)) #sum(array(num_ts[start_file-1:]))) +anim = FuncAnimation(fig, func=animate, frames=range(179,252)) #sum(array(num_ts[start_file-1:]))) # Save as an mp4 with one frame per second anim.save('aice.mp4', fps=1) diff --git a/bw_diff_plot.py b/bw_diff_plot.py new file mode 100644 index 0000000..6b078bd --- /dev/null +++ b/bw_diff_plot.py @@ -0,0 +1,99 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * + +def bw_diff_plot (file_path, var_name, colour_bounds=None, save=False, fig_name=None): + + init_path = '../ROMS-CICE-MCT/data/woa_ini.nc' + deg2rad = pi/180 + lon_c = 50 + lat_c = -83 + radius = 10.1 + + id = Dataset(init_path, 'r') + data1 = id.variables[var_name][0,0,:-15,:-2] + id.close() + + id = Dataset(file_path, 'r') + lon = id.variables['lon_rho'][:-15,:-2] + lat = id.variables['lat_rho'][:-15,:-2] + data2 = id.variables[var_name][-1,0,:-15,:-2] + id.close() + + data_diff = data2 - data1 + + # Convert grid to spherical coordinates + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + # Find centre in spherical coordinates + x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2) + y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2) + # Build a regular x-y grid and select the missing circle + x_reg, y_reg = meshgrid(linspace(amin(x), amax(x), num=1000), linspace(amin(y), amax(y), num=1000)) + land_circle = zeros(shape(x_reg)) + land_circle = ma.masked_where(sqrt((x_reg-x_c)**2 + (y_reg-y_c)**2) > radius, land_circle) + + if colour_bounds is not None: + # User has set bounds on colour scale + lev = linspace(colour_bounds[0], colour_bounds[1], num=50) + if colour_bounds[0] == -colour_bounds[1]: + # Bounds are centered on zero, so choose a blue-to-red colourmap + # centered on yellow + colour_map = 'RdYlBu_r' + else: + colour_map = 'jet' + else: + max_val = amax(abs(data_diff)) + lev = linspace(-max_val, max_val, num=50) + colour_map = 'RdYlBu_r' + + # Plot + fig = 
figure(figsize=(16,12)) + ax = fig.add_subplot(1,1,1,aspect='equal') + fig.patch.set_facecolor('white') + # First shade everything in grey + contourf(x, y, ones(shape(data_diff)), 1, colors=(('0.6', '0.6', '0.6'))) + # Fill in the missing circle + contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) + # Now shade the temperature (land mask will remain grey) + contourf(x, y, data_diff, lev, cmap=colour_map, extend='both') + cbar = colorbar() + cbar.ax.tick_params(labelsize=20) + if var_name == 'temp': + title(r'Difference in bottom water temperature since initialisation ($^{\circ}$C)', fontsize=30) + elif var_name == 'salt': + title(r'Difference in bottom water salinity since initialisation (psu)', fontsize=30) + axis('off') + + # Finished + if save: + fig.savefig(fig_name) + else: + fig.show() + + +if __name__ == "__main__": + + file_path = raw_input("Path to ocean history/averages file: ") + tmp = raw_input("Temperature (t) or salinity (s)? ") + if tmp == 't': + var_name = 'temp' + elif tmp == 's': + var_name = 'salt' + colour_bounds = None + get_bounds = raw_input("Set bounds on colour scale (y/n)? ") + if get_bounds == 'y': + lower_bound = float(raw_input("Lower bound: ")) + upper_bound = float(raw_input("Upper bound: ")) + colour_bounds = [lower_bound, upper_bound] + action = raw_input("Save figure (s) or display on screen (d)? ") + if action == 's': + save = True + fig_name = raw_input("Filename for figure: ") + else: + save = False + fig_name = None + + bw_diff_plot(file_path, var_name, colour_bounds, save, fig_name) + + diff --git a/bwsalt_plot.py b/bwsalt_plot.py index dd14452..23d5e85 100644 --- a/bwsalt_plot.py +++ b/bwsalt_plot.py @@ -58,7 +58,7 @@ def bwsalt_plot (file_path, save=False, fig_name=None): contourf(x, y, bwsalt, lev, cmap='jet', extend='both') cbar = colorbar(ticks=arange(min_scale, max_scale+0.2, 0.2)) cbar.ax.tick_params(labelsize=20) - title(r'Bottom water salintiy (psu), annual average', fontsize=30) + title(r'Bottom water salinity (psu), annual average', fontsize=30) axis('off') # Finished diff --git a/cice_grid.py b/cice_grid.py index f5ad176..871f51c 100644 --- a/cice_grid.py +++ b/cice_grid.py @@ -15,15 +15,38 @@ def cice_grid (roms_grid_name, cice_grid_name, cice_kmt_name): cice_kmt = Dataset(cice_kmt_name, 'w') # Read variables - ulon = roms.variables['lon_rho'][:,:] - ulat = roms.variables['lat_rho'][:,:] - angle = roms.variables['angle'][:] + ulon_tmp = roms.variables['lon_psi'][:,:] + ulat_tmp = roms.variables['lat_psi'][:,:] + # Convert angle to degrees + angle = roms.variables['angle'][:]*180/pi # Mask out ice shelf cavities for sea ice kmt = roms.variables['mask_rho'][:,:] - roms.variables['mask_zice'][:,:] i = arange(angle.shape[1]) j = arange(angle.shape[0]) + # Get one more index in each dimension for lat and lon; linearly extend + ulon = zeros([size(j), size(i)]) + ulon[:-1,:-1] = ulon_tmp + # Longitude is a bit tricky because of the mod 360 + for jj in range(size(j)): + ulon_1back = ulon[jj,-2] + ulon_2back = ulon[jj,-3] + if ulon_2back - ulon_1back > 300: + ulon_2back -= 360 + ulon[jj,-1] = 2*ulon_1back - ulon_2back + for ii in range(size(i)): + ulon_1back = ulon[-2,ii] + ulon_2back = ulon[-3,ii] + if ulon_2back - ulon_1back < -300: + ulon_2back += 360 + ulon[-1,ii] = 2*ulon_1back - ulon_2back + ulat = zeros([size(j), size(i)]) + ulat[:-1,:-1] = ulat_tmp + # Latitude is easy + ulat[-1,:] = 2*ulat[-2,:] - ulat[-3,:] + ulat[:,-1] = 2*ulat[:,-2] - ulat[:,-3] + # Write variables cice_grid.createDimension('i', size(i)) 
     cice_grid.createDimension('j', size(j))
diff --git a/cice_strx.py b/cice_strx.py
new file mode 100644
index 0000000..b637792
--- /dev/null
+++ b/cice_strx.py
@@ -0,0 +1,74 @@
+from netCDF4 import Dataset
+from numpy import *
+from matplotlib.pyplot import *
+from rotate_vector_cice import *
+
+def cice_strx (file_path, tstep, colour_bounds=None, save=False, fig_name=None):
+
+    deg2rad = pi/180
+
+    id = Dataset(file_path, 'r')
+    lon = id.variables['ULON'][:-15,:]
+    lat = id.variables['ULAT'][:-15,:]
+    angle = id.variables['ANGLET'][:-15,:]
+    strx = id.variables['strairx'][tstep-1,:-15,:] + id.variables['strocnx'][tstep-1,:-15,:] + id.variables['strtltx'][tstep-1,:-15,:] + id.variables['strcorx'][tstep-1,:-15,:] + id.variables['strintx'][tstep-1,:-15,:]
+    stry = id.variables['strairy'][tstep-1,:-15,:] + id.variables['strocny'][tstep-1,:-15,:] + id.variables['strtlty'][tstep-1,:-15,:] + id.variables['strcory'][tstep-1,:-15,:] + id.variables['strinty'][tstep-1,:-15,:]
+    id.close()
+
+    strx_lonlat, stry_lonlat = rotate_vector_cice(strx, stry, angle)
+
+    x = -(lat+90)*cos(lon*deg2rad+pi/2)
+    y = (lat+90)*sin(lon*deg2rad+pi/2)
+
+    if colour_bounds is not None:
+        # User has set bounds on colour scale
+        lev = linspace(colour_bounds[0], colour_bounds[1], num=40)
+        if colour_bounds[0] == -colour_bounds[1]:
+            # Bounds are centered on zero, so choose a blue-to-red colourmap
+            # centered on yellow
+            colour_map = 'RdYlBu_r'
+        else:
+            colour_map = 'jet'
+    else:
+        max_val = amax(abs(strx_lonlat))
+        lev = linspace(-max_val, max_val, num=40)
+        colour_map = 'RdYlBu_r'
+
+    fig = figure(figsize=(16,12))
+    fig.add_subplot(1,1,1, aspect='equal')
+    contourf(x, y, strx_lonlat, lev, cmap=colour_map, extend='both')
+    cbar = colorbar()
+    cbar.ax.tick_params(labelsize=20)
+    title('Sum of eastward stresses N/m^2', fontsize=30)
+    axis('off')
+
+    if save:
+        fig.savefig(fig_name)
+    else:
+        fig.show()
+
+
+if __name__ == "__main__":
+
+    file_path = raw_input("Path to CICE history file: ")
+    tstep = int(raw_input("Timestep number (starting at 1): "))
+
+    # Get colour bounds if necessary
+    colour_bounds = None
+    get_bounds = raw_input("Set bounds on colour scale (y/n)? ")
+    if get_bounds == 'y':
+        lower_bound = float(raw_input("Lower bound: "))
+        upper_bound = float(raw_input("Upper bound: "))
+        colour_bounds = [lower_bound, upper_bound]
+
+    action = raw_input("Save figure (s) or display in window (d)? 
") + if action == 's': + save = True + fig_name = raw_input("File name for figure: ") + elif action == 'd': + save = False + fig_name = None + + cice_strx(file_path, tstep, colour_bounds, save, fig_name) + + diff --git a/cice_stry.py b/cice_stry.py new file mode 100644 index 0000000..9d297f6 --- /dev/null +++ b/cice_stry.py @@ -0,0 +1,74 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from rotate_vector_cice import * + +def cice_stry (file_path, tstep, colour_bounds=None, save=False, fig_name=None): + + deg2rad = pi/180 + + id = Dataset(file_path, 'r') + lon = id.variables['ULON'][:-15,:] + lat = id.variables['ULAT'][:-15,:] + angle = id.variables['ANGLET'][:-15,:] + strx = id.variables['strairx'][tstep-1,:-15,:] + id.variables['strocnx'][tstep-1,:-15,:] + id.variables['strtltx'][tstep-1,:-15,:] + id.variables['strcorx'][tstep-1,:-15,:] + id.variables['strintx'][tstep-1,:-15,:] + stry = id.variables['strairy'][tstep-1,:-15,:] + id.variables['strocny'][tstep-1,:-15,:] + id.variables['strtlty'][tstep-1,:-15,:] + id.variables['strcory'][tstep-1,:-15,:] + id.variables['strinty'][tstep-1,:-15,:] + id.close() + + strx_lonlat, stry_lonlat = rotate_vector_cice(strx, stry, angle) + + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + + if colour_bounds is not None: + # User has set bounds on colour scale + lev = linspace(colour_bounds[0], colour_bounds[1], num=40) + if colour_bounds[0] == -colour_bounds[1]: + # Bounds are centered on zero, so choose a blue-to-red colourmap + # centered on yellow + colour_map = 'RdYlBu_r' + else: + colour_map = 'jet' + else: + max_val = amax(abs(stry_lonlat)) + lev = linspace(-max_val, max_val, num=40) + colour_map = 'RdYlBu_r' + + fig = figure(figsize=(16,12)) + fig.add_subplot(1,1,1, aspect='equal') + contourf(x, y, stry_lonlat, lev, cmap=colour_map, extend='both') + cbar = colorbar() + cbar.ax.tick_params(labelsize=20) + title('Sum of northward stresses N/m^2', fontsize=30) + axis('off') + + if save: + fig.savefig(fig_name) + else: + fig.show() + + +if __name__ == "__main__": + + file_path = raw_input("Path to CICE history file: ") + tstep = int(raw_input("Timestep number (starting at 1): ")) + + # Get colour bounds if necessary + colour_bounds = None + get_bounds = raw_input("Set bounds on colour scale (y/n)? ") + if get_bounds == 'y': + lower_bound = float(raw_input("Lower bound: ")) + upper_bound = float(raw_input("Upper bound: ")) + colour_bounds = [lower_bound, upper_bound] + + action = raw_input("Save figure (s) or display in window (d)? 
") + if action == 's': + save = True + fig_name = raw_input("File name for figure: ") + elif action == 'd': + save = False + fig_name = None + + cice_stry(file_path, tstep, colour_bounds, save, fig_name) + + diff --git a/cice_transport.py b/cice_transport.py new file mode 100644 index 0000000..a885613 --- /dev/null +++ b/cice_transport.py @@ -0,0 +1,47 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * + +def cice_transport (file_path, save=False, fig_name=None): + + deg2rad = pi/180 + + id = Dataset(file_path,'r') + dice = mean(id.variables['frazil'][-73:,:-15,:] - id.variables['meltb'][-73:,:-15,:], axis=0) + lon = id.variables['ULON'][:-15,:] + lat = id.variables['ULAT'][:-15,:] + id.close() + + # Convert to spherical coordinates + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + + lev = linspace(-1, 1, num=40) + + # Plot + fig = figure(figsize=(16,12)) + fig.add_subplot(1,1,1, aspect='equal') + contourf(x, y, dice, lev, cmap='RdYlBu_r', extend='both') + cbar = colorbar() + cbar.ax.tick_params(labelsize=20) + title('Freezing minus melting (cm/day), annually averaged', fontsize=30) + axis('off') + + if save: + fig.savefig(fig_name) + else: + fig.show() + + +if __name__ == "__main__": + + file_path = raw_input("Path to CICE history file, containing at least one year of 5-day averages: ") + action = raw_input("Save figure (s) or display in window (d)? ") + if action == 's': + save = True + fig_name = raw_input("File name for figure: ") + elif action == 'd': + save = False + fig_name = None + cice_transport(file_path, save, fig_name) + diff --git a/circumpolar_cice_plot.py b/circumpolar_cice_plot.py index b0e3e50..3a9612c 100644 --- a/circumpolar_cice_plot.py +++ b/circumpolar_cice_plot.py @@ -1,6 +1,7 @@ from netCDF4 import Dataset from numpy import * from matplotlib.pyplot import * +from rotate_vector_cice import * # Make a circumpolar Antarctic plot of the given (horizontal) variable from CICE. 
# Input: @@ -17,13 +18,37 @@ def circumpolar_cice_plot (file_path, var_name, tstep, colour_bounds=None, save= deg2rad = pi/180 - # Read the variable and figure out which grid it's on + # Read the variable id = Dataset(file_path, 'r') data = id.variables[var_name][tstep-1,:-15,:] if var_name == 'aice': units = 'fraction' else: units = id.variables[var_name].units + + # Check for vector variables that need to be rotated + if var_name in ['uvel', 'vvel', 'uatm', 'vatm', 'uocn', 'vocn', 'strairx', 'strairy', 'strtltx', 'strtlty', 'strcorx', 'strcory', 'strocnx', 'strocny', 'strintx', 'strinty']: + angle = id.variables['ANGLET'][:-15,:] + if var_name in ['uvel', 'uatm', 'uocn', 'strairx', 'strtltx', 'strcorx', 'strocnx', 'strintx']: + # u-variable + u_data = data[:,:] + if var_name[0] == 'u': + v_data = id.variables[var_name.replace('u','v')][tstep-1,:-15,:] + else: + v_data = id.variables[var_name.replace('x','y')][tstep-1,:-15,:] + u_data_lonlat, v_data_lonlat = rotate_vector_cice(u_data, v_data, angle) + data = u_data_lonlat + elif var_name in ['vvel', 'vatm', 'vocn', 'strairy', 'strtlty', 'strcory', 'strocny', 'strinty']: + # v-variable + v_data = data[:,:] + if var_name[0] == 'v': + u_data = id.variables[var_name.replace('v','u',1)][tstep-1,:-15,:] + else: + u_data = id.variables[var_name.replace('y','x')][tstep-1,:-15,:] + u_data_lonlat, v_data_lonlat = rotate_vector_cice(u_data, v_data, angle) + data = v_data_lonlat + + # Figure out which grid we're on grid_string = id.variables[var_name].coordinates if grid_string.startswith('ULON'): grid_name = 'u' @@ -58,7 +83,7 @@ def circumpolar_cice_plot (file_path, var_name, tstep, colour_bounds=None, save= colour_map = 'jet' else: # Determine bounds automatically - if var_name in ['uvel', 'vvel', 'uatm', 'vatm', 'uocn', 'vocn', 'frzmlt', 'fresh_ai', 'fsalt_ai', 'fhocn_ai', 'strairx', 'strairy', 'strtltx', 'strtlty', 'strcorx', 'strcory', 'strocnx', 'strocny', 'strintx', 'strinty']: + if var_name in ['uvel', 'vvel', 'uatm', 'vatm', 'uocn', 'vocn', 'fresh_ai', 'fsalt_ai', 'fhocn_ai', 'strairx', 'strairy', 'strtltx', 'strtlty', 'strcorx', 'strcory', 'strocnx', 'strocny', 'strintx', 'strinty']: # Center levels on 0 for certain variables, with a blue-to-red # colourmap max_val = amax(abs(data)) diff --git a/circumpolar_plot.py b/circumpolar_plot.py index 41ff2f9..2b38f2d 100644 --- a/circumpolar_plot.py +++ b/circumpolar_plot.py @@ -2,6 +2,7 @@ from numpy import * from matplotlib.pyplot import * from cartesian_grid_3d import * +from rotate_vector_roms import * # Make a circumpolar Antarctic plot of the given (horizontal) ROMS variable. 
# Input: @@ -23,7 +24,8 @@ # save = optional boolean flag indicating that the plot should be saved to a # file rather than displayed on the screen # fig_name = if save=True, filename for figure -def circumpolar_plot (file_path, var_name, tstep, depth_key, depth, depth_bounds, colour_bounds=None, save=False, fig_name=None): +# grid_path = path to grid file; only needed if var_name is a vector component +def circumpolar_plot (file_path, var_name, tstep, depth_key, depth, depth_bounds, colour_bounds=None, save=False, fig_name=None, grid_path=None): # Grid parameters theta_s = 0.9 @@ -52,59 +54,58 @@ def circumpolar_plot (file_path, var_name, tstep, depth_key, depth, depth_bounds units = id.variables[var_name].units long_name = id.variables[var_name].long_name - # Figure out what grid the variable is on - grid_string = id.variables[var_name].coordinates - if grid_string.startswith('lon_rho'): - grid_name = 'rho' - lon_name = 'lon_rho' - lat_name = 'lat_rho' - elif grid_string.startswith('lon_u'): - grid_name = 'u' - lon_name = 'lon_u' - lat_name = 'lat_u' - elif grid_string.startswith('lon_v'): - grid_name = 'v' - lon_name = 'lon_v' - lat_name = 'lat_v' - else: - print 'Grid type ' + grid_string + ' not supported' - id.close() - return - + # Check for vector variables that need to be rotated + if var_name in ['ubar', 'vbar', 'u', 'v', 'sustr', 'svstr', 'bustr', 'bvstr']: + grid_id = Dataset(grid_path, 'r') + angle = grid_id.variables['angle'][:-15,:] + grid_id.close() + if var_name in ['ubar', 'sustr', 'bustr']: + # 2D u-variable + u_data = data[:,:] + v_data = id.variables[var_name.replace('u','v')][tstep-1,:-15,:] + u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle) + data = u_data_lonlat + elif var_name in ['vbar', 'svstr', 'bvstr']: + # 2D v-variable + v_data = data[:,:] + u_data = id.variables[var_name.replace('v','u')][tstep-1,:-15,:] + u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle) + data = v_data_lonlat + elif var_name in ['u']: + # 3D u-variable + data_full_ugrid = data_full[:,:,:] + data_full = ma.empty([data_full_ugrid.shape[0],data_full_ugrid.shape[1],data_full_ugrid.shape[2]+1]) + for k in range(N): + u_data = data_full_ugrid[k,:,:] + v_data = id.variables[var_name.replace('u','v')][tstep-1,k,:-15,:] + u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle) + data_full[k,:,:] = u_data_lonlat + elif var_name in ['v']: + # 3D v-variable + data_full_vgrid = data_full[:,:,:] + data_full = ma.empty([data_full_vgrid.shape[0],data_full_vgrid.shape[1]+1,data_full_vgrid.shape[2]]) + for k in range(N): + v_data = data_full_vgrid[k,:,:] + u_data = id.variables[var_name.replace('v','u')][tstep-1,k,:-15,:] + u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle) + data_full[k,:,:] = v_data_lonlat + # Read grid variables h = id.variables['h'][:-15,:] zice = id.variables['zice'][:-15,:] - # h and zice are on the rho-grid; interpolate if necessary - if grid_name == 'u': - h = 0.5*(h[:,0:-1] + h[:,1:]) - zice = 0.5*(zice[:,0:-1] + zice[:,1:]) - elif grid_name == 'v': - h = 0.5*(h[0:-1,:] + h[1:,:]) - zice = 0.5*(zice[0:-1,:] + zice[1:,:]) - # Read the correct lat and lon for this grid - lon = id.variables[lon_name][:-15,:] - lat = id.variables[lat_name][:-15,:] + lon = id.variables['lon_rho'][:-15,:] + lat = id.variables['lat_rho'][:-15,:] id.close() # Throw away the overlapping periodic boundary - if grid_name == 'u': - if choose_depth: - data_full = data_full[:,:,:-1] - else: - data = data[:,:-1] - lon = 
lon[:,:-1] - lat = lat[:,:-1] - h = h[:,:-1] - zice = zice[:,:-1] + if choose_depth: + data_full = data_full[:,:,:-2] else: - if choose_depth: - data_full = data_full[:,:,:-2] - else: - data = data[:,:-2] - lon = lon[:,:-2] - lat = lat[:,:-2] - h = h[:,:-2] - zice = zice[:,:-2] + data = data[:,:-2] + lon = lon[:,:-2] + lat = lat[:,:-2] + h = h[:,:-2] + zice = zice[:,:-2] # Convert to spherical coordinates x = -(lat+90)*cos(lon*deg2rad+pi/2) @@ -354,6 +355,12 @@ def average_btw_depths (data_3d, z_3d, dz_3d, z_bounds): depth_bounds = None id.close() + if var_name in ['ubar', 'vbar', 'u', 'v', 'sustr', 'svstr', 'bustr', 'bvstr']: + # Will need the grid file to get the angle + grid_path = raw_input("Path to ROMS grid file: ") + else: + grid_path = None + # Get index of time axis in ROMS history/averages file tstep = int(raw_input("Timestep number (starting at 1): ")) @@ -375,7 +382,7 @@ def average_btw_depths (data_3d, z_3d, dz_3d, z_bounds): fig_name = None # Make the plot - circumpolar_plot(file_path, var_name, tstep, depth_key, depth, depth_bounds, colour_bounds, save, fig_name) + circumpolar_plot(file_path, var_name, tstep, depth_key, depth, depth_bounds, colour_bounds, save, fig_name, grid_path) # Repeat until the user wants to exit while True: @@ -431,6 +438,9 @@ def average_btw_depths (data_3d, z_3d, dz_3d, z_bounds): depth = NaN depth_bounds = None id.close() + if var_name in ['ubar', 'vbar', 'u', 'v', 'sustr', 'svstr', 'bustr', 'bvstr'] and grid_path is None: + # Will need the grid file to get the angle + grid_path = raw_input("Path to ROMS grid file: ") elif int(changes) == 3: # New depth information depth_type = raw_input("Single depth (s) or vertical average (v)? ") @@ -479,7 +489,7 @@ def average_btw_depths (data_3d, z_3d, dz_3d, z_bounds): fig_name = raw_input("File name for figure: ") # Make the plot - circumpolar_plot(file_path, var_name, tstep, depth_key, depth, depth_bounds, colour_bounds, save, fig_name) + circumpolar_plot(file_path, var_name, tstep, depth_key, depth, depth_bounds, colour_bounds, save, fig_name, grid_path) else: break diff --git a/dpt_2d.py b/dpt_2d.py index f056cea..00ce572 100644 --- a/dpt_2d.py +++ b/dpt_2d.py @@ -1,12 +1,15 @@ from netCDF4 import Dataset from numpy import * from matplotlib.pyplot import * +from rotate_vector_roms import * # Calculates zonal transport through each grid cell in the Drake Passage, # vertically integrates, and makes a contour plot of the 2D (latitude vs time) # result. 
-# Input: file_path = path to ROMS ocean history or averages file -def dpt_2d (file_path): +# Input: +# grid_path = path to ROMS grid file +# file_path = path to ROMS ocean history or averages file +def dpt_2d (grid_path, file_path): # Radius of the Earth in m r = 6.371e6 @@ -27,9 +30,12 @@ def dpt_2d (file_path): j_min = 229 j_max = 298 - print 'Reading grid' - # Read grid variables + # Read angle from the grid file + grid_id = Dataset(grid_path, 'r') + angle = grid_id.variables['angle'][:-15,:] + grid_id.close() + # Read other grid variables id = Dataset(file_path, 'r') h = id.variables['h'][:-15,:-3] zice = id.variables['zice'][:-15,:-3] @@ -66,18 +72,16 @@ def dpt_2d (file_path): for t in range(size(time)): print 'Processing timestep ' + str(t+1) + ' of '+str(size(time)) - # Read ubar and interpolate onto the rho-grid + # Rotate velocities into lat-lon space ubar = id.variables['ubar'][t,:-15,:] - w_bdry_ubar = 0.5*(ubar[:,0] + ubar[:,-1]) - middle_ubar = 0.5*(ubar[:,0:-1] + ubar[:,1:]) - e_bdry_ubar = w_bdry_ubar[:] - ubar_rho = ma.concatenate((w_bdry_ubar[:,None], middle_ubar, e_bdry_ubar[:,None]), axis=1) + vbar = id.variables['vbar'][t,:-15,:] + ubar_lonlat, vbar_lonlat = rotate_vector_roms(ubar, vbar, angle) # Throw away the overlapping periodic boundary - ubar_rho = ubar_rho[:,:-3] + ubar_lonlat = ubar_lonlat[:,:-3] # Trim to Drake Passage bounds - ubar_rho_DP = ubar_rho[j_min:j_max,i_DP] + ubar_DP = ubar_lonlat[j_min:j_max,i_DP] # Calculate transport and convert to Sv - transport[t,:] = ubar_rho_DP*dy_wct_DP*1e-6 + transport[t,:] = ubar_DP*dy_wct_DP*1e-6 id.close() @@ -98,8 +102,9 @@ def dpt_2d (file_path): # Command-line interface if __name__ == "__main__": + grid_path = raw_input('Enter path to ROMS grid file: ') file_path = raw_input('Enter path to ocean history/averages file: ') - dpt_2d(file_path) + dpt_2d(grid_path, file_path) diff --git a/dpt_2d_int.py b/dpt_2d_int.py index 655f3e8..98fa784 100644 --- a/dpt_2d_int.py +++ b/dpt_2d_int.py @@ -1,12 +1,15 @@ from netCDF4 import Dataset from numpy import * from matplotlib.pyplot import * +from rotate_vector_roms import * # Calculates zonal transport through each grid cell in the Drake Passage, # vertically integrates, takes indefinite integral (cumulative sum) over # latitude, and makes a contour plot of the 2D (latitude vs time) result. 
-# Input: file_path = path to ROMS ocean history or averages file -def dpt_2d_int (file_path): +# Input: +# grid_path = path to ROMS grid file +# file_path = path to ROMS ocean history or averages file +def dpt_2d_int (grid_path, file_path): # Radius of the Earth in m r = 6.371e6 @@ -28,6 +31,10 @@ def dpt_2d_int (file_path): j_max = 298 print 'Reading grid' + # Read angle from the grid file + grid_id = Dataset(grid_path, 'r') + angle = grid_id.variables['angle'][:-15,:] + grid_id.close() # Read grid variables id = Dataset(file_path, 'r') h = id.variables['h'][:-15,:-3] @@ -65,18 +72,16 @@ def dpt_2d_int (file_path): for t in range(size(time)): print 'Processing timestep ' + str(t+1) + ' of '+str(size(time)) - # Read ubar and interpolate onto the rho-grid + # Rotate velocities into lat-lon space ubar = id.variables['ubar'][t,:-15,:] - w_bdry_ubar = 0.5*(ubar[:,0] + ubar[:,-1]) - middle_ubar = 0.5*(ubar[:,0:-1] + ubar[:,1:]) - e_bdry_ubar = w_bdry_ubar[:] - ubar_rho = ma.concatenate((w_bdry_ubar[:,None], middle_ubar, e_bdry_ubar[:,None]), axis=1) + vbar = id.variables['vbar'][t,:-15,:] + ubar_lonlat, vbar_lonlat = rotate_vector_roms(ubar, vbar, angle) # Throw away the overlapping periodic boundary - ubar_rho = ubar_rho[:,:-3] + ubar_lonlat = ubar_lonlat[:,:-3] # Trim to Drake Passage bounds - ubar_rho_DP = ubar_rho[j_min:j_max,i_DP] + ubar_DP = ubar_lonlat[j_min:j_max,i_DP] # Calculate transport and convert to Sv - transport[t,:] = ubar_rho_DP*dy_wct_DP*1e-6 + transport[t,:] = ubar_DP*dy_wct_DP*1e-6 id.close() @@ -98,8 +103,9 @@ def dpt_2d_int (file_path): # Command-line interface if __name__ == "__main__": + grid_path = raw_input('Enter path to ROMS grid file: ') file_path = raw_input('Enter path to ocean history/averages file: ') - dpt_2d_int(file_path) + dpt_2d_int(grid_path, file_path) diff --git a/dpt_timeseries.py b/dpt_timeseries.py index 76aab92..1f0e25e 100644 --- a/dpt_timeseries.py +++ b/dpt_timeseries.py @@ -1,19 +1,14 @@ from netCDF4 import Dataset from numpy import * from matplotlib.pyplot import * +from rotate_vector_roms import * # Calculates zonal transport through each grid cell in the Drake Passage # and plots a timeseries of the result. 
# Input: +# grid_path = path to ROMS grid file # file_path = path to ROMS ocean history or averages file -def dpt_timeseries (file_path): - - # Radius of the Earth in m - r = 6.371e6 - # Degrees to radians conversion factor - deg2rad = pi/180.0 - # Northern boundary of ROMS grid - nbdry_val = -30 +def dpt_timeseries (grid_path, file_path): # Radius of the Earth in m r = 6.371e6 @@ -35,7 +30,11 @@ def dpt_timeseries (file_path): j_max = 298 print 'Reading grid' - # Read grid variables + # Read angle from the grid file + grid_id = Dataset(grid_path, 'r') + angle = grid_id.variables['angle'][:-15,:] + grid_id.close() + # Read other grid variables id = Dataset(file_path, 'r') h = id.variables['h'][:-15,:-3] zice = id.variables['zice'][:-15,:-3] @@ -43,7 +42,6 @@ def dpt_timeseries (file_path): lat = id.variables['lat_rho'][:-15,:-3] mask = id.variables['mask_rho'][:-15,:-3] - # Interpolate latitude to the edges of each cell s_bdry = lat[0,:] middle_lat = 0.5*(lat[0:-1,:] + lat[1:,:]) @@ -51,18 +49,9 @@ def dpt_timeseries (file_path): lat_edges = ma.concatenate((s_bdry[None,:], middle_lat, n_bdry[None,:])) # Subtract to get the change in latitude over each cell dlat = lat_edges[1:,:] - lat_edges[0:-1,:] - # Convert from spherical to Cartesian coordinates # dy = r*dlat where dlat is converted to radians dy = r*dlat*pi/180.0 - # Calculate water column thickness - wct = h + zice - - # Calculate dy_wct and mask with land mask - dy_wct = ma.masked_where(mask==0, dy*wct) - # Trim to Drake Passage bounds - dy_wct_DP = dy_wct[j_min:j_max,i_DP] - lat_DP = lat[j_min:j_max,i_DP] # Read time values and convert from seconds to years time = id.variables['ocean_time'][:]/(365*24*60*60) @@ -72,18 +61,23 @@ def dpt_timeseries (file_path): for t in range(size(time)): print 'Processing timestep ' + str(t+1) + ' of '+str(size(time)) - # Read ubar and interpolate onto the rho-grid + # Calculate water column thickness + zeta = id.variables['zeta'][t,:-15,:-3] + wct = h + zice + zeta + # Calculate dy_wct and mask with land mask + dy_wct = ma.masked_where(mask==0, dy*wct) + # Trim to Drake Passage bounds + dy_wct_DP = dy_wct[j_min:j_max,i_DP] + # Rotate velocities into lat-lon space ubar = id.variables['ubar'][t,:-15,:] - w_bdry_ubar = 0.5*(ubar[:,0] + ubar[:,-1]) - middle_ubar = 0.5*(ubar[:,0:-1] + ubar[:,1:]) - e_bdry_ubar = w_bdry_ubar[:] - ubar_rho = ma.concatenate((w_bdry_ubar[:,None], middle_ubar, e_bdry_ubar[:,None]), axis=1) + vbar = id.variables['vbar'][t,:-15,:] + ubar_lonlat, vbar_lonlat = rotate_vector_roms(ubar, vbar, angle) # Throw away the overlapping periodic boundary - ubar_rho = ubar_rho[:,:-3] + ubar_lonlat = ubar_lonlat[:,:-3] # Trim to Drake Passage bounds - ubar_rho_DP = ubar_rho[j_min:j_max,i_DP] + ubar_DP = ubar_lonlat[j_min:j_max,i_DP] # Integrate transport and convert to Sv - transport.append(sum(ubar_rho_DP*dy_wct_DP)*1e-6) + transport.append(sum(ubar_DP*dy_wct_DP)*1e-6) id.close() @@ -101,8 +95,9 @@ def dpt_timeseries (file_path): # Command-line interface if __name__ == "__main__": + grid_path = raw_input('Enter path to ROMS grid file: ') file_path = raw_input('Enter path to ocean history/averages file: ') - dpt_timeseries(file_path) + dpt_timeseries(grid_path, file_path) diff --git a/eraint_rotate_winds.py b/eraint_rotate_winds.py new file mode 100644 index 0000000..237f7a2 --- /dev/null +++ b/eraint_rotate_winds.py @@ -0,0 +1,41 @@ +from netCDF4 import Dataset +from numpy import * + +# Rotate the winds in a ROMS-CICE forcing file, interpolated from ERA-Interim as +# in 
romscice_atm_subdaily.py or romscice_atm_monthly.py, so that they are in +# local x-y space (i.e. positive Uwind points directly to the cell to the right; +# positive Vwind points directly to the cell above) rather than lon-lat space, +# which is slightly rotated. +# Input: +# grid_path = path to ROMS grid file +# eraint_path = path to ERA-Interim forcing file, interpolated to ROMS grid +# as in romscice_atm_subdaily.py or romscice_atm_monthly.py +def eraint_rotate_winds (grid_path, eraint_path): + + id = Dataset(grid_path, 'r') + angle = id.variables['angle'][:,:] + id.close() + + id = Dataset(eraint_path, 'a') + num_time = id.variables['time'].shape[0] + + # Loop over timesteps + for t in range(num_time): + print 'Processing timestep ' + str(t+1) + ' of ' + str(num_time) + u_lonlat = id.variables['Uwind'][t,:,:] + v_lonlat = id.variables['Vwind'][t,:,:] + # Simple rotation matrix + u_xy = u_lonlat*cos(angle) + v_lonlat*sin(angle) + v_xy = v_lonlat*cos(angle) - u_lonlat*sin(angle) + id.variables['Uwind'][t,:,:] = u_xy + id.variables['Vwind'][t,:,:] = v_xy + + id.close() + + +# Command-line interface +if __name__ == "__main__": + + grid_path = raw_input("Path to ROMS grid file: ") + eraint_path = raw_input("Path to ERA-Interim forcing file: ") + eraint_rotate_winds(grid_path, eraint_path) diff --git a/file_guide.txt b/file_guide.txt index f180068..9f868d2 100644 --- a/file_guide.txt +++ b/file_guide.txt @@ -5,6 +5,7 @@ cice_grid.py: Given a ROMS grid file, create corresponding CICE grid and kmt To run: Open python or ipython, and type "run cice_grid.py." The script will prompt you for paths to the existing ROMS grid file and the desired CICE grid and kmt files. + woa_netcdf.py: Converts World Ocean Atlas temperature and salinity data from text file (FESOM input format) to NetCDF. To run: First edit user parameters (file paths) near the top of @@ -13,11 +14,10 @@ woa_netcdf.py: Converts World Ocean Atlas temperature and salinity data from romscice_ini_woa.py: Builds a ROMS initialisation file using World Ocean Atlas data for temperature and salinity; starts with a motionless - ocean and zero sea surface height. Under ice shelves, sets - the salinity to a constant value (34.5 psu) and the - temperature to the local freezing point. Uses WOA data - assumed to be converted from FESOM input using - woa_netcdf.py + ocean and zero sea surface height. Under ice shelves, + extrapolates temperature and salinity using + nearest-neighbour techniques. Uses WOA data assumed to be + converted from FESOM input using woa_netcdf.py. To run: First edit user parameters (file paths and grid parameters) near the bottom of the file (after the Then make sure that you have scipy version 0.14 or @@ -26,6 +26,18 @@ romscice_ini_woa.py: Builds a ROMS initialisation file using World Ocean Atlas Then open python/ipython and type "run romscice_ini_woa.py". +romscice_ini_ecco.py: Builds a ROMS initialisation file using ECCO2 data for + temperature and salinity; starts with a motionless ocean + and zero sea surface height. In ice shelf cavities, + temperature and salinity are linearly extrapolated + using nearest-neighbour as in romscice_ini_woa.py. + To run: First edit user parameters (file paths and grid + parameters) near the bottom of the file. Then make + sure that you have scipy version 0.14 or higher + (on raijin, this means switching to python/2.7.6; + instructions at the top of the file). Then open + python/ipython and type "run romscice_ini.py". 
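The romscice_ini_ecco.py entry above describes a nearest-neighbour fill of temperature and salinity under the ice shelf cavities, but the script itself only appears in the diffstat and is not reproduced in this section. The following is a minimal sketch of that kind of fill, not the script's actual code: the function name, argument layout, and use of scipy.interpolate.griddata are illustrative assumptions.

from numpy import *
from scipy.interpolate import griddata

def fill_cavities_nearest (data, lon, lat, cavity_mask):

    # data: 2D field with valid values in the open ocean
    # cavity_mask: boolean array, True at the ice shelf cavity points to fill
    # Copy the field and set each cavity point to the value of the nearest
    # open-ocean point, with distance measured in lon-lat space. This ignores
    # the 360-degree longitude wrap, which a real circumpolar implementation
    # would need to handle.
    source = ~cavity_mask
    filled = copy(data)
    filled[cavity_mask] = griddata((lon[source], lat[source]), data[source], (lon[cavity_mask], lat[cavity_mask]), method='nearest')
    return filled

In the real script this step would presumably be applied level by level to the 3D temperature and salinity fields before they are written to the initialisation file.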
+ romscice_nbc.py: Builds a ROMS northern lateral boundary condition file from 1 year of monthly ECCO2 data for temperature, salinity, and velocity (set sea surface height to zero). Also contains useful @@ -37,6 +49,32 @@ romscice_nbc.py: Builds a ROMS northern lateral boundary condition file from 1 of the file). Then open python or ipython and type "run romscice_nbc.py". +convert_era.job: A self-submitting batch job which converts 1 year of + ERA-Interim sub-daily data into a ROMS-CICE forcing file, + in chunks of 100 6-hour timesteps at once (otherwise the python + memory overflows because this is a LOT of data). Depends on + roms_cice_atm_subdaily.py. + To run: First edit user parameters in romscice_atm_subdaily.py + (see below). Then edit the PBS job settings at the top + of this file. Then, to get the batch jobs started for + a given year (say 1992), type + qsub -v YEAR=1992,COUNT=0 convert_era.job + You can also submit multiple years quickly with a bash + loop: + for i in `seq 1992 2009`; + do + qsub -v YEAR=$i,COUNT=0 convert_era.job + done + +romscice_atm_subdaily.py: Convert 100 6-hour timesteps of an ERA-Interim + atmospheric forcing file (plus 50 12-hour timesteps + of ERA-Interim precipitation) to a ROMS-CICE forcing + file. + To run: Edit user parameters near the top of the + script (mainly just file paths). This script + is designed to be called by a self-submitting + batch job, e.g. convert_era.job. + romscice_atm_monthly.py: Convert ERA-Interim files of monthly averaged atmospheric forcing to ROMS-CICE input forcing files on the correct grid and with the correct units. @@ -84,6 +122,42 @@ add_iceberg_melt.py: Read Martin and Adcroft's monthly climatology of precipitation; the iceberg fluxes will be added to this. +add_tide_period.py: Given a ROMS tide file with tidal potential amplitude and + phase angle (created using Kate's potential tide scripts), + add the tidal period in seconds for each component. + To run: First edit the variable "tide_file" to point to the + correct file to edit. Then open python or ipython + and type "run add_tide_period.py". + +eraint_rotate_winds.py: Rotate the winds in a ROMS-CICE forcing file, + interpolated from ERA-Interim as in + romscice_atm_subdaily.py or romscice_atm_monthly.py, + so that they are in local x-y space (i.e. positive + Uwind points directly to the cell to the right; + positive Vwind points directly to the cell above) rather + than lon-lat space, which is slightly rotated. + To run: Open python or ipython and type + "run eraint_rotate_winds.py". You will be + prompted for the path to the ROMS grid file and + the forcing file to rotate. + +remove_cavities.py: Remove the ice shelf cavities from the given ROMS grid file + and fill them in with land. After you run this script, I + suggest you remove the variables "zice" and "mask_zice" + from the file using NCO, to avoid any confusion later. + To run: Open python or ipython and type + "run remove_cavities.py". You will be prompted for + the path to the ROMS grid file you wish to edit. + +uvp_grid_fix.py: The Matlab scripts I use to generate the ROMS grid assume that + the u-grid, v-grid, and psi-grid points have the same locations + as the rho-grid, and they just chop off the last point in the + required dimension(s). This is not correct. This script fixes + that. + To run: Open python or ipython and type "run uvp_grid_fix.py". + You will be prompoted for the path to the ROMS grid + file you wish to edit. 
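uvp_grid_fix.py itself (listed with 92 added lines in the diffstat above) is not included in this section, so here is a minimal sketch of the kind of correction its entry describes, assuming the standard ROMS staggering: u points midway between rho points in the xi direction, v points midway in eta, and psi points at the cell corners. The plain averaging below ignores the 360-degree longitude wrap at the periodic boundary, which the real script would need to treat more carefully (compare the extension logic added to cice_grid.py earlier in this patch).

from netCDF4 import Dataset

def fix_uvp_grid (grid_file):

    # Open the grid file for in-place editing
    id = Dataset(grid_file, 'a')
    lon_rho = id.variables['lon_rho'][:,:]
    lat_rho = id.variables['lat_rho'][:,:]
    # u points: midpoints of adjacent rho points in the xi direction
    id.variables['lon_u'][:,:] = 0.5*(lon_rho[:,:-1] + lon_rho[:,1:])
    id.variables['lat_u'][:,:] = 0.5*(lat_rho[:,:-1] + lat_rho[:,1:])
    # v points: midpoints of adjacent rho points in the eta direction
    id.variables['lon_v'][:,:] = 0.5*(lon_rho[:-1,:] + lon_rho[1:,:])
    id.variables['lat_v'][:,:] = 0.5*(lat_rho[:-1,:] + lat_rho[1:,:])
    # psi points: average of the four surrounding rho points
    id.variables['lon_psi'][:,:] = 0.25*(lon_rho[:-1,:-1] + lon_rho[:-1,1:] + lon_rho[1:,:-1] + lon_rho[1:,1:])
    id.variables['lat_psi'][:,:] = 0.25*(lat_rho[:-1,:-1] + lat_rho[:-1,1:] + lat_rho[1:,:-1] + lat_rho[1:,1:])
    id.close()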
+ ***POST-PROCESSING DIAGNOSTICS*** @@ -172,18 +246,20 @@ dpt_timeseries.py: Like dpt_2d.py, but integrates over latitude as well as The script will prompt you for paths to the ROMS grid file and the ocean history or averages file. -massloss.py: Calculates and plots timeseries of basal mass loss and - area-averaged melt rates from major ice shelves during a ROMS - simulation. Also writes the timeseries to a log file so they don't - have to be recomputed if the run is extended; at the beginning of - this script, previous values will be read from the same log file - if it exists. - To run: Open python or ipython, and type "run massloss.py". The - script will prompt you for the paths to the ROMS grid file, - the ocean history or averages file, and the log file. If - you are using ice shelf draft data from something other - than RTopo 1.05 you might need to tweak the lat and lon - limits. +timeseries_massloss.py: Calculates and plots timeseries of basal mass loss and + area-averaged melt rates from major ice shelves and the + entire continent during a ROMS simulation. Also writes + the timeseries to a log file so they don't have to be + recomputed if the run is extended; at the beginning of + this script, previous values will be read from the same + log file if it exists. + To run: Open python or ipython, and type + "run timeseries_massloss.py". The script will + prompt you for the paths to the ROMS grid file, + the ocean history or averages file, and the log + file. If you are using ice shelf draft data + from something other than RTopo 1.05 you might + need to tweak the lat and lon limits. ***CMIP5 ANALYSIS*** @@ -521,23 +597,24 @@ uv_vectorplot.py: Make a circumpolar Antarctic plot of speed overlaid with massloss_map.py: Make a map of unexplained percent error in annually averaged simulated basal mass loss from each ice shelf that is over 5,000 km^2 in Rignot et al., 2013. - To run: First make sure your logfile from massloss.py is up to - date, as this script reads simulated basal mass loss - timeseries from this logfile. Then open python or - ipython and type "run massloss_map.py". The script will - prompt you for paths to the ROMS grid file and the - mass loss logfile, and whether to save the figure (and - if so, what filename) or display it on the screen. + To run: First make sure your logfile from + timeseries_massloss.py is up to date, as this script + reads simulated basal mass loss timeseries from this + logfile. Then open python or ipython and type + "run massloss_map.py". The script will prompt you for + paths to the ROMS grid file and the mass loss logfile, + and whether to save the figure (and if so, what + filename) or display it on the screen. ismr_map.py: Same as massloss_map.py, but for area-averaged melt rates rather than area-integrated mass loss. - To run: First make sure your logfile from massloss.py is up to - date, as this script reads simulated basal mass loss - timeseries from this logfile. Then open python or ipython - and type "run ismr_map.py". The script will prompt you for - paths to the ROMS grid file and the mass loss logfile, and - whether to save the figure (and if so, what filename) or - display it on the screen. + To run: First make sure your logfile from timeseries_massloss.py + is up to date, as this script reads simulated basal mass + loss timeseries from this logfile. Then open python or + ipython and type "run ismr_map.py". 
The script will prompt + you for paths to the ROMS grid file and the mass loss + logfile, and whether to save the figure (and if so, what + filename) or display it on the screen. nsidc_aice_monthly.py: Make a figure comparing sea ice concentration from NSIDC (1995 data) and CICE (latest year of spinup under @@ -594,40 +671,15 @@ bwsalt_plot.py: Creates a circumpolar plot of bottom water salinity, averaged figure (and if so, what filename) or display it on screen. -ismr_seasonal_cycle.py: Make a map of the magnitude of the seasonal cycle (over - the last year of simulation) in area-averaged melt rates - for each ice shelf that is over 5,000 km^2 in Rignot - et al., 2013. - To run: First make sure your logfile from massloss.py is - up to date, as this script reads simulated - basal mass loss timeseries from this log file. - Then open python or ipython and type - "run ismr_seasonal_cycle.py". The script will - prompt you for paths to the ROMS grid file and - the mass loss logfile, and whether to save the - figure (and if so, what filename) or display it - on the screen. - -massloss_map.py: Make a map of unexplained error in annually averaged simulated - basal mass loss from each ice shelf that is over 5,000 km^2 in - Rignot et al., 2013. - To run: First make sure your logfile from massloss.py is up to - date, as this script reads simulated basal mass loss - timeseries from this logfile. Then open python or - ipython and type "run massloss_map.py". The script will - prompt you for paths to the ROMS grid file and the - mass loss logfile, and whether to save the figure (and - if so, what filename) or display it on the screen. - sose_roms_seasonal.py: Make a 4x2 plot compring lat vs. depth slices of seasonally averaged temperature or salinity at the given longitude, between ROMS (last year of simulation) and SOSE (2005-2010 climatology). To run: First make the sure the path to the SOSE seasonal - climatology file, stored in the variable sose_file - near the top of the main function, is correct. - (If you don't have this file ask Kaitlin for it.) - Then Open python or ipython and type + climatology file, stored in the variable + sose_file near the top of the main function, is + correct. (If you don't have this file ask Kaitlin + for it.) Then open python or ipython and type "run sose_roms_seasonal.py". You will be prompted for the path to the ROMS output file, whether to plot temperature or salinity, the longitude @@ -644,6 +696,34 @@ zice_circumpolar.py: Creates a circumpolar Antarctic plot of ice shelf draft. you for the path to the ROMS grid file and the filename to save the figure under. +grid_res.py: Make a circumpolar plot of the horizontal resolution of the ROMS + grid (square root of the area of each cell). + To run: Open python or ipython and type "run grid_res.py". You will + be prompted for the ROMS grid file, and whether to save the + figure (and if so, what filename) or display it on the + screen. + +mld_plot.py: Create a circumpolar Antarctic plot of mixed layer depth in ROMS. + Follows the same process as circumpolar_plot.py, but masking out + the ice shelves requires a special case. + To run: Open python or ipython and type "mld_plot.py". You will + be prompted for the ROMS output file, the timestep to + plot, colour bounds (optional), whether to save the figure + (and if so, what filename) or display it on the screen. + This script is non-repeating. 
+ +make_zonal_slices.sh: Example of a bash script that calls sose_roms_seasonal.py + for both temperature and salinity at 10 degree increments + all along the Antarctic coastline. + To run: First edit the variable ROMS_FILE to point to the + ROMS output file you want plots for (it must + contain at least 1 year of 5-day averages). + Also change whatever other arguments you want + in the calls to sose_roms_seasonal, such as the + output file name or the maximum depth to plot. + Then just type "./make_zonal_slices.sh" in the + shell and it will go. + ***ANIMATIONS*** @@ -669,13 +749,28 @@ calc_z.py: Given ROMS grid variables, calculate the s-coordinates, stretching cartesian_grid_2d.py: Given ROMS grid variables, calculate 2D Cartesian integrands dx and dy. To run: This is a function designed to be called from - other scripts. See for example massloss.py + other scripts. See for example + timeseries_massloss.py cartesian_grid_3d.py: Given ROMS grid variables, calculate 3D Cartesian integrands dx, dy, and dz, as well as 3D z-coordinates. To run: This is a function designed to be called from other scripts. See for example spinup_plots.py. +rotate_vector_cice.py: Given a 2D vector in x-y space on the CICE grid, rotate + it to lon-lat space. + To run: This is a function designed to be called from + other scripts. See for example + circumpolar_cice_plot.py. + +rotate_vector_roms.py: Given a 2D vector in x-y space on the ROMS grid + (x-component on the u-grid, y-component on the v-grid), + interpolate them both to the rho-grid and rotate the + vector to lon-lat space. + To run: This is a function designed to be called from + other scripts. See for example + circumpolar_plot.py. + ***OBSOLETE*** @@ -705,42 +800,6 @@ update_grid.sh: After making changes to bathymetry, ice shelf draft, and masks still be useful. Then type "qsub update_grid.sh" to submit to the queue. -convert_era.job: A self-submitting batch job which converts 1 year of - ERA-Interim sub-daily data into a ROMS-CICE forcing file, - in chunks of 100 6-hour timesteps at once (otherwise the python - memory overflows because this is a LOT of data). Depends on - roms_cice_atm_subdaily.py. - To run: First edit user parameters in romscice_atm_subdaily.py - (see below). Then edit the PBS job settings at the top - of this file. Then, to get the batch jobs started for - a given year (say 1992), type - qsub -v YEAR=1992,COUNT=0 convert_era.job - You can also submit multiple years quickly with a bash - loop: - for i in `seq 1992 2009`; - do - qsub -v YEAR=$i,COUNT=0 convert_era.job - done - -romscice_atm_subdaily.py: Convert 100 6-hour timesteps of an ERA-Interim - atmospheric forcing file (plus 50 12-hour timesteps - of ERA-Interim precipitation) to a ROMS-CICE forcing - file. - To run: Edit user parameters near the top of the - script (mainly just file paths). This script - is designed to be called by a self-submitting - batch job, e.g. convert_era.job. - -romscice_ini_ecco.py: Builds a ROMS initialisation file using ECCO2 data for - temperature and salinity; starts with a motionless ocean - and zero sea surface height. - To run: First edit user parameters (file paths and grid - parameters) near the bottom of the file. Then make - sure that you have scipy version 0.14 or higher - (on raijin, this means switching to python/2.7.6; - instructions at the top of the file). Then open - python/ipython and type "run romscice_ini.py". - convert_ecco.job: A batch job which converts 1 year of ECCO2 data into northern lateral bounday conditions for ROMS. 
To run: First edit user parameters in romscice_nbc.py diff --git a/grid_res.py b/grid_res.py new file mode 100644 index 0000000..bb33436 --- /dev/null +++ b/grid_res.py @@ -0,0 +1,69 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from cartesian_grid_2d import * + +# Make a circumpolar plot of the horizontal resolution of the ROMS grid (square +# root of the area of each cell). +# Input: +# grid_path = path to ROMS grid file +# save = optional boolean indicating to save the figure, rather than display it +# on the screen +# fig_name = if save=True, filename to save the figure +def grid_res (grid_path, save=False, fig_name=None): + + # Degrees to radians conversion factor + deg2rad = pi/180 + + # Read grid + id = Dataset(grid_path, 'r') + lon = id.variables['lon_rho'][:-15,:-2] + lat = id.variables['lat_rho'][:-15,:-2] + mask = id.variables['mask_rho'][:-15,:-2] + id.close() + + # Get differentials + dx, dy = cartesian_grid_2d(lon, lat) + # Calculate resolution: square root of the area, converted to km + res = sqrt(dx*dy)*1e-3 + # Apply land mask + res = ma.masked_where(mask==0, res) + + # Polar coordinates for plotting + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + + # Colour levels + lev = linspace(0, 40, num=50) + + # Plot + fig = figure(figsize=(16,12)) + fig.add_subplot(1,1,1, aspect='equal') + contourf(x, y, res, lev) + cbar = colorbar() + cbar.ax.tick_params(labelsize=20) + title('Grid resolution (km)', fontsize=30) + axis('off') + + if save: + fig.savefig(fig_name) + else: + fig.show() + + +# Command-line interface +if __name__ == "__main__": + + grid_path = raw_input("Path to ROMS grid file: ") + action = raw_input("Save figure (s) or display on screen (d)? ") + if action == 's': + save = True + fig_name = raw_input("Filename for figure: ") + elif action == 'd': + save = False + fig_name = None + grid_res(grid_path, save, fig_name) + + + + diff --git a/ismr_map.py b/ismr_map.py index c574be9..1c4de7e 100644 --- a/ismr_map.py +++ b/ismr_map.py @@ -3,11 +3,11 @@ from matplotlib.pyplot import * from matplotlib import rcParams -# Make a map of unexplained percent error in annually averaged simulated melt rate -# from each ice shelf that is over 5,000 km^2 in Rignot et al., 2013. +# Make a map of unexplained percent error in annually averaged simulated melt +# rate from each ice shelf that is over 5,000 km^2 in Rignot et al., 2013. 
 # Input:
 # grid_path = path to ROMS grid file
-# log_path = path to log file created by massloss.py
+# log_path = path to log file created by timeseries_massloss.py
 # save = optional boolean to save the figure to a file, rather than displaying
 #        it on the screen
 # fig_name = if save=True, path to the desired filename for the figure
@@ -48,6 +48,12 @@ def massloss_map (grid_path, log_path, save=False, fig_name=None):
         except(ValueError):
             # Reached the header for the next variable
             break
+    # Skip the values for the entire continent
+    for line in f:
+        try:
+            tmp = float(line)
+        except(ValueError):
+            break
     # Set up array for mass loss values at each ice shelf
     massloss_ts = empty([len(obs_massloss), len(time)])
     index = 0
diff --git a/mld_plot.py b/mld_plot.py
new file mode 100644
index 0000000..6900a90
--- /dev/null
+++ b/mld_plot.py
@@ -0,0 +1,80 @@
+from netCDF4 import Dataset
+from numpy import *
+from matplotlib.pyplot import *
+
+# Creates a circumpolar Antarctic plot of mixed layer depth in ROMS. Follows
+# the same process as circumpolar_plot.py, but masking out the ice shelves
+# requires a special case.
+# Input:
+# file_path = path to ocean history/averages file
+# tstep = timestep to plot in file_path
+# colour_bounds = optional bounds on colour scale, stored as an array of size
+#                 2 with the lower bound first. If colour_bounds = None, then
+#                 determine colour scale bounds automatically. 
+# save = optional boolean flag indicating that the plot should be saved to a +# file rather than displayed on the screen +# fig_name = if save=True, filename for figure +def mld_plot (file_path, tstep, colour_bounds, save=False, fig_name=None): + + deg2rad = pi/180 + + # Read grid, zice, Hsbl + id = Dataset(file_path, 'r') + lon = id.variables['lon_rho'][:-15,:-2] + lat = id.variables['lat_rho'][:-15,:-2] + zice = id.variables['zice'][:-15,:-2] + hsbl = id.variables['Hsbl'][tstep-1,:-15,:-2] + id.close() + # Mask out the ice shelves and change the sign + mld = ma.masked_where(zice!=0, -hsbl) + + # Convert to spherical coordinates + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + + if colour_bounds is not None: + # User has set bounds on colour scale + lev = linspace(colour_bounds[0], colour_bounds[1], num=40) + else: + # Determine bounds automatically + lev = linspace(0, amax(mld), num=40) + + # Plot + fig = figure(figsize=(16,12)) + fig.add_subplot(1,1,1, aspect='equal') + contourf(x, y, mld, lev, cmap='jet', extend='both') + cbar = colorbar() + cbar.ax.tick_params(labelsize=20) + title('Mixed layer depth (m)', fontsize=30) + axis('off') + + if save: + fig.savefig(fig_name) + else: + fig.show() + + +# Command-line interface +if __name__ == "__main__": + + file_path = raw_input("Path to ocean history/averages file: ") + tstep = int(raw_input("Timestep number (starting at 1): ")) + colour_bounds = None + get_bounds = raw_input("Set bounds on colour scale (y/n)? ") + if get_bounds == 'y': + lower_bound = float(raw_input("Lower bound: ")) + upper_bound = float(raw_input("Upper bound: ")) + colour_bounds = [lower_bound, upper_bound] + action = raw_input("Save figure (s) or display in window (d)? ") + if action == 's': + save = True + fig_name = raw_input("File name for figure: ") + elif action == 'd': + save = False + fig_name = None + mld_plot(file_path, tstep, colour_bounds, save, fig_name) + + + + + diff --git a/nbdry_grid_hack.py b/nbdry_grid_hack.py index 35a9245..1ee947b 100644 --- a/nbdry_grid_hack.py +++ b/nbdry_grid_hack.py @@ -21,27 +21,21 @@ def nbdry_grid_hack (grid_file, num_pts): # Loop over longitude for i in range(size(h,1)): - # Only modify columns where the northern boundary is open - # (i.e. 
not land) - if mask_rho[-1,i] == 1: - # Set bathymetry and land mask to be the same as the - # point "num_pts" from the northern boundary - for j in range(1, num_pts): - h[-j,i] = h[-num_pts,i] - mask_rho[-j,i] = mask_rho[-num_pts,i] - - # Update the other masks - mask_u = mask_rho[:,1:]*mask_rho[:,:-1] - mask_v = mask_rho[1:,:]*mask_rho[:-1,:] - mask_psi = mask_rho[1:,1:]*mask_rho[1:,:-1]*mask_rho[:-1,1:]*mask_rho[:-1,:-1] + # Find the southernmost unmasked cell within "num_pts" of the + # northern boundary and set all the points north of it to match + found_pt = False + for j in range(num_pts, -1, -1): + if mask_rho[-j,i] == 1: + if found_pt: + # Already found the right point + h[-j,i] = val + else: + # This is the first unmasked point + found_pt = True + val = h[-j,i] # Save changes id.variables['h'][:,:] = h - id.variables['mask_rho'][:,:] = mask_rho - id.variables['mask_u'][:,:] = mask_u - id.variables['mask_v'][:,:] = mask_v - id.variables['mask_psi'][:,:] = mask_psi - id.close() diff --git a/overturning_plot.py b/overturning_plot.py index 9ab64ea..75e3e26 100644 --- a/overturning_plot.py +++ b/overturning_plot.py @@ -2,13 +2,15 @@ from numpy import * from matplotlib.pyplot import * from cartesian_grid_3d import * +from rotate_vector_roms import * # Calculate the meridional overturning streamfunction at the last timestep # of the given ROMS history/averages file and make a contour plot in z-space. # Input: +# grid_path = path to ROMS grid file # file_path = path to ROMS history/averages file # fig_name = filename for figure -def overturning_plot (file_path, fig_name): +def overturning_plot (grid_path, file_path, fig_name): # Grid parameters theta_s = 0.9 @@ -16,23 +18,33 @@ def overturning_plot (file_path, fig_name): hc = 40 N = 31 - # Read v and grid variables + # Read angle from the grid file + grid_id = Dataset(grid_path, 'r') + angle = grid_id.variables['angle'][:-15,:] + grid_id.close() + # Read grid variables id = Dataset(file_path, 'r') v = id.variables['v'][-1,:,:-15,:-3] # -1 means last timestep h = id.variables['h'][:-15,:-3] zice = id.variables['zice'][:-15,:-3] zeta = id.variables['zeta'][-1,:-15,:-3] - lon_v = id.variables['lon_v'][:-15,:-3] - lat_v = id.variables['lat_v'][:-15,:-3] + lon = id.variables['lon_rho'][:-15,:-3] + lat = id.variables['lat_rho'][:-15,:-3] + # Read both velocities in x-y space + u_xy = id.variables['u'][-1,:,:-15,:] + v_xy = id.variables['v'][-1,:,:-15,:] id.close() - # Interpolate h, zice, and zeta to the v-grid - h_v = 0.5*(h[0:-1,:] + h[1:,:]) - zice_v = 0.5*(zice[0:-1,:] + zice[1:,:]) - zeta_v = 0.5*(zeta[0:-1,:] + zeta[1:,:]) + # Rotate velocities to lat-lon space + v = ma.empty([N,v_xy.shape[1]+1,v_xy.shape[2]]) + for k in range(N): + u_lonlat, v_lonlat = rotate_vector_roms(u_xy[k,:,:], v_xy[k,:,:], angle) + v[k,:,:] = v_lonlat[:,:] + # Throw away the periodic boundary overlap + v = v[:,:,:-3] # Calculate Cartesian integrands and z-coordinates - dx, dy, dz, z = cartesian_grid_3d(lon_v, lat_v, h_v, zice_v, theta_s, theta_b, hc, N, zeta_v) + dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N, zeta) # Calculate transport in each cell transport = v*dx*dz @@ -44,7 +56,7 @@ def overturning_plot (file_path, fig_name): # Calculate latitude and z coordinates, averaged over longitude, # for plotting - avg_lat = mean(lat_v, axis=1) + avg_lat = mean(lat, axis=1) avg_lat = tile(avg_lat, (N,1)) avg_z = mean(z, axis=2) @@ -67,6 +79,7 @@ def overturning_plot (file_path, fig_name): # Command-line interface if __name__ == 
"__main__": + grid_path = raw_input("Path to ROMS grid file: ") file_path = raw_input("Path to ROMS history/averages file: ") fig_name = raw_input("File name for figure: ") - overturning_plot(file_path, fig_name) + overturning_plot(grid_path, file_path, fig_name) diff --git a/plot_kinetic_energy.py b/plot_kinetic_energy.py index 0259eec..67e77d5 100644 --- a/plot_kinetic_energy.py +++ b/plot_kinetic_energy.py @@ -1,6 +1,6 @@ from re import split from numpy import * -from matplotlib.pyplot import plot,xlabel,ylabel,clf,show +from matplotlib.pyplot import plot,xlabel,ylabel,clf,show,grid # Read the kinetic energy values written in ocean.log each timestep # and return them as a 1D array. diff --git a/remove_cavities.py b/remove_cavities.py new file mode 100644 index 0000000..d29690f --- /dev/null +++ b/remove_cavities.py @@ -0,0 +1,48 @@ +from netCDF4 import Dataset +from numpy import * + +# Remove the ice shelf cavities from the given ROMS grid file and fill them +# in with land. After you run this script, I suggest you remove the variables +# "zice" and "mask_zice" from the file using NCO, to avoid any confusion later. +# Input: +# grid_path = path to ROMS grid file to edit +def remove_cavities (grid_path): + + # Read existing grid + id = Dataset(grid_path, 'a') + mask_zice = id.variables['mask_zice'][:,:] + mask_rho = id.variables['mask_rho'][:,:] + mask_u = id.variables['mask_u'][:,:] + mask_v = id.variables['mask_v'][:,:] + mask_psi = id.variables['mask_psi'][:,:] + h = id.variables['h'][:,:] + + # Select ice shelf points + index = mask_zice == 1 + # Apply land mask + mask_rho[index] = 0 + # Apply token bathymetry + h[index] = 50 + + # Recalculate masks on u-, v-, and psi-grids + mask_u = mask_rho[:,:-1]*mask_rho[:,1:] + mask_v = mask_rho[:-1,:]*mask_rho[1:,:] + mask_psi = mask_rho[1:,1:]*mask_rho[1:,:-1]*mask_rho[:-1,1:]*mask_rho[:-1,:-1] + + # Overwrite variables in file + id.variables['mask_rho'][:,:] = mask_rho + id.variables['mask_u'][:,:] = mask_u + id.variables['mask_v'][:,:] = mask_v + id.variables['mask_psi'][:,:] = mask_psi + id.variables['h'][:,:] = h + + id.close() + + +# Command-line interface +if __name__ == "__main__": + + grid_path = raw_input("Path to ROMS grid file to edit: ") + remove_cavities(grid_path) + + diff --git a/romscice_ini_ecco.py b/romscice_ini_ecco.py index f5a7047..78e380c 100644 --- a/romscice_ini_ecco.py +++ b/romscice_ini_ecco.py @@ -1,6 +1,7 @@ # Build a 1995 initialisation file for ROMS from the ECCO2 temperature and # salinity reanalysis of January 1995. Set initial velocities and sea surface -# height to zero. +# height to zero. Under the ice shelves, extrapolate temperature and salinity +# from the ice shelf front. # NB: Users will likely need to edit paths to ECCO2 data! Scroll down below # the interp_ecco2roms function to find where these filenames are defined. @@ -20,13 +21,6 @@ # Main routine def run (grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b, hc, N, nbdry_ecco): - # Parameters for freezing point of seawater; taken from Ben's PhD thesis - a = -5.73e-2 - b = 8.32e-2 - c = -7.61e-8 - rho = 1035.0 - g = 9.81 - # Read ECCO2 data and grid print 'Reading ECCO2 data' theta_fid = Dataset(theta_file, 'r') @@ -97,22 +91,9 @@ def run (grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b # Regridding happens here... 
print 'Interpolating temperature' - temp = interp_ecco2roms(theta, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, -0.5) + temp = interp_ecco2roms(theta, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice, -0.5) print 'Interpolating salinity' - salt = interp_ecco2roms(salt, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, 35) - - # Identify the continental shelf - h_3d = tile(h, (N,1,1)) - index = nonzero((lat_roms_3d <= -60)*(h_3d <= 1200)) - # Set salinity over the continental shelf to a constant value, and - # temperature to the local freezing point - salt[index] = 35 - temp[index] = a*salt[index] + b + c*rho*g*abs(z_roms_3d[index]) - # Make sure the ice shelf cavities got this too - mask_zice_3d = tile(mask_zice, (N,1,1)) - index = mask_zice_3d == 1 - salt[index] = 35 - temp[index] = a*salt[index] + b + c*rho*g*abs(z_roms_3d[index]) + salt = interp_ecco2roms(salt, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice, 34.5) # Set initial velocities and sea surface height to zero u = zeros((N, num_lat, num_lon-1)) @@ -206,8 +187,8 @@ def run (grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b print 'Finished' -# Given an array on the ECCO2 grid, fill land mask with constant values -# and interpolate onto the ROMS grid. +# Given an array on the ECCO2 grid, interoplate onto the ROMS grid, extrapolate +# under ice shelf cavities, and fill the land mask with constant values. # Input: # A = array of size nxmxo containing values on the ECCO2 grid (dimension # longitude x latitude x depth) @@ -223,31 +204,104 @@ def run (grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b # z_roms_3d = array of size pxqxr containing ROMS depth values in negative # z-coordinates (converted from grid file using calc_z.py, as # shown below in the main script) -# fill_value = scalar containing the value with which to fill the ROMS land mask -# and ice shelf cavities (doesn't really matter, don't use NaN) +# mask_rho = array of size qxr containing ROMS land mask (ocean/ice shelf 1, +# land 0) +# mask_zice = array of size qxr containing ROMS ice shelf mask (ice shelf 1, +# ocean/land 0) +# fill = scalar containing the value with which to fill the ROMS land mask +# (doesn't really matter, don't use NaN) # Output: # B = array of size pxqxr containing values on the ROMS grid (dimension depth x # latitude x longitude) -def interp_ecco2roms (A, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, fill_value): +def interp_ecco2roms (A, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice, fill): + + # Radius of the Earth in m + r = 6.371e6 + # Degrees to radians conversion factor + deg2rad = pi/180.0 # Calculate N based on size of ROMS grid N = size(lon_roms_3d, 0) - # Fill the ECCO2 land mask with a constant value close to the mean of A - # This is less than ideal, but we will overwrite the continental shelf - # values anyway later - Afill = A - Afill[A.mask] = fill_value + # Unmask A and fill with NaNs + A_unmask = A.data + A_unmask[A.mask]=NaN - # Build a function for linear interpolation of A - interp_function = RegularGridInterpolator((lon_ecco, lat_ecco, depth_ecco), Afill) + # Build a function for linear interpolation of A; set flag to fill + # out-of-bounds values with NaN + interp_function = RegularGridInterpolator((lon_ecco, lat_ecco, depth_ecco), A_unmask, bounds_error=False, fill_value=NaN) B = 
zeros(shape(lon_roms_3d)) + # Interpolate each z-level individually - 3D vectorisation uses too # much memory! for k in range(N): print '...vertical level ', str(k+1), ' of ', str(N) # Pass positive values for ROMS depth - B[k,:,:] = interp_function((lon_roms_3d[k,:,:], lat_roms_3d[k,:,:], -z_roms_3d[k,:,:])) + tmp = interp_function((lon_roms_3d[k,:,:], lat_roms_3d[k,:,:], -z_roms_3d[k,:,:])) + # Fill NaNs with constant value + index = isnan(tmp) + tmp[index] = fill + # Fill land mask with constant value + tmp[mask_rho==0] = fill + # Save this depth level + B[k,:,:] = tmp + + print '...selecting the ice shelf front' + # Find the ice shelf front + front = zeros(shape(lon_roms_3d)) + # Find northernmost latitude with ice shelves + nbdry_zice = where(sum(mask_zice, axis=1) > 0)[-1][-1] + 2 + # Loop over all cells; can't find a cleaner way to do this + for j in range(nbdry_zice): + for i in range(size(mask_zice,1)): + # First make sure this is an ocean point + if mask_rho[j,i] == 1: + # Find bounds on window of radius 1 + j_min = max(j-1,0) + j_max = min(j+2, size(mask_zice,0)) + i_min = max(i-1,0) + i_max = min(i+2, size(mask_zice,1)) + # Points at the ice shelf front are not ice shelf points, but + # have at least one neighbour that is + if mask_zice[j,i] == 0 and any(mask_zice[j_min:j_max,i_min:i_max] == 1): + front[:,j,i] = 1 + + print '...extrapolating under ice shelves' + mask_zice_3d = tile(mask_zice, (N,1,1)) + for j in range(nbdry_zice, -1, -1): + print '...latitude index ' + str(nbdry_zice-j+1) + ' of ' + str(nbdry_zice+1) + # Find coordinates of ice shelf front points + lon_front = lon_roms_3d[front==1] + lat_front = lat_roms_3d[front==1] + z_front = z_roms_3d[front==1] + # Also find the values of the given variable here + B_front = B[front==1] + for i in range(size(mask_zice,1)): + if mask_zice[j,i] == 1: + for k in range(N): + # Calculate Cartesian distance from this point to each point + # at the ice shelf front + dlon = lon_front - lon_roms_3d[k,j,i] + index = dlon > 300 + dlon[index] -= 360 + index = dlon < -300 + dlon[index] += 360 + dlon = abs(dlon) + dlat = abs(lat_front - lat_roms_3d[k,j,i]) + dz = abs(z_front - z_roms_3d[k,j,i]) + dx = r*cos(lat_roms_3d[k,j,i]*deg2rad)*dlon*deg2rad + dy = r*dlat*deg2rad + dist = sqrt(dx**2 + dy**2 + dz**2) + # Find the index with the minimum distance: this is the + # nearest-neighbour + nearest = argmin(dist) + B[k,j,i] = B_front[nearest] + # Update the ice shelf front points to include the new points we've just + # extrapolated to + front_tmp = front[:,j,:] + mask_zice_tmp = mask_zice_3d[:,j,:] + front_tmp[mask_zice_tmp==1] = 1 + front[:,j,:] = front_tmp return B @@ -259,8 +313,8 @@ def interp_ecco2roms (A, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3 # Path to ROMS grid file grid_file = '../ROMS-CICE-MCT/apps/common/grid/circ30S_quarterdegree_10m.nc' # Path to ECCO2 files for temperature and salinity in January 1995 - theta_file = '../ROMS-CICE-MCT/data/ECCO2/raw/THETA.1440x720x50.199501.nc' - salt_file = '../ROMS-CICE-MCT/data/ECCO2/raw/SALT.1440x720x50.199501.nc' + theta_file = '../ROMS-CICE-MCT/data/THETA.1440x720x50.199501.nc' + salt_file = '../ROMS-CICE-MCT/data/SALT.1440x720x50.199501.nc' # Path to desired output file output_file = '../ROMS-CICE-MCT/data/ecco2_ini.nc' diff --git a/rotate_vector_cice.py b/rotate_vector_cice.py new file mode 100644 index 0000000..9496e0d --- /dev/null +++ b/rotate_vector_cice.py @@ -0,0 +1,15 @@ +from netCDF4 import Dataset +from numpy import * + +# Given a 2D vector in x-y space on the 
CICE grid, rotate it to lon-lat space. +# Input: +# u, v = x and y components of the vector on the CICE grid +# angle = angle between the CICE x-axis and east, at each point, in radians +# Output: +# u_lonlat, v_lonlat = components of the vector with respect to lon-lat space +def rotate_vector_cice (u, v, angle): + + u_lonlat = u*cos(-angle) + v*sin(-angle) + v_lonlat = v*cos(-angle) - u*sin(-angle) + + return u_lonlat, v_lonlat diff --git a/rotate_vector_roms.py b/rotate_vector_roms.py new file mode 100644 index 0000000..e578b9e --- /dev/null +++ b/rotate_vector_roms.py @@ -0,0 +1,31 @@ +from netCDF4 import Dataset +from numpy import * + +# Given a 2D vector in x-y space on the ROMS grid (x component on the u-grid, +# y component on the v-grid), interpolate them both to the rho-grid and rotate +# the vector to lon-lat space. +# Input: +# u = x-component of vector on the ROMS u-grid +# v = y-component of vector on the ROMS v-grid +# angle = angle between the ROMS x-axis and east, at each point, in radians +# Output: +# u_lonlat, v_lonlat = components of the vector with respect to lon-lat space +def rotate_vector_roms (u, v, angle): + + # Interpolate u to the rho-grid + w_bdry_u = 0.5*(u[:,0] + u[:,-1]) + middle_u = 0.5*(u[:,0:-1] + u[:,1:]) + e_bdry_u = w_bdry_u[:] + u_rho = ma.concatenate((w_bdry_u[:,None], middle_u, e_bdry_u[:,None]), axis=1) + # Interplate v to the rho-grid + s_bdry_v = v[0,:] + middle_v = 0.5*(v[0:-1,:] + v[1:,:]) + n_bdry_v = v[-1,:] + v_rho = ma.concatenate((s_bdry_v[None,:], middle_v, n_bdry_v[None,:]), axis=0) + + # Rotate + u_lonlat = u_rho*cos(-angle) + v_rho*sin(-angle) + v_lonlat = v_rho*cos(-angle) - u_rho*sin(-angle) + + return u_lonlat, v_lonlat + diff --git a/sose_roms_seasonal.py b/sose_roms_seasonal.py index 02a19a0..101e28e 100644 --- a/sose_roms_seasonal.py +++ b/sose_roms_seasonal.py @@ -45,7 +45,7 @@ def sose_roms_seasonal (file_path, var_name, lon0, depth_bdry, save=False, fig_n var_max = 7.5 var_ticks = 1 elif var_name == 'salt': - var_min = 34.2 + var_min = 33.8 var_max = 34.8 var_ticks = 0.2 else: diff --git a/spinup_plots.py b/spinup_plots.py index 2689bc1..53b4b2e 100644 --- a/spinup_plots.py +++ b/spinup_plots.py @@ -3,6 +3,7 @@ from matplotlib.pyplot import * from os.path import * from cartesian_grid_3d import * +from rotate_vector_roms import * # Analyse a ROMS spinup by calculating and plotting 9 timeseries: # Total heat content @@ -12,7 +13,7 @@ # Total kinetic energy # Drake Passage transport # Total sea ice extent -# Net sea ice-to-ocean freshwater flux +# Total sea ice volume # Area-averaged bottom water temperature in ice shelf cavities @@ -105,14 +106,13 @@ def calc_ohc (file_path, dV, rho, t): return ohc -# Calculate total salt content at the given timestep t. +# Calculate average salinity at the given timestep t. 
# Input: # file_path = path to ocean history/averages file # dV = elements of volume on the rho grid, masked with land mask -# rho = density on the rho grid at timestep t # t = timestep index in file_path -# Output: totalsalt = total salt content (kg) -def calc_totalsalt (file_path, dV, rho, t): +# Output: avgsalt = average salinity (psu) +def calc_avgsalt (file_path, dV, t): # Read salinity, converting to float128 to prevent overflow during # integration @@ -120,9 +120,9 @@ def calc_totalsalt (file_path, dV, rho, t): salt = ma.asarray(id.variables['salt'][t,:,:-15,:-3], dtype=float128) id.close() - # Integrate 1e-3*salt*rho over volume to get total mass of salt - totalsalt = sum(1e-3*salt*rho*dV) - return totalsalt + # Integrate to get average + avgsalt = sum(salt*dV)/sum(dV) + return avgsalt # Calculate net basal mass loss based on the given ice shelf melt rate field. @@ -193,13 +193,14 @@ def calc_tke (file_path, dV, rho, t): # Calculate zonal transport through the Drake Passage. # Input: +# grid_path = path to ROMS grid file # file_path = path to ocean history/averages file # dy_wct = differential of y times water column thickness for each cell on the # 2D rho-grid, masked with land mask # t = timestep index in file_path # Output: drakepsg_trans = zonal transport through the Drake Passage (60W), # integrated over depth and latitude -def calc_drakepsgtrans (file_path, dy_wct, t): +def calc_drakepsgtrans (grid_path, file_path, dy_wct, t): # Bounds on Drake Passage; edit for new grids # i-index of single north-south line to plot (representing a zonal slice); @@ -213,25 +214,24 @@ def calc_drakepsgtrans (file_path, dy_wct, t): j_min = 229 j_max = 298 - # Read ubar, converting to float128 to prevent overflow during integration + grid_id = Dataset(grid_path, 'r') + angle = grid_id.variables['angle'][:-15,:] + grid_id.close() + + # Rotate velocities into lat-lon space id = Dataset(file_path, 'r') ubar = ma.asarray(id.variables['ubar'][t,:-15,:], dtype=float128) + vbar = ma.asarray(id.variables['vbar'][t,:-15,:], dtype=float128) id.close() - - # Interpolate ubar onto the rho-grid - w_bdry_ubar = 0.5*(ubar[:,0] + ubar[:,-1]) - middle_ubar = 0.5*(ubar[:,0:-1] + ubar[:,1:]) - e_bdry_ubar = w_bdry_ubar[:] - ubar_rho = ma.concatenate((w_bdry_ubar[:,None], middle_ubar, e_bdry_ubar[:,None]), axis=1) - # Throw away periodic boundary overlap - ubar = ubar[:,:-3] - - # Trim arrays to these bounds - ubar_rho_DP = ubar_rho[j_min:j_max,i_DP] + ubar_lonlat, vbar_lonlat = rotate_vector_roms(ubar, vbar, angle) + # Throw away the overlapping periodic boundary + ubar_lonlat = ubar_lonlat[:,:-3] + # Trim to Drake Passage bounds + ubar_DP = ubar_lonlat[j_min:j_max,i_DP] dy_wct_DP = dy_wct[j_min:j_max,i_DP] # Calculate transport - transport = sum(ubar_rho_DP*dy_wct_DP) + transport = sum(ubar_DP*dy_wct_DP) # Divide by 1e6 to convert to Sv return transport*1e-6 @@ -242,7 +242,7 @@ def calc_drakepsgtrans (file_path, dy_wct, t): # cice_path = path to CICE history file # dA = elements of area on the 2D rho grid (any mask will be removed) # t = timestep index in cice_path -# Output: totalice = total sea ice extent (m^2) +# Output: totalice = total sea ice extent (million km^2) def calc_totalice (cice_path, dA, t): id = Dataset(cice_path, 'r') @@ -266,6 +266,33 @@ def calc_totalice (cice_path, dA, t): return totalice*1e-12 +# Calculate total sea ice volume at the given timestep t. 
+# Input: +# cice_path = path to CICE history file +# dA = elements of area on the 2D rho grid (any mask will be removed) +# t = timestep index in cice_path +# Output = totalvice = total sea ice volume (million km^3) +def calc_totalvice (cice_path, dA, t): + + id = Dataset(cice_path, 'r') + # Read sea ice area fraction and height + aice = ma.asarray(id.variables['aice'][t,:-15,:-3], dtype=float128) + hi = ma.asarray(id.variables['hi'][t,:-15,:-3], dtype=float128) + id.close() + + # Remove masks on aice, hi, dA. Fill aice and hi with zeros on land mask. + aice_nomask = aice.data + aice_nomask[aice.mask] = 0.0 + hi_nomask = hi.data + hi_nomask[hi.mask] = 0.0 + dA_nomask = dA.data + + # Integrate volume + totalvice = sum(dA_nomask*aice_nomask*hi_nomask) + # Convert to million km^3 and return + return totalvice*1e-12 + + # Calculate total sea ice-to-ocean freshwater flux at the given timestep t. # Input: # cice_path = path to CICE history file @@ -316,12 +343,13 @@ def calc_bwtemp (file_path, dA, t): # Main routine # Input: +# grid_path = path to ROMS grid file # file_path = path to ocean history/averages file # cice_path = path to CICE history file # log_path = path to log file (if it exists, previously calculated values will # be read from it; regardless, it will be overwritten with all # calculated values following computation) -def spinup_plots (file_path, cice_path, log_path): +def spinup_plots (grid_path, file_path, cice_path, log_path): # Observed basal mass loss (Rignot 2013) and uncertainty obs_massloss = 1325 @@ -332,12 +360,13 @@ def spinup_plots (file_path, cice_path, log_path): time = [] ohc = [] - totalsalt = [] + avgsalt = [] massloss = [] tke = [] drakepsgtrans = [] totalice = [] - totalfwflux = [] + totalvice = [] + #totalfwflux = [] bwtemp = [] # Check if the log file exists if exists(log_path): @@ -358,7 +387,7 @@ def spinup_plots (file_path, cice_path, log_path): break for line in f: try: - totalsalt.append(float(line)) + avgsalt.append(float(line)) except (ValueError): break for line in f: @@ -383,9 +412,14 @@ def spinup_plots (file_path, cice_path, log_path): break for line in f: try: - totalfwflux.append(float(line)) + totalvice.append(float(line)) except(ValueError): break + #for line in f: + #try: + #totalfwflux.append(float(line)) + #except(ValueError): + #break for line in f: bwtemp.append(float(line)) f.close() @@ -408,18 +442,20 @@ def spinup_plots (file_path, cice_path, log_path): print 'Calculating ocean heat content' ohc.append(calc_ohc(file_path, dV, rho, t)) print 'Calculating total salt content' - totalsalt.append(calc_totalsalt(file_path, dV, rho, t)) + avgsalt.append(calc_avgsalt(file_path, dV, t)) print 'Calculating basal mass loss' massloss_tmp, massloss_factor = calc_massloss(file_path, dA, t) massloss.append(massloss_tmp) print 'Calculating total kinetic energy' tke.append(calc_tke(file_path, dV, rho, t)) print 'Calculating Drake Passage transport' - drakepsgtrans.append(calc_drakepsgtrans(file_path, dy_wct, t)) + drakepsgtrans.append(calc_drakepsgtrans(grid_path, file_path, dy_wct, t)) print 'Calculating total sea ice extent' totalice.append(calc_totalice(cice_path, dA, t)) - print 'Calculating total sea ice-to-ocean freshwater flux' - totalfwflux.append(calc_totalfwflux(cice_path, dA, t)) + print 'Calculating total sea ice volume' + totalvice.append(calc_totalvice(cice_path, dA, t)) + #print 'Calculating total sea ice-to-ocean freshwater flux' + #totalfwflux.append(calc_totalfwflux(cice_path, dA, t)) print 'Calculating average bottom water temperature 
in ice shelf cavities' bwtemp.append(calc_bwtemp(file_path, dA, t)) @@ -434,11 +470,11 @@ def spinup_plots (file_path, cice_path, log_path): print 'Plotting total salt content' clf() - plot(time, totalsalt) + plot(time, avgsalt) xlabel('Years') - ylabel('Southern Ocean Salt Content (kg)') + ylabel('Average Salinity (psu)') grid(True) - savefig('totalsalt.png') + savefig('avgsalt.png') print 'Plotting basal mass loss and area-averaged ice shelf melt rate' clf() @@ -506,15 +542,23 @@ def spinup_plots (file_path, cice_path, log_path): grid(True) savefig('totalice.png') - print 'Plotting total sea ice-to-ocean freshwater flux' + print 'Plotting total sea ice volume' clf() - plot(time, totalfwflux) - # Add a line at zero - plot(time, zeros(len(totalfwflux)), color='black') + plot(time, totalvice) xlabel('Years') - ylabel('Sea Ice-to-Ocean Freshwater Flux (Sv)') + ylabel(r'Total Sea Ice Volume (million km$^3$)') grid(True) - savefig('totalfwflux.png') + savefig('totalvice.png') + + #print 'Plotting total sea ice-to-ocean freshwater flux' + #clf() + #plot(time, totalfwflux) + # Add a line at zero + #plot(time, zeros(len(totalfwflux)), color='black') + #xlabel('Years') + #ylabel('Sea Ice-to-Ocean Freshwater Flux (Sv)') + #grid(True) + #savefig('totalfwflux.png') print 'Plotting bottom water temperature' clf() @@ -532,8 +576,8 @@ def spinup_plots (file_path, cice_path, log_path): f.write('Southern Ocean Heat Content (J):\n') for elm in ohc: f.write(str(elm) + '\n') - f.write('Southern Ocean Salt Content (kg):\n') - for elm in totalsalt: + f.write('Average Salinity (psu):\n') + for elm in avgsalt: f.write(str(elm) + '\n') f.write('Ice Shelf Basal Mass Loss (Gt/y):\n') for elm in massloss: f.write(str(elm) + '\n') @@ -547,9 +591,12 @@ def spinup_plots (file_path, cice_path, log_path): f.write('Total Sea Ice Extent (million km^2):\n') for elm in totalice: f.write(str(elm) + '\n') - f.write('Total Sea Ice-to-Ocean Freshwater Flux (Sv):\n') - for elm in totalfwflux: + f.write('Total Sea Ice Volume (million km^3):\n') + for elm in totalvice: f.write(str(elm) + '\n') + #f.write('Total Sea Ice-to-Ocean Freshwater Flux (Sv):\n') + #for elm in totalfwflux: + #f.write(str(elm) + '\n') f.write('Average Bottom Water Temperature in Ice Shelf Cavities (C):\n') for elm in bwtemp: f.write(str(elm) + '\n') @@ -559,10 +605,11 @@ # Command-line interface if __name__ == "__main__": + grid_path = raw_input('Enter path to ROMS grid file: ') file_path = raw_input('Enter path to ocean history/averages file: ') cice_path = raw_input('Enter path to CICE history file: ') log_path = raw_input('Enter path to log file to save values and/or read previously calculated values: ') - spinup_plots(file_path, cice_path, log_path) + spinup_plots(grid_path, file_path, cice_path, log_path) diff --git a/massloss.py b/timeseries_massloss.py similarity index 63% rename from massloss.py rename to timeseries_massloss.py index ded151e..77dd75c 100644 --- a/massloss.py +++ b/timeseries_massloss.py @@ -11,26 +11,25 @@ # log_path = path to log file (if it exists, previously calculated values will # be read from it; regardless, it will be overwritten with all # calculated values following computation) -def massloss (file_path, log_path): +def timeseries_massloss (file_path, log_path): # Titles and figure names for each ice shelf - names = ['Larsen D Ice Shelf', 'Larsen C Ice Shelf', 'Wilkins & George VI & Stange Ice Shelves', 'Ronne-Filchner Ice Shelf', 'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf', 'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf', 'Nickerson
Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf', 'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf', 'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf', 'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf', 'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves', 'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf'] - fig_names = ['larsen_d.png', 'larsen_c.png', 'wilkins_georgevi_stange.png', 'ronne_filchner.png', 'abbot.png', 'pig.png', 'thwaites.png', 'dotson.png', 'getz.png', 'nickerson.png', 'sulzberger.png', 'mertz.png', 'totten_moscowuni.png', 'shackleton.png', 'west.png', 'amery.png', 'princeharald.png', 'baudouin_borchgrevink.png', 'lazarev.png', 'nivl.png', 'fimbul_jelbart_ekstrom.png', 'brunt_riiserlarsen.png', 'ross.png'] + names = ['All Ice Shelves', 'Larsen D Ice Shelf', 'Larsen C Ice Shelf', 'Wilkins & George VI & Stange Ice Shelves', 'Ronne-Filchner Ice Shelf', 'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf', 'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf', 'Nickerson Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf', 'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf', 'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf', 'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf', 'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves', 'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf'] + fig_names = ['total_massloss.png', 'larsen_d.png', 'larsen_c.png', 'wilkins_georgevi_stange.png', 'ronne_filchner.png', 'abbot.png', 'pig.png', 'thwaites.png', 'dotson.png', 'getz.png', 'nickerson.png', 'sulzberger.png', 'mertz.png', 'totten_moscowuni.png', 'shackleton.png', 'west.png', 'amery.png', 'princeharald.png', 'baudouin_borchgrevink.png', 'lazarev.png', 'nivl.png', 'fimbul_jelbart_ekstrom.png', 'brunt_riiserlarsen.png', 'ross.png'] # Limits on longitude and latitude for each ice shelf # These depend on the source geometry, in this case RTopo 1.05 # Note there is one extra index at the end of each array; this is because # the Ross region crosses the line 180W and therefore is split into two - lon_min = [-62.67, -65.5, -79.17, -85, -104.17, -102.5, -108.33, -114.5, -135.67, -149.17, -155, 144, 115, 94.17, 80.83, 65, 33.83, 19, 12.9, 9.33, -10.05, -28.33, -180, 158.33] - lon_max = [-59.33, -60, -66.67, -28.33, -88.83, -99.17, -103.33, -111.5, -114.33, -140, -145, 146.62, 123.33, 102.5, 89.17, 75, 37.67, 33.33, 16.17, 12.88, 7.6, -10.33, -146.67, 180] - lat_min = [-73.03, -69.35, -74.17, -83.5, -73.28, -75.5, -75.5, -75.33, -74.9, -76.42, -78, -67.83, -67.17, -66.67, -67.83, -73.67, -69.83, -71.67, -70.5, -70.75, -71.83, -76.33, -85, -84.5] - lat_max = [-69.37, -66.13, -69.5, -74.67, -71.67, -74.17, -74.67, -73.67, -73, -75.17, -76.41, -66.67, -66.5, -64.83, -66.17, -68.33, -68.67, -68.33, -69.33, -69.83, -69.33, -71.5, -77.77, -77] + lon_min = [-180, -62.67, -65.5, -79.17, -85, -104.17, -102.5, -108.33, -114.5, -135.67, -149.17, -155, 144, 115, 94.17, 80.83, 65, 33.83, 19, 12.9, 9.33, -10.05, -28.33, -180, 158.33] + lon_max = [180, -59.33, -60, -66.67, -28.33, -88.83, -99.17, -103.33, -111.5, -114.33, -140, -145, 146.62, 123.33, 102.5, 89.17, 75, 37.67, 33.33, 16.17, 12.88, 7.6, -10.33, -146.67, 180] + lat_min = [-90, -73.03, -69.35, -74.17, -83.5, -73.28, -75.5, -75.5, -75.33, -74.9, -76.42, -78, -67.83, -67.17, -66.67, -67.83, -73.67, -69.83, -71.67, -70.5, -70.75, -71.83, -76.33, -85, -84.5] + lat_max = [-30, -69.37, -66.13, -69.5, -74.67, -71.67, -74.17, -74.67, -73.67, -73, -75.17, -76.41, 
-66.67, -66.5, -64.83, -66.17, -68.33, -68.67, -68.33, -69.33, -69.83, -69.33, -71.5, -77.77, -77] # Observed mass loss (Rignot 2013) and uncertainty for each ice shelf, in Gt/y - obs_massloss = [1.4, 20.7, 135.4, 155.4, 51.8, 101.2, 97.5, 45.2, 144.9, 4.2, 18.2, 7.9, 90.6, 72.6, 27.2, 35.5, -2, 21.6, 6.3, 3.9, 26.8, 9.7, 47.7] - obs_massloss_error = [14, 67, 40, 45, 19, 8, 7, 4, 14, 2, 3, 3, 8, 15, 10, 23, 3, 18, 2, 2, 14, 16, 34] + obs_massloss = [1325, 1.4, 20.7, 135.4, 155.4, 51.8, 101.2, 97.5, 45.2, 144.9, 4.2, 18.2, 7.9, 90.6, 72.6, 27.2, 35.5, -2, 21.6, 6.3, 3.9, 26.8, 9.7, 47.7] + obs_massloss_error = [235, 14, 67, 40, 45, 19, 8, 7, 4, 14, 2, 3, 3, 8, 15, 10, 23, 3, 18, 2, 2, 14, 16, 34] # Observed ice shelf melt rates and uncertainty - obs_ismr = [0.1, 0.4, 3.1, 0.3, 1.7, 16.2, 17.7, 7.8, 4.3, 0.6, 1.5, 1.4, 7.7, 2.8, 1.7, 0.6, -0.4, 0.4, 0.7, 0.5, 0.5, 0.1, 0.1] - obs_ismr_error = [0.6, 1, 0.8, 0.1, 0.6, 1, 1, 0.6, 0.4, 0.3, 0.3, 0.6, 0.7, 0.6, 0.7, 0.4, 0.6, 0.4, 0.2, 0.2, 0.2, 0.2, 0.1] - + obs_ismr = [0.85, 0.1, 0.4, 3.1, 0.3, 1.7, 16.2, 17.7, 7.8, 4.3, 0.6, 1.5, 1.4, 7.7, 2.8, 1.7, 0.6, -0.4, 0.4, 0.7, 0.5, 0.5, 0.1, 0.1] + obs_ismr_error = [0.1, 0.6, 1, 0.8, 0.1, 0.6, 1, 1, 0.6, 0.4, 0.3, 0.3, 0.6, 0.7, 0.6, 0.7, 0.4, 0.6, 0.4, 0.2, 0.2, 0.2, 0.2, 0.1] # Density of ice in kg/m^3 rho_ice = 916 @@ -84,45 +83,38 @@ def massloss (file_path, log_path): if exists(log_path): # Fill first start_t timesteps with existing values massloss[:,0:start_t] = old_massloss[:,:] + # Read melt rate and convert from m/s to m/y + ismr = id.variables['m'][:,:-15,:-3]*365.25*24*60*60 + id.close() + # Set up array of masked area values for each ice shelf + dA_masked = ma.empty([len(names), size(dA,0), size(dA,1)]) # Set up array of conversion factors from mass loss to area-averaged melt # rate for each ice shelf factors = empty(len(names)) - - # Process each timestep separately to prevent memory overflow + for index in range(len(names)): + # Mask dA for the current ice shelf (note dA is already masked + # with the global ice shelf mask, so just restrict the indices + # to isolate the given ice shelf) + if index == len(names)-1: + # Ross region is split into two + dA_tmp = ma.masked_where(((lon < lon_min[index]) + (lon > lon_max[index]) + (lat < lat_min[index]) + (lat > lat_max[index]))*((lon < lon_min[index+1]) + (lon > lon_max[index+1]) + (lat < lat_min[index+1]) + (lat > lat_max[index+1])), dA) + else: + dA_tmp = ma.masked_where((lon < lon_min[index]) + (lon > lon_max[index]) + (lat < lat_min[index]) + (lat > lat_max[index]), dA) + dA_masked[index,:,:] = dA_tmp[:,:] + area_tmp = sum(dA_masked[index,:,:]) + factors[index] = 1e12/(rho_ice*area_tmp) + print 'Area of ' + names[index] + ': ' + str(area_tmp) + ' m^2' + + # Build timeseries for t in range(start_t, size(time)): - print 'Processing timestep ' + str(t-start_t+1) + ' of ' + str(size(time)-start_t) - - # Read ice shelf melt rate, converting to float128 to prevent overflow - # during integration - ismr = ma.asarray(id.variables['m'][t-start_t,:-15,:-3], dtype=float128) - # Convert from m/s to m/y - ismr = ismr*365.25*24*60*60 - # Loop over ice shelves for index in range(len(names)): - - # Mask dA for the current ice shelf (note dA is already masked - # with the global ice shelf mask, so just restrict the indices - # to isolate the given ice shelf) - if index == len(names)-1: - # Ross region is split into two - dA_tmp = ma.masked_where(((lon < lon_min[index]) + (lon > lon_max[index]) + (lat < lat_min[index]) + (lat > lat_max[index]))*((lon < 
lon_min[index+1]) + (lon > lon_max[index+1]) + (lat < lat_min[index+1]) + (lat > lat_max[index+1])), dA) - else: - dA_tmp = ma.masked_where((lon < lon_min[index]) + (lon > lon_max[index]) + (lat < lat_min[index]) + (lat > lat_max[index]), dA) - # Integrate ice shelf melt rate over area to get volume loss - volumeloss = sum(ismr*dA_tmp) + volumeloss = sum(ismr[t-start_t,:,:]*dA_masked[index,:,:]) # Convert to mass loss in Gt/y massloss[index, t] = 1e-12*rho_ice*volumeloss - if t == start_t: - # Calculate conversion factor on first timestep - factors[index] = 1e12/(rho_ice*sum(dA_tmp)) - print 'Area of ' + names[index] + ': ' + str(sum(dA_tmp)) - - id.close() - # Plot each timeseries print 'Plotting' for index in range(len(names)): @@ -152,12 +144,12 @@ def massloss (file_path, log_path): max_tick += dtick ax1.set_ylim([min_tick, max_tick]) # Title and ticks in blue for this side of the plot - ax1.set_ylabel('Basal Mass Loss (Gt/y)', color='b', fontsize=18) + ax1.set_ylabel('Basal Mass Loss (Gt/y)', color='b') for t1 in ax1.get_yticklabels(): t1.set_color('b') - ax1.set_xlabel('Years', fontsize=18) - setp(ax1.get_xticklabels(), fontsize=15) - setp(ax1.get_yticklabels(), fontsize=15) + ax1.set_xlabel('Years') +# setp(ax1.get_xticklabels(), fontsize=15) +# setp(ax1.get_yticklabels(), fontsize=15) ax1.grid(True) # Twin axis for melt rates ax2 = ax1.twinx() @@ -168,12 +160,12 @@ def massloss (file_path, log_path): ax2.axhline(ismr_low, color='r', linestyle='dashed') ax2.axhline(ismr_high, color='r', linestyle='dashed') # Title and ticks in red for this side of the plot - ax2.set_ylabel('Area-Averaged Ice Shelf Melt Rate (m/y)', color='r', fontsize=18) + ax2.set_ylabel('Area-Averaged Ice Shelf Melt Rate (m/y)', color='r') for t2 in ax2.get_yticklabels(): t2.set_color('r') - setp(ax2.get_yticklabels(), fontsize=15) +# setp(ax2.get_yticklabels(), fontsize=15) # Name of the ice shelf for the main title - title(names[index], fontsize=24) + title(names[index]) fig.savefig(fig_names[index]) print 'Saving results to log file' @@ -227,5 +219,5 @@ def calc_grid (file_path): file_path = raw_input('Enter path to ocean history/averages file: ') log_path = raw_input('Enter path to log file to save values and/or read previously calculated values: ') - massloss(file_path, log_path) + timeseries_massloss(file_path, log_path) diff --git a/uv_vectorplot.py b/uv_vectorplot.py index 50fbc9c..cd467a0 100644 --- a/uv_vectorplot.py +++ b/uv_vectorplot.py @@ -1,10 +1,12 @@ from netCDF4 import Dataset from numpy import * from matplotlib.pyplot import * +from rotate_vector_roms import * # Make a circumpolar Antarctic plot of speed overlaid with velocity vectors at # the given depth (surface, bottom, or vertically averaged). 
# Input: +# grid_path = path to ROMS grid file # file_path = path to ocean history/averages file # tstep = timestep in file_path to plot (1-indexed) # depth_key = integer flag indicating whether to plot the surface velocity (1), @@ -12,7 +14,7 @@ # save = optional boolean flag indicating that the plot should be saved to a # file rather than displayed on the screen # fig_name = if save=True, filename for figure -def uv_vectorplot (file_path, tstep, depth_key, save=False, fig_name=None): +def uv_vectorplot (grid_path, file_path, tstep, depth_key, save=False, fig_name=None): # Radius of the Earth in metres r = 6.371e6 @@ -22,6 +24,10 @@ def uv_vectorplot (file_path, tstep, depth_key, save=False, fig_name=None): # single point or the plot will be way too crowded) block = 15 + # Read angle from grid file + grid_id = Dataset(grid_path, 'r') + angle = grid_id.variables['angle'][:-15,:] + grid_id.close() # Read grid and velocity data id = Dataset(file_path, 'r') lon = id.variables['lon_rho'][:-15,:-2] @@ -40,19 +46,11 @@ def uv_vectorplot (file_path, tstep, depth_key, save=False, fig_name=None): v = id.variables['vbar'][tstep-1,:-15,:] id.close() - # Interpolate u to the rho-grid - w_bdry_u = 0.5*(u[:,0] + u[:,-1]) - middle_u = 0.5*(u[:,0:-1] + u[:,1:]) - e_bdry_u = w_bdry_u[:] - u_rho = ma.concatenate((w_bdry_u[:,None], middle_u, e_bdry_u[:,None]), axis=1) - # Interplate v to the rho-grid - s_bdry_v = v[0,:] - middle_v = 0.5*(v[0:-1,:] + v[1:,:]) - n_bdry_v = v[-1,:] - v_rho = ma.concatenate((s_bdry_v[None,:], middle_v, n_bdry_v[None,:]), axis=0) + # Rotate velocities to lat-lon space + u_lonlat, v_lonlat = rotate_vector_roms(u, v, angle) # Throw away the overlapping periodic boundary - u_rho = u_rho[:,:-2] - v_rho = v_rho[:,:-2] + u_rho = u_lonlat[:,:-2] + v_rho = v_lonlat[:,:-2] # Calculate speed for the background filled contour plot speed = sqrt(u_rho**2 + v_rho**2) @@ -60,8 +58,8 @@ def uv_vectorplot (file_path, tstep, depth_key, save=False, fig_name=None): X = -(lat+90)*cos(lon*deg2rad+pi/2) Y = (lat+90)*sin(lon*deg2rad+pi/2) - # Calculate velocity components in lon-lat space (just differentiate and - # rearrange spherical coordinate transformations) + # Calculate velocity components in spherical coordinate space + # (just differentiate and rearrange spherical coordinate transformations) dlon_dt = u_rho/(r*cos(lat*deg2rad)*deg2rad) dlat_dt = v_rho/(r*deg2rad) # Calculate velocity components in X-Y space (just differentiate and @@ -123,6 +121,7 @@ def uv_vectorplot (file_path, tstep, depth_key, save=False, fig_name=None): # Command-line interface if __name__ == "__main__": + grid_path = raw_input("Path to ROMS grid file: ") file_path = raw_input("Path to ocean history/averages file: ") tstep = int(raw_input("Timestep number (starting at 1): ")) # Parse depth information @@ -141,7 +140,7 @@ def uv_vectorplot (file_path, tstep, depth_key, save=False, fig_name=None): save = False fig_name = None # Make the plot - uv_vectorplot(file_path, tstep, depth_key, save, fig_name) + uv_vectorplot(grid_path, file_path, tstep, depth_key, save, fig_name) # Repeat until the user wants to exit while True: @@ -177,7 +176,7 @@ def uv_vectorplot (file_path, tstep, depth_key, save=False, fig_name=None): # Get file name for figure fig_name = raw_input("File name for figure: ") # Make the plot - uv_vectorplot(file_path, tstep, depth_key, save, fig_name) + uv_vectorplot(grid_path, file_path, tstep, depth_key, save, fig_name) else: break diff --git a/uvp_grid_fix.py b/uvp_grid_fix.py new file mode 100644 index 
0000000..6dca499 --- /dev/null +++ b/uvp_grid_fix.py @@ -0,0 +1,92 @@ +from netCDF4 import Dataset +from numpy import * + +# The Matlab scripts I use to generate the ROMS grid assume that the u-grid, +# v-grid, and psi-grid points have the same locations as the rho-grid, and they +# just chop off the last point in the required dimension(s). This is not +# correct. This script fixes that. +# Input: grid_file = path to ROMS grid file to edit +def uvp_grid_fix (grid_file): + + # Read x and y coordinates for rho grid + id = Dataset(grid_file, 'a') + x_rho = id.variables['x_rho'][:,:] + y_rho = id.variables['y_rho'][:,:] + + # Find x and y coordinates for u-grid: halfway between the rho-points + # in the x direction + x_u = 0.5*(x_rho[:,:-1] + x_rho[:,1:]) + y_u = 0.5*(y_rho[:,:-1] + y_rho[:,1:]) + # Find longitude and latitude + lon_u, lat_u = polarstereo_inv(x_u, y_u) + + # Find x and y coordinates for v-grid: halfway between the rho-points + # in the y direction + x_v = 0.5*(x_rho[:-1,:] + x_rho[1:,:]) + y_v = 0.5*(y_rho[:-1,:] + y_rho[1:,:]) + # Find longitude and latitude + lon_v, lat_v = polarstereo_inv(x_v, y_v) + + # Find x and y coordinates for psi-grid: on the corners, i.e. halfway + # between the rho-points in both directions + x_psi = 0.5*(x_v[:,:-1] + x_v[:,1:]) + y_psi = 0.5*(y_u[:-1,:] + y_u[1:,:]) + # Find longitude and latitude + lon_psi, lat_psi = polarstereo_inv(x_psi, y_psi) + + # Save updated variables + id.variables['x_u'][:,:] = x_u + id.variables['y_u'][:,:] = y_u + id.variables['x_v'][:,:] = x_v + id.variables['y_v'][:,:] = y_v + id.variables['x_psi'][:,:] = x_psi + id.variables['y_psi'][:,:] = y_psi + id.variables['lon_u'][:,:] = lon_u + id.variables['lat_u'][:,:] = lat_u + id.variables['lon_v'][:,:] = lon_v + id.variables['lat_v'][:,:] = lat_v + id.variables['lon_psi'][:,:] = lon_psi + id.variables['lat_psi'][:,:] = lat_psi + id.close() + + +# Given x and y coordinates on a polar stereographic projection, find the +# longitude and latitude by numerically inverting the polar stereographic +# transformation. Adapted from https://au.mathworks.com/matlabcentral/fileexchange/32907-polar-stereographic-coordinate-transformation--map-to-lat-lon- +# Input: +# x, y = coordinates in x-y space +# Output: +# lon, lat = corresponding longitude and latitude in degrees +def polarstereo_inv (x, y): + + a = 6378.137 # Radius of Earth + e = sqrt(6.694379852e-3) # Eccentricity + phi_c = 71 # Projection is tangent to this latitude + deg2rad = pi/180 + + x = -x + y = -y + phi_c = phi_c*deg2rad + + t_c = tan(pi/4 - phi_c/2)/((1-e*sin(phi_c))/(1+e*sin(phi_c)))**(e/2) + m_c = cos(phi_c)/sqrt(1-e**2*(sin(phi_c))**2) + rho = sqrt(x**2 + y**2) + t = rho*t_c/(a*m_c) + + chi = pi/2 - 2*arctan(t) + lat = chi + (e**2/2 + 5*e**4/24 + e**6/12 + 13*e**8/360)*sin(2*chi) + (7*e**4/48 + 29*e**6/240 + 811*e**8/11520)*sin(4*chi) + (7*e**6/120+81*e**8/1120)*sin(6*chi) + (4279*e**8/161280)*sin(8*chi) + lon = arctan2(x, -y) + + lat = -lat/deg2rad + lon = -lon/deg2rad + index = lon < 0 + lon[index] += 360 + + return lon, lat + + +# Command-line interface +if __name__ == '__main__': + + grid_file = raw_input("Path to ROMS grid file: ") + uvp_grid_fix(grid_file) diff --git a/zonal_plot.py b/zonal_plot.py index 054c7af..05c90a1 100644 --- a/zonal_plot.py +++ b/zonal_plot.py @@ -2,6 +2,7 @@ from numpy import * from matplotlib.pyplot import * from calc_z import * +from rotate_vector_roms import * # Create a zonally averaged or zonally sliced plot (i.e. depth vs latitude) # of the given variable. 
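As a quick cross-check of the polarstereo_inv routine added in uvp_grid_fix.py above, the sketch below pairs it with a forward polar stereographic transform. The forward function polarstereo_fwd is not part of the repository; it is an illustrative helper assuming the same ellipsoid constants (a in km, eccentricity e, projection tangent at 71 S) and the same sign conventions as polarstereo_inv, so a round trip should recover the input longitude and latitude to within the truncation error of the latitude series.

from numpy import *
from uvp_grid_fix import polarstereo_inv

def polarstereo_fwd (lon, lat):
    # Hypothetical forward transform matching polarstereo_inv's conventions:
    # lon in degrees east, lat in degrees (negative in the Southern Hemisphere)
    a = 6378.137
    e = sqrt(6.694379852e-3)
    phi_c = 71*pi/180
    deg2rad = pi/180
    # Geodetic latitude as a positive angle, as in the inverse routine
    phi = -lat*deg2rad
    t_c = tan(pi/4 - phi_c/2)/((1-e*sin(phi_c))/(1+e*sin(phi_c)))**(e/2)
    m_c = cos(phi_c)/sqrt(1 - e**2*(sin(phi_c))**2)
    t = tan(pi/4 - phi/2)/((1-e*sin(phi))/(1+e*sin(phi)))**(e/2)
    rho = a*m_c*t/t_c
    x = rho*sin(lon*deg2rad)
    y = rho*cos(lon*deg2rad)
    return x, y

# Round trip: should recover approximately (30 E, 70 S)
x, y = polarstereo_fwd(array([30.0]), array([-70.0]))
lon_back, lat_back = polarstereo_inv(x, y)

The same round trip, applied to the grid's x_rho/y_rho values, is one way to sanity-check the new lon_u/lat_u, lon_v/lat_v, and lon_psi/lat_psi fields written by uvp_grid_fix.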
@@ -23,7 +24,8 @@ # save = optional boolean flag; if True, the figure will be saved with file name # fig_name, if False, the figure will display on the screen # fig_name = optional string containing filename for figure, if save=True -def zonal_plot (file_path, var_name, tstep, lon_key, lon0, lon_bounds, depth_min, colour_bounds=None, save=False, fig_name=None): +# grid_path = path to grid file; only needed if var_name is a vector component +def zonal_plot (file_path, var_name, tstep, lon_key, lon0, lon_bounds, depth_min, colour_bounds=None, save=False, fig_name=None, grid_path=None): # Grid parameters theta_s = 0.9 @@ -42,57 +44,42 @@ def zonal_plot (file_path, var_name, tstep, lon_key, lon0, lon_bounds, depth_min units = id.variables[var_name].units long_name = id.variables[var_name].long_name - # Figure out what grid the variable is on - grid_string = id.variables[var_name].coordinates - if grid_string.startswith('lon_rho'): - grid_name = 'rho' - lon_name = 'lon_rho' - lat_name = 'lat_rho' - elif grid_string.startswith('lon_u'): - grid_name = 'u' - lon_name = 'lon_u' - lat_name = 'lat_u' - elif grid_string.startswith('lon_v'): - grid_name = 'v' - lon_name = 'lon_v' - lat_name = 'lat_v' - else: - print 'Grid type ' + grid_string + ' not supported' - id.close() - return + # Rotate velocity if necessary + if var_name in ['u', 'v']: + grid_id = Dataset(grid_path, 'r') + angle = grid_id.variables['angle'][:-15,:] + grid_id.close() + if var_name == 'u': + data_3d_ugrid = data_3d[:,:,:] + data_3d = ma.empty([data_3d_ugrid.shape[0], data_3d_ugrid.shape[1], data_3d_ugrid.shape[2]+1]) + for k in range(N): + u_data = data_3d_ugrid[k,:,:] + v_data = id.variables['v'][tstep-1,k,:-15,:] + u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle) + data_3d[k,:,:] = u_data_lonlat + elif var_name == 'v': + data_3d_vgrid = data_3d[:,:,:] + data_3d = ma.empty([data_3d_vgrid.shape[0], data_3d_vgrid.shape[1]+1, data_3d_vgrid.shape[2]]) + for k in range(N): + v_data = data_3d_vgrid[k,:,:] + u_data = id.variables['u'][tstep-1,k,:-15,:] + u_data_lonlat, v_data_lonlat = rotate_vector_roms(u_data, v_data, angle) + data_3d[k,:,:] = v_data_lonlat # Read grid variables h = id.variables['h'][:-15,:] zice = id.variables['zice'][:-15,:] - # h, zice, and zeta are on the rho-grid; interpolate if necessary - if grid_name == 'u': - h = 0.5*(h[:,0:-1] + h[:,1:]) - zice = 0.5*(zice[:,0:-1] + zice[:,1:]) - zeta = 0.5*(zeta[:,0:-1] + zeta[:,1:]) - elif grid_name == 'v': - h = 0.5*(h[0:-1,:] + h[1:,:]) - zice = 0.5*(zice[0:-1,:] + zice[1:,:]) - zeta = 0.5*(zeta[0:-1,:] + zeta[1:,:]) - # Read the correct lat and lon for this grid (determined previously) - lon_2d = id.variables[lon_name][:-15,:] - lat_2d = id.variables[lat_name][:-15,:] + lon_2d = id.variables['lon_rho'][:-15,:] + lat_2d = id.variables['lat_rho'][:-15,:] id.close() # Throw away periodic boundary overlap - if grid_name == 'u': - data_3d = data_3d[:,:,:-1] - zeta = zeta[:,:-1] - h = h[:,:-1] - zice = zice[:,:-1] - lon_2d = lon_2d[:,:-1] - lat_2d = lat_2d[:,:-1] - else: - data_3d = data_3d[:,:,:-2] - zeta = zeta[:,:-2] - h = h[:,:-2] - zice = zice[:,:-2] - lon_2d = lon_2d[:,:-2] - lat_2d = lat_2d[:,:-2] + data_3d = data_3d[:,:,:-2] + zeta = zeta[:,:-2] + h = h[:,:-2] + zice = zice[:,:-2] + lon_2d = lon_2d[:,:-2] + lat_2d = lat_2d[:,:-2] # Get a 3D array of z-coordinates; sc_r and Cs_r are unused in this script z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta) @@ -193,6 +180,7 @@ def zonal_plot (file_path, var_name, tstep, lon_key, 
lon0, lon_bounds, depth_min # There is land everywhere at the northern boundary # Show the first 2 degrees of this land mask lat_max = max(lat[:,j_max]) + 2 + lat_max = -65 xlim([lat_min, lat_max]) ylim([depth_min, 0]) @@ -441,6 +429,11 @@ def interp_lon_helper (lon, lon0): file_path = raw_input("Path to ocean history/averages file: ") var_name = raw_input("Variable name: ") + if var_name in ['u', 'v']: + # Will need the grid file to get the angle + grid_path = raw_input("Path to ROMS grid file: ") + else: + grid_path = None tstep = int(raw_input("Timestep number (starting at 1): ")) lon_type = raw_input("Single longitude (s) or zonal average (z)? ") @@ -479,7 +472,7 @@ def interp_lon_helper (lon, lon0): save = False fig_name = None - zonal_plot(file_path, var_name, tstep, lon_key, lon0, lon_bounds, depth_min, colour_bounds, save, fig_name) + zonal_plot(file_path, var_name, tstep, lon_key, lon0, lon_bounds, depth_min, colour_bounds, save, fig_name, grid_path) # Repeat until the user wants to exit while True: @@ -498,6 +491,9 @@ def interp_lon_helper (lon, lon0): elif int(changes) == 2: # New variable name var_name = raw_input("Variable name: ") + if var_name in ['u', 'v'] and grid_path is None: + # Will need the grid file to get the angle + grid_path = raw_input("Path to ROMS grid file: ") elif int(changes) == 3: # New timestep tstep = int(raw_input("Timestep number (starting at 1): ")) @@ -539,7 +535,7 @@ def interp_lon_helper (lon, lon0): fig_name = raw_input("File name for figure: ") # Make the plot - zonal_plot(file_path, var_name, tstep, lon_key, lon0, lon_bounds, depth_min, colour_bounds, save, fig_name) + zonal_plot(file_path, var_name, tstep, lon_key, lon0, lon_bounds, depth_min, colour_bounds, save, fig_name, grid_path) else: break
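Finally, since most of the changes in this patch funnel through the new rotate_vector_roms helper, a minimal usage sketch may be helpful. It mirrors the calls added in spinup_plots.py and uv_vectorplot.py; the file names below are placeholders, and the [:-15,:] slicing (dropping the northernmost 15 rows) follows the convention used throughout these scripts.

from netCDF4 import Dataset
from numpy import *
from rotate_vector_roms import *

# Placeholder paths; substitute the actual grid and history files
grid_path = 'roms_grid.nc'
file_path = 'ocean_avg.nc'

# Angle between the ROMS x-axis and east, on the rho-grid
grid_id = Dataset(grid_path, 'r')
angle = grid_id.variables['angle'][:-15,:]
grid_id.close()

# Barotropic velocities at the last timestep: ubar on the u-grid, vbar on the v-grid
id = Dataset(file_path, 'r')
ubar = id.variables['ubar'][-1,:-15,:]
vbar = id.variables['vbar'][-1,:-15,:]
id.close()

# Interpolate both components to the rho-grid and rotate into lon-lat space
ubar_lonlat, vbar_lonlat = rotate_vector_roms(ubar, vbar, angle)

Where angle is zero the rotation reduces to a plain average onto the rho-grid, which makes a convenient sanity check on any grid.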