From 1d1b3190ba3c69db466bfad4243b4c440c275f16 Mon Sep 17 00:00:00 2001 From: Kaitlin Naughten Date: Mon, 29 May 2017 15:47:15 +1000 Subject: [PATCH] Miscellaneous updates --- adv_frazil.py | 10 +- adv_freezingpt_slice.py | 15 +-- adv_mld.py | 10 +- adv_mld_2.py | 125 +++++++++++++++++++++ adv_polynyas.py | 2 +- adv_ross_tsplots.py | 164 +++++++++++++++++---------- adv_sss.py | 6 +- adv_thickness.py | 6 +- adv_thickness_2.py | 144 ++++++++++++++++++++++++ calc_rx1.py | 50 +++++++++ calc_z.py | 19 ++-- circumpolar_plot.py | 8 +- common_grid.py | 232 +++++++++++++++++++++++++++------------ compare_nic_seasonal.py | 16 ++- file_guide.txt | 8 -- find_isolated_points.py | 29 +++++ fix_isolated_points.py | 170 ++++++++++++++++++++++++++++ freezingpt_slice.py | 6 +- gadv_frazil_bathy.py | 10 +- gadv_frazil_thickness.py | 4 +- gadv_freezingpt_slice.py | 14 +-- h_circumpolar.py | 5 - i_slice.py | 6 +- ini_sss_circumpolar.py | 5 - ini_sst_circumpolar.py | 5 - make_density_file.py | 6 +- monthly_avg_cice.py | 33 ++++-- monthly_avg_roms.py | 30 +++-- nbdry_grid_hack.py | 2 +- overturning_plot.py | 84 -------------- plot_layers.py | 121 ++++++++++++++++++++ romscice_atm_subdaily.py | 22 ++-- romscice_ini_ecco.py | 60 +++++----- romscice_ini_woa.py | 12 +- romscice_nbc.py | 21 ++-- romscice_sss_nudging.py | 100 +++++++++++++++++ sose_roms_seasonal.py | 6 +- temp_salt_slice.py | 6 +- timeseries_3D.py | 6 +- timeseries_sss.py | 29 ++++- zice_circumpolar.py | 5 - zonal_plot.py | 6 +- 42 files changed, 1223 insertions(+), 395 deletions(-) create mode 100644 adv_mld_2.py create mode 100644 adv_thickness_2.py create mode 100644 calc_rx1.py create mode 100644 find_isolated_points.py create mode 100644 fix_isolated_points.py delete mode 100644 overturning_plot.py create mode 100644 plot_layers.py create mode 100644 romscice_sss_nudging.py diff --git a/adv_frazil.py b/adv_frazil.py index 1416b5d..f863ac7 100644 --- a/adv_frazil.py +++ b/adv_frazil.py @@ -13,13 +13,13 @@ def adv_frazil (): 
# Titles for plotting labels = ['a) U3_LIM', 'b) U3 - U3_LIM', 'c) C4_LD - U3_LIM', 'd) C4_HD - U3_LIM', 'e) A4_LD - U3_LIM', 'f) A4_HD - U3_LIM'] # File name: annual average - file_tail = 'iceh_avg.nc' + file_tail = 'cice/rundir/history/iceh_avg.nc' # Bounds and ticks for colour scales - max_abs = 1 - tick_abs = 0.25 - max_anom = 2.0 - tick_anom = 1.0 + max_abs = 0.25 #1.0 + tick_abs = 0.05 #0.25 + max_anom = 0.75 #2.0 + tick_anom = 0.25 #1.0 # Degrees to radians conversion factor deg2rad = pi/180. diff --git a/adv_freezingpt_slice.py b/adv_freezingpt_slice.py index e31cf9e..859341f 100644 --- a/adv_freezingpt_slice.py +++ b/adv_freezingpt_slice.py @@ -10,19 +10,19 @@ def adv_freezingpt_slice (): # Path to ocean history file - file_path = '/short/m68/kaa561/advection/c4_l/ocean_his_8aug.nc' + file_path = '/short/m68/kaa561/advection/c4_l/ocean_his_0001.nc' # Timestep to plot - tstep = 1 #221 + tstep = 178 # i-index to plot (1-based) i_val = 1250 # Deepest depth to plot - depth_min = -100 + depth_min = -200 # Bounds on colour scale scale_max = 0.5 scale_tick = 0.25 # Bounds on latitudes to plot - lat_min = -71 - lat_max = -67 + lat_min = -76 + lat_max = -73 save = True fig_name = 'adv_freezingpt_slice.png' @@ -31,6 +31,7 @@ def adv_freezingpt_slice (): theta_b = 0.9 hc = 40 N = 31 + Vstretching = 2 # Read temperature, salinity, and grid variables id = Dataset(file_path, 'r') @@ -50,7 +51,7 @@ def adv_freezingpt_slice (): deltat = temp - tfr # Get a 3D array of z-coordinates; sc_r and Cs_r are unused in this script - z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta) + z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta, Vstretching) # Select depth and latitude at the given i-index z = z_3d[:,:,i_val-1] lat = tile(lat_2d[:,i_val-1], (N,1)) @@ -72,7 +73,7 @@ def adv_freezingpt_slice (): for val in lat_ticks: lat_labels.append(str(int(round(-val))) + r'$^{\circ}$S') ax.set_xticklabels(lat_labels, fontsize=14) - depth_ticks = 
arange(depth_min, 0+25, 25) + depth_ticks = arange(depth_min, 0+50, 50) yticks(depth_ticks) depth_labels = [] for val in depth_ticks: diff --git a/adv_mld.py b/adv_mld.py index ffc8085..e537644 100644 --- a/adv_mld.py +++ b/adv_mld.py @@ -13,15 +13,15 @@ def adv_mld (): # Titles for plotting labels = ['a) U3_LIM', 'b) U3 - U3_LIM', 'c) C4_LD - U3_LIM', 'd) C4_HD - U3_LIM', 'e) A4_LD - U3_LIM', 'f) A4_HD - U3_LIM'] # File name: daily average for 23 August - file_tail = 'ocean_avg_23aug.nc' + file_tail = 'ocean_avg_0001.nc' # If 23 August doesn't have its own file, put the time index here - tstep = 1 #236 if all one file of daily averages for entire simulation + tstep = 236 # Bounds and ticks for colour scales - max_abs = 800 + max_abs = 600 #800 tick_abs = 200 - max_anom = 400 - tick_anom = 200 + max_anom = 300 #400 + tick_anom = 150 #200 # Degrees to radians conversion factor deg2rad = pi/180. diff --git a/adv_mld_2.py b/adv_mld_2.py new file mode 100644 index 0000000..53524cc --- /dev/null +++ b/adv_mld_2.py @@ -0,0 +1,125 @@ +from netCDF4 import * +from numpy import * +from matplotlib.pyplot import * +#import colormaps as cmaps + +# Create a 3x2 plot of mixed layer depth (calculated by KPP) on 23 August (the +# sea ice area max) for each advection experiment: the absolute values for +# U3_LIM, and the anomalies from U3_LIM for the other 5 experiments. +def adv_mld_2 (): + + # Paths to simulation directories + paths = ['/short/m68/kaa561/advection/u3_lim/', '/short/m68/kaa561/advection/c4_l/'] + # Titles for plotting + labels = ['a) U3_LIM', 'b) C4_LD - U3_LIM'] + # File name: daily average for 23 August + file_tail = 'ocean_avg_0001.nc' + # If 23 August doesn't have its own file, put the time index here + tstep = 236 + + # Bounds and ticks for colour scales + max_abs = 600 #800 + tick_abs = 200 + max_anom = 300 #400 + tick_anom = 150 #200 + + # Degrees to radians conversion factor + deg2rad = pi/180. 
+ # Centre of missing circle in grid + lon_c = 50 + lat_c = -83 + # Radius of missing circle + radius = 10.5 + # Boundary of regular grid to embed circle in + circle_bdry = -70+90 + + lon_ticks = array([-120, -60, 60, 120, 180]) + lat_ticks = array([-44, -42, -42, -44, -41]) + lon_labels = [r'120$^{\circ}$W', r'60$^{\circ}$W', r'60$^{\circ}$E', r'120$^{\circ}$E', r'180$^{\circ}$'] + lon_rot = [-60, 60, -60, 60, 0] + + # Read mixed layer depth from U3_LIM simulation; also grid and mask + # variables + id = Dataset(paths[0]+file_tail, 'r') + data = id.variables['Hsbl'][tstep-1,:350,1:] + lon = id.variables['lon_rho'][:350,1:] + lat = id.variables['lat_rho'][:350,1:] + mask = id.variables['mask_rho'][:350,1:] + zice = id.variables['zice'][:350,1:] + id.close() + + # Mask out the ice shelf cavities and switch sign on mixed layer depth + index = zice != 0 + mask[index] = 0.0 + data0 = ma.masked_where(zice!=0, -data) + + # Land mask + land = ma.masked_where(mask==1, mask) + + # Circumpolar x and y coordinates for plotting + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + # Coordinates of centre of missing circle + x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2) + y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2) + # Longitude labels + x_ticks = -(lat_ticks+90)*cos(lon_ticks*deg2rad+pi/2) + y_ticks = (lat_ticks+90)*sin(lon_ticks*deg2rad+pi/2) + # Regular grid to embed missing circle in + x_reg, y_reg = meshgrid(linspace(-circle_bdry, circle_bdry, num=100), linspace(-circle_bdry, circle_bdry, num=100)) + # Mask everything except the circle out of the regular grid + land_circle = zeros(shape(x_reg)) + land_circle = ma.masked_where(sqrt((x_reg-x_c)**2 + (y_reg-y_c)**2) > radius, land_circle) + + # Set up figure + fig = figure(figsize=(20,10)) + ax = fig.add_subplot(1, 2, 1, aspect='equal') + # First shade land + contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) + # Fill in missing circle + contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', 
'0.6'))) + # Shade the mixed layer depth (pcolor not contourf so we don't misrepresent + # the model grid) + img0 = pcolor(x, y, data0, vmin=0, vmax=max_abs, cmap='jet') #cmaps.viridis) + # Add longitude labels + for i in range(size(x_ticks)): + text(x_ticks[i], y_ticks[i], lon_labels[i], ha='center', rotation=lon_rot[i]) + axis('off') + # Add title + title(labels[0], fontsize=20) + # Add colorbar + cbaxes0 = fig.add_axes([0.05, 0.15, 0.02, 0.7]) + cbar0 = colorbar(img0, ticks=arange(0, max_abs+tick_abs, tick_abs), cax=cbaxes0, extend='max') + cbar0.ax.tick_params(labelsize=16) + + # Read mixed layer depth + id = Dataset(paths[1]+file_tail, 'r') + data = id.variables['Hsbl'][tstep-1,:350,1:] + id.close() + # Mask out the ice shelf cavities and switch sign + data = ma.masked_where(zice!=0, -data) + # Calculate anomaly from U3_LIM + data = data - data0 + # Add to plot, same as before + ax = fig.add_subplot(1, 2, 2, aspect='equal') + contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) + contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) + img = pcolor(x, y, data, vmin=-max_anom, vmax=max_anom, cmap='RdBu_r') + axis('off') + title(labels[1], fontsize=20) + cbaxes = fig.add_axes([0.92, 0.15, 0.02, 0.7]) + cbar = colorbar(img, ticks=arange(-max_anom, max_anom+tick_anom, tick_anom), cax=cbaxes, extend='both') + cbar.ax.tick_params(labelsize=16) + + # Main title + suptitle('Mixed layer depth (m) on 23 August', fontsize=28) + + #fig.show() + fig.savefig('adv_mld_2.png') + + +# Command-line interface +if __name__ == '__main__': + + adv_mld_2() + diff --git a/adv_polynyas.py b/adv_polynyas.py index 128aeef..c238b7f 100644 --- a/adv_polynyas.py +++ b/adv_polynyas.py @@ -12,7 +12,7 @@ def adv_polynyas (): # Titles for plotting labels = ['a) U3_LIM', 'b) C4_LD'] # File name: daily average for 23 August - file_tail = 'iceh.1992-08-23.nc' + file_tail = 'cice/rundir/history/iceh.1992-08-23.nc' # Longitude and latitude bounds lon_min = 67 lon_max = 86 diff 
--git a/adv_ross_tsplots.py b/adv_ross_tsplots.py index 217ba66..2d2688a 100644 --- a/adv_ross_tsplots.py +++ b/adv_ross_tsplots.py @@ -11,20 +11,23 @@ def adv_ross_tsplots (): # Paths to simulation directories paths = ['/short/m68/kaa561/advection/u3_lim/', '/short/m68/kaa561/advection/c4_l/'] # Titles for plotting - labels = [r'a) Temperature ($^{\circ}$C), U3_LIM', r'b) Temperature ($^{\circ}$C), C4_LD', 'c) Salinity (psu), U3_LIM', 'd) Salinity (psu), C4_LD'] + labels = [r'a) Temperature ($^{\circ}$C), U3_LIM', r'b) Temperature ($^{\circ}$C), C4_LD - U3_LIM', 'c) Salinity (psu), U3_LIM', 'd) Salinity (psu), C4_LD - U3_LIM'] # File name: daily average for 31 December - file_tail = 'ocean_avg_31dec.nc' + file_tail = 'ocean_avg_0001.nc' var_names = ['temp', 'salt'] # If 31 December doesn't have its own file, put the time index here - tstep = 1 #366 if all one file of daily averages for entire simulation + tstep = 366 # Longitude to interpolate to lon0 = 180 # Deepest depth to plot depth_min = -350 # Bounds and ticks for colour scales - scale_min = [-2, 33.8] - scale_max = [3, 34.8] - scale_ticks = [1, 0.2] + abs_min = [-2, 33.8] + abs_max = [3, 34.8] + abs_ticks = [1, 0.2] + anom_min = [-1.5, -0.15] + anom_max = [1.5, 0.15] + anom_ticks = [0.5, 0.05] # Bounds on latitude lat_min = -80 lat_max = -60 @@ -33,63 +36,104 @@ def adv_ross_tsplots (): theta_b = 0.9 hc = 40 N = 31 + Vstretching=2 # Set up figure fig = figure(figsize=(18,12)) - # Loop over simulations - for sim in range(2): - # Loop over variables (temp and salt) - for var in range(2): - # Read 3D variable - id = Dataset(paths[sim]+file_tail, 'r') - data_3d = id.variables[var_names[var]][tstep-1,:,:,:] - # Also read sea surface height - zeta = id.variables['zeta'][tstep-1,:,:] - if sim==0 and var==0: - # For the first simulation, read grid variables - h = id.variables['h'][:,:] - zice = id.variables['zice'][:,:] - lon_2d = id.variables['lon_rho'][:,:] - lat_2d = id.variables['lat_rho'][:,:] - id.close() - # 
Calculate 3D z-coordinates - z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta) - # Interpolate temp/salt, z-coordinates, and latitude to 180E - data, z, lat = interp_lon_roms(data_3d, z_3d, lat_2d, lon_2d, lon0) - ax = fig.add_subplot(2, 2, 2*var+sim+1) - # Shade data (pcolor not contourf so we don't misrepresent the - # model grid) - img = pcolor(lat, z, data, vmin=scale_min[var], vmax=scale_max[var], cmap='jet') - # Add title - title(labels[2*var+sim], fontsize=24) - # Label axes - if var == 1: - xlabel('Latitude', fontsize=18) - if sim == 0: - ylabel('Depth (m)', fontsize=18) - xlim([lat_min, lat_max]) - ylim([depth_min, 0]) - if sim == 1: - # Add colorbars for each variable - if var == 0: - cbaxes = fig.add_axes([0.93, 0.575, 0.01, 0.3]) - elif var == 1: - cbaxes = fig.add_axes([0.93, 0.125, 0.01, 0.3]) - cbar = colorbar(img, ticks=arange(scale_min[var], scale_max[var]+scale_ticks[var], scale_ticks[var]), cax=cbaxes, extend='both') - cbar.ax.tick_params(labelsize=18) - # Set ticks the way we want them - lat_ticks = arange(lat_min+5, lat_max+5, 5) - ax.set_xticks(lat_ticks) - lat_labels = [] - for val in lat_ticks: - lat_labels.append(str(int(round(-val))) + r'$^{\circ}$S') - ax.set_xticklabels(lat_labels, fontsize=18) - depth_ticks = range(depth_min+50, 0+100, 100) - ax.set_yticks(depth_ticks) - depth_labels = [] - for val in depth_ticks: - depth_labels.append(str(int(round(-val)))) - ax.set_yticklabels(depth_labels, fontsize=18) + # Loop over variables (temp and salt) + for var in range(2): + # Read 3D variable + id = Dataset(paths[0]+file_tail, 'r') + data_3d = id.variables[var_names[var]][tstep-1,:,:,:] + # Also read sea surface height + zeta = id.variables['zeta'][tstep-1,:,:] + if var==0: + # For the first simulation, read grid variables + h = id.variables['h'][:,:] + zice = id.variables['zice'][:,:] + lon_2d = id.variables['lon_rho'][:,:] + lat_2d = id.variables['lat_rho'][:,:] + id.close() + # Calculate 3D z-coordinates + z_3d, sc_r, Cs_r 
= calc_z(h, zice, theta_s, theta_b, hc, N, zeta, Vstretching) + # Interpolate temp/salt, z-coordinates, and latitude to 180E + data0, z, lat = interp_lon_roms(data_3d, z_3d, lat_2d, lon_2d, lon0) + ax = fig.add_subplot(2, 2, 2*var+1) + # Shade data (pcolor not contourf so we don't misrepresent the + # model grid) + img = pcolor(lat, z, data0, vmin=abs_min[var], vmax=abs_max[var], cmap='jet') + # Add title + title(labels[2*var], fontsize=24) + # Label axes + if var == 1: + xlabel('Latitude', fontsize=18) + ylabel('Depth (m)', fontsize=18) + xlim([lat_min, lat_max]) + ylim([depth_min, 0]) + # Add colorbars for each variable + if var == 0: + cbaxes = fig.add_axes([0.02, 0.575, 0.01, 0.3]) + elif var == 1: + cbaxes = fig.add_axes([0.02, 0.125, 0.01, 0.3]) + cbar = colorbar(img, ticks=arange(abs_min[var], abs_max[var]+abs_ticks[var], abs_ticks[var]), cax=cbaxes, extend='both') + cbar.ax.tick_params(labelsize=18) + # Set ticks the way we want them + lat_ticks = arange(lat_min+5, lat_max+5, 5) + ax.set_xticks(lat_ticks) + lat_labels = [] + for val in lat_ticks: + lat_labels.append(str(int(round(-val))) + r'$^{\circ}$S') + ax.set_xticklabels(lat_labels, fontsize=18) + depth_ticks = range(depth_min+50, 0+100, 100) + ax.set_yticks(depth_ticks) + depth_labels = [] + for val in depth_ticks: + depth_labels.append(str(int(round(-val)))) + ax.set_yticklabels(depth_labels, fontsize=18) + + # Now calculate anomalies for C4_LD simulation + id = Dataset(paths[1]+file_tail, 'r') + data_3d = id.variables[var_names[var]][tstep-1,:,:,:] + # Also read sea surface height + zeta = id.variables['zeta'][tstep-1,:,:] + id.close() + # Calculate 3D z-coordinates + z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta, Vstretching) + # Interpolate temp/salt, z-coordinates, and latitude to 180E + data1, z, lat = interp_lon_roms(data_3d, z_3d, lat_2d, lon_2d, lon0) + # Get anomaly + data = data1-data0 + ax = fig.add_subplot(2, 2, 2*var+2) + # Shade data (pcolor not contourf so we don't 
misrepresent the + # model grid) + img = pcolor(lat, z, data, vmin=anom_min[var], vmax=anom_max[var], cmap='RdBu_r') + # Add title + title(labels[2*var+1], fontsize=24) + # Label axes + if var == 1: + xlabel('Latitude', fontsize=18) + xlim([lat_min, lat_max]) + ylim([depth_min, 0]) + # Add colorbars for each variable + if var == 0: + cbaxes = fig.add_axes([0.93, 0.575, 0.01, 0.3]) + elif var == 1: + cbaxes = fig.add_axes([0.93, 0.125, 0.01, 0.3]) + cbar = colorbar(img, ticks=arange(anom_min[var], anom_max[var]+anom_ticks[var], anom_ticks[var]), cax=cbaxes, extend='both') + cbar.ax.tick_params(labelsize=18) + # Set ticks the way we want them + lat_ticks = arange(lat_min+5, lat_max+5, 5) + ax.set_xticks(lat_ticks) + lat_labels = [] + for val in lat_ticks: + lat_labels.append(str(int(round(-val))) + r'$^{\circ}$S') + ax.set_xticklabels(lat_labels, fontsize=18) + depth_ticks = range(depth_min+50, 0+100, 100) + ax.set_yticks(depth_ticks) + depth_labels = [] + for val in depth_ticks: + depth_labels.append(str(int(round(-val)))) + ax.set_yticklabels(depth_labels, fontsize=18) # Main title suptitle(r'180$^{\circ}$E, 31 December (Ross Sea)', fontsize=30) diff --git a/adv_sss.py b/adv_sss.py index bcb830a..1f62bbe 100644 --- a/adv_sss.py +++ b/adv_sss.py @@ -13,14 +13,14 @@ def adv_sss (): # Titles for plotting labels = ['a) U3_LIM', 'b) U3 - U3_LIM', 'c) C4_LD - U3_LIM', 'd) C4_HD - U3_LIM', 'e) A4_LD - U3_LIM', 'f) A4_HD - U3_LIM'] # File name: daily average for 23 August - file_tail = 'iceh.1992-08-23.nc' + file_tail = 'cice/rundir/history/iceh.1992-08-23.nc' # Bounds and ticks for colour scales min_abs = 33 max_abs = 35 tick_abs = 0.5 - max_anom = 2.0 - tick_anom = 1.0 + max_anom = 1.0 #2.0 + tick_anom = 0.5 #1.0 # Degrees to radians conversion factor deg2rad = pi/180. 
diff --git a/adv_thickness.py b/adv_thickness.py index 4f8b74d..bca4272 100644 --- a/adv_thickness.py +++ b/adv_thickness.py @@ -13,13 +13,13 @@ def adv_thickness (): # Titles for plotting labels = ['a) U3_LIM', 'b) U3 - U3_LIM', 'c) C4_LD - U3_LIM', 'd) C4_HD - U3_LIM', 'e) A4_LD - U3_LIM', 'f) A4_HD - U3_LIM'] # File name: daily average for 23 August - file_tail = 'iceh.1992-08-23.nc' + file_tail = 'cice/rundir/history/iceh.1992-08-23.nc' # Bounds and ticks for colour scales max_abs = 2.0 tick_abs = 0.5 - max_anom = 2.0 - tick_anom = 1.0 + max_anom = 1.5 #2.0 + tick_anom = 0.5 #1.0 # Degrees to radians conversion factor deg2rad = pi/180. diff --git a/adv_thickness_2.py b/adv_thickness_2.py new file mode 100644 index 0000000..ad1316b --- /dev/null +++ b/adv_thickness_2.py @@ -0,0 +1,144 @@ +from netCDF4 import * +from numpy import * +from matplotlib.pyplot import * +#import colormaps as cmaps + +# Create a 3x2 plot of sea ice effective thickness on 23 August (the sea ice +# area max) for each advection experiment: the absolute values for U3_LIM, and +# the anomalies from U3_LIM for the other 5 experiments. +def adv_thickness_2 (): + + # Paths to simulation directories + paths = ['/short/m68/kaa561/advection/u3_lim/', '/short/m68/kaa561/advection/c4_l/'] + # Titles for plotting + labels = ['a) U3_LIM', 'b) C4_LD - U3_LIM'] + # File name: daily average for 23 August + file_tail = 'cice/rundir/history/iceh.1992-08-23.nc' + + # Bounds and ticks for colour scales + max_abs = 2.0 + tick_abs = 0.5 + max_anom = 1.5 #2.0 + tick_anom = 0.5 #1.0 + + # Degrees to radians conversion factor + deg2rad = pi/180. 
+ # Centre of missing circle in grid + lon_c = 50 + lat_c = -83 + # Radius of missing circle + radius = 10.5 + # Boundary of regular grid to embed circle in + circle_bdry = -70+90 + + lon_ticks = array([-120, -60, 60, 120, 180]) + lat_ticks = array([-44, -42, -42, -44, -41]) + lon_labels = [r'120$^{\circ}$W', r'60$^{\circ}$W', r'60$^{\circ}$E', r'120$^{\circ}$E', r'180$^{\circ}$'] + lon_rot = [-60, 60, -60, 60, 0] + + # Read thickness data from U3_LIM simulation; also grid and mask variables + id = Dataset(paths[0]+file_tail, 'r') + # Effective thickness is concentration*thickness + data_tmp = id.variables['aice'][0,:350,:]*id.variables['hi'][0,:350,:] + # Also read aice on its own for masking + aice_tmp = id.variables['aice'][0,:350,:] + lon_tmp = id.variables['TLON'][:350,:] + lat_tmp = id.variables['TLAT'][:350,:] + mask_tmp = id.variables['tmask'][:350,:] + id.close() + + # Wrap periodic boundary so there isn't a gap in the plot + lon = ma.empty([size(lon_tmp,0), size(lon_tmp,1)+1]) + lat = ma.empty([size(lat_tmp,0), size(lat_tmp,1)+1]) + mask = ma.empty([size(mask_tmp,0), size(mask_tmp,1)+1]) + aice = ma.empty([size(aice_tmp,0), size(aice_tmp,1)+1]) + data0 = ma.empty([size(data_tmp,0), size(data_tmp,1)+1]) + lon[:,:-1] = lon_tmp + lon[:,-1] = lon_tmp[:,0] + lat[:,:-1] = lat_tmp + lat[:,-1] = lat_tmp[:,0] + mask[:,:-1] = mask_tmp + mask[:,-1] = mask_tmp[:,0] + aice[:,:-1] = aice_tmp + aice[:,-1] = aice_tmp[:,0] + data0[:,:-1] = data_tmp + data0[:,-1] = data_tmp[:,0] + + # Mask areas with less than 15% sea ice out of the thickness data + data0_mask = ma.masked_where(aice<0.15, data0) + + # Land mask + land = ma.masked_where(mask==1, mask) + + # Circumpolar x and y coordinates for plotting + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + # Coordinates of centre of missing circle + x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2) + y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2) + # Longitude labels + x_ticks = 
-(lat_ticks+90)*cos(lon_ticks*deg2rad+pi/2) + y_ticks = (lat_ticks+90)*sin(lon_ticks*deg2rad+pi/2) + # Regular grid to embed missing circle in + # Regular grid to embed missing circle in + x_reg, y_reg = meshgrid(linspace(-circle_bdry, circle_bdry, num=100), linspace(-circle_bdry, circle_bdry, num=100)) + # Mask everything except the circle out of the regular grid + land_circle = zeros(shape(x_reg)) + land_circle = ma.masked_where(sqrt((x_reg-x_c)**2 + (y_reg-y_c)**2) > radius, land_circle) + + # Set up figure + fig = figure(figsize=(20,10)) + ax = fig.add_subplot(1, 2, 1, aspect='equal') + # Start with a lighter grey circle + contourf(x, y, zeros(shape(x)), 0, colors=(('0.9', '0.9', '0.9'))) + # Shade land + contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) + # Fill in missing circle + contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) + # Shade the thickness data (pcolor not contourf so we don't misrepresent + # the model grid) + img0 = pcolor(x, y, data0_mask, vmin=0, vmax=max_abs, cmap='jet') #cmaps.viridis) + # Add longitude labels + for i in range(size(x_ticks)): + text(x_ticks[i], y_ticks[i], lon_labels[i], ha='center', rotation=lon_rot[i]) + axis('off') + # Add title + title(labels[0], fontsize=20) + # Add colorbar + cbaxes0 = fig.add_axes([0.05, 0.15, 0.02, 0.7]) + cbar0 = colorbar(img0, ticks=arange(0, max_abs+tick_abs, tick_abs), cax=cbaxes0, extend='max') + cbar0.ax.tick_params(labelsize=16) + + # Read the thickness data + id = Dataset(paths[1]+file_tail, 'r') + data_tmp = id.variables['aice'][0,:350,:]*id.variables['hi'][0,:350,:] + id.close() + # Wrap the periodic boundary + data = ma.empty([size(data_tmp,0), size(data_tmp,1)+1]) + data[:,:-1] = data_tmp + data[:,-1] = data_tmp[:,0] + # Calculate anomaly from U3_LIM + data = data - data0 + # Add to plot, same as before + ax = fig.add_subplot(1, 2, 2, aspect='equal') + contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) + contourf(x_reg, y_reg, land_circle, 1, 
colors=(('0.6', '0.6', '0.6'))) + img = pcolor(x, y, data, vmin=-max_anom, vmax=max_anom, cmap='RdBu_r') + axis('off') + title(labels[1], fontsize=20) + cbaxes = fig.add_axes([0.92, 0.15, 0.02, 0.7]) + cbar = colorbar(img, ticks=arange(-max_anom, max_anom+tick_anom, tick_anom), cax=cbaxes, extend='both') + cbar.ax.tick_params(labelsize=16) + + # Main title + suptitle('Effective sea ice thickness (m) on 23 August', fontsize=28) + + #fig.show() + fig.savefig('adv_thickness_2.png') + + +# Command-line interface +if __name__ == '__main__': + + adv_thickness_2() + diff --git a/calc_rx1.py b/calc_rx1.py new file mode 100644 index 0000000..111cc95 --- /dev/null +++ b/calc_rx1.py @@ -0,0 +1,50 @@ +from netCDF4 import Dataset +from numpy import * +from calc_z import * + +def calc_rx1 (grid_path, theta_s, theta_b, hc, N, Vstretching, out_file): + + id = Dataset(grid_path, 'r') + lon_2d = id.variables['lon_rho'][:,:] + lat_2d = id.variables['lat_rho'][:,:] + h = id.variables['h'][:,:] + zice = id.variables['zice'][:,:] + mask_rho = id.variables['mask_rho'][:,:] + id.close() + + z, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, None, Vstretching) + for k in range(N): + tmp = z[k,:,:] + tmp[mask_rho==0] = NaN + z[k,:,:] = tmp + + rx1_3d_i = abs((z[1:,1:,1:]-z[1:,1:,:-1]+z[:-1,1:,1:]-z[:-1,1:,:-1])/(z[1:,1:,1:]+z[1:,1:,:-1]-z[:-1,1:,1:]-z[:-1,1:,:-1])) + rx1_3d_j = abs((z[1:,1:,1:]-z[1:,:-1,1:]+z[:-1,1:,1:]-z[:-1,:-1,1:])/(z[1:,1:,1:]+z[1:,:-1,1:]-z[:-1,1:,1:]-z[:-1,:-1,1:])) + rx1_3d = maximum(rx1_3d_i, rx1_3d_j) + rx1_tmp = amax(rx1_3d, axis=0) + + rx1 = zeros(shape(lon_2d)) + rx1[1:,1:] = rx1_tmp + rx1[0,:] = rx1[1,:] + rx1[:,0] = rx1[:,1] + rx1 = ma.masked_where(isnan(rx1), rx1) + + id = Dataset(out_file, 'w') + id.createDimension('xi_rho', size(lon_2d,1)) + id.createDimension('eta_rho', size(lon_2d,0)) + id.createVariable('rx1', 'f8', ('eta_rho', 'xi_rho')) + id.variables['rx1'][:,:] = rx1 + id.close() + + +if __name__ == "__main__": + + grid_path = raw_input("Path 
to grid file: ") + theta_s = float(raw_input("theta_s: ")) + theta_b = float(raw_input("theta_b: ")) + hc = float(raw_input("hc: ")) + N = int(raw_input("N: ")) + Vstretching = int(raw_input("Vstretching (2 or 4): ")) + out_file = raw_input("Path to output file: ") + + calc_rx1 (grid_path, theta_s, theta_b, hc, N, Vstretching, out_file) diff --git a/calc_z.py b/calc_z.py index 6a60ea1..988a08f 100644 --- a/calc_z.py +++ b/calc_z.py @@ -1,19 +1,20 @@ from numpy import * # Given ROMS grid variables, calculate s-coordinates, stretching curves, and -# z-coordinates. Assumes Vtransform = 2 and Vstretching = 2. +# z-coordinates. Assumes Vtransform = 2. # Input (all straight out of grid file and *.in file): # h, zice = 2D arrays containing values for bathymetry and ice shelf draft. # Both have dimension latitude x longitude. # theta_s, theta_b, hc, N = scalar parameters # zeta = optional 2D array containing values for sea surface height +# Vstretching = optional integer showing stretching transfomration, 2 or 4 # Output: # z = 3D array containing negative z-coordinate values for depth on the rho # grid; dimension depth x latitude x longitude # s = 1D array of s-coordinate values # C = 1D array of stretching curve values -def calc_z (h, zice, theta_s, theta_b, hc, N, zeta=None): +def calc_z (h, zice, theta_s, theta_b, hc, N, zeta=None, Vstretching=4): # Follows the method of scoord_zice.m and stretching.m on katabatic # (in /ds/projects/iomp/matlab_scripts/ROMS_NetCDF/iomp_IAF/) @@ -26,11 +27,15 @@ def calc_z (h, zice, theta_s, theta_b, hc, N, zeta=None): lev = arange(1,N+1)-0.5 s = (lev-N)*ds - Csur = (-cosh(theta_s*s) + 1)/(cosh(theta_s) - 1) - Cbot = sinh(theta_b*(s+1))/sinh(theta_b) - 1 - weight = (s+1)**alpha*(1 + (alpha/beta)*(1 - (s+1)**beta)) - C = weight*Csur + (1-weight)*Cbot - + if Vstretching == 2: + Csur = (-cosh(theta_s*s) + 1)/(cosh(theta_s) - 1) + Cbot = sinh(theta_b*(s+1))/sinh(theta_b) - 1 + weight = (s+1)**alpha*(1 + (alpha/beta)*(1 - (s+1)**beta)) + C = 
weight*Csur + (1-weight)*Cbot + elif Vstretching == 4: + C = (1-cosh(theta_s*s))/(cosh(theta_s)-1) + C = (exp(theta_b*C)-1)/(1-exp(-theta_b)) + h = h - abs(zice) num_lon = size(h, 1) diff --git a/circumpolar_plot.py b/circumpolar_plot.py index 92dacd9..883560c 100644 --- a/circumpolar_plot.py +++ b/circumpolar_plot.py @@ -28,9 +28,9 @@ def circumpolar_plot (file_path, var_name, tstep, depth_key, depth, depth_bounds, colour_bounds=None, save=False, fig_name=None, grid_path=None): # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 deg2rad = pi/180 @@ -158,7 +158,7 @@ def circumpolar_plot (file_path, var_name, tstep, depth_key, depth, depth_bounds colour_map = 'jet' else: # Determine bounds automatically - if var_name in ['u', 'v', 'ubar', 'vbar', 'm', 'shflux', 'ssflux', 'sustr', 'svstr', 'bustr', 'bvstr']: + if var_name in ['u', 'v', 'ubar', 'vbar', 'm', 'shflux', 'ssflux', 'sustr', 'svstr', 'bustr', 'bvstr', 'ssflux_restoring']: # Center levels on 0 for certain variables, with a blue-to-red # colourmap max_val = amax(abs(data)) diff --git a/common_grid.py b/common_grid.py index ead74af..d2a2978 100644 --- a/common_grid.py +++ b/common_grid.py @@ -7,8 +7,9 @@ from monthly_avg_cice import * # Interpolate ROMS output to a regular quarter-degree grid for easy comparison -# with FESOM. Write monthly averages of surface heat and salt flux, the -# surface stress vector and its curl, and the sea ice velocity vector. +# with FESOM. Write monthly averages of surface temperature, salinity, surface +# heat and salt flux, sea ice concentration and thickness, ocean and sea ice +# velocity, the surface stress vector and its curl. 
# Input: # roms_file = path to ROMS output file containing 5-day averages for the entire # simulation, starting on 1 January @@ -25,6 +26,7 @@ def common_grid (roms_file, cice_file, out_file): r = 6.371e6 # Degrees to radians conversion factor deg2rad = pi/180.0 + N = 31 print 'Calculating grids' @@ -32,7 +34,7 @@ def common_grid (roms_file, cice_file, out_file): lon_common = arange(-180, 180+res, res) lat_common = arange(-90, nbdry+res, res) # Get a 2D version of each to calculate dx and dy in metres - lat_2d, lon_2d = meshgrid(lat_common, lon_common) + lon_2d, lat_2d = meshgrid(lon_common, lat_common) # dx = r*cos(lat)*dlon where lat and dlon (i.e. res) are in radians dx = r*cos(lat_2d*deg2rad)*res*deg2rad # dy = r*dlat where dlat (i.e. res) is in radians @@ -90,88 +92,189 @@ def common_grid (roms_file, cice_file, out_file): print 'Interpolating land mask to new grid' mask_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, mask_roms) - # Make the interpolation strict, i.e. cells with any land in their - # interpolation neighbourhood are considered land. This way there is no - # worrying about interpolating variables on the boundary. 
- mask_common[mask_common < 1] = 0 - - print 'Setting up ' + out_file - id = Dataset(out_file, 'w') - id.createDimension('longitude', size(lon_common)) - id.createDimension('latitude', size(lat_common)) - id.createDimension('time', None) - id.createVariable('longitude', 'f8', ('longitude')) - id.variables['longitude'].units = 'degrees' - id.variables['longitude'][:] = lon_common - id.createVariable('latitude', 'f8', ('latitude')) - id.variables['latitude'].units = 'degrees' - id.variables['latitude'][:] = lat_common - id.createVariable('time', 'f8', ('time')) - id.variables['time'].units = 'months' - id.createVariable('mask', 'f8', ('latitude', 'longitude')) - id.variables['mask'].units = '1' - id.variables['mask'][:,:] = mask_common - id.createVariable('shflux', 'f8', ('time', 'latitude', 'longitude')) - id.variables['shflux'].long_name = 'surface heat flux into ocean' - id.variables['shflux'].units = 'W/m^2' - id.createVariable('ssflux', 'f8', ('time', 'latitude', 'longitude')) - id.variables['ssflux'].long_name = 'surface virtual salinity flux into ocean' - id.variables['ssflux'].units = 'psu m/s' - id.createVariable('sustr', 'f8', ('time', 'latitude', 'longitude')) - id.variables['sustr'].long_name = 'zonal surface stress' - id.variables['sustr'].units = 'N/m^2' - id.createVariable('svstr', 'f8', ('time', 'latitude', 'longitude')) - id.variables['svstr'].long_name = 'meridional surface stress' - id.variables['svstr'].units = 'N/m^2' - id.createVariable('curl_str', 'f8', ('time', 'latitude', 'longitude')) - id.variables['curl_str'].long_name = 'curl of surface stress' - id.variables['curl_str'].units = 'N/m^3' - id.createVariable('uice', 'f8', ('time', 'latitude', 'longitude')) - id.variables['uice'].long_name = 'sea ice velocity eastward' - id.variables['uice'].units = 'm/s' - id.createVariable('vice', 'f8', ('time', 'latitude', 'longitude')) - id.variables['vice'].long_name = 'sea ice velocity northward' - id.variables['vice'].units = 'm/s' + 
mask_common[isnan(mask_common)] = 0 + # Cut it off at 1 + mask_common[mask_common < 0.5] = 0 + mask_common[mask_common >= 0.5] = 1 + +# print 'Setting up ' + out_file +# id = Dataset(out_file, 'w') +# id.createDimension('longitude', size(lon_common)) +# id.createDimension('latitude', size(lat_common)) +# id.createDimension('time', None) +# id.createVariable('longitude', 'f8', ('longitude')) +# id.variables['longitude'].units = 'degrees' +# id.variables['longitude'][:] = lon_common +# id.createVariable('latitude', 'f8', ('latitude')) +# id.variables['latitude'].units = 'degrees' +# id.variables['latitude'][:] = lat_common +# id.createVariable('time', 'f8', ('time')) +# id.variables['time'].units = 'months' +# id.createVariable('mask', 'f8', ('latitude', 'longitude')) +# id.variables['mask'].units = '1' +# id.variables['mask'][:,:] = mask_common +# id.createVariable('sst', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['sst'].long_name = 'sea surface temperature' +# id.variables['sst'].units = 'C' +# id.createVariable('sss', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['sss'].long_name = 'sea surface salinity' +# id.variables['sss'].units = 'psu' +# id.createVariable('shflux', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['shflux'].long_name = 'surface heat flux into ocean' +# id.variables['shflux'].units = 'W/m^2' +# id.createVariable('ssflux', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['ssflux'].long_name = 'surface virtual salinity flux into ocean' +# id.variables['ssflux'].units = 'psu m/s' +# id.createVariable('aice', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['aice'].long_name = 'sea ice concentration' +# id.variables['aice'].units = '1' +# id.createVariable('hice', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['hice'].long_name = 'sea ice thickness' +# id.variables['hice'].units = 'm' +# id.createVariable('uocn', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['uocn'].long_name = 
'ocean surface velocity eastward' +# id.variables['uocn'].units = 'm/s' +# id.createVariable('vocn', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['vocn'].long_name = 'ocean surface velocity northward' +# id.variables['vocn'].units = 'm/s' +# id.createVariable('uice', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['uice'].long_name = 'sea ice velocity eastward' +# id.variables['uice'].units = 'm/s' +# id.createVariable('vice', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['vice'].long_name = 'sea ice velocity northward' +# id.variables['vice'].units = 'm/s' +# id.createVariable('sustr', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['sustr'].long_name = 'zonal surface stress' +# id.variables['sustr'].units = 'N/m^2' +# id.createVariable('svstr', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['svstr'].long_name = 'meridional surface stress' +# id.variables['svstr'].units = 'N/m^2' +# id.createVariable('curl_str', 'f8', ('time', 'latitude', 'longitude')) +# id.variables['curl_str'].long_name = 'curl of surface stress' +# id.variables['curl_str'].units = 'N/m^3' +# id.close() # Loop over months - for month in range(num_months): + for month in range(18,num_months): print 'Processing month ' + str(month+1) + ' of ' + str(num_months) + id = Dataset(out_file, 'a') # Write time value for this month id.variables['time'][month] = month+1 + print '...sea surface temperature' + # Get monthly average of 3D variable + temp_roms = monthly_avg_roms(roms_file, 'temp', [N, size(lon_rho,0), size(lon_rho,1)], month%12, instance=month/12+1) + # Select surface layer + sst_roms = temp_roms[-1,:,:] + # Interpolate to common grid + sst_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, sst_roms) + # Apply land mask + sst = ma.masked_where(mask_common==0, sst_common) + # Write to file + id.variables['sst'][month,:,:] = sst + + print '...sea surface salinity' + # Get monthly average of 3D variable + salt_roms = 
monthly_avg_roms(roms_file, 'salt', [N, size(lon_rho,0), size(lon_rho,1)], month%12, instance=month/12+1) + # Select surface layer + sss_roms = salt_roms[-1,:,:] + # Interpolate to common grid + sss_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, sss_roms) + # Apply land mask + sss = ma.masked_where(mask_common==0, sss_common) + # Write to file + id.variables['sss'][month,:,:] = sss + print '...surface heat flux' # Get monthly average - shflux_roms = monthly_avg_roms(roms_file, 'shflux', shape(lon_rho), month%12+1, instance=month/12+1) + shflux_roms = monthly_avg_roms(roms_file, 'shflux', shape(lon_rho), month%12, instance=month/12+1) # Interpolate to common grid shflux_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, shflux_roms) # Apply land mask - shflux = ma.masked_where(mask==0, shflux_common) + shflux = ma.masked_where(mask_common==0, shflux_common) # Write to file id.variables['shflux'][month,:,:] = shflux print '...surface salt flux' # Get monthly average - ssflux_roms = monthly_avg_roms(roms_file, 'ssflux', shape(lon_rho), month%12+1, instance=month/12+1) + ssflux_roms = monthly_avg_roms(roms_file, 'ssflux', shape(lon_rho), month%12, instance=month/12+1) # Interpolate to common grid ssflux_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, ssflux_roms) # Apply land mask - ssflux = ma.masked_where(mask==0, ssflux_common) + ssflux = ma.masked_where(mask_common==0, ssflux_common) # Write to file id.variables['ssflux'][month,:,:] = ssflux + print '...sea ice concentration' + # Get monthly average (use CICE file) + aice_cice = monthly_avg_cice(cice_file, 'aice', shape(lon_cice), month%12, instance=month/12+1) + # Interpolate to common grid (note CICE grid not ROMS) + aice_common = interp_roms2common(lon_common, lat_common, lon_cice, lat_cice, aice_cice) + # Apply land mask + aice = ma.masked_where(mask_common==0, aice_common) + # Write to file + id.variables['aice'][month,:,:] = aice + + print 
'...sea ice thickness' + # Get monthly average (use CICE file) + hice_cice = monthly_avg_cice(cice_file, 'hi', shape(lon_cice), month%12, instance=month/12+1) + # Interpolate to common grid (note CICE grid not ROMS) + hice_common = interp_roms2common(lon_common, lat_common, lon_cice, lat_cice, hice_cice) + # Apply land mask + hice = ma.masked_where(mask_common==0, hice_common) + # Write to file + id.variables['hice'][month,:,:] = hice + + print '...surface ocean velocity vector' + # Surface ocean velocity + # Get monthly averages of both 3D vector components + uocn_3d_tmp = monthly_avg_roms(roms_file, 'u', [N, u_shape[0], u_shape[1]], month%12, instance=month/12+1) + vocn_3d_tmp = monthly_avg_roms(roms_file, 'v', [N, v_shape[0], v_shape[1]], month%12, instance=month/12+1) + # Select surface layer + uocn_tmp = uocn_3d_tmp[-1,:,:] + vocn_tmp = vocn_3d_tmp[-1,:,:] + # Rotate to lon-lat space (note they are on the rho grid now) + uocn_roms, vocn_roms = rotate_vector_roms(uocn_tmp, vocn_tmp, angle_roms) + # Interpolate to common grid + uocn_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, uocn_roms) + vocn_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, vocn_roms) + # Apply land mask + uocn = ma.masked_where(mask_common==0, uocn_common) + vocn = ma.masked_where(mask_common==0, vocn_common) + # Write to file + id.variables['uocn'][month,:,:] = uocn + id.variables['vocn'][month,:,:] = vocn + + print '...sea ice velocity vector' + # Sea ice velocity (CICE variable not ROMS) + # Get monthly averages of both vector components + uice_tmp = monthly_avg_cice(cice_file, 'uvel', shape(lon_cice), month%12, instance=month/12+1) + vice_tmp = monthly_avg_cice(cice_file, 'vvel', shape(lon_cice), month%12, instance=month/12+1) + # Rotate to lon-lat space + uice_cice, vice_cice = rotate_vector_cice(uice_tmp, vice_tmp, angle_cice) + # Interpolate to common grid (note CICE grid not ROMS) + uice_common = interp_roms2common(lon_common, lat_common, 
lon_cice, lat_cice, uice_cice) + vice_common = interp_roms2common(lon_common, lat_common, lon_cice, lat_cice, vice_cice) + # Apply land mask + uice = ma.masked_where(mask_common==0, uice_common) + vice = ma.masked_where(mask_common==0, vice_common) + # Write to file + id.variables['uice'][month,:,:] = uice + id.variables['vice'][month,:,:] = vice + print '...surface stress vector' # Surface stresses # Get monthly averages of both vector components - sustr_tmp = monthly_avg_roms(roms_file, 'sustr', u_shape, month%12+1, instance=month/12+1) - svstr_tmp = monthly_avg_roms(roms_file, 'svstr', v_shape, month%12+1, instance=month/12+1) + sustr_tmp = monthly_avg_roms(roms_file, 'sustr', u_shape, month%12, instance=month/12+1) + svstr_tmp = monthly_avg_roms(roms_file, 'svstr', v_shape, month%12, instance=month/12+1) # Rotate to lon-lat space (note they are on the rho grid now) sustr_roms, svstr_roms = rotate_vector_roms(sustr_tmp, svstr_tmp, angle_roms) # Interpolate to common grid sustr_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, sustr_roms) svstr_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, svstr_roms) # Apply land mask - sustr = ma.masked_where(mask==0, sustr_common) - svstr = ma.masked_where(mask==0, svstr_common) + sustr = ma.masked_where(mask_common==0, sustr_common) + svstr = ma.masked_where(mask_common==0, svstr_common) # Write to file id.variables['sustr'][month,:,:] = sustr id.variables['svstr'][month,:,:] = svstr @@ -188,28 +291,13 @@ def common_grid (roms_file, cice_file, out_file): dsustr_dy[:-1,:] = (sustr_common[1:,:] - sustr_common[:-1,:])/(2*dy[:-1,:]) dsustr_dy[-1,:] = (sustr_common[-1,:] - sustr_common[-2,:])/(2*dy[-1,:]) curl_str = dsvstr_dx - dsustr_dy + curl_str = ma.masked_where(mask_common==0, curl_str) # Write to file id.variables['curl_str'][month,:,:] = curl_str - print '...sea ice velocity vector' - # Sea ice velocity (CICE variable not ROMS) - # Get monthly averages of both vector components - 
uice_tmp = monthly_avg_cice(cice_file, 'uvel', shape(lon_cice), month%12+1, instance=month/12+1) - vice_tmp = monthly_avg_cice(cice_file, 'vvel', shape(lon_cice), month%12+1, instance=month/12+1) - # Rotate to lon-lat space - uice_cice, vice_cice = rotate_vector_cice(uice_tmp, vice_tmp, angle_cice) - # Interpolate to common grid (note CICE grid not ROMS) - uice_common = interp_roms2common(lon_common, lat_common, lon_cice, lat_cice, uice_cice) - vice_common = interp_roms2common(lon_common, lat_common, lon_cice, lat_cice, vice_cice) - # Apply land mask - uice = ma.masked_where(mask==0, uice_common) - vice = ma.masked_where(mask==0, vice_common) - # Write to file - id.variables['uice'][month,:,:] = uice - id.variables['vice'][month,:,:] = vice + id.close() print 'Finished' - id.close() # Interpolate from the ROMS grid to the common grid. @@ -225,7 +313,7 @@ def common_grid (roms_file, cice_file, out_file): def interp_roms2common (lon_1d, lat_1d, lon_roms, lat_roms, data_roms): # Get a 2D field of common latitude and longitude - lat_2d, lon_2d = meshgrid(lat_1d, lon_1d) + lon_2d, lat_2d = meshgrid(lon_1d, lat_1d) # Make an array of all the ROMS coordinates, flattened points = empty([size(lon_roms), 2]) diff --git a/compare_nic_seasonal.py b/compare_nic_seasonal.py index 815e06b..ef18c91 100644 --- a/compare_nic_seasonal.py +++ b/compare_nic_seasonal.py @@ -35,7 +35,7 @@ def compare_nic_seasonal (cice_file, var_name, colour_bounds=None, save=False, f if var_name in ['uvel', 'vvel', 'strairx', 'strairy', 'strocnx', 'strocny']: rotate = True # Read the angle of grid rotation - angle = id.variables['ANGLE'][:-15,:] + angle = id.variables['ANGLE'][:,:] # Figure out whether this is an x or y component, and what the name of # the other component is if var_name == 'uvel': @@ -92,9 +92,15 @@ def compare_nic_seasonal (cice_file, var_name, colour_bounds=None, save=False, f other_cmp = seasonal_avg_cice(cice_file, other_name, [num_lat, num_lon]) # Rotate to lon-lat space if 
cmp_flag == 'x': - cice_data_tmp, other_tmp = rotate_vector_cice(this_cmp, other_cmp, angle) + cice_data_tmp = ma.empty(shape(this_cmp)) + for season in range(4): + tmp1, tmp2 = rotate_vector_cice(this_cmp[season,:,:], other_cmp[season,:,:], angle) + cice_data_tmp[season,:,:] = tmp1 elif cmp_flag == 'y': - other_tmp, cice_data_tmp = rotate_vector_cice(other_cmp, this_cmp, angle) + cice_data_tmp = ma.empty(shape(this_cmp)) + for season in range(4): + tmp1, tmp2 = rotate_vector_cice(other_cmp[season,:,:], this_cmp[season,:,:], angle) + cice_data_tmp[season,:,:] = tmp2 else: cice_data_tmp = seasonal_avg_cice(cice_file, var_name, [num_lat, num_lon]) @@ -105,6 +111,10 @@ def compare_nic_seasonal (cice_file, var_name, colour_bounds=None, save=False, f cice_data[:,:,:-1] = cice_data_tmp cice_data[:,:,-1] = cice_data_tmp[:,:,0] + # Conversions to account for different thermodynamics schemes + if var_name in ['frazil', 'snoice']: + cice_data /= 3.6 + # Read Nic's grid from the January file id = Dataset(nic_dir_head + str(output_number) + nic_dir_tail + nic_file_head + str(nic_year_number) + '-01.nc', 'r') nic_lon = id.variables[lon_name][:max_j,:] diff --git a/file_guide.txt b/file_guide.txt index b811b23..c24b1d3 100644 --- a/file_guide.txt +++ b/file_guide.txt @@ -486,14 +486,6 @@ zonal_plot.py: Creates a depth vs latitude plot of the given variable. You can only have to specify which parameters have changed since the last plot. -overturning_plot.py: Calculate the meridional overturning streamfunction at the - last timestep of the given ROMS history/averages file and - make a contour plot in z-space. - To run: Open python or ipython and type - "run overturning_plot.py". You will be prompted - for the path to the ROMS history/averages file - and the filename with which to save the plot. - uv_vectorplot.py: Make a circumpolar Antarctic plot of speed overlaid with velocity vectors at the given depth (surface, bottom, or vertically aveaged). 
diff --git a/find_isolated_points.py b/find_isolated_points.py new file mode 100644 index 0000000..74a82fb --- /dev/null +++ b/find_isolated_points.py @@ -0,0 +1,29 @@ +from netCDF4 import Dataset +from numpy import * + +def find_isolated_points (cice_kmt_file): + + start_j = 50 + end_j = 250 + + id = Dataset(cice_kmt_file, 'r') + kmt = id.variables['kmt'][:,:] + id.close() + + num_i = size(kmt,1) + + for j in range(start_j, end_j): + for i in range(num_i-1): + if kmt[j,i] == 1: + if i == num_i-1: + neighbours = array([kmt[j,i-1], kmt[j,0], kmt[j-1,i], kmt[j+1,i]]) + else: + neighbours = array([kmt[j,i-1], kmt[j,i+1], kmt[j-1,i], kmt[j+1,i]]) + if sum(neighbours) < 2: + print "i=" + str(i+1) + ', j=' + str(j+1) + + +if __name__ == "__main__": + + cice_kmt_file = raw_input("Path to CICE land mask file: ") + find_isolated_points(cice_kmt_file) diff --git a/fix_isolated_points.py b/fix_isolated_points.py new file mode 100644 index 0000000..efbdf72 --- /dev/null +++ b/fix_isolated_points.py @@ -0,0 +1,170 @@ +from netCDF4 import Dataset +from numpy import * + +def fix_isolated_pts (): + + grid_file = '../metroms_iceshelf/apps/common/grid/circ30S_quarterdegree_tmp.nc' + + id = Dataset(grid_file, 'a') + mask_rho = id.variables['mask_rho'][:,:] + h = id.variables['h'][:,:] # Fill value 50 + mask_zice = id.variables['mask_zice'][:,:] + zice = id.variables['zice'][:,:] + + # Set as ice shelf, average of i-1, j-1, j+1 + i_vals = [1162, 1179, 430, 1184] + j_vals = [131, 177, 183, 201] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 1 + zice[j,i] = mean(array([zice[j,i-1], zice[j-1,i], zice[j+1,i]])) + + # Set as ice shelf, average of i-1, i+1, j-1 + i_vals = [1291, 1311, 64, 2, 296, 1426, 1375, 1386, 1422, 1106, 1125, 1054, 406] + j_vals = [85, 95, 103, 116, 118, 123, 124, 125, 125, 157, 157, 171, 181] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 1 + 
zice[j,i] = mean(array([zice[j,i-1], zice[j,i+1], zice[j-1,i]])) + + # Set as ice shelf, average of i-1, i+1 + i_vals = [92, 12] + j_vals = [93, 113] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 1 + zice[j,i] = mean(array([zice[j,i-1], zice[j,i+1]])) + + # Set as ice shelf, average of i+1, j-1, j+1 + i_vals = [714, 850, 1131] + j_vals = [101, 131, 176] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 1 + zice[j,i] = mean(array([zice[j,i+1], zice[j-1,i], zice[j+1,i]])) + + # Set as ice shelf, average of i+1, j-1 + i_vals = [1110] + j_vals = [158] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 1 + zice[j,i] = mean(array([zice[j,i+1], zice[j-1,i]])) + + # Set as ice shelf, average of i+1, j+1 + i_vals = [1023, 1129] + j_vals = [164, 165] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 1 + zice[j,i] = mean(array([zice[j,i+1], zice[j+1,i]])) + + # Set as ice shelf, average of i-1, j+1 + i_vals = [1178] + j_vals = [171] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 1 + zice[j,i] = mean(array([zice[j,i-1], zice[j+1,i]])) + + # Set as ice shelf, average of i+1, j-1 + i_vals = [665, 1136, 596] + j_vals = [178, 188, 197] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 1 + zice[j,i] = mean(array([zice[j,i+1], zice[j-1,i]])) + + # Set as ice shelf, average of i-1, j-1 + i_vals = [516] + j_vals = [190] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 1 + zice[j,i] = mean(array([zice[j,i-1], zice[j-1,i]])) + + # Set as ice shelf, same as j-1 + i_vals = [1157, 1122] + j_vals = [192, 158] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + 
mask_zice[j,i] = 1 + zice[j,i] = zice[j-1,i] + + # Set as land + i_vals = [326, 182, 241, 204, 205, 233, 1099, 1099, 1130, 508, 624, 1138, 1143, 1159, 1163, 1166, 1169, 1172, 1183, 1194, 1180, 1199, 1203, 1202, 1184, 1209, 1210] + j_vals = [117, 127, 135, 136, 137, 139, 159, 160, 179, 187, 187, 189, 195, 197, 207, 212, 216, 217, 222, 228, 229, 229, 230, 231, 232, 235, 235] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_rho[j,i] = 0 + h[j,i] = 50.0 + + # Remove ice shelf + i_vals = [841, 853, 853, 691, 395] + j_vals = [131, 135, 138, 142, 178] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_zice[j,i] = 0 + zice[j,i] = 0.0 + + # Remove land, h average of i+1, j-1, j+1 + i_vals = [1157, 1158] + j_vals = [206, 209] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_rho[j,i] = 1 + h[j,i] = mean(array([h[j,i+1], h[j-1,i], h[j+1,i]])) + + # Remove land, h average of i-1, i+1, j-1, j+1 + i_vals = [1159, 1160] + j_vals = [202, 211] + num_pts = len(i_vals) + for n in range(num_pts): + i = i_vals[n]-1 + j = j_vals[n]-1 + mask_rho[j,i] = 1 + h[j,i] = mean(array([h[j,i-1], h[j,i+1], h[j-1,i], h[j+1,i]])) + + # Calculate new land mask for u, v, psi grids + mask_u = mask_rho[:,1:]*mask_rho[:,:-1] + mask_v = mask_rho[1:,:]*mask_rho[:-1,:] + mask_psi = mask_rho[1:,1:]*mask_rho[:-1,1:]*mask_rho[1:,:-1]*mask_rho[:-1,:-1] + + # Save new fields + id.variables['mask_rho'][:,:] = mask_rho + id.variables['mask_u'][:,:] = mask_u + id.variables['mask_v'][:,:] = mask_v + id.variables['h'][:,:] = h + id.variables['mask_zice'][:,:] = mask_zice + id.variables['zice'][:,:] = zice + id.close() + +if __name__ == "__main__": + fix_isolated_pts() + diff --git a/freezingpt_slice.py b/freezingpt_slice.py index 6b444ca..9da971c 100644 --- a/freezingpt_slice.py +++ b/freezingpt_slice.py @@ -20,9 +20,9 @@ def freezingpt_slice (file_path, tstep, i_val, depth_min, 
colour_bounds=None, save=False, fig_name=None): # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 # Read temperature, salinity, and grid variables diff --git a/gadv_frazil_bathy.py b/gadv_frazil_bathy.py index 1dd0002..eccb421 100644 --- a/gadv_frazil_bathy.py +++ b/gadv_frazil_bathy.py @@ -9,10 +9,10 @@ def gadv_frazil_bathy (): # Paths to simulation directories - path_up3l = '/short/m68/kaa561/advection_bitreproducible/u3_lim/' - path_up3 = '/short/m68/kaa561/advection_bitreproducible/u3/' + path_up3l = '/short/m68/kaa561/advection/u3_lim/' + path_up3 = '/short/m68/kaa561/advection/u3/' # Path to ROMS grid (containing bathymetry data) - roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree_old.nc' # Filename for annually averaged CICE data file_tail = 'cice/rundir/history/iceh_avg.nc' # Bounds on lat and lon @@ -21,8 +21,8 @@ def gadv_frazil_bathy (): lat_min = -71 lat_max = -64 # Bounds for colour scales - max_frazil = 0.4 - tick_frazil = 0.2 + max_frazil = 0.5 + tick_frazil = 0.25 max_bathy = 5 tick_bathy = 1 diff --git a/gadv_frazil_thickness.py b/gadv_frazil_thickness.py index b3d3f78..9cbd1d7 100644 --- a/gadv_frazil_thickness.py +++ b/gadv_frazil_thickness.py @@ -8,8 +8,8 @@ def gadv_frazil_thickness (): # Paths to simulation directories - path_up3l = '/short/m68/kaa561/advection_bitreproducible/u3_lim/' - path_up3 = '/short/m68/kaa561/advection_bitreproducible/u3/' + path_up3l = '/short/m68/kaa561/advection/u3_lim/' + path_up3 = '/short/m68/kaa561/advection/u3/' # Annually averaged CICE file file_tail_avg = 'cice/rundir/history/iceh_avg.nc' # Daily averaged CICE file for 23 August diff --git a/gadv_freezingpt_slice.py b/gadv_freezingpt_slice.py index f8747b7..bb291c0 100644 --- a/gadv_freezingpt_slice.py +++ b/gadv_freezingpt_slice.py @@ -9,19 +9,19 @@ def 
gadv_freezingpt_slice (): # Path to ocean history file - file_path = '/short/m68/kaa561/advection_bitreproducible/u3/ocean_his_7aug.nc' + file_path = '/short/m68/kaa561/advection/u3/ocean_his_0001.nc' # Timestep to plot - tstep = 1 #220 + tstep = 177 # i-index to plot (1-based) i_val = 10 # Deepest depth to plot depth_min = -100 # Bounds on colour scale - scale_max = 0.08 - scale_tick = 0.04 + scale_max = 0.15 + scale_tick = 0.05 # Bounds on latitudes to plot - lat_min = -70.3 - lat_max = -66.5 + lat_min = -69 + lat_max = -64 save = True fig_name = 'kn_fig1.png' @@ -49,7 +49,7 @@ def gadv_freezingpt_slice (): deltat = temp - tfr # Get a 3D array of z-coordinates; sc_r and Cs_r are unused in this script - z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta) + z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta, Vstretching=2) # Select depth and latitude at the given i-index z = z_3d[:,:,i_val-1] lat = tile(lat_2d[:,i_val-1], (N,1)) diff --git a/h_circumpolar.py b/h_circumpolar.py index bf005d8..613da9f 100644 --- a/h_circumpolar.py +++ b/h_circumpolar.py @@ -10,11 +10,6 @@ # fig_name = filename for figure def h_circumpolar (grid_path, fig_name): - # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 - N = 31 deg2rad = pi/180 # Read data diff --git a/i_slice.py b/i_slice.py index 11f87ac..b742ec6 100644 --- a/i_slice.py +++ b/i_slice.py @@ -21,9 +21,9 @@ def i_slice (file_path, var_name, tstep, i_val, depth_min, colour_bounds=None, save=False, fig_name=None): # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 # Read data and grid variables diff --git a/ini_sss_circumpolar.py b/ini_sss_circumpolar.py index 5e4c538..db91e51 100644 --- a/ini_sss_circumpolar.py +++ b/ini_sss_circumpolar.py @@ -13,11 +13,6 @@ # fig_name = filename for figure def ini_sss_circumpolar (grid_path, file_path, fig_name): - # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 - N = 31 deg2rad = 
pi/180 # Read surface temps diff --git a/ini_sst_circumpolar.py b/ini_sst_circumpolar.py index 3b77d30..1b5636d 100644 --- a/ini_sst_circumpolar.py +++ b/ini_sst_circumpolar.py @@ -13,11 +13,6 @@ # fig_name = filename for figure def ini_sst_circumpolar (grid_path, file_path, fig_name): - # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 - N = 31 deg2rad = pi/180 # Read surface temps diff --git a/make_density_file.py b/make_density_file.py index dc278f9..c6ff786 100644 --- a/make_density_file.py +++ b/make_density_file.py @@ -12,9 +12,9 @@ def make_density_file (input_file, output_file): # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 # Read grid variables diff --git a/monthly_avg_cice.py b/monthly_avg_cice.py index 4d218a2..11ec816 100644 --- a/monthly_avg_cice.py +++ b/monthly_avg_cice.py @@ -38,6 +38,7 @@ def monthly_avg_cice (file_path, var, shape, month, instance=-1): # Default: find the last instance of this month end_t = -1 for t in range(size(cice_time)-1, -1, -1): + check_leapyear(cice_time[t].year, end_day) if cice_time[t].month-1 == next_month and cice_time[t].day in range(start_day[next_month], start_day[next_month]+5): end_t = t break @@ -46,6 +47,7 @@ def monthly_avg_cice (file_path, var, shape, month, instance=-1): return start_t = -1 for t in range(end_t, -1, -1): + check_leapyear(cice_time[t].year, end_day) if cice_time[t].month-1 == month and cice_time[t].day in range(start_day[month]+1, start_day[month]+6): start_t = t break @@ -56,7 +58,8 @@ def monthly_avg_cice (file_path, var, shape, month, instance=-1): # Find the given instance of the month count = 0 start_t = -1 - for t in range(size(time)): + for t in range(size(cice_time)): + check_leapyear(cice_time[t].year, end_day) if cice_time[t].month-1 == month and cice_time[t].day in range(start_day[month]+1, start_day[month]+6): count += 1 if count == instance: @@ -66,7 +69,8 @@ def monthly_avg_cice (file_path, var, shape, 
month, instance=-1): print 'Error: ' + file_path + ' does not contain ' + str(instance) + ' ' + month_name[month] + 's' return end_t = -1 - for t in range(start_t+1, size(time)): + for t in range(start_t+1, size(cice_time)): + check_leapyear(cice_time[t].year, end_day) if cice_time[t].month-1 == next_month and cice_time[t].day in range(start_day[next_month], start_day[next_month]+5): end_t = t break @@ -82,15 +86,7 @@ def monthly_avg_cice (file_path, var, shape, month, instance=-1): # Use the year of the first timestep we care about cice_year = cice_time[start_t].year # Check for leap years - leap_year = False - if mod(cice_year, 4) == 0: - leap_year = True - if mod(cice_year, 100) == 0: - leap_year = False - if mod(cice_year, 400) == 0: - leap_year = True - if leap_year: - end_day[1] = 29 + check_leapyear(cice_year, end_day) num_days = 0 @@ -159,3 +155,18 @@ def monthly_avg_cice (file_path, var, shape, month, instance=-1): monthly_data /= num_days return monthly_data + + +def check_leapyear (year, end_day): + + leap_year = False + if mod(year, 4) == 0: + leap_year = True + if mod(year, 100) == 0: + leap_year = False + if mod(year, 400) == 0: + leap_year = True + if leap_year: + end_day[1] = 29 + else: + end_day[1] = 28 diff --git a/monthly_avg_roms.py b/monthly_avg_roms.py index 3ed5f49..87ce099 100644 --- a/monthly_avg_roms.py +++ b/monthly_avg_roms.py @@ -39,6 +39,7 @@ def monthly_avg_roms (file_path, var, shape, month, instance=-1): # Default: find the last instance of this month end_t = -1 for t in range(size(time)-1, -1, -1): + check_leapyear(time[t].year, end_day) if time[t].month-1 == month and time[t].day in range(end_day[month]-2, end_day[month]+1): end_t = t break @@ -50,6 +51,7 @@ def monthly_avg_roms (file_path, var, shape, month, instance=-1): return start_t = -1 for t in range(end_t, -1, -1): + check_leapyear(time[t].year, end_day) if time[t].month-1 == prev_month and time[t].day in range(end_day[prev_month]-1, end_day[prev_month]+1): start_t = t 
break @@ -64,6 +66,7 @@ def monthly_avg_roms (file_path, var, shape, month, instance=-1): count = 0 start_t = -1 for t in range(size(time)): + check_leapyear(time[t].year, end_day) if time[t].month-1 == prev_month and time[t].day in range(end_day[prev_month]-1, end_day[prev_month]+1): count += 1 if count == instance: @@ -79,6 +82,7 @@ def monthly_avg_roms (file_path, var, shape, month, instance=-1): return end_t = -1 for t in range(start_t+1, size(time)): + check_leapyear(time[t].year, end_day) if time[t].month-1 == month and time[t].day in range(end_day[month]-2, end_day[month]+1): end_t = t break @@ -97,15 +101,7 @@ def monthly_avg_roms (file_path, var, shape, month, instance=-1): # Use the year of the first timestep we care about roms_year = time[start_t].year # Check for leap years - leap_year = False - if mod(roms_year, 4) == 0: - leap_year = True - if mod(roms_year, 100) == 0: - leap_year = False - if mod(roms_year, 400) == 0: - leap_year = True - if leap_year: - end_day[1] = 29 + check_leapyear(roms_year, end_day) num_days = 0 @@ -165,4 +161,20 @@ def monthly_avg_roms (file_path, var, shape, month, instance=-1): return monthly_data + +def check_leapyear (year, end_day): + + leap_year = False + if mod(year, 4) == 0: + leap_year = True + if mod(year, 100) == 0: + leap_year = False + if mod(year, 400) == 0: + leap_year = True + if leap_year: + end_day[1] = 29 + else: + end_day[1] = 28 + + diff --git a/nbdry_grid_hack.py b/nbdry_grid_hack.py index c8b569e..dfd8eef 100644 --- a/nbdry_grid_hack.py +++ b/nbdry_grid_hack.py @@ -42,6 +42,6 @@ def nbdry_grid_hack (grid_file, num_pts): # Command-line interface if __name__ == "__main__": - grid_file='../metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + grid_file='../metroms_iceshelf/apps/common/grid/original.nc' num_pts = 15 nbdry_grid_hack(grid_file, num_pts) diff --git a/overturning_plot.py b/overturning_plot.py deleted file mode 100644 index 6d66ea8..0000000 --- a/overturning_plot.py +++ /dev/null @@ 
-1,84 +0,0 @@ -from netCDF4 import Dataset -from numpy import * -from matplotlib.pyplot import * -from cartesian_grid_3d import * -from rotate_vector_roms import * - -# Calculate the meridional overturning streamfunction at the last timestep -# of the given ROMS history/averages file and make a contour plot in z-space. -# Input: -# grid_path = path to ROMS grid file -# file_path = path to ROMS history/averages file -# fig_name = filename for figure -def overturning_plot (grid_path, file_path, fig_name): - - # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 - N = 31 - - # Read angle from the grid file - grid_id = Dataset(grid_path, 'r') - angle = grid_id.variables['angle'][:-15,:] - grid_id.close() - # Read grid variables - id = Dataset(file_path, 'r') - h = id.variables['h'][:-15,1:-1] - zice = id.variables['zice'][:-15,1:-1] - zeta = id.variables['zeta'][-1,:-15,1:-1] - lon = id.variables['lon_rho'][:-15,1:-1] - lat = id.variables['lat_rho'][:-15,1:-1] - # Read both velocities in x-y space - u_xy = id.variables['u'][-1,:,:-15,:] - v_xy = id.variables['v'][-1,:,:-15,:] - id.close() - - # Rotate velocities to lat-lon space - v = ma.empty([N,v_xy.shape[1]+1,v_xy.shape[2]]) - for k in range(N): - u_lonlat, v_lonlat = rotate_vector_roms(u_xy[k,:,:], v_xy[k,:,:], angle) - v[k,:,:] = v_lonlat[:,:] - # Throw away the periodic boundary overlap - v = v[:,:,1:-1] - - # Calculate Cartesian integrands and z-coordinates - dx, dy, dz, z = cartesian_grid_3d(lon, lat, h, zice, theta_s, theta_b, hc, N, zeta) - - # Calculate transport in each cell - transport = v*dx*dz - # Definite integral over longitude - transport = sum(transport, axis=2) - # Indefinite integral over depth; flip before and after so the integral - # starts at the surface, not the bottom. Also convert to Sv. 
- transport = flipud(cumsum(flipud(transport), axis=0))*1e-6 - - # Calculate latitude and z coordinates, averaged over longitude, - # for plotting - avg_lat = mean(lat, axis=1) - avg_lat = tile(avg_lat, (N,1)) - avg_z = mean(z, axis=2) - - # Centre colour scale on 0 - max_val = amax(abs(transport)) - lev = linspace(-max_val, max_val, num=40) - - # Make the plot - figure(figsize=(16,8)) - contourf(avg_lat, avg_z, transport, lev, cmap='RdBu_r') - colorbar() - xlabel('Latitude') - ylabel('Depth (m)') - title('Meridional Overturning Streamfunction (Sv)') - - #savefig(fig_name) - show() - - -# Command-line interface -if __name__ == "__main__": - - grid_path = raw_input("Path to ROMS grid file: ") - file_path = raw_input("Path to ROMS history/averages file: ") - fig_name = raw_input("File name for figure: ") - overturning_plot(grid_path, file_path, fig_name) diff --git a/plot_layers.py b/plot_layers.py new file mode 100644 index 0000000..deec5ad --- /dev/null +++ b/plot_layers.py @@ -0,0 +1,121 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from calc_z import * +from interp_lon_roms import * + +def plot_layers (grid_path, lon0, depth_min, Vstretching, theta_s, theta_b, hc, N): + + id = Dataset(grid_path, 'r') + lon_2d = id.variables['lon_rho'][:,:] + lat_2d = id.variables['lat_rho'][:,:] + h = id.variables['h'][:,:] + zice = id.variables['zice'][:,:] + mask = id.variables['mask_rho'][:,:] + id.close() + + z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, None, Vstretching) + + if lon0 < 0: + lon_string = str(int(round(-lon0))) + r'$^{\circ}$W' + else: + lon_string = str(int(round(lon0))) + r'$^{\circ}$E' + + if lon0 < 0: + lon0 += 360 + + mask_3d = tile(mask, (N,1,1)) + + mask_interp, z, lat = interp_lon_roms(mask_3d, z_3d, lat_2d, lon_2d, lon0) + layer = zeros(shape(z)) + for k in range(N): + layer[k,:] = k+1 + layer = ma.masked_where(mask_interp==0, layer) + + # Choose latitude bounds based on land mask + mask_sum = 
sum(mask_interp, axis=0) + # Find southernmost and northernmost unmasked j-indices + edges = ma.flatnotmasked_edges(mask_sum) + j_min = edges[0] + j_max = edges[1] + if j_min == 0: + # There are ocean points right to the southern boundary + # Don't do anything special + lat_min = min(lat[:,j_min]) + else: + # There is land everywhere at the southern boundary + # Show the last 2 degrees of this land mask + lat_min = min(lat[:,j_min]) - 2 + if j_max == size(mask_sum) - 1: + # There are ocean points right to the northern boundary + # Don't do anything special + lat_max = max(lat[:,j_max]) + else: + # There is land everywhere at the northern boundary + # Show the first 2 degrees of this land mask + lat_max = max(lat[:,j_max]) + 2 + + #lat_min = -85 + #lat_max = -75 + + lev = range(1, N) + + fig = figure(figsize=(18,6)) #(18,12)) + contour(lat, z, layer, lev, colors='k') + title(lon_string) #(r"ROMS terrain-following levels through the Ross Ice Shelf cavity (180$^{\circ}$E)", fontsize=24) + xlabel('Latitude') + ylabel('Depth (m)') + xlim([lat_min, lat_max]) + ylim([depth_min, 0]) + #text(-82, -200, "Ice shelf", fontsize=24) + #text(-82, -650, "Sea floor", fontsize=24) + #fig.savefig('ross_levels.png') + fig.show() + + if lon0 > 180: + lon0 -= 360 + + +if __name__ == "__main__": + + grid_path = raw_input("Path to grid file: ") + lon0 = float(raw_input("Enter longitude (-180 to 180): ")) + depth_min = -1*float(raw_input("Deepest depth to plot (positive, metres): ")) + Vstretching = int(raw_input("Vstretching (2 or 4): ")) + theta_s = float(raw_input("theta_s: ")) + theta_b = float(raw_input("theta_b: ")) + hc = float(raw_input("hc: ")) + N = int(raw_input("N: ")) + plot_layers (grid_path, lon0, depth_min, Vstretching, theta_s, theta_b, hc, N) + + while True: + repeat = raw_input("Make another plot (y/n)? 
") + if repeat == 'y': + while True: + changes = raw_input("Enter a parameter to change: (1) grid file, (2) longitude, (3) deepest depth, (4) Vstretching, (5) theta_s, (6) theta_b, (7) hc, (8) N; or enter to continue: ") + if len(changes) == 0: + break + else: + if int(changes) == 1: + grid_path = raw_input("Path to grid file: ") + elif int(changes) == 2: + lon0 = float(raw_input("Enter longitude (-180 to 180): ")) + elif int(changes) == 3: + depth_min = -1*float(raw_input("Deepest depth to plot (positive, metres): ")) + elif int(changes) == 4: + Vstretching = int(raw_input("Vstretching (2 or 4): ")) + elif int(changes) == 5: + theta_s = float(raw_input("theta_s: ")) + elif int(changes) == 6: + theta_b = float(raw_input("theta_b: ")) + elif int(changes) == 7: + hc = float(raw_input("hc: ")) + elif int(changes) == 8: + N = int(raw_input("N: ")) + plot_layers (grid_path, lon0, depth_min, Vstretching, theta_s, theta_b, hc, N) + else: + break + + + + diff --git a/romscice_atm_subdaily.py b/romscice_atm_subdaily.py index ff51f43..9d3e76f 100644 --- a/romscice_atm_subdaily.py +++ b/romscice_atm_subdaily.py @@ -8,14 +8,15 @@ # total cloud cover (tcc), and 10-metre winds (u10, # v10) # FC_yyyy_subdaily_orig.nc: one year of 12-hour measurements for total -# precipitation (tp) and snowfall (sf) +# precipitation (tp), snowfall (sf), and evaporation +# (e) # to two ROMS-CICE input forcing files with the correct units and naming # conventions, and winds rotated correctly: # AN_yyyy_subdaily.nc: one year of 6-hour measurements for surface pressure # (Pair), temperature (Tair), specific humidity (Qair), # cloud fraction (cloud), and winds (Uwind, Vwind) -# FC_yyyy_subdaily.nc: one year of 12-hour measurements for rainfall (rain) and -# snowfall (snow) +# FC_yyyy_subdaily.nc: one year of 12-hour measurements for rainfall (rain), +# snowfall (snow), and evaporation # Input: year = integer containing the year to process # count = time record in the given year to start with @@ -32,9 
+33,9 @@ def convert_file (year, count): # Paths of ROMS grid file, input ERA-Interim files, and output ROMS-CICE # files; other users will need to change these - grid_file = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree_good.nc' - input_atm_file = '/short/m68/kaa561/metroms_iceshelf/data/ERA_Interim/AN_' + str(year) + '_subdaily_orig.nc' - input_ppt_file = '/short/m68/kaa561/metroms_iceshelf/data/ERA_Interim/FC_' + str(year) + '_subdaily_orig.nc' + grid_file = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + input_atm_file = '/short/m68/kaa561/metroms_iceshelf/data/originals/ERA_Interim/AN_' + str(year) + '_subdaily_orig.nc' + input_ppt_file = '/short/m68/kaa561/metroms_iceshelf/data/originals/ERA_Interim/FC_' + str(year) + '_subdaily_orig.nc' output_atm_file = '/short/m68/kaa561/metroms_iceshelf/data/ERA_Interim/AN_' + str(year) + '_subdaily.nc' output_ppt_file = '/short/m68/kaa561/metroms_iceshelf/data/ERA_Interim/FC_' + str(year) + '_subdaily.nc' logfile = str(year) + '.log' @@ -196,6 +197,9 @@ def convert_file (year, count): oppt_fid.createVariable('snow', 'f8', ('time', 'eta_rho', 'xi_rho')) oppt_fid.variables['snow'].long_name = 'snow fall rate' oppt_fid.variables['snow'].units = 'm_per_12hr' + oppt_fid.createVariable('evaporation', 'f8', ('time', 'eta_rho', 'xi_rho')) + oppt_fid.variables['evaporation'].long_name = 'evaporation rate' + oppt_fid.variables['evaporation'].units = 'm_per_12hr' oppt_fid.close() log = open(logfile, 'a') @@ -215,15 +219,19 @@ def convert_file (year, count): ippt_fid = Dataset(input_ppt_file, 'r') tp = transpose(ippt_fid.variables['tp'][t,:,:]) sf = transpose(ippt_fid.variables['sf'][t,:,:]) + e = -1*transpose(ippt_fid.variables['e'][t,:,:]) ippt_fid.close() # Interpolate to ROMS grid and write to output FC file rain = interp_era2roms(tp, lon_era, lat_era, lon_roms, lat_roms) snow = interp_era2roms(sf, lon_era, lat_era, lon_roms, lat_roms) - # Make sure there are no 
negative values + evap = interp_era2roms(e, lon_era, lat_era, lon_roms, lat_roms) + # Make sure there are no negative values for precip rain[rain < 0] = 0.0 oppt_fid.variables['rain'][t,:,:] = rain snow[snow < 0] = 0.0 oppt_fid.variables['snow'][t,:,:] = snow + # Negative values are allowed for evaporation (they mean condensation) + oppt_fid.variables['evaporation'][t,:,:] = evap oppt_fid.close() log = open(logfile, 'a') diff --git a/romscice_ini_ecco.py b/romscice_ini_ecco.py index df32d6c..58ab00b 100644 --- a/romscice_ini_ecco.py +++ b/romscice_ini_ecco.py @@ -15,11 +15,12 @@ from netCDF4 import Dataset from numpy import * from scipy.interpolate import RegularGridInterpolator +from scipy.spatial import KDTree from calc_z import * # Main routine -def run (grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b, hc, N, nbdry_ecco): +def run (grid_file, theta_file, salt_file, output_file, theta_s, theta_b, hc, N, nbdry_ecco): # Read ECCO2 data and grid print 'Reading ECCO2 data' @@ -91,9 +92,9 @@ def run (grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b # Regridding happens here... 
print 'Interpolating temperature' - temp = interp_ecco2roms_ini(theta, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice, -0.5) + temp = interp_ecco2roms_ini(theta, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice) print 'Interpolating salinity' - salt = interp_ecco2roms_ini(salt, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice, 34.5) + salt = interp_ecco2roms_ini(salt, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice) # Set initial velocities and sea surface height to zero u = zeros((N, num_lat, num_lon-1)) @@ -133,7 +134,7 @@ def run (grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b out_fid.createVariable('Tcline', 'f8', ('one')) out_fid.variables['Tcline'].long_name = 'S-coordinate surface/bottom layer width' out_fid.variables['Tcline'].units = 'meter' - out_fid.variables['Tcline'][:] = Tcline + out_fid.variables['Tcline'][:] = hc out_fid.createVariable('hc', 'f8', ('one')) out_fid.variables['hc'].long_name = 'S-coordinate parameter, critical depth' out_fid.variables['hc'].units = 'meter' @@ -187,8 +188,8 @@ def run (grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b print 'Finished' -# Given an array on the ECCO2 grid, interoplate onto the ROMS grid, extrapolate -# under ice shelf cavities, and fill the land mask with constant values. +# Given an array on the ECCO2 grid, fill the land mask with nearest neighbours, +# interpolate onto the ROMS grid, and extrapolate under ice shelf cavities. 
# Input: # A = array of size nxmxo containing values on the ECCO2 grid (dimension # longitude x latitude x depth) @@ -208,12 +209,10 @@ def run (grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b # land 0) # mask_zice = array of size qxr containing ROMS ice shelf mask (ice shelf 1, # ocean/land 0) -# fill = scalar containing the value with which to fill the ROMS land mask -# (doesn't really matter, don't use NaN) # Output: # B = array of size pxqxr containing values on the ROMS grid (dimension depth x # latitude x longitude) -def interp_ecco2roms_ini (A, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice, fill): +def interp_ecco2roms_ini (A, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice): # Radius of the Earth in m r = 6.371e6 @@ -223,26 +222,32 @@ def interp_ecco2roms_ini (A, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_ro # Calculate N based on size of ROMS grid N = size(lon_roms_3d, 0) - # Unmask A and fill with NaNs - A_unmask = A.data - A_unmask[A.mask]=NaN + # Fill in masked values with the nearest neighbours so they don't screw up + # the interpolation + # Loop over z-levels + print 'Filling land mask with nearest neighbours' + for k in range(size(A,2)): + print '...vertical level ' + str(k+1) + ' of ' + str(size(A,2)) + tmp = A[:,:,k] + j,i = mgrid[0:tmp.shape[0], 0:tmp.shape[1]] + jigood = array((j[~tmp.mask], i[~tmp.mask])).T + jibad = array((j[tmp.mask], i[tmp.mask])).T + tmp[tmp.mask] = tmp[~tmp.mask][KDTree(jigood).query(jibad)[1]] + A[:,:,k] = tmp - # Build a function for linear interpolation of A; set flag to fill - # out-of-bounds values with NaN - interp_function = RegularGridInterpolator((lon_ecco, lat_ecco, depth_ecco), A_unmask, bounds_error=False, fill_value=NaN) + # Build a function for linear interpolation of A + interp_function = RegularGridInterpolator((lon_ecco, lat_ecco, depth_ecco), A) B = zeros(shape(lon_roms_3d)) + print 
'Interpolating to ROMS grid' # Interpolate each z-level individually - 3D vectorisation uses too # much memory! for k in range(N): print '...vertical level ', str(k+1), ' of ', str(N) # Pass positive values for ROMS depth tmp = interp_function((lon_roms_3d[k,:,:], lat_roms_3d[k,:,:], -z_roms_3d[k,:,:])) - # Fill NaNs with constant value - index = isnan(tmp) - tmp[index] = fill # Fill land mask with constant value - tmp[mask_rho==0] = fill + tmp[mask_rho==0] = 0.0 # Save this depth level B[k,:,:] = tmp @@ -311,24 +316,23 @@ def interp_ecco2roms_ini (A, lon_ecco, lat_ecco, depth_ecco, lon_roms_3d, lat_ro # File paths to edit here: # Path to ROMS grid file - grid_file = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + grid_file = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree_tmp.nc' # Path to ECCO2 files for temperature and salinity in January 1992 - theta_file = '/short/m68/kaa561/metroms_iceshelf/data/THETA.1440x720x50.199201.nc' - salt_file = '/short/m68/kaa561/metroms_iceshelf/data/SALT.1440x720x50.199201.nc' + theta_file = '/short/m68/kaa561/metroms_iceshelf/data/originals/ECCO2/THETA.1440x720x50.199201.nc' + salt_file = '/short/m68/kaa561/metroms_iceshelf/data/originals/ECCO2/SALT.1440x720x50.199201.nc' # Path to desired output file - output_file = '/short/m68/kaa561/metroms_iceshelf/data/ecco2_ini.nc' + output_file = '/short/m68/kaa561/metroms_iceshelf/data/ecco2_ini_newmask.nc' # Grid parameters; check grid_file and *.in file to make sure these are correct - Tcline = 40 - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 # Northernmost index of ECCO2 grid to read (1-based) nbdry_ecco = 241 - run(grid_file, theta_file, salt_file, output_file, Tcline, theta_s, theta_b, hc, N, nbdry_ecco) + run(grid_file, theta_file, salt_file, output_file, theta_s, theta_b, hc, N, nbdry_ecco) diff --git a/romscice_ini_woa.py b/romscice_ini_woa.py index e53c9f7..73c08a2 100644 --- 
a/romscice_ini_woa.py +++ b/romscice_ini_woa.py @@ -264,18 +264,18 @@ def interp_woa2roms (A, lon_woa, lat_woa, depth_woa, lon_roms_3d, lat_roms_3d, z if __name__ == "__main__": # Path to ROMS grid file - grid_file = '../metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + grid_file = '../metroms_iceshelf/apps/common/grid/circ30S_quarterdegree_tmp.nc' # Path to World Ocean Atlas NetCDF file (converted from FESOM input using # woa_netcdf.py) woa_file = '/short/y99/kaa561/FESOM/woa01_ts.nc' # Path to desired output file - output_file = '../metroms_iceshelf/data/woa_ini.nc' + output_file = '../metroms_iceshelf/data/woa_ini_newmask.nc' # Grid parameters: check grid_file and *.in file to make sure these are correct - Tcline = 40 - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + Tcline = 250 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 # Northernmost index of WOA grid to read (1-based) diff --git a/romscice_nbc.py b/romscice_nbc.py index 3307165..0026b78 100644 --- a/romscice_nbc.py +++ b/romscice_nbc.py @@ -27,17 +27,16 @@ def convert_file (year): # Paths of ROMS grid file, input ECCO2 files (without the tail yyyymm.nc), # and output ROMS-CICE boundary condition file; other users will need to # change these - grid_file = '../metroms_iceshelf/apps/common/grid/circ30S_quarterdegree_good.nc' - theta_base = '../metroms_iceshelf/data/ECCO2/raw/THETA.1440x720x50.' + str(year) - salt_base = '../metroms_iceshelf/data/ECCO2/raw/SALT.1440x720x50.' + str(year) - vvel_base = '../metroms_iceshelf/data/ECCO2/raw/VVEL.1440x720x50.' + str(year) + grid_file = '../metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + theta_base = '../metroms_iceshelf/data/originals/ECCO2/THETA.1440x720x50.' + str(year) + salt_base = '../metroms_iceshelf/data/originals/ECCO2/SALT.1440x720x50.' + str(year) + vvel_base = '../metroms_iceshelf/data/originals/ECCO2/VVEL.1440x720x50.' 
+ str(year) output_file = '../metroms_iceshelf/data/ECCO2/ecco2_cube92_lbc_' + str(year) + '.nc' # Grid parameters; check grid_file and *.in to make sure these are correct - Tcline = 40 - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 # Northernmost index of ECCO2 grid to read (1-based) nbdry_ecco = 241 @@ -159,7 +158,7 @@ def convert_file (year): out_fid.createVariable('Tcline', 'f8', ('one')) out_fid.variables['Tcline'].long_name = 'S-coordinate surface/bottom layer width' out_fid.variables['Tcline'].units = 'meter' - out_fid.variables['Tcline'][:] = Tcline + out_fid.variables['Tcline'][:] = hc out_fid.createVariable('hc', 'f8', ('one')) out_fid.variables['hc'].long_name = 'S-coordinate parameter, critical depth' out_fid.variables['hc'].units = 'meter' @@ -332,8 +331,8 @@ def interp_ecco2roms_nbc (A, lon_ecco, lat_ecco, depth_ecco, lon_roms, lat_roms, if __name__ == "__main__": # Start and end years; other users may need to change these - first_year = 1992 - last_year = 2005 + first_year = 2006 + last_year = 2016 for year in range(first_year, last_year+1): print 'Processing '+str(year) convert_file(year) diff --git a/romscice_sss_nudging.py b/romscice_sss_nudging.py new file mode 100644 index 0000000..79e10ac --- /dev/null +++ b/romscice_sss_nudging.py @@ -0,0 +1,100 @@ +from netCDF4 import Dataset +from numpy import * +from scipy.interpolate import RegularGridInterpolator +from scipy.spatial import KDTree + +def romscice_sss_nudging (): + + grid_file = '../metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + woa_head = '../metroms_iceshelf/data/originals/WOA_2013/woa13_95A4_s' + woa_tail = '_01v2.nc' + output_file = '../metroms_iceshelf/data/sss_nudge.nc' + nbdry_woa = 61 + + print 'Reading WOA grid' + id = Dataset(woa_head + '01' + woa_tail, 'r') + lon_woa_raw = id.variables['lon'][:] + lat_woa = id.variables['lat'][0:nbdry_woa] + id.close() + + index = lon_woa_raw > 180 + lon_woa_raw[index] -= 360 + + # The
WOA longitude axis doesn't wrap around; there is a gap between + # almost-180W and almost-180E, and the ROMS grid has points in this gap. + # So copy the last longitude value (mod 360) to the beginning, and the + # first longitude value (mod 360) to the end. + lon_woa = zeros(size(lon_woa_raw)+2) + lon_woa[0] = lon_woa_raw[-1]-360 + lon_woa[1:-1] = lon_woa_raw + lon_woa[-1] = lon_woa_raw[0]+360 + + # Read ROMS grid + print 'Reading ROMS grid' + id = Dataset(grid_file, 'r') + lon_roms = id.variables['lon_rho'][:,:] + lat_roms = id.variables['lat_rho'][:,:] + h = id.variables['h'][:,:] + mask_rho = id.variables['mask_rho'][:,:] + mask_zice = id.variables['mask_zice'][:,:] + id.close() + num_lon = size(lon_roms, 1) + num_lat = size(lon_roms, 0) + + index = lon_roms > 180 + lon_roms[index] -= 360 + + print 'Setting up ' + output_file + out_id = Dataset(output_file, 'w') + out_id.createDimension('xi_rho', num_lon) + out_id.createDimension('eta_rho', num_lat) + out_id.createDimension('sss_time', None) + out_id.createVariable('sss_time', 'f8', ('sss_time')) + out_id.variables['sss_time'].long_name = 'time since initialization' + out_id.variables['sss_time'].units = 'days' + out_id.variables['sss_time'].cycle_length = 365.25 + out_id.createVariable('SSS', 'f8', ('sss_time', 'eta_rho', 'xi_rho')) + out_id.variables['SSS'].long_name = 'surface salinity' + out_id.variables['SSS'].units = 'psu' + + for month in range(12): + print 'Processing month ' + str(month+1) + ' of 12' + if month+1 < 10: + filename = woa_head + '0' + str(month+1) + woa_tail + else: + filename = woa_head + str(month+1) + woa_tail + id = Dataset(filename, 'r') + sss_raw = transpose(id.variables['s_an'][0,0,0:nbdry_woa,:]) + id.close() + sss = ma.array(zeros((size(lon_woa), size(lat_woa)))) + sss[1:-1,:] = ma.copy(sss_raw) + sss[0,:] = ma.copy(sss_raw[-1,:]) + sss[-1,:] = ma.copy(sss_raw[0,:]) + sss_interp = interp_woa2roms_sfc(sss, lon_woa, lat_woa, lon_roms, lat_roms) + + time = 365.25/12*(month+0.5) + 
out_id.variables['sss_time'][month] = time + out_id.variables['SSS'][month,:,:] = sss_interp + + out_id.close() + + +def interp_woa2roms_sfc (A, lon_woa, lat_woa, lon_roms, lat_roms): + + j,i = mgrid[0:A.shape[0], 0:A.shape[1]] + jigood = array((j[~A.mask], i[~A.mask])).T + jibad = array((j[A.mask], i[A.mask])).T + A[A.mask] = A[~A.mask][KDTree(jigood).query(jibad)[1]] + interp_function = RegularGridInterpolator((lon_woa, lat_woa), A) + B = interp_function((lon_roms, lat_roms)) + + return B + + +# Command-line interface +if __name__ == "__main__": + + romscice_sss_nudging() + + + diff --git a/sose_roms_seasonal.py b/sose_roms_seasonal.py index bcddd3f..f1e39b9 100644 --- a/sose_roms_seasonal.py +++ b/sose_roms_seasonal.py @@ -25,9 +25,9 @@ def sose_roms_seasonal (file_path, var_name, lon0, depth_bdry, save=False, fig_n sose_file = '../SOSE_seasonal_climatology.nc' # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 # Season names for titles diff --git a/temp_salt_slice.py b/temp_salt_slice.py index 87f8655..c63c9c5 100644 --- a/temp_salt_slice.py +++ b/temp_salt_slice.py @@ -17,9 +17,9 @@ def temp_salt_slice (file_path, tstep, lon0, depth_min, save=False, fig_name=None): # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 # Month names for titles diff --git a/timeseries_3D.py b/timeseries_3D.py index 59ce4de..a2c6dc3 100644 --- a/timeseries_3D.py +++ b/timeseries_3D.py @@ -17,9 +17,9 @@ def timeseries_3D (grid_path, file_path, log_path): # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 rho0 = 1000.0 # Reference density (kg/m^3) Cp = 3974 # Specific heat of polar seawater (J/K/kg) diff --git a/timeseries_sss.py b/timeseries_sss.py index 899b5eb..a8f0866 100644 --- a/timeseries_sss.py +++ b/timeseries_sss.py @@ -4,8 +4,9 @@ from os.path import * from cartesian_grid_2d import * -# Calculate and 
plot timeseries of area-averaged sea surface salinity and -# surface salt flux during a ROMS simulation. +# Calculate and plot timeseries of area-averaged sea surface salinity, surface +# salt flux, and surface salt flux due to salinity restoring during a ROMS +# simulation. # Input: # file_path = path to ROMS averages file # log_path = path to log file (if it exists, previously calculated values will @@ -16,6 +17,7 @@ def timeseries_sss (file_path, log_path): time = [] avg_sss = [] avg_ssflux = [] + avg_restore = [] # Check if the log file exists if exists(log_path): print 'Reading previously calculated values' @@ -34,7 +36,12 @@ def timeseries_sss (file_path, log_path): except(ValueError): break for line in f: - avg_ssflux.append(float(line)) + try: + avg_ssflux.append(float(line)) + except(ValueError): + break + for line in f: + avg_restore.append(float(line)) f.close() print 'Analysing grid' @@ -42,7 +49,7 @@ def timeseries_sss (file_path, log_path): lon = id.variables['lon_rho'][:-15,1:-1] lat = id.variables['lat_rho'][:-15,1:-1] zice = id.variables['zice'][:-15,1:-1] - # Calculate area on the tracer grid + # Calculate area on the tracer grid and mask ice shelves dx, dy = cartesian_grid_2d(lon, lat) dA = ma.masked_where(zice!=0, dx*dy) # Read time values and convert from seconds to years @@ -52,16 +59,18 @@ def timeseries_sss (file_path, log_path): time.append(new_time[t]) print 'Reading data' - # Read surface salinity and salt flux + # Read surface salinity, salt flux, and restoring flux # Throw away overlapping periodic boundary and northern sponge layer sss = id.variables['salt'][:,-1,:-15,1:-1] ssflux = id.variables['ssflux'][:,:-15,1:-1] + ssflux_restoring = id.variables['ssflux_restoring'][:,:-15,1:-1] id.close() # Build timeseries for t in range(size(new_time)): avg_sss.append(sum(sss[t,:,:]*dA)/sum(dA)) avg_ssflux.append(sum(ssflux[t,:,:]*dA)/sum(dA)) + avg_restore.append(sum(ssflux_restoring[t,:,:]*dA)/sum(dA)) print 'Plotting' clf() @@ -78,6 +87,13 
@@ def timeseries_sss (file_path, log_path): grid(True) savefig('avg_ssflux.png') + clf() + plot(time, avg_restore) + xlabel('Years') + ylabel(r'Average surface salt flux from salinity restoring (kg/m$^2$/s)') + grid(True) + savefig('avg_restore.png') + print 'Saving results to log file' f = open(log_path, 'w') f.write('Time (years):\n') @@ -89,6 +105,9 @@ def timeseries_sss (file_path, log_path): f.write('Average surface salt flux (kg/m^2/s):\n') for elm in avg_ssflux: f.write(str(elm) + '\n') + f.write('Average surface salt flux from salinity restoring (kg/m^2/s):\n') + for elm in avg_restore: + f.write(str(elm) + '\n') f.close() diff --git a/zice_circumpolar.py b/zice_circumpolar.py index 936de2f..377e34f 100644 --- a/zice_circumpolar.py +++ b/zice_circumpolar.py @@ -8,11 +8,6 @@ # fig_name = filename for figure def zice_circumpolar (grid_path, fig_name): - # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 - N = 31 deg2rad = pi/180 # Northern boundary 63S for plot nbdry = -63+90 diff --git a/zonal_plot.py b/zonal_plot.py index ddfa3dc..932c663 100644 --- a/zonal_plot.py +++ b/zonal_plot.py @@ -29,9 +29,9 @@ def zonal_plot (file_path, var_name, tstep, lon_key, lon0, lon_bounds, depth_min, colour_bounds=None, save=False, fig_name=None, grid_path=None): # Grid parameters - theta_s = 4.0 - theta_b = 0.9 - hc = 40 + theta_s = 7.0 + theta_b = 2.0 + hc = 250 N = 31 if var_name in ['w','AKv','AKt','AKs']: N = 32