From f3ad52c8ebf0e81b5195ef1852bc79f955d05525 Mon Sep 17 00:00:00 2001 From: Kaitlin Naughten Date: Mon, 23 Jan 2017 15:50:12 +1100 Subject: [PATCH] Bug fixes and some new scripts for diagnosing sea ice weirdness --- adv_amery_tsplots.py | 32 +++++- adv_frazil.py | 39 ++++++- adv_mld.py | 77 ++++++++----- adv_polynyas.py | 19 ++++ adv_sss.py | 37 +++++- adv_thickness.py | 38 ++++++- aice_hi_seasonal.py | 74 +++++++++++- aice_hs_seasonal.py | 221 ++++++++++++++++++++++++++++++++++++ cice_vectorplot.py | 15 +-- file_guide.txt | 135 +++++++++++++++++++++- ice2ocn_fwflux.py | 13 ++- ice_drift_seasonal.py | 98 +++++++++++++++- nsidc_aice_seasonal.py | 5 +- seaice_budget.py | 164 +++++++++++++++++++++++++++ seaice_budget_thermo.py | 231 ++++++++++++++++++++++++++++++++++++++ sose_seasonal.py | 11 ++ ssflux_tamura_monthly.py | 107 ++++++++++++++++-- ssflux_tamura_seasonal.py | 118 ++++++++++++++++--- temp_salt_seasonal.py | 2 +- uv_vectorplot.py | 15 +-- 20 files changed, 1348 insertions(+), 103 deletions(-) create mode 100644 aice_hs_seasonal.py create mode 100644 seaice_budget.py create mode 100644 seaice_budget_thermo.py diff --git a/adv_amery_tsplots.py b/adv_amery_tsplots.py index 07d7a20..751acad 100644 --- a/adv_amery_tsplots.py +++ b/adv_amery_tsplots.py @@ -3,43 +3,65 @@ from matplotlib.pyplot import * from calc_z import * +# Create a 2x2 plot showing zonal slices of temperature and salinity through +# 71E (Amery Ice Shelf) at the end of the U3_LIM and C4_LD simulations. def adv_amery_tsplots (): + # Paths to simulation directories paths = ['/short/m68/kaa561/advection/u3_lim/', '/short/m68/kaa561/advection/c4_l/'] + # Titles for plotting labels = [r'a) Temperature ($^{\circ}$C), U3_LIM', r'b) Temperature ($^{\circ}$C), C4_LD', 'c) Salinity (psu), U3_LIM', 'd) Salinity (psu), C4_LD'] + # File name: daily average for 31 December file_tail = 'ocean_avg_31dec.nc' var_names = ['temp', 'salt'] - tstep = 1 #366 + # If 31 December doesn't have its own file, put the time index here + tstep = 1 #366 if all one file of daily averages for entire simulation + # Longitude to interpolate to lon0 = 71 + # Deepest depth to plot depth_min = -500 + # Bounds and ticks for colour scales scale_min = [-2, 33.8] - scale_max = [3, 34.8] + scale_max = [3, 34.8] scale_ticks = [1, 0.2] + # Bounds on latitude lat_min = -72 lat_max = -50 - + # Grid parameters theta_s = 4.0 theta_b = 0.9 hc = 40 N = 31 + # Set up figure fig = figure(figsize=(18,12)) + # Loop over simulations for sim in range(2): + # Loop over variables (temp and salt) for var in range(2): + # Read 3D variable id = Dataset(paths[sim]+file_tail, 'r') data_3d = id.variables[var_names[var]][tstep-1,:,:,:] + # Also read sea surface height zeta = id.variables['zeta'][tstep-1,:,:] if sim==0 and var==0: + # For the first simulation, read grid variables h = id.variables['h'][:,:] zice = id.variables['zice'][:,:] lon_2d = id.variables['lon_rho'][:,:] lat_2d = id.variables['lat_rho'][:,:] id.close() + # Calculate 3D z-coordinates z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta) + # Interpolate temp/salt, z-coordinates, and latitude to 71E data, z, lat = interp_lon(data_3d, z_3d, lat_2d, lon_2d, lon0) ax = fig.add_subplot(2, 2, 2*var+sim+1) + # Shade data (pcolor not contourf so we don't misrepresent the + # model grid) img = pcolor(lat, z, data, vmin=scale_min[var], vmax=scale_max[var], cmap='jet') + # Add title title(labels[2*var+sim], fontsize=24) + # Label axes if var == 1: xlabel('Latitude', fontsize=16) if sim == 0: @@ -47,12 +69,14 @@ def 
adv_amery_tsplots (): xlim([lat_min, lat_max]) ylim([depth_min, 0]) if sim == 1: + # Add colorbars for each variable if var == 0: cbaxes = fig.add_axes([0.93, 0.575, 0.01, 0.3]) elif var == 1: cbaxes = fig.add_axes([0.93, 0.125, 0.01, 0.3]) cbar = colorbar(img, ticks=arange(scale_min[var], scale_max[var]+scale_ticks[var], scale_ticks[var]), cax=cbaxes, extend='both') cbar.ax.tick_params(labelsize=14) + # Set ticks the way we want them lat_ticks = arange(lat_min+2, lat_max+1, 5) ax.set_xticks(lat_ticks) lat_labels = [] @@ -66,6 +90,7 @@ def adv_amery_tsplots (): depth_labels.append(str(int(round(-val)))) ax.set_yticklabels(depth_labels, fontsize=14) + # Main title suptitle(r'71$^{\circ}$E (Amery Ice Shelf), 31 December', fontsize=30) #fig.show() fig.savefig('adv_amery_tsplots.png') @@ -176,6 +201,7 @@ def interp_lon_helper (lon, lon0): return ie, iw, coeff1, coeff2 +# Command-line interface if __name__ == "__main__": adv_amery_tsplots() diff --git a/adv_frazil.py b/adv_frazil.py index 6dca7a2..70e97fb 100644 --- a/adv_frazil.py +++ b/adv_frazil.py @@ -3,23 +3,35 @@ from matplotlib.pyplot import * #import colormaps as cmaps +# Create a 3x2 plot of annually averaged frazil formation for each advection +# experiment: the absolute values for U3_LIM, and the anomalies from U3_LIM for +# the other 5 experiments. def adv_frazil (): + # Paths to simulation directories paths = ['/short/m68/kaa561/advection/u3_lim/', '/short/m68/kaa561/advection/u3/', '/short/m68/kaa561/advection/c4_l/', '/short/m68/kaa561/advection/c4_h/', '/short/m68/kaa561/advection/a4_l/', '/short/m68/kaa561/advection/a4_h/'] + # Titles for plotting labels = ['a) U3_LIM', 'b) U3 - U3_LIM', 'c) C4_LD - U3_LIM', 'd) C4_HD - U3_LIM', 'e) A4_LD - U3_LIM', 'f) A4_HD - U3_LIM'] + # File name: annual average file_tail = 'iceh_avg.nc' + # Bounds and ticks for colour scales max_abs = 0.2 tick_abs = 0.05 max_anom = 1.0 tick_anom = 0.5 + # Degrees to radians conversion factor deg2rad = pi/180. 
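The zonal slices in adv_amery_tsplots.py above are produced by interp_lon, whose helper interp_lon_helper returns the indices either side of the target longitude (ie, iw) and linear weights (coeff1, coeff2). The body of that helper is not shown in this patch; the sketch below only illustrates that kind of weighting for a single row of strictly increasing longitudes, and the function name and weight convention are assumptions, not the repository's actual code.

    from numpy import searchsorted

    def interp_lon_weights(lon_row, lon0):
        # Illustrative only: indices west (iw) and east (ie) of lon0 and
        # weights such that value(lon0) = coeff1*data[ie] + coeff2*data[iw]
        # (the real helper also handles wrapped, curvilinear ROMS longitudes)
        ie = searchsorted(lon_row, lon0)
        iw = ie - 1
        coeff1 = (lon0 - lon_row[iw])/(lon_row[ie] - lon_row[iw])
        coeff2 = 1.0 - coeff1
        return ie, iw, coeff1, coeff2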
+ # Centre of missing circle in grid lon_c = 50 lat_c = -83 + # Radius of missing circle radius = 10.5 + # Boundary of regular grid to embed circle in circle_bdry = -70+90 - + + # Read frazil data from U3_LIM simulation; also grid and mask variables id = Dataset(paths[0]+file_tail, 'r') data_tmp = id.variables['frazil'][0,:350,:] lon_tmp = id.variables['TLON'][:350,:] @@ -27,6 +39,7 @@ def adv_frazil (): mask_tmp = id.variables['tmask'][:350,:] id.close() + # Wrap periodic boundary so there isn't a gap in the plot lon = ma.empty([size(lon_tmp,0), size(lon_tmp,1)+1]) lat = ma.empty([size(lat_tmp,0), size(lat_tmp,1)+1]) mask = ma.empty([size(mask_tmp,0), size(mask_tmp,1)+1]) @@ -40,35 +53,52 @@ def adv_frazil (): data0[:,:-1] = data_tmp data0[:,-1] = data_tmp[:,0] + # Land mask land = ma.masked_where(mask==1, mask) + # Circumpolar x and y coordinates for plotting x = -(lat+90)*cos(lon*deg2rad+pi/2) y = (lat+90)*sin(lon*deg2rad+pi/2) + # Coordinates of centre of missing circle x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2) y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2) + # Regular grid to embed missing circle in x_reg, y_reg = meshgrid(linspace(-circle_bdry, circle_bdry, num=100), linspace(-circle_bdry, circle_bdry, num=100)) + # Mask everything except the circle out of the regular grid land_circle = zeros(shape(x_reg)) land_circle = ma.masked_where(sqrt((x_reg-x_c)**2 + (y_reg-y_c)**2) > radius, land_circle) + # Set up figure fig = figure(figsize=(9,15)) ax = fig.add_subplot(3, 2, 1, aspect='equal') + # First shade land contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) + # Fill in missing circle contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) + # Shade the frazil data (pcolor not contourf so we don't misrepresent the + # model grid) img0 = pcolor(x, y, data0, vmin=0, vmax=max_abs, cmap='jet') axis('off') + # Add title title(labels[0], fontsize=20) + # Add colorbar cbaxes0 = fig.add_axes([0.025, 0.7, 0.02, 0.2]) cbar0 = colorbar(img0, ticks=arange(0, max_abs+tick_abs, tick_abs), cax=cbaxes0, extend='max') cbar0.ax.tick_params(labelsize=16) + # Loop over the other simulations for sim in range(1, len(paths)): + # Read the frazil data id = Dataset(paths[sim]+file_tail, 'r') data_tmp = id.variables['frazil'][0,:350,:] id.close() - data = ma.empty([size(data_tmp,0), size(data_tmp,1)+1]) + # Wrap the periodic boundary + data = ma.empty([size(data_tmp,0), size(data_tmp,1)+1]) data[:,:-1] = data_tmp data[:,-1] = data_tmp[:,0] + # Calculate anomaly from U3_LIM data = data - data0 + # Add to plot, same as before ax = fig.add_subplot(3, 2, sim+1, aspect='equal') contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) @@ -76,10 +106,12 @@ def adv_frazil (): axis('off') title(labels[sim], fontsize=20) if sim == 3: + # Only add an anomaly colorbar for one of the simulations cbaxes = fig.add_axes([0.025, 0.4, 0.02, 0.2]) cbar = colorbar(img, ticks=arange(-max_anom, max_anom+tick_anom, tick_anom), cax=cbaxes, extend='both') cbar.ax.tick_params(labelsize=16) - + + # Main title suptitle('Annually averaged frazil ice formation (cm/day)', fontsize=24) subplots_adjust(wspace=0.025,hspace=0.2) @@ -87,6 +119,7 @@ def adv_frazil (): fig.savefig('adv_frazil.png') +# Command-line interface if __name__ == '__main__': adv_frazil() diff --git a/adv_mld.py b/adv_mld.py index 2fef7bd..5f1ad4b 100644 --- a/adv_mld.py +++ b/adv_mld.py @@ -3,79 +3,95 @@ from matplotlib.pyplot import * #import colormaps as cmaps +# Create a 3x2 plot of 
mixed layer depth (calculated by KPP) on 23 August (the
+# sea ice area max) for each advection experiment: the absolute values for
+# U3_LIM, and the anomalies from U3_LIM for the other 5 experiments.
 def adv_mld ():
 
+    # Paths to simulation directories
     paths = ['/short/m68/kaa561/advection/u3_lim/', '/short/m68/kaa561/advection/u3/', '/short/m68/kaa561/advection/c4_l/', '/short/m68/kaa561/advection/c4_h/', '/short/m68/kaa561/advection/a4_l/', '/short/m68/kaa561/advection/a4_h/']
+    # Titles for plotting
     labels = ['a) U3_LIM', 'b) U3 - U3_LIM', 'c) C4_LD - U3_LIM', 'd) C4_HD - U3_LIM', 'e) A4_LD - U3_LIM', 'f) A4_HD - U3_LIM']
+    # File name: daily average for 23 August
     file_tail = 'ocean_avg_23aug.nc'
-    tstep = 1 #236
+    # If 23 August doesn't have its own file, put the time index here
+    tstep = 1 #236 if all one file of daily averages for entire simulation
+    # Bounds and ticks for colour scales
     max_abs = 300
     tick_abs = 100
     max_anom = 200
     tick_anom = 100
+    # Degrees to radians conversion factor
     deg2rad = pi/180.
+    # Centre of missing circle in grid
     lon_c = 50
     lat_c = -83
+    # Radius of missing circle
     radius = 10.5
+    # Boundary of regular grid to embed circle in
     circle_bdry = -70+90
-
+
+    # Read mixed layer depth from U3_LIM simulation; also grid and mask
+    # variables
     id = Dataset(paths[0]+file_tail, 'r')
-    data_tmp = id.variables['Hsbl'][tstep-1,:350,:]
-    lon_tmp = id.variables['lon_rho'][:350,:]
-    lat_tmp = id.variables['lat_rho'][:350,:]
-    mask_tmp = id.variables['mask_rho'][:350,:]
-    zice_tmp = id.variables['zice'][:350,:]
+    data0 = id.variables['Hsbl'][tstep-1,:350,1:]
+    lon = id.variables['lon_rho'][:350,1:]
+    lat = id.variables['lat_rho'][:350,1:]
+    mask = id.variables['mask_rho'][:350,1:]
+    zice = id.variables['zice'][:350,1:]
     id.close()
-    index = zice_tmp != 0
-    mask_tmp[index] = 0.0
-    data_tmp = ma.masked_where(zice_tmp!=0, -data_tmp)
-
-    lon = ma.empty([size(lon_tmp,0), size(lon_tmp,1)+1])
-    lat = ma.empty([size(lat_tmp,0), size(lat_tmp,1)+1])
-    mask = ma.empty([size(mask_tmp,0), size(mask_tmp,1)+1])
-    data0 = ma.empty([size(data_tmp,0), size(data_tmp,1)+1])
-    lon[:,:-1] = lon_tmp
-    lon[:,-1] = lon_tmp[:,0]
-    lat[:,:-1] = lat_tmp
-    lat[:,-1] = lat_tmp[:,0]
-    mask[:,:-1] = mask_tmp
-    mask[:,-1] = mask_tmp[:,0]
-    data0[:,:-1] = data_tmp
-    data0[:,-1] = data_tmp[:,0]
+    # Mask out the ice shelf cavities and switch sign on mixed layer depth
+    index = zice != 0
+    mask[index] = 0.0
+    data0 = ma.masked_where(zice!=0, -data0)
 
+    # Land mask
     land = ma.masked_where(mask==1, mask)
+    # Circumpolar x and y coordinates for plotting
     x = -(lat+90)*cos(lon*deg2rad+pi/2)
     y = (lat+90)*sin(lon*deg2rad+pi/2)
+    # Coordinates of centre of missing circle
     x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2)
     y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2)
+    # Regular grid to embed missing circle in
     x_reg, y_reg = meshgrid(linspace(-circle_bdry, circle_bdry, num=100), linspace(-circle_bdry, circle_bdry, num=100))
+    # Mask everything except the circle out of the regular grid
     land_circle = zeros(shape(x_reg))
     land_circle = ma.masked_where(sqrt((x_reg-x_c)**2 + (y_reg-y_c)**2) > radius, land_circle)
 
+    # Set up figure
     fig = figure(figsize=(9,15))
     ax = fig.add_subplot(3, 2, 1, aspect='equal')
+    # First shade land
     contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6')))
+    # Fill in missing circle
     contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6')))
+    # Shade the mixed layer depth (pcolor not contourf so we don't misrepresent
+    # the model grid)
     img0 = pcolor(x, y, data0, vmin=0, vmax=max_abs, cmap='jet') #cmaps.viridis)
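The x and y coordinates above are a simple polar mapping centred on the South Pole: the radius is latitude plus 90 degrees and the angle is offset a quarter turn so the prime meridian lies along the positive y-axis. A minimal standalone sketch of the same mapping (the helper name is illustrative, not part of this patch):

    from numpy import pi, cos, sin

    def circumpolar_xy(lon, lat):
        # lon, lat in degrees; returns plotting coordinates with the South
        # Pole at the origin, matching the x and y expressions used in
        # these scripts
        deg2rad = pi/180.
        x = -(lat+90)*cos(lon*deg2rad+pi/2)
        y = (lat+90)*sin(lon*deg2rad+pi/2)
        return x, y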
 axis('off')
+    # Add title
     title(labels[0], fontsize=20)
+    # Add colorbar
     cbaxes0 = fig.add_axes([0.025, 0.7, 0.02, 0.2])
     cbar0 = colorbar(img0, ticks=arange(0, max_abs+tick_abs, tick_abs), cax=cbaxes0, extend='max')
     cbar0.ax.tick_params(labelsize=16)
 
+    # Loop over the other simulations
     for sim in range(1, len(paths)):
+        # Read mixed layer depth
         id = Dataset(paths[sim]+file_tail, 'r')
-        data_tmp = id.variables['Hsbl'][tstep-1,:350,:]
+        data = id.variables['Hsbl'][tstep-1,:350,1:]
         id.close()
-        data_tmp = ma.masked_where(zice_tmp!=0, -data_tmp)
-        data = ma.empty([size(data_tmp,0), size(data_tmp,1)+1])
-        data[:,:-1] = data_tmp
-        data[:,-1] = data_tmp[:,0]
+        # Mask out the ice shelf cavities and switch sign
+        data = ma.masked_where(zice!=0, -data)
+        # Calculate anomaly from U3_LIM
         data = data - data0
+        # Add to plot, same as before
         ax = fig.add_subplot(3, 2, sim+1, aspect='equal')
         contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6')))
         contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6')))
@@ -83,10 +99,12 @@ def adv_mld ():
         axis('off')
         title(labels[sim], fontsize=20)
         if sim == 3:
+            # Only add an anomaly colorbar for one of the simulations
             cbaxes = fig.add_axes([0.025, 0.4, 0.02, 0.2])
             cbar = colorbar(img, ticks=arange(-max_anom, max_anom+tick_anom, tick_anom), cax=cbaxes, extend='both')
             cbar.ax.tick_params(labelsize=16)
-
+
+    # Main title
     suptitle('Mixed layer depth (m) on 23 August', fontsize=28)
     subplots_adjust(wspace=0.025,hspace=0.15)
@@ -94,6 +112,7 @@ def adv_mld ():
     fig.savefig('adv_mld.png')
 
 
+# Command-line interface
 if __name__ == '__main__':
 
     adv_mld()
 
diff --git a/adv_polynyas.py b/adv_polynyas.py
index 956d74a..b3b6912 100644
--- a/adv_polynyas.py
+++ b/adv_polynyas.py
@@ -2,38 +2,55 @@ from numpy import *
 from matplotlib.pyplot import *
 
+# Create a 1x2 plot of sea ice concentration along the coast of Wilkes Land
+# on 23 August (the sea ice area max), showing the difference in polynya size
+# between the U3_LIM and C4_LD simulations.
def adv_polynyas (): + # Paths to simulation directories paths = ['/short/m68/kaa561/advection/u3_lim/', '/short/m68/kaa561/advection/c4_l/'] + # Titles for plotting labels = ['a) U3_LIM', 'b) C4_LD'] + # File name: daily average for 23 August file_tail = 'iceh.1992-08-23.nc' + # Longitude and latitude bounds lon_min = 100 lon_max = 140 lat_min = -67.1 lat_max = -64.9 + # Bounds on labels for latitude lat_min_label = -67 lat_max_label = -65 + # Set up figure fig = figure(figsize=(18,6)) + # Loop over simulations for sim in range(2): + # Read sea ice concentration in the region of interest id = Dataset(paths[sim]+file_tail, 'r') data = id.variables['aice'][0,70:280,350:750] if sim == 0: + # For the first simulation, also read the grid lon = id.variables['TLON'][70:280,350:750] lat = id.variables['TLAT'][70:280,350:750] id.close() ax = fig.add_subplot(1, 2, sim+1) + # Shade the data (pcolor not contourf so we can show each individual + # model cell) img = pcolor(lon, lat, data, vmin=0, vmax=1, cmap='jet') + # Configure plot title(labels[sim], fontsize=20) xlabel('Longitude', fontsize=16) ylabel('Latitude', fontsize=16) xlim([lon_min, lon_max]) ylim([lat_min, lat_max]) if sim == 1: + # Add a colorbar on the right cbaxes = fig.add_axes([0.93, 0.25, 0.015, 0.5]) cbar = colorbar(ticks=arange(0, 1+0.25, 0.25), cax=cbaxes) cbar.ax.tick_params(labelsize=14) + # Set ticks the way we want them lon_ticks = arange(lon_min, lon_max+10, 10) ax.set_xticks(lon_ticks) lon_labels = [] @@ -46,12 +63,14 @@ def adv_polynyas (): for val in lat_ticks: lat_labels.append(str(int(round(-val))) + r'$^{\circ}$S') ax.set_yticklabels(lat_labels, fontsize=14) + # Add main title suptitle('Sea ice concentration on 23 August', fontsize=22) #fig.show() fig.savefig('adv_polynyas.png') +# Command-line interface if __name__ == "__main__": adv_polynyas() diff --git a/adv_sss.py b/adv_sss.py index 6333b25..a4d1143 100644 --- a/adv_sss.py +++ b/adv_sss.py @@ -3,24 +3,36 @@ from matplotlib.pyplot import * #import colormaps as cmaps +# Create a 3x2 plot of sea surface salinity as seen by CICE on 23 August (the +# sea ice area max) for each advection experiment: the absolute values for +# U3_LIM, and the anomalies from U3_LIM for the other 5 experiments. def adv_sss (): + # Paths to simulation directories paths = ['/short/m68/kaa561/advection/u3_lim/', '/short/m68/kaa561/advection/u3/', '/short/m68/kaa561/advection/c4_l/', '/short/m68/kaa561/advection/c4_h/', '/short/m68/kaa561/advection/a4_l/', '/short/m68/kaa561/advection/a4_h/'] + # Titles for plotting labels = ['a) U3_LIM', 'b) U3 - U3_LIM', 'c) C4_LD - U3_LIM', 'd) C4_HD - U3_LIM', 'e) A4_LD - U3_LIM', 'f) A4_HD - U3_LIM'] + # File name: daily average for 23 August file_tail = 'iceh.1992-08-23.nc' + # Bounds and ticks for colour scales min_abs = 32.75 max_abs = 34.75 tick_abs = 0.5 max_anom = 3.0 tick_anom = 1.5 + # Degrees to radians conversion factor deg2rad = pi/180. 
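Several of these circumpolar scripts (adv_frazil.py above, adv_sss.py here, adv_thickness.py below) close the seam at the cyclic boundary by copying the first column of each field onto the end of the array. A generic sketch of that step, with a hypothetical helper name:

    from numpy import ma, size

    def wrap_periodic(field):
        # Append a copy of the first column so there is no gap in the
        # circumpolar plot at the periodic boundary
        wrapped = ma.empty([size(field,0), size(field,1)+1])
        wrapped[:,:-1] = field
        wrapped[:,-1] = field[:,0]
        return wrapped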
+ # Centre of missing circle in grid lon_c = 50 lat_c = -83 + # Radius of missing circle radius = 10.5 + # Boundary of regular grid to embed circle in circle_bdry = -70+90 - + + # Read salinity data from U3_LIM simulation; also grid and mask variables id = Dataset(paths[0]+file_tail, 'r') data_tmp = id.variables['sss'][0,:350,:] lon_tmp = id.variables['TLON'][:350,:] @@ -28,6 +40,7 @@ def adv_sss (): mask_tmp = id.variables['tmask'][:350,:] id.close() + # Wrap periodic boundary so there isn't a gap in the plot lon = ma.empty([size(lon_tmp,0), size(lon_tmp,1)+1]) lat = ma.empty([size(lat_tmp,0), size(lat_tmp,1)+1]) mask = ma.empty([size(mask_tmp,0), size(mask_tmp,1)+1]) @@ -41,35 +54,52 @@ def adv_sss (): data0[:,:-1] = data_tmp data0[:,-1] = data_tmp[:,0] + # Land mask land = ma.masked_where(mask==1, mask) + # Circumpolar x and y coordinates for plotting x = -(lat+90)*cos(lon*deg2rad+pi/2) y = (lat+90)*sin(lon*deg2rad+pi/2) + # Coordinates of centre of missing circle x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2) y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2) + # Regular grid to embed missing circle in x_reg, y_reg = meshgrid(linspace(-circle_bdry, circle_bdry, num=100), linspace(-circle_bdry, circle_bdry, num=100)) + # Mask everything except the circle out of the regular grid land_circle = zeros(shape(x_reg)) land_circle = ma.masked_where(sqrt((x_reg-x_c)**2 + (y_reg-y_c)**2) > radius, land_circle) + # Set up figure fig = figure(figsize=(9,15)) ax = fig.add_subplot(3, 2, 1, aspect='equal') + # First shade land contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) + # Fill in missing circle contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) + # Shade the salinity data (pcolor not contourf so we don't misrepresent the + # model grid) img0 = pcolor(x, y, data0, vmin=min_abs, vmax=max_abs, cmap='jet') #cmaps.viridis) axis('off') + # Add title title(labels[0], fontsize=20) + # Add colorbar cbaxes0 = fig.add_axes([0.025, 0.7, 0.02, 0.2]) cbar0 = colorbar(img0, ticks=arange(min_abs, max_abs+tick_abs, tick_abs), cax=cbaxes0, extend='both') cbar0.ax.tick_params(labelsize=16) + # Loop over the other simulations for sim in range(1, len(paths)): + # Read the salinity data id = Dataset(paths[sim]+file_tail, 'r') data_tmp = id.variables['sss'][0,:350,:] id.close() + # Wrap the periodic boundary data = ma.empty([size(data_tmp,0), size(data_tmp,1)+1]) data[:,:-1] = data_tmp data[:,-1] = data_tmp[:,0] + # Calculate anomaly from U3_LIM data = data - data0 + # Add to plot, same as before ax = fig.add_subplot(3, 2, sim+1, aspect='equal') contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) @@ -77,10 +107,12 @@ def adv_sss (): axis('off') title(labels[sim], fontsize=20) if sim == 3: + # Only add an anomaly colorbar for one of the simulations cbaxes = fig.add_axes([0.025, 0.4, 0.02, 0.2]) cbar = colorbar(img, ticks=arange(-max_anom, max_anom+tick_anom, tick_anom), cax=cbaxes, extend='both') cbar.ax.tick_params(labelsize=16) - + + # Main title suptitle('Sea surface salinity (psu) on 23 August', fontsize=28) subplots_adjust(wspace=0.025,hspace=0.15) @@ -88,6 +120,7 @@ def adv_sss (): fig.savefig('adv_sss.png') +# Command-line interface if __name__ == '__main__': adv_sss() diff --git a/adv_thickness.py b/adv_thickness.py index f9b468f..db8cf8e 100644 --- a/adv_thickness.py +++ b/adv_thickness.py @@ -3,30 +3,44 @@ from matplotlib.pyplot import * #import colormaps as cmaps +# Create a 3x2 plot of sea ice effective thickness on 23 
August (the sea ice +# area max) for each advection experiment: the absolute values for U3_LIM, and +# the anomalies from U3_LIM for the other 5 experiments. def adv_thickness (): + # Paths to simulation directories paths = ['/short/m68/kaa561/advection/u3_lim/', '/short/m68/kaa561/advection/u3/', '/short/m68/kaa561/advection/c4_l/', '/short/m68/kaa561/advection/c4_h/', '/short/m68/kaa561/advection/a4_l/', '/short/m68/kaa561/advection/a4_h/'] + # Titles for plotting labels = ['a) U3_LIM', 'b) U3 - U3_LIM', 'c) C4_LD - U3_LIM', 'd) C4_HD - U3_LIM', 'e) A4_LD - U3_LIM', 'f) A4_HD - U3_LIM'] + # File name: daily average for 23 August file_tail = 'iceh.1992-08-23.nc' + # Bounds and ticks for colour scales max_abs = 2.0 tick_abs = 0.5 max_anom = 2.0 tick_anom = 1.0 + # Degrees to radians conversion factor deg2rad = pi/180. + # Centre of missing circle in grid lon_c = 50 lat_c = -83 + # Radius of missing circle radius = 10.5 + # Boundary of regular grid to embed circle in circle_bdry = -70+90 - + + # Read thickness data from U3_LIM simulation; also grid and mask variables id = Dataset(paths[0]+file_tail, 'r') + # Effective thickness is concentration*thickness data_tmp = id.variables['aice'][0,:350,:]*id.variables['hi'][0,:350,:] lon_tmp = id.variables['TLON'][:350,:] lat_tmp = id.variables['TLAT'][:350,:] mask_tmp = id.variables['tmask'][:350,:] id.close() + # Wrap periodic boundary so there isn't a gap in the plot lon = ma.empty([size(lon_tmp,0), size(lon_tmp,1)+1]) lat = ma.empty([size(lat_tmp,0), size(lat_tmp,1)+1]) mask = ma.empty([size(mask_tmp,0), size(mask_tmp,1)+1]) @@ -40,35 +54,52 @@ def adv_thickness (): data0[:,:-1] = data_tmp data0[:,-1] = data_tmp[:,0] + # Land mask land = ma.masked_where(mask==1, mask) + # Circumpolar x and y coordinates for plotting x = -(lat+90)*cos(lon*deg2rad+pi/2) y = (lat+90)*sin(lon*deg2rad+pi/2) + # Coordinates of centre of missing circle x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2) y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2) + # Regular grid to embed missing circle in x_reg, y_reg = meshgrid(linspace(-circle_bdry, circle_bdry, num=100), linspace(-circle_bdry, circle_bdry, num=100)) + # Mask everything except the circle out of the regular grid land_circle = zeros(shape(x_reg)) land_circle = ma.masked_where(sqrt((x_reg-x_c)**2 + (y_reg-y_c)**2) > radius, land_circle) + # Set up figure fig = figure(figsize=(9,15)) ax = fig.add_subplot(3, 2, 1, aspect='equal') + # First shade land contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) + # Fill in missing circle contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) + # Shade the thickness data (pcolor not contourf so we don't misrepresent + # the model grid) img0 = pcolor(x, y, data0, vmin=0, vmax=max_abs, cmap='jet') #cmaps.viridis) axis('off') + # Add title title(labels[0], fontsize=20) + # Add colorbar cbaxes0 = fig.add_axes([0.025, 0.7, 0.02, 0.2]) cbar0 = colorbar(img0, ticks=arange(0, max_abs+tick_abs, tick_abs), cax=cbaxes0, extend='max') cbar0.ax.tick_params(labelsize=16) + # Loop over the other simulations for sim in range(1, len(paths)): + # Read the thickness data id = Dataset(paths[sim]+file_tail, 'r') data_tmp = id.variables['aice'][0,:350,:]*id.variables['hi'][0,:350,:] id.close() + # Wrap the periodic boundary data = ma.empty([size(data_tmp,0), size(data_tmp,1)+1]) data[:,:-1] = data_tmp data[:,-1] = data_tmp[:,0] + # Calculate anomaly from U3_LIM data = data - data0 + # Add to plot, same as before ax = fig.add_subplot(3, 2, sim+1, aspect='equal') contourf(x, y, land, 1, 
colors=(('0.6', '0.6', '0.6'))) contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) @@ -76,10 +107,12 @@ def adv_thickness (): axis('off') title(labels[sim], fontsize=20) if sim == 3: + # Only add an anomaly colorbar for one of the simulations cbaxes = fig.add_axes([0.025, 0.4, 0.02, 0.2]) cbar = colorbar(img, ticks=arange(-max_anom, max_anom+tick_anom, tick_anom), cax=cbaxes, extend='both') cbar.ax.tick_params(labelsize=16) - + + # Main title suptitle('Effective sea ice thickness (m) on 23 August', fontsize=28) subplots_adjust(wspace=0.025,hspace=0.15) @@ -87,6 +120,7 @@ def adv_thickness (): fig.savefig('adv_thickness.png') +# Command-line interface if __name__ == '__main__': adv_thickness() diff --git a/aice_hi_seasonal.py b/aice_hi_seasonal.py index 9e93373..4cf9e75 100644 --- a/aice_hi_seasonal.py +++ b/aice_hi_seasonal.py @@ -2,19 +2,36 @@ from netCDF4 import Dataset, num2date from matplotlib.pyplot import * +# Creates a 4x2 plot of seasonally averaged sea ice concentration (top row) and +# thickness (bottom row) over the last year of simulation. +# Input: +# cice_file = path to CICE output file with 5-day averages, containing at least +# one complete Dec-Nov period (if there are multiple such periods, +# this script uses the last one) +# save = optional boolean to save the figure to a file, rather than displaying +# it on the screen +# fig_name = if save=True, path to the desired filename for figure def aice_hi_seasonal (cice_file, save=False, fig_name=None): + # Starting and ending months (1-based) for each season start_month = [12, 3, 6, 9] end_month = [2, 5, 8, 11] + # Starting and ending days of the month (1-based) for each season + # Assume no leap years, we'll fix this later if we need start_day = [1, 1, 1, 1] end_day = [28, 31, 31, 30] + # Number of days in each season (again, ignore leap years for now) ndays_season = [90, 92, 92, 91] + # Season names for titles season_names = ['DJF', 'MAM', 'JJA', 'SON'] + # Degrees to radians conversion deg2rad = pi/180.0 + # Read CICE grid and time values id = Dataset(cice_file, 'r') lon_tmp = id.variables['TLON'][:-15,:] lat_tmp = id.variables['TLAT'][:-15,:] + # Wrap the periodic boundary by 1 cell lon = ma.empty([size(lon_tmp,0), size(lon_tmp,1)+1]) lat = ma.empty([size(lat_tmp,0), size(lat_tmp,1)+1]) lon[:,:-1] = lon_tmp @@ -22,95 +39,132 @@ def aice_hi_seasonal (cice_file, save=False, fig_name=None): lat[:,:-1] = lat_tmp lat[:,-1] = lat_tmp[:,0] time_id = id.variables['time'] + # Get the year, month, and day (all 1-based) for each output step + # These are 5-day averages marked with the next day's date. 
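A few lines below, the script checks whether February of the averaging year falls in a leap year, using the full Gregorian rule (divisible by 4, except centuries, unless also divisible by 400). The same rule can be written as a one-line helper; the function name is illustrative, not part of this patch:

    def is_leap_year(year):
        # Gregorian calendar rule
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    # e.g. is_leap_year(1992) -> True, is_leap_year(1900) -> False,
    #      is_leap_year(2000) -> True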
time = num2date(time_id[:], units=time_id.units, calendar=time_id.calendar.lower()) - end_t = -1 - for t in range(size(time)-1, -1, -1): + # Loop backwards through time indices to find the last one we care about + # (which contains 30 November in its averaging period) + end_t = -1 # Missing value flag + for t in range(size(time)-1, -1, -1): if time[t].month == start_month[0] and time[t].day in range(start_day[0], start_day[0]+5): end_t = t break + # Make sure we actually found it if end_t == -1: print 'Error: ' + cice_file + ' does not contain a complete Dec-Nov period' return - start_t = -1 + # Continue looping backwards to find the first time index we care about + # (which contains 1 December the previous year in its averaging period) + start_t = -1 # Missing value flag for t in range(end_t-60, -1, -1): if time[t].month == start_month[0] and time[t].day in range(start_day[0]+1, start_day[0]+6): start_t = t break + # Make sure we actually found it if start_t == -1: print 'Error: ' + cice_file + ' does not contain a complete Dec-Nov period' return + # Check for leap years leap_year = False if mod(time[end_t].year, 4) == 0: + # Years divisible by 4 are leap years leap_year = True if mod(time[end_t].year, 100) == 0: + # Unless they're also divisible by 100, in which case they aren't + # leap years leap_year = False if mod(time[end_t].year, 400) == 0: + # Unless they're also divisible by 400, in which case they are + # leap years after all leap_year = True if leap_year: + # Update last day in February end_day[0] += 1 ndays_season[0] += 1 + # Initialise seasonal averages of CICE output aice_tmp = ma.empty([4, size(lon_tmp,0), size(lon_tmp,1)]) aice_tmp[:,:,:] = 0.0 hi_tmp = ma.empty([4, size(lon_tmp,0), size(lon_tmp,1)]) hi_tmp[:,:,:] = 0.0 + # Process one season at a time for season in range(4): - season_days = 0 + season_days = 0 # Number of days in season; this will be incremented next_season = mod(season+1, 4) + # Find starting timestep start_t_season = -1 for t in range(start_t, end_t+1): if time[t].month == start_month[season] and time[t].day in range(start_day[season]+1, start_day[season]+6): start_t_season = t break + # Make sure we actually found it if start_t_season == -1: print 'Error: could not find starting timestep for season ' + season_names[season] return + # Find ending timestep end_t_season = -1 for t in range(start_t_season+1, end_t+1): if time[t].month == start_month[next_season] and time[t].day in range(start_day[next_season], start_day[next_season]+5): end_t_season = t break + # Make sure we actually found it if end_t_season == -1: print 'Error: could not find ending timestep for season ' + season_names[season] return + # Figure out how many of the 5 days averaged in start_t_season are + # actually within this season if time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 5: + # Starting day is in position 1 of 5; we care about all of them start_days = 5 elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 4: + # Starting day is in position 2 of 5; we care about the last 4 start_days = 4 elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season]+ 3: + # Starting day is in position 3 of 5; we care about the last 3 start_days = 3 elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 2: + # Starting day is in position 4 of 5; we care about the last 2 start_days = 2 elif 
time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 1: + # Starting day is in position 5 of 5; we care about the last 1 start_days = 1 else: print 'Error for season ' + season_names[season] + ': starting index is month ' + str(time[start_t_season].month) + ', day ' + str(time[start_t_season].day) return + # Start accumulating data weighted by days aice_tmp[season,:,:] += id.variables['aice'][start_t_season,:-15,:]*start_days hi_tmp[season,:,:] += id.variables['hi'][start_t_season,:-15,:]*start_days season_days += start_days + # Between start_t_season and end_t_season, we want all the days for t in range(start_t_season+1, end_t_season): aice_tmp[season,:,:] += id.variables['aice'][t,:-15,:]*5 hi_tmp[season,:,:] += id.variables['hi'][t,:-15,:]*5 season_days += 5 + # Figure out how many of the 5 days averaged in end_t_season are + # actually within this season if time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 4: + # Ending day is in position 1 of 5; we care about the first 1 end_days = 1 elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 3: + # Ending day is in position 2 of 5; we care about the first 2 end_days = 2 elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 2: + # Ending day is in position 3 of 5; we care about the first 3 end_days = 3 elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 1: + # Ending day is in position 4 of 5; we care about the first 4 end_days = 4 elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season]: + # Ending day is in position 5 of 5; we care about all 5 end_days = 5 else: print 'Error for season ' + season_names[season] + ': ending index is month ' + str(time[end_t_season].month) + ', day ' + str(time[end_t_season].day) @@ -120,15 +174,19 @@ def aice_hi_seasonal (cice_file, save=False, fig_name=None): hi_tmp[season,:,:] += id.variables['hi'][end_t_season,:-15,:]*end_days season_days += end_days + # Check that we got the correct number of days if season_days != ndays_season[season]: print 'Error: found ' + str(season_days) + ' days instead of ' + str(ndays_season[season]) return + # Finished accumulating data, now convert from sum to average aice_tmp[season,:,:] /= season_days hi_tmp[season,:,:] /= season_days + # Finished reading all CICE data id.close() + # Wrap the periodic boundary aice = ma.empty([size(aice_tmp,0), size(aice_tmp,1), size(aice_tmp,2)+1]) aice[:,:,:-1] = aice_tmp aice[:,:,-1] = aice_tmp[:,:,0] @@ -136,10 +194,11 @@ def aice_hi_seasonal (cice_file, save=False, fig_name=None): hi[:,:,:-1] = hi_tmp hi[:,:,-1] = hi_tmp[:,:,0] + # Get circumpolar x and y coordinates for plotting x = -(lat+90)*cos(lon*deg2rad+pi/2) y = (lat+90)*sin(lon*deg2rad+pi/2) - # Find boundaries for each side of plot based on extent of NSIDC grid + # Set boundaries of plot bdry1 = -35 bdry2 = 35 bdry3 = -33 @@ -153,6 +212,7 @@ def aice_hi_seasonal (cice_file, save=False, fig_name=None): fig = figure(figsize=(20,9)) # Loop over seasons for season in range(4): + # Concentration ax = fig.add_subplot(2, 4, season+1, aspect='equal') img = contourf(x, y, aice[season,:,:], lev1) xlim([bdry1, bdry2]) @@ -162,9 +222,11 @@ def aice_hi_seasonal (cice_file, save=False, fig_name=None): text(-39, 0, 'aice (1)', fontsize=21, ha='right') 
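The seasonal averaging above weights the first and last 5-day records by the number of days (start_days, end_days) that actually fall inside the season, and every record in between by 5. Numerically it is just a weighted mean; a toy 1D illustration with made-up values:

    from numpy import array, average

    five_day_means = array([1.0, 2.0, 3.0, 4.0])  # consecutive 5-day averages
    days_in_season = array([3, 5, 5, 2])          # days of each record inside the season
    season_mean = average(five_day_means, weights=days_in_season)
    # equivalent to sum(means*days)/sum(days) = 36.0/15 = 2.4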
title(season_names[season], fontsize=24) if season == 3: + # Add colorbar cbaxes1 = fig.add_axes([0.92, 0.55, 0.01, 0.3]) cbar1 = colorbar(img, ticks=arange(0,1+0.25,0.25), cax=cbaxes1) cbar1.ax.tick_params(labelsize=16) + # Thickness ax = fig.add_subplot(2, 4, season+5, aspect='equal') img = contourf(x, y, hi[season,:,:], lev2, extend='both') xlim([bdry1, bdry2]) @@ -173,9 +235,11 @@ def aice_hi_seasonal (cice_file, save=False, fig_name=None): if season == 0: text(-39, 0, 'hi (m)', fontsize=21, ha='right') if season == 3: + # Add colorbar cbaxes2 = fig.add_axes([0.92, 0.15, 0.01, 0.3]) cbar2 = colorbar(img, ticks=arange(0,2.5+0.5,0.5), cax=cbaxes2) cbar2.ax.tick_params(labelsize=16) + # Make plots closer together subplots_adjust(wspace=0.025,hspace=0.025) # Finished diff --git a/aice_hs_seasonal.py b/aice_hs_seasonal.py new file mode 100644 index 0000000..c5811ae --- /dev/null +++ b/aice_hs_seasonal.py @@ -0,0 +1,221 @@ +from numpy import * +from netCDF4 import Dataset, num2date +from matplotlib.pyplot import * + +def aice_hs_seasonal (cice_file, save=False, fig_name=None): + + start_month = [12, 3, 6, 9] + end_month = [2, 5, 8, 11] + start_day = [1, 1, 1, 1] + end_day = [28, 31, 31, 30] + ndays_season = [90, 92, 92, 91] + season_names = ['DJF', 'MAM', 'JJA', 'SON'] + deg2rad = pi/180.0 + + id = Dataset(cice_file, 'r') + lon_tmp = id.variables['TLON'][:-15,:] + lat_tmp = id.variables['TLAT'][:-15,:] + lon = ma.empty([size(lon_tmp,0), size(lon_tmp,1)+1]) + lat = ma.empty([size(lat_tmp,0), size(lat_tmp,1)+1]) + lon[:,:-1] = lon_tmp + lon[:,-1] = lon_tmp[:,0] + lat[:,:-1] = lat_tmp + lat[:,-1] = lat_tmp[:,0] + time_id = id.variables['time'] + time = num2date(time_id[:], units=time_id.units, calendar=time_id.calendar.lower()) + + end_t = -1 + for t in range(size(time)-1, -1, -1): + if time[t].month == start_month[0] and time[t].day in range(start_day[0], start_day[0]+5): + end_t = t + break + if end_t == -1: + print 'Error: ' + cice_file + ' does not contain a complete Dec-Nov period' + return + + start_t = -1 + for t in range(end_t-60, -1, -1): + if time[t].month == start_month[0] and time[t].day in range(start_day[0]+1, start_day[0]+6): + start_t = t + break + if start_t == -1: + print 'Error: ' + cice_file + ' does not contain a complete Dec-Nov period' + return + + leap_year = False + if mod(time[end_t].year, 4) == 0: + leap_year = True + if mod(time[end_t].year, 100) == 0: + leap_year = False + if mod(time[end_t].year, 400) == 0: + leap_year = True + if leap_year: + end_day[0] += 1 + ndays_season[0] += 1 + + aice_tmp = ma.empty([4, size(lon_tmp,0), size(lon_tmp,1)]) + aice_tmp[:,:,:] = 0.0 + hs_tmp = ma.empty([4, size(lon_tmp,0), size(lon_tmp,1)]) + hs_tmp[:,:,:] = 0.0 + for season in range(4): + season_days = 0 + next_season = mod(season+1, 4) + + start_t_season = -1 + for t in range(start_t, end_t+1): + if time[t].month == start_month[season] and time[t].day in range(start_day[season]+1, start_day[season]+6): + start_t_season = t + break + if start_t_season == -1: + print 'Error: could not find starting timestep for season ' + season_names[season] + return + + end_t_season = -1 + for t in range(start_t_season+1, end_t+1): + if time[t].month == start_month[next_season] and time[t].day in range(start_day[next_season], start_day[next_season]+5): + end_t_season = t + break + if end_t_season == -1: + print 'Error: could not find ending timestep for season ' + season_names[season] + return + + if time[start_t_season].month == start_month[season] and time[start_t_season].day == 
start_day[season] + 5: + start_days = 5 + elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 4: + start_days = 4 + elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season]+ 3: + start_days = 3 + elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 2: + start_days = 2 + elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 1: + start_days = 1 + else: + print 'Error for season ' + season_names[season] + ': starting index is month ' + str(time[start_t_season].month) + ', day ' + str(time[start_t_season].day) + return + + aice_tmp[season,:,:] += id.variables['aice'][start_t_season,:-15,:]*start_days + hs_tmp[season,:,:] += id.variables['hs'][start_t_season,:-15,:]*start_days + season_days += start_days + + for t in range(start_t_season+1, end_t_season): + aice_tmp[season,:,:] += id.variables['aice'][t,:-15,:]*5 + hs_tmp[season,:,:] += id.variables['hs'][t,:-15,:]*5 + season_days += 5 + + if time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 4: + end_days = 1 + elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 3: + end_days = 2 + elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 2: + end_days = 3 + elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 1: + end_days = 4 + elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season]: + end_days = 5 + else: + print 'Error for season ' + season_names[season] + ': ending index is month ' + str(time[end_t_season].month) + ', day ' + str(time[end_t_season].day) + return + + aice_tmp[season,:,:] += id.variables['aice'][end_t_season,:-15,:]*end_days + hs_tmp[season,:,:] += id.variables['hs'][end_t_season,:-15,:]*end_days + season_days += end_days + + if season_days != ndays_season[season]: + print 'Error: found ' + str(season_days) + ' days instead of ' + str(ndays_season[season]) + return + + aice_tmp[season,:,:] /= season_days + hs_tmp[season,:,:] /= season_days + + id.close() + + aice = ma.empty([size(aice_tmp,0), size(aice_tmp,1), size(aice_tmp,2)+1]) + aice[:,:,:-1] = aice_tmp + aice[:,:,-1] = aice_tmp[:,:,0] + hs = ma.empty([size(hs_tmp,0), size(hs_tmp,1), size(hs_tmp,2)+1]) + hs[:,:,:-1] = hs_tmp + hs[:,:,-1] = hs_tmp[:,:,0] + + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + + # Find boundaries for each side of plot based on extent of NSIDC grid + bdry1 = -35 + bdry2 = 35 + bdry3 = -33 + bdry4 = 37 + + # Set consistent colour levels + lev1 = linspace(0, 1, num=50) + lev2 = linspace(0, 0.5, num=50) + + # Plot + fig = figure(figsize=(20,9)) + # Loop over seasons + for season in range(4): + ax = fig.add_subplot(2, 4, season+1, aspect='equal') + img = contourf(x, y, aice[season,:,:], lev1) + xlim([bdry1, bdry2]) + ylim([bdry3, bdry4]) + axis('off') + if season == 0: + text(-39, 0, 'aice (1)', fontsize=21, ha='right') + title(season_names[season], fontsize=24) + if season == 3: + cbaxes1 = fig.add_axes([0.92, 0.55, 0.01, 0.3]) + cbar1 = colorbar(img, ticks=arange(0,1+0.25,0.25), cax=cbaxes1) + cbar1.ax.tick_params(labelsize=16) + ax = fig.add_subplot(2, 4, season+5, aspect='equal') + img = contourf(x, y, hs[season,:,:], lev2, 
extend='both') + xlim([bdry1, bdry2]) + ylim([bdry3, bdry4]) + axis('off') + if season == 0: + text(-39, 0, 'hs (m)', fontsize=21, ha='right') + if season == 3: + cbaxes2 = fig.add_axes([0.92, 0.15, 0.01, 0.3]) + cbar2 = colorbar(img, ticks=arange(0,0.5+0.1,0.1), cax=cbaxes2) + cbar2.ax.tick_params(labelsize=16) + subplots_adjust(wspace=0.025,hspace=0.025) + + # Finished + if save: + fig.savefig(fig_name) + else: + fig.show() + + +# Command-line interface +if __name__ == "__main__": + + cice_file = raw_input("Path to CICE file, containing at least one complete Dec-Nov period: ") + action = raw_input("Save figure (s) or display on screen (d)? ") + if action == 's': + save = True + fig_name = raw_input("File name for figure: ") + elif action == 'd': + save = False + fig_name = None + aice_hs_seasonal(cice_file, save, fig_name) + + + + + + + + + + + + + + + + + + + + + + diff --git a/cice_vectorplot.py b/cice_vectorplot.py index ae37e6e..ff219b1 100644 --- a/cice_vectorplot.py +++ b/cice_vectorplot.py @@ -52,19 +52,20 @@ def cice_vectorplot (file_path, tstep, xname, yname, cmax=None, save=False, fig_ # Rotate from local x-y space to lon-lat space u, v = rotate_vector_cice(u_xy, v_xy, angle) - # Calculate magnitude for the background filled contour plot + # Calculate magnitude of vector speed = sqrt(u**2 + v**2) - - # Calculate X and Y coordinates for plotting circumpolar projection - x = -(lat+90)*cos(lon*deg2rad+pi/2) - y = (lat+90)*sin(lon*deg2rad+pi/2) - + # Convert vector to polar coordinates, rotate to account for longitude in + # circumpolar projection, and convert back to vector components theta = arctan2(v, u) theta_circ = theta - lon*deg2rad u_circ = speed*cos(theta_circ) v_circ = speed*sin(theta_circ) - # Average X, Y, dX_dt, and dY_dt over block x block intervals + # Calculate x and y coordinates for plotting circumpolar projection + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + + # Average x, y, u_circ, and v_circ over block x block intervals # Calculate number of blocks size0 = int(ceil(size(x,0)/float(block))) size1 = int(ceil((size(x,1)-1)/float(block))) diff --git a/file_guide.txt b/file_guide.txt index 2e2456b..68dbf34 100644 --- a/file_guide.txt +++ b/file_guide.txt @@ -619,6 +619,89 @@ temp_salt_seasonal.py: Make a 4x2 plot showing lat vs. depth slides of the screen. This script repeats as many times as you like. +ssflux_tamura_monthly.py: Make a figure comparing monthly-averaged sea ice + to ocean salt fluxes, from Tamura's dataset to CICE + output. + To run: First make sure the paths to monthly-averaged, + unit-corrected (to kg/m^2/s) Tamura data files + are correct. Then open python or ipython and + type "run ssflux_tamura_monthly.py". The + script will prompt you for paths to the CICE + output file, the month, and whether you want + to save the figure (and if so, what filename) + or display it on the screen. This script + repeats as many times as you like. + +ssflux_tamura_seasonal.py: Make a figure comparing seasonally-averaged sea ice + to ocean salt fluxes, from Tamura's dataset to CICE + output. + To run: First make sure the paths to + monthly-averaged, unit-corrected (to + kg/m^2/s) Tamura data files are correct. Then + open python or iptyhon and type + "run ssflux_tamura_seasonal.py". The script + will prompt you for paths to the CICE output + file and whether you want to save the figure + (and if so, what filename) or display it on + the screen. This script is non-repeating. + +sose_seasonal.py: Make a 4x2 plot showing lat vs. 
depth slices of seasonally + averaged temperature (top) and salinity (bottom) at the given + longitude, for the 2005-2010 SOSE climatology. + To run: First make sure the path to the SOSE seasonal + climatology file is correct (if you don't have this + file, ask Kaitlin). Then open python or ipython and + type "run sose_seasonal.py". The script will + prompt you for the longitude to interpolate to, the + deepest depth to plot, and whether to save the + figure (and if so, what filename) or display it on + the screen. This script is non-repeating. + +ice_drift_seasonal.py: Create a 2x2 plot replicating Figure 1 of Holland & + Kimura 2016, showing sea ice velocity vectors overlaying + sea ice concentration for the seasonal averages FMA, MMJ, + ASO, NDJ. + To run: Open python or ipython and type + "run ice_drift_seasonal.py". The script will + prompt you for the path to a CICE output file + containing 5-day averages for at least one + complete Feb-Jan period. It will also ask whether + you want to save the figure (and if so, what + filename) or display it on the screen. This + script is non-repeating. + +seaice_budget.py: Create four plots showing timeseries of the thermodynamic vs + dynamic volume tendency, averaged over (1) the continental + shelf (defined as anywhere south of 60S with seafloor + shallower than 1500 m), and (2) the offshore region + (everywhere else). Two plots are the absolute volume + tendencies (units of cm/day), the other two are cumulative + over the simulation (cm). + To run: Open python or ipython and type + "run seaice_budget.py." The script will prompt you for + the path to the CICE output file, the path to the ROMS + grid file, and whether you want to save the figures + (and if so, what filenames) or display them on the + screen. + +seaice_budget_thermo.py: Create four plots showing timeseries of the + thermodynamic terms of sea ice growth and melt: + congelation, frazil ice formation, snow-to-ice + flooding, top melt, basal melt, and lateral melt. + These variables are averaged over (1) the continental + shelf (defined as anywhere south of 60S with seafloor + shallower than 1500 m), and (2) the offshore region + (everywhere else). Two plots are the absolute volume + tendencies (units of cm/day), the other two are + cumulative over the simulation (cm). + To run: Open python or ipython and type + "run seaice_budget_thermo.py". The script will + prompt you for the path to the CICE output + file, the path to the ROMS grid file, and + whether you want to save the figures (and if + so, what filenames) or display them on the + screen. + ***ANIMATIONS*** @@ -636,12 +719,20 @@ aice_animation.py: Create an animation of sea ice concentration for the given ***FIGURES FOR ADVECTION PAPER*** -adv_amery_tsplots.py: For each advection experiment, plot zonal slices of - temperature and salinity through 71E (Amery Ice Shelf) - at the end of the simulation. This is really just a - special case of zonal_plot.py. - To run: Make sure the paths to each simulation are - correct. Then open python or ipython and type +adv_amery_tsplots_indiv.py: For each advection experiment, plot zonal slices of + temperature and salinity through 71E (Amery Ice + Shelf) at the end of the simulation. This is really + just a special case of zonal_plot.py. + To run: Make sure the paths to each simulation are + correct. Then open python or ipython and + type "run adv_amery_tsplots_indiv.py". 
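The "continental shelf" average in seaice_budget.py and seaice_budget_thermo.py above is defined as anywhere south of 60S with seafloor shallower than 1500 m. A minimal sketch of such a mask, assuming the standard ROMS grid variables lat_rho (latitude in degrees) and h (bathymetry in m); the grid file name is hypothetical:

    from numpy import logical_and, logical_not
    from netCDF4 import Dataset

    id = Dataset('roms_grid.nc', 'r')   # hypothetical path to the ROMS grid file
    lat_rho = id.variables['lat_rho'][:,:]
    h = id.variables['h'][:,:]
    id.close()
    shelf = logical_and(lat_rho < -60, h < 1500)   # continental shelf points
    offshore = logical_not(shelf)                  # everything else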
+
+adv_amery_tsplots.py: Create a 2x2 plot showing zonal slices of temperature and
+                      salinity through 71E (Amery Ice Shelf) at the end of the
+                      U3_LIM and C4_LD simulations.
+                      To run: Make sure the paths to the simulation directories
+                              and 31 December ROMS files are correct. Then
+                              open python or ipython and type
+                              "run adv_amery_tsplots.py".
 
 adv_freezingpt_slice.py: Plot the difference from the freezing temperature at
@@ -662,6 +753,38 @@ adv_timeseries_volume.py: Plot timeseries of total sea ice volume for all the
                                   python or ipython and type
                                   "run adv_timeseries_volume.py".
 
+adv_frazil.py: Create a 3x2 plot of annually averaged frazil formation for each
+               advection experiment: the absolute values for U3_LIM, and the
+               anomalies from U3_LIM for the other 5 experiments.
+               To run: Make sure the paths to the simulation directories and
+                       annually averaged CICE files are correct. Then open
+                       python or ipython and type "run adv_frazil.py".
+
+adv_thickness.py: Like adv_frazil.py, but for sea ice effective thickness on
+                  23 August (the sea ice area max).
+                  To run: Make sure the paths to the simulation directories and
+                          23 August CICE files are correct. Then open python or
+                          ipython and type "run adv_thickness.py".
+
+adv_sss.py: Like adv_thickness.py, but for sea surface salinity.
+            To run: Make sure the paths to the simulation directories and 23
+                    August CICE files are correct. Then open python or ipython
+                    and type "run adv_sss.py".
+
+adv_mld.py: Like adv_thickness.py, but for mixed layer depth as calculated by
+            the KPP parameterisation.
+            To run: Make sure the paths to the simulation directories and 23
+                    August ROMS files are correct. Then open python or ipython
+                    and type "run adv_mld.py".
+
+adv_polynyas.py: Create a 1x2 plot of sea ice concentration along the coast of
+                 Wilkes Land on 23 August (the sea ice area max), showing the
+                 difference in polynya size between the U3_LIM and C4_LD
+                 simulations.
+                 To run: Make sure the paths to the simulation directories and
+                         23 August CICE files are correct. Then open python or
+                         ipython and type "run adv_polynyas.py".
+
 
 ***UTILITY FUNCTIONS***
 
diff --git a/ice2ocn_fwflux.py b/ice2ocn_fwflux.py
index cd66690..80c77a6 100644
--- a/ice2ocn_fwflux.py
+++ b/ice2ocn_fwflux.py
@@ -13,13 +13,18 @@ def ice2ocn_fwflux (file_path, tstep, fig_name):
 
     deg2rad = pi/180
+    rho_fw = 1000.0
+    mps_to_cmpday = 8.64e6
 
     # Read data
     id = Dataset(file_path, 'r')
-    # I think there is a problem with salinity weighting here.
-    # There is a difference between kg/m^2/s of freshwater, and kg/m^2/s of
-    # salt.
-    data_tmp = id.variables['fresh_ai'][tstep-1,:-15,:] - id.variables['fsalt_ai'][tstep-1,:-15,:]/1000.*60.*60.*24.*100.
+    # Read freshwater and salt fluxes, also salinity
+    fresh_ai = id.variables['fresh_ai'][tstep-1,:-15,:]
+    fsalt_ai = id.variables['fsalt_ai'][tstep-1,:-15,:]
+    sss = id.variables['sss'][tstep-1,:-15,:]
+    # Convert fsalt from kg/m^2/s of salt to cm/day of freshwater
+    data_tmp = fresh_ai - fsalt_ai*1e3/sss/rho_fw*mps_to_cmpday
+    # Read grid
     lon_tmp = id.variables['TLON'][:-15,:]
     lat_tmp = id.variables['TLAT'][:-15,:]
     id.close()
diff --git a/ice_drift_seasonal.py b/ice_drift_seasonal.py
index d071718..23b34ae 100644
--- a/ice_drift_seasonal.py
+++ b/ice_drift_seasonal.py
@@ -3,23 +3,43 @@ from matplotlib.pyplot import *
 
 from rotate_vector_cice import *
 
+# Create a 2x2 plot replicating Figure 1 of Holland & Kimura 2016, showing
+# sea ice velocity vectors overlaying sea ice concentration for the seasonal
+# averages FMA, MMJ, ASO, NDJ.
+# Input: +# cice_file = path to CICE output file with 5-day averages, containing at least +# one complete Feb-Jan period (if there are multiple such periods, +# this script uses the last one) +# save = optional boolean to save the figure to a file, rather than displaying +# it on the screen +# fig_name = if save=True, path to the desired filename for figure def ice_drift_seasonal (cice_file, save=False, fig_name=None): + # Starting and ending months (1-based) for each season start_month = [2, 5, 8, 11] end_month = [4, 7, 10, 1] + # Starting and ending days of the month (1-based) for each season start_day = [1, 1, 1, 1] end_day = [30, 31, 31, 31] + # Number of days in each season + # Assume no leap years, we'll fix this later if needed ndays_season = [89, 92, 92, 92] + # Season names for titles season_names = ['FMA', 'MMJ', 'ASO', 'NDJ'] + # Order of figures (clockwise) figure_order = [1, 2, 4, 3] - r = 6.371e6 + # Degrees to radians conversion deg2rad = pi/180.0 + # Side length of blocks to average vectors over (can't plot vector at + # every single point or the plot will be way too crowded) block = 15 + # Read CICE grid (including angle) and time values id = Dataset(cice_file, 'r') lon_tmp = id.variables['TLON'][:-15,:] lat_tmp = id.variables['TLAT'][:-15,:] angle_tmp = id.variables['ANGLET'][:-15,:] + # Wrap the periodic boundary by 1 cell lon = ma.empty([size(lon_tmp,0), size(lon_tmp,1)+1]) lat = ma.empty([size(lat_tmp,0), size(lat_tmp,1)+1]) angle = ma.empty([size(angle_tmp,0), size(angle_tmp,1)+1]) @@ -30,99 +50,135 @@ def ice_drift_seasonal (cice_file, save=False, fig_name=None): angle[:,:-1] = angle_tmp angle[:,-1] = angle_tmp[:,0] time_id = id.variables['time'] + # Get the year, month, and day (all 1-based) for each output step + # These are 5-day averages marked with the next day's date. 
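The corrected conversion in ice2ocn_fwflux.py above turns the CICE salt flux fsalt_ai (kg of salt per m^2 per s) into the freshwater volume it effectively removes: multiply by 1e3 to get g of salt, divide by the surface salinity in psu (g of salt per kg of water) to get kg of water, divide by rho_fw = 1000 kg/m^3 to get m of water per second, and multiply by 8.64e6 to convert m/s to cm/day. A standalone restatement of that arithmetic:

    rho_fw = 1000.0         # freshwater density (kg/m^3)
    mps_to_cmpday = 8.64e6  # 100 cm/m * 86400 s/day

    def fsalt_to_cmday(fsalt, sss):
        # fsalt: kg salt / m^2 / s; sss: psu (g salt per kg water)
        # returns the equivalent freshwater flux in cm/day
        return fsalt*1e3/sss/rho_fw*mps_to_cmpday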
time = num2date(time_id[:], units=time_id.units, calendar=time_id.calendar.lower()) - end_t = -1 + # Loop backwards through time indices to find the last one we care about + # (which contains 31 Jan in its averaging period) + end_t = -1 # Missing value flag for t in range(size(time)-1, -1, -1): if time[t].month == start_month[0] and time[t].day in range(start_day[0], start_day[0]+5): end_t = t break + # Make sure we actually found it if end_t == -1: print 'Error: ' + cice_file + ' does not contain a complete Feb-Jan period' return - start_t = -1 + # Continue looping backwards to find the first time index we care about + # (which contains 1 Feb the previous year in its averaging period) + start_t = -1 # Missing value flag for t in range(end_t-60, -1, -1): if time[t].month == start_month[0] and time[t].day in range(start_day[0]+1, start_day[0]+6): start_t = t break + # Make sure we actually found it if start_t == -1: print 'Error: ' + cice_file + ' does not contain a complete Feb-Jan period' return + # Check for leap years leap_year = False if mod(time[start_t].year, 4) == 0: + # Years divisible by 4 are leap years leap_year = True if mod(time[start_t].year, 100) == 0: + # Unless they're also divisible by 100, in which case they aren't + # leap years leap_year = False if mod(time[start_t].year, 400) == 0: + # Unless they're also divisible by 400, in which case they are + # leap years after all leap_year = True if leap_year: + # Update last day in February ndays_season[0] += 1 + # Initialise seasonal averages of CICE output aice_tmp = ma.empty([4, size(lon_tmp,0), size(lon_tmp,1)]) aice_tmp[:,:,:] = 0.0 uxy_tmp = ma.empty([4, size(lon_tmp,0), size(lon_tmp,1)]) uxy_tmp[:,:,:] = 0.0 vxy_tmp = ma.empty([4, size(lon_tmp,0), size(lon_tmp,1)]) vxy_tmp[:,:,:] = 0.0 - + # Process one season at a time for season in range(4): - season_days = 0 + season_days = 0 # Number of days in season; this will be incremented next_season = mod(season+1, 4) + # Find starting timestep start_t_season = -1 for t in range(start_t, end_t+1): if time[t].month == start_month[season] and time[t].day in range(start_day[season]+1, start_day[season]+6): start_t_season = t break + # Make sure we actually found it if start_t_season == -1: print 'Error: could not find starting timestep for season ' + season_names[season] return + # Find ending timestep end_t_season = -1 for t in range(start_t_season+1, end_t+1): if time[t].month == start_month[next_season] and time[t].day in range(start_day[next_season], start_day[next_season]+5): end_t_season = t break + # Make sure we actually found it if end_t_season == -1: print 'Error: could not find ending timestep for season ' + season_names[season] return + # Figure out how many of the 5 days averaged in start_t_season are + # actually within this season if time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 5: + # Starting day is in position 1 of 5; we care about all of them start_days = 5 elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 4: + # Starting day is in position 2 of 5; we care about the last 4 start_days = 4 elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season]+ 3: + # Starting day is in position 3 of 5; we care about the last 3 start_days = 3 elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 2: + # Starting day is in position 4 of 5; we care about the last 2 
start_days = 2 elif time[start_t_season].month == start_month[season] and time[start_t_season].day == start_day[season] + 1: + # Starting day is in position 5 of 5; we care about the last 1 start_days = 1 else: print 'Error for season ' + season_names[season] + ': starting index is month ' + str(time[start_t_season].month) + ', day ' + str(time[start_t_season].day) return + # Start accumulating data weighted by days aice_tmp[season,:,:] += id.variables['aice'][start_t_season,:-15,:]*start_days uxy_tmp[season,:,:] += id.variables['uvel'][start_t_season,:-15,:]*start_days vxy_tmp[season,:,:] += id.variables['vvel'][start_t_season,:-15,:]*start_days season_days += start_days + # Between start_t_season and end_t_season, we want all the days for t in range(start_t_season+1, end_t_season): aice_tmp[season,:,:] += id.variables['aice'][t,:-15,:]*5 uxy_tmp[season,:,:] += id.variables['uvel'][t,:-15,:]*5 vxy_tmp[season,:,:] += id.variables['vvel'][t,:-15,:]*5 season_days += 5 + # Figure out how many of the 5 days averaged in end_t_season are + # actually within this season if time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 4: + # Ending day is in position 1 of 5; we care about the first 1 end_days = 1 elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 3: + # Ending day is in position 2 of 5; we care about the first 2 end_days = 2 elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 2: + # Ending day is in position 3 of 5; we care about the first 3 end_days = 3 elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season] + 1: + # Ending day is in position 4 of 5; we care about the first 4 end_days = 4 elif time[end_t_season].month == start_month[next_season] and time[end_t_season].day == start_day[next_season]: + # Ending day is in position 5 of 5; we care about all 5 end_days = 5 else: print 'Error for season ' + season_names[season] + ': ending index is month ' + str(time[end_t_season].month) + ', day ' + str(time[end_t_season].day) @@ -131,19 +187,22 @@ def ice_drift_seasonal (cice_file, save=False, fig_name=None): aice_tmp[season,:,:] += id.variables['aice'][end_t_season,:-15,:]*end_days uxy_tmp[season,:,:] += id.variables['uvel'][end_t_season,:-15,:]*end_days vxy_tmp[season,:,:] += id.variables['vvel'][end_t_season,:-15,:]*end_days season_days += end_days + # Check that we got the correct number of days if season_days != ndays_season[season]: print 'Error: found ' + str(season_days) + ' days instead of ' + str(ndays_season[season]) return + # Finished accumulating data, now convert from sum to average aice_tmp[season,:,:] /= season_days uxy_tmp[season,:,:] /= season_days vxy_tmp[season,:,:] /= season_days + # Finished reading all CICE data id.close() + # Wrap the periodic boundary aice = ma.empty([size(aice_tmp,0), size(aice_tmp,1), size(aice_tmp,2)+1]) aice[:,:,:-1] = aice_tmp aice[:,:,-1] = aice_tmp[:,:,0] @@ -154,27 +213,39 @@ def ice_drift_seasonal (cice_file, save=False, fig_name=None): v_xy[:,:,:-1] = vxy_tmp v_xy[:,:,-1] = vxy_tmp[:,:,0] + # Rotate from local x-y space to lon-lat space u, v = rotate_vector_cice(u_xy, v_xy, angle) + # Calculate speed speed = sqrt(u**2 + v**2) + # Convert velocity to polar coordinates, rotate to account for longitude in + # circumpolar projection, and convert back to vector components theta = arctan2(v, u) theta_circ = theta - 
lon*deg2rad u_circ = speed*cos(theta_circ) v_circ = speed*sin(theta_circ) + # Calculate x and y coordinates for plotting circumpolar projection x = -(lat+90)*cos(lon*deg2rad+pi/2) y = (lat+90)*sin(lon*deg2rad+pi/2) + # Average x, y, u_circ, and v_circ over block x block intervals + # Calculate number of blocks size0 = int(ceil(size(x,0)/float(block))) size1 = int(ceil((size(x,1)-1)/float(block))) + # Set up arrays for averaged fields x_block = ma.empty([size0, size1]) y_block = ma.empty([size0, size1]) u_circ_block = ma.empty([4, size0, size1]) v_circ_block = ma.empty([4, size0, size1]) + # Loop over seasons for season in range(4): + # Set up arrays containing boundary indices posn0 = range(0, size(x,0), block) posn0.append(size(x,0)) posn1 = range(0, size(x,1), block) posn1.append(size(x,1)) + # Double loop to average each block (can't find a more efficient way to + # do this) for j in range(size0): for i in range(size1): start0 = posn0[j] @@ -182,41 +253,56 @@ def ice_drift_seasonal (cice_file, save=False, fig_name=None): start1 = posn1[i] end1 = posn1[i+1] if season == 0: + # x_block and y_block are season-independent so just do them + # for the first season x_block[j,i] = mean(x[start0:end0, start1:end1]) y_block[j,i] = mean(y[start0:end0, start1:end1]) u_circ_block[season,j,i] = mean(u_circ[season, start0:end0, start1:end1]) v_circ_block[season,j,i] = mean(v_circ[season, start0:end0, start1:end1]) + # Set up colour levels for aice lev = linspace(0, 1, num=50) + # Set boundaries for each side of plot bdry1 = -35 bdry2 = 35 bdry3 = -33 bdry4 = 37 + # Make the plot fig = figure(figsize=(16,12)) + # Loop over seasons for season in range(4): ax = fig.add_subplot(2, 2, figure_order[season], aspect='equal') + # Contour concentration img = contourf(x, y, aice[season,:,:], lev, cmap='jet') + # Add velocity vectors q = quiver(x_block, y_block, u_circ_block[season,:,:], v_circ_block[season,:,:], color='black') + # Configure plot xlim([bdry1, bdry2]) ylim([bdry3, bdry4]) axis('off') title(season_names[season], fontsize=24) if season == 0: + # Add colourbar cbaxes = fig.add_axes([0.07, 0.6, 0.02, 0.3]) cbar = colorbar(img, ticks=arange(0,1+0.25,0.25), cax=cbaxes) cbar.ax.tick_params(labelsize=16) if season == 3: + # Add 20 cm/s reference vector quiverkey(q, 0.07, 0.3, 0.2, '20 cm/s', coordinates='figure', fontproperties={'size':16}) + # Add main title suptitle('Sea ice concentration (1) and velocity (m/s)', fontsize=30) + # Make plots closer together subplots_adjust(wspace=0.025,hspace=0.1) + # Finished if save: fig.savefig(fig_name) else: fig.show() +# Command-line interface if __name__ == "__main__": cice_file = raw_input("Path to CICE file, containing at least one complete Feb-Jan period: ") diff --git a/nsidc_aice_seasonal.py b/nsidc_aice_seasonal.py index 9ca0acd..43a6fa5 100644 --- a/nsidc_aice_seasonal.py +++ b/nsidc_aice_seasonal.py @@ -27,7 +27,7 @@ def nsidc_aice_seasonal (cice_file, save=False, fig_name=None): end_day = [28, 31, 31, 30] # Number of days in each season (again, ignore leap years for now) ndays_season = [90, 92, 92, 91] - # Number of days in each month (no leap years, this is just for NSIDC) + # Number of days in each month (this is just for NSIDC) ndays_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] # Season names for titles season_names = ['DJF', 'MAM', 'JJA', 'SON'] @@ -91,6 +91,7 @@ def nsidc_aice_seasonal (cice_file, save=False, fig_name=None): # Update last day in February end_day[0] += 1 ndays_season[0] += 1 + ndays_month[1] += 1 # Initialise seasonal 
averages of CICE output cice_data_tmp = ma.empty([4, size(cice_lon_tmp,0), size(cice_lon_tmp,1)]) @@ -147,7 +148,7 @@ def nsidc_aice_seasonal (cice_file, save=False, fig_name=None): cice_data_tmp[season,:,:] += id.variables['aice'][start_t_season,:-15,:]*start_days season_days += start_days - # Beween start_t_season and end_t_season, we want all the days + # Between start_t_season and end_t_season, we want all the days for t in range(start_t_season+1, end_t_season): cice_data_tmp[season,:,:] += id.variables['aice'][t,:-15,:]*5 season_days += 5 diff --git a/seaice_budget.py b/seaice_budget.py new file mode 100644 index 0000000..d4892c2 --- /dev/null +++ b/seaice_budget.py @@ -0,0 +1,164 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from cartesian_grid_2d import * + +# Create four plots showing timeseries of the thermodynamic vs dynamic volume +# tendency, averaged over (1) the continental shelf (defined as anywhere south +# of 60S with seafloor shallower than 1500 m), and (2) the offshore region +# (everywhere else). Two plots are the absolute volume tendencies (units of +# cm/day), the other two are cumulative over the simulation (cm). +# Input: +# cice_file = path to CICE output file containing 5-day averages for the entire +# simulation +# roms_grid = path to ROMS grid file +# save = optional boolean flag indicating that the plots should be saved to +# files rather than displayed on the screen +# fig_name = if save=True, an array of size 4 containing filenames for each plot +def seaice_budget (cice_file, roms_grid, save=False, fig_names=None): + + # Read bathymetry values for ROMS grid + id = Dataset(roms_grid, 'r') + h = id.variables['h'][1:-1,1:-1] + id.close() + + # Read CICE grid + id = Dataset(cice_file, 'r') + lon = id.variables['TLON'][:,:] + lat = id.variables['TLAT'][:,:] + # Calculate elements of area + dx, dy = cartesian_grid_2d(lon, lat) + dA = dx*dy + # Read time values + time = id.variables['time'][:]/365.25 + # Read data (concentration and thermodynamic/dynamic volume tendencies) + aice = id.variables['aice'][:,:,:] + dvidtt = id.variables['dvidtt'][:,:,:] + dvidtd = id.variables['dvidtd'][:,:,:] + id.close() + + # Create masks for shelf and offshore region + shelf = (lat < -60)*(h < 1500) + offshore = invert(shelf) + + dvidtt_shelf = [] + dvidtd_shelf = [] + dvidtt_offshore = [] + dvidtd_offshore = [] + # Loop over timesteps + for t in range(size(time)): + # Only average over regions with at least 10% sea ice + aice_flag = aice[t,:,:] > 0.1 + # Thermodynamic volume tendency averaged over the continental shelf + dvidtt_shelf.append(sum(dvidtt[t,:,:]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag)) + # Dynamic volume tendency averaged over the continental shelf + dvidtd_shelf.append(sum(dvidtd[t,:,:]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag)) + # Thermodynamic volume tendency averaged over the offshore region + dvidtt_offshore.append(sum(dvidtt[t,:,:]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag)) + # Dynamic volume tendency averaged over the offshore region + dvidtd_offshore.append(sum(dvidtd[t,:,:]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag)) + + # Convert to arrays and sum to get total volume tendencies for each region + dvidtt_shelf = array(dvidtt_shelf) + dvidtd_shelf = array(dvidtd_shelf) + dvi_shelf = dvidtt_shelf + dvidtd_shelf + dvidtt_offshore = array(dvidtt_offshore) + dvidtd_offshore = array(dvidtd_offshore) + dvi_offshore = dvidtt_offshore + dvidtd_offshore + + # Set up continental shelf plot + fig1, ax1 = 
subplots(figsize=(8,6)) + # Add one timeseries at a time + ax1.plot(time, dvidtt_shelf, label='Thermodynamics', color='blue', linewidth=2) + ax1.plot(time, dvidtd_shelf, label='Dynamics', color='green', linewidth=2) + ax1.plot(time, dvi_shelf, label='Total', color='black', linewidth=2) + # Configure plot + title('Volume tendency averaged over continental shelf') + xlabel('Time (years)') + ylabel('cm/day') + grid(True) + # Add a legend + ax1.legend(loc='upper left') + if save: + fig1.savefig(fig_names[0]) + else: + fig1.show() + + # Same for offshore plot + fig2, ax2 = subplots(figsize=(8,6)) + ax2.plot(time, dvidtt_offshore, label='Thermodynamics', color='blue', linewidth=2) + ax2.plot(time, dvidtd_offshore, label='Dynamics', color='green', linewidth=2) + ax2.plot(time, dvi_offshore, label='Total', color='black', linewidth=2) + title('Volume tendency averaged over offshore region') + xlabel('Time (years)') + ylabel('cm/day') + grid(True) + ax2.legend(loc='lower right') + if save: + fig2.savefig(fig_names[1]) + else: + fig2.show() + + # Get cumulative sums of each term + dvidtt_shelf_cum = cumsum(dvidtt_shelf)*5 + dvidtd_shelf_cum = cumsum(dvidtd_shelf)*5 + dvi_shelf_cum = cumsum(dvi_shelf)*5 + dvidtt_offshore_cum = cumsum(dvidtt_offshore)*5 + dvidtd_offshore_cum = cumsum(dvidtd_offshore)*5 + dvi_offshore_cum = cumsum(dvi_offshore)*5 + + # Continental shelf cumulative plot + fig3, ax3 = subplots(figsize=(8,6)) + ax3.plot(time, dvidtt_shelf_cum, label='Thermodynamics', color='blue', linewidth=2) + ax3.plot(time, dvidtd_shelf_cum, label='Dynamics', color='green', linewidth=2) + ax3.plot(time, dvi_shelf_cum, label='Total', color='black', linewidth=2) + title('Cumulative volume tendency averaged over continental shelf') + xlabel('Time (years)') + ylabel('cm') + grid(True) + ax3.legend(loc='upper left') + if save: + fig3.savefig(fig_names[2]) + else: + fig3.show() + + # Offshore cumulative plot + fig4, ax4 = subplots(figsize=(8,6)) + ax4.plot(time, dvidtt_offshore_cum, label='Thermodynamics', color='blue', linewidth=2) + ax4.plot(time, dvidtd_offshore_cum, label='Dynamics', color='green', linewidth=2) + ax4.plot(time, dvi_offshore_cum, label='Total', color='black', linewidth=2) + title('Cumulative volume tendency averaged over offshore region') + xlabel('Time (years)') + ylabel('cm') + grid(True) + ax4.legend(loc='upper right') + if save: + fig4.savefig(fig_names[3]) + else: + fig4.show() + + +# Command-line interface +if __name__ == "__main__": + + cice_file = raw_input("Path to CICE history file: ") + roms_grid = raw_input("Path to ROMS grid file: ") + action = raw_input("Save figures (s) or display on screen (d)? 
") + if action == 's': + save = True + name1 = raw_input("File name for first figure (continental shelf): ") + name2 = raw_input("File name for second figure (offshore): ") + name3 = raw_input("File name for third figure (continental shelf, cumulative): ") + name4 = raw_input("File name for fourth figure (offshore, cumulative): ") + fig_names = [name1, name2, name3, name4] + else: + save = False + fig_names = None + seaice_budget(cice_file, roms_grid, save, fig_names) + + + + + + + diff --git a/seaice_budget_thermo.py b/seaice_budget_thermo.py new file mode 100644 index 0000000..7e74e28 --- /dev/null +++ b/seaice_budget_thermo.py @@ -0,0 +1,231 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from matplotlib.font_manager import FontProperties +from cartesian_grid_2d import * + +# Create four plots showing timeseries of the thermodynamic terms of sea ice +# growth and melt: congelation, frazil ice formation, snow-to-ice flooding, +# top melt, basal melt, and lateral melt. These variables are averaged over +# (1) the continental shelf (defined as anywhere south of 60S with seafloor +# shallower than 1500 m), and (2) the offshore region (everywhere else). +# Two plots are the absolute volume tendencies (units of cm/day), the other +# two are cumulative over the simulation (cm). +# Input: +# cice_file = path to CICE output file containing 5-day averages for the entire +# simulation +# roms_grid = path to ROMS grid file +# save = optional boolean flag indicating that the plots should be saved to +# files rather than displayed on the screen +# fig_name = if save=True, an array of size 4 containing filenames for each plot +def seaice_budget_thermo (cice_file, roms_grid, save=False, fig_names=None): + + # Read bathymetry values for ROMS grid + id = Dataset(roms_grid, 'r') + h = id.variables['h'][1:-1,1:-1] + id.close() + + # Read CICE grid + id = Dataset(cice_file, 'r') + lon = id.variables['TLON'][:,:] + lat = id.variables['TLAT'][:,:] + # Calculate elements of area + dx, dy = cartesian_grid_2d(lon, lat) + dA = dx*dy + # Read time values + time = id.variables['time'][:]/365.25 + # Read all the fields we need + aice = id.variables['aice'][:,:,:] + congel = id.variables['congel'][:,:,:] + frazil = id.variables['frazil'][:,:,:] + snoice = id.variables['snoice'][:,:,:] + meltt = -1*id.variables['meltt'][:,:,:] + meltb = -1*id.variables['meltb'][:,:,:] + meltl = -1*id.variables['meltl'][:,:,:] + id.close() + + # Create masks for shelf and offshore region + shelf = (lat < -60)*(h < 1500) + offshore = invert(shelf) + + congel_shelf = [] + frazil_shelf = [] + snoice_shelf = [] + meltt_shelf = [] + meltb_shelf = [] + meltl_shelf = [] + congel_offshore = [] + frazil_offshore = [] + snoice_offshore = [] + meltt_offshore = [] + meltb_offshore = [] + meltl_offshore = [] + # Loop over timesteps + for t in range(size(time)): + # Only average over regions with at least 10% sea ice + aice_flag = aice[t,:,:] > 0.1 + # Congelation averaged over the continental shelf + congel_shelf.append(sum(congel[t,:,:]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag)) + # Frazil ice formation averaged over the continental shelf + frazil_shelf.append(sum(frazil[t,:,:]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag)) + # Snow-to-ice flooding averaged over the continental shelf + snoice_shelf.append(sum(snoice[t,:,:]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag)) + # Top melt averaged over the continental shelf + meltt_shelf.append(sum(meltt[t,:,:]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag)) + # Basal 
melt averaged over the continental shelf + meltb_shelf.append(sum(meltb[t,:,:]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag)) + # Lateral melt averaged over the continental shelf + meltl_shelf.append(sum(meltl[t,:,:]*dA*shelf*aice_flag)/sum(dA*shelf*aice_flag)) + # Congelation averaged over the offshore region + congel_offshore.append(sum(congel[t,:,:]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag)) + # Frazil ice formation averaged over the offshore region + frazil_offshore.append(sum(frazil[t,:,:]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag)) + # Snow-to-ice flooding averaged over the offshore region + snoice_offshore.append(sum(snoice[t,:,:]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag)) + # Top melt averaged over the offshore region + meltt_offshore.append(sum(meltt[t,:,:]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag)) + # Basal melt averaged over the offshore region + meltb_offshore.append(sum(meltb[t,:,:]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag)) + # Lateral melt averaged over the offshore region + meltl_offshore.append(sum(meltl[t,:,:]*dA*offshore*aice_flag)/sum(dA*offshore*aice_flag)) + + # Convert to arrays and sum to get total volume tendency for each region + congel_shelf = array(congel_shelf) + frazil_shelf = array(frazil_shelf) + snoice_shelf = array(snoice_shelf) + meltt_shelf = array(meltt_shelf) + meltb_shelf = array(meltb_shelf) + meltl_shelf = array(meltl_shelf) + total_shelf = congel_shelf + frazil_shelf + snoice_shelf + meltt_shelf + meltb_shelf + meltl_shelf + congel_offshore = array(congel_offshore) + frazil_offshore = array(frazil_offshore) + snoice_offshore = array(snoice_offshore) + meltt_offshore = array(meltt_offshore) + meltb_offshore = array(meltb_offshore) + meltl_offshore = array(meltl_offshore) + total_offshore = congel_offshore + frazil_offshore + snoice_offshore + meltt_offshore + meltb_offshore + meltl_offshore + + # Legends need small font to fit + fontP = FontProperties() + fontP.set_size('small') + + # Set up continental shelf plot + fig1, ax1 = subplots(figsize=(8,6)) + # Add one timeseries at a time + ax1.plot(time, congel_shelf, label='Congelation', color='blue', linewidth=2) + ax1.plot(time, frazil_shelf, label='Frazil', color='red', linewidth=2) + ax1.plot(time, snoice_shelf, label='Snow-to-ice', color='cyan', linewidth=2) + ax1.plot(time, meltt_shelf, label='Top melt', color='magenta', linewidth=2) + ax1.plot(time, meltb_shelf, label='Basal melt', color='green', linewidth=2) + ax1.plot(time, meltl_shelf, label='Lateral melt', color='yellow', linewidth=2) + ax1.plot(time, total_shelf, label='Total', color='black', linewidth=2) + # Configure plot + title('Volume tendency averaged over continental shelf') + xlabel('Time (years)') + ylabel('cm/day') + grid(True) + # Add a legend + ax1.legend(loc='upper left', prop=fontP) + if save: + fig1.savefig(fig_names[0]) + else: + fig1.show() + + # Same for offshore plot + fig2, ax2 = subplots(figsize=(8,6)) + ax2.plot(time, congel_offshore, label='Congelation', color='blue', linewidth=2) + ax2.plot(time, frazil_offshore, label='Frazil', color='red', linewidth=2) + ax2.plot(time, snoice_offshore, label='Snow-to-ice', color='cyan', linewidth=2) + ax2.plot(time, meltt_offshore, label='Top melt', color='magenta', linewidth=2) + ax2.plot(time, meltb_offshore, label='Basal melt', color='green', linewidth=2) + ax2.plot(time, meltl_offshore, label='Lateral melt', color='yellow', linewidth=2) + ax2.plot(time, total_offshore, label='Total', color='black', linewidth=2) + title('Volume tendency 
averaged over offshore region') + xlabel('Time (years)') + ylabel('cm/day') + grid(True) + ax2.legend(loc='lower right', prop=fontP) + if save: + fig2.savefig(fig_names[1]) + else: + fig2.show() + + # Get cumulative sums of each term + congel_shelf_cum = cumsum(congel_shelf)*5 + frazil_shelf_cum = cumsum(frazil_shelf)*5 + snoice_shelf_cum = cumsum(snoice_shelf)*5 + meltt_shelf_cum = cumsum(meltt_shelf)*5 + meltb_shelf_cum = cumsum(meltb_shelf)*5 + meltl_shelf_cum = cumsum(meltl_shelf)*5 + total_shelf_cum = cumsum(total_shelf)*5 + congel_offshore_cum = cumsum(congel_offshore)*5 + frazil_offshore_cum = cumsum(frazil_offshore)*5 + snoice_offshore_cum = cumsum(snoice_offshore)*5 + meltt_offshore_cum = cumsum(meltt_offshore)*5 + meltb_offshore_cum = cumsum(meltb_offshore)*5 + meltl_offshore_cum = cumsum(meltl_offshore)*5 + total_offshore_cum = cumsum(total_offshore)*5 + + # Continental shelf cumulative plot + fig3, ax3 = subplots(figsize=(8,6)) + ax3.plot(time, congel_shelf_cum, label='Congelation', color='blue', linewidth=2) + ax3.plot(time, frazil_shelf_cum, label='Frazil', color='red', linewidth=2) + ax3.plot(time, snoice_shelf_cum, label='Snow-to-ice', color='cyan', linewidth=2) + ax3.plot(time, meltt_shelf_cum, label='Top melt', color='magenta', linewidth=2) + ax3.plot(time, meltb_shelf_cum, label='Basal melt', color='green', linewidth=2) + ax3.plot(time, meltl_shelf_cum, label='Lateral melt', color='yellow', linewidth=2) + ax3.plot(time, total_shelf_cum, label='Total', color='black', linewidth=2) + title('Cumulative volume tendency averaged over continental shelf') + xlabel('Time (years)') + ylabel('cm') + grid(True) + ax3.legend(loc='lower left', prop=fontP) + if save: + fig3.savefig(fig_names[2]) + else: + fig3.show() + + # Offshore cumulative plot + fig4, ax4 = subplots(figsize=(8,6)) + ax4.plot(time, congel_offshore_cum, label='Congelation', color='blue', linewidth=2) + ax4.plot(time, frazil_offshore_cum, label='Frazil', color='red', linewidth=2) + ax4.plot(time, snoice_offshore_cum, label='Snow-to-ice', color='cyan', linewidth=2) + ax4.plot(time, meltt_offshore_cum, label='Top melt', color='magenta', linewidth=2) + ax4.plot(time, meltb_offshore_cum, label='Basal melt', color='green', linewidth=2) + ax4.plot(time, meltl_offshore_cum, label='Lateral melt', color='yellow', linewidth=2) + ax4.plot(time, total_offshore_cum, label='Total', color='black', linewidth=2) + title('Cumulative volume tendency averaged over offshore region') + xlabel('Time (years)') + ylabel('cm/day') + grid(True) + ax4.legend(loc='lower left', prop=fontP) + if save: + fig4.savefig(fig_names[3]) + else: + fig4.show() + + +# Command-line interface +if __name__ == "__main__": + + cice_file = raw_input("Path to CICE history file: ") + roms_grid = raw_input("Path to ROMS grid file: ") + action = raw_input("Save figures (s) or display on screen (d)? 
") + if action == 's': + save = True + name1 = raw_input("File name for first figure (continental shelf): ") + name2 = raw_input("File name for second figure (offshore): ") + name3 = raw_input("File name for third figure (continental shelf, cumulative): ") + name4 = raw_input("File name for fourth figure (offshore, cumulative): ") + fig_names = [name1, name2, name3, name4] + else: + save = False + fig_names = None + seaice_budget_thermo(cice_file, roms_grid, save, fig_names) + + + + + + + diff --git a/sose_seasonal.py b/sose_seasonal.py index 4343065..a1ed29f 100644 --- a/sose_seasonal.py +++ b/sose_seasonal.py @@ -3,6 +3,15 @@ from matplotlib.pyplot import * from calc_z import * +# Make a 4x2 plot showing lat vs. depth slices of seasonally averaged +# temperature (top) and salinity (bottom) at the given longitude, for the +# 2005-2010 SOSE climatology. +# Input: +# lon0 = the specific longitude to plot (between -180 and 180) +# depth_bdry = deepest depth to plot (negative, in m) +# save = optional boolean flag; if True, the figure will be saved with file name +# fig_name; if False, the figure will display on the screen +# fig_name = optional string containing filename for figure, if save=True def sose_seasonal (lon0, depth_bdry, save=False, fig_name=None): # Path to SOSE seasonal climatology file @@ -69,6 +78,7 @@ def sose_seasonal (lon0, depth_bdry, save=False, fig_name=None): if season == 0: ylabel('depth (m)', fontsize=18) if season == 3: + # Add colourbar cbaxes1 = fig.add_axes([0.92, 0.55, 0.01, 0.3]) cbar1 = colorbar(img, cax=cbaxes1, ticks=arange(temp_min, temp_max+temp_ticks, temp_ticks)) cbar1.ax.tick_params(labelsize=16) @@ -82,6 +92,7 @@ def sose_seasonal (lon0, depth_bdry, save=False, fig_name=None): ylabel('depth (m)', fontsize=18) xlabel('Latitude', fontsize=18) if season == 3: + # Add colourbar cbaxes2 = fig.add_axes([0.92, 0.15, 0.01, 0.3]) cbar2 = colorbar(img, cax=cbaxes2, ticks=arange(salt_min, salt_max+salt_ticks, salt_ticks)) cbar2.ax.tick_params(labelsize=16) diff --git a/ssflux_tamura_monthly.py b/ssflux_tamura_monthly.py index d9440f3..c9ee1fd 100644 --- a/ssflux_tamura_monthly.py +++ b/ssflux_tamura_monthly.py @@ -1,22 +1,41 @@ -from numpy import * +nfrom numpy import * from netCDF4 import Dataset, num2date from matplotlib.pyplot import * +# Make a figure comparing monthly-averaged sea ice to ocean salt fluxes, +# from Tamura's dataset to CICE output. 
+# Input: +# cice_file = path to CICE output file with 5-day averages; if it covers more +# than one instance of the given month, plot the last one +# month = month number (0-indexed) from 0 to 11 +# save = optional boolean to save the figure to a file, rather than displaying +# it on the screen +# fig_name = if save=True, path to the desired filename for figure def ssflux_tamura_monthly (cice_file, month, save=False, fig_name=None): + # Beginning and end of Tamura file paths tamura_head = '/short/m68/kaa561/tamura_fluxes/Tamura_ssflux_' tamura_tail = '_monthly.nc' + # Starting and ending days in each month + # Assume no leap years, we'll fix this later if we need start_day = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] end_day = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + # Name of each month, for the title month_name = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'] + # Degrees to radians conversion deg2rad = pi/180.0 + # Density of freshwater (used by ROMS to convert from kg/m^2/s to psu m/s) rho_fw = 1000.0 + # Density of seawater (used by CICE to convert from m/s to kg/m^2/s) rho_sw = 1026.0 + # Conversion factor: m/s to cm/day mps_to_cmpday = 8.64e6 + # Read the CICE grid and time values id = Dataset(cice_file, 'r') cice_lon_tmp = id.variables['TLON'][:-15,:] cice_lat_tmp = id.variables['TLAT'][:-15,:] + # Wrap the periodic boundary by 1 cell cice_lon = ma.empty([size(cice_lon_tmp,0), size(cice_lon_tmp,1)+1]) cice_lat = ma.empty([size(cice_lat_tmp,0), size(cice_lat_tmp,1)+1]) cice_lon[:,:-1] = cice_lon_tmp @@ -24,152 +43,209 @@ def ssflux_tamura_monthly (cice_file, month, save=False, fig_name=None): cice_lat[:,:-1] = cice_lat_tmp cice_lat[:,-1] = cice_lat_tmp[:,0] time_id = id.variables['time'] + # Get the year, month, and day (all 1-indexed) for each output step + # These are 5-day averages marked with the next day's date. 
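# The accumulation further down combines the CICE freshwater flux (fresh_ai,
# cm/day), precipitation (rain_ai, cm/day), sea surface salinity (sss, psu) and
# explicit salt flux (fsalt_ai, kg/m^2/s) into a single surface salinity flux in
# psu m/s, so it can be compared with Tamura's dataset. A minimal sketch of that
# conversion as a stand-alone helper (the function name is an illustrative
# assumption; the default constants match the values defined above):
def cice_to_ssflux(fresh_ai, rain_ai, sss, fsalt_ai,
                   rho_fw=1000.0, rho_sw=1026.0, mps_to_cmpday=8.64e6):
    # Subtract rain so only ice-related freshwater is counted (Tamura's fluxes
    # exclude precipitation), convert cm/day to m/s, weight by salinity and
    # seawater density, then add the explicit salt flux and flip the sign
    return -1/rho_fw*((fresh_ai - rain_ai)*sss*rho_sw/mps_to_cmpday - fsalt_ai*1e3)

# This is the same expression applied at each timestep below, before the
# day-weighted sums are converted to a monthly average.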
cice_time = num2date(time_id[:], units=time_id.units, calendar='standard') + # Get index of the next month next_month = mod(month+1, 12) - prev_month = mod(month-1, 12) - end_t = -1 + # Loop backwards through time indices to find the last one we care about + # (which contains the last day of the month in its averaging period) + end_t = -1 # Missing value flag for t in range(size(cice_time)-1, -1, -1): + # Note that cice_time[t].month is converted to 0-indexed if cice_time[t].month-1 == next_month and cice_time[t].day in range(start_day[next_month], start_day[next_month]+5): end_t = t break + # Make sure we actually found it if end_t == -1: print 'Error: ' + cice_file + ' does not contain a complete ' + month_name[month] return - start_t = -1 + # Continue looping backwards to find the first time index we care about + start_t = -1 # Missing value flag for t in range(end_t, -1, -1): if cice_time[t].month-1 == month and cice_time[t].day in range(start_day[month]+1, start_day[month]+6): start_t = t break + # Make sure we actually found it if start_t == -1: print 'Error: ' + cice_file + ' does not contain a complete ' + month_name[month] return + # Check if this is a leap year leap_year = False cice_year = cice_time[end_t].year if month == 11: + # Timestep end_t is likely in the next year, so find the year of start_t cice_year = cice_time[start_t].year if mod(cice_year, 4) == 0: + # Years divisible by 4 are leap years leap_year = True if mod(cice_year, 100) == 0: + # Unless they're also divisible by 100, in which case they aren't + # leap years leap_year = False if mod(cice_year, 400) == 0: + # Unless they're also divisible by 400, in which case they are + # leap years after all leap_year = True if leap_year: + # Update last day in February end_day[1] = 29 + # Calculate monthly average of CICE output cice_data_tmp = ma.empty(shape(cice_lon_tmp)) cice_data_tmp[:,:] = 0.0 num_days = 0 + # Figure out how many of the 5 days averaged in start_t are actually within + # this month if cice_time[start_t].month-1 == month and cice_time[start_t].day == start_day[month]+5: + # Starting day is in position 1 of 5; we care about all of them start_days = 5 elif cice_time[start_t].month-1 == month and cice_time[start_t].day == start_day[month]+4: + # Starting day is in position 2 of 5; we care about the last 4 start_days = 4 elif cice_time[start_t].month-1 == month and cice_time[start_t].day == start_day[month]+3: + # Starting day is in position 3 of 5; we care about the last 3 start_days = 3 elif cice_time[start_t].month-1 == month and cice_time[start_t].day == start_day[month]+2: + # Starting day is in position 4 of 5; we care about the last 2 start_days = 2 elif cice_time[start_t].month-1 == month and cice_time[start_t].day == start_day[month]+1: + # Starting day is in position 5 of 5; we care about the last 1 start_days = 1 else: print 'Error: starting index is month ' + str(cice_time[start_t].month) + ', day ' + str(cice_time[start_t].day) return + # Read the fields we need at start_t fresh_ai = id.variables['fresh_ai'][start_t,:-15,:] - fsalt_ai = id.variables['fsalt_ai'][start_t,:-15,:] sss = id.variables['sss'][start_t,:-15,:] - cice_data_tmp += -1/rho_fw*(fresh_ai*rho_sw/mps_to_cmpday - fsalt_ai*1e3/sss)*start_days + rain_ai = id.variables['rain_ai'][start_t,:-15,:] + fsalt_ai = id.variables['fsalt_ai'][start_t,:-15,:] + # Start accumulating data weighted by days + # Convert to units of psu m/s (equivalent to kg/m^2/s of salt) + # Subtract rain from freshwater flux, since Tamura doesn't count precip + 
cice_data_tmp += -1/rho_fw*((fresh_ai-rain_ai)*sss*rho_sw/mps_to_cmpday - fsalt_ai*1e3)*start_days num_days += start_days + # Between start_t and end_t, we want all the days for t in range(start_t+1, end_t): fresh_ai = id.variables['fresh_ai'][t,:-15,:] - fsalt_ai = id.variables['fsalt_ai'][t,:-15,:] sss = id.variables['sss'][t,:-15,:] - cice_data_tmp += -1/rho_fw*(fresh_ai*rho_sw/mps_to_cmpday - fsalt_ai*1e3/sss)*5 + rain_ai = id.variables['rain_ai'][t,:-15,:] + fsalt_ai = id.variables['fsalt_ai'][t,:-15,:] + cice_data_tmp += -1/rho_fw*((fresh_ai-rain_ai)*sss*rho_sw/mps_to_cmpday - fsalt_ai*1e3)*5 num_days += 5 + # Figure out how many of the 5 days averaged in end_t are actually within + # this month if cice_time[end_t].month-1 == next_month and cice_time[end_t].day == start_day[next_month]+4: + # Ending day is in position 1 of 5; we care about the first 1 end_days = 1 elif cice_time[end_t].month-1 == next_month and cice_time[end_t].day == start_day[next_month]+3: + # Ending day is in position 2 of 5; we care about the first 2 end_days = 2 elif cice_time[end_t].month-1 == next_month and cice_time[end_t].day == start_day[next_month]+2: + # Ending day is in position 3 of 5; we care about the first 3 end_days = 3 elif cice_time[end_t].month-1 == next_month and cice_time[end_t].day == start_day[next_month]+1: + # Ending day is in position 4 of 5; we care about the first 4 end_days = 4 elif cice_time[end_t].month-1 == next_month and cice_time[end_t].day == start_day[next_month]: + # Ending day is in position 5 of 5; we care about all 5 end_days = 5 else: print 'Error: ending index is month ' + str(cice_time[end_t].month) + ', day ' + str(cice_time[end_t].day) return fresh_ai = id.variables['fresh_ai'][end_t,:-15,:] - fsalt_ai = id.variables['fsalt_ai'][end_t,:-15,:] sss = id.variables['sss'][end_t,:-15,:] - cice_data_tmp += -1/rho_fw*(fresh_ai*rho_sw/mps_to_cmpday - fsalt_ai*1e3/sss)*end_days + rain_ai = id.variables['rain_ai'][end_t,:-15,:] + fsalt_ai = id.variables['fsalt_ai'][end_t,:-15,:] + cice_data_tmp += -1/rho_fw*((fresh_ai-rain_ai)*sss*rho_sw/mps_to_cmpday - fsalt_ai*1e3)*end_days num_days += end_days + # Check that we got the correct number of days if num_days != end_day[month]: print 'Error: found ' + str(num_days) + ' days instead of ' + str(end_day[month]) return + # Finished accumulating data id.close() + # Convert from sum to average cice_data_tmp /= num_days + # Multiply by 1e6 so colour bar is easier to read cice_data_tmp *= 1e6 + # Wrap periodic boundary cice_data = ma.empty([size(cice_data_tmp,0), size(cice_data_tmp,1)+1]) cice_data[:,:-1] = cice_data_tmp cice_data[:,-1] = cice_data_tmp[:,0] + # Read Tamura data and grid id = Dataset(tamura_head + str(cice_year) + tamura_tail, 'r') tamura_lon = id.variables['longitude'][:,:] tamura_lat = id.variables['latitude'][:,:] tamura_data = id.variables['ssflux'][month,:,:] id.close() + # Appply land mask and multiply by 1e6 tamura_data = ma.masked_where(isnan(tamura_data), tamura_data) tamura_data *= 1e6 + # Convert both grids to spherical coordinates cice_x = -(cice_lat+90)*cos(cice_lon*deg2rad+pi/2) cice_y = (cice_lat+90)*sin(cice_lon*deg2rad+pi/2) tamura_x = -(tamura_lat+90)*cos(tamura_lon*deg2rad+pi/2) tamura_y = (tamura_lat+90)*sin(tamura_lon*deg2rad+pi/2) - bound = 1.0 #max(amax(abs(cice_data)), amax(abs(tamura_data))) + # Choose colour levels + bound = 20.0 lev = linspace(-bound, bound, num=50) + # Find boundaries for each side of plot based on extent of grids bdry1 = max(amin(cice_x), amin(tamura_x)) bdry2 = min(amax(cice_x), 
amax(tamura_x)) bdry3 = max(amin(cice_y), amin(tamura_y)) bdry4 = min(amax(cice_y), amax(tamura_y)) + # Plot fig = figure(figsize=(20,9)) + # Tamura fig.add_subplot(1,2,1, aspect='equal') contourf(tamura_x, tamura_y, tamura_data, lev, cmap='RdYlBu_r') title('Tamura', fontsize=24) xlim([bdry1, bdry2]) ylim([bdry3, bdry4]) axis('off') + # CICE fig.add_subplot(1,2,2, aspect='equal') img = contourf(cice_x, cice_y, cice_data, lev, cmap='RdYlBu_r') title('CICE', fontsize=24) xlim([bdry1, bdry2]) ylim([bdry3, bdry4]) axis('off') + # Add a horizontal colourbar at the bottom cbaxes = fig.add_axes([0.3, 0.04, 0.4, 0.04]) cbar = colorbar(img, orientation='horizontal', ticks=arange(-1,1+0.5, 0.5), cax=cbaxes, extend='both') cbar.ax.tick_params(labelsize=18) + # Add the main title suptitle(r'Ice-to-ocean salt flux (10$^{-6}$ kg/m$^2$/s, ' + month_name[month] + ' ' + str(cice_year), fontsize=30) + # Finished if save: fig.savefig(fig_name) else: fig.show() +# Command-line interface if __name__ == "__main__": cice_file = raw_input("Path to CICE file: ") + # Convert month from 1-indexed to 0-indexed month = int(raw_input("Month number (1-12): "))-1 action = raw_input("Save figure (s) or display on screen (d)? ") if action == 's': @@ -178,24 +254,33 @@ def ssflux_tamura_monthly (cice_file, month, save=False, fig_name=None): elif action == 'd': save = False fig_name = None + # Make the plot ssflux_tamura_monthly(cice_file, month, save, fig_name) while True: + # Repeat until the user wants to exit repeat = raw_input("Make another plot (y/n)? ") if repeat == 'y': while True: + # Ask for changes to the parameters until the user is done changes = raw_input("Enter a parameter to change: (1) file path, (2) month number, (3) save/display; or enter to continue: ") if len(changes) == 0: + # No more changes to parameters break else: if int(changes) == 1: + # New CICE file cice_file = raw_input("Path to CICE file: ") elif int(changes) == 2: + # New month month = int(raw_input("Month number (1-12): "))-1 elif int(changes) == 3: + # Switch from save to display, or vice versa save = not save if save: + # Get a new figure name fig_name = raw_input("File name for figure: ") + # Make the plot ssflux_tamura_monthly(cice_file, month, save, fig_name) else: break diff --git a/ssflux_tamura_seasonal.py b/ssflux_tamura_seasonal.py index 504e1b4..3bbfa08 100644 --- a/ssflux_tamura_seasonal.py +++ b/ssflux_tamura_seasonal.py @@ -2,25 +2,47 @@ from netCDF4 import Dataset, num2date from matplotlib.pyplot import * +# Make a figure comparing seasonally-averaged sea ice to ocean salt fluxes, +# from Tamura's dataset to CICE output. 
+# Input: +# cice_file = path to CICE output file with 5-day averages, containing at least +# one complete Dec-Nov period (if there are multiple such periods, +# this script uses the last one) +# save = optional boolean to save the figure to a file, rather than displaying +# it on the screen +# fig_name = if save=True, path to the desired filename for figure def ssflux_tamura_seasonal (cice_file, save=False, fig_name=None): + # Beginning and end of Tamura file paths tamura_head = '/short/m68/kaa561/tamura_fluxes/Tamura_ssflux_' tamura_tail = '_monthly.nc' + # Starting and ending months (1-based) for each season start_month = [12, 3, 6, 9] end_month = [2, 5, 8, 11] + # Starting and ending days of the month (1-based) for each season + # Assume no leap years, we'll fix this later if we need start_day = [1, 1, 1, 1] end_day = [28, 31, 31, 30] + # Number of days in each season (again, ignore leap years for now) ndays_season = [90, 92, 92, 91] + # Number of days in each month (this is just for Tamura) ndays_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] + # Season names for titles season_names = ['DJF', 'MAM', 'JJA', 'SON'] + # Degrees to radians conversion deg2rad = pi/180.0 + # Density of freshwater (used by ROMS to convert from kg/m^2/s to psu m/s) rho_fw = 1000.0 + # Density of seawater (used by CICE to convert from m/s to kg/m^2/s) rho_sw = 1026.0 + # Conversion factor: m/s to cm/day mps_to_cmpday = 8.64e6 + # Read the CICE grid and time values id = Dataset(cice_file, 'r') cice_lon_tmp = id.variables['TLON'][:-15,:] cice_lat_tmp = id.variables['TLAT'][:-15,:] + # Wrap the periodic boundary by 1 cell cice_lon = ma.empty([size(cice_lon_tmp,0), size(cice_lon_tmp,1)+1]) cice_lat = ma.empty([size(cice_lat_tmp,0), size(cice_lat_tmp,1)+1]) cice_lon[:,:-1] = cice_lon_tmp @@ -28,134 +50,182 @@ def ssflux_tamura_seasonal (cice_file, save=False, fig_name=None): cice_lat[:,:-1] = cice_lat_tmp cice_lat[:,-1] = cice_lat_tmp[:,0] time_id = id.variables['time'] + # Get the year, month, and day (all 1-indexed) for each output step + # These are 5-day averages marked with the next day's date. 
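# The Dec-Nov period located below can span a leap year, so the checks that
# follow apply the standard Gregorian rule (divisible by 4, except centuries,
# except centuries divisible by 400). A compact stand-alone equivalent (the
# helper name is an illustrative assumption, not part of these scripts):
def is_leap_year(year):
    # Same rule as the divisible-by-4/100/400 checks used in these scripts
    if year % 400 == 0:
        return True
    if year % 100 == 0:
        return False
    return year % 4 == 0

# e.g. is_leap_year(1992) is True, is_leap_year(1900) is False, and
# is_leap_year(2000) is True; when True, one day is added to February's totals.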
cice_time = num2date(time_id[:], units=time_id.units, calendar=time_id.calendar.lower()) - end_t = -1 + # Loop backwards through time indices to find the last one we care about + # (which contains 30 November in its averaging period) + end_t = -1 # Missing value flag for t in range(size(cice_time)-1, -1, -1): if cice_time[t].month == start_month[0] and cice_time[t].day in range(start_day[0], start_day[0]+5): end_t = t break + # Make sure we actually found it if end_t == -1: print 'Error: ' + cice_file + ' does not contain a complete Dec-Nov period' return - start_t = -1 + # Continue looping backwards to find the first time index we care about + # (which contains 1 December the previous year in its averaging period) + start_t = -1 # Missing value flag for t in range(end_t-60, -1, -1): if cice_time[t].month == start_month[0] and cice_time[t].day in range(start_day[0]+1, start_day[0]+6): start_t = t break + # Make sure we actually found it if start_t == -1: print 'Error: ' + cice_file + ' does not contain a complete Dec-Nov period' return + # Check for leap years leap_year = False cice_year = cice_time[end_t].year if mod(cice_year, 4) == 0: + # Years divisible by 4 are leap years leap_year = True if mod(cice_year, 100) == 0: + # Unless they're also divisible by 100, in which case they aren't + # leap years leap_year = False if mod(cice_year, 400) == 0: + # Unless they're also divisible by 400, in which case they are + # leap years after all leap_year = True if leap_year: + # Update last day in February end_day[0] += 1 ndays_season[0] += 1 + ndays_month[1] += 1 + # Initialise seasonal averages of CICE output cice_data_tmp = ma.empty([4, size(cice_lon_tmp,0), size(cice_lon_tmp,1)]) cice_data_tmp[:,:,:] = 0.0 + # Process one season at a time for season in range(4): - season_days = 0 + season_days = 0 # Number of days in season; this will be incremented next_season = mod(season+1, 4) + # Find starting timestep start_t_season = -1 for t in range(start_t, end_t+1): if cice_time[t].month == start_month[season] and cice_time[t].day in range(start_day[season]+1, start_day[season]+6): start_t_season = t break + # Make sure we actually found it if start_t_season == -1: print 'Error: could not find starting timestep for season ' + season_names[season] return + # Find ending timestep end_t_season = -1 for t in range(start_t_season+1, end_t+1): if cice_time[t].month == start_month[next_season] and cice_time[t].day in range(start_day[next_season], start_day[next_season]+5): end_t_season = t break + # Make sure we actually found it if end_t_season == -1: print 'Error: could not find ending timestep for season ' + season_names[season] return + # Figure out how many of the 5 days averaged in start_t_season are + # actually within this season if cice_time[start_t_season].month == start_month[season] and cice_time[start_t_season].day == start_day[season] + 5: + # Starting day is in position 1 of 5; we care about all of them start_days = 5 elif cice_time[start_t_season].month == start_month[season] and cice_time[start_t_season].day == start_day[season] + 4: + # Starting day is in position 2 of 5; we care about the last 4 start_days = 4 elif cice_time[start_t_season].month == start_month[season] and cice_time[start_t_season].day == start_day[season]+ 3: + # Starting day is in position 3 of 5; we care about the last 3 start_days = 3 elif cice_time[start_t_season].month == start_month[season] and cice_time[start_t_season].day == start_day[season] + 2: + # Starting day is in position 4 of 5; we care about the last 2 
start_days = 2 elif cice_time[start_t_season].month == start_month[season] and cice_time[start_t_season].day == start_day[season] + 1: + # Starting day is in position 5 of 5; we care about the last 1 start_days = 1 else: print 'Error for season ' + season_names[season] + ': starting index is month ' + str(cice_time[start_t_season].month) + ', day ' + str(cice_time[start_t_season].day) return + # Read the fields we need at start_t_season fresh_ai = id.variables['fresh_ai'][start_t_season,:-15,:] + sss = id.variables['sss'][start_t_season,:-15,:] rain_ai = id.variables['rain_ai'][start_t_season,:-15,:] fsalt_ai = id.variables['fsalt_ai'][start_t_season,:-15,:] - sss = id.variables['sss'][start_t_season,:-15,:] - cice_data_tmp[season,:,:] += -1/rho_fw*((fresh_ai-rain_ai)*rho_sw/mps_to_cmpday - fsalt_ai*1e3/sss)*start_days + # Start accumulating data weighted by days + # Convert to units of psu m/s (equivalent to kg/m^2/s of salt) + # Subtract rain from freshwater flux, since Tamura doesn't count precip + cice_data_tmp[season,:,:] += -1/rho_fw*((fresh_ai-rain_ai)*sss*rho_sw/mps_to_cmpday - fsalt_ai*1e3)*start_days season_days += start_days + # Between start_t_season and end_t_season, we want all the days for t in range(start_t_season+1, end_t_season): fresh_ai = id.variables['fresh_ai'][t,:-15,:] + sss = id.variables['sss'][t,:-15,:] rain_ai = id.variables['rain_ai'][t,:-15,:] fsalt_ai = id.variables['fsalt_ai'][t,:-15,:] - sss = id.variables['sss'][t,:-15,:] - cice_data_tmp[season,:,:] += -1/rho_fw*((fresh_ai-rain_ai)*rho_sw/mps_to_cmpday - fsalt_ai*1e3/sss)*5 + cice_data_tmp[season,:,:] += -1/rho_fw*((fresh_ai-rain_ai)*sss*rho_sw/mps_to_cmpday - fsalt_ai*1e3)*5 season_days += 5 + # Figure out how many of the 5 days averaged in end_t_season are + # actually within this season if cice_time[end_t_season].month == start_month[next_season] and cice_time[end_t_season].day == start_day[next_season] + 4: + # Ending day is in position 1 of 5; we care about the first 1 end_days = 1 elif cice_time[end_t_season].month == start_month[next_season] and cice_time[end_t_season].day == start_day[next_season] + 3: + # Ending day is in position 2 of 5; we care about the first 2 end_days = 2 elif cice_time[end_t_season].month == start_month[next_season] and cice_time[end_t_season].day == start_day[next_season] + 2: + # Ending day is in position 3 of 5; we care about the first 3 end_days = 3 elif cice_time[end_t_season].month == start_month[next_season] and cice_time[end_t_season].day == start_day[next_season] + 1: + # Ending day is in position 4 of 5; we care about the first 4 end_days = 4 elif cice_time[end_t_season].month == start_month[next_season] and cice_time[end_t_season].day == start_day[next_season]: + # Ending day is in position 5 of 5; we care about all 5 end_days = 5 else: print 'Error for season ' + season_names[season] + ': ending index is month ' + str(cice_time[end_t_season].month) + ', day ' + str(cice_time[end_t_season].day) return fresh_ai = id.variables['fresh_ai'][end_t_season,:-15,:] + sss = id.variables['sss'][end_t_season,:-15,:] rain_ai = id.variables['rain_ai'][end_t_season,:-15,:] fsalt_ai = id.variables['fsalt_ai'][end_t_season,:-15,:] - sss = id.variables['sss'][end_t_season,:-15,:] - cice_data_tmp[season,:,:] += -1/rho_fw*((fresh_ai-rain_ai)*rho_sw/mps_to_cmpday - fsalt_ai*1e3/sss)*end_days + cice_data_tmp[season,:,:] += -1/rho_fw*((fresh_ai-rain_ai)*sss*rho_sw/mps_to_cmpday - fsalt_ai*1e3)*end_days season_days += end_days + # Check that we got the correct number of days if 
season_days != ndays_season[season]: print 'Error: found ' + str(season_days) + ' days instead of ' + str(ndays_season[season]) return + # Finished accumulating data, now convert from sum to average cice_data_tmp[season,:,:] /= season_days id.close() + # Multiply by 1e6 so colour bar is easier to read cice_data_tmp *= 1e6 + # Wrap periodic boundary cice_data = ma.empty([size(cice_data_tmp,0), size(cice_data_tmp,1), size(cice_data_tmp,2)+1]) cice_data[:,:,:-1] = cice_data_tmp cice_data[:,:,-1] = cice_data_tmp[:,:,0] + # Read Tamura grid id = Dataset(tamura_head + str(cice_year) + tamura_tail, 'r') tamura_lon = id.variables['longitude'][:,:] tamura_lat = id.variables['latitude'][:,:] id.close() + # Set up array for seasonal averages of Tamura data tamura_data = ma.empty([4, size(tamura_lon,0), size(tamura_lon,1)]) tamura_data[:,:,:] = 0.0 for season in range(4): + # Work out which months we're interested in if season == 0: tamura_months = [11, 0, 1] elif season == 1: @@ -168,31 +238,43 @@ def ssflux_tamura_seasonal (cice_file, save=False, fig_name=None): for month in tamura_months: if season == 0 and month == 11: + # Read December from the previous year id = Dataset(tamura_head + str(cice_year-1) + tamura_tail, 'r') else: + # Read the given month from the current year id = Dataset(tamura_head + str(cice_year) + tamura_tail, 'r') tamura_data_tmp = id.variables['ssflux'][month,:,:] id.close() + # Apply land mask tamura_data_tmp = ma.masked_where(isnan(tamura_data_tmp), tamura_data_tmp) + # Accumulate seasonal average tamura_data[season,:,:] += tamura_data_tmp*ndays_month[month] season_days += ndays_month[month] + # Convert from sum to average tamura_data[season,:,:] /= season_days + # Multiply by 1e6 as for CICE tamura_data *= 1e6 + # Convert both grids to spherical coordinates cice_x = -(cice_lat+90)*cos(cice_lon*deg2rad+pi/2) cice_y = (cice_lat+90)*sin(cice_lon*deg2rad+pi/2) tamura_x = -(tamura_lat+90)*cos(tamura_lon*deg2rad+pi/2) tamura_y = (tamura_lat+90)*sin(tamura_lon*deg2rad+pi/2) - lev = linspace(-0.5, 0.5, num=50) - bdry1 = -35 #max(amin(cice_x), amin(tamura_x)) - bdry2 = 35 #min(amax(cice_x), amax(tamura_x)) - bdry3 = -33 #max(amin(cice_y), amin(tamura_y)) - bdry4 = 37 #min(amax(cice_y), amax(tamura_y)) + # Choose colour levels + lev = linspace(-10, 10, num=50) + # Bounds for each side of plot + bdry1 = -35 + bdry2 = 35 + bdry3 = -33 + bdry4 = 37 + # Plot fig = figure(figsize=(20,9)) + # Loop over seasons for season in range(4): + # Tamura ax = fig.add_subplot(2, 4, season+1, aspect='equal') contourf(tamura_x, tamura_y, tamura_data[season,:,:], lev, cmap='RdYlBu_r', extend='both') if season == 0: @@ -201,6 +283,7 @@ def ssflux_tamura_seasonal (cice_file, save=False, fig_name=None): xlim([bdry1, bdry2]) ylim([bdry3, bdry4]) axis('off') + # CICE ax = fig.add_subplot(2, 4, season+5, aspect='equal') img = contourf(cice_x, cice_y, cice_data[season,:,:], lev, cmap='RdYlBu_r', extend='both') if season == 0: @@ -208,18 +291,23 @@ def ssflux_tamura_seasonal (cice_file, save=False, fig_name=None): xlim([bdry1, bdry2]) ylim([bdry3, bdry4]) axis('off') + # Add a horizontal colourbar at the bottom cbaxes = fig.add_axes([0.25, 0.04, 0.5, 0.02]) - cbar = colorbar(img, orientation='horizontal', ticks=arange(-0.5,0.5+0.25, 0.25), cax=cbaxes) + cbar = colorbar(img, orientation='horizontal', ticks=arange(-10,10+2, 2), cax=cbaxes) cbar.ax.tick_params(labelsize=16) + # Add the main title suptitle(r'Ice-to-ocean salt flux (10$^{-6}$ kg/m$^2$/s)', fontsize=30) + # Make plots closer together 
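# Both the CICE and Tamura fields above are drawn on the same simple
# circumpolar projection, and the vector plots elsewhere in this patch rotate
# velocities by the local longitude so the arrows line up with those projected
# axes. A minimal stand-alone sketch of both steps (the function names are
# illustrative assumptions; the formulas are the ones used in these scripts):
from numpy import pi, cos, sin, sqrt, arctan2

def circumpolar_xy(lon, lat, deg2rad=pi/180.0):
    # Polar projection centred on the South Pole, used for all the maps here
    x = -(lat + 90)*cos(lon*deg2rad + pi/2)
    y = (lat + 90)*sin(lon*deg2rad + pi/2)
    return x, y

def rotate_to_projection(u, v, lon, deg2rad=pi/180.0):
    # Convert to speed and direction, subtract the longitude so the direction
    # is relative to the projected x-y axes, then convert back to components
    speed = sqrt(u**2 + v**2)
    theta_circ = arctan2(v, u) - lon*deg2rad
    return speed*cos(theta_circ), speed*sin(theta_circ)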
subplots_adjust(wspace=0.025,hspace=0.025) + # Finished if save: fig.savefig(fig_name) else: fig.show() +# Command-line interface if __name__ == "__main__": cice_file = raw_input("Path to CICE file, containing at least one complete Dec-Nov period: ") diff --git a/temp_salt_seasonal.py b/temp_salt_seasonal.py index f863a89..879403b 100644 --- a/temp_salt_seasonal.py +++ b/temp_salt_seasonal.py @@ -85,7 +85,7 @@ def temp_salt_seasonal (file_path, lon0, depth_bdry, save=False, fig_name=None): # Continue looping backwards to find the first time index we care about # (which contains 1 December the previous year in its averaging period) - start_t = -1 # Missing value falg + start_t = -1 # Missing value flag for t in range(end_t-60, -1, -1): if time[t].month == end_month[-1] and time[t].day in range(end_day[-1]-1, end_day[-1]+1): start_t = t diff --git a/uv_vectorplot.py b/uv_vectorplot.py index cf4ef24..cd1f716 100644 --- a/uv_vectorplot.py +++ b/uv_vectorplot.py @@ -51,19 +51,20 @@ def uv_vectorplot (grid_path, file_path, tstep, depth_key, save=False, fig_name= # Throw away the overlapping periodic boundary u_rho = u_lonlat[:,:-1] v_rho = v_lonlat[:,:-1] - # Calculate speed for the background filled contour plot + # Calculate speed speed = sqrt(u_rho**2 + v_rho**2) - - # Calculate x and y coordinates for plotting circumpolar projection - x = -(lat+90)*cos(lon*deg2rad+pi/2) - y = (lat+90)*sin(lon*deg2rad+pi/2) - + # Convert velocity to polar coordinates, rotate to account for longitude in + # circumpolar projection, and convert back to vector components theta = arctan2(v_rho, u_rho) theta_circ = theta - lon*deg2rad u_circ = speed*cos(theta_circ) v_circ = speed*sin(theta_circ) - # Average X, Y, dX_dt, and dY_dt over block x block intervals + # Calculate x and y coordinates for plotting circumpolar projection + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + + # Average x, y, u_circ, and v_circ over block x block intervals # Calculate number of blocks size0 = int(ceil(size(x,0)/float(block))) size1 = int(ceil((size(x,1)-1)/float(block)))
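# The section ends with the block-averaging step shared by uv_vectorplot.py and
# ice_drift_seasonal.py: vector components are averaged over block x block
# windows so the quiver plots stay readable. A minimal stand-alone sketch of
# that reduction (the helper name is an illustrative assumption):
from numpy import ceil, mean, size, empty

def block_average(field, block):
    # Average a 2D array over block x block windows; edge windows that are
    # smaller than block x block are averaged over whatever cells remain
    size0 = int(ceil(size(field, 0)/float(block)))
    size1 = int(ceil(size(field, 1)/float(block)))
    out = empty([size0, size1])
    for j in range(size0):
        for i in range(size1):
            out[j, i] = mean(field[j*block:(j+1)*block, i*block:(i+1)*block])
    return out

# e.g. x_block = block_average(x, 15) reproduces the double loop used in these
# scripts, up to how the wrapped boundary column is excluded from the count.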