From 3158d8b3e6ebe6c59a7fb1335ec1c08f3b41bb85 Mon Sep 17 00:00:00 2001 From: Kaitlin Naughten Date: Mon, 3 Apr 2017 16:45:39 +1000 Subject: [PATCH] I promise comments are coming soon --- circumpolar_cice_plot.py | 2 +- common_grid.py | 246 +++++++++++++++++++++++++++++++++++++++ freezingpt_slice.py | 2 +- gadv_frazil_bathy.py | 90 ++++++++++++++ gadv_frazil_thickness.py | 131 +++++++++++++++++++++ gadv_freezingpt_slice.py | 88 ++++++++++++++ iceberg_melt.py | 15 +-- monthly_avg_cice.py | 58 ++++++--- monthly_avg_roms.py | 78 +++++++++---- romscice_gpcp.py | 113 ++++++++++++++++++ 10 files changed, 772 insertions(+), 51 deletions(-) create mode 100644 common_grid.py create mode 100644 gadv_frazil_bathy.py create mode 100644 gadv_frazil_thickness.py create mode 100644 gadv_freezingpt_slice.py create mode 100644 romscice_gpcp.py diff --git a/circumpolar_cice_plot.py b/circumpolar_cice_plot.py index 40c7b2d..98f1c83 100644 --- a/circumpolar_cice_plot.py +++ b/circumpolar_cice_plot.py @@ -113,7 +113,7 @@ def circumpolar_cice_plot (file_path, var_name, tstep, colour_bounds=None, save= contourf(x, y, data, lev, cmap=colour_map, extend='both') cbar = colorbar() cbar.ax.tick_params(labelsize=20) - title('Sea ice concentration\n' + str(time.day) + ' ' + month_names[time.month-1] + ' ' + str(time.year), fontsize=24) + title(var_name + ' (' + units +')\n' + str(time.day) + ' ' + month_names[time.month-1] + ' ' + str(time.year), fontsize=24) #title(var_name+' ('+units+')', fontsize=30) axis('off') diff --git a/common_grid.py b/common_grid.py new file mode 100644 index 0000000..be4b9b8 --- /dev/null +++ b/common_grid.py @@ -0,0 +1,246 @@ +from numpy import * +from netCDF4 import Dataset, num2date +from scipy.interpolate import griddata +from rotate_vector_roms import * +from rotate_vector_cice import * +from monthly_avg_roms import * +from monthly_avg_cice import * + +def common_grid (roms_file, cice_file, out_file): + + # Resolution of common grid (degrees, same for lat and lon) + res = 0.25 + # Northern boundary to interpolate to + nbdry = -50 + # Radius of the Earth in metres + r = 6.371e6 + # Degrees to radians conversion factor + deg2rad = pi/180.0 + + print 'Calculating grids' + + # Make the latitude and longitude arrays for the common grid + lon_common = arange(-180, 180+res, res) + lat_common = arange(-90, nbdry+res, res) + # Get a 2D version of each to calculate dx and dy in metres + lat_2d, lon_2d = meshgrid(lat_common, lon_common) + # dx = r*cos(lat)*dlon where lat and dlon (i.e. res) are in radians + dx = r*cos(lat_2d*deg2rad)*res*deg2rad + # dy = r*dlat where dlat (i.e. 
res) is in radians + # This is constant so reshape to an array of the right dimensions + dy = zeros(shape(dx)) + r*res*deg2rad + + # Read the ROMS grid + id = Dataset(roms_file, 'r') + # We only need lat and lon on the rho grid + lon_rho = id.variables['lon_rho'][:,:] + lat_rho = id.variables['lat_rho'][:,:] + # Get shape of u and v grids + u_shape = id.variables['lon_u'].shape + v_shape = id.variables['lon_v'].shape + # Read land mask + mask_roms = id.variables['mask_rho'][:,:] + # Mask out ice shelves too + zice = id.variables['zice'][:,:] + mask_roms[zice != 0] = 0.0 + # Read angle (for rotation of vector components) + angle_roms = id.variables['angle'][:,:] + # Get time as an array of Date objects + time_id = id.variables['ocean_time'] + time = num2date(time_id[:], units=time_id.units, calendar=time_id.calendar.lower()) + id.close() + + # Read the CICE grid + id = Dataset(cice_file, 'r') + # We only need lat and lon on the velocity grid + lon_cice = id.variables['ULON'][:,:] + lat_cice = id.variables['ULAT'][:,:] + # Read angle (for rotation of vector components) + angle_cice = id.variables['ANGLE'][:,:] + id.close() + + # Make sure longitude is between -180 and 180 + index = lon_rho > 180 + lon_rho[index] = lon_rho[index] - 360 + index = lon_cice > 180 + lon_cice[index] = lon_cice[index] - 360 + + print 'Counting months' + # Assume we start at the beginning of a year + # Figure out how many complete years have happened since then + num_full_years = time[-1].year - time[0].year + if time[-1].month == 12 and time[-1].day in range(29,31+1): + # We happen to end at the very end of a year + num_full_years += 1 + else: + # Count the complete months that have happened this year + num_extra_months = time[-1].month - 1 + # Don't bother with the hassle of considering cases where we end at + # the very end of a month. Just ignore the month. + num_months = 12*num_full_years + num_extra_months + + print 'Interpolating land mask to new grid' + mask_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, mask_roms) + # Make the interpolation strict, i.e. cells with any land in their + # interpolation neighbourhood are considered land. This way there is no + # worrying about interpolating variables on the boundary. 
+ mask_common[mask_common < 1] = 0 + + print 'Setting up ' + out_file + id = Dataset(out_file, 'w') + id.createDimension('longitude', size(lon_common)) + id.createDimension('latitude', size(lat_common)) + id.createDimension('time', None) + id.createVariable('longitude', 'f8', ('longitude')) + id.variables['longitude'].units = 'degrees' + id.variables['longitude'][:] = lon_common + id.createVariable('latitude', 'f8', ('latitude')) + id.variables['latitude'].units = 'degrees' + id.variables['latitude'][:] = lat_common + id.createVariable('time', 'f8', ('time')) + id.variables['time'].units = 'months' + id.createVariable('mask', 'f8', ('latitude', 'longitude')) + id.variables['mask'].units = '1' + id.variables['mask'][:,:] = mask_common + id.createVariable('shflux', 'f8', ('time', 'latitude', 'longitude')) + id.variables['shflux'].long_name = 'surface heat flux into ocean' + id.variables['shflux'].units = 'W/m^2' + id.createVariable('ssflux', 'f8', ('time', 'latitude', 'longitude')) + id.variables['ssflux'].long_name = 'surface virtual salinity flux into ocean' + id.variables['ssflux'].units = 'psu m/s' + id.createVariable('sustr', 'f8', ('time', 'latitude', 'longitude')) + id.variables['sustr'].long_name = 'zonal surface stress' + id.variables['sustr'].units = 'N/m^2' + id.createVariable('svstr', 'f8', ('time', 'latitude', 'longitude')) + id.variables['svstr'].long_name = 'meridional surface stress' + id.variables['svstr'].units = 'N/m^2' + id.createVariable('curl_str', 'f8', ('time', 'latitude', 'longitude')) + id.variables['curl_str'].long_name = 'curl of surface stress' + id.variables['curl_str'].units = 'N/m^3' + id.createVariable('uice', 'f8', ('time', 'latitude', 'longitude')) + id.variables['uice'].long_name = 'sea ice velocity eastward' + id.variables['uice'].units = 'm/s' + id.createVariable('vice', 'f8', ('time', 'latitude', 'longitude')) + id.variables['vice'].long_name = 'sea ice velocity northward' + id.variables['vice'].units = 'm/s' + + # Loop over months + for month in range(num_months): + print 'Processing month ' + str(month+1) + ' of ' + str(num_months) + # Write time value for this month + id.variables['time'][month] = month+1 + + print '...surface heat flux' + # Get monthly average + shflux_roms = monthly_avg_roms(roms_file, 'shflux', shape(lon_rho), month%12+1, instance=month/12+1) + # Interpolate to common grid + shflux_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, shflux_roms) + # Apply land mask + shflux = ma.masked_where(mask==0, shflux_common) + # Write to file + id.variables['shflux'][month,:,:] = shflux + + print '...surface salt flux' + # Get monthly average + ssflux_roms = monthly_avg_roms(roms_file, 'ssflux', shape(lon_rho), month%12+1, instance=month/12+1) + # Interpolate to common grid + ssflux_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, ssflux_roms) + # Apply land mask + ssflux = ma.masked_where(mask==0, ssflux_common) + # Write to file + id.variables['ssflux'][month,:,:] = ssflux + + print '...surface stress vector' + # Surface stresses + # Get monthly averages of both vector components + sustr_tmp = monthly_avg_roms(roms_file, 'sustr', u_shape, month%12+1, instance=month/12+1) + svstr_tmp = monthly_avg_roms(roms_file, 'svstr', v_shape, month%12+1, instance=month/12+1) + # Rotate to lon-lat space (note they are on the rho grid now) + sustr_roms, svstr_roms = rotate_vector_roms(sustr_tmp, svstr_tmp, angle_roms) + # Interpolate to common grid + sustr_common = interp_roms2common(lon_common, lat_common, 
lon_rho, lat_rho, sustr_roms) + svstr_common = interp_roms2common(lon_common, lat_common, lon_rho, lat_rho, svstr_roms) + # Apply land mask + sustr = ma.masked_where(mask==0, sustr_common) + svstr = ma.masked_where(mask==0, svstr_common) + # Write to file + id.variables['sustr'][month,:,:] = sustr + id.variables['svstr'][month,:,:] = svstr + + print '...curl of surface stress vector' + # Curl of surface stress = d/dx (svstr) - d/dy (sustr) + # First calculate the two derivatives + dsvstr_dx = ma.empty(shape(svstr_common)) + # Forward difference approximation + dsvstr_dx[:,:-1] = (svstr_common[:,1:] - svstr_common[:,:-1])/(2*dx[:,:-1]) + # Backward difference for the last row + dsvstr_dx[:,-1] = (svstr_common[:,-1] - svstr_common[:,-2])/(2*dx[:,-1]) + dsustr_dy = ma.empty(shape(sustr_common)) + dsustr_dy[:-1,:] = (sustr_common[1:,:] - sustr_common[:-1,:])/(2*dy[:-1,:]) + dsustr_dy[-1,:] = (sustr_common[-1,:] - sustr_common[-2,:])/(2*dy[-1,:]) + curl_str = dsvstr_dx - dsustr_dy + # Write to file + id.variables['curl_str'][month,:,:] = curl_str + + print '...sea ice velocity vector' + # Sea ice velocity (CICE variable not ROMS) + # Get monthly averages of both vector components + uice_tmp = monthly_avg_cice(cice_file, 'uvel', shape(lon_cice), month%12+1, instance=month/12+1) + vice_tmp = monthly_avg_cice(cice_file, 'vvel', shape(lon_cice), month%12+1, instance=month/12+1) + # Rotate to lon-lat space + uice_cice, vice_cice = rotate_vector_cice(uice_tmp, vice_tmp, angle_cice) + # Interpolate to common grid (note CICE grid not ROMS) + uice_common = interp_roms2common(lon_common, lat_common, lon_cice, lat_cice, uice_cice) + vice_common = interp_roms2common(lon_common, lat_common, lon_cice, lat_cice, vice_cice) + # Apply land mask + uice = ma.masked_where(mask==0, uice_common) + vice = ma.masked_where(mask==0, vice_common) + # Write to file + id.variables['uice'][month,:,:] = uice + id.variables['vice'][month,:,:] = vice + + print 'Finished' + id.close() + + +# Interpolate from the ROMS grid to the common grid. +# This works for the CICE grid too. 
+# Input: +# lon_1d = 1D array of longitude on the common grid, -180 to 180 (size n) +# lat_1d = 1D array of latitude on the common grid (size m) +# lon_roms = 2D array of longitude on the ROMS grid, -180 to 180 (size pxq) +# lat_roms = 2D array of latitude on the ROMS grid (size pxq) +# data_roms = 2D array of any variable on the ROMS grid (size pxq) +# Output: +# data_common = 2D array of data_roms interpolated to the common grid (size mxn) +def interp_roms2common (lon_1d, lat_1d, lon_roms, lat_roms, data_roms): + + # Get a 2D field of common latitude and longitude + lat_2d, lon_2d = meshgrid(lat_1d, lon_1d) + + # Make an array of all the ROMS coordinates, flattened + points = empty([size(lon_roms), 2]) + points[:,0] = ravel(lon_roms) + points[:,1] = ravel(lat_roms) + # Also flatten the ROMS data + values = ravel(data_roms) + # Now make an array of all the common grid coordinates, flattened + xi = empty([size(lon_2d), 2]) + xi[:,0] = ravel(lon_2d) + xi[:,1] = ravel(lat_2d) + # Now call griddata + result = griddata(points, values, xi) + # Un-flatten the result + data_common = reshape(result, shape(lon_2d)) + + return data_common + + +# Command-line interface +if __name__ == "__main__": + + roms_file = raw_input("Path to ROMS averages file for the entire simulation, starting 1 Jan: ") + cice_file = raw_input("Path to CICE history file containing the same time indices: ") + out_file = raw_input("Path to desired output file: ") + common_grid(roms_file, cice_file, out_file) + diff --git a/freezingpt_slice.py b/freezingpt_slice.py index f01c6a4..6b444ca 100644 --- a/freezingpt_slice.py +++ b/freezingpt_slice.py @@ -38,7 +38,7 @@ def freezingpt_slice (file_path, tstep, i_val, depth_min, colour_bounds=None, sa id.close() # Calculate freezing point as seen by supercooling code - tfr = -0.054*salt + tfr = salt/(-18.48 + salt*18.48/1000.0) #-0.054*salt # Calculate difference from freezing point deltat = temp - tfr diff --git a/gadv_frazil_bathy.py b/gadv_frazil_bathy.py new file mode 100644 index 0000000..6b24c65 --- /dev/null +++ b/gadv_frazil_bathy.py @@ -0,0 +1,90 @@ +from netCDF4 import * +from numpy import * +from matplotlib.pyplot import * + +def gadv_frazil_bathy (): + + path_up3l = '/short/m68/kaa561/advection_bitreproducible/u3_lim/' + path_up3 = '/short/m68/kaa561/advection_bitreproducible/u3/' + roms_grid = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + file_tail = 'cice/rundir/history/iceh_avg.nc' + lon_min = 25 + lon_max = 45 + lat_min = -71 + lat_max = -64 + max_frazil = 0.4 + tick_frazil = 0.2 + max_bathy = 5 + tick_bathy = 1 + + id = Dataset(path_up3l + file_tail, 'r') + lon_cice = id.variables['TLON'][50:250,0:200] + lat_cice = id.variables['TLAT'][50:250,0:200] + frazil0 = id.variables['frazil'][0,50:250,0:200] + id.close() + id = Dataset(path_up3 + file_tail, 'r') + frazil1 = id.variables['frazil'][0,50:250,0:200] + id.close() + frazil_anom = frazil1 - frazil0 + + id = Dataset(roms_grid, 'r') + lon_roms = id.variables['lon_rho'][51:251,1:201] + lat_roms = id.variables['lat_rho'][51:251,1:201] + bathy = id.variables['h'][51:251,:201]*1e-3 + mask = id.variables['mask_rho'][51:251,:201]-id.variables['mask_zice'][51:251,:201] + id.close() + bathy = ma.masked_where(mask==0, bathy) + + fig = figure(figsize=(18,8)) + ax = fig.add_subplot(1, 2, 1) + img1 = pcolor(lon_cice, lat_cice, frazil_anom, vmin=-max_frazil, vmax=max_frazil, cmap='RdBu_r') + title('a) Frazil ice formation (cm/day), annual average\nAnomalies: UP3 minus UP3L', fontsize=20) + 
xlabel('Longitude', fontsize=18) + xlim([lon_min, lon_max]) + ylim([lat_min, lat_max]) + cbaxes1 = fig.add_axes([0.05, 0.25, 0.015, 0.5]) + cbar1 = colorbar(img1, ticks=arange(-max_frazil, max_frazil+tick_frazil, tick_frazil), cax=cbaxes1, extend='both') + cbar1.ax.tick_params(labelsize=16) + lon_ticks = arange(lon_min, lon_max+5, 5) + ax.set_xticks(lon_ticks) + lon_labels = [] + for val in lon_ticks: + lon_labels.append(str(int(round(val))) + r'$^{\circ}$E') + ax.set_xticklabels(lon_labels, fontsize=16) + lat_ticks = arange(lat_min+1, lat_max+1, 2) + ax.set_yticks(lat_ticks) + lat_labels = [] + for val in lat_ticks: + lat_labels.append('') + ax.set_yticklabels(lat_labels, fontsize=16) + ax = fig.add_subplot(1, 2, 2) + img2 = pcolor(lon_roms, lat_roms, bathy, vmin=0, vmax=max_bathy, cmap='jet') + title('b) Bathymetry (km)', fontsize=20) + xlabel('Longitude', fontsize=18) + ylabel('Latitude', fontsize=18) + xlim([lon_min, lon_max]) + ylim([lat_min, lat_max]) + cbaxes2 = fig.add_axes([0.93, 0.25, 0.015, 0.5]) + cbar2 = colorbar(img2, ticks=arange(0, max_bathy+tick_bathy, tick_bathy), cax=cbaxes2, extend='max') + cbar2.ax.tick_params(labelsize=16) + lon_ticks = arange(lon_min, lon_max+5, 5) + ax.set_xticks(lon_ticks) + lon_labels = [] + for val in lon_ticks: + lon_labels.append(str(int(round(val))) + r'$^{\circ}$E') + ax.set_xticklabels(lon_labels, fontsize=16) + lat_ticks = arange(lat_min+1, lat_max+1, 2) + ax.set_yticks(lat_ticks) + lat_labels = [] + for val in lat_ticks: + lat_labels.append(str(int(round(-val))) + r'$^{\circ}$S') + ax.set_yticklabels(lat_labels, fontsize=16) + + #fig.show() + fig.savefig('kn_fig3.png') + + +if __name__ == "__main__": + + gadv_frazil_bathy() + diff --git a/gadv_frazil_thickness.py b/gadv_frazil_thickness.py new file mode 100644 index 0000000..32f7e95 --- /dev/null +++ b/gadv_frazil_thickness.py @@ -0,0 +1,131 @@ +from netCDF4 import * +from numpy import * +from matplotlib.pyplot import * + +def gadv_frazil_thickness (): + + path_up3l = '/short/m68/kaa561/advection_bitreproducible/u3_lim/' + path_up3 = '/short/m68/kaa561/advection_bitreproducible/u3/' + file_tail_avg = 'cice/rundir/history/iceh_avg.nc' + file_tail_23aug = 'cice/rundir/history/iceh.1992-08-23.nc' + + max_frazil = 0.5 + tick_frazil = 0.25 + max_thickness = 1 + tick_thickness = 0.5 + + # Degrees to radians conversion factor + deg2rad = pi/180. 
+ # Centre of missing circle in grid + lon_c = 50 + lat_c = -83 + # Radius of missing circle + radius = 10.5 + # Boundary of regular grid to embed circle in + circle_bdry = -70+90 + + lon_ticks = array([-120, -60, 60, 120, 180]) + lat_ticks = array([-58, -56, -54, -55.5, -56]) + lon_labels = [r'120$^{\circ}$W', r'60$^{\circ}$W', r'60$^{\circ}$E', r'120$^{\circ}$E', r'180$^{\circ}$'] + lon_rot = [-60, 60, -60, 60, 0] + + id = Dataset(path_up3l + file_tail_avg, 'r') + frazil_tmp = id.variables['frazil'][0,:275,:] + lon_tmp = id.variables['TLON'][:275,:] + lat_tmp = id.variables['TLAT'][:275,:] + mask_tmp = id.variables['tmask'][:275,:] + id.close() + # Wrap periodic boundary so there isn't a gap in the plot + lon = ma.empty([size(lon_tmp,0), size(lon_tmp,1)+1]) + lat = ma.empty([size(lat_tmp,0), size(lat_tmp,1)+1]) + mask = ma.empty([size(mask_tmp,0), size(mask_tmp,1)+1]) + frazil0 = ma.empty([size(frazil_tmp,0), size(frazil_tmp,1)+1]) + lon[:,:-1] = lon_tmp + lon[:,-1] = lon_tmp[:,0] + lat[:,:-1] = lat_tmp + lat[:,-1] = lat_tmp[:,0] + mask[:,:-1] = mask_tmp + mask[:,-1] = mask_tmp[:,0] + frazil0[:,:-1] = frazil_tmp + frazil0[:,-1] = frazil_tmp[:,0] + + id = Dataset(path_up3 + file_tail_avg, 'r') + frazil_tmp = id.variables['frazil'][0,:275,:] + id.close() + frazil1 = ma.empty([size(frazil_tmp,0), size(frazil_tmp,1)+1]) + frazil1[:,:-1] = frazil_tmp + frazil1[:,-1] = frazil_tmp[:,0] + frazil_anom = frazil1 - frazil0 + + id = Dataset(path_up3l + file_tail_23aug, 'r') + thickness_tmp = id.variables['aice'][0,:275,:]*id.variables['hi'][0,:275,:] + id.close() + thickness0 = ma.empty([size(thickness_tmp,0), size(thickness_tmp,1)+1]) + thickness0[:,:-1] = thickness_tmp + thickness0[:,-1] = thickness_tmp[:,0] + + id = Dataset(path_up3 + file_tail_23aug, 'r') + thickness_tmp = id.variables['aice'][0,:275,:]*id.variables['hi'][0,:275,:] + id.close() + thickness1 = ma.empty([size(thickness_tmp,0), size(thickness_tmp,1)+1]) + thickness1[:,:-1] = thickness_tmp + thickness1[:,-1] = thickness_tmp[:,0] + thickness_anom = thickness1 - thickness0 + + # Land mask + land = ma.masked_where(mask==1, mask) + + # Circumpolar x and y coordinates for plotting + x = -(lat+90)*cos(lon*deg2rad+pi/2) + y = (lat+90)*sin(lon*deg2rad+pi/2) + # Coordinates of centre of missing circle + x_c = -(lat_c+90)*cos(lon_c*deg2rad+pi/2) + y_c = (lat_c+90)*sin(lon_c*deg2rad+pi/2) + # Longitude labels + x_ticks = -(lat_ticks+90)*cos(lon_ticks*deg2rad+pi/2) + y_ticks = (lat_ticks+90)*sin(lon_ticks*deg2rad+pi/2) + # Regular grid to embed missing circle in + x_reg, y_reg = meshgrid(linspace(-circle_bdry, circle_bdry, num=100), linspace(-circle_bdry, circle_bdry, num=100)) + # Mask everything except the circle out of the regular grid + land_circle = zeros(shape(x_reg)) + land_circle = ma.masked_where(sqrt((x_reg-x_c)**2 + (y_reg-y_c)**2) > radius, land_circle) + + fig = figure(figsize=(18,8)) + ax = fig.add_subplot(1, 2, 1, aspect='equal') + # First shade land + contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) + # Fill in missing circle + contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) + img1 = pcolor(x, y, frazil_anom, vmin=-max_frazil, vmax=max_frazil, cmap='RdBu_r') + # Add longitude labels + for i in range(size(x_ticks)): + text(x_ticks[i], y_ticks[i], lon_labels[i], ha='center', rotation=lon_rot[i], fontsize=15) + axis('off') + title('a) Frazil ice formation (cm/day), annual average', fontsize=20) + cbaxes1 = fig.add_axes([0.07, 0.25, 0.015, 0.5]) + cbar1 = colorbar(img1, ticks=arange(-max_frazil, 
max_frazil+tick_frazil, tick_frazil), cax=cbaxes1, extend='both') + cbar1.ax.tick_params(labelsize=16) + + ax = fig.add_subplot(1, 2, 2, aspect='equal') + contourf(x, y, land, 1, colors=(('0.6', '0.6', '0.6'))) + contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) + img2 = pcolor(x, y, thickness_anom, vmin=-max_thickness, vmax=max_thickness, cmap='RdBu_r') + for i in range(size(x_ticks)): + text(x_ticks[i], y_ticks[i], lon_labels[i], ha='center', rotation=lon_rot[i], fontsize=15) + axis('off') + title('b) Effective sea ice thickness (m), 23 August', fontsize=20) + cbaxes2 = fig.add_axes([0.93, 0.25, 0.015, 0.5]) + cbar2 = colorbar(img2, ticks=arange(-max_thickness, max_thickness+tick_thickness, tick_thickness), cax=cbaxes2, extend='both') + cbar2.ax.tick_params(labelsize=16) + + suptitle('Anomalies: UP3 minus UP3L', fontsize=26) + subplots_adjust(wspace=0.05) + + #fig.show() + fig.savefig('kn_fig2.png') + + +if __name__ == "__main__": + + gadv_frazil_thickness() + diff --git a/gadv_freezingpt_slice.py b/gadv_freezingpt_slice.py new file mode 100644 index 0000000..b64b07c --- /dev/null +++ b/gadv_freezingpt_slice.py @@ -0,0 +1,88 @@ +from netCDF4 import Dataset +from numpy import * +from matplotlib.pyplot import * +from calc_z import * + +def gadv_freezingpt_slice (): + + # Path to ocean history file + file_path = '/short/m68/kaa561/advection_bitreproducible/u3/ocean_his_7aug.nc' + # Timestep to plot + tstep = 1 #220 + # i-index to plot (1-based) + i_val = 10 + # Deepest depth to plot + depth_min = -100 + # Bounds on colour scale + scale_max = 0.08 + scale_tick = 0.04 + # Bounds on latitudes to plot + lat_min = -70.3 + lat_max = -66.5 + save = True + fig_name = 'kn_fig1.png' + + # Grid parameters + theta_s = 4.0 + theta_b = 0.9 + hc = 40 + N = 31 + + # Read temperature, salinity, and grid variables + id = Dataset(file_path, 'r') + temp = id.variables['temp'][tstep-1,:,:-15,i_val-1] + salt = id.variables['salt'][tstep-1,:,:-15,i_val-1] + h = id.variables['h'][:-15,:] + zice = id.variables['zice'][:-15,:] + # Sea surface height is time-dependent + zeta = id.variables['zeta'][tstep-1,:-15,:] + lon_2d = id.variables['lon_rho'][:-15,:] + lat_2d = id.variables['lat_rho'][:-15,:] + id.close() + + # Calculate freezing point as seen by supercooling code + tfr = salt/(-18.48 + salt*18.48/1000.0) + # Calculate difference from freezing point + deltat = temp - tfr + + # Get a 3D array of z-coordinates; sc_r and Cs_r are unused in this script + z_3d, sc_r, Cs_r = calc_z(h, zice, theta_s, theta_b, hc, N, zeta) + # Select depth and latitude at the given i-index + z = z_3d[:,:,i_val-1] + lat = tile(lat_2d[:,i_val-1], (N,1)) + + # Plot (pcolor not contour to show what each individual cell is doing) + fig, ax = subplots(figsize=(12,6)) + pcolor(lat, z, deltat, vmin=-scale_max, vmax=scale_max, cmap='RdBu_r') + cbar = colorbar(extend='both', ticks=arange(-scale_max, scale_max+scale_tick, scale_tick)) + cbar.ax.tick_params(labelsize=14) + title(r'Difference from freezing point ($^{\circ}$C) near 8$^{\circ}$E: UP3', fontsize=18) + xlabel('Latitude', fontsize=16) + ylabel('Depth (m)', fontsize=16) + xlim([lat_min, lat_max]) + ylim([depth_min, 0]) + + lat_ticks = arange(lat_min+0.5, lat_max+0.5, 1) + xticks(lat_ticks) + lat_labels = [] + for val in lat_ticks: + lat_labels.append(str(int(round(-val))) + r'$^{\circ}$S') + ax.set_xticklabels(lat_labels, fontsize=14) + depth_ticks = arange(depth_min, 0+25, 25) + yticks(depth_ticks) + depth_labels = [] + for val in depth_ticks: + 
depth_labels.append(str(int(round(-val)))) + ax.set_yticklabels(depth_labels, fontsize=14) + + # Finished + if save: + fig.savefig(fig_name) + else: + fig.show() + + +# Command-line interface +if __name__ == "__main__": + + gadv_freezingpt_slice() diff --git a/iceberg_melt.py b/iceberg_melt.py index 7434193..879ab3f 100644 --- a/iceberg_melt.py +++ b/iceberg_melt.py @@ -10,16 +10,18 @@ def iceberg_melt (out_file): # Naming conventions for iceberg files - iceberg_head = '/short/m68/kaa561/ROMS-CICE-MCT/data/originals/MartinAdcroft2010_iceberg_meltfluxes/icebergs.1861-1960.' + iceberg_head = '/short/m68/kaa561/metroms_iceshelf/data/originals/MartinAdcroft2010_iceberg_meltfluxes/icebergs.1861-1960.' iceberg_tail = '.melt.nc' # Iceberg grid file - iceberg_grid = '/short/m68/kaa561/ROMS-CICE-MCT/data/originals/MartinAdcroft2010_iceberg_meltfluxes/icebergs.static.nc' + iceberg_grid = '/short/m68/kaa561/metroms_iceshelf/data/originals/MartinAdcroft2010_iceberg_meltfluxes/icebergs.static.nc' # ROMS grid file - roms_grid ='/short/m68/kaa561/ROMS-CICE-MCT/apps/common/grid/circ30S_quarterdegree_good.nc' + roms_grid ='/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' # Density of freshwater rho_w = 1e3 # Seconds in 12 hours seconds_per_12h = 60.*60.*12. + # Days per month (neglect leap years) + days_per_month = array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]) # Read ROMS grid id = Dataset(roms_grid, 'r') @@ -78,10 +80,9 @@ def iceberg_melt (out_file): melt_roms = interp_iceberg2roms(melt_iceberg, lon_iceberg, lat_iceberg, lon_roms, lat_roms) # Convert to m per 12 h melt_roms = melt_roms/rho_w*seconds_per_12h - # Calculate time values centered in the middle of each month - time = 365.25/12*(month+0.5) - # Save to output file - out_id.variables['time'][month] = time + # Calculate time values: 15th of this month at midnight + out_id.variables['time'][month] = sum(days_per_month[0:month]) + 14 + # Save data out_id.variables['icebergs'][month,:,:] = melt_roms out_id.close() diff --git a/monthly_avg_cice.py b/monthly_avg_cice.py index e743d09..014d0f7 100644 --- a/monthly_avg_cice.py +++ b/monthly_avg_cice.py @@ -1,7 +1,7 @@ from netCDF4 import Dataset, num2date from numpy import * -def monthly_avg_cice (file_path, var, shape, month): +def monthly_avg_cice (file_path, var, shape, month, instance=-1): # Starting and ending days in each month # Assume no leap years, we'll fix this later if we need @@ -22,23 +22,45 @@ def monthly_avg_cice (file_path, var, shape, month): next_month = mod(month+1,12) - end_t = -1 - for t in range(size(cice_time)-1, -1, -1): - if cice_time[t].month-1 == next_month and cice_time[t].day in range(start_day[next_month], start_day[next_month]+5): - end_t = t - break - if end_t == -1: - print 'Error: ' + cice_file + ' does not contain a complete ' + month_name[month] - return - - start_t = -1 - for t in range(end_t, -1, -1): - if cice_time[t].month-1 == month and cice_time[t].day in range(start_day[month]+1, start_day[month]+6): - start_t = t - break - if start_t == -1: - print 'Error: ' + cice_file + ' does not contain a complete ' + month_name[month] - return + if instance == -1: + # Default: find the last instance of this month + end_t = -1 + for t in range(size(cice_time)-1, -1, -1): + if cice_time[t].month-1 == next_month and cice_time[t].day in range(start_day[next_month], start_day[next_month]+5): + end_t = t + break + if end_t == -1: + print 'Error: ' + cice_file + ' does not contain a complete ' + month_name[month] + return + start_t = -1 + 
for t in range(end_t, -1, -1): + if cice_time[t].month-1 == month and cice_time[t].day in range(start_day[month]+1, start_day[month]+6): + start_t = t + break + if start_t == -1: + print 'Error: ' + cice_file + ' does not contain a complete ' + month_name[month] + return + else: + # Find the given instance of the month + count = 0 + start_t = -1 + for t in range(size(time)): + if cice_time[t].month-1 == month and cice_time[t].day in range(start_day[month]+1, start_day[month]+6): + count += 1 + if count == instance: + start_t = t + break + if start_t == -1: + print 'Error: ' + file_path + ' does not contain ' + str(instance) + ' ' + month_name[month] + 's' + return + end_t = -1 + for t in range(start_t+1, size(time)): + if cice_time[t].month-1 == next_month and cice_time[t].day in range(start_day[next_month], start_day[next_month]+5): + end_t = t + break + if end_t == -1: + print 'Error: ' + file_path + ' does not contain ' + str(instance) + ' ' + month_name[month] + 's' + return leap_year = False cice_year = cice_time[end_t].year diff --git a/monthly_avg_roms.py b/monthly_avg_roms.py index 7614c32..3944770 100644 --- a/monthly_avg_roms.py +++ b/monthly_avg_roms.py @@ -1,7 +1,7 @@ from netCDF4 import Dataset, num2date from numpy import * -def monthly_avg_roms (file_path, var, shape, month): +def monthly_avg_roms (file_path, var, shape, month, instance=-1): # Starting and ending days in each month # Assume no leap years, we'll fix this later if we need @@ -23,29 +23,59 @@ def monthly_avg_roms (file_path, var, shape, month): next_month = mod(month+1,12) prev_month = mod(month-1,12) - end_t = -1 - for t in range(size(time)-1, -1, -1): - if time[t].month-1 == month and time[t].day in range(end_day[month]-2, end_day[month]+1): - end_t = t - break - if time[t].month-1 == next_month and time[t].day in range(start_day[next_month], start_day[next_month]+2): - end_t = t - break - if end_t == -1: - print 'Error: ' + file_path + ' does not contain a complete ' + month_name[month] - return - - start_t = -1 - for t in range(end_t, -1, -1): - if time[t].month-1 == prev_month and time[t].day in range(end_day[prev_month]-1, end_day[prev_month]+1): - start_t = t - break - if time[t].month-1 == month and time[t].day in range(start_day[month], start_day[month]+3): - start_t = t - break - if start_t == -1: - print 'Error: ' + file_path + ' does not contain a complete ' + month_name[month] - return + if instance == -1: + # Default: find the last instance of this month + end_t = -1 + for t in range(size(time)-1, -1, -1): + if time[t].month-1 == month and time[t].day in range(end_day[month]-2, end_day[month]+1): + end_t = t + break + if time[t].month-1 == next_month and time[t].day in range(start_day[next_month], start_day[next_month]+2): + end_t = t + break + if end_t == -1: + print 'Error: ' + file_path + ' does not contain a complete ' + month_name[month] + return + start_t = -1 + for t in range(end_t, -1, -1): + if time[t].month-1 == prev_month and time[t].day in range(end_day[prev_month]-1, end_day[prev_month]+1): + start_t = t + break + if time[t].month-1 == month and time[t].day in range(start_day[month], start_day[month]+3): + start_t = t + break + if start_t == -1: + print 'Error: ' + file_path + ' does not contain a complete ' + month_name[month] + return + else: + # Find the given instance of the month + count = 0 + start_t = -1 + for t in range(size(time)): + if time[t].month-1 == prev_month and time[t].day in range(end_day[prev_month]-1, end_day[prev_month]+1): + count += 1 + if count == instance: + 
start_t = t + break + if time[t].month-1 == month and time[t].day in range(start_day[month], start_day[month]+3): + count += 1 + if count == instance: + start_t = t + break + if start_t == -1: + print 'Error: ' + file_path + ' does not contain ' + str(instance) + ' ' + month_name[month] + 's' + return + end_t = -1 + for t in range(start_t+1, size(time)): + if time[t].month-1 == month and time[t].day in range(end_day[month]-2, end_day[month]+1): + end_t = t + break + if time[t].month-1 == next_month and time[t].day in range(start_day[next_month], start_day[next_month]+2): + end_t = t + break + if end_t == -1: + print 'Error: ' + file_path + ' does not contain ' + str(instance) + ' ' + month_name[month] + 's' + return leap_year = False roms_year = time[end_t].year diff --git a/romscice_gpcp.py b/romscice_gpcp.py new file mode 100644 index 0000000..c5c2590 --- /dev/null +++ b/romscice_gpcp.py @@ -0,0 +1,113 @@ +from netCDF4 import Dataset +from numpy import * +from scipy.interpolate import RectBivariateSpline + +def romscice_gpcp (year): + + grid_file = '/short/m68/kaa561/metroms_iceshelf/apps/common/grid/circ30S_quarterdegree.nc' + gpcp_head = '/short/m68/kaa561/gpcp/gpcp_cdr_v23rB1_y' + str(year) + '_m' + output_file = '/short/m68/kaa561/metroms_iceshelf/data/GPCP/precip_' + str(year) + '_monthly.nc' + conv_factor = 5e-4 # mm/day to m/12h + days_per_month = array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]) + + if year % 4 == 0: + days_per_month[1] = 29 + + # Read ROMS latitude and longitude + id = Dataset(grid_file, 'r') + lon_roms = id.variables['lon_rho'][:,:] + lat_roms = id.variables['lat_rho'][:,:] + id.close() + num_lon = size(lon_roms, 1) + num_lat = size(lon_roms, 0) + + # Read GPCP latitude and longitude + id = Dataset(gpcp_head + '01.nc', 'r') + lon_gpcp_raw = id.variables['longitude'][:] + lat_gpcp = id.variables['latitude'][:] + id.close() + + # Wrap the GPCP longitude axis around 360 so there is no gap + lon_gpcp = zeros(size(lon_gpcp_raw)+2) + lon_gpcp[0] = lon_gpcp_raw[-1]-360 + lon_gpcp[1:-1] = lon_gpcp_raw + lon_gpcp[-1] = lon_gpcp_raw[0]+360 + + # Find number of days between 1 Jan 1992 and 1 Jan this year + time_start = (year-1992)*365.0 + ceil((year-1992)/4.0) + + # Set up output file + out_id = Dataset(output_file, 'w') + # Define dimensions + out_id.createDimension('xi_rho', num_lon) + out_id.createDimension('eta_rho', num_lat) + out_id.createDimension('time', None) + out_id.createVariable('lon_rho', 'f8', ('eta_rho', 'xi_rho')) + out_id.variables['lon_rho'].long_name = 'longitude of rho-points' + out_id.variables['lon_rho'].units = 'degree_east' + out_id.variables['lon_rho'][:,:] = lon_roms + out_id.createVariable('lat_rho', 'f8', ('eta_rho', 'xi_rho')) + out_id.variables['lat_rho'].long_name = 'latitude of rho-points' + out_id.variables['lat_rho'].units = 'degree_north' + out_id.variables['lat_rho'][:,:] = lat_roms + out_id.createVariable('time', 'f8', ('time')) + out_id.variables['time'].units = 'days since 1992-01-01 00:00:0.0' + out_id.createVariable('rain', 'f8', ('time', 'eta_rho', 'xi_rho')) + out_id.variables['rain'].long_name = 'rain fall rate' + out_id.variables['rain'].units = 'm_per_12hr' + + # Loop over months + for month in range(12): + # Save output time value: 15th of this month at midnight + out_id.variables['time'][month] = time_start + sum(days_per_month[0:month]) + 14 + # Construct path to GPCP file for this month + if month+1 < 10: + month_str = '0' + str(month+1) + else: + month_str = str(month+1) + in_id = Dataset(gpcp_head + month_str + 
'.nc', 'r') + # Read GPCP precip data and convert to m/12h + precip_raw = in_id.variables['precip'][:,:]*conv_factor + in_id.close() + # Wrap around periodic boundary + precip = zeros([size(precip_raw,0), size(precip_raw,1)+2]) + precip[:,0] = precip_raw[:,-1] + precip[:,1:-1] = precip_raw + precip[:,-1] = precip_raw[:,0] + # Interpolate to ROMS grid + rain = interp_gpcp2roms(precip, lon_gpcp, lat_gpcp, lon_roms, lat_roms) + # Make sure there are no negative values + rain[rain < 0] = 0.0 + # Save + out_id.variables['rain'][month,:,:] = rain + out_id.close() + + +def interp_gpcp2roms (A, lon_gpcp, lat_gpcp, lon_roms, lat_roms): + + # First build a function to approximate A with 2D splines + interp_function = RectBivariateSpline(lat_gpcp, lon_gpcp, A) + B = zeros(shape(lon_roms)) + # Call this function for each grid point individually - if you try to do + # it all at once it throws a MemoryError + for i in range(size(lon_roms,1)): + for j in range(size(lon_roms,0)): + B[j,i] = interp_function(lat_roms[j,i], lon_roms[j,i]) + + # Enforce periodic boundary + B[:,0] = B[:,-2] + B[:,-1] = B[:,1] + + return B + + +# Command-line interface +if __name__ == "__main__": + + first_year = 1992 + last_year = 2005 + for year in range(first_year, last_year+1): + print 'Processing ' + str(year) + romscice_gpcp(year) + +
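
---
Note (not part of the patch): the new common_grid.py interpolates curvilinear ROMS/CICE fields onto a regular lat-lon grid by flattening the source grid into scattered (lon, lat) points, calling scipy.interpolate.griddata, and reshaping back onto the target grid (see interp_roms2common above). The snippet below is a minimal standalone sketch of that same pattern using a synthetic source grid, field, and a coarser 1-degree target resolution as placeholders; it is an illustration of the technique, not a substitute for the function in the patch.

    # Standalone sketch of the flatten -> griddata -> reshape interpolation
    # pattern used by interp_roms2common. Source grid and data are synthetic.
    from numpy import *
    from scipy.interpolate import griddata

    # Synthetic curvilinear-style source grid and a smooth test field
    lon_src, lat_src = meshgrid(linspace(-180, 180, 73), linspace(-80, -50, 31))
    data_src = cos(lat_src*pi/180.0)*sin(lon_src*pi/180.0)

    # Regular 1-degree target grid (the real script uses 0.25 degrees)
    lon_1d = arange(-180, 181, 1.0)
    lat_1d = arange(-80, -49, 1.0)
    lat_2d, lon_2d = meshgrid(lat_1d, lon_1d)

    # Flatten the source coordinates and data into scattered points
    points = empty([size(lon_src), 2])
    points[:,0] = ravel(lon_src)
    points[:,1] = ravel(lat_src)
    values = ravel(data_src)
    # Flatten the target coordinates the same way
    xi = empty([size(lon_2d), 2])
    xi[:,0] = ravel(lon_2d)
    xi[:,1] = ravel(lat_2d)
    # Linear scattered-data interpolation, then un-flatten the result
    result = griddata(points, values, xi)
    data_common = reshape(result, shape(lon_2d))
    print shape(data_common)

griddata with its default linear method returns NaN outside the convex hull of the source points, which is why common_grid.py also interpolates the land mask and then treats any cell whose interpolated mask is less than 1 as land before applying it to the interpolated fields.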