From 6bf7dbb8eeb742e3835adb86e82b3ce51fe9acdc Mon Sep 17 00:00:00 2001
From: Kaitlin Alexander
Date: Thu, 4 Aug 2016 11:00:15 +1000
Subject: [PATCH] Added iceberg fluxes file and a few other small things

---
 add_iceberg_melt.py       | 111 ++++++++++++++++++++++++++++++++++++++
 aice_animation.py         |   6 +--
 depoorter_data            |  27 ++++++++++
 file_guide.txt            |  23 +++++++-
 ismr_plot.py              |  10 ++--
 plot_volume.py            |   2 +-
 repeat_forcing_monthly.py |  80 +++++++++++++++++++++++++++
 romscice_ini_woa.py       |  88 +++++++++++++++++++++++-------
 romscice_tides.py         |  16 +++---
 9 files changed, 324 insertions(+), 39 deletions(-)
 create mode 100644 add_iceberg_melt.py
 create mode 100644 depoorter_data
 create mode 100644 repeat_forcing_monthly.py

diff --git a/add_iceberg_melt.py b/add_iceberg_melt.py
new file mode 100644
index 0000000..c42ede0
--- /dev/null
+++ b/add_iceberg_melt.py
@@ -0,0 +1,111 @@
+from netCDF4 import Dataset
+from numpy import *
+from scipy.interpolate import griddata
+
+# Read Martin and Adcroft's monthly climatology of freshwater fluxes
+# from iceberg melt, and add to the precipitation fields used by
+# ROMS and CICE. I suppose I should technically use a separate field
+# such as runoff, but this is easier and has the same effect!
+# Input:
+# file = path to ROMS FC forcing file, containing one year of monthly
+#        averages for precipitation ("rain") in m/12h
+def add_iceberg_melt (file):
+
+    # Naming conventions for iceberg files
+    iceberg_head = '/short/m68/kaa561/ROMS-CICE-MCT/data/MartinAdcroft2010_iceberg_meltfluxes/icebergs.1861-1960.'
+    iceberg_tail = '.melt.nc'
+    # Iceberg grid file
+    iceberg_grid = '/short/m68/kaa561/ROMS-CICE-MCT/data/MartinAdcroft2010_iceberg_meltfluxes/icebergs.static.nc'
+    # ROMS grid file
+    roms_grid = '/short/m68/kaa561/ROMS-CICE-MCT/apps/common/grid/circ30S_quarterdegree_10m.nc'
+    # Density of freshwater
+    rho_w = 1e3
+    # Seconds in 12 hours
+    seconds_per_12h = 60.*60.*12.
+
+    # Read ROMS grid
+    id = Dataset(roms_grid, 'r')
+    lon_roms = id.variables['lon_rho'][:,:]
+    lat_roms = id.variables['lat_rho'][:,:]
+    id.close()
+
+    # Read the iceberg grid
+    id = Dataset(iceberg_grid, 'r')
+    lon_iceberg = id.variables['lon'][:,:]
+    lat_iceberg = id.variables['lat'][:,:]
+    id.close()
+
+    # Make sure longitudes are between 0 and 360
+    index = lon_iceberg < 0
+    lon_iceberg[index] = lon_iceberg[index] + 360
+
+    # Loop over months
+    for month in range(12):
+        print 'Processing month ' + str(month+1)
+        # Reconstruct the filename of this month's iceberg data
+        if month+1 < 10:
+            month_str = '0' + str(month+1)
+        else:
+            month_str = str(month+1)
+        iceberg_file = iceberg_head + month_str + iceberg_tail
+        # Read iceberg freshwater flux in kg/m^2/s
+        id = Dataset(iceberg_file, 'r')
+        melt_iceberg = id.variables['melt'][0,:,:]
+        id.close()
+        # Interpolate to ROMS grid
+        melt_roms = interp_iceberg2roms(melt_iceberg, lon_iceberg, lat_iceberg, lon_roms, lat_roms)
+        # Convert to m per 12 h
+        melt_roms = melt_roms/rho_w*seconds_per_12h
+        # Add to precipitation field for this month
+        id = Dataset(file, 'a')
+        id.variables['rain'][month,:,:] = id.variables['rain'][month,:,:] - melt_roms
+        id.close()
+
+
+# Given a field A on the iceberg grid, linearly interpolate to the ROMS grid.
+# Input:
+# A = 2D array (m x n) containing data on the iceberg grid
+# lon_iceberg = 2D array (m x n) containing longitude values on the
+#               iceberg grid, from 0 to 360
+# lat_iceberg = 2D array (m x n) containing latitude values on the
+#               iceberg grid
+# lon_roms = 2D array (p x q) containing longitude values on the ROMS grid,
+#            from 0 to 360
+# lat_roms = 2D array (p x q) containing latitude values on the ROMS grid
+# Output:
+# A_interp = 2D array (p x q) containing A interpolated to the ROMS grid
+def interp_iceberg2roms (A, lon_iceberg, lat_iceberg, lon_roms, lat_roms):
+
+    # Set up an nx2 array containing the coordinates of each point in the
+    # iceberg grid
+    points = empty([size(lon_iceberg), 2])
+    points[:,0] = ravel(lon_iceberg)
+    points[:,1] = ravel(lat_iceberg)
+    # Also flatten the data
+    values = ravel(A)
+    # Now set up an mx2 array containing the coordinates of each point
+    # we want to interpolate to, in the ROMS grid
+    xi = empty([size(lon_roms), 2])
+    xi[:,0] = ravel(lon_roms)
+    xi[:,1] = ravel(lat_roms)
+    # Now call griddata; fill out-of-bounds values (such as under ice shelves)
+    # with zeros
+    result = griddata(points, values, xi, method='linear', fill_value=0.0)
+    # Un-flatten the result
+    A_interp = reshape(result, shape(lon_roms))
+
+    return A_interp
+
+
+if __name__ == "__main__":
+
+    file = raw_input('Path to ROMS input FC file containing one year of monthly data: ')
+    add_iceberg_melt(file)
+
+
+
+
+
+
+
+
diff --git a/aice_animation.py b/aice_animation.py
index 33bd370..fc9876b 100644
--- a/aice_animation.py
+++ b/aice_animation.py
@@ -13,9 +13,9 @@
 # Directory containing CICE output files
 directory = '/short/y99/kaa561/roms_spinup_newest/cice/'
 # Number of time indices in each file
-num_ts = [18, 252, 108]
+num_ts = [144]
 # File number to start with for the animation (1-based)
-start_file = 3
+start_file = 1
 # Degrees to radians conversion factor
 deg2rad = pi/180
 # Names of each month for making titles
@@ -65,6 +65,6 @@ def animate(i):
     return img

 # Animate once every time index from start_file to the last file
-anim = FuncAnimation(fig, func=animate, frames=range(35,108)) #sum(array(num_ts[start_file-1:])))
+anim = FuncAnimation(fig, func=animate, frames=range(71,144)) #sum(array(num_ts[start_file-1:])))
 # Save as an mp4 with one frame per second
 anim.save('aice.mp4', fps=1)
diff --git a/depoorter_data b/depoorter_data
new file mode 100644
index 0000000..5f0e1c4
--- /dev/null
+++ b/depoorter_data
@@ -0,0 +1,27 @@
+Name            Mass loss (Gt/y)   Melt rate (m/y)
+Amery           39 +/- 21          0.65 +/- 0.35
+West            26 +/- 13          1.65 +/- 0.82
+Shackleton      76 +/- 23          2.20 +/- 0.67
+Totten          64 +/- 12          9.89 +/- 1.92
+Moscow Uni      28 +/- 7           4.93 +/- 1.32
+Mertz           5 +/- 4            0.87 +/- 0.79
+Ross            34 +/- 25          0.07 +/- 0.05
+Sulzberger      28 +/- 6           2.16 +/- 0.44
+Getz            136 +/- 23         4.09 +/- 0.68
+Thwaites        69 +/- 18          15.22 +/- 3.87
+Pine Island     95 +/- 14          15.96 +/- 2.38
+Abbot           86 +/- 22          2.72 +/- 0.7
+George VI       144 +/- 42         2.88 +/- 0.83
+Filchner-Ronne  50 +/- 40          0.12 +/- 0.09
+Brunt-RL        26 +/- 16          0.33 +/- 0.20
+Jelbart-Fimbul  24 +/- 13          0.52 +/- 0.27
+Total           1454 +/- 174       0.94 +/- 0.11
+
+Notes:
+Mertz a fair bit lower
+Ross a bit lower
+Sulzberger higher
+Thwaites lower
+Abbot higher
+R-F waaay lower
+Brunt-Riiser-Larsen much higher
\ No newline at end of file
diff --git a/file_guide.txt b/file_guide.txt
index aa93b8c..0998937 100644
--- a/file_guide.txt
+++ b/file_guide.txt
@@ -74,6 +74,16 @@ nbdry_grid_hack.py: Modify the ROMS grid so that the northernmost 15 rows
                     To run: Open python or ipython and type "run nbdry_grid_hack.py".

+add_iceberg_melt.py: Read Martin and Adcroft's monthly climatology of + freshwater fluxes from iceberg melt, and add to the + precipitation fields used by ROMS and CICE. + To run: Open python or ipython and type + "run add_iceberg_melt.py". You will be prompted + for the path to a ROMS-CICE forcing file + containing one year of monthly averaged + precipitation; the iceberg fluxes will be added + to this. + ***POST-PROCESSING DIAGNOSTICS*** @@ -792,8 +802,17 @@ repeat_forcing.py: If you are spinning up ROMS-CICE-MCT using one repeating year March 1st records from the same time of day. To run: Edit the user parameters near the bottom of the file (paths to forcing files, years, variable names, etc) - then open python or ipython and type "run - repeat_forcing.py". + then open python or ipython and type + "run repeat_forcing.py". + +repeat_forcing_monthly.py: Same as repeat_forcing but for monthly forcing fields + rather than sub-daily. No interpolation is needed, + just modifying the time axis to put all data on the + 15th of each month, with a cycle length of 4 years. + To run: Edit the user parameters near the bottom of + the file (paths to forcing files, years) + then open python or ipython and type + "run repeat_forcing_monthly.py". romscice_ini_iceshelf.py: Given a ROMS initial condition file from ECCO2 data (eg created with romscice_ini.py), overwrite the diff --git a/ismr_plot.py b/ismr_plot.py index 777d4e7..fdca12a 100644 --- a/ismr_plot.py +++ b/ismr_plot.py @@ -42,12 +42,12 @@ def ismr_plot (file_path, save=False, fig_name=None): # Set colour map # Values for change points - cmap_vals = array([amin(ismr), 0, 1, 2, 5, 8]) - # Map to 0-1 - cmap_vals_norm = (cmap_vals - amin(ismr))/(8 - amin(ismr)) + cmap_vals = array([-0.5, 0, 1, 2, 5, 8]) # Colours for change points # (blue, white, yellow-orange, red-orange, dark red, purple) cmap_colors = [(0.26, 0.45, 0.86), (1, 1, 1), (1, 0.9, 0.4), (0.99, 0.59, 0.18), (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)] + # Map to 0-1 + cmap_vals_norm = (cmap_vals + 0.5)/(8 + 0.5) # Combine into a list cmap_list = [] for i in range(size(cmap_vals)): @@ -55,7 +55,7 @@ def ismr_plot (file_path, save=False, fig_name=None): # Make colour map mf_cmap = LinearSegmentedColormap.from_list('melt_freeze', cmap_list) # Set levels - lev = linspace(amin(ismr), 8, num=100) + lev = linspace(-0.5, 8, num=100) # Get land/zice mask open_ocn = copy(mask_rho) @@ -83,7 +83,7 @@ def ismr_plot (file_path, save=False, fig_name=None): # Fill in the missing circle contourf(x_reg, y_reg, land_circle, 1, colors=(('0.6', '0.6', '0.6'))) # Now shade the melt rate - contourf(x, y, ismr, lev, cmap=mf_cmap, extend='max') + contourf(x, y, ismr, lev, cmap=mf_cmap, extend='both') cbar = colorbar(ticks=arange(0,8+1,1)) cbar.ax.tick_params(labelsize=20) # Add a black contour for the ice shelf front diff --git a/plot_volume.py b/plot_volume.py index d01c14f..32faa11 100644 --- a/plot_volume.py +++ b/plot_volume.py @@ -1,6 +1,6 @@ from re import split from numpy import * -from matplotlib.pyplot import plot,xlabel,ylabel,clf,show +from matplotlib.pyplot import * # Read the volume values written in ocean.log each timestep # and return them as a 1D array. diff --git a/repeat_forcing_monthly.py b/repeat_forcing_monthly.py new file mode 100644 index 0000000..a3fdc42 --- /dev/null +++ b/repeat_forcing_monthly.py @@ -0,0 +1,80 @@ +from netCDF4 import Dataset +from numpy import * + +# Convert a 1-year monthly dataset into an annually repeating dataset for +# ROMS-CICE. 
The easiest way to do this is by making 4 identical files, altering +# the time axis so data is always on the 15th of a month, and setting the cycle +# length attribute to 1461 days (4 years where one is a leap year). +# NB: This script assumes the first file has been copied three times to the +# correct names of the next three files. +# Sort of NB: This script uses the basic definition of leap year = year +# divisible by 4. This does not hold for all years ending in 00, +# e.g. 2000 was a leap year but 1900 wasn't. +# Input: +# directory = string containing path to the directory where the files exist +# head = string containing the first part of each filename +# tail = string containing the last part of each filename +# NB: This script assumes the files differ only by the year, e.g. +# AN_1995_unlim.nc through AN_1998_unlim.nc +# year_start = integer containing the first year, which is also the year of +# the original data + +def process (directory, head, tail, year_start): + + days_per_month = array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]) + + # Loop through the four years + for year in range(year_start,year_start+4): + + # Check if this is a leap year and update days in February + if year % 4 == 0: + days_per_month[1] = 29 + else: + days_per_month[1] = 28 + + # Make a time axis with data on the 15th of every month + # First get the number of days between year_start and the current year + start_day = 365*(year-year_start) + if year > year_start: + for year_tmp in range(year_start, year): + if year_tmp % 4 == 0: + # A leap year has occurred + start_day += 1 + # Start on Jan 15th at midnight + time = [start_day + 14] + # Loop over months + for month in range(1,12): + time.append(start_day + sum(days_per_month[0:month]) + 14) + + file = directory + head + str(year) + tail + print 'Processing ' + file + id = Dataset(file, 'a') + id.variables['time'][:] = time + # Set the cycle_length to 4 years + id.variables['time'].cycle_length = 365.0*4 + 1 + id.close() + + +# Command-line interface +if __name__ == '__main__': + + # User parameters to edit here + + # Data where files yyyy and yyyy exist + directory = '/short/m68/kaa561/ROMS-CICE-MCT/data/ERA_Interim/' + # First part of filename for AN and FC files + an_head = 'AN_' + fc_head = 'FC_' + # Last part of filename (common to both AN and FC) + tail = '_monthly.nc' + # Year to build data from + year_start = 1995 + + # Run the actual script + process(directory, an_head, tail, year_start) + process(directory, fc_head, tail, year_start) + + + + + diff --git a/romscice_ini_woa.py b/romscice_ini_woa.py index 6b446e3..e772a7f 100644 --- a/romscice_ini_woa.py +++ b/romscice_ini_woa.py @@ -1,7 +1,7 @@ # Build an initialisation file for ROMS using climatology temperature and # salinity values from the World Ocean Atlas. Set initial velocities and sea -# surface height to zero. Under the ice shelves, set salinity to a constant -# value (34.5 psu) and temperature to the local freezing point. +# surface height to zero. Under the ice shelves, extrapolate temperature and +# salinity from the ice shelf front. # NB for raijin users: RegularGridInterpolator needs python/2.7.6 but the # default is 2.7.3. 
Before running this script, switch them as follows: @@ -19,13 +19,6 @@ # Main routine def run (grid_file, woa_file, output_file, Tcline, theta_s, theta_b, hc, N, nbdry_woa): - # Parameters for freezing point of seawater; taken from Ben's PhD thesis - a = -5.73e-2 - b = 8.32e-2 - c = -7.61e-8 - rho = 1035.0 - g = 9.81 - # Read WOA data and grid print 'Reading World Ocean Atlas data' woa_id = Dataset(woa_file, 'r') @@ -65,12 +58,6 @@ def run (grid_file, woa_file, output_file, Tcline, theta_s, theta_b, hc, N, nbdr print 'Interpolating salinity' salt = interp_woa2roms(salt_woa, lon_woa, lat_woa, depth_woa, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice, 34.5) - # Set the salinity in ice shelf cavities to 34.5 psu, and the temperature to the local freezing point - mask_zice_3d = tile(mask_zice, (N,1,1)) - index = mask_zice_3d == 1 - salt[index] = 34.5 - temp[index] = a*salt[index] + b + c*rho*g*abs(z_roms_3d[index]) - # Set initial velocities and sea surface height to zero u = zeros((N, num_lat, num_lon-1)) v = zeros((N, num_lat-1, num_lon)) @@ -162,7 +149,7 @@ def run (grid_file, woa_file, output_file, Tcline, theta_s, theta_b, hc, N, nbdr # Given an array on the World Ocean Atlas grid, interpolate onto the ROMS grid, -# and fill the ice shelf cavities and the land mask with constant values. +# extrapolate under ice shelf cavities, and fill the land mask with constant values. # Input: # A = array of size nxmxo containing values on the World Ocean Atlas grid # (dimension longitude x latitude x depth) @@ -179,13 +166,18 @@ def run (grid_file, woa_file, output_file, Tcline, theta_s, theta_b, hc, N, nbdr # land 0) # mask_zice = array of size qxr containing ROMS ice shelf mask (ice shelf 1, # ocean/land 0) -# fill = scalar containing the value with which to fill the ROMS land mask and -# ice shelf cavities (really doesn't matter, don't use NaN) +# fill = scalar containing the value with which to fill the ROMS land mask +# (really doesn't matter, don't use NaN) # Output: # B = array of size pxqxr containing values on the ROMS grid (dimension depth x # latitude x longitude) def interp_woa2roms (A, lon_woa, lat_woa, depth_woa, lon_roms_3d, lat_roms_3d, z_roms_3d, mask_rho, mask_zice, fill): + # Radius of the Earth in m + r = 6.371e6 + # Degrees to radians conversion factor + deg2rad = pi/180.0 + # Calculate N based on size of ROMS grid N = size(lon_roms_3d, 0) @@ -199,12 +191,68 @@ def interp_woa2roms (A, lon_woa, lat_woa, depth_woa, lon_roms_3d, lat_roms_3d, z for k in range(N): print '...vertical level ', str(k+1), ' of ', str(N) tmp = interp_function((lon_roms_3d[k,:,:], lat_roms_3d[k,:,:], -z_roms_3d[k,:,:])) - # Fill land mask and ice shelf cavities with constant value + # Fill land mask with constant value tmp[mask_rho==0] = fill - tmp[mask_zice==1] = fill # Save this depth level B[k,:,:] = tmp + print '...selecting ice shelf front' + # Find the ice shelf front + front = zeros(shape(lon_roms_3d)) + # Find northernmost latitude with ice shelves + nbdry_zice = where(sum(mask_zice, axis=1) > 0)[-1][-1] + 2 + # Loop over all cells; can't find a cleaner way to do this + for j in range(nbdry_zice): + for i in range(size(mask_zice,1)): + # First make sure this is an ocean point + if mask_rho[j,i] == 1: + # Find bounds on window of radius 1 + j_min = max(j-1,0) + j_max = min(j+2, size(mask_zice,0)) + i_min = max(i-1,0) + i_max = min(i+2, size(mask_zice,1)) + # Points at the ice shelf front are not ice shelf points, but have at least one + # neighbour that is + if mask_zice[j,i] == 0 and 
any(mask_zice[j_min:j_max,i_min:i_max] == 1): + front[:,j,i] = 1 + + print '...extrapolating under ice shelves' + mask_zice_3d = tile(mask_zice, (N,1,1)) + for j in range(nbdry_zice, -1, -1): + print '...latitude index ' + str(nbdry_zice-j+1) + ' of ' + str(nbdry_zice+1) + # Find coordinates of ice shelf front points + lon_front = lon_roms_3d[front==1] + lat_front = lat_roms_3d[front==1] + z_front = z_roms_3d[front==1] + # Also find the values of the given variable here + B_front = B[front==1] + for i in range(size(mask_zice,1)): + if mask_zice[j,i] == 1: + for k in range(N): + # Calculate Cartesian distance from this point to each point + # at the ice shelf front + dlon = lon_front - lon_roms_3d[k,j,i] + index = dlon > 300 + dlon[index] -= 360 + index = dlon < -300 + dlon[index] += 360 + dlon = abs(dlon) + dlat = abs(lat_front - lat_roms_3d[k,j,i]) + dz = abs(z_front - z_roms_3d[k,j,i]) + dx = r*cos(lat_roms_3d[k,j,i]*deg2rad)*dlon*deg2rad + dy = r*dlat*deg2rad + dist = sqrt(dx**2 + dy**2 + dz**2) + # Find the index with the minimum distance: this is the + # nearest-neighbour + nearest = argmin(dist) + B[k,j,i] = B_front[nearest] + # Update the ice shelf front points to include the new points we've just + # extrapolated to + front_tmp = front[:,j,:] + mask_zice_tmp = mask_zice_3d[:,j,:] + front_tmp[mask_zice_tmp==1] = 1 + front[:,j,:] = front_tmp + return B diff --git a/romscice_tides.py b/romscice_tides.py index fe10ca3..a455579 100644 --- a/romscice_tides.py +++ b/romscice_tides.py @@ -20,7 +20,7 @@ def romscice_tides (): # Path to TPXO file tpxo_file = '../ROMS-CICE-MCT/data/h_tpxo7.2.nc' # Desired path to output file - out_file = '../ROMS-CICE-MCT/data/tides_tpxo72_1yr.nc' + out_file = '../ROMS-CICE-MCT/data/tides_tpxo72.nc' #_1yr.nc' # Bounds on latitude indices to read from TPXO2 (as close together as # possible while containing all the latitudes ROMS needs, so that there # aren't too many land points to fill with nearest neighbours which is @@ -37,12 +37,12 @@ def romscice_tides (): # file caisom001_tides.nc) period = array([44714.165191868, 43200.0012869521, 86164.0770050671, 92949.6357005365, 45570.0535117177, 86637.1997716528, 43082.0503185947, 96726.0857029666, 2380715.86358729, 1180295.54554976]) # Tweak all these periods so they evenly divide 1 year - period_1yr = zeros(num_cmp) - sec_per_year = 365.25*24*60*60 - for n in range(num_cmp): - period_orig = period[n] - freq = round(sec_per_year/period_orig) - period_1yr[n] = sec_per_year/freq + #period_1yr = zeros(num_cmp) + #sec_per_year = 365.25*24*60*60 + #for n in range(num_cmp): + #period_orig = period[n] + #freq = round(sec_per_year/period_orig) + #period_1yr[n] = sec_per_year/freq # Read ROMS grid id = Dataset(grid_file, 'r') @@ -87,7 +87,7 @@ def romscice_tides (): id.createVariable('tide_period', 'f8', ('tide_period')) id.variables['tide_period'].long_name = 'tide angular period' id.variables['tide_period'].units = 'seconds' - id.variables['tide_period'][:] = period_1yr + id.variables['tide_period'][:] = period #_1yr id.createVariable('tide_Ephase', 'f8', ('tide_period', 'eta_rho', 'xi_rho')) id.variables['tide_Ephase'].long_name = 'tidal elevation phase angle' id.variables['tide_Ephase'].units = 'degrees, time of maximum elevation with respect to chosen time origin'
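
Note on the repeating time axis: the header comment of repeat_forcing_monthly.py describes putting all records at the 15th of each month and setting a 1461-day cycle length (four years containing one leap year). The short standalone sketch below reproduces that calculation for checking the numbers by hand; it is not part of the patch, it uses the same simple divisible-by-4 leap-year rule as the script, and the function name monthly_time_axis is only illustrative.

    # Sketch only: the 15th-of-the-month time axis and 1461-day cycle
    # length described in repeat_forcing_monthly.py
    from numpy import array, cumsum

    def monthly_time_axis(year_start, num_years=4):
        times = []
        start_day = 0
        for year in range(year_start, year_start + num_years):
            # Simple leap-year rule, as in the script
            feb = 29 if year % 4 == 0 else 28
            days = array([31, feb, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
            # Day 14 = midnight on 15 January of this year; add the days
            # elapsed in earlier months for the remaining records
            times += list(start_day + 14 + cumsum([0] + list(days[:-1])))
            start_day += days.sum()
        return times, start_day  # start_day ends up as the cycle length

    # monthly_time_axis(1995) gives 48 values starting 14, 45, 73, ...
    # and a cycle length of 1461 days (one leap year in the window).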
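
Note on the colour map change in ismr_plot.py: each change-point value is normalised onto [0, 1] over the plotted range before the list is handed to LinearSegmentedColormap.from_list. A minimal sketch of that normalisation, using the same change points and colours as the patch (variable names here are illustrative):

    # Sketch only: the change-point normalisation used by ismr_plot.py,
    # with the melt-rate range fixed at -0.5 to 8 m/y as in the patch
    from matplotlib.colors import LinearSegmentedColormap

    cmap_vals = [-0.5, 0, 1, 2, 5, 8]
    cmap_colors = [(0.26, 0.45, 0.86), (1, 1, 1), (1, 0.9, 0.4),
                   (0.99, 0.59, 0.18), (0.5, 0.0, 0.08), (0.96, 0.17, 0.89)]
    vmin, vmax = cmap_vals[0], cmap_vals[-1]
    # Each entry is (position in [0,1], RGB colour)
    cmap_list = [((val - vmin)/(vmax - vmin), col)
                 for val, col in zip(cmap_vals, cmap_colors)]
    mf_cmap = LinearSegmentedColormap.from_list('melt_freeze', cmap_list)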
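
Note on the ice-shelf extrapolation in romscice_ini_woa.py: the nearest ice-shelf-front point is chosen with a locally flat-Earth distance, where longitude and latitude offsets are scaled to metres and combined with the vertical offset. The sketch below illustrates that metric only; it folds the longitude difference into [-180, 180] with a modulo rather than the script's +/-300 degree checks (equivalent for these grids), and the function name cavity_distance is made up here.

    # Sketch only: the 3D distance used to pick the nearest ice shelf
    # front point during the under-ice extrapolation
    from numpy import abs, sqrt, cos, pi

    def cavity_distance(lon0, lat0, z0, lon_front, lat_front, z_front):
        r = 6.371e6           # radius of the Earth in m
        deg2rad = pi/180.0
        # Fold the longitude difference into [-180, 180] to handle the 0/360 seam
        dlon = abs((lon_front - lon0 + 180) % 360 - 180)
        dx = r*cos(lat0*deg2rad)*dlon*deg2rad
        dy = r*abs(lat_front - lat0)*deg2rad
        dz = abs(z_front - z0)
        return sqrt(dx**2 + dy**2 + dz**2)

    # argmin over these distances then selects the nearest-neighbour
    # value to copy into the cavity, as in the patch.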