Skip to content

Commit

Permalink
Added more ice shelf regions to massloss.py and switched from i- and …
Browse files Browse the repository at this point in the history
…j- limits to lat- and lon- limits
  • Loading branch information
Kaitlin Alexander committed Jun 14, 2016
1 parent 058525b commit 2128aa6
Show file tree
Hide file tree
Showing 7 changed files with 116 additions and 71 deletions.
6 changes: 3 additions & 3 deletions aice_animation.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,9 @@
# Directory containing CICE output files
directory = '/short/y99/kaa561/roms_spinup_newest/cice/'
# Number of time indices in each file
num_ts = [18, 270, 270, 270, 252]
num_ts = [288]
# File number to start with for the animation (1-based)
start_file = 5
start_file = 1
# Degrees to radians conversion factor
deg2rad = pi/180
# Names of each month for making titles
Expand Down Expand Up @@ -66,6 +66,6 @@ def animate(i):
return img

# Animate once every time index from start_file to the last file
anim = FuncAnimation(fig, func=animate, frames=range(179,252)) #sum(array(num_ts[start_file-1:])))
anim = FuncAnimation(fig, func=animate, frames=range(215,288)) #sum(array(num_ts[start_file-1:])))
# Save as an mp4 with one frame per second
anim.save('aice.mp4', fps=1)
66 changes: 37 additions & 29 deletions massloss.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,23 +14,22 @@
def massloss (file_path, log_path):

# Titles and figure names for each ice shelf
names = ['Amery Ice Shelf', 'Ross Ice Shelf', 'Getz Ice Shelf', 'Pine Island Glacier Ice Shelf', 'Abbot Ice Shelf', 'George VI Ice Shelf', 'Larsen C Ice Shelf', 'Ronne-Filchner Ice Shelf', 'Brunt and Riiser-Larsen Ice Shelves', 'Fimbul and Jelbart Ice Shelves']
fig_names = ['amery_massloss.png', 'ross_massloss.png', 'getz_massloss.png', 'pig_massloss.png', 'abbot_massloss.png', 'george_vi_massloss.png', 'larsen_c_massloss.png', 'ronne_filchner_massloss.png', 'brunt_riiser_larsen_massloss.png', 'fimbul_jelbart_massloss.png']
# Limits on i- and j- coordinates for each ice shelf; this will vary
# depending on the grid
names = ['Larsen D Ice Shelf', 'Larsen C Ice Shelf', 'Wilkins & George VI & Stange Ice Shelves', 'Ronne-Filchner Ice Shelf', 'Abbot Ice Shelf', 'Pine Island Glacier Ice Shelf', 'Thwaites Ice Shelf', 'Dotson Ice Shelf', 'Getz Ice Shelf', 'Nickerson Ice Shelf', 'Sulzberger Ice Shelf', 'Mertz Ice Shelf', 'Totten & Moscow University Ice Shelves', 'Shackleton Ice Shelf', 'West Ice Shelf', 'Amery Ice Shelf', 'Prince Harald Ice Shelf', 'Baudouin & Borchgrevink Ice Shelves', 'Lazarev Ice Shelf', 'Nivl Ice Shelf', 'Fimbul & Jelbart & Ekstrom Ice Shelves', 'Brunt & Riiser-Larsen Ice Shelves', 'Ross Ice Shelf']
fig_names = ['larsen_d.png', 'larsen_c.png', 'wilkins_georgevi_stange.png', 'ronne_filchner.png', 'abbot.png', 'pig.png', 'thwaites.png', 'dotson.png', 'getz.png', 'nickerson.png', 'sulzberger.png', 'mertz.png', 'totten_moscowuni.png', 'shackleton.png', 'west.png', 'amery.png', 'princeharald.png', 'baudouin_borchgrevink.png', 'lazarev.png', 'nivl.png', 'fimbul_jelbart_ekstrom.png', 'brunt_riiserlarsen.png', 'ross.png']
# Limits on longitude and latitude for each ice shelf
# These depend on the source geometry, in this case RTopo 1.05
# Note there is one extra index at the end of each array; this is because
# the Fimbul-Jelbart region crosses the periodic boundary and therefore
# is split into two
i_min = [250, 700, 905, 1015, 1000, 1100, 1165, 1060, 1280, 1375, 1]
i_max = [350, 872, 975, 1030, 1090, 1155, 1190, 1240, 1369, 1443, 12]
j_min = [1, 20, 150, 140, 160, 150, 187, 1, 65, 80, 100]
j_max = [125, 123, 175, 160, 185, 200, 220, 135, 116, 150, 120]
# Observed mass loss (Rignot 2013) and uncertainty for each ice shelf
obs_massloss = [35.5, 47.7, 144.9, 101.2, 51.8, 89, 20.7, 155.4, 9.7, 22.5]
obs_massloss_error = [23, 34, 14, 8, 19, 17, 67, 45, 16, 12]
# the Ross region crosses the line 180W and therefore is split into two
lon_min = [-62.67, -65.5, -79.17, -85, -104.17, -102.5, -108.33, -114.5, -135.67, -149.17, -155, 144, 115, 94.17, 80.83, 65, 33.83, 19, 12.9, 9.33, -10.05, -28.33, -180, 158.33]
lon_max = [-59.33, -60, -66.67, -28.33, -88.83, -99.17, -103.33, -111.5, -114.33, -140, -145, 146.62, 123.33, 102.5, 89.17, 75, 37.67, 33.33, 16.17, 12.88, 7.6, -10.33, -146.67, 180]
lat_min = [-73.03, -69.35, -74.17, -83.5, -73.28, -75.5, -75.5, -75.33, -74.9, -76.42, -78, -67.83, -67.17, -66.67, -67.83, -73.67, -69.83, -71.67, -70.5, -70.75, -71.83, -76.33, -85, -84.5]
lat_max = [-69.37, -66.13, -69.5, -74.67, -71.67, -74.17, -74.67, -73.67, -73, -75.17, -76.41, -66.67, -66.5, -64.83, -66.17, -68.33, -68.67, -68.33, -69.33, -69.83, -69.33, -71.5, -77.77, -77]
# Observed mass loss (Rignot 2013) and uncertainty for each ice shelf, in Gt/y
obs_massloss = [1.4, 20.7, 135.4, 155.4, 51.8, 101.2, 97.5, 45.2, 144.9, 4.2, 18.2, 7.9, 90.6, 72.6, 27.2, 35.5, -2, 21.6, 6.3, 3.9, 26.8, 9.7, 47.7]
obs_massloss_error = [14, 67, 40, 45, 19, 8, 7, 4, 14, 2, 3, 3, 8, 15, 10, 23, 3, 18, 2, 2, 14, 16, 34]
# Observed ice shelf melt rates and uncertainty
obs_ismr = [0.6, 0.1, 4.3, 16.2, 1.7, 3.8, 0.4, 0.3, 0.1, 0.5]
obs_ismr_error = [0.4, 0.1, 0.4, 1, 0.6, 0.7, 1, 0.1, 0.2, 0.2]
obs_ismr = [0.1, 0.4, 3.1, 0.3, 1.7, 16.2, 17.7, 7.8, 4.3, 0.6, 1.5, 1.4, 7.7, 2.8, 1.7, 0.6, -0.4, 0.4, 0.7, 0.5, 0.5, 0.1, 0.1]
obs_ismr_error = [0.6, 1, 0.8, 0.1, 0.6, 1, 1, 0.6, 0.4, 0.3, 0.3, 0.6, 0.7, 0.6, 0.7, 0.4, 0.6, 0.4, 0.2, 0.2, 0.2, 0.2, 0.1]

# Density of ice in kg/m^3
rho_ice = 916
Expand Down Expand Up @@ -63,9 +62,9 @@ def massloss (file_path, log_path):
break
index += 1

# Calculate dA (masked with ice shelf mask) and i and j coordinates
# Calculate dA (masked with ice shelf mask) and lon and lat coordinates
print 'Analysing grid'
dA, i, j = calc_grid(file_path)
dA, lon, lat = calc_grid(file_path)

# Read time data and convert from seconds to years
id = Dataset(file_path, 'r')
Expand Down Expand Up @@ -104,13 +103,13 @@ def massloss (file_path, log_path):
for index in range(len(names)):

# Mask dA for the current ice shelf (note dA is already masked
# with the global ice shelf mask, so just restrict the i and j
# with the global ice shelf mask, so just restrict the indices
# to isolate the given ice shelf)
if index == len(names)-1:
# Fimbul-Jelbart region is split into two
dA_tmp = ma.masked_where(((i < i_min[index]) + (i > i_max[index]) + (j < j_min[index]) + (j > j_max[index]))*((i < i_min[index+1]) + (i > i_max[index+1]) + (j < j_min[index+1]) + (j > j_max[index+1])), dA)
# Ross region is split into two
dA_tmp = ma.masked_where(((lon < lon_min[index]) + (lon > lon_max[index]) + (lat < lat_min[index]) + (lat > lat_max[index]))*((lon < lon_min[index+1]) + (lon > lon_max[index+1]) + (lat < lat_min[index+1]) + (lat > lat_max[index+1])), dA)
else:
dA_tmp = ma.masked_where((i < i_min[index]) + (i > i_max[index]) + (j < j_min[index]) + (j > j_max[index]), dA)
dA_tmp = ma.masked_where((lon < lon_min[index]) + (lon > lon_max[index]) + (lat < lat_min[index]) + (lat > lat_max[index]), dA)

# Integrate ice shelf melt rate over area to get volume loss
volumeloss = sum(ismr*dA_tmp)
Expand Down Expand Up @@ -140,8 +139,16 @@ def massloss (file_path, log_path):
ax1.axhline(massloss_low, color='b', linestyle='dashed')
ax1.axhline(massloss_high, color='b', linestyle='dashed')
# Make sure y-limits won't cut off observed melt rate
ymin = min(0.95*ismr_low/factors[index], ax1.get_ylim()[0])
ymax = max(1.05*ismr_high/factors[index], ax1.get_ylim()[1])
ymin = amin([ismr_low/factors[index], massloss_low, amin(massloss[index,:])])
ymax = amax([ismr_high/factors[index], massloss_high, amax(massloss[index,:])])
if ymin < 0:
ymin = 1.05*ymin
else:
ymin = 0.95*ymin
if ymax < 0:
ymax = 0.95*ymax
else:
ymax = 1.05*ymax
ax1.set_ylim([ymin, ymax])
# Title and ticks in blue for this side of the plot
ax1.set_ylabel('Basal Mass Loss (Gt/y)', color='b')
Expand Down Expand Up @@ -178,11 +185,12 @@ def massloss (file_path, log_path):


# Given the path to a ROMS grid file, calculate differential of area and
# i- and j-indices.
# longitude and latitude.
# Input: file_path = string containing path to ROMS history/averages file
# Output:
# dA = differential of area on the 2D rho-grid, masked with zice
# i,j = i- and j- coordinates on the rho-grid, starting at 1
# lon = longitude values on the rho-grid, from -180 to 180
# lat = latitude values on the rho-grid
def calc_grid (file_path):

# Read grid variables
Expand All @@ -202,11 +210,11 @@ def calc_grid (file_path):
num_lat = size(lon, 0)
num_lon = size(lon, 1)

# Calculate i- and j-coordinates
i = tile(arange(1, num_lon+1), (num_lat, 1))
j = transpose(tile(arange(1, num_lat+1), (num_lon, 1)))
# Make longitude values go from -180 to 180, not 0 to 360
index = lon > 180
lon[index] = lon[index] - 360

return dA, i, j
return dA, lon, lat


# Command-line interface
Expand Down
55 changes: 29 additions & 26 deletions repeat_forcing.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,35 +71,38 @@ def process (directory, head, tail, year_start, perday, var_list):
id.close()


# User parameters to edit here
# Command-line interface
if __name__ == '__main__':

# Data where files <an_head>yyyy<tail> and <fc_head>yyyy<tail> exist
directory = '/short/m68/kaa561/ROMS-CICE-MCT/data/ERA_Interim/subdaily/'
# First part of filename for AN and FC files
an_head = 'AN_'
fc_head = 'FC_'
# Last part of filename (common to both AN and FC)
tail = '_subdaily.nc'
# Year to build data from
year_start = 1995
# Number of records per day
an_perday = 4
fc_perday = 2
# Variables to interpolate Feb 29th for each file
an_var = ['Pair', 'Tair', 'Qair', 'cloud', 'Uwind', 'Vwind']
fc_var = ['rain']
# User parameters to edit here

if year_start % 4 == 0:
# This script assumes year_start has 365 days and then the leap year
# 1, 2, or 3 years after that will have Feb 29th added in by interpolation.
# However you could rework this script to remove Feb 29th from year_start
# and every following year except the leap year.
print 'year_start cannot be a leap year. Either choose a different year_start or rework this script.'
exit
# Data where files <an_head>yyyy<tail> and <fc_head>yyyy<tail> exist
directory = '/short/m68/kaa561/ROMS-CICE-MCT/data/ERA_Interim/subdaily/30day_smoothed/'
# First part of filename for AN and FC files
an_head = 'AN_'
fc_head = 'FC_'
# Last part of filename (common to both AN and FC)
tail = '_subdaily.nc'
# Year to build data from
year_start = 1995
# Number of records per day
an_perday = 4
fc_perday = 2
# Variables to interpolate Feb 29th for each file
an_var = ['Pair', 'Tair', 'Qair', 'cloud', 'Uwind', 'Vwind']
fc_var = ['rain', 'snow']

# Run the actual script
process(directory, an_head, tail, year_start, an_perday, an_var)
process(directory, fc_head, tail, year_start, fc_perday, fc_var)
if year_start % 4 == 0:
# This script assumes year_start has 365 days and then the leap year
# 1, 2, or 3 years after that will have Feb 29th added in by interpolation.
# However you could rework this script to remove Feb 29th from year_start
# and every following year except the leap year.
print 'year_start cannot be a leap year. Either choose a different year_start or rework this script.'
exit

# Run the actual script
process(directory, an_head, tail, year_start, an_perday, an_var)
process(directory, fc_head, tail, year_start, fc_perday, fc_var)



Expand Down
25 changes: 25 additions & 0 deletions rignot_data
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
Name Area (km2) Mass loss (Gt/y) Melt rate (m/y) lon-bounds lat-bounds ROMS area (km^2) Area bias (%)
Larsen D 22,548 1.4 +/- 14 0.1 +/- 0.6 -62.67:-59.33 -73.03:-69.37 12,754 -43
Larsen C 46,465 20.7 +/- 67 0.4 +/- 1 -65.5:-60 -69.35:-66.13 52,009 12
Wilkins & George VI & Stange 44,327 135.4 +/- 40 3.1 +/- 0.8 -79.17:-66.67 -74.17:-69.5 47,287 7
Ronne-Filchner 443,140 155.4 +/- 45 0.3 +/- 0.1 -85:-28.33 -83.5:-74.67 353,435 -20
Abbot 29,688 51.8 +/- 19 1.7 +/- 0.6 -104.17:-88.83 -73.28:-71.67 31,291 5
Pine Island 6,249 101.2 +/- 8 16.2 +/- 1 -102.5:-99.17 -75.5:-74.17 5,162 -17
Thwaites 5,499 97.5 +/- 7 17.7 +/- 1 -108.33:-103.33 -75.5:-74.67 3,990 -27
Dotson 5,803 45.2 +/- 4 7.8 +/- 0.6 -114.5:-111.5 -75.33:-73.67 4,681 -19
Getz 34,018 144.9 +/- 14 4.3 +/- 0.4 -135.67:-114.33 -74.9:-73 32,447 -5
Nickerson 6,495 4.2 +/- 2 0.6 +/- 0.3 -149.17:-140 -76.42:-75.17 7,694 18
Sulzberger 12,333 18.2 +/- 3 1.5 +/- 0.3 -155:-145 -78:-76.41 13,537 10
Ross 500,809 47.7 +/- 34 0.1 +/- 0.1 -180:-146.67,158.33:180 -85:-77.77,-84.5:-77 429,253 -14
Mertz 5,522 7.9 +/- 3 1.4 +/- 0.6 144:146.62 -67.83:-66.67 4,918 -11
Totten & Moscow Uni 11,830 90.6 +/- 8 7.7 +/- 0.7 115:123.33 -67.17:-66.5 6,482 -45
Shackleton 26,080 72.6 +/- 15 2.8 +/- 0.6 94.17:102.5 -66.67:-64.83 30,522 17
West 15,666 27.2 +/- 10 1.7 +/- 0.7 80.83:89.17 -67.83:-66.17 15,158 -3
Amery 60,654 35.5 +/- 23 0.6 +/- 0.4 65:75 -73.67:-68.33 64,360 6
Prince Harald 5,392 -2 +/- 3 -0.4 +/- 0.6 33.83:37.67 -69.83:-68.67 4,576 -15
Baudouin & Borchgrevink 54,532 21.6 +/- 18 0.4 +/- 0.4 19:33.33 -71.67:-68.33 45,327 -17
Lazarev 8,519 6.3 +/- 2 0.7 +/- 0.2 12.9:16.17 -70.5:-69.33 8,111 -5
Nivl 7,285 3.9 +/- 2 0.5 +/- 0.2 9.33:12.88 -70.75:-69.83 7,088 -3
Fimbul & Jelbart & Ekstrom 58,559 26.8 +/- 14 0.5 +/- 0.2 -10.05:7.6 -71.83:-69.33 55,763 -5
Brunt & Riiser-Larsen 80,344 9.7 +/- 16 0.1 +/- 0.2 -28.33:-10.33 -76.33:-71.5 68,007 -15

6 changes: 3 additions & 3 deletions romscice_atm_monthly.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,11 @@ def convert_file (year):

# Paths of ROMS grid file, input ERA-Interim files, and output ROMS-CICE
# files; other users will need to change these
grid_file = '../ROMS-CICE-MCT/apps/common/grid/circ30S_quarterdegree_rp5.nc'
grid_file = '/short/m68/kaa561/ROMS-CICE-MCT/apps/common/grid/circ30S_quarterdegree_rp5.nc'
input_atm_file = '/short/y99/kaa561/FESOM/ERA_Interim_monthly/AN_' + str(year) + '_monthly_orig.nc'
input_ppt_file = '/short/y99/kaa561/FESOM/ERA_Interim_monthly/FC_' + str(year) + '_monthly_orig.nc'
output_atm_file = '../ROMS-CICE-MCT/data/ERA_Interim/monthly/historical/AN_' + str(year) + '_monthly.nc'
output_ppt_file = '../ROMS-CICE-MCT/data/ERA_Interim/monthly/historical/FC_' + str(year) + '_monthly.nc'
output_atm_file = '/short/m68/kaa561/ROMS-CICE-MCT/data/ERA_Interim/monthly/historical/AN_' + str(year) + '_monthly.nc'
output_ppt_file = '/short/m68/kaa561/ROMS-CICE-MCT/data/ERA_Interim/monthly/historical/FC_' + str(year) + '_monthly.nc'

Lv = 2.5e6 # Latent heat of vapourisation, J/kg
Rv = 461.5 # Ideal gas constant for water vapour, J/K/kg
Expand Down
10 changes: 5 additions & 5 deletions romscice_atm_smooth.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def smooth_var (filename, var, interval):
else:
# Easy, just moving ahead by 1 timestep with no wrapping around
data[-1,:,:] = id.variables[var][t+margin,:,:]
# Overwrite this timestep to be the time-avearge of data
# Overwrite this timestep to be the time-average of data
id.variables[var][t,:,:] = mean(data, axis=0)

id.close()
Expand All @@ -70,12 +70,12 @@ def smooth_var (filename, var, interval):
# User-defined values here
if __name__ == "__main__":

an_file = '../data/ERA_Interim/AN_1995_unlim.nc'
fc_file = '../data/ERA_Interim/FC_1995_unlim.nc'
an_file = '/short/m68/kaa561/ROMS-CICE-MCT/data/ERA_Interim/subdaily/30day_smoothed/AN_1995_subdaily.nc'
fc_file = '/short/m68/kaa561/ROMS-CICE-MCT/data/ERA_Interim/subdaily/30day_smoothed/FC_1995_subdaily.nc'
# Names of variables to process in each file
an_var = ['Pair', 'Tair', 'Qair', 'cloud', 'Uwind', 'Vwind']
fc_var = ['rain']
sdays = 5 # 5-day moving average
fc_var = ['rain', 'snow']
sdays = 30 # 30-day moving average

# Loop over each listed variable in each file
for var in an_var:
Expand Down
19 changes: 14 additions & 5 deletions spinup_plots.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,16 +127,25 @@ def calc_totalsalt (file_path, dV, rho, t):

# Calculate net basal mass loss based on the given ice shelf melt rate field.
# Input:
# ismr = 2D ice shelf melt rate field (m/y)
# file_path = path to ocean history/averages file
# dA = elements of area on the rho grid, masked with zice
# t = timestep index in file_path
# Output:
# massloss = net basal mass loss (Gt/y)
# massloss_factor = conversion factor from mass loss to area-averaged melt rate
def calc_massloss (ismr, dA):
def calc_massloss (file_path, dA, t):

# Density of ice in kg/m^3
rho_ice = 916

# Read ice shelf melt rate, converting to float128 to prevent overflow
# during integration
id = Dataset(file_path, 'r')
ismr = ma.asarray(id.variables['m'][t,:,:], dtype=float128)
# Convert from m/s to m/y
ismr = ismr*365.25*24*60*60
id.close()

# Integrate over area to get volume loss
volumeloss = sum(ismr*dA)
# Convert to mass loss in Gt/y
Expand Down Expand Up @@ -309,8 +318,8 @@ def calc_bwtemp (file_path, dA, t):
def spinup_plots (file_path, cice_path, log_path):

# Observed basal mass loss (Rignot 2013) and uncertainty
obs_massloss = 1325
obs_massloss_error = 235
obs_massloss = 1500
obs_massloss_error = 237
# Observed ice shelf melt rates and uncertainty
obs_ismr = 0.85
obs_ismr_error = 0.1
Expand Down Expand Up @@ -395,7 +404,7 @@ def spinup_plots (file_path, cice_path, log_path):
print 'Calculating total salt content'
totalsalt.append(calc_totalsalt(file_path, dV, rho, t))
print 'Calculating basal mass loss'
massloss_tmp, massloss_factor = calc_massloss(ismr, dA)
massloss_tmp, massloss_factor = calc_massloss(file_path, dA, t)
massloss.append(massloss_tmp)
print 'Calculating total kinetic energy'
tke.append(calc_tke(file_path, dV, rho, t))
Expand Down

0 comments on commit 2128aa6

Please sign in to comment.