calculateMaxLightEntireArctic.py
from pylab import *
import os, sys, datetime, string
import numpy as np
from netCDF4 import Dataset, date2num, num2date
import time
from subprocess import call
import mpl_util
import pandas as pd
import calculateLightUnderIce
from mpl_toolkits.basemap import Basemap, interp, shiftgrid, addcyclic
import brewer2mpl
import pyximport
pyximport.install(setup_args={'include_dirs': [np.get_include()]})
__author__ = 'Trond Kristiansen'
__email__ = '[email protected]'
__created__ = datetime.datetime(2017, 12, 19)
__modified__ = datetime.datetime(2017, 12, 19)
__version__ = "1.1"
__status__ = "Development, 19.12.2017"
"""This script calculates the maximum and
average light irradiance (Wm-2) at given longitude
and latitude for a given date when the ice and snowthickness is known.
Wm-2 can be converted to umol/m2/s-1
by maxLight = maxLight/0.217
Compile : A cython function which is found in the file calculateLightUnderIce.pyx is
compiled with: python setup.py build_ext --inplace
"""
def calculateArea(lat0, lat1, lon0, lon1, areaIce):
    """Calculate the area (m2) of a grid cell weighted by its ice cover.

    -180 <= lon0 < lon1 <= 180
    -90 <= lat0 < lat1 <= 90
    areaIce is in percent
    """
    earthRadius = 6371000
    rad = np.pi / 180.0
    area = earthRadius ** 2 * (np.sin(lat1 * rad) - np.sin(lat0 * rad)) * (lon1 - lon0) * rad
    return area * areaIce

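# Example usage (a sketch with hypothetical values, not a call made by this script):
# the ice-weighted area of a 1 x 1 degree cell between 75N and 76N with 80% ice cover
# could be estimated as
#
#   iceArea = calculateArea(75.0, 76.0, 10.0, 11.0, 0.80)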
"""Function that opens a CMIP5 file and reads the contents. The innput
is assumed to be on grid 0-360 so all values are shifted to new grid
on format -180 to 180 using the shiftgrid function of basemap."""
def openCMIP5file(myvar, CMIP5Hist, CMIP5Proj, first):
if os.path.exists(CMIP5Hist):
myfileHist = Dataset(CMIP5Hist)
print("Opened CMIP5 file: %s" % (CMIP5Hist))
else:
print("Could not find CMIP5 input file %s : abort" % (CMIP5Hist))
print("Make sure that needToSubsetData is set to False if you have no data in SUBSET folder")
sys.exit()
if os.path.exists(CMIP5Proj):
myfileProj = Dataset(CMIP5Proj)
print("Opened CMIP5 file: %s" % (CMIP5Proj))
else:
print("Could not find CMIP5 input file %s : abort" % (CMIP5Proj))
print("Make sure that needToSubsetData is set to False if you have no data in SUBSET folder")
sys.exit()
dateobjects = [];
timeFull = []
if first is True:
timeHist = myfileHist.variables["time"][:]
timeProj = myfileProj.variables["time"][:]
refDateH = myfileHist.variables["time"].units
refDateP = myfileProj.variables["time"].units
refdateProj = datetime.datetime(int(refDateP[11:15]), 1, 1, 0, 0, 0)
refdateHist = datetime.datetime(int(refDateH[11:15]), 1, 1, 0, 0, 0)
startH = refdateHist + datetime.timedelta(days=float(timeHist[0]))
endH = refdateHist + datetime.timedelta(days=float(timeHist[-1]))
startP = refdateProj + datetime.timedelta(days=float(timeProj[0]))
endP = refdateProj + datetime.timedelta(days=float(timeProj[-1]))
if first is True:
print("Found Historical to start in year %s and end in %s" % (startH.year, endH.year))
print("Found Projections to start in year %s and end in %s" % (startP.year, endP.year))
"""Create the datetime objects for pandas"""
for t in timeHist:
obj=num2date(t, refDateH, calendar="365_day")
dateobjects.append(datetime.datetime(obj.year,obj.month,obj.day))
for t in timeProj:
obj = num2date(t, refDateP, calendar="365_day")
dateobjects.append(datetime.datetime(obj.year, obj.month, obj.day))
"""Combine the time arrays"""
timeFull = np.ma.concatenate((timeHist, timeProj), axis=0)
myTEMPHIST = np.squeeze(myfileHist.variables[myvar][:])
myTEMPHIST = np.ma.masked_where(myTEMPHIST == myTEMPHIST.fill_value, myTEMPHIST)
myTEMPPROJ = np.squeeze(myfileProj.variables[myvar][:])
myTEMPPROJ = np.ma.masked_where(myTEMPPROJ == myTEMPHIST.fill_value, myTEMPPROJ)
lonCMIP5 = np.squeeze(myfileHist.variables["lon"][:])
latCMIP5 = np.squeeze(myfileHist.variables["lat"][:])
"""Combine the myvarname arrays"""
dataFull = np.ma.concatenate((myTEMPHIST, myTEMPPROJ), axis=0)
"""Make sure that we have continous data around the globe"""
dataFull, loncyclicCMIP5 = addcyclic(dataFull, lonCMIP5)
lons, lats = np.meshgrid(loncyclicCMIP5, latCMIP5)
return dateobjects, timeFull, dataFull, lons, lats
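# A minimal sketch (not used by this script) of how Basemap's shiftgrid could move the
# cyclic 0-360 data returned above onto a -180 to 180 grid, as mentioned in the docstring;
# the variable names mirror those inside openCMIP5file and are illustrative only:
#
#   dataShifted, lonsShifted = shiftgrid(180., dataFull, loncyclicCMIP5, start=False)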
"""Map related functions:"""
def plotMap(lon, lat, mydata, modelName, scenario, mydate):
plt.figure(figsize=(12, 12), frameon=False)
mymap = Basemap(projection='npstere', lon_0=0, boundinglat=50)
x, y = mymap(lon, lat)
levels = np.arange(np.min(mydata), np.max(mydata), 1)
# levels = np.arange(np.min(mydata), 60, 1)
CS1 = mymap.contourf(x, y, mydata, levels,
cmap=mpl_util.LevelColormap(levels, cmap=cm.RdBu_r),
extend='max',
antialiased=False)
mymap.drawparallels(np.arange(-90., 120., 15.), labels=[1, 0, 0, 0]) # draw parallels
mymap.drawmeridians(np.arange(0., 420., 30.), labels=[0, 1, 0, 1]) # draw meridians
mymap.drawcoastlines()
mymap.drawcountries()
mymap.fillcontinents(color='grey')
plt.colorbar(CS1, shrink=0.5)
title('Model:' + str(modelName) + ' Year:' + str(mydate.year) + ' Month:' + str(mydate.month))
CS1.axis = 'tight'
if not os.path.exists("Figures"):
os.mkdir("Figures/")
plotfile = 'figures/map_light_' + str(modelName) + '_' + str(mydate.year) + '_' + str(mydate.month) + '.png'
plt.savefig(plotfile, dpi=300)
plt.clf()
plt.close()
def remove_border(axes=None, top=False, right=False, left=True, bottom=True):
    """
    Minimize chartjunk by stripping out unnecessary plot borders and axis ticks.

    The top/right/left/bottom keywords toggle whether the corresponding plot border is drawn.
    """
    ax = axes or plt.gca()
    ax.spines['top'].set_visible(top)
    ax.spines['right'].set_visible(right)
    ax.spines['left'].set_visible(left)
    ax.spines['bottom'].set_visible(bottom)

    # turn off all ticks
    ax.yaxis.set_ticks_position('none')
    ax.xaxis.set_ticks_position('none')

    # now re-enable the visible ones
    if top:
        ax.xaxis.tick_top()
    if bottom:
        ax.xaxis.tick_bottom()
    if left:
        ax.yaxis.tick_left()
    if right:
        ax.yaxis.tick_right()

def plotTimeseries(ts, myvar):
    ts_annual = ts.resample("A")
    ts_quarterly = ts.resample("Q")
    ts_monthly = ts.resample("M")

    # Write data to file
    mypath = "%s_annualaverages.csv" % (myvar)
    if os.path.exists(mypath): os.remove(mypath)
    ts.to_csv(mypath)
    print("Wrote timeseries to file: %s" % (mypath))

    red_purple = brewer2mpl.get_map('RdPu', 'Sequential', 9).mpl_colormap
    colors = red_purple(np.linspace(0, 1, 12))

    fig = plt.figure(figsize=(6, 6))
    ax = fig.add_subplot(111)

    # for mymonth in xrange(12):
    #     ts[(ts.index.month == mymonth + 1)].plot(marker='o', color=colors[mymonth], markersize=5, linewidth=0,
    #                                              alpha=0.8)
    # ts_annual.plot(marker='o', color="#FA9D04", linewidth=0, alpha=1.0, markersize=7, label="Annual")

    remove_border(top=False, right=False, left=True, bottom=True)
    ts.resample("M").mean().plot(style="r", marker='o', linewidth=1, label="Monthly")
    ts.resample("A").mean().plot(style="b", marker='o', linewidth=2, label="Annual")
    # legend(loc='best')

    if myvar == "light":
        ylabel(r'Light (W m$^{-2}$)')
    if myvar == "temp":
        ylabel(r'Temp ($^{o}$C)')

    # make sure the output directory exists before saving
    if not os.path.exists("figures"):
        os.mkdir("figures")
    plotfile = 'figures/timeseries_' + str(myvar) + '.png'
    plt.savefig(plotfile, dpi=300, bbox_inches="tight", pad_inches=0)
    # print 'Saved figure file %s\n' % (plotfile)
    plt.show()

def main():
    scenarios = ["RCP85"]
    needToSubsetData = False

    outputNETCDFFilename = "Light_and_temperature_1850_2100_Arctic.nc"
    if os.path.exists(outputNETCDFFilename): os.remove(outputNETCDFFilename)
    outCDF = Dataset(outputNETCDFFilename, "w", format="NETCDF4")
    print("Opened output netCDF file for storing the temperature and light for the Arctic\nfor the period 1850-2100 from NorESM")

    myvars = ["sit", "sic", "snd", "ialb", "tos"]  # order is important
    debug = False
    startYear = 1850
    endYear = 2100

    for scenario in scenarios:
        print("-----------------------")
        print("Running scenario: %s" % (scenario))
        print("-----------------------")

        first = True
        counter = 0

        for myvar in myvars:
            modelRCP = "%s_Omon_NorESM1-M_%s_r1i1p1_200601-210012_rectangular.nc" % (myvar, scenario.lower())
            modelHIST = "%s_OImon_NorESM1-M_historical_r1i1p1_185001-200512_rectangular.nc" % (myvar)
            modelName = "NorESM1-M"

            print("-----------------------------------\n")
            print("Extracting data from model: %s " % (modelName))
            print("-----------------------------------\n")

            workDir = "/Users/trondkr/Dropbox/Projects/RegScen/NorESM/%s/" % (scenario.upper())
            workDirHist = "/Users/trondkr/Dropbox/Projects/RegScen/NorESM/Historical/"
            CMIP5file = workDir + modelRCP
            CMIP5HISTfile = workDirHist + modelHIST

            """Prepare output file: save filenames and paths for creating output filenames"""
            if not os.path.exists("SUBSET"):
                os.mkdir("SUBSET/")
            outfilenameProj = "SUBSET/" + os.path.basename(CMIP5file)[0:-3] + "_Arctic.nc"
            outfilenameHist = "SUBSET/" + os.path.basename(CMIP5HISTfile)[0:-3] + "_Arctic.nc"

            if needToSubsetData:
                call(["cdo", "sellonlatbox,0,360,40,90", CMIP5file, outfilenameProj])
                call(["cdo", "sellonlatbox,0,360,40,90", CMIP5HISTfile, outfilenameHist])

            dateobjects, timeFull, dataFull, lons, lats = openCMIP5file(myvar, outfilenameHist, outfilenameProj, first)

            if first is True:
                allData = np.zeros((len(myvars), np.shape(dataFull)[0], np.shape(dataFull)[1], np.shape(dataFull)[2]))
                lightData = np.zeros((np.shape(dataFull)[0], np.shape(dataFull)[1], np.shape(dataFull)[2]))
                lightDataMap = np.zeros((np.shape(dataFull)[0], np.shape(dataFull)[1], np.shape(dataFull)[2]))
                allDateObjects = dateobjects
                first = False

            """Store all data variables in one big array for easy access when looping afterwards"""
            allData[counter, :, :, :] = dataFull
            counter += 1

        print("Finished storing all data into a large array of shape:", np.shape(allData))

        """Now loop over each grid cell for all time-steps to calculate the light in each cell.
        The following is a Cython function found in the file calculateLightUnderIce.pyx and
        compiled with: python setup.py build_ext --inplace"""
        lightData, lightDataMap = calculateLightUnderIce.calculateLight(allData, allDateObjects, lightData, lightDataMap,
                                                                        lons, lats, debug, startYear, endYear)

        allDateObjects = np.asarray(allDateObjects)

        outCDF.createDimension("time", None)
        outCDF.createDimension("lat", np.shape(dataFull)[1])
        outCDF.createDimension("lon", np.shape(dataFull)[2])

        outCDF.Conventions = 'CF-1.6'
        outCDF.institution = "NIVA"

        latitudes = outCDF.createVariable("latitude", "f4", ("lat",))
        longitudes = outCDF.createVariable("longitude", "f4", ("lon",))
        times = outCDF.createVariable("time", "f4", ("time",))
        temp = outCDF.createVariable("tos", "f4", ("time", "lat", "lon",), fill_value=1.0e20)
        light = outCDF.createVariable("light", "f4", ("time", "lat", "lon",), fill_value=1.0e20)

        outCDF.description = "File containing temperature and light at upper 10cm of water column for 1850-2100"
        outCDF.history = "Created " + time.ctime(time.time())
        outCDF.source = "calculateMaxLightEntireArctic.py"

        latitudes.units = "degree_north"
        longitudes.units = "degree_east"
        temp.units = "C"
        temp.standard_name = "Sea surface temperature"
        times.units = "days since 1948-01-01 00:00:00"
        times.calendar = "365_day"
        light.units = "W/m2"
        light.standard_name = "Irradiance"

        times[:] = date2num(allDateObjects, units="days since 1948-01-01 00:00:00", calendar="365_day")
        latitudes[:] = np.squeeze(lats[:, 0])
        longitudes[:] = np.squeeze(lons[0, :])
        temp[:] = np.squeeze(allData[4, :, :, :]) - 273.15
        light[:] = lightData

        lightDataMap = np.ma.masked_invalid(lightDataMap)

        # tempMap = np.ma.masked_where(temp >= 1.0e20, temp)
        # print tempMap
        # for dateindex in xrange(120):
        #     plotMap(lons, lats, np.squeeze(temp[dateindex, :, :]), modelName, scenario, allDateObjects[dateindex])

        timeseriesLight = []
        timeseriesTos = []
        for dateindex in range(len(lightData[:, 0, 0])):
            timeseriesLight.append(np.mean(lightData[dateindex, :, :]))
            timeseriesTos.append(np.mean(temp[dateindex, :, :]))

        tsl = pd.Series(timeseriesLight, allDateObjects)
        tst = pd.Series(timeseriesTos, allDateObjects)

        plotTimeseries(tsl, "light")
        plotTimeseries(tst, "temp")


if __name__ == "__main__":
    main()
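# To run (a sketch based on the compile instructions in the module docstring; assumes the
# input netCDF files and the calculateLightUnderIce Cython module are available locally):
#
#   python setup.py build_ext --inplace        # build the calculateLightUnderIce extension
#   python calculateMaxLightEntireArctic.py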