-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathinterpolate_nsha18_hazard_grid.py
374 lines (277 loc) · 13.2 KB
/
interpolate_nsha18_hazard_grid.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 28 12:25:12 2018
@author: u56903
"""
'''
from tools.oq_tools import return_annualised_haz_curves
from numpy import array, mgrid, linspace, isinf, hstack, interp, log, exp
from misc_tools import dictlist2array
from scipy.interpolate import griddata
from os import path, mkdir
'''
##############################################################################
# some basic functions
##############################################################################
def get_probability_from_percent_chance(percent_chance, investigation_time):
    """Convert a percent chance of exceedance over an investigation time
    into an annual exceedance probability and its return period.

    Assumes a Poisson occurrence model.

    Returns (return_period, probability).
    """
    from numpy import log

    # probability of NON-exceedance over the investigation period
    non_exceedance = 1.0 - percent_chance / 100.
    # Poisson rate over the period, annualised
    annual_rate = -log(non_exceedance) / investigation_time
    return 1.0 / annual_rate, annual_rate
def get_percent_chance_from_return_period(return_period, investigation_time):
    """Convert a return period (years) into the percent chance of
    exceedance over the given investigation time (Poisson assumption).
    """
    from numpy import exp

    # expected number of exceedances over the investigation time
    expected_exceedances = investigation_time / return_period
    # Poisson probability of zero exceedances
    prob_none = exp(-expected_exceedances)
    return 100. * (1. - prob_none)
# flattens a single key value from a list of dictionaries
def dictlist2array(dictList, key):
    """Flatten a single key's values from a list of dictionaries.

    Parameters
    ----------
    dictList : sequence of dict
        Each dict must contain `key`.
    key : hashable
        Key to extract from every dict.

    Returns
    -------
    numpy.ndarray of the extracted values, in input order.

    Raises
    ------
    KeyError if any dict lacks `key` (same as the original loop).
    """
    from numpy import array

    # comprehension replaces the original append loop; identical result
    return array([dl[key] for dl in dictList])
##############################################################################
# parse OpenQuake hazard curve grid files
##############################################################################
def return_annualised_haz_curves(hazcurvefile):
    """Parse an OpenQuake hazard-curve grid CSV and annualise the PoEs.

    Parameters
    ----------
    hazcurvefile : str
        Path to a 'hazard_curve-mean-<period>.csv' file; the spectral
        period token is taken from the file name.

    Returns
    -------
    siteDict : list of dict
        One dict per grid site with 'lon', 'lat', 'depth' and arrays
        '<period>_probs_annual' / '<period>_probs_invtime'.
    imls : numpy.ndarray
        Intensity measure levels parsed from the column headers.
    investigation_time : float
        Investigation time (years) read from the file's metadata line.
    """
    from os import path
    from numpy import array, log, unique, where

    # spectral period token, e.g. '.../hazard_curve-mean-PGA.csv' -> 'PGA'.
    # splitext replaces the original str.strip('.csv'), which strips any of
    # the CHARACTERS '.', 'c', 's', 'v' from the ends rather than the suffix
    period = path.splitext(path.basename(hazcurvefile))[0].split('-')[-1]

    # read the whole file; 'with' guarantees the handle is closed
    with open(hazcurvefile) as f:
        csvlines = f.readlines()

    # investigation time lives in the first (metadata) line
    investigation_time = None
    for header in csvlines[0].split(','):
        if header.strip().startswith('investigation_time'):
            investigation_time = float(header.split('=')[-1])
    if investigation_time is None:
        # originally this fell through to a NameError at first use
        raise ValueError('investigation_time not found in header of ' + hazcurvefile)

    # get intensity measures; column offset of 3 for oq version 3.1 output
    header = csvlines[1].strip().split(',')[3:]
    try:
        # new-style headers: '<iml>:<...>'
        imls = array([float(x.split(':')[0]) for x in header])
        # single-period grid file: every column belongs to this period
        # (the original left imts undefined here, raising a NameError below)
        imts = array([period] * len(imls))
    except ValueError:
        # old-style headers: '<imt>-<iml>' (narrow except replaces bare except)
        imls = []
        imts = []
        for iml in header:
            iml = iml.split('-')
            if len(iml) > 2:
                # exponent split too: ['poe', '1e', '05'] -> '1e-05'
                imls.append(float('-'.join(iml[1:])))
            else:
                imls.append(float(iml[-1]))
            imts.append(iml[0].strip(')').split('(')[-1])
        imls = array(imls)
        imts = array(imts)

    # unique intensity measure types
    uimts = unique(imts)

    # get site data
    siteDict = []
    for line in csvlines[2:]:
        dat = line.split(',')
        tmpdict = {'lon': float(dat[0]), 'lat': float(dat[1]), 'depth': float(dat[2])}
        dat = dat[3:]
        # loop through imts
        for ut in uimts:
            idx = where(imts == ut)[0]
            poe50 = array([float(x) for x in array(dat)[idx]])
            # annualise: P0 = 1 - PoE(T); rate n = -ln(P0); annual = n / T
            P0 = 1 - array(poe50)
            n = -1 * log(P0)
            annual_probs = n / investigation_time
            # NOTE(review): keys are period-based, so a multi-IMT file would
            # overwrite them; each grid file is expected to hold ONE period
            tmpdict[period + '_probs_annual'] = annual_probs
            tmpdict[period + '_probs_invtime'] = poe50
        siteDict.append(tmpdict)

    return siteDict, imls, investigation_time
##############################################################################
# parse hazard grid
##############################################################################
# parse grid file
def prep_hazcurve_grid(hazcurvefile):
    """Parse a hazard-curve grid file and shape the site coordinates for
    spatial interpolation.

    Returns (lons, lats, localities, gridDict, poe_imls,
    investigation_time), where lons/lats are (n, 1) column vectors and
    localities is the (n, 2) array of [lon, lat] pairs expected by
    scipy.interpolate.griddata.
    """
    from numpy import hstack

    gridDict, poe_imls, investigation_time = return_annualised_haz_curves(hazcurvefile)

    # flatten the site coordinates and reshape into column vectors
    lons = dictlist2array(gridDict, 'lon').reshape(-1, 1)
    lats = dictlist2array(gridDict, 'lat').reshape(-1, 1)

    # (n, 2) array of interpolation nodes
    localities = hstack((lons, lats))

    return lons, lats, localities, gridDict, poe_imls, investigation_time
##############################################################################
# interpolate hazard grid for each iml
##############################################################################
# for each IML, grid and interpolate
def interp_hazard_grid(poe_imls, gridDict, localities, interp_lon, interp_lat, period):
    """Spatially interpolate annualised exceedance probabilities to a site.

    Parameters
    ----------
    poe_imls : array of intensity measure levels
    gridDict : list of site dicts from return_annualised_haz_curves
    localities : (n, 2) array of grid [lon, lat] pairs
    interp_lon, interp_lat : length-1 arrays holding the target coordinate
    period : spectral period key, e.g. 'PGA' or 'SA(0.5)'

    Returns
    -------
    numpy.ndarray of interpolated annual PoEs, one per IML.
    """
    from scipy.interpolate import griddata
    from numpy import array, isinf

    # (the original also accumulated an 'IML, Annual PoE' text table here
    # that was never returned or written - dead code, removed)
    interp_poe = []
    for i, iml in enumerate(poe_imls):
        # for each iml, gather the annual PoE at every grid node
        poe = []
        for gd in gridDict:
            if isinf(gd[period + '_probs_annual'][i]):
                # annualisation yields inf when PoE == 1; clamp to a tiny
                # positive value so downstream log-interpolation stays finite
                poe.append(1E-20)
            else:
                poe.append(gd[period + '_probs_annual'][i])
        poe = array(poe)

        # cubic spatial interpolation to the single target site
        interp_poe.append(griddata(localities, poe, (interp_lon, interp_lat), method='cubic')[0])

    return array(interp_poe)
##############################################################################
# interpolate to standard return periods and export
##############################################################################
def interp_hazard_curves(investigation_time, interp_poe, poe_imls, outhazcurve):
    """Interpolate a hazard curve at standard return periods and export it
    to '4.3.1_interp_hazard_curve/<outhazcurve>'.

    Parameters
    ----------
    investigation_time : float, years
    interp_poe : annual PoEs at the site (decreasing with IML)
    poe_imls : intensity measure levels matching interp_poe
    outhazcurve : output CSV file name

    Returns
    -------
    (hazard_levels, return_period_num): the array of interpolated hazard
    values (g) and the numeric return period of the LAST standard period
    (kept as-is for backward compatibility with existing callers).
    """
    from os import path, mkdir
    from numpy import array, exp, log, interp

    # standard return periods for the exported hazard curve
    return_periods = ['100', '250', '475', '500', '800', '1000', '1500', '2000',
                      '2475', '2500', '3000', '5000']

    interphazArray = []
    haztxt = 'RETURN_PERIOD,ANNUAL_PROBABILITY,HAZARD_LEVEL(g)\n'
    for return_period in return_periods:
        # round-trip the return period into an annual probability
        percent_chance = get_percent_chance_from_return_period(float(return_period), investigation_time)
        return_period_num, probability = get_probability_from_percent_chance(percent_chance, investigation_time)

        # log-log interpolation; arrays are reversed so x increases as
        # numpy.interp requires (debug prints removed here)
        interphaz = exp(interp(log(probability), log(interp_poe[::-1]), log(poe_imls[::-1])))

        haztxt += ','.join((return_period, str('%0.4e' % probability), str('%0.4e' % interphaz))) + '\n'
        interphazArray.append(interphaz)

    # make the output folder on first use
    if not path.isdir('4.3.1_interp_hazard_curve'):
        mkdir('4.3.1_interp_hazard_curve')

    # write to file ('with' closes the handle; print form works on py2 and py3)
    print('Writing file: ' + outhazcurve)
    with open(path.join('4.3.1_interp_hazard_curve', outhazcurve), 'w') as f:
        f.write(haztxt)

    return array(interphazArray), array(return_period_num)
##############################################################################
# set some default values here
##############################################################################
# returns annualised hazard curves for all spectral periods
def get_nsha18_haz_curves(interp_lon, interp_lat, siteName):
    """Return NSHA18 annualised hazard curves interpolated to a site, for
    all spectral periods, exporting one CSV per period.

    Parameters
    ----------
    interp_lon, interp_lat : float
        Site coordinates; a positive latitude is negated (Australian
        sites are south of the equator).
    siteName : str
        Used in the exported hazard-curve file names.

    Returns
    -------
    dict mapping each period label (e.g. 'PGA', 'SA(0.2)') to its array
    of hazard levels, plus 'Return Periods' with the common return periods.
    """
    from os import path, getcwd
    from numpy import array, where, diff, hstack
    import warnings
    warnings.filterwarnings("ignore")

    periods = ['PGA', 'SA(0.05)', 'SA(0.1)', 'SA(0.2)', 'SA(0.3)', 'SA(0.5)', 'SA(0.7)',
               'SA(1.0)', 'SA(1.5)', 'SA(2.0)', 'SA(4.0)']

    # grid files only exist on the NAS; fail with a clear message elsewhere
    # (the original fell through to a NameError on gridFolder)
    if getcwd().startswith('/nas'):
        gridFolder = '/nas/active/ops/community_safety/ehp/georisk_earthquake/hazard/NSHA_18/reporting/NSHA18-grids_and_maps/data_pkg/4_data_grids/4.2_hazard_curve_grid_files'
    else:
        raise EnvironmentError('NSHA18 hazard-curve grids are only available under /nas')

    interp_lon = array([interp_lon])
    interp_lat = array([interp_lat])

    # force southern-hemisphere latitude
    if interp_lat[0] >= 0:
        interp_lat[0] *= -1

    haz_curve_dict = {}
    for period in periods:
        hazcurvefile = path.join(gridFolder, 'hazard_curve-mean-' + period + '.csv')
        print('Parsing ' + hazcurvefile)

        # parse data for interpolation
        lons, lats, localities, gridDict, poe_imls, investigation_time = prep_hazcurve_grid(hazcurvefile)

        # get interpolated PoEs
        print('Interpolating ' + period + ' grid...')
        spatial_interp_poe = interp_hazard_grid(poe_imls, gridDict, localities, interp_lon, interp_lat, period)

        # find and strip negative exceedance probabilities
        idx = where(spatial_interp_poe >= 0.0)[0]
        spatial_interp_poe = spatial_interp_poe[idx]
        poe_imls = poe_imls[idx]

        # NOTE(review): this monotonicity index is computed but never applied
        # here (cf. get_nsha18_uhs, which does use it) - kept for parity;
        # confirm whether it should filter spatial_interp_poe/poe_imls
        idx = where(diff(spatial_interp_poe[::-1]) >= 0.0)[0]
        idx = idx + 1
        idx = hstack((array([0]), idx))

        # set filename
        outhazcurve = '_'.join(('hazard_curve-mean-' + period,
                                str(interp_lon[0]) + 'E', str(abs(interp_lat[0])) + 'S', siteName + '.csv'))

        # interp hazard curves to common probabilities and export
        interphazArray, return_period_num = interp_hazard_curves(investigation_time, spatial_interp_poe, poe_imls, outhazcurve)
        haz_curve_dict[period] = interphazArray

    haz_curve_dict['Return Periods'] = array([100, 250, 475, 500, 800, 1000, 1500, 2000,
                                              2475, 2500, 3000, 5000])
    return haz_curve_dict
##############################################################################
# set some default values here
##############################################################################
def get_nsha18_uhs(interp_lon, interp_lat, percent_chance, investigation_time, siteName):
    """Compute a uniform hazard spectrum (UHS) from the NSHA18 grids for a
    site and exceedance probability, and export it to
    '4.3.2_interp_uhs/<name>.csv'.

    Parameters
    ----------
    interp_lon, interp_lat : float
        Site coordinates; a positive latitude is negated.
    percent_chance : float
        Percent chance of exceedance (e.g. 10.).
    investigation_time : float
        Years (e.g. 50.).  NOTE(review): rebound from each grid file's
        header inside the loop - confirm this shadowing is intended.
    siteName : str
        Used in the output file name.

    Returns
    -------
    (periods, plt_periods, sa_values): period labels, numeric periods (s),
    and interpolated spectral accelerations (g).
    """
    from os import path, mkdir, getcwd
    from numpy import array, diff, exp, log, interp, where, hstack
    import warnings
    warnings.filterwarnings("ignore")

    periods = ['PGA', 'SA(0.05)', 'SA(0.1)', 'SA(0.2)', 'SA(0.3)', 'SA(0.5)', 'SA(0.7)',
               'SA(1.0)', 'SA(1.5)', 'SA(2.0)', 'SA(4.0)']
    # numeric periods (seconds) matching the labels above
    plt_periods = [0, 0.05, 0.1, 0.2, 0.3, 0.5, 0.7, 1.0, 1.5, 2.0, 4.0]

    # grid files only exist on the NAS; fail with a clear message elsewhere
    # (the original fell through to a NameError on gridFolder)
    if getcwd().startswith('/nas'):
        gridFolder = '/nas/active/ops/community_safety/ehp/georisk_earthquake/hazard/NSHA_18/reporting/NSHA18-grids_and_maps/data_pkg/4_data_grids/4.2_hazard_curve_grid_files'
    else:
        raise EnvironmentError('NSHA18 hazard-curve grids are only available under /nas')

    interp_lon = array([interp_lon])
    interp_lat = array([interp_lat])

    # force southern-hemisphere latitude
    if interp_lat[0] >= 0:
        interp_lat[0] *= -1

    sa_values = []
    for period in periods:
        hazcurvefile = path.join(gridFolder, 'hazard_curve-mean-' + period + '.csv')

        # parse data for interpolation
        lons, lats, localities, gridDict, poe_imls, investigation_time = prep_hazcurve_grid(hazcurvefile)

        # get interpolated PoEs
        print('Interpolating ' + period + ' grid...')
        spatial_interp_poe = interp_hazard_grid(poe_imls, gridDict, localities, interp_lon, interp_lat, period)

        # get interpolation probability
        return_period, probability = get_probability_from_percent_chance(percent_chance, investigation_time)

        # find and strip negative exceedance probabilities
        idx = where(spatial_interp_poe >= 0.0)[0]
        spatial_interp_poe = spatial_interp_poe[idx]
        poe_imls = poe_imls[idx]

        # keep only non-decreasing PoEs (in reversed order) so the log-log
        # interpolation below gets a monotonic x-array
        idx = where(diff(spatial_interp_poe[::-1]) >= 0.0)[0]
        idx = idx + 1
        idx = hstack((array([0]), idx))

        # interpolate the SA value for the return period of interest
        sa_values.append(exp(interp(log(probability), log(spatial_interp_poe[::-1][idx]), log(poe_imls[::-1][idx]))))

    # set UHS header
    uhstxt = '1/'+str(round(return_period, 1))+'-YEAR UNIFORM HAZARD SPECTRA FOR SITE, LON: '+str(interp_lon[0])+', LAT: '+str(interp_lat[0]) + '\n'
    uhstxt += 'PERIOD,SA(g)\n'

    # make remaining text
    for t, sa in zip(plt_periods, sa_values):
        uhstxt += ','.join((str(t), str('%0.4e' % sa))) + '\n'

    # make the output folder on first use
    if not path.isdir('4.3.2_interp_uhs'):
        mkdir('4.3.2_interp_uhs')

    # set filename
    outuhsfile = '_'.join(('uhs-mean-' + str(round(return_period, 1)),
                           str(interp_lon[0]) + 'E', str(abs(interp_lat[0])) + 'S', siteName + '.csv'))

    # write to file ('with' closes the handle; print form works on py2 and py3)
    print('Writing file: ' + outuhsfile)
    with open(path.join('4.3.2_interp_uhs', outuhsfile), 'w') as f:
        f.write(uhstxt)

    return array(periods), array(plt_periods), array(sa_values)