asdf_rf_calc_MPI.py
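"""
asdf_rf_calc_MPI.py

Compute P-wave receiver functions from an ASDF earthquake waveform dataset.

For each station, the three-component event waveforms (tagged 'unproc_quake')
are grouped by event, bandpass filtered, and deconvolved with the rf package;
the resulting receiver functions are written to a new ASDF file under the tag
'receiver_function'. pyasdf's ASDFDataSet.process() distributes stations
across MPI ranks when the script is launched with mpirun (see the comment
above the process() call below).
"""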
import pyasdf
from os.path import join, exists
from os import remove as rm
import matplotlib.pyplot as plt
from rf import RFStream, rfstats
from rf.imaging import plot_profile_map
from rf.profile import get_profile_boxes
import numpy as np
import glob
from collections import defaultdict
import time
code_start_time = time.time()
# =========================== User Input Required =========================== #
# Path to the data
data_path = '/g/data/ha3/'
# IRIS virtual network name
virt_net = '_ANU'
# FDSN network identifier (2 characters)
FDSNnetwork = 'AA'
# =========================================================================== #
# ASDF earthquake waveform file (HDF5); one file per network
ASDF_in = join(data_path, virt_net, FDSNnetwork, 'ASDF', FDSNnetwork + '_quakes' + '.h5')
ASDF_out = join(data_path, virt_net, FDSNnetwork, 'ASDF', FDSNnetwork + '_RF' + '.h5')
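# With the defaults above, ASDF_in resolves to
# /g/data/ha3/_ANU/AA/ASDF/AA_quakes.h5 and ASDF_out to
# /g/data/ha3/_ANU/AA/ASDF/AA_RF.h5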
# remove the output if it exists
if exists(ASDF_out):
    rm(ASDF_out)
# Open the ASDF file
ds = pyasdf.ASDFDataSet(ASDF_in)
# Get the event catalogue (an ObsPy Catalog)
event_cat = ds.events

# Debugging aid: look up a particular event in the catalogue
# for event in event_cat:
#     if '3329830' in str(event.resource_id):
#         print(event.resource_id)
def process_RF(st, inv):
    station_name = st[0].stats.station
    all_stn_RF = RFStream()

    # Build a dictionary mapping each referred event ID to the indices of the
    # traces (ideally one per component) recorded for that event
    event_dict = defaultdict(list)
    for _i, tr in enumerate(st):
        event_dict[tr.stats.asdf.event_ids[0]].append(_i)

    for event_key in event_dict:
        if len(event_dict[event_key]) != 3:
            print('Not enough components')
            continue

        # Make sure the referred event ID matches for all three traces
        ref_events = [st[idx].stats.asdf.event_ids[0] for idx in event_dict[event_key]]
        if not all(x == ref_events[0] for x in ref_events):
            print("Events are not the same")
            continue

        rf_stream = RFStream(traces=[st[idx] for idx in event_dict[event_key]])

        stats = None
        found_event = False
        for event in event_cat:
            if event.resource_id == ref_events[0]:
                found_event = True
                stats = rfstats(station=inv[0][0], event=event, phase='P', dist_range=(30, 90))
                break  # event IDs are unique, so stop at the first match
        if not found_event:
            print('Event not in Catalogue')

        # rfstats() returns None if the epicentral distance of the earthquake
        # falls outside dist_range
        if stats is not None:
            for tr in rf_stream:
                tr.stats.update(stats)
            rf_stream.filter('bandpass', freqmin=0.05, freqmax=1.)
            rf_stream.rf(method='P', trim=(-10, 30), downsample=50, deconvolve='time')
            all_stn_RF.extend(rf_stream)

    # all_stn_RF.sort(keys=['distance'])
    all_stn_RF.select(station=station_name, component='Q').plot_rf(fillcolors=('k', 'gray'))
    return all_stn_RF
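# ds.process() applies process_RF station by station: waveforms tagged
# 'unproc_quake' go in, and the returned stream is written to ASDF_out under
# the tag 'receiver_function'. When launched under MPI (e.g.
# "mpirun -n 8 python asdf_rf_calc_MPI.py"), pyasdf distributes the stations
# across ranks, which is presumably the reason for the _MPI suffix in the
# file name.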
ds.process(process_function=process_RF, output_filename=ASDF_out, tag_map={'unproc_quake': 'receiver_function'})
# Releasing the dataset closes the underlying HDF5 file
del ds

print('\n')
print("--- Execution time: %s seconds ---" % (time.time() - code_start_time))