Skip to content
Snippets Groups Projects
Commit 1dbb4df0 authored by doetschj's avatar doetschj
Browse files

Bug fixes and change in input file.

Magnitudes are now calculated around the predicted P-wave arrival rather than the pick, using a window of plus/minus the difference between the P- and S-wave traveltimes.
parent f716c079
No related branches found
No related tags found
No related merge requests found
......@@ -3,47 +3,47 @@
General:
project_name : Elegancy_CSD
project_location : Mt_Terri
operator : ['L. Villiger','J. Doetsch','S. Demir']
operator : ['J. Doetsch','S. Demir']
sensor_count : 32
sensor_coords : [ # in local coordinate system; x=East, y=North, z=elevation
0,0,0,
8.652,84.869,2.870,
16.187,98.370,2.841,
23.103,110.886,2.747,
30.700,125.024,2.546,
42.158,135.681,3.575,
71.148,135.037,2.476,
71.169,125.102,2.192,
70.764,114.158,2.429,
70.569,104.196,2.495,
67.306,93.020,3.403,
71.162,122.331,16.367,
70.716,104.681,16.150,
67.236,91.771,16.019,
70.069,77.752,16.315,
57.553,96.126,-11.814,
50.997,96.164,-19.365,
54.114,96.061,-8.701,
37.572,96.071,-19.943,
57.522,111.956,-12.363,
50.805,111.951,-19.771,
49.318,111.967,-11.975,
37.742,111.972,-19.848,
68.599,107.806,4.081,
68.565,105.806,4.115,
68.530,103.807,4.150,
68.496,101.807,4.184,
22.974,110.826,2.791,
42.048,135.768,3.571,
71.237,134.959,2.515,
70.852,114.095,2.480,
67.279,92.885,3.421,]
20.838,69.465,13.148,
19.781,70.950,13.195,
18.655,72.337,13.163,
17.638,73.809,13.209,
16.641,75.305,13.232,
15.495,76.710,13.245,
14.503,78.192,13.262,
13.547,79.520,13.338,
10.478,77.285,13.287,
11.507,75.913,13.226,
12.605,74.466,13.232,
13.574,72.967,13.249,
14.598,71.448,13.186,
15.745,70.015,13.207,
16.707,68.573,13.196,
17.676, 67.099, 13.218,
18.790, 65.669, 13.185,
7.9388,76.7132,-5.4494,
9.76492,74.75961,-2.4747,
11.581,72.81184,0.5099,
12.03274,72.32524,1.2576,
15.707958,72.294242,-20.5874,
15.809368,72.223778,-16.5893,
15.903385,72.162778,-12.5909,
15.986165,72.104954,-8.5922,
21.792, 67.946, 13.152,
0,0,0,
0,0,0,
0,0,0,
0,0,0,
0,0,0,]
active_trigger_channel : 1
origin_ch1903_east : 667400.0 # ch1903 easting origin of local coordinate system
origin_ch1903_north : 158800.0 # ch1903 northing of local coordinate system
origin_elev : 1730.0 # elevation of origin of local coordinate system
origin_ch1903_east : 579300.0 # ch1903 easting origin of local coordinate system
origin_ch1903_north : 247500.0 # ch1903 northing of local coordinate system
origin_elev : 500.0 # elevation of origin of local coordinate system
stats:
network : 'GR'
network : 'XM'
Acquisition:
hardware_settings:
......@@ -53,18 +53,18 @@ Acquisition:
data_folder_tmp : 'e:/20181218_sw_test/tmp/'
compression : 'gzip-3' # default: 'gzip-3', 'gzip-0' to 'gzip-9' or None
file_length_sec : 60.0 # in seconds
station_naming : [0,2,4,6,8,10,12,14,1,3,5,7,9,11,13,15,16,18,20,22,24,26,28,30,17,19,21,23,25,27,29,31]
station_naming : [1, 9, 2, 10, 3, 11, 4, 12, 5, 13, 6, 14, 7, 15, 8, 16, 17, 25, 18, 26, 19, 27, 20, 28, 21, 29, 22, 30, 23, 31, 24, 32]
Trigger:
asdf_folder : raw
channels : [1,16,17,18,19,20,21,22,23]
asdf_folder : 'raw'
channels : [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27]
overlap : 0.05
bandpass_f_min : 1000
bandpass_f_max : 12000
algorithm : 'recstalta' #other option: 'threshold'
coincidence : 4
coincidence : 10
sta_lta:
threshold_on : 5.0
threshold_on : 8.0
threshold_off : 2.0
st_window : 70 #in samples
lt_window : 700 #in samples
......@@ -74,34 +74,35 @@ Trigger:
# threshold : 100.0 # in miliVolt
classification:
spread_int : 1e-4
noise_vis : 'False'
noise_vis_interval : 10
noise_vis : 'True'
noise_vis_interval : 60 # in seconds
Processing:
vp : 5000.0 # velocity in m/s
vs : 2800.0 # velocity in m/s
vp : 2600.0 # velocity in m/s
vs : 2000.0 # velocity in m/s
bandpass_f_min : 1000
bandpass_f_max : 12000
Picking:
algorithm : 'sta_lta'
sta_lta:
threshold_on : 5.0
threshold_on : 8.0
threshold_off : 2.0
st_window : 70 #in samples #2.1
lt_window : 700 #in samples #4.2
Locate:
algorithm : 'hom_aniso' # other options: hom_aniso, nonlinloc
algorithm : 'hom_iso' # other options: hom_iso, hom_aniso, nonlinloc
min_picks : 6
damping : 0.01
hom_aniso:
epsilon : 0.0 # Thomsen epsilon
vp_min : 2400.0
epsilon : 0.263 # Thomsen epsilon
delta : 0.0 # Thomsen delta
azi : 290.0 # Azimuth of anisotropy vector in degrees
inc : 30.0 # Inclination of anisotropy vector in degrees
azi : 295.0 # Azimuth of anisotropy vector in degrees
inc : 38.0 # Inclination of anisotropy vector in degrees
Magnitude:
algorithm : 'relative'
......
This diff is collapsed.
......@@ -31,64 +31,18 @@ def event_processing(param, load_file, trig_time, event_id, classification):
# event.bandpass()
event.pick()
event.locate()
# event.est_magnitude()
event.est_magnitude()
event.event_plot(save_fig=True)
# event.prparam['Locate']['algorithm'] = 'hom_iso'
# event.locate()
# #event.est_magnitude()
# #event.est_magnitude(origin_number=1)
# event.classification = 'active'
# event.event_plot(save_fig=True)
# classification = 'passive'
#event.prparam['Locate']['algorithm'] = 'hom_aniso'
#event.locate()
#event.est_magnitude()
#event.est_magnitude(origin_number=1)
#event.classification = 'active'
#event.event_plot(save_fig=True)
#classification = 'passive'
if classification == 'passive':
event.write('%s/event%s.xml' % (param['Processing']['Folders']['quakeml_folder'], event_id), 'quakeml', validate=True)
elif classification == 'active':
event.write('%s/hammer%s.xml' % (param['Processing']['Folders']['quakeml_folder'], event_id), 'quakeml',
validate=True)
#
# """
# Processing module of DUG-Seis
#
#
# # Copyright (c) 2018 by SCCER-SoE and SED at ETHZ
#
# Version 0.0, 23.10.2018, Joseph Doetsch (doetschj)
# 23.10.2018, Semih Demir
# 23.10.2018, Linus Villiger
#
#
# """
#
#
# import logging
# from dug_seis.processing.event import Event
# from dug_seis.processing.get_waveforms import get_waveforms
#
#
# def event_processing(param, load_file, trig_time, event_id, classification):
# logging.debug('Starting processing for event %d' % event_id)
# wf_stream = get_waveforms(param, load_file, trig_time)
#
# event = Event(param, wf_stream, event_id, classification)
# if classification == 'noise':
# event.event_plot(save_fig=True)
# elif classification == 'electronic':
# event.bandpass()
# event.event_plot(save_fig=True)
# else:
# #event.bandpass()
# event.pick()
# #event.locate()
# #event.est_magnitude()
# #event.prparam['Locate']['algorithm'] = 'hom_iso'
# #event.locate()
# #event.est_magnitude()
#
# event.event_plot(save_fig=True)
# if classification == 'passive':
# event.write('%s/event%s.xml' % (param['Processing']['Folders']['quakeml_folder'], event_id), 'quakeml', validate=True)
# elif classification == 'active':
# event.write('%s/hammer%s.xml' % (param['Processing']['Folders']['quakeml_folder'], event_id), 'quakeml',
# validate=True)
#
\ No newline at end of file
......@@ -12,15 +12,15 @@ Version 0.0, 23.10.2018, Joseph Doetsch (doetschj)
from obspy import Stream
import pyasdf
def get_waveforms(param, load_file, trig_time):
aparam=param['Acquisition']
gparam=param['General']
aparam = param['Acquisition']
gparam = param['General']
asdf_folder = param['Trigger']['asdf_folder']
print(gparam['stats']['network'] + '_001')
#print(gparam['stats']['network'] + '_001')
start_time = trig_time - param['Trigger']['starttime']
end_time = trig_time + param['Trigger']['endtime']
ch_in = range(1, gparam['sensor_count' ] +1)
r = range(len(ch_in))
if len(load_file) == 1: # load 2 snippets if event in overlap otherwise load 1
......@@ -33,8 +33,6 @@ def get_waveforms(param, load_file, trig_time):
endtime=end_time,
tag="raw_recording")
else:
ds1 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[0], mode='r')
ds2 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[1], mode='r') #GR_001
......@@ -66,75 +64,5 @@ def get_waveforms(param, load_file, trig_time):
tag="raw_recording")
return wf_stream
# """
# Load waveform data for processing
#
#
# # Copyright (c) 2018 by SCCER-SoE and SED at ETHZ
#
# Version 0.0, 23.10.2018, Joseph Doetsch (doetschj)
# 23.10.2018, Semih Demir
#
# """
#
# from obspy import Stream
# import pyasdf
#
# def get_waveforms(param, load_file, trig_time):
# aparam=param['Acquisition']
# gparam=param['General']
# asdf_folder = param['Trigger']['asdf_folder']
#
# start_time = trig_time - param['Trigger']['starttime']
# end_time = trig_time + param['Trigger']['endtime']
#
#
# ch_in = range(1, gparam['sensor_count' ] +1)
# r = range(len(ch_in))
# if len(load_file) == 1: # load 2 snippets if event in overlap otherwise load 1
# wf_stream = Stream()
# ds = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[0], mode='r')
# for k in r:
# wf_stream += ds.get_waveforms(network=aparam['stats']['network'], station=str(int(ch_in[k])).zfill(3), location=aparam['stats']['location'],
# channel=aparam['stats']['channel'],
# starttime=start_time,
# endtime=end_time,
# tag="raw_recording")
#
#
#
# else:
# ds1 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[0], mode='r')
# ds2 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[1], mode='r')
# if end_time > ds2.waveforms.GR_001.raw_recording.traces[0].stats["starttime"]:
# sta_p1 = Stream()
# sta_p2 = Stream()
# for k in r:
# sta_p1 += ds1.get_waveforms(network=aparam['stats']['network'], station=str(int(ch_in[k])).zfill(3),
# location=aparam['stats']['location'],
# channel=aparam['stats']['channel'],
# starttime=start_time,
# endtime=ds1.waveforms.aparam['stats']['network'] + '_' +
# aparam['stats']['channel'].raw_recording.traces[0].stats["endtime"],
# tag="raw_recording")
# sta_p2 += ds2.get_waveforms(network=aparam['stats']['network'], station=str(int(ch_in[k])).zfill(3),
# location=aparam['stats']['location'],
# channel=aparam['stats']['channel'],
# starttime=ds2.waveforms.print(
# aparam['stats']['network'] + '_' + aparam['stats'][
# 'channel']).raw_recording.traces[0].stats["starttime"],
# endtime=end_time,
# tag="raw_recording")
# wf_stream = sta_p1 + sta_p2
# wf_stream.merge(method=1, interpolation_samples=0)
#
# else:
# wf_stream = Stream()
# for k in r:
# wf_stream += ds1.get_waveforms(network=aparam['stats']['network'], station=str(int(ch_in[k])).zfill(3), location=aparam['stats']['location'],
# channel=aparam['stats']['channel'],
# starttime=start_time,
# endtime=end_time,
# tag="raw_recording")
# return wf_stream
......@@ -109,133 +109,8 @@ def processing(param):
processed_files.append(current_file)
else:
flist = sorted([f for f in os.listdir(asdf_folder) if f.endswith('.h5')])
new_files = [f for f in flist if not f in processed_files]
logging.info('adding new files')
new_files = [f for f in flist if f not in processed_files]
if not len(new_files):
logging.info('Waiting for new files.')
time.sleep(1)
logging.info('sleeping')
"""
Trigger and processing module of DUG-Seis
# Copyright (c) 2018 by SCCER-SoE and SED at ETHZ
Version 0.0, 23.10.2018, Joseph Doetsch (doetschj)
23.10.2018, Semih Demir
23.10.2018, Linus Villiger
"""
#
# import logging
# from obspy import Stream
# import os
# import time
# import pyasdf
# from dug_seis.processing.dug_trigger import dug_trigger
# from dug_seis.processing.event_processing import event_processing
#
#
# def processing(param):
# tparam = param['Trigger']
# logging.info('Starting trigger module')
#
# # create folders for processing
# if not os.path.exists(param['Processing']['Folders']['quakeml_folder']):
# os.makedirs(param['Processing']['Folders']['quakeml_folder'])
# if not os.path.exists(param['Processing']['Folders']['plot_folder_active']):
# os.makedirs(param['Processing']['Folders']['plot_folder_active'])
# if not os.path.exists(param['Processing']['Folders']['plot_folder_passive']):
# os.makedirs(param['Processing']['Folders']['plot_folder_passive'])
# if not os.path.exists(param['Processing']['Folders']['plot_folder_electronic']):
# os.makedirs(param['Processing']['Folders']['plot_folder_electronic'])
# if tparam['noise_vis'] and not os.path.exists(param['Processing']['Folders']['noise_vis_folder']):
# os.makedirs(param['Processing']['Folders']['noise_vis_folder'])
#
# # Initialisation
# asdf_folder = tparam['asdf_folder'] # 'raw' # Location of .h5 file
# stations = [i - 1 for i in tparam['channels']]
# if param['General']['active_trigger_channel'] and param['General']['active_trigger_channel']-1 not in stations:
# stations.insert(0,param['General']['active_trigger_channel']-1)
# stations.sort()
# event_nr = 0
# event_nr_s = []
# next_noise_vis = 0
# # load list of ASDF snippets in folder
# sta_overlap = Stream()
# new_files = sorted([f for f in os.listdir(asdf_folder) if f.endswith('.h5')]) # generates a list of the .asdf files in asdf_folder
#
# processed_files = []
# while 1:
# if len(new_files):
# # print(new_files)
# current_file = new_files.pop(0)
# logging.info('Working on file ' + current_file)
# sta = Stream()
#
# ds = pyasdf.ASDFDataSet(asdf_folder + '/' + current_file, mode='r')
#
# wf_list = ds.waveforms.list()
# for k in stations:
# sta += ds.waveforms[wf_list[k]].raw_recording
# sta_copy = sta.copy()
# sta_total = sta_copy + sta_overlap
#
# # Use overlap statement for merging
# for tr in sta_total:
# tr.stats.delta = sta_total[0].stats.delta
# sta_total.merge(method=1, interpolation_samples=0)
#
# logging.debug(sta_total)
# trigger_out, event_nr = dug_trigger(sta_total, tparam, event_nr, event_nr_s) # run trigger function
#
# overlap = tparam['starttime'] + tparam['endtime'] #+ tparam['endtime']['sta_lta']['lt_window']/sta_total.stats.sampling_rate
#
# t_end = sta.traces[0].stats["endtime"]
# sta_overlap = sta.trim(t_end - overlap, t_end)
#
# #trigger_out = trigger_out[trigger_out['Classification'] == 'passive']
# logging.debug(trigger_out)
#
# event_id = [i for i in trigger_out['Event_id']]
# classification = [i for i in trigger_out['Classification']]
#
#
# trig_time=[i for i in trigger_out['Time'][trigger_out['Time'] <t_end-overlap]] #do not store times in overlap
# for l in trig_time: #load 2 snippets if event in overlap
# if l< sta_copy.traces[0].stats["starttime"]+param['Trigger']['starttime']:
# load_file = [processed_files[-1], current_file]
# else:
# load_file = [current_file]
#
# for l in range(0, len(trig_time)):
# event_processing(param, load_file, trig_time[l], event_id[l], classification[l]) # run processing for each event
#
# # noise visualization
# if tparam['noise_vis']=='True':
# if next_noise_vis == 0:
# next_noise_vis = sta_copy.traces[0].stats["starttime"]+param['Trigger']['starttime']#start time of current file+starttime
# while next_noise_vis < sta_copy.traces[0].stats["endtime"]:#end of current file:
# if next_noise_vis< sta_copy.traces[0].stats["starttime"] + param['Trigger']['starttime']:
# load_file = [processed_files[-1], current_file]
# else:
# load_file = [current_file]
# event_processing(param, load_file, next_noise_vis, 0, 'noise') # start processing to visualize noise
# next_noise_vis += tparam['noise_vis_interval']
#
# processed_files.append(current_file)
#
# else:
#
# flist = sorted([f for f in os.listdir(asdf_folder) if f.endswith('.h5')])
# new_files = [f for f in flist if not f in processed_files]
# logging.info('adding new files')
# if not len(new_files):
# time.sleep(1)
# logging.info('sleeping')
#
#
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment