Commit 62551a44 authored by doetschj

merge with master

parents 2c4da0ab b8c788e1
......@@ -46,12 +46,13 @@ def cli(ctx, cfg, verbose, mode, log):
"""
# kill leftover celery workers
#os.system("pkill -9 - f 'celery worker'")
#os.system("pkill -9 -f 'celery worker'")
os.system("pkill -9 -f 'redis-server'")
# Setup logging. By default we log to stdout with ERROR level and to a log
# file (if specified) with INFO level. Setting verbose logs to both
# handlers with DEBUG level.
logger = logging.getLogger('dug-seis')
logger.setLevel(logging.DEBUG)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s %(levelname)-7s %(message)s')
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG if verbose else logging.INFO)
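For context, a minimal sketch of the two-handler layout the comment describes (quiet console, fuller log file, `--verbose` raising both to DEBUG); the `setup_logging` helper and its signature are illustrative, not the project's actual code:

```python
import logging

def setup_logging(verbose=False, log_file=None):
    # The named logger stays permissive; the handlers do the filtering.
    logger = logging.getLogger('dug-seis')
    logger.setLevel(logging.DEBUG if verbose else logging.INFO)
    formatter = logging.Formatter('%(asctime)s %(levelname)-7s %(message)s')

    # Console: quiet (ERROR) by default, everything when verbose.
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG if verbose else logging.ERROR)
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    # Optional log file records the fuller INFO/DEBUG history.
    if log_file:
        fh = logging.FileHandler(log_file)
        fh.setLevel(logging.DEBUG if verbose else logging.INFO)
        fh.setFormatter(formatter)
        logger.addHandler(fh)
    return logger
```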
......
......@@ -238,7 +238,7 @@ class Event(ObsPyEvent):
trig_ch = []
tobs = []
for i in self.picks:
if self.param['General']['sensor_coords'][int(i.waveform_id['station_code'])-1, 0]:
if np.max(np.abs(self.param['General']['sensor_coords'][int(i.waveform_id['station_code'])-1, :])):
trig_ch.append(int(i.waveform_id['station_code']) - 1)
tobs.append((i.time - self.wf_stream.traces[0].stats["starttime"]) * 1000)
npicks = len(tobs)
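The rewritten condition treats a channel as usable when *any* of its three coordinates is nonzero, rather than only its first one; a sketch with hypothetical coordinates shows the case the old test got wrong:

```python
import numpy as np

# Hypothetical sensor_coords rows (x, y, z); station 2 lies on the x = 0 plane.
sensor_coords = np.array([[10.0, 5.0, 2.0],    # station 1
                          [ 0.0, 7.5, 3.0],    # station 2: defined, but x == 0
                          [ 0.0, 0.0, 0.0]])   # station 3: unused channel

station_code = 2
old_ok = bool(sensor_coords[station_code - 1, 0])                  # False: wrongly skipped
new_ok = bool(np.max(np.abs(sensor_coords[station_code - 1, :])))  # True: kept
print(old_ok, new_ok)
```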
......
......@@ -16,10 +16,8 @@ import logging
def get_waveforms(param, load_file, trig_time):
aparam = param['Acquisition']
gparam = param['General']
asdf_folder = param['Trigger']['asdf_folder']
#print(gparam['stats']['network'] + '_001')
start_time = trig_time - param['Trigger']['starttime']
end_time = trig_time + param['Trigger']['endtime']
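The snippet window brackets the trigger time; assuming `trig_time` is an ObsPy `UTCDateTime` and the `Trigger` offsets are in seconds, the arithmetic works out as in this sketch (values are made up):

```python
from obspy import UTCDateTime

# Illustrative numbers: a trigger at 12:00:00.000 with 0.05 s pre- and
# 0.15 s post-trigger offsets (param['Trigger']['starttime'/'endtime']).
trig_time = UTCDateTime(2019, 3, 1, 12, 0, 0)
start_time = trig_time - 0.05   # 2019-03-01T11:59:59.950000Z
end_time = trig_time + 0.15     # 2019-03-01T12:00:00.150000Z
```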
......@@ -36,6 +34,7 @@ def get_waveforms(param, load_file, trig_time):
if len(load_file) == 1: # load 2 snippets if event in overlap otherwise load 1
wf_stream = Stream()
ds = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[0], mode='r')
for k in r:
wf_stream += ds.get_waveforms(network=gparam['stats']['network'], station=str(int(ch_in[k])).zfill(3), location='00',
channel='001',
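For readers unfamiliar with pyasdf, a sketch of the retrieval pattern used in this loop; the file name, network code `GR`, times, and channel list are placeholders:

```python
import pyasdf
from obspy import Stream, UTCDateTime

# Station codes are zero-padded to three digits, matching
# str(int(ch_in[k])).zfill(3) in the loop above.
ds = pyasdf.ASDFDataSet('asdf_folder/snippet_0001.h5', mode='r')
wf_stream = Stream()
for ch in (1, 2, 3):
    wf_stream += ds.get_waveforms(network='GR', station=str(ch).zfill(3),
                                  location='00', channel='001',
                                  starttime=UTCDateTime(2019, 3, 1, 12, 0, 0),
                                  endtime=UTCDateTime(2019, 3, 1, 12, 0, 1),
                                  tag='raw_recording')
```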
......@@ -43,51 +42,51 @@ def get_waveforms(param, load_file, trig_time):
endtime=end_time,
tag="raw_recording")
if tparam['Gainrange'] == 'YAML':
for k in stations:
for k in r:
# print(np.amax(np.absolute(wf_stream.traces[k].data)))
wf_stream.traces[k].data = wf_stream.traces[k].data / 32768 * \
param['Acquisition']['hardware_settings']['gain_selection'][k]
logging.info('Gain range event retrieved from YAML file')
else:
ds1 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[0], mode='r')
ds2 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[1], mode='r') #GR_001
if end_time > ds2.waveforms.aparam['stats']['network']+'_001'.raw_recording.traces[0].stats["starttime"]:
if end_time > ds2.waveforms[gparam['stats']['network'] + '_001'].raw_recording.traces[0].stats["starttime"]:
sta_p1 = Stream()
sta_p2 = Stream()
for k in r:
sta_p1 += ds1.get_waveforms(network=gparam['stats']['network'], station=str(int(ch_in[k])).zfill(3), location='00',
channel='001',
starttime=start_time,
endtime=ds1.waveforms.gparam['stats']['network']+'_'+'001'.raw_recording.traces[0].stats["endtime"],
endtime=ds1.waveforms[gparam['stats']['network'] + '_001'].raw_recording.traces[0].stats["endtime"],
tag="raw_recording")
sta_p2 += ds2.get_waveforms(network=gparam['stats']['network'], station=str(int(ch_in[k])).zfill(3), location='00',
channel='001',
starttime=ds2.waveforms.print(gparam['stats']['network'] + '_' +'001').raw_recording.traces[0].stats["starttime"],
starttime=ds2.waveforms[gparam['stats']['network'] + '_001'].raw_recording.traces[0].stats["starttime"],
endtime=end_time,
tag="raw_recording")
wf_stream = sta_p1 + sta_p2
wf_stream.merge(method=1, interpolation_samples=0)
if tparam['Gainrange'] == 'YAML':
for k in stations:
for k in r:
wf_stream.traces[k].data = wf_stream.traces[k].data / 32768 * \
param['Acquisition']['hardware_settings']['gain_selection'][k]
logging.info('Gain range event retrieved from YAML file')
else:
wf_stream = Stream()
if ds1.waveforms[gparam['stats']['network'] + '_001'].raw_recording.traces[0].stats["endtime"] < end_time:
end_time = ds1.waveforms[gparam['stats']['network'] + '_001'].raw_recording.traces[0].stats["endtime"]
for k in r:
wf_stream += ds1.get_waveforms(network=aparam['stats']['network'], station=str(int(ch_in[k])).zfill(3), location='00',
wf_stream += ds1.get_waveforms(network=gparam['stats']['network'], station=str(int(ch_in[k])).zfill(3), location='00',
channel='001',
starttime=start_time,
endtime=end_time,
tag="raw_recording")
if tparam['Gainrange'] == 'YAML':
for k in stations:
for k in r:
wf_stream.traces[k].data = wf_stream.traces[k].data / 32768 * \
param['Acquisition']['hardware_settings']['gain_selection'][k]
logging.info('Gain range event retrieved from YAML file')
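The gain correction repeated in each branch divides the raw counts by 2**15 (the full scale of a signed 16-bit sample) and multiplies by the per-channel gain range from the YAML file, presumably yielding physical amplitudes; a self-contained sketch with made-up numbers:

```python
import numpy as np

gain_selection = [10.0, 10.0, 5.0]        # hypothetical per-channel gain ranges
raw = np.array([16384, -32768, 8192])     # raw ADC counts for one trace
scaled = raw / 32768 * gain_selection[0]  # -> [  5. , -10. ,   2.5]
```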
......
......@@ -20,7 +20,6 @@ import pyasdf
from dug_seis.processing.dug_trigger import dug_trigger
from dug_seis.processing.event_processing import event_processing
import re
import numpy as np
def processing(param):
......@@ -28,13 +27,14 @@ def processing(param):
logger.info('Starting trigger module')
tparam = param['Trigger']
if param['Processing']['parallel_processing']:
from dug_seis.processing.celery_tasks import event_processing_celery
if sys.platform == 'win32':
os.environ.setdefault('FORKED_BY_MULTIPROCESSING', '1')
os.system('start redis-server > redis.log')
os.system('start celery -A dug_seis worker --loglevel=debug --concurrency=%i > celery.log' % param['Processing']['number_workers'])
else:
os.system('redis-server > redis.log &')
os.system('celery -A dug_seis worker --loglevel=debug --concurrency=%i &> celery.log &' %param['Processing']['number_workers'])
from dug_seis.processing.celery_tasks import event_processing_celery
# create folders for processing
if not os.path.exists(param['Processing']['Folders']['quakeml_folder']):
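The `event_processing_celery` import now happens only after Redis and the workers have been started. As background, a minimal, hypothetical sketch of the Celery wiring such a task implies; the broker URL and the task body are placeholders, not the project's actual `celery_tasks` module:

```python
from celery import Celery

app = Celery('dug_seis', broker='redis://localhost:6379/0')

@app.task
def event_processing_celery(param, load_file, trig_time, event_id, classification):
    # The real task would call event_processing(); stubbed here.
    pass

# Callers dispatch asynchronously, as the trigger loop further below does:
# event_processing_celery.delay(param, load_file, trig_time, event_id, classification)
```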
......@@ -76,7 +76,7 @@ def processing(param):
condition=int(str(tparam['Time']).replace('_', ''))
number_approved = sorted(i for i in number2 if i >= condition )
index_start_approved=number2.index(number_approved[0])
new_files=new_files1[index_start_approved:]
new_files = new_files1[index_start_approved:]
processed_files = []
while 1:
......@@ -91,9 +91,17 @@ def processing(param):
for k in stations:
sta += ds.waveforms[wf_list[k]].raw_recording
sta_copy = sta.copy()
sta_total = sta_copy + sta_overlap
# no overlap, if more than 10 samples are missing
if not len(sta_overlap):
sta_total = sta_copy
else:
if sta.traces[0].stats["starttime"]-sta_overlap.traces[0].stats["endtime"] > 10./param['Acquisition']['hardware_settings']['sampling_frequency']:
sta_total = sta_copy
logger.info('Gap in waveform data found.')
else:
sta_total = sta_overlap + sta_copy
# Use overlap statement for merging
# Use merge statement for merging
for tr in sta_total:
tr.stats.delta = sta_total[0].stats.delta
sta_total.merge(method=1, interpolation_samples=0)
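The new branch keeps the overlap buffer only when the fresh snippet starts within ten samples of where the overlap ended; the same logic as a small, hypothetical helper:

```python
def combine_with_overlap(sta, sta_overlap, sampling_frequency, max_gap_samples=10):
    # Keep the overlap only if the new snippet follows it closely enough.
    if not len(sta_overlap):
        return sta.copy()
    gap = sta.traces[0].stats["starttime"] - sta_overlap.traces[0].stats["endtime"]
    if gap > max_gap_samples / sampling_frequency:
        return sta.copy()                # too many samples missing: start fresh
    return sta_overlap + sta.copy()      # contiguous enough: prepend the overlap
```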
......@@ -102,9 +110,9 @@ def processing(param):
for k in stations:
sta_total.traces[k].data = sta_total.traces[k].data / 32768 * \
param['Acquisition']['hardware_settings']['gain_selection'][k]
logging.info('Gain range trigger retrieved from YAML file')
logger.info('Gain range trigger retrieved from YAML file')
logging.debug(sta_total)
logger.debug(sta_total)
trigger_out, event_nr = dug_trigger(sta_total, tparam, event_nr, event_nr_s) # run trigger function
overlap = tparam['starttime'] + tparam['endtime'] #+ tparam['endtime']['sta_lta']['lt_window']/sta_total.stats.sampling_rate
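The switch from `logging.info`/`logging.debug` to `logger.info`/`logger.debug` matters because only the named logger carries the handlers and format configured in `cli()`; module-level `logging` calls go to the root logger instead:

```python
import logging

logger = logging.getLogger('dug-seis')   # configured with handlers in cli()
logger.info('handled by the dug-seis handlers and format')
logging.info('goes to the root logger, bypassing that configuration')
```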
......@@ -119,13 +127,11 @@ def processing(param):
classification = [i for i in trigger_out['Classification']]
trig_time=[i for i in trigger_out['Time'][trigger_out['Time'] <t_end-overlap]] #do not store times in overlap
for l in trig_time: #load 2 snippets if event in overlap
if l < sta_copy.traces[0].stats["starttime"]+param['Trigger']['starttime']:
for l in range(0, len(trig_time)):
if trig_time[l] < sta_copy.traces[0].stats["starttime"] + param['Trigger']['starttime']:
load_file = [processed_files[-1], current_file]
else:
load_file = [current_file]
for l in range(0, len(trig_time)):
if param['Processing']['parallel_processing']:
event_processing_celery.delay(param, load_file, trig_time[l], event_id[l], classification[l])
logger.info('Event ' + str(event_id[l]) + ' at ' + str(trig_time[l]) + ' sent to parallel worker.')
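Merging the two loops ties each event's `load_file` choice to its own trigger time; in the old code the first loop only set `load_file`, so the dispatch loop apparently used whichever value the last iteration left behind for every event. A runnable sketch of the corrected pattern, with made-up stand-ins for the surrounding state:

```python
# Stand-ins (values are made up): snippet_start mirrors
# sta_copy.traces[0].stats['starttime'], pre_trigger mirrors
# param['Trigger']['starttime'].
trig_time = [3.2, 9.7]
event_id = [41, 42]
classification = ['noise', 'event']
snippet_start, pre_trigger = 4.0, 0.05
processed_files, current_file = ['file_0001.h5'], 'file_0002.h5'

for l in range(len(trig_time)):
    # Events that begin inside the overlap need the previous file too.
    if trig_time[l] < snippet_start + pre_trigger:
        load_file = [processed_files[-1], current_file]
    else:
        load_file = [current_file]
    print(load_file, trig_time[l], event_id[l], classification[l])
    # in the project: event_processing_celery.delay(param, load_file, ...)
```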
......
just a dummy file
\ No newline at end of file