From 1dbb4df06656364c33ee7806c6fb1d3e2a056d2a Mon Sep 17 00:00:00 2001
From: Joseph Doetsch <joseph.doetsch@erdw.ethz.ch>
Date: Sun, 20 Jan 2019 17:51:31 +0100
Subject: [PATCH] Bug fixes and changes to the input file. Magnitudes are now
 calculated in a window around the predicted P-wave arrival rather than the
 pick (window spans plus/minus the P-S traveltime difference)

---
 config/dug-seis.yaml                    | 101 +++---
 dug_seis/processing/event.py            | 444 ++----------------------
 dug_seis/processing/event_processing.py |  62 +---
 dug_seis/processing/get_waveforms.py    |  82 +----
 dug_seis/processing/processing.py       | 129 +------
 5 files changed, 93 insertions(+), 725 deletions(-)
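
Editor's note (placed after the diffstat, not part of the commit): a minimal
standalone sketch of the new magnitude window, assuming the vp/vs values from
config/dug-seis.yaml below; the function name and signature are illustrative,
not the module's API. The window is centred on the predicted P arrival
t_p = t0 + d/vp and extends by the S-P traveltime difference on either side,
so it ends at the predicted S arrival t_s = t0 + d/vs:

    def magnitude_window_ms(dist_m, t0_s, vp=2600.0, vs=2000.0, t_max_ms=None):
        """Return (start_ms, end_ms) of the amplitude window for one sensor."""
        ts_ms = (dist_m / vs + t0_s) * 1000.0                 # predicted S arrival = window end
        sp_diff_ms = dist_m * (1.0 / vs - 1.0 / vp) * 1000.0  # S-P traveltime difference
        start_ms = max(ts_ms - 2.0 * sp_diff_ms, 0.0)         # = predicted P arrival - sp_diff
        if t_max_ms is not None:
            ts_ms = min(ts_ms, t_max_ms)                      # clip to the loaded snippet
        return start_ms, ts_ms

    # e.g. a sensor 30 m from the source with t0 = 5 ms after trace start:
    # magnitude_window_ms(30.0, 0.005) -> (about 13.1, 20.0)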

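Similarly, a sketch of the anisotropic P velocity used by the 'hom_aniso'
locator after this patch (see the event.py hunk): the Thomsen terms now scale
the symmetry-axis velocity vp_min instead of the isotropic vp, so epsilon and
delta only ever speed rays up. vp_thomsen is an illustrative name; theta is
the angle between the ray and the anisotropy axis:

    import math

    def vp_thomsen(theta_rad, vp_min=2400.0, epsilon=0.263, delta=0.0):
        """Weak-anisotropy P phase velocity in m/s (Thomsen parameters)."""
        s, c = math.sin(theta_rad), math.cos(theta_rad)
        return vp_min * (1.0 + delta * s**2 * c**2 + epsilon * s**4)

    # along the axis (theta = 0) this returns vp_min = 2400 m/s; normal to
    # the axis (theta = pi/2) it returns vp_min * (1 + epsilon) ~ 3031 m/s.
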
diff --git a/config/dug-seis.yaml b/config/dug-seis.yaml
index 42a9aa3..a97d72b 100644
--- a/config/dug-seis.yaml
+++ b/config/dug-seis.yaml
@@ -3,47 +3,47 @@
 General:
   project_name       : Elegancy_CSD
   project_location   : Mt_Terri
-  operator           : ['L. Villiger','J. Doetsch','S. Demir']
+  operator           : ['J. Doetsch','S. Demir']
   sensor_count       : 32
   sensor_coords      : [ # in local coordinate system; x=East, y=North, z=elevation
 0,0,0,
-8.652,84.869,2.870,
-16.187,98.370,2.841,
-23.103,110.886,2.747,
-30.700,125.024,2.546,
-42.158,135.681,3.575,
-71.148,135.037,2.476,
-71.169,125.102,2.192,
-70.764,114.158,2.429,
-70.569,104.196,2.495,
-67.306,93.020,3.403,
-71.162,122.331,16.367,
-70.716,104.681,16.150,
-67.236,91.771,16.019,
-70.069,77.752,16.315,
-57.553,96.126,-11.814,
-50.997,96.164,-19.365,
-54.114,96.061,-8.701,
-37.572,96.071,-19.943,
-57.522,111.956,-12.363,
-50.805,111.951,-19.771,
-49.318,111.967,-11.975,
-37.742,111.972,-19.848,
-68.599,107.806,4.081,
-68.565,105.806,4.115,
-68.530,103.807,4.150,
-68.496,101.807,4.184,
-22.974,110.826,2.791,
-42.048,135.768,3.571,
-71.237,134.959,2.515,
-70.852,114.095,2.480,
-67.279,92.885,3.421,]
+20.838,69.465,13.148,
+19.781,70.950,13.195,
+18.655,72.337,13.163,
+17.638,73.809,13.209,
+16.641,75.305,13.232,
+15.495,76.710,13.245,
+14.503,78.192,13.262,
+13.547,79.520,13.338,
+10.478,77.285,13.287,
+11.507,75.913,13.226,
+12.605,74.466,13.232,
+13.574,72.967,13.249,
+14.598,71.448,13.186,
+15.745,70.015,13.207,
+16.707,68.573,13.196,
+17.676,67.099,13.218,
+18.790,65.669,13.185,
+7.9388,76.7132,-5.4494,
+9.76492,74.75961,-2.4747,
+11.581,72.81184,0.5099,
+12.03274,72.32524,1.2576,
+15.707958,72.294242,-20.5874,
+15.809368,72.223778,-16.5893,
+15.903385,72.162778,-12.5909,
+15.986165,72.104954,-8.5922,
+21.792,67.946,13.152,
+0,0,0,
+0,0,0,
+0,0,0,
+0,0,0,
+0,0,0,]
   active_trigger_channel : 1
-  origin_ch1903_east     : 667400.0  # ch1903 easting origin of local coordinate system
-  origin_ch1903_north    : 158800.0 # ch1903 northing of local coordinate system
-  origin_elev            : 1730.0       # elevation of origin of local coordinate system
+  origin_ch1903_east     : 579300.0  # ch1903 easting origin of local coordinate system
+  origin_ch1903_north    : 247500.0 # ch1903 northing origin of local coordinate system
+  origin_elev            : 500.0       # elevation of origin of local coordinate system
   stats:
-    network           : 'GR'
+    network           : 'XM'
 
 Acquisition:
   hardware_settings:
@@ -53,18 +53,18 @@ Acquisition:
     data_folder_tmp   : 'e:/20181218_sw_test/tmp/'
     compression       : 'gzip-3'          # default: 'gzip-3', 'gzip-0' to 'gzip-9' or None
     file_length_sec   : 60.0              # in seconds
-    station_naming    : [0,2,4,6,8,10,12,14,1,3,5,7,9,11,13,15,16,18,20,22,24,26,28,30,17,19,21,23,25,27,29,31]
+    station_naming    : [1, 9, 2, 10, 3, 11, 4, 12, 5, 13, 6, 14, 7, 15, 8, 16, 17, 25, 18, 26, 19, 27, 20, 28, 21, 29, 22, 30, 23, 31, 24, 32]
 
 Trigger:
-  asdf_folder        : raw
-  channels           : [1,16,17,18,19,20,21,22,23]
+  asdf_folder        : 'raw'
+  channels           : [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27]
   overlap            : 0.05
   bandpass_f_min     : 1000
   bandpass_f_max     : 12000
   algorithm          : 'recstalta'        #other option: 'threshold'
-  coincidence        : 4
+  coincidence        : 10
   sta_lta:
-    threshold_on     : 5.0
+    threshold_on     : 8.0
     threshold_off    : 2.0
     st_window        : 70                 #in samples
     lt_window        : 700                #in samples
@@ -74,34 +74,35 @@ Trigger:
  #  threshold       : 100.0              # in milliVolt
   classification:
     spread_int       : 1e-4
-  noise_vis          : 'False'
-  noise_vis_interval : 10
+  noise_vis          : 'True'
+  noise_vis_interval : 60                 # in seconds
 
 
 Processing:
-  vp                 : 5000.0             # velocity in m/s
-  vs                 : 2800.0             # velocity in m/s
+  vp                 : 2600.0             # velocity in m/s
+  vs                 : 2000.0             # velocity in m/s
   bandpass_f_min     : 1000
   bandpass_f_max     : 12000
 
   Picking:
     algorithm        : 'sta_lta'
     sta_lta:
-      threshold_on   : 5.0
+      threshold_on   : 8.0
       threshold_off  : 2.0
       st_window      : 70                 #in samples        #2.1
       lt_window      : 700                #in samples        #4.2
 
   Locate:
-    algorithm        : 'hom_aniso'          # other options: hom_aniso, nonlinloc
+    algorithm        : 'hom_iso'          # options: hom_iso, hom_aniso, nonlinloc
     min_picks        : 6
     damping          : 0.01
 
     hom_aniso:
-      epsilon        : 0.0                # Thomsen epsilon
+      vp_min         : 2400.0
+      epsilon        : 0.263              # Thomsen epsilon
       delta          : 0.0                # Thomsen delta
-      azi            : 290.0              # Azimuth of anisotropy vector in degrees
-      inc            : 30.0               # Inclination of anisotropy vector in degrees
+      azi            : 295.0              # Azimuth of anisotropy vector in degrees
+      inc            : 38.0               # Inclination of anisotropy vector in degrees
 
   Magnitude:
     algorithm        : 'relative'
diff --git a/dug_seis/processing/event.py b/dug_seis/processing/event.py
index 66b6304..f79b4e9 100644
--- a/dug_seis/processing/event.py
+++ b/dug_seis/processing/event.py
@@ -35,9 +35,11 @@ class Event(ObsPyEvent):
         self.tcalc = []
         self.loc_ind = []
         if self.classification != 'noise':
-            logging.info('Event ' + str(self.event_id) + ': Processing started.')
+            t_start_plot = self.wf_stream.traces[0].stats["starttime"]
+            logging.info('Event ' + str(self.event_id) + ': Time ' + str(t_start_plot)[11:-1] + ', processing started.')
         else:
-            logging.info('Noise Visualisation ' + ': Processing started.')
+            t_start_plot = self.wf_stream.traces[0].stats["starttime"]
+            logging.info('Noise Visualisation at ' + str(t_start_plot)[11:-1] + ', processing started.')
 
     def bandpass(self):
         # bandpass filter using same parameters as for triggering
@@ -64,7 +66,6 @@ class Event(ObsPyEvent):
                                        method_id='recursive_sta_lta'))
         logging.info('Event ' + str(self.event_id) + ': ' + str(len(self.picks)) + ' picks.')
 
-
     def locate(self):
         lparam = self.prparam['Locate']
         if lparam['algorithm'] == 'hom_aniso':
@@ -80,11 +81,6 @@ class Event(ObsPyEvent):
                 tobs.append((i.time - self.wf_stream.traces[0].stats["starttime"]) * 1000)
         npicks = len(tobs)
         sensor_coords = self.param['General']['sensor_coords'][trig_ch, :]
-        # for i in range(len(sensor_coords) - 1, -1, -1):
-        #     if not sensor_coords[i, 0]:
-        #         trig_ch.remove(trig_ch[i])
-        #         tobs.remove([i])
-        # sensor_coords = self.param['General']['sensor_coords'][trig_ch, :]
         vp = self.prparam['vp'] * np.ones([npicks]) / 1000.
 
         if lparam['algorithm'] == 'hom_iso' or lparam['algorithm'] == 'hom_aniso':
@@ -104,8 +100,8 @@ class Event(ObsPyEvent):
                         theta = np.arccos(np.cos(inc) * np.cos(azi) * np.cos(aparam['inc']) * np.cos(aparam['azi']) +
                                           np.cos(inc) * np.sin(azi) * np.cos(aparam['inc']) * np.sin(aparam['azi']) +
                                           np.sin(inc) * np.sin(aparam['inc']))
-                        vp[i] = self.prparam['vp'] / 1000. * (1.0 + aparam['delta'] * np.sin(theta)**2 * np.cos(theta)**2 +
-                                                      aparam['epsilon'] * np.sin(theta)**4)
+                        vp[i] = aparam['vp_min'] / 1000. * (1.0 + aparam['delta'] * np.sin(theta)**2 * np.cos(theta)**2 +
+                                                            aparam['epsilon'] * np.sin(theta)**4)
 
                 dist = [np.linalg.norm(loc - sensor_coords[i, :]) for i in range(npicks)]
                 tcalc = [dist[i] / vp[i] + t0 for i in range(npicks)]
@@ -118,8 +114,7 @@ class Event(ObsPyEvent):
                 jacobian[:, 3] = np.ones(npicks)
 
                 dm = np.matmul(np.matmul(np.linalg.inv(np.matmul(np.transpose(jacobian), jacobian) +
-                                                       pow(lparam['damping'], 2) * np.eye(4)), np.transpose(jacobian)),
-                               res)
+                               pow(lparam['damping'], 2) * np.eye(4)), np.transpose(jacobian)), res)
                 loc = loc + dm[0:3]
                 t0 = t0 + dm[3]
         else:
@@ -169,7 +164,6 @@ class Event(ObsPyEvent):
         logging.info('Event ' + str(self.event_id) + ': Location %3.2f %3.2f %3.2f; %i iterations, rms %4.3f ms'
                      % (loc[0], loc[1], loc[2], nit, rms))
 
-
     def est_magnitude(self, origin_number=np.inf):
         emparam = self.prparam['Magnitude']
         if origin_number > len(self.origins):
@@ -185,24 +179,18 @@ class Event(ObsPyEvent):
         ch_in = [int(i.stats['station']) for i in self.wf_stream.traces]
         ind_trig = np.where(np.isin(ch_in, trig_ch))  # Finds correct axis
 
-
         if len(self.origins):
             self.ts_approx = []
             t0_orig = self.t0[origin_number] - self.wf_stream.traces[0].stats["starttime"]  # t0 for use in magnitude estimation
+            righthand = []
             for i in range(len(self.dist[origin_number])):
-                self.ts_approx.append((self.dist[origin_number][i] / (self.prparam['vs']) + t0_orig) * 1000)  # time of s wave arrival/end of the segment (time)
+                self.ts_approx.append((self.dist[origin_number][i] / self.prparam['vs'] + t0_orig) * 1000)  # time of s wave arrival/end of the segment (time)
+                righthand.append((self.dist[origin_number][i] / self.prparam['vs'] -
+                                  self.dist[origin_number][i] / self.prparam['vp']) * 1000)
                 if self.ts_approx[i] > (self.param['Trigger']['endtime']+self.param['Trigger']['endtime'])*1000:
                     self.ts_approx[i] = (self.param['Trigger']['endtime']+self.param['Trigger']['endtime'])*1000
 
-            righthand = []
             relstartwindow = []
-            relpicks=[]
-            for i in self.picks:
-                if self.param['General']['sensor_coords'][int(i.waveform_id['station_code']) - 1, 0]:
-                    relpicks.append((i.time - self.wf_stream[0].stats["starttime"])*1000)
-            for i in range(len(self.ts_approx)):
-                righthand.append(
-                    self.ts_approx[i] - relpicks[i])  # time between picked p arrival and s wave arrival in ms
             for i in range(len(self.ts_approx)):
                 relstartwindow.append(self.ts_approx[i] - 2 * righthand[i])  # begin of the segment (time)
                 if relstartwindow[i] < 0:
@@ -228,69 +216,13 @@ class Event(ObsPyEvent):
             average_magnitude = np.log10(np.mean(mag_exp))
 
             logging.info('Event ' + str(self.event_id) + ': Relative magnitude %4.2f.' % average_magnitude)
-            self.magnitudes.append(Magnitude(mag=average_magnitude,
-                                             resource_id='%s/magnitude/%d' % (
-                                             self.resource_id.id, len(self.magnitudes)+1),
-                                             origin_id=self.origins[origin_number].resource_id,
-                                             type='relative_amplitude'))
-            self.preferredMagnitudeID = self.magnitudes[-1].resource_id
-    # ######oldmagnitude
-    # def est_magnitude(self, origin_number=np.inf):
-    #     emparam = self.prparam['Magnitude']
-    #     if origin_number > len(self.origins):
-    #         origin_number = len(self.origins)
-    #     elif origin_number < 1:
-    #         logging.warning('Origin number does not exist.')
-    #     origin_number = origin_number - 1
-    #
-    #     trig_ch = [int(i.waveform_id['station_code']) for i in self.picks]
-    #     ch_in = [int(i.stats['station']) for i in self.wf_stream.traces]
-    #     ind_trig = np.where(np.isin(ch_in, trig_ch))  # Finds correct axis
-    #
-    #     if len(self.origins):
-    #         self.ts_approx = []
-    #         t0_orig = self.t0[origin_number] - self.wf_stream.traces[0].stats["starttime"]  # t0 for use in magnitude estimation
-    #         for i in range(len(self.dist[origin_number])):
-    #             self.ts_approx.append((self.dist[origin_number][i] / (self.prparam['vs']) + t0_orig) * 1000)  # time of s wave arrival/end of the segment (time)
-    #             if self.ts_approx[i] > (self.param['Trigger']['endtime']+self.param['Trigger']['endtime'])*1000:
-    #                 self.ts_approx[i] = (self.param['Trigger']['endtime']+self.param['Trigger']['endtime'])*1000
-    #
-    #         righthand = []
-    #         relstartwindow = []
-    #         relpicks = [(i.time - self.wf_stream[0].stats["starttime"])*1000  for i in self.picks]
-    #         for i in range(len(self.ts_approx)):
-    #             righthand.append(
-    #                 self.ts_approx[i] - relpicks[i])  # time between picked p arrival and s wave arrival in ms
-    #         for i in range(len(self.ts_approx)):
-    #             relstartwindow.append(self.ts_approx[i] - 2 * righthand[i])  # begin of the segment (time)
-    #             if relstartwindow[i] < 0:
-    #                 relstartwindow[i] = 0
-    #
-    #         sample_start = [int(round(i / 1000 * self.wf_stream.traces[0].stats.sampling_rate)) for i in
-    #                         relstartwindow]  # beginning of segment (samples)
-    #         sample_end = [int(round(i / 1000 * self.wf_stream.traces[0].stats.sampling_rate)) for i in
-    #                       self.ts_approx]  # end of segment (samples)
-    #
-    #         maxamp = []
-    #         for k in range(len(ind_trig[0])):
-    #             maxamp.append(max(abs(self.wf_stream[ind_trig[0][k]].data[
-    #                                   sample_start[k]:sample_end[k]])))  # find maximum amplitude in segment
-    #
-    #         corr_fac = []
-    #         mag_exp= []
-    #         for i in range(len(self.dist[origin_number])):
-    #             corr_fac.append(np.exp(np.pi * (self.dist[origin_number][i] - emparam['r0']) * emparam['f0'] / (emparam['q'] * self.prparam['vp'])))
-    #             mag_exp.append(maxamp[i] * self.dist[origin_number][i] / emparam['r0'] * corr_fac[i])
-    #             # calculate relative magnitude for each receiver/event recording
-    #         average_magnitude = np.log10(np.mean(mag_exp))
-    #
-    #         logging.info('Event ' + str(self.event_id) + ': Relative magnitude %4.2f.' % average_magnitude)
-    #         self.magnitudes.append(Magnitude(mag=average_magnitude,
-    #                                          resource_id='%s/magnitude/%d' % (
-    #                                          self.resource_id.id, len(self.magnitudes)+1),
-    #                                          origin_id=self.origins[origin_number].resource_id,
-    #                                          type='relative_amplitude'))
-    #         self.preferredMagnitudeID = self.magnitudes[-1].resource_id
+            if average_magnitude < np.inf:  # skip NaN (and +inf) magnitudes
+                self.magnitudes.append(Magnitude(mag=average_magnitude,
+                                                 resource_id='%s/magnitude/%d' % (
+                                                 self.resource_id.id, len(self.magnitudes)+1),
+                                                 origin_id=self.origins[origin_number].resource_id,
+                                                 type='relative_amplitude'))
+                self.preferredMagnitudeID = self.magnitudes[-1].resource_id
 
     def event_plot(self, save_fig=False):
         fparam = self.prparam['Folders']
@@ -302,8 +234,6 @@ class Event(ObsPyEvent):
         else:
             t_title = 'Event ' + str(self.event_id) + ' at ' + t_start_plotn
 
-
-
         sampRate = self.wf_stream.traces[0].stats.sampling_rate
         time_v = np.arange(0, self.wf_stream[0].data.shape[0], 1) * 1 / sampRate * 1000
         a_max = []
@@ -335,26 +265,22 @@ class Event(ObsPyEvent):
             # finding the correct axes(stations)
             ind_trig = np.where(np.isin(ch_in, trig_ch))  # Finds correct axis
             trig_time = [(i.time - self.wf_stream.traces[0].stats["starttime"]) * 1000 for i in self.picks]
-            loc_ind_trig = ind_trig[0]#why was this? if this line gives error switch between ind_trig[0] and self.loc_ind[-1][:]
-
+            loc_ind_trig = self.loc_ind[-1][:]  # channels used in the location; if this raises, switch back to ind_trig[0]
 
             # plotting pick time
             for m in range(len(trig_time)):
                 axs[ind_trig[0][m]].plot((trig_time[m], trig_time[m]),
-                                         (axs[ind_trig[0][m]].get_ylim()[0],
-                                          axs[ind_trig[0][m]].get_ylim()[1]), 'r-',
-                                         linewidth=1.0)
+                                         (axs[ind_trig[0][m]].get_ylim()[0], axs[ind_trig[0][m]].get_ylim()[1]),
+                                         'r-', linewidth=1.0)
             for m in range(len(loc_ind_trig)):
                 if len(self.origins):
                     axs[loc_ind_trig[m]].plot((self.tcalc[len(self.origins)-1][m], self.tcalc[len(self.origins)-1][m]),
-                                             (axs[loc_ind_trig[m]].get_ylim()[0],
-                                              axs[loc_ind_trig[m]].get_ylim()[1]), 'g-',
-                                             linewidth=1.0)
+                                              (axs[loc_ind_trig[m]].get_ylim()[0], axs[loc_ind_trig[m]].get_ylim()[1]),
+                                              'g-', linewidth=1.0)
                 if len(self.magnitudes):
                     axs[loc_ind_trig[m]].plot((self.ts_approx[m], self.ts_approx[m]),
-                                             (axs[loc_ind_trig[m]].get_ylim()[0],
-                                              axs[loc_ind_trig[m]].get_ylim()[1]), 'y-',
-                                             linewidth=1.0)
+                                              (axs[loc_ind_trig[m]].get_ylim()[0], axs[loc_ind_trig[m]].get_ylim()[1]),
+                                              'y-', linewidth=1.0)
 
         plt.suptitle(t_title, fontsize=17)
         fig.text(0.49, 0.035, 'time [ms]', ha='center', fontsize=14)
@@ -373,324 +299,8 @@ class Event(ObsPyEvent):
             if save_fig:
 
                 fig.savefig(fparam['noise_vis_folder'] + "/noise vis-" + t_start_plotn + ".png", dpi=100)
-                logging.info('Noise Visualisation ' + ': Plotted, Figure saved.')
+                logging.info('Noise Visualisation: Plotted, Figure saved.')
 
             else:
-                logging.info('Noise Visualisation ' + ': Plotted, Figure not saved.')
+                logging.info('Noise Visualisation: Plotted, Figure not saved.')
         plt.close(fig)
-# """
-# Event class of DUG-Seis
-#
-#
-# # Copyright (c) 2018 by SCCER-SoE and SED at ETHZ
-#
-# Version 0.0, 23.10.2018, Joseph Doetsch (doetschj)
-#              23.10.2018, Semih Demir
-#              23.10.2018, Linus Villiger
-#
-#
-# """
-#
-# from matplotlib import pyplot as plt
-# from obspy.core.event.event import Event as ObsPyEvent
-# from obspy.signal.trigger import recursive_sta_lta, trigger_onset
-# from obspy.core.event import WaveformStreamID, Pick, Origin, Arrival, Magnitude
-# import numpy as np
-# import pyproj
-# import logging
-# import pandas as pd
-#
-#
-# class Event(ObsPyEvent):
-#     def __init__(self, param, wf_stream, event_id, classification):
-#         super().__init__(resource_id='smi:%s/event/%d' % (param['General']['project_name'], event_id))
-#
-#         self.param = param
-#         self.prparam = param['Processing']
-#         self.wf_stream = wf_stream
-#         self.event_id = event_id
-#         self.classification = classification
-#         self.t0 = []
-#         self.dist = []
-#         self.tcalc = []
-#         self.loc_ind = []
-#         if self.classification != 'noise':
-#             logging.info('Event ' + str(self.event_id) + ': Processing started.')
-#         else:
-#             logging.info('Noise Visualisation ' + ': Processing started.')
-#
-#     def bandpass(self):
-#         # bandpass filter using same parameters as for triggering
-#         self.wf_stream.filter("bandpass", freqmin=self.prparam['bandpass_f_min'], freqmax=self.prparam['bandpass_f_max'])
-#
-#     def pick(self):
-#         piparam = self.prparam['Picking']
-#         df = self.wf_stream[0].stats.sampling_rate
-#
-#         for j in range(len(self.wf_stream)):  # do picking for each trace in 20ms snippet
-#             cft = recursive_sta_lta(self.wf_stream[j], piparam['sta_lta']['st_window'],
-#                                     piparam['sta_lta']['lt_window'])  # create characteristic function
-#             trig = trigger_onset(cft, piparam['sta_lta']['threshold_on'],
-#                                  piparam['sta_lta']['threshold_off'])  # do picking
-#             if len(trig):
-#                 t_picks=trig[0][0] / df  # if a pick is done on a trace, convert it to seconds
-#                 t_pick_UTC = self.wf_stream[0].stats[
-#                                       "starttime"] + t_picks  # and add the start time of the 20ms snippet
-#                 station_id = j + 1
-#                 self.picks.append(Pick(time=t_pick_UTC,
-#                                        resource_id='%s/picks/%d' % (self.resource_id.id, len(self.picks)+1),
-#                                        waveform_id=WaveformStreamID(network_code='GR', station_code='%03i' % station_id,
-#                                                                     channel_code='001', location_code='00'),
-#                                        method_id='recursive_sta_lta'))
-#         logging.info('Event ' + str(self.event_id) + ': ' + str(len(self.picks)) + ' picks.')
-#
-#
-#     def locate(self):
-#         lparam = self.prparam['Locate']
-#         if lparam['algorithm'] == 'hom_aniso':
-#             aparam = lparam['hom_aniso']
-#         if len(self.picks) < lparam['min_picks']:
-#             return
-#
-#         trig_ch = []
-#         tobs = []
-#         for i in self.picks:
-#             if self.param['General']['sensor_coords'][int(i.waveform_id['station_code'])-1, 0]:
-#                 trig_ch.append(int(i.waveform_id['station_code']) - 1)
-#                 tobs.append((i.time - self.wf_stream.traces[0].stats["starttime"]) * 1000)
-#         npicks = len(tobs)
-#         sensor_coords = self.param['General']['sensor_coords'][trig_ch, :]
-#         # for i in range(len(sensor_coords) - 1, -1, -1):
-#         #     if not sensor_coords[i, 0]:
-#         #         trig_ch.remove(trig_ch[i])
-#         #         tobs.remove([i])
-#         # sensor_coords = self.param['General']['sensor_coords'][trig_ch, :]
-#         vp = self.prparam['vp'] * np.ones([npicks]) / 1000.
-#
-#         if lparam['algorithm'] == 'hom_iso' or lparam['algorithm'] == 'hom_aniso':
-#             loc = sensor_coords[tobs.index(min(tobs)), :] + 0.1
-#             t0 = min(tobs)
-#             nit = 0
-#             jacobian = np.zeros([npicks, 4])
-#             dm = 1. * np.ones(4)
-#
-#             while nit < 100 and np.linalg.norm(dm) > 0.00001:
-#                 nit = nit + 1
-#
-#                 if lparam['algorithm'] == 'hom_aniso':  # calculate anisotropic velocities
-#                     for i in range(npicks):
-#                         azi = np.arctan2(sensor_coords[i, 0] - loc[0], sensor_coords[i, 1] - loc[1])
-#                         inc = np.arctan2(sensor_coords[i, 2] - loc[2], np.linalg.norm(sensor_coords[i,range(2)]-loc[range(2)]))
-#                         theta = np.arccos(np.cos(inc) * np.cos(azi) * np.cos(aparam['inc']) * np.cos(aparam['azi']) +
-#                                           np.cos(inc) * np.sin(azi) * np.cos(aparam['inc']) * np.sin(aparam['azi']) +
-#                                           np.sin(inc) * np.sin(aparam['inc']))
-#                         vp[i] = self.prparam['vp'] / 1000. * (1.0 + aparam['delta'] * np.sin(theta)**2 * np.cos(theta)**2 +
-#                                                       aparam['epsilon'] * np.sin(theta)**4)
-#
-#                 dist = [np.linalg.norm(loc - sensor_coords[i, :]) for i in range(npicks)]
-#                 tcalc = [dist[i] / vp[i] + t0 for i in range(npicks)]
-#
-#                 res = [tobs[i] - tcalc[i] for i in range(npicks)]
-#                 rms = np.linalg.norm(res) / npicks
-#                 for j in range(3):
-#                     for i in range(npicks):
-#                         jacobian[i, j] = -(sensor_coords[i, j] - loc[j]) / (vp[i] * dist[i])
-#                 jacobian[:, 3] = np.ones(npicks)
-#
-#                 dm = np.matmul(np.matmul(np.linalg.inv(np.matmul(np.transpose(jacobian), jacobian) +
-#                                                        pow(lparam['damping'], 2) * np.eye(4)), np.transpose(jacobian)),
-#                                res)
-#                 loc = loc + dm[0:3]
-#                 t0 = t0 + dm[3]
-#         else:
-#             logging.warning('Location algorithm not implemented. Location not estimated.')
-#
-#         x = loc[0] + self.param['General']['origin_ch1903_east']
-#         y = loc[1] + self.param['General']['origin_ch1903_north']
-#         z = loc[2] + self.param['General']['origin_elev']
-#         t0 = self.wf_stream.traces[0].stats["starttime"] + t0 / 1000.
-#
-#         # write CSV file with CH1903 coordinates
-#         cols = pd.Index(['Event_id', 'datenum', 'x', 'y', 'z'], name='cols')
-#         outdata = [self.event_id] + [t0.matplotlib_date] + [x] + [y] + [z]
-#         df = pd.DataFrame(data=[outdata], columns=cols)
-#         df.to_csv('events.csv', mode='a', header=False, index=False)
-#
-#         # convert coordinates to WGS84
-#         proj_ch1903 = pyproj.Proj(init="epsg:21781")
-#         proj_wgs84 = pyproj.Proj(init="epsg:4326")
-#         lon, lat = pyproj.transform(proj_ch1903, proj_wgs84, x, y)
-#         # create origin object and append to event
-#         o = Origin(time=t0,
-#                    longitude=lon, latitude=lat, depth=z,
-#                    resource_id='%s/origin/%d' % (self.resource_id.id, len(self.origins)+1),
-#                    depthType='elevation',
-#                    methodID=lparam['algorithm'])
-#         for i in self.picks:
-#             if self.param['General']['sensor_coords'][int(i.waveform_id['station_code'])-1, 0]:
-#                 o.arrivals.append(Arrival(pick_id=i.resource_id,
-#                                           time_residual=res[len(o.arrivals)] / 1000,
-#                                           phase='p',
-#                                           resource_id='%s/origin/%d/arrival/%d' % (
-#                                           self.resource_id.id, len(self.origins)+1, len(o.arrivals))))
-#         o.time_errors = rms / 1000
-#         self.origins.append(o)
-#         self.preferredOriginID = o.resource_id
-#         self.extra = {'x': {'value': loc[0],
-#                             'namespace': 'http://sccer-soe.ch/local_coordinates/1.0'},
-#                       'y': {'value': loc[1],
-#                             'namespace': 'http://sccer-soe.ch/local_coordinates/1.0'},
-#                       'z': {'value': loc[2],
-#                             'namespace': 'http://sccer-soe.ch/local_coordinates/1.0'}}
-#         self.t0.append(t0)
-#         self.dist.append(dist)
-#         self.tcalc.append(tcalc)
-#         self.loc_ind.append(trig_ch)
-#         logging.info('Event ' + str(self.event_id) + ': Location %3.2f %3.2f %3.2f; %i iterations, rms %4.3f ms'
-#                      % (loc[0], loc[1], loc[2], nit, rms))
-#
-#
-#     def est_magnitude(self, origin_number=np.inf):
-#         emparam = self.prparam['Magnitude']
-#         if origin_number > len(self.origins):
-#             origin_number = len(self.origins)
-#         elif origin_number < 1:
-#             logging.warning('Origin number does not exist.')
-#         origin_number = origin_number - 1
-#
-#         trig_ch = [int(i.waveform_id['station_code']) for i in self.picks]
-#         ch_in = [int(i.stats['station']) for i in self.wf_stream.traces]
-#         ind_trig = np.where(np.isin(ch_in, trig_ch))  # Finds correct axis
-#
-#         if len(self.origins):
-#             self.ts_approx = []
-#             t0_orig = self.t0[origin_number] - self.wf_stream.traces[0].stats["starttime"]  # t0 for use in magnitude estimation
-#             for i in range(len(self.dist[origin_number])):
-#                 self.ts_approx.append((self.dist[origin_number][i] / (self.prparam['vs']) + t0_orig) * 1000)  # time of s wave arrival/end of the segment (time)
-#                 if self.ts_approx[i] > (self.param['Trigger']['endtime']+self.param['Trigger']['endtime'])*1000:
-#                     self.ts_approx[i] = (self.param['Trigger']['endtime']+self.param['Trigger']['endtime'])*1000
-#
-#             righthand = []
-#             relstartwindow = []
-#             relpicks = [(i.time - self.wf_stream[0].stats["starttime"])*1000  for i in self.picks]
-#             for i in range(len(self.ts_approx)):
-#                 righthand.append(
-#                     self.ts_approx[i] - relpicks[i])  # time between picked p arrival and s wave arrival in ms
-#             for i in range(len(self.ts_approx)):
-#                 relstartwindow.append(self.ts_approx[i] - 2 * righthand[i])  # begin of the segment (time)
-#                 if relstartwindow[i] < 0:
-#                     relstartwindow[i] = 0
-#
-#             sample_start = [int(round(i / 1000 * self.wf_stream.traces[0].stats.sampling_rate)) for i in
-#                             relstartwindow]  # beginning of segment (samples)
-#
-#             sample_end = [int(round(i / 1000 * self.wf_stream.traces[0].stats.sampling_rate)) for i in
-#                           self.ts_approx]  # end of segment (samples)
-#
-#             maxamp = []
-#             for k in range(len(ind_trig[0])):
-#                 maxamp.append(max(abs(self.wf_stream[ind_trig[0][k]].data[
-#                                       sample_start[k]:sample_end[k]])))  # find maximum amplitude in segment
-#
-#             corr_fac = []
-#             mag_exp= []
-#             for i in range(len(self.dist[origin_number])):
-#                 corr_fac.append(np.exp(np.pi * (self.dist[origin_number][i] - emparam['r0']) * emparam['f0'] / (emparam['q'] * self.prparam['vp'])))
-#                 mag_exp.append(maxamp[i] * self.dist[origin_number][i] / emparam['r0'] * corr_fac[i])
-#                 # calculate relative magnitude for each receiver/event recording
-#             average_magnitude = np.log10(np.mean(mag_exp))
-#
-#             logging.info('Event ' + str(self.event_id) + ': Relative magnitude %4.2f.' % average_magnitude)
-#             self.magnitudes.append(Magnitude(mag=average_magnitude,
-#                                              resource_id='%s/magnitude/%d' % (
-#                                              self.resource_id.id, len(self.magnitudes)+1),
-#                                              origin_id=self.origins[origin_number].resource_id,
-#                                              type='relative_amplitude'))
-#             self.preferredMagnitudeID = self.magnitudes[-1].resource_id
-#
-#     def event_plot(self, save_fig=False):
-#         fparam = self.prparam['Folders']
-#         # for name of each saved png, use start and end time of the 20ms snippet
-#         t_start_plot = self.wf_stream.traces[0].stats["starttime"]
-#         t_start_plotn = str(t_start_plot)[11:-1]
-#         if self.classification=='noise':
-#             t_title = 'Noise Visualisation at ' + t_start_plotn
-#         else:
-#             t_title = 'Event ' + str(self.event_id) + ' at ' + t_start_plotn
-#
-#
-#
-#         sampRate = self.wf_stream.traces[0].stats.sampling_rate
-#         time_v = np.arange(0, self.wf_stream[0].data.shape[0], 1) * 1 / sampRate * 1000
-#         a_max = []
-#         for k in range(len(self.wf_stream)):
-#             a_max.append(format(np.amax(np.absolute(self.wf_stream[k].data)), '.1f'))
-#         fig, axs = plt.subplots(len(self.wf_stream), 1, figsize=(20, 10), facecolor='w', edgecolor='k')
-#
-#         plt.interactive(False)
-#         fig.subplots_adjust(hspace=0, wspace=0)
-#
-#         axs = axs.ravel()
-#
-#         for i in range(len(self.wf_stream)):
-#             axs[i].plot((time_v), self.wf_stream[i].data)
-#             axs[i].set_yticklabels([])
-#             axs[i].set_ylabel(self.wf_stream.traces[i].stats['station'] + '  ', rotation=0)
-#             axs[i].set_xlim([0, (len(time_v) / self.wf_stream[0].stats.sampling_rate) * 1000])
-#
-#             ax2 = axs[i].twinx()
-#             ax2.set_yticklabels([])
-#             ax2.set_ylabel('        ' + str(a_max[i]), rotation=0, color='g')
-#
-#             if i < len(self.wf_stream) - 1:
-#                 axs[i].set_xticklabels([])
-#
-#         if self.classification != 'noise':
-#             trig_ch = [int(i.waveform_id['station_code']) for i in self.picks]
-#             ch_in = [int(i.stats['station']) for i in self.wf_stream.traces]
-#             # finding the correct axes(stations)
-#             ind_trig = np.where(np.isin(ch_in, trig_ch))  # Finds correct axis
-#             trig_time = [(i.time - self.wf_stream.traces[0].stats["starttime"]) * 1000 for i in self.picks]
-#             loc_ind_trig = self.loc_ind[-1][:]
-#
-#             # plotting pick time
-#             for m in range(len(trig_time)):
-#                 axs[ind_trig[0][m]].plot((trig_time[m], trig_time[m]),
-#                                          (axs[ind_trig[0][m]].get_ylim()[0],
-#                                           axs[ind_trig[0][m]].get_ylim()[1]), 'r-',
-#                                          linewidth=1.0)
-#             for m in range(len(loc_ind_trig)):
-#                 if len(self.origins):
-#                     axs[loc_ind_trig[m]].plot((self.tcalc[len(self.origins)-1][m], self.tcalc[len(self.origins)-1][m]),
-#                                              (axs[loc_ind_trig[m]].get_ylim()[0],
-#                                               axs[loc_ind_trig[m]].get_ylim()[1]), 'g-',
-#                                              linewidth=1.0)
-#                 if len(self.magnitudes):
-#                     axs[loc_ind_trig[m]].plot((self.ts_approx[m], self.ts_approx[m]),
-#                                              (axs[loc_ind_trig[m]].get_ylim()[0],
-#                                               axs[loc_ind_trig[m]].get_ylim()[1]), 'y-',
-#                                              linewidth=1.0)
-#
-#         plt.suptitle(t_title, fontsize=17)
-#         fig.text(0.49, 0.035, 'time [ms]', ha='center', fontsize=14)
-#         fig.text(0.040, 0.5, 'channel []', va='center', rotation='vertical', fontsize=14)
-#         fig.text(.988, 0.5, 'peak amplitude [mV]', va='center', rotation=90, fontsize=14, color='g')
-#         # plt.show()
-#
-#         if self.classification != 'noise':
-#             if save_fig:
-#                 fig.savefig(fparam['plot_folder_'+self.classification] + "/event-" + str(self.event_id) + '_' + t_start_plotn + ".png",
-#                             dpi=100)
-#                 logging.info('Event ' + str(self.event_id) + ': Plotted, Figure saved.')
-#             else:
-#                 logging.info('Event ' + str(self.event_id) + ': Plotted, Figure not saved.')
-#         else:
-#             if save_fig:
-#
-#                 fig.savefig(fparam['noise_vis_folder'] + "/noise vis-" + t_start_plotn + ".png", dpi=100)
-#                 logging.info('Noise Visualisation ' + ': Plotted, Figure saved.')
-#
-#             else:
-#                 logging.info('Noise Visualisation ' + ': Plotted, Figure not saved.')
-#         plt.close(fig)
-#
\ No newline at end of file
diff --git a/dug_seis/processing/event_processing.py b/dug_seis/processing/event_processing.py
index ab13d8d..6a72e10 100644
--- a/dug_seis/processing/event_processing.py
+++ b/dug_seis/processing/event_processing.py
@@ -31,64 +31,18 @@ def event_processing(param, load_file, trig_time,  event_id, classification):
         # event.bandpass()
         event.pick()
         event.locate()
-        # event.est_magnitude()
+        event.est_magnitude()
         event.event_plot(save_fig=True)
-        # event.prparam['Locate']['algorithm'] = 'hom_iso'
-        # event.locate()
-        # #event.est_magnitude()
-        # #event.est_magnitude(origin_number=1)
-        # event.classification = 'active'
-        # event.event_plot(save_fig=True)
-        # classification = 'passive'
+        #event.prparam['Locate']['algorithm'] = 'hom_aniso'
+        #event.locate()
+        #event.est_magnitude()
+        #event.est_magnitude(origin_number=1)
+        #event.classification = 'active'
+        #event.event_plot(save_fig=True)
+        #classification = 'passive'
 
         if classification == 'passive':
             event.write('%s/event%s.xml' % (param['Processing']['Folders']['quakeml_folder'], event_id), 'quakeml', validate=True)
         elif classification == 'active':
             event.write('%s/hammer%s.xml' % (param['Processing']['Folders']['quakeml_folder'], event_id), 'quakeml',
                         validate=True)
-#
-# """
-# Processing module of DUG-Seis
-#
-#
-# # Copyright (c) 2018 by SCCER-SoE and SED at ETHZ
-#
-# Version 0.0, 23.10.2018, Joseph Doetsch (doetschj)
-#              23.10.2018, Semih Demir
-#              23.10.2018, Linus Villiger
-#
-#
-# """
-#
-#
-# import logging
-# from dug_seis.processing.event import Event
-# from dug_seis.processing.get_waveforms import get_waveforms
-#
-#
-# def event_processing(param, load_file, trig_time,  event_id, classification):
-#     logging.debug('Starting processing for event %d' % event_id)
-#     wf_stream = get_waveforms(param, load_file, trig_time)
-#
-#     event = Event(param, wf_stream, event_id, classification)
-#     if classification == 'noise':
-#         event.event_plot(save_fig=True)
-#     elif classification == 'electronic':
-#         event.bandpass()
-#         event.event_plot(save_fig=True)
-#     else:
-#         #event.bandpass()
-#         event.pick()
-#         #event.locate()
-#         #event.est_magnitude()
-#         #event.prparam['Locate']['algorithm'] = 'hom_iso'
-#         #event.locate()
-#         #event.est_magnitude()
-#
-#         event.event_plot(save_fig=True)
-#         if classification == 'passive':
-#             event.write('%s/event%s.xml' % (param['Processing']['Folders']['quakeml_folder'], event_id), 'quakeml', validate=True)
-#         elif classification == 'active':
-#             event.write('%s/hammer%s.xml' % (param['Processing']['Folders']['quakeml_folder'], event_id), 'quakeml',
-#                         validate=True)
-#
\ No newline at end of file
diff --git a/dug_seis/processing/get_waveforms.py b/dug_seis/processing/get_waveforms.py
index a56726d..33a7f99 100644
--- a/dug_seis/processing/get_waveforms.py
+++ b/dug_seis/processing/get_waveforms.py
@@ -12,15 +12,15 @@ Version 0.0, 23.10.2018, Joseph Doetsch (doetschj)
 from obspy import Stream
 import pyasdf
 
+
 def get_waveforms(param, load_file, trig_time):
-    aparam=param['Acquisition']
-    gparam=param['General']
+    aparam = param['Acquisition']
+    gparam = param['General']
     asdf_folder = param['Trigger']['asdf_folder']
-    print(gparam['stats']['network'] + '_001')
+    #print(gparam['stats']['network'] + '_001')
     start_time = trig_time - param['Trigger']['starttime']
     end_time = trig_time + param['Trigger']['endtime']
 
-
     ch_in = range(1, gparam['sensor_count' ] +1)
     r = range(len(ch_in))
     if len(load_file) == 1:  # load 2 snippets if event in overlap otherwise load 1
@@ -33,8 +33,6 @@ def get_waveforms(param, load_file, trig_time):
                                           endtime=end_time,
                                           tag="raw_recording")
 
-
-
     else:
         ds1 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[0], mode='r')
         ds2 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[1], mode='r') #GR_001
@@ -66,75 +64,5 @@ def get_waveforms(param, load_file, trig_time):
                                                tag="raw_recording")
     return wf_stream
 
-# """
-# Load waveform data for processing
-#
-#
-# # Copyright (c) 2018 by SCCER-SoE and SED at ETHZ
-#
-# Version 0.0, 23.10.2018, Joseph Doetsch (doetschj)
-#              23.10.2018, Semih Demir
-#
-# """
-#
-# from obspy import Stream
-# import pyasdf
-#
-# def get_waveforms(param, load_file, trig_time):
-#     aparam=param['Acquisition']
-#     gparam=param['General']
-#     asdf_folder = param['Trigger']['asdf_folder']
-#
-#     start_time = trig_time - param['Trigger']['starttime']
-#     end_time = trig_time + param['Trigger']['endtime']
-#
-#
-#     ch_in = range(1, gparam['sensor_count' ] +1)
-#     r = range(len(ch_in))
-#     if len(load_file) == 1:  # load 2 snippets if event in overlap otherwise load 1
-#         wf_stream = Stream()
-#         ds = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[0], mode='r')
-#         for k in r:
-#             wf_stream += ds.get_waveforms(network=aparam['stats']['network'], station=str(int(ch_in[k])).zfill(3), location=aparam['stats']['location'],
-#                                           channel=aparam['stats']['channel'],
-#                                           starttime=start_time,
-#                                           endtime=end_time,
-#                                           tag="raw_recording")
-#
-#
-#
-#     else:
-#         ds1 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[0], mode='r')
-#         ds2 = pyasdf.ASDFDataSet(asdf_folder + '/' + load_file[1], mode='r')
-#         if end_time > ds2.waveforms.GR_001.raw_recording.traces[0].stats["starttime"]:
-#             sta_p1 = Stream()
-#             sta_p2 = Stream()
-#             for k in r:
-#                 sta_p1 += ds1.get_waveforms(network=aparam['stats']['network'], station=str(int(ch_in[k])).zfill(3),
-#                                             location=aparam['stats']['location'],
-#                                             channel=aparam['stats']['channel'],
-#                                             starttime=start_time,
-#                                             endtime=ds1.waveforms.aparam['stats']['network'] + '_' +
-#                                                     aparam['stats']['channel'].raw_recording.traces[0].stats["endtime"],
-#                                             tag="raw_recording")
-#                 sta_p2 += ds2.get_waveforms(network=aparam['stats']['network'], station=str(int(ch_in[k])).zfill(3),
-#                                             location=aparam['stats']['location'],
-#                                             channel=aparam['stats']['channel'],
-#                                             starttime=ds2.waveforms.print(
-#                                                 aparam['stats']['network'] + '_' + aparam['stats'][
-#                                                     'channel']).raw_recording.traces[0].stats["starttime"],
-#                                             endtime=end_time,
-#                                             tag="raw_recording")
-#                 wf_stream = sta_p1 + sta_p2
-#                 wf_stream.merge(method=1, interpolation_samples=0)
-#
-#         else:
-#             wf_stream = Stream()
-#             for k in r:
-#                 wf_stream += ds1.get_waveforms(network=aparam['stats']['network'], station=str(int(ch_in[k])).zfill(3), location=aparam['stats']['location'],
-#                                                channel=aparam['stats']['channel'],
-#                                                starttime=start_time,
-#                                                endtime=end_time,
-#                                                tag="raw_recording")
-#     return wf_stream
+
 
diff --git a/dug_seis/processing/processing.py b/dug_seis/processing/processing.py
index 7f5cff5..a03f8bb 100644
--- a/dug_seis/processing/processing.py
+++ b/dug_seis/processing/processing.py
@@ -109,133 +109,8 @@ def processing(param):
             processed_files.append(current_file)
 
         else:
-
             flist = sorted([f for f in os.listdir(asdf_folder) if f.endswith('.h5')])
-            new_files = [f for f in flist if not f in processed_files]
-            logging.info('adding new files')
+            new_files = [f for f in flist if f not in processed_files]
             if not len(new_files):
+                logging.info('Waiting for new files.')
                 time.sleep(1)
-                logging.info('sleeping')
-
-
-"""
-Trigger and processing module of DUG-Seis
-
-
-# Copyright (c) 2018 by SCCER-SoE and SED at ETHZ
-
-Version 0.0, 23.10.2018, Joseph Doetsch (doetschj)
-             23.10.2018, Semih Demir
-             23.10.2018, Linus Villiger
-
-
-"""
-#
-# import logging
-# from obspy import Stream
-# import os
-# import time
-# import pyasdf
-# from dug_seis.processing.dug_trigger import dug_trigger
-# from dug_seis.processing.event_processing import event_processing
-#
-#
-# def processing(param):
-#     tparam = param['Trigger']
-#     logging.info('Starting trigger module')
-#
-#     # create folders for processing
-#     if not os.path.exists(param['Processing']['Folders']['quakeml_folder']):
-#         os.makedirs(param['Processing']['Folders']['quakeml_folder'])
-#     if not os.path.exists(param['Processing']['Folders']['plot_folder_active']):
-#         os.makedirs(param['Processing']['Folders']['plot_folder_active'])
-#     if not os.path.exists(param['Processing']['Folders']['plot_folder_passive']):
-#         os.makedirs(param['Processing']['Folders']['plot_folder_passive'])
-#     if not os.path.exists(param['Processing']['Folders']['plot_folder_electronic']):
-#         os.makedirs(param['Processing']['Folders']['plot_folder_electronic'])
-#     if tparam['noise_vis'] and not os.path.exists(param['Processing']['Folders']['noise_vis_folder']):
-#         os.makedirs(param['Processing']['Folders']['noise_vis_folder'])
-#
-#     # Initialisation
-#     asdf_folder = tparam['asdf_folder']  # 'raw'  # Location of .h5 file
-#     stations = [i - 1 for i in tparam['channels']]
-#     if param['General']['active_trigger_channel'] and param['General']['active_trigger_channel']-1 not in stations:
-#         stations.insert(0,param['General']['active_trigger_channel']-1)
-#     stations.sort()
-#     event_nr = 0
-#     event_nr_s = []
-#     next_noise_vis = 0
-#     # load list of ASDF snippets in folder
-#     sta_overlap = Stream()
-#     new_files = sorted([f for f in os.listdir(asdf_folder) if f.endswith('.h5')])  # generates a list of the .asdf files in asdf_folder
-#
-#     processed_files = []
-#     while 1:
-#         if len(new_files):
-#             # print(new_files)
-#             current_file = new_files.pop(0)
-#             logging.info('Working on file ' + current_file)
-#             sta = Stream()
-#
-#             ds = pyasdf.ASDFDataSet(asdf_folder + '/' + current_file, mode='r')
-#
-#             wf_list = ds.waveforms.list()
-#             for k in stations:
-#                 sta += ds.waveforms[wf_list[k]].raw_recording
-#             sta_copy = sta.copy()
-#             sta_total = sta_copy + sta_overlap
-#
-#             # Use overlap statement for merging
-#             for tr in sta_total:
-#                 tr.stats.delta = sta_total[0].stats.delta
-#             sta_total.merge(method=1, interpolation_samples=0)
-#
-#             logging.debug(sta_total)
-#             trigger_out, event_nr = dug_trigger(sta_total, tparam, event_nr, event_nr_s)  # run trigger function
-#
-#             overlap = tparam['starttime'] + tparam['endtime'] #+ tparam['endtime']['sta_lta']['lt_window']/sta_total.stats.sampling_rate
-#
-#             t_end = sta.traces[0].stats["endtime"]
-#             sta_overlap = sta.trim(t_end - overlap, t_end)
-#
-#             #trigger_out = trigger_out[trigger_out['Classification'] == 'passive']
-#             logging.debug(trigger_out)
-#
-#             event_id = [i for i in trigger_out['Event_id']]
-#             classification = [i for i in trigger_out['Classification']]
-#
-#
-#             trig_time=[i for i in trigger_out['Time'][trigger_out['Time'] <t_end-overlap]] #do not store times in overlap
-#             for l in trig_time:  #load 2 snippets if event in overlap
-#                 if l< sta_copy.traces[0].stats["starttime"]+param['Trigger']['starttime']:
-#                     load_file = [processed_files[-1], current_file]
-#                 else:
-#                     load_file = [current_file]
-#
-#             for l in range(0, len(trig_time)):
-#                 event_processing(param, load_file, trig_time[l], event_id[l], classification[l])  # run processing for each event
-#
-#             # noise visualization
-#             if tparam['noise_vis']=='True':
-#                 if next_noise_vis == 0:
-#                     next_noise_vis = sta_copy.traces[0].stats["starttime"]+param['Trigger']['starttime']#start time of current file+starttime
-#                 while next_noise_vis < sta_copy.traces[0].stats["endtime"]:#end of current file:
-#                     if next_noise_vis< sta_copy.traces[0].stats["starttime"] + param['Trigger']['starttime']:
-#                         load_file = [processed_files[-1], current_file]
-#                     else:
-#                         load_file = [current_file]
-#                     event_processing(param, load_file, next_noise_vis, 0, 'noise')  # start processing to visualize noise
-#                     next_noise_vis += tparam['noise_vis_interval']
-#
-#             processed_files.append(current_file)
-#
-#         else:
-#
-#             flist = sorted([f for f in os.listdir(asdf_folder) if f.endswith('.h5')])
-#             new_files = [f for f in flist if not f in processed_files]
-#             logging.info('adding new files')
-#             if not len(new_files):
-#                 time.sleep(1)
-#                 logging.info('sleeping')
-#
-#
\ No newline at end of file
-- 
GitLab