From 503d2307847d5236e122a34c58fc364ae9a4e739 Mon Sep 17 00:00:00 2001
From: Marcus Herrmann <marcus.herrmann@sed.ethz.ch>
Date: Fri, 13 Jan 2017 21:39:19 +0100
Subject: [PATCH] Clean up & fixes + version bump to 0.3.0

Reason for the 0.3 version bump: the setup is now configuration-file based.
---
 TM/io.py          |  2 +-
 TM/main.py        |  8 ++++++--
 TM/setupclass.py  | 10 ++++++----
 TM/waveformops.py |  5 +----
 version           |  2 +-
 xcorr.py          |  3 ++-
 6 files changed, 17 insertions(+), 13 deletions(-)

diff --git a/TM/io.py b/TM/io.py
index d3461ac..1646049 100644
--- a/TM/io.py
+++ b/TM/io.py
@@ -956,7 +956,7 @@ def get_ready_chunk(chunkTime, chunkLength):
 #    dailyStream.trim(min(starts), max(ends), pad=True, fill_value=0)
 
     # Create a bandpassed stream (perhaps resampled)
-    # (not necessarily parallel - depends on config.use_threads)
+    # (not necessarily parallel - depends on config.parallel_filter)
     chunkStream_BP = waveform_processing_parallel(chunkStream, 'bandpass')  # (keeps original)
 
     # Clean up FFT cache originating from FFT resampling
diff --git a/TM/main.py b/TM/main.py
index b183dbc..27d1bef 100644
--- a/TM/main.py
+++ b/TM/main.py
@@ -23,7 +23,7 @@ import logging
 # Scientific
 import numpy as np
 #import statsmodels.api as sm
-#import scipy.fftpack
+import scipy.fftpack
 
 # ObsPy
 from obspy import UTCDateTime
@@ -185,6 +185,9 @@ def do_xcorr_oneday(iDate, scannedDays, templateSet, catalog=None):
         #  a cache that never gets cleared, effectively creating a memory leak)
         limit_numpy_fft_cache()
 
+    # Also clean up scipy's cache (used for resampling)
+    scipy.fftpack._fftpack.destroy_rfft_cache()
+
     # Close matplotlib figure
 #    plt.close(fig)
 
@@ -260,7 +263,7 @@ def template_matching(templateSet=None, catalog=None):
                              '\n - '.join(missingVals))
 
     else:
-        raise Warning("Cannot start template matching without template information."
+        raise Warning("Cannot start template matching without template information. "
                       "You may provide a template set and/or a catalog.")
 
     # If custom templates specified, make sure it's a list of list
@@ -368,6 +371,7 @@ def template_matching(templateSet=None, catalog=None):
         #      Optional: check if reached enddate and
         #                if current templateSet differs from the one used at the beginning
 #        if iDate == config.enddate and int(scannedDays[0][1]) < len(templateSet):
+        logger.info("\n-------------------------------------------------------")
         logger.info("First run is finished. Revisit whole period with newer templates again...")
         nextpass = True
         # And start from beginning again
diff --git a/TM/setupclass.py b/TM/setupclass.py
index 4d83dfa..d22b105 100644
--- a/TM/setupclass.py
+++ b/TM/setupclass.py
@@ -406,7 +406,7 @@ class Setup:
 
         if moreInfo:
             dirname += (
-                  '_%dcomp_resamp%d' % (self.numChannels, self.resampling) +
+                  '_%dcomp_resamp%d' % (self.num_channels, self.resampling) +
                   '_dur%.1fs_BP%d-%d_%dO' % tuple([self.duration] + self.filtConf))
 
         if suffix:
@@ -434,7 +434,7 @@ class Setup:
         """
 
         if filename:
-            # Get meta data fresh from FDSN
+            # Get meta data from an inventory file (XML)
 
             inv = read_inventory(filename)
 
@@ -448,7 +448,8 @@ class Setup:
                               "%s" % filename, err.args, "Perhaps station %s "
                               "is not in there." % self.station)
 
-        else:  # (from FDSN)
+        else:
+            # ... or fresh from FDSN server
 
             missing = self._check_if_set(['FDSN_server', 'network', 'station'])
             if missing:
@@ -868,6 +869,7 @@ class Setup:
         logger.setLevel(logging.INFO)
         handler = logging.StreamHandler()
         #formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
-        formatter = logging.Formatter('%(asctime)s %(message)s')
+#        formatter = logging.Formatter('%(asctime)s %(message)s')
+        formatter = logging.Formatter('%(message)s')
         handler.setFormatter(formatter)
         logger.addHandler(handler)
diff --git a/TM/waveformops.py b/TM/waveformops.py
index 78d05bd..4a5267a 100644
--- a/TM/waveformops.py
+++ b/TM/waveformops.py
@@ -668,10 +668,7 @@ def _waveform_processing_trace(trace, resampling, procType='bandpass', freqMax=N
     #  - float32 (resampling with scipy's FFT)
     #  - float64 (no resampling, or resampling with numpy's FFT)
     #    --> save memory by casting to float32 (single precision)
-    if not np.ma.is_mask(mask):
-        trace.data = trace.data.astype(np.float32)  # (doesn't copy if already float32)
-    else:
-        trace.data = trace.data.astype(np.float32)
+    trace.data = trace.data.astype(np.float32)  # (doesn't copy if already float32)
 
     # Set encoding flag to float32 dytpe
     try:
diff --git a/version b/version
index 0c62199..0d91a54 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-0.2.1
+0.3.0
diff --git a/xcorr.py b/xcorr.py
index c8fc07a..bafea71 100644
--- a/xcorr.py
+++ b/xcorr.py
@@ -61,7 +61,8 @@ if get_from_DB:
     TM.io.getEventsFromSC3DB()
     sys.exit(0)
     # Purpose: now delete the events in the csv that are not needed
-    #          (e.g., events before startdate, or AUTOMATIC locations/magnitudes)
+    #          (e.g., events before startdate, or AUTOMATIC locations/magnitudes,
+    #                 not locatable events, etc.)
 
 catalog = TM.io.readCatalogSEDDB(timecol=-1, magcol=5)
 
-- 
GitLab