diff --git a/TM/io.py b/TM/io.py
index d3461acb85e7090015128e0e2eaab0b8a0e18c49..1646049debe1c9f046875bb906a0daa36094ec8c 100644
--- a/TM/io.py
+++ b/TM/io.py
@@ -956,7 +956,7 @@ def get_ready_chunk(chunkTime, chunkLength):
 #    dailyStream.trim(min(starts), max(ends), pad=True, fill_value=0)
 
     # Create a bandpassed stream (perhaps resampled)
-    # (not necessarily parallel - depends on config.use_threads)
+    # (not necessarily parallel - depends on config.parallel_filter)
     chunkStream_BP = waveform_processing_parallel(chunkStream, 'bandpass')  # (keeps original)
 
     # Clean up FFT cache originating from FFT resampling
diff --git a/TM/main.py b/TM/main.py
index b183dbc270ea1a4cb27a9e2a385e10a21e0ba053..27d1beff224a2e7c5066b84578d8081a742babf8 100644
--- a/TM/main.py
+++ b/TM/main.py
@@ -23,7 +23,7 @@ import logging
 # Scientific
 import numpy as np
 #import statsmodels.api as sm
-#import scipy.fftpack
+import scipy.fftpack
 
 # ObsPy
 from obspy import UTCDateTime
@@ -185,6 +185,9 @@ def do_xcorr_oneday(iDate, scannedDays, templateSet, catalog=None):
         #  a cache that never gets cleared, effectively creating a memory leak)
         limit_numpy_fft_cache()
 
+    # Also clean up scipy's FFT cache (used for resampling).
+    # NOTE(review): this reaches into a private API (scipy.fftpack._fftpack);
+    #               it may break on scipy upgrades since scipy.fftpack is legacy.
+    scipy.fftpack._fftpack.destroy_rfft_cache()
+
     # Close matplotlib figure
 #    plt.close(fig)
 
@@ -260,7 +263,7 @@ def template_matching(templateSet=None, catalog=None):
                              '\n - '.join(missingVals))
 
     else:
-        raise Warning("Cannot start template matching without template information."
+        raise Warning("Cannot start template matching without template information. "
                       "You may provide a template set and/or a catalog.")
 
     # If custom templates specified, make sure it's a list of list
@@ -368,6 +371,7 @@ def template_matching(templateSet=None, catalog=None):
         #      Optional: check if reached enddate and
         #                if current templateSet differs from the one used at the beginning
 #        if iDate == config.enddate and int(scannedDays[0][1]) < len(templateSet):
+        logger.info("\n-------------------------------------------------------")
         logger.info("First run is finished. Revisit whole period with newer templates again...")
         nextpass = True
         # And start from beginning again
diff --git a/TM/setupclass.py b/TM/setupclass.py
index 4d83dfa35219a75501d3fe01c10806cbb21f89f7..d22b105bc9238e1c87b44457837730fdb3bcc3ce 100644
--- a/TM/setupclass.py
+++ b/TM/setupclass.py
@@ -406,7 +406,7 @@ class Setup:
 
         if moreInfo:
             dirname += (
-                  '_%dcomp_resamp%d' % (self.numChannels, self.resampling) +
+                  '_%dcomp_resamp%d' % (self.num_channels, self.resampling) +
                   '_dur%.1fs_BP%d-%d_%dO' % tuple([self.duration] + self.filtConf))
 
         if suffix:
@@ -434,7 +434,7 @@ class Setup:
         """
 
         if filename:
-            # Get meta data fresh from FDSN
+            # Get meta data from an inventory file (XML)
 
             inv = read_inventory(filename)
 
@@ -448,7 +448,8 @@ class Setup:
                               "%s" % filename, err.args, "Perhaps station %s "
                               "is not in there." % self.station)
 
-        else:  # (from FDSN)
+        else:
+            # ... or fresh from FDSN server
 
             missing = self._check_if_set(['FDSN_server', 'network', 'station'])
             if missing:
@@ -868,6 +869,7 @@ class Setup:
         logger.setLevel(logging.INFO)
         handler = logging.StreamHandler()
         #formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
-        formatter = logging.Formatter('%(asctime)s %(message)s')
+#        formatter = logging.Formatter('%(asctime)s %(message)s')
+        formatter = logging.Formatter('%(message)s')
         handler.setFormatter(formatter)
         logger.addHandler(handler)
diff --git a/TM/waveformops.py b/TM/waveformops.py
index 78d05bdec5fadf578dce229c81d53b9f4c786650..4a5267ab7fb83a5be6bb4a273df5b5db64ecd7d7 100644
--- a/TM/waveformops.py
+++ b/TM/waveformops.py
@@ -668,10 +668,7 @@ def _waveform_processing_trace(trace, resampling, procType='bandpass', freqMax=N
     #  - float32 (resampling with scipy's FFT)
     #  - float64 (no resampling, or resampling with numpy's FFT)
     #    --> save memory by casting to float32 (single precision)
-    if not np.ma.is_mask(mask):
-        trace.data = trace.data.astype(np.float32)  # (doesn't copy if already float32)
-    else:
-        trace.data = trace.data.astype(np.float32)
+    trace.data = trace.data.astype(np.float32)  # (doesn't copy if already float32)
 
     # Set encoding flag to float32 dytpe
     try:
diff --git a/version b/version
index 0c62199f16ac1e2d7f7ae75b420c1231325dff4e..0d91a54c7d439e84e3dd17d3594f1b2b6737f430 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-0.2.1
+0.3.0
diff --git a/xcorr.py b/xcorr.py
index c8fc07ab0dffe9ace934192d2b8cb13b54274e18..bafea712ae4c7a77c175b2697ad2c21fa9258594 100644
--- a/xcorr.py
+++ b/xcorr.py
@@ -61,7 +61,8 @@ if get_from_DB:
     TM.io.getEventsFromSC3DB()
     sys.exit(0)
     # Purpose: now delete the events in the csv that are not needed
-    #          (e.g., events before startdate, or AUTOMATIC locations/magnitudes)
+    #          (e.g., events before startdate, or AUTOMATIC locations/magnitudes,
+    #                 not locatable events, etc.)
 
 catalog = TM.io.readCatalogSEDDB(timecol=-1, magcol=5)