diff --git a/Ccs/DBus_Basic.py b/Ccs/DBus_Basic.py
index cc8dd3b521be317ddabe8a7e6ce2f55f07c402cf..aa36a7f065fd24d99b1dfa9824160b76830df674 100644
--- a/Ccs/DBus_Basic.py
+++ b/Ccs/DBus_Basic.py
@@ -124,6 +124,10 @@ class MessageListener(dbus.service.Object):
             result = method_to_call(*args, **kwargs)
         except Exception as e:
             result = str(e)
+
+        if result is None:
+            return
+
         result = self.python_to_dbus(result, user_console=user_console)
         return result
 
diff --git a/Ccs/ccs_function_lib.py b/Ccs/ccs_function_lib.py
index d00b77a3c8e533549ee7d746a945d8e3e202acd2..7fe8ba81f4c7f6c2bf93ecfd0195b37fe9a5d970 100644
--- a/Ccs/ccs_function_lib.py
+++ b/Ccs/ccs_function_lib.py
@@ -27,8 +27,6 @@ from s2k_partypes import ptt, ptt_reverse, ptype_parameters, ptype_values
 import confignator
 import importlib
 
-import decompression
-
 
 cfg = confignator.get_config(check_interpolation=False)
 
@@ -4583,11 +4581,11 @@ def collect_13(pool_name, starttime=None, endtime=None, startidx=None, endidx=No
                sdu=None, verbose=True):
 
     if not os.path.isfile(pool_name):
-        logger.info('{} is not a file, looking it up in DB')
+        logger.debug('{} is not a file, looking it up in DB'.format(pool_name))
         # try fetching pool info from pools opened in viewer
-        pname = _get_displayed_pool_path(pool_name)
-        if pname:
-            pool_name = pname
+        # pname = _get_displayed_pool_path(pool_name)
+        # if pname:
+        #     pool_name = pname
 
     rows = get_pool_rows(pool_name, check_existence=True)
 
@@ -4732,7 +4730,8 @@ def dump_large_data(pool_name, starttime=0, endtime=None, outdir="", dump_all=Fa
     filedict = {}
     ldt_dict = collect_13(pool_name, starttime=starttime, endtime=endtime, join=True, collect_all=dump_all,
                           startidx=startidx, endidx=endidx, sdu=sdu, verbose=verbose)
-    n_ldt = len(ldt_dict)
+
+    ldt_cnt = 0
     for i, buf in enumerate(ldt_dict, 1):
         if ldt_dict[buf] is None:
             continue
@@ -4748,9 +4747,12 @@ def dump_large_data(pool_name, starttime=0, endtime=None, outdir="", dump_all=Fa
         with open(fname, "wb") as fdesc:
             fdesc.write(ldt_dict[buf])
             filedict[buf] = fdesc.name
+            ldt_cnt += 1
 
-    logger.info('Dumped {} CEs to {}'.format(n_ldt, outdir))
-    print('Dumped {} CEs to {}'.format(n_ldt, outdir))
+    if ldt_cnt != 0:
+        logger.info('Dumped {} CEs to {}'.format(ldt_cnt, outdir))
+    logger.debug('{} CEs found'.format(ldt_cnt))
+    # print('Dumped {} CEs to {}'.format(ldt_cnt, outdir))
 
     return filedict
 
diff --git a/Ccs/decompression.py b/Ccs/decompression.py
index 62af3b880a2c881e843a315b9f6e9ed084438c1d..3d87e1364e6266441eb9b53ea5f2fee27b8c5ec5 100644
--- a/Ccs/decompression.py
+++ b/Ccs/decompression.py
@@ -10,8 +10,8 @@ import confignator
 import ccs_function_lib as cfl
 
 cfg = confignator.get_config(check_interpolation=False)
-logger = logging.getLogger(__name__)
-logger.setLevel(getattr(logging, cfg.get('ccs-logging', 'level').upper()))
+logger = cfl.start_logging('Decompression')
+# logger.setLevel(getattr(logging, cfg.get('ccs-logging', 'level').upper()))
 
 CE_COLLECT_TIMEOUT = 1
 LDT_MINIMUM_CE_GAP = 0.001
@@ -81,7 +81,7 @@ def ce_decompress(outdir, pool_name=None, sdu=None, starttime=None, endtime=None
     decomp = CeDecompress(outdir, pool_name=pool_name, sdu=sdu, starttime=starttime, endtime=endtime, startidx=startidx,
                           endidx=endidx, ce_exec=ce_exec)
     decomp.start()
-    ce_decompressors[int(time.time())] = decomp
+    ce_decompressors[decomp.init_time] = decomp
 
 
 def ce_decompress_stop(name=None):
@@ -105,6 +105,8 @@ class CeDecompress:
         self.startidx = startidx
         self.endidx = endidx
 
+        self.init_time = int(time.time())
+
         if ce_exec is None:
             try:
                 self.ce_exec = cfg.get('ccs-misc', 'ce_exec')
@@ -137,7 +139,7 @@ class CeDecompress:
 
         try:
             thread.start()
-            logger.info('Started CeDecompress...')
+            logger.info('Started CeDecompress [{}]...'.format(self.init_time))
         except Exception as err:
             logger.error(err)
             self.ce_decompression_on = False
@@ -155,9 +157,14 @@ class CeDecompress:
             subprocess.run([self.ce_exec, cefile, fitspath], stdout=open(cefile[:-2] + 'log', 'w'))
 
         # first, get all TM13s already complete in pool
-        filedict = cfl.dump_large_data(pool_name=self.pool_name, starttime=self.last_ce_time, endtime=self.endtime,
-                                       outdir=self.outdir, dump_all=True, sdu=self.sdu, startidx=self.startidx,
-                                       endidx=self.endidx)
+        try:
+            filedict = cfl.dump_large_data(pool_name=self.pool_name, starttime=self.last_ce_time, endtime=self.endtime,
+                                           outdir=self.outdir, dump_all=True, sdu=self.sdu, startidx=self.startidx,
+                                           endidx=self.endidx)
+        except ValueError as err:
+            ce_decompressors.pop(self.init_time)
+            raise err
+
         for ce in filedict:
             self.last_ce_time = ce
             decompress(filedict[ce])
@@ -173,7 +180,8 @@ class CeDecompress:
             decompress(cefile)
             self.last_ce_time += self.ldt_minimum_ce_gap
             time.sleep(self.ce_collect_timeout)
-        logger.info('CeDecompress stopped.')
+        logger.info('CeDecompress stopped [{}].'.format(self.init_time))
+        ce_decompressors.pop(self.init_time)
 
     def start(self):
         self._ce_decompress()
diff --git a/Ccs/poolview_sql.py b/Ccs/poolview_sql.py
index 2480f12f9f12cc86419a88adbf1a2e9322241831..3c659a756304229995abdcc6c6846b62e6883dc5 100644
--- a/Ccs/poolview_sql.py
+++ b/Ccs/poolview_sql.py
@@ -117,7 +117,8 @@ class TMPoolView(Gtk.Window):
 
         self.refresh_treeview_active = False
         self.cnt = 0
-        self.active_pool_info = ActivePoolInfo(None, None, None, None)
+        # self.active_pool_info = ActivePoolInfo(None, None, None, None)
+        self.active_pool_info = ActivePoolInfo('', 0, '', False)
         self.set_border_width(2)
         self.set_resizable(True)
         self.set_default_size(1150, 1280)
@@ -575,7 +576,7 @@ class TMPoolView(Gtk.Window):
         self.adj = scrollbar.get_adjustment()
         # get size of tmpool
 
-        if self.active_pool_info.pool_name is not None:
+        if self.active_pool_info.pool_name not in (None, ''):
             self.adj.set_upper(self.count_current_pool_rows())
         self.adj.set_page_size(25)
         scrollbar.connect('value_changed', self._on_scrollbar_changed, self.adj, False)
diff --git a/Ccs/pus_datapool.py b/Ccs/pus_datapool.py
index abbc97cc788316133eaaa99c81b77ad58349f178..120f8c729fd3d848e27c1b980bb4a6402c76163c 100644
--- a/Ccs/pus_datapool.py
+++ b/Ccs/pus_datapool.py
@@ -235,7 +235,7 @@ class DatapoolManager:
             # new_session.flush()
             new_session.commit()
             new_session.close()
-            self.lo('DELETED POOL\n >{}<\nFROM DB'.format(pool_name))
+            self.logger.info('DELETED POOL\n >{}<\nFROM DB'.format(pool_name))
         return
 
     def _clear_db(self):