From d800de9e5c1c0b0677b168a1e4465f995f3232de Mon Sep 17 00:00:00 2001 From: Marko Mecina <marko.mecina@univie.ac.at> Date: Fri, 10 Nov 2023 13:26:41 +0100 Subject: [PATCH] reduce viewer and monitor dependence on running poolmanager instance --- Ccs/ccs_function_lib.py | 294 +++++++++++++++++++++++- Ccs/editor.py | 2 + Ccs/monitor.py | 51 ++--- Ccs/plotter.py | 2 +- Ccs/poolview_sql.py | 309 ++++++++++++++------------ Ccs/pus_datapool.py | 2 + Tst/json_to_barescript.py | 4 +- Tst/testing_library/testlib/report.py | 2 +- Tst/tst/view.py | 2 + 9 files changed, 482 insertions(+), 186 deletions(-) diff --git a/Ccs/ccs_function_lib.py b/Ccs/ccs_function_lib.py index fda779d..f8ac245 100644 --- a/Ccs/ccs_function_lib.py +++ b/Ccs/ccs_function_lib.py @@ -28,6 +28,7 @@ from database.tm_db import scoped_session_maker, DbTelemetry, DbTelemetryPool, R from sqlalchemy.exc import OperationalError as SQLOperationalError from sqlalchemy.sql.expression import func +from typing import NamedTuple from s2k_partypes import ptt, ptt_reverse, ptype_parameters, ptype_values import confignator import importlib @@ -130,6 +131,14 @@ else: user_tm_decoders = {} +ActivePoolInfo = NamedTuple( + 'ActivePoolInfo', [ + ('filename', str), + ('modification_time', int), + ('pool_name', str), + ('live', bool)]) + + def _reset_mib_caches(): _pcf_cache.clear() _cap_cache.clear() @@ -259,7 +268,7 @@ def start_editor(*files, console=False, **kwargs): # Start Parameter Monitor # Argumnet gives the possibility to run file in the console to see print comands -def start_monitor(pool_name=None, parameter_set=None, console=False, **kwargs): +def start_monitor(pool_name, parameter_set=None, console=False, **kwargs): """ Gets the path of the Startfile for the Monitor and executes it @@ -270,12 +279,10 @@ def start_monitor(pool_name=None, parameter_set=None, console=False, **kwargs): directory = cfg.get('paths', 'ccs') file_path = os.path.join(directory, 'monitor.py') - if pool_name is not None and parameter_set is not None: + if parameter_set is not None: start_app(file_path, directory, pool_name, parameter_set, console=console, **kwargs) - elif pool_name is not None: - start_app(file_path, directory, pool_name, console=console, **kwargs) else: - start_app(file_path, directory, console=console, **kwargs) + start_app(file_path, directory, pool_name, console=console, **kwargs) # Start Parameter Plotter @@ -5768,9 +5775,10 @@ def dump_large_data(pool_name, starttime=0, endtime=None, outdir="", dump_all=Fa return filedict +import threading class DbTools: """ - SQL database management helpers + SQL database management tools """ @staticmethod @@ -5893,6 +5901,280 @@ class DbTools: finally: new_session.close() + @staticmethod + def sql_insert_binary_dump(filename, brute=False, force_db_import=False, protocol='PUS', pecmode='warn', parent=None): + + active_pool_info = ActivePoolInfo( + filename, + int(os.path.getmtime(filename)), + os.path.basename(filename), + False) + + new_session = scoped_session_storage + pool_exists_in_db_already = new_session.query( + DbTelemetryPool + ).filter( + DbTelemetryPool.pool_name == active_pool_info.filename, + DbTelemetryPool.modification_time == active_pool_info.modification_time + ).count() > 0 + if (not pool_exists_in_db_already) or force_db_import: + if force_db_import: + del_time = time.strftime('%s') + new_session.execute( + 'UPDATE tm_pool SET pool_name="---TO-BE-DELETED{}" WHERE tm_pool.pool_name="{}"'.format( + del_time, filename)) + new_session.commit() + new_session.close() + # delete obsolete rows 
+ del_thread = threading.Thread(target=DbTools.delete_abandoned_rows, args=[del_time], name='delete_abandoned') + del_thread.daemon = True + del_thread.start() + + logger.info("Data not in DB - must import...") + loadinfo = LoadInfo(parent=parent) + # loadinfo.spinner.start() + # loadinfo.show_all() + _loader_thread = threading.Thread(target=DbTools.import_dump_in_db, + args=[active_pool_info, loadinfo], + kwargs={'brute': brute, 'protocol': protocol, 'pecmode': pecmode}) + _loader_thread.daemon = True + _loader_thread.start() + + logger.info('Loaded Pool:' + str(filename)) + + new_session.close() + + logger.info('Loaded Pool:' + str(filename)) + + return active_pool_info + + @staticmethod + def import_dump_in_db(pool_info, loadinfo, brute=False, protocol='PUS', pecmode='warn'): + loadinfo.ok_button.set_sensitive(False) + loadinfo.spinner.start() + new_session = scoped_session_storage + new_session.query( + DbTelemetryPool + ).filter( + DbTelemetryPool.pool_name == pool_info.filename + ).delete() + new_session.flush() + newPoolRow = DbTelemetryPool( + pool_name=pool_info.filename, + modification_time=pool_info.modification_time, + protocol=protocol) + new_session.add(newPoolRow) + new_session.flush() # DB assigns auto-increment field (primary key iid) used below + + bulk_insert_size = 1000 # number of rows to transfer in one transaction + state = [1] + protocol_ids = SPW_PROTOCOL_IDS_R + + def mkdict_spw(head, tm_raw): + pkt = head.bits + + if protocol_ids[pkt.PROTOCOL_ID] == 'RMAP': + pcktdict = dict(pool_id=newPoolRow.iid, + idx=state[0], + cmd=pkt.PKT_TYPE, + write=pkt.WRITE, + verify=pkt.VERIFY, + reply=pkt.REPLY, + increment=pkt.INCREMENT, + keystat=pkt.KEY if pkt.PKT_TYPE == 1 else pkt.STATUS, + taid=pkt.TRANSACTION_ID, + addr=pkt.ADDR if pkt.PKT_TYPE == 1 else None, + datalen=pkt.DATA_LEN if hasattr(pkt, 'DATA_LEN') else 0, + raw=tm_raw) + + elif protocol_ids[pkt.PROTOCOL_ID] == 'FEEDATA': + pcktdict = dict(pool_id=newPoolRow.iid, + idx=state[0], + pktlen=pkt.DATA_LEN, + type=head.comptype, + framecnt=pkt.FRAME_CNT, + seqcnt=pkt.SEQ_CNT, + raw=tm_raw) + + else: + return + + state[0] += 1 + if state[0] % bulk_insert_size == 0: + GLib.idle_add(loadinfo.log.set_text, "Loaded {:d} rows.".format(state[0], )) + return pcktdict + + def mkdict_pus(tmd, tm_raw, truncate=True): + tm = tmd[0] + + pcktdict = dict(pool_id=newPoolRow.iid, + idx=state[0], + is_tm=tm.PKT_TYPE, + apid=tm.APID, + seq=tm.PKT_SEQ_CNT, + len_7=tm.PKT_LEN, + stc=tm.SERV_TYPE, + sst=tm.SERV_SUB_TYPE, + destID=tm.DEST_ID if tm.PKT_TYPE == 0 else tm.SOURCE_ID, + timestamp=cuc_time_str(tm), + data=tmd[1][:MAX_PKT_LEN], + raw=tm_raw[:MAX_PKT_LEN]) + + state[0] += 1 + if state[0] % bulk_insert_size == 0: + GLib.idle_add(loadinfo.log.set_text, "Loaded {:d} rows.".format(state[0], )) + return pcktdict + + if protocol == 'PUS': + mkdict = mkdict_pus + elif protocol == 'SPW': + mkdict = mkdict_spw + else: + new_session.rollback() + new_session.close() + logger.info("Protocol '{}' not supported".format(protocol)) + loadinfo.log.set_text("Protocol '{}' not supported".format(protocol)) + loadinfo.spinner.stop() + loadinfo.ok_button.set_sensitive(True) + return + + loadinfo.log.set_text("Parsing file...") + DbTools.db_bulk_insert(pool_info.filename, mkdict, bulk_insert_size=bulk_insert_size, brute=brute, + protocol=protocol, pecmode=pecmode) + + # self.pool.decode_tmdump_and_process_packets(pool_info.filename, process_tm, brute=brute) + new_session.commit() + logger.info("Loaded %d rows." 
% (state[0] - 1)) + loadinfo.log.set_text("Loaded %d rows." % (state[0] - 1)) + loadinfo.spinner.stop() + loadinfo.ok_button.set_sensitive(True) + # Ignore Reply is allowed here, since the instance is passed along + # pv.Functions('_set_list_and_display_Glib_idle_add', self.active_pool_info, int(self.my_bus_name[-1]), ignore_reply=True) + # GLib.idle_add(self._set_pool_list_and_display) + new_session.close() + + @staticmethod + def db_bulk_insert(filename, processor, bulk_insert_size=1000, brute=False, checkcrc=True, protocol='PUS', pecmode='warn'): + + with open(filename, 'rb') as buf: + + pcktcount = 0 + + new_session = scoped_session_storage + new_session.execute('set unique_checks=0,foreign_key_checks=0') + + if protocol == 'PUS': + buf = buf.read() + if brute: + pckts = extract_pus_brute_search(buf, filename=filename) + checkcrc = False # CRC already performed during brute_search + + else: + pckts = extract_pus(buf) + + pcktdicts = [] + for pckt in pckts: + if checkcrc: + if crc_check(pckt): + if pecmode == 'warn': + if len(pckt) > 7: + logger.info('db_bulk_insert: [CRC error]: packet with seq nr ' + str( + int(pckt[5:7].hex(), 16)) + '\n') + else: + logger.info('INVALID packet -- too short' + '\n') + elif pecmode == 'discard': + if len(pckt) > 7: + logger.info( + '[CRC error]: packet with seq nr ' + str( + int(pckt[5:7].hex(), 16)) + ' (discarded)\n') + else: + logger.info('INVALID packet -- too short' + '\n') + continue + + pcktdicts.append(processor(unpack_pus(pckt), pckt)) + pcktcount += 1 + if pcktcount % bulk_insert_size == 0: + new_session.execute(DbTelemetry.__table__.insert(), pcktdicts) + # new_session.bulk_insert_mappings(DbTelemetry, pcktdicts) + pcktdicts = [] + + new_session.execute(DbTelemetry.__table__.insert(), pcktdicts) + + elif protocol == 'SPW': + headers, pckts, remainder = extract_spw(buf) + + pcktdicts_rmap = [] + pcktdicts_feedata = [] + + for head, pckt in zip(headers, pckts): + + if SPW_PROTOCOL_IDS_R[head.bits.PROTOCOL_ID] == 'RMAP': + pcktdicts_rmap.append(processor(head, pckt)) + elif SPW_PROTOCOL_IDS_R[head.bits.PROTOCOL_ID] == 'FEEDATA': + pcktdicts_feedata.append(processor(head, pckt)) + + pcktcount += 1 + if pcktcount % bulk_insert_size == 0: + if len(pcktdicts_rmap) > 0: + new_session.execute(RMapTelemetry.__table__.insert(), pcktdicts_rmap) + pcktdicts_rmap = [] + if len(pcktdicts_feedata) > 0: + new_session.execute(FEEDataTelemetry.__table__.insert(), pcktdicts_feedata) + pcktdicts_feedata = [] + + if len(pcktdicts_rmap) > 0: + new_session.execute(RMapTelemetry.__table__.insert(), pcktdicts_rmap) + if len(pcktdicts_feedata) > 0: + new_session.execute(FEEDataTelemetry.__table__.insert(), pcktdicts_feedata) + + new_session.execute('set unique_checks=1, foreign_key_checks=1') + new_session.commit() + new_session.close() + + +class LoadInfo(Gtk.Window): + def __init__(self, parent=None, title="DB Loader"): + Gtk.Window.__init__(self) + + if title is None: + self.set_title('Loading data to pool...') + else: + self.set_title(title) + + self.pmgr = parent + + grid = Gtk.VBox() + # pixbuf = Gtk.gdk.pixbuf_new_from_file('pixmap/Icon_Space_weiß_en.png') + # pixbuf = pixbuf.scale_simple(100, 100, Gtk.gdk.INTERP_BILINEAR) + # logo = Gtk.image_new_from_pixbuf(pixbuf) + logo = Gtk.Image.new_from_file('pixmap/ccs_logo_2.svg') + + self.spinner = Gtk.Spinner() + self.spinner.set_size_request(48, 48) + # self.spinner.start() + self.log = Gtk.Label() + self.ok_button = Gtk.Button.new_with_label('OK') + self.ok_button.connect('clicked', self.destroy_window) + + 
grid.pack_start(logo, 1, 1, 0) + grid.pack_start(self.spinner, 1, 1, 0) + grid.pack_start(self.log, 1, 1, 0) + grid.pack_start(self.ok_button, 1, 1, 0) + grid.set_spacing(2) + + self.add(grid) + + self.show_all() + + def destroy_window(self, widget, *args): + # if is_open('poolviewer', communication['poolviewer']): + # pv = dbus_connection('poolviewer', communication['poolviewer']) + # pv.Functions('small_refresh_function') + try: + self.destroy() + except Exception as err: + print(err) + class TestReport: """ diff --git a/Ccs/editor.py b/Ccs/editor.py index ff9e48f..5f15165 100644 --- a/Ccs/editor.py +++ b/Ccs/editor.py @@ -1452,6 +1452,8 @@ class CcsEditor(Gtk.Window): # Add the different Starting Options vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=5, margin=4) for name in self.cfg['ccs-dbus_names']: + if name in ['plotter', 'monitor']: + continue start_button = Gtk.Button.new_with_label("Start " + name.capitalize()) start_button.connect("clicked", cfl.on_open_univie_clicked) vbox.pack_start(start_button, True, True, 0) diff --git a/Ccs/monitor.py b/Ccs/monitor.py index ab52b9f..9a8999c 100644 --- a/Ccs/monitor.py +++ b/Ccs/monitor.py @@ -94,23 +94,23 @@ class ParameterMonitor(Gtk.Window): self.reset_evt_cnt() return - def check_for_pools(self): - try: - # poolmgr = cfl.dbus_connection('poolmanager', cfl.communication['poolmanager']) - poolmgr = cfl.get_module_handle('poolmanager') - pools = poolmgr.Functions('loaded_pools_export_func') - if len(pools) == 1: - pool_name = pools[0][0] - if '/' in pools[0][0]: - pool_name = pools[0][0].split('/')[-1] - self.set_pool(pool_name) - return 1 - else: - self.logger.error('Failed to open a pool.') - return 0 - except Exception as err: - self.logger.error(err) - return 0 + # def check_for_pools(self): + # try: + # # poolmgr = cfl.dbus_connection('poolmanager', cfl.communication['poolmanager']) + # poolmgr = cfl.get_module_handle('poolmanager') + # pools = poolmgr.Functions('loaded_pools_export_func') + # if len(pools) == 1: + # pool_name = pools[0][0] + # if '/' in pools[0][0]: + # pool_name = pools[0][0].split('/')[-1] + # self.set_pool(pool_name) + # return 1 + # else: + # self.logger.error('Failed to open a pool.') + # return 0 + # except Exception as err: + # self.logger.error(err) + # return 0 def set_pool(self, pool_name): self.pool_name = pool_name @@ -1098,23 +1098,16 @@ if __name__ == "__main__": sys.argv.remove(arg) if len(sys.argv) == 2: - is_pool = win.set_pool(sys.argv[1]) - - elif len(sys.argv) >= 3: - - if len(sys.argv) > 3: - win.logger.warning('Too many arguments, ignoring {}'.format(sys.argv[3:])) + win.set_pool(sys.argv[1]) + elif len(sys.argv) > 2: + win.set_pool(sys.argv[1]) win.set_parameter_view(sys.argv[2]) - is_pool = win.set_pool(sys.argv[1]) - elif len(sys.argv) == 1: - is_pool = win.check_for_pools() + # elif len(sys.argv) == 1: + # is_pool = win.check_for_pools() else: - is_pool = 0 - - if is_pool == 0: win.quit_func() sys.exit() diff --git a/Ccs/plotter.py b/Ccs/plotter.py index 1644a8e..4aa5952 100644 --- a/Ccs/plotter.py +++ b/Ccs/plotter.py @@ -477,7 +477,7 @@ class PlotViewer(Gtk.Window): vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=5, margin=4) for name in self.cfg['ccs-dbus_names']: # don't provide "start plotter" - if name == 'plotter': + if name in ['plotter', 'monitor']: continue start_button = Gtk.Button.new_with_label("Start " + name.capitalize()) start_button.connect("clicked", cfl.on_open_univie_clicked) diff --git a/Ccs/poolview_sql.py b/Ccs/poolview_sql.py index 
f8ab42e..5f8303d 100644 --- a/Ccs/poolview_sql.py +++ b/Ccs/poolview_sql.py @@ -36,6 +36,7 @@ from event_storm_squasher import delayed from sqlalchemy.orm import load_only from database.tm_db import DbTelemetryPool, DbTelemetry, RMapTelemetry, FEEDataTelemetry, scoped_session_maker +ActivePoolInfo = cfl.ActivePoolInfo cfg = confignator.get_config(check_interpolation=False) @@ -43,16 +44,6 @@ project = 'packet_config_{}'.format(cfg.get('ccs-database', 'project')) packet_config = importlib.import_module(project) TM_HEADER_LEN, TC_HEADER_LEN, PEC_LEN = [packet_config.TM_HEADER_LEN, packet_config.TC_HEADER_LEN, packet_config.PEC_LEN] -ActivePoolInfo = NamedTuple( - 'ActivePoolInfo', [ - ('filename', str), - ('modification_time', int), - ('pool_name', str), - ('live', bool)]) - -# fmtlist = {'INT8': 'b', 'UINT8': 'B', 'INT16': 'h', 'UINT16': 'H', 'INT32': 'i', 'UINT32': 'I', 'INT64': 'q', -# 'UINT64': 'Q', 'FLOAT': 'f', 'DOUBLE': 'd', 'INT24': 'i24', 'UINT24': 'I24', 'bit*': 'bit'} - Telemetry = {'PUS': DbTelemetry, 'RMAP': RMapTelemetry, 'FEE': FEEDataTelemetry} @@ -94,7 +85,7 @@ class TMPoolView(Gtk.Window): colour_filters = {} # active_pool_info = None # type: Union[None, ActivePoolInfo] decoding_type = 'PUS' - live_signal = {True: '[LIVE]', False: None} + live_signal = {True: '[REC]', False: None} currently_selected = set() shift_range = [1, 1] active_row = None @@ -297,7 +288,8 @@ class TMPoolView(Gtk.Window): return for pool in pools: - self.set_pool(pool[0]) + if not pool[0].count('/'): + self.set_pool(pool[0]) except Exception as err: self.logger.exception(err) @@ -453,17 +445,10 @@ class TMPoolView(Gtk.Window): editor.Functions('_to_console_via_socket', "pv" +str(Count)+ " = dbus.SessionBus().get_object('" + str(My_Bus_Name) + "', '/MessageListener')") - ##### # Check if pool from poolmanagers should be loaded, default is True - check_for_pools = True - if '--not_load' in sys.argv: - check_for_pools = False - - if check_for_pools: + if '--noload' not in sys.argv: self.set_pool_from_pmgr() - return - def add_colour_filter(self, colour_filter): seq = len(self.colour_filters.keys()) rgba = Gdk.RGBA() @@ -878,7 +863,7 @@ class TMPoolView(Gtk.Window): self.treeview.thaw_child_notify() def _filter_rows(self, rows): - + def f_rule(x): if x[1] == '==': return x[0] == x[2] @@ -1575,8 +1560,8 @@ class TMPoolView(Gtk.Window): def create_pool_managebar(self): self.pool_managebar = Gtk.HBox() - self.pool_selector = Gtk.ComboBoxText(tooltip_text='Pool to view') - pool_names = Gtk.ListStore(str, str, str) + self.pool_selector = Gtk.ComboBoxText(tooltip_text='Pool path') + pool_names = Gtk.ListStore(str, str, str, str) # if self.pool != None: # [self.pool_names.append([name]) for name in self.pool.datapool.keys()] @@ -1938,9 +1923,8 @@ class TMPoolView(Gtk.Window): ) pool = pool.all() # new_session.commit() - time.sleep(0.5) # with no wait query might return empty. WHY???############################################## + # time.sleep(0.5) # with no wait query might return empty. WHY? 
# Still sometimes empty, if not pool abfrage should stopp this behaviour - #print(pool.all()) if not pool: self.decoding_type = 'PUS' @@ -1959,15 +1943,26 @@ class TMPoolView(Gtk.Window): # [self.pool_names.append([name]) for name in self.pool.datapool.keys()] # self.pool_selector.set_model(self.pool_names) - #This function fills the Active pool info variable with data form pus_datapool + # This function fills the Active pool info variable with data form pus_datapool def Active_Pool_Info_append(self, pool_info=None): if pool_info is not None: self.active_pool_info = ActivePoolInfo(str(pool_info[0]), int(pool_info[1]), str(pool_info[2]), bool(pool_info[3])) - #self.decoding_type = 'PUS' self.check_structure_type() + self._set_pool_selector_tooltip() return self.active_pool_info + def _set_pool_selector_tooltip(self): + self.pool_selector.set_tooltip_text(self.active_pool_info.filename) + + def _check_pool_is_loaded(self, filename): + model = self.pool_selector.get_model() + for pool in model: + if pool[3] == filename: + self.logger.info('Pool {} already loaded'.format(filename)) + return True + return False + def update_columns(self): columns = self.treeview.get_columns() for column in columns: @@ -1983,29 +1978,20 @@ class TMPoolView(Gtk.Window): def select_pool(self, selector, new_pool=None): if not new_pool: - pool_name = selector.get_active_text() + pool_name = selector.get_model()[selector.get_active_iter()][3] else: pool_name = new_pool - #self.active_pool_info = self.pool.loaded_pools[pool_name] - # self.active_pool_info = poolmgr.Dictionaries('loaded_pools', pool_name) + new_session = self.session_factory_storage try: - poolmgr = cfl.dbus_connection('poolmanager', cfl.communication['poolmanager']) - if not poolmgr: - raise TypeError - self.Active_Pool_Info_append(cfl.Dictionaries(poolmgr, 'loaded_pools', pool_name)) - - except: - new_session = self.session_factory_storage - - if self.active_pool_info.pool_name == pool_name: - #type = new_session.query(DbTelemetryPool).filter(DbTelemetryPool.pool_name == self.active_pool_info.filename).first() - #self.Active_Pool_Info_append([pool_name, type.modification_time, pool_name, False]) - pass - else: - type = new_session.query(DbTelemetryPool).filter(DbTelemetryPool.pool_name == pool_name) - self.Active_Pool_Info_append([pool_name, type.modification_time, pool_name, False]) + if self.active_pool_info.filename != pool_name: + dbpool = new_session.query(DbTelemetryPool).filter(DbTelemetryPool.pool_name == pool_name).first() + self.Active_Pool_Info_append([pool_name, dbpool.modification_time, dbpool.pool_name, False]) + except Exception as err: + self.logger.warning(err) + return + finally: new_session.close() self.update_columns() @@ -2033,6 +2019,11 @@ class TMPoolView(Gtk.Window): def clear_pool(self, widget): poolmgr = cfl.dbus_connection('poolmanager', cfl.communication['poolmanager']) + + # don't clear static pools + if self.active_pool_info.filename.count('/'): + return + widget.set_sensitive(False) pool_name = self.get_active_pool_name() @@ -2070,6 +2061,8 @@ class TMPoolView(Gtk.Window): # Add the different Starting Options vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=5, margin=4) for name in self.cfg['ccs-dbus_names']: + if name in ['plotter', 'monitor']: + continue start_button = Gtk.Button.new_with_label("Start " + name.capitalize()) start_button.connect("clicked", cfl.on_open_univie_clicked) vbox.pack_start(start_button, False, True, 0) @@ -2669,67 +2662,108 @@ class TMPoolView(Gtk.Window): return # Whole function 
is now done in Poolmgr - def load_pool(self, widget=None, filename=None, brute=False, force_db_import=False, protocol='PUS'): - if cfl.is_open('poolmanager', cfl.communication['poolmanager']): - poolmgr = cfl.dbus_connection('poolmanager', cfl.communication['poolmanager']) - else: - cfl.start_pmgr(True, '--nogui') - #path_pm = os.path.join(confignator.get_option('paths', 'ccs'), 'pus_datapool.py') - #subprocess.Popen(['python3', path_pm, '--nogui']) - self.logger.info('Poolmanager was started in the background') - - # Here we have a little bit of a tricky situation since when we start the Poolmanager it wants to tell the - # Manager to which number it can talk to but it can only do this when PoolViewer is not busy... - # Therefore it is first found out which number the new Poolmanager will get and it will be called by that - our_con = [] - # Look for all connections starting with com.poolmanager.communication, - # therefore only one loop over all connections is necessary - for service in dbus.SessionBus().list_names(): - if service.startswith(self.cfg['ccs-dbus_names']['poolmanager']): - our_con.append(service) - - new_pmgr_nbr = 0 - if len(our_con) != 0: # If an active PoolManager is found they have to belong to another prject - for k in range(1, 10): # Loop over all possible numbers - for j in our_con: # Check every number with every PoolManager - if str(k) == str(j[-1]): # If the number is not found set variable found to True - found = True - else: # If number is found set variable found to False - found = False - break - - if found: # If number could not be found save the number and try connecting - new_pmgr_nbr = k - break - - else: - new_pmgr_nbr = 1 - - if new_pmgr_nbr == 0: - self.logger.warning('The maximum amount of Poolviewers has been reached') - return - - # Wait a maximum of 10 seconds to connect to the poolmanager - i = 0 - while i < 100: - if cfl.is_open('poolmanager', new_pmgr_nbr): - poolmgr = cfl.dbus_connection('poolmanager', new_pmgr_nbr) - break - else: - i += 1 - time.sleep(0.1) + # def load_pool2(self, widget=None, filename=None, brute=False, force_db_import=False, protocol='PUS'): + # if cfl.is_open('poolmanager', cfl.communication['poolmanager']): + # poolmgr = cfl.dbus_connection('poolmanager', cfl.communication['poolmanager']) + # else: + # cfl.start_pmgr(gui=False) + # #path_pm = os.path.join(confignator.get_option('paths', 'ccs'), 'pus_datapool.py') + # #subprocess.Popen(['python3', path_pm, '--nogui']) + # self.logger.info('Poolmanager was started in the background') + # + # # Here we have a little bit of a tricky situation since when we start the Poolmanager it wants to tell the + # # Manager to which number it can talk to but it can only do this when PoolViewer is not busy... 
+ # # Therefore it is first found out which number the new Poolmanager will get and it will be called by that + # our_con = [] + # # Look for all connections starting with com.poolmanager.communication, + # # therefore only one loop over all connections is necessary + # for service in dbus.SessionBus().list_names(): + # if service.startswith(self.cfg['ccs-dbus_names']['poolmanager']): + # our_con.append(service) + # + # new_pmgr_nbr = 0 + # if len(our_con) != 0: # If an active PoolManager is found they have to belong to another prject + # for k in range(1, 10): # Loop over all possible numbers + # for j in our_con: # Check every number with every PoolManager + # if str(k) == str(j[-1]): # If the number is not found set variable found to True + # found = True + # else: # If number is found set variable found to False + # found = False + # break + # + # if found: # If number could not be found save the number and try connecting + # new_pmgr_nbr = k + # break + # + # else: + # new_pmgr_nbr = 1 + # + # if new_pmgr_nbr == 0: + # self.logger.warning('The maximum amount of Poolviewers has been reached') + # return + # + # # Wait a maximum of 10 seconds to connect to the poolmanager + # i = 0 + # while i < 100: + # if cfl.is_open('poolmanager', new_pmgr_nbr): + # poolmgr = cfl.dbus_connection('poolmanager', new_pmgr_nbr) + # break + # else: + # i += 1 + # time.sleep(0.1) + # + # if filename is not None and filename: + # pool_name = filename.split('/')[-1] + # try: + # new_pool = cfl.Functions(poolmgr, 'load_pool_poolviewer', pool_name, filename, brute, force_db_import, + # self.count_current_pool_rows(), self.my_bus_name[-1], protocol) + # + # except: + # self.logger.warning('Pool could not be loaded, File: ' + str(filename) + 'does probably not exist') + # # print('Pool could not be loaded, File' +str(filename)+ 'does probably not exist') + # return + # else: + # dialog = Gtk.FileChooserDialog(title="Load File to pool", parent=self, action=Gtk.FileChooserAction.OPEN) + # dialog.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.OK) + # + # area = dialog.get_content_area() + # hbox, force_button, brute_extract, type_buttons = self.pool_loader_dialog_buttons() + # + # area.add(hbox) + # area.show_all() + # + # dialog.set_transient_for(self) + # + # response = dialog.run() + # + # if response == Gtk.ResponseType.OK: + # filename = dialog.get_filename() + # pool_name = filename.split('/')[-1] + # isbrute = brute_extract.get_active() + # force_db_import = force_button.get_active() + # for button in type_buttons: + # if button.get_active(): + # package_type = button.get_label() + # else: + # dialog.destroy() + # return + # + # if package_type: + # if package_type not in ['PUS', 'PLMSIM']: + # package_type == 'SPW' + # + # # package_type defines which type was selected by the user, if any was selected + # new_pool = cfl.Functions(poolmgr, 'load_pool_poolviewer', pool_name, filename, isbrute, force_db_import, + # self.count_current_pool_rows(), self.my_bus_name[-1], package_type) + # + # dialog.destroy() + # + # if new_pool: + # self._set_pool_list_and_display(new_pool) - if filename is not None and filename: - pool_name = filename.split('/')[-1] - try: - new_pool = cfl.Functions(poolmgr, 'load_pool_poolviewer', pool_name, filename, brute, force_db_import, - self.count_current_pool_rows(), self.my_bus_name[-1], protocol) + def load_pool(self, widget=None, filename=None, brute=False, force_db_import=False, protocol='PUS', no_duplicates=True): - except: - 
self.logger.warning('Pool could not be loaded, File: ' + str(filename) + 'does probably not exist') - # print('Pool could not be loaded, File' +str(filename)+ 'does probably not exist') - return - else: + if filename is None: dialog = Gtk.FileChooserDialog(title="Load File to pool", parent=self, action=Gtk.FileChooserAction.OPEN) dialog.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.OK) @@ -2745,47 +2779,29 @@ class TMPoolView(Gtk.Window): if response == Gtk.ResponseType.OK: filename = dialog.get_filename() - pool_name = filename.split('/')[-1] - isbrute = brute_extract.get_active() + brute = brute_extract.get_active() force_db_import = force_button.get_active() for button in type_buttons: if button.get_active(): - package_type = button.get_label() + protocol = button.get_label() else: dialog.destroy() return - if package_type: - if package_type not in ['PUS', 'PLMSIM']: - package_type == 'SPW' - - # package_type defines which type was selected by the user, if any was selected - new_pool = cfl.Functions(poolmgr, 'load_pool_poolviewer', pool_name, filename, isbrute, force_db_import, - self.count_current_pool_rows(), self.my_bus_name[-1], package_type) + if protocol: + if protocol not in ['PUS', 'PLMSIM']: + protocol = 'SPW' dialog.destroy() - if new_pool: - self._set_pool_list_and_display(new_pool) - - # If a new Pool is loaded show it - #if new_pool: - - #else: # If not just switch the page to the previously loaded pool - # #Check if pool is loaded in Poolviewer or if it is a loaded pool in the poolmanager which is not - # # live and therefore not loaded to the viewer in the booting process - # try: - # model = self.pool_selector.get_model() - # self.pool_selector.set_active([row[0] == self.active_pool_info.pool_name for row in model].index(True)) - # except: - # Pool is still importing do nothing - #all_pmgr_pools = poolmgr.Functions('loaded_pools_export_func') - #for pool in all_pmgr_pools: - # if pool[2] == pool_name: - # self._set_pool_list_and_display(pool) - # break - #pass - return + if no_duplicates and (not force_db_import): + if self._check_pool_is_loaded(filename): + return + + new_pool = cfl.DbTools.sql_insert_binary_dump(filename, brute=brute, force_db_import=force_db_import, + protocol=protocol, pecmode='warn', parent=self) + + self._set_pool_list_and_display(new_pool) def pool_loader_dialog_buttons(self): ''' @@ -2857,34 +2873,25 @@ class TMPoolView(Gtk.Window): self.update_columns() - # self.pool.create(pool_name) self.adj.set_upper(self.count_current_pool_rows()) self.offset = 0 self.limit = self.adj.get_page_size() self._on_scrollbar_changed(adj=self.adj, force=True) - # self.pool.load_pckts(pool_name, filename) - # pvqueue2 = self.pool.register_queue(pool_name) - # self.set_queue(*pvqueue2) # Check the decoding type to show a pool if self.decoding_type == 'PUS': model = self.pool_selector.get_model() - iter = model.append([self.active_pool_info.pool_name, self.live_signal[self.active_pool_info.live], self.decoding_type]) + iter = model.append([self.active_pool_info.pool_name, self.live_signal[self.active_pool_info.live], self.decoding_type, self.active_pool_info.filename]) self.pool_selector.set_active_iter(iter) else: # If not PUS open all other possible types but show RMAP for packet_type in Telemetry: if packet_type == 'RMAP': model = self.pool_selector.get_model() - iter = model.append([self.active_pool_info.pool_name, self.live_signal[self.active_pool_info.live], packet_type]) + iter = 
model.append([self.active_pool_info.pool_name, self.live_signal[self.active_pool_info.live], packet_type, self.active_pool_info.filename]) self.pool_selector.set_active_iter(iter) # Always show the RMAP pool when created else: model = self.pool_selector.get_model() - iter = model.append([self.active_pool_info.pool_name, self.live_signal[self.active_pool_info.live], packet_type]) - #try: - # poolmgr = cfl.dbus_connection('poolmanager', cfl.communication['poolmanager']) - # poolmgr.Functions('loaded_pools_func', self.active_pool_info.pool_name, self.active_pool_info) - #except: - # pass + iter = model.append([self.active_pool_info.pool_name, self.live_signal[self.active_pool_info.live], packet_type, self.active_pool_info.filename]) if self.active_pool_info.live: self.stop_butt.set_sensitive(True) @@ -3010,6 +3017,10 @@ class TMPoolView(Gtk.Window): def plot_parameters(self, widget=None, parameters=None, start_live=False): + if not self.active_pool_info.filename: + self.logger.warning('No pool selected') + return + cfl.start_plotter(pool_name=self.active_pool_info.filename) def start_recording(self, widget=None): @@ -3438,6 +3449,10 @@ class BigDataViewer(Gtk.Window): def on_draw(self, widget, cr): poolmgr = cfl.dbus_connection('poolmanager', cfl.communication['poolmanager']) + + if not poolmgr: + return + check = poolmgr.Functions('_return_colour_list', 'try') if check is False: return diff --git a/Ccs/pus_datapool.py b/Ccs/pus_datapool.py index af08b64..7102e65 100644 --- a/Ccs/pus_datapool.py +++ b/Ccs/pus_datapool.py @@ -2174,6 +2174,8 @@ class PUSDatapoolManagerGUI(Gtk.ApplicationWindow): # Add the different Starting Options vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=5, margin=4) for name in self.cfg['ccs-dbus_names']: + if name in ['plotter', 'monitor']: + continue start_button = Gtk.Button.new_with_label("Start " + name.capitalize()) start_button.connect("clicked", cfl.on_open_univie_clicked) vbox.pack_start(start_button, False, True, 0) diff --git a/Tst/json_to_barescript.py b/Tst/json_to_barescript.py index 87b13f3..7093702 100755 --- a/Tst/json_to_barescript.py +++ b/Tst/json_to_barescript.py @@ -81,7 +81,7 @@ def run(jfile, outfile, reportfunc=False, specfile=None): script += '\nreport.export()\n\n' if outfile[-1] == '/': # If path is given not the actual filename - outfile = outfile + data['_name'] + '-TS' + '-'.join(data['_spec_version']) + '.py' + outfile = outfile + data['_name'] + '-TS-' + '-'.join(data['_spec_version']) + '.py' with open(outfile, 'w') as fd: fd.write(script) @@ -90,7 +90,7 @@ def run(jfile, outfile, reportfunc=False, specfile=None): if __name__ == '__main__': json_file_path = sys.argv[1] - if len(sys.argv) > 1: # If filename is given + if len(sys.argv) > 2: # If filename is given outputfile = sys.argv[2] else: # If no filename is given take the working directory path, filename is used from the json file outputfile = os.getcwd() + '/' diff --git a/Tst/testing_library/testlib/report.py b/Tst/testing_library/testlib/report.py index f7abc97..3c30ef0 100644 --- a/Tst/testing_library/testlib/report.py +++ b/Tst/testing_library/testlib/report.py @@ -237,7 +237,7 @@ def write_log_test_footer(test): if len(test.step_results) < 1: if test.precond_ok is False: logger.info('Precondition not fulfilled!') - logger.info('No steps were ran successfully.') + logger.info('No steps were run successfully.') test.test_passed = False else: for item in test.step_results: diff --git a/Tst/tst/view.py b/Tst/tst/view.py index e7955a3..a37606b 100644 --- 
a/Tst/tst/view.py +++ b/Tst/tst/view.py @@ -184,8 +184,10 @@ class Board(Gtk.Box): self.label_comment.set_text(_('Test comment:')) # Make the area where the real command is entered self.comment_scrolled_window = Gtk.ScrolledWindow() + self.comment_scrolled_window.set_policy(Gtk.PolicyType.NEVER, Gtk.PolicyType.AUTOMATIC) # self.comment_scrolled_window.set_size_request(200, 100) self.test_meta_data_comment = Gtk.TextView.new() + self.test_meta_data_comment.set_wrap_mode(Gtk.WrapMode.WORD) Gtk.StyleContext.add_class(self.test_meta_data_comment.get_style_context(), 'text-view') self.comment_scrolled_window.add(self.test_meta_data_comment) -- GitLab
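
Usage sketch (illustrative only, not part of the patch): with the DbTools helpers now living in ccs_function_lib.py, a viewer can import a binary TM dump into the database directly, without a running poolmanager instance. The snippet below assumes the Ccs modules are importable as in the patch and uses a hypothetical dump path; note that sql_insert_binary_dump opens a small LoadInfo GTK progress window and runs the actual import in a background thread.

    import ccs_function_lib as cfl

    # Hypothetical dump file; any binary TM dump readable by the PUS extractor.
    dump_file = '/path/to/tm_dump.bin'

    # Parses the dump and bulk-inserts the packets into the telemetry tables.
    # Returns an ActivePoolInfo namedtuple: (filename, modification_time, pool_name, live).
    pool_info = cfl.DbTools.sql_insert_binary_dump(dump_file,
                                                   brute=False,            # no brute-force packet search
                                                   force_db_import=False,  # skip import if dump already in DB
                                                   protocol='PUS',
                                                   pecmode='warn')

    print(pool_info.pool_name, pool_info.live)

This is the same call path the reworked TMPoolView.load_pool uses when a file is chosen in the dialog, so the viewer no longer has to route dump imports through the poolmanager's load_pool_poolviewer function.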