diff --git a/Ccs/tools/dataprocessing/fake_decompress.sh b/Ccs/tools/dataprocessing/fake_decompress.sh
new file mode 100755
index 0000000000000000000000000000000000000000..62bf0cd7239a64c34049ad5e179064941adbf179
--- /dev/null
+++ b/Ccs/tools/dataprocessing/fake_decompress.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/bash
+
+cp $1 $2
+echo Decompressed $1 to $2.
diff --git a/Ccs/tools/dataprocessing/hk_processing.py b/Ccs/tools/dataprocessing/hk_processing.py
index 050da9a880d52859b7c4228d5220dbd2e8257cd7..303fdc8e4d67888685ddade638d42e56749f04cf 100644
--- a/Ccs/tools/dataprocessing/hk_processing.py
+++ b/Ccs/tools/dataprocessing/hk_processing.py
@@ -39,11 +39,13 @@ class PktStructs:
         params = []

         if tpsd == -1:
-            for plf in self.pus_tabs.plf[spid]:
-                name, offby, offbi = plf
-                descr, ptc, pfc, _ = self.pus_tabs.pcf[name]
-                fmts.append(ptt(ptc, pfc))
-                params.append((name, descr, offby, offbi, ptc, pfc))
+            # only try if there are any parameters defined
+            if spid in self.pus_tabs.plf:
+                for plf in self.pus_tabs.plf[spid]:
+                    name, offby, offbi = plf
+                    descr, ptc, pfc, _ = self.pus_tabs.pcf[name]
+                    fmts.append(ptt(ptc, pfc))
+                    params.append((name, descr, offby, offbi, ptc, pfc))
         else:
             for vpd in self.pus_tabs.vpd[spid]:
                 pos, name, grp, fixrep = vpd
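Note: the guard added above avoids a KeyError for SPIDs that have no PLF entries. Assuming, as the hunk suggests, that plf maps SPID -> list of (name, offby, offbi) tuples, the same guard can be written more compactly with dict.get — a sketch for comparison, not the patch author's code:

    # dict.get with an empty-list default skips SPIDs with no parameters defined
    for name, offby, offbi in self.pus_tabs.plf.get(spid, []):
        descr, ptc, pfc, _ = self.pus_tabs.pcf[name]
        fmts.append(ptt(ptc, pfc))
        params.append((name, descr, offby, offbi, ptc, pfc))

Either form leaves the pcf lookup unguarded, so an unknown parameter name would still raise.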
diff --git a/Ccs/tools/dataprocessing/remove_duplicates.py b/Ccs/tools/dataprocessing/remove_duplicates.py
new file mode 100755
index 0000000000000000000000000000000000000000..0ff7cbe3a76ec7fbe7d2d42112ff185494d468cd
--- /dev/null
+++ b/Ccs/tools/dataprocessing/remove_duplicates.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+
+"""
+Remove PUS packet duplicates from binary file
+"""
+
+import sys
+from smile_L0b_converter import remove_duplicates
+
+if __name__ == '__main__':
+    if len(sys.argv) < 2:
+        print('Usage: ./remove_duplicates.py <INFILE> [<OUTFILE>]')
+        sys.exit()
+    elif len(sys.argv) == 2:
+        infile = sys.argv[1]
+        outfile = None
+    else:
+        infile, outfile = sys.argv[1:3]
+
+    remove_duplicates(infile, outfile)
\ No newline at end of file
diff --git a/Ccs/tools/dataprocessing/smile_L0b_converter.py b/Ccs/tools/dataprocessing/smile_L0b_converter.py
index 6bd47893da6e60b61465a8b4002a0a614a9c217c..82be537f0040e225bcfe4e187223a5a9420d55ce 100755
--- a/Ccs/tools/dataprocessing/smile_L0b_converter.py
+++ b/Ccs/tools/dataprocessing/smile_L0b_converter.py
@@ -11,9 +11,11 @@ import sys

 from astropy.io import fits
 import numpy as np
+from numpy.lib import recfunctions as rf

 from hk_processing import proc_hk
 from packetstruct import ST_OFF, SST_OFF
+from smile_raw_ce_converter import convert_ce, ED_BIN_DTYPE, STRUCT_CE_HEADER

 import crcmod
@@ -23,7 +25,7 @@ puscrc = crcmod.predefined.mkPredefinedCrcFun('crc-ccitt-false')

 DP_OFFSET = 345544320

-PROC_ST = [1, 3, 5, 20]  # PUS service types to be processed for ENG product
+PROC_ST = [1, 3, 5, 17, 20, 197]  # PUS service types to be processed for ENG product

 SDUID = 1
@@ -45,7 +47,8 @@ seqcnt = None

 trashcnt = 0

-CE_EXEC = os.path.join(os.path.dirname(os.path.abspath(__file__)), "smile_raw_ce_converter.py")
+# CE_EXEC = os.path.join(os.path.dirname(os.path.abspath(__file__)), "smile_raw_ce_converter.py")
+CE_EXEC = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fake_decompress.sh")

 PRODUCT_IDS = {0: 'SXI-SCI-ED',
                2: 'SXI-SCI-FT',
@@ -60,12 +63,12 @@ UNKNOWN_PROD = 'UNKNOWN'

 MODES = tuple(PRODUCT_IDS.values())

-FT_NODES = ('FT_CCD_NODE_0', 'FT_CCD_NODE_1', 'FT_CCD_NODE_2', 'FT_CCD_NODE_3')
-UV_NODES = ('UV_CCD_NODE_0', 'UV_CCD_NODE_1', 'UV_CCD_NODE_2', 'UV_CCD_NODE_3')
+FT_NODES = ['FT_NODE_0']  # , 'FT_NODE_1', 'FT_NODE_2', 'FT_NODE_3']
+UV_NODES = ['UV_NODE_0']  # , 'UV_NODE_1', 'UV_NODE_2', 'UV_NODE_3']

-ED_BIN_DTYPE = np.dtype(
-    [('TIME', '>f8'), ('CCDFRAME', '>u4'), ('CCDNR', 'u1'), ('RAWX', '>u2'), ('RAWY', '>u2'), ('AMP', 'u1'),
-     ('PHAS', '>u2', (25,))])
+# ED_BIN_DTYPE = np.dtype(
+#     [('TIME', '>f8'), ('CCDFRAME', '>u4'), ('CCDNR', 'u1'), ('RAWX', '>u2'), ('RAWY', '>u2'), ('AMP', 'u1'),
+#      ('PHAS', '>u2', (25,))])

 FMT_LUT = {'UINT8': '>u1',
            'B': '>u1',
@@ -90,7 +93,9 @@ FMT_LUT = {'UINT8': '>u1',
            'f': '>f8',
            'd': '>f8',
            'CUC918': '>f8',
-           'S10': '|S10'}
+           'I24': '>S3',
+           'S8': '>S8',
+           'S10': '>S10'}

 GROUP_TABLE_STRUCT = [('groupIdx', 'UINT16'),
                       ('timetag', 'FLOAT'),
@@ -98,8 +103,8 @@ GROUP_TABLE_STRUCT = [('groupIdx', 'UINT16'),
                       ('ceCounter', 'UINT16'),
                       ('sdpGroupMembers', 'UINT32'),
                       ('ceSize', 'UINT32'),
-                      ('ceKey', 'S10'),
-                      ('product', 'S10'),
+                      ('ceKey', 'S8'),
+                      ('product', 'UINT8'),
                       ('ceIntegrity', 'UINT8'),
                       ('groupMetaSize', 'UINT32'),
                       ('frameMetaSize', 'UINT32'),
@@ -343,6 +348,35 @@ def fmt_func_signed_int16(uint_arr):
     return uint_arr.astype(np.uint16).view(np.int16)


+def remove_duplicates(infile, outfile=None):
+
+    if outfile is None:
+        outfile = infile + '_CLEAN.bin'
+
+    unique = []
+
+    with open(infile, 'rb') as fd:
+
+        while True:
+            pkt = extract_pus_crc(fd)
+            if pkt is None:
+                break
+
+            unique.append(pkt)
+
+    diff = len(unique)
+    clean = set()
+    unique = [pkt for pkt in unique if pkt not in clean and clean.add(pkt) is None]
+    diff -= len(unique)
+
+    if diff > 0:
+        print('{} duplicate S13 packets found.'.format(diff))
+
+    with open(outfile, 'wb') as fd:
+        fd.write(b''.join(unique))
+        print('Clean file written to {}'.format(outfile))
+
+
 def read_pus(data):
     """
     Read single PUS packet from buffer
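Note: the list comprehension in remove_duplicates() is the order-preserving dedup idiom: set.add() returns None, so `clean.add(pkt) is None` is always true and merely records the packet, while `pkt not in clean` filters repeats. A more explicit equivalent for readers unfamiliar with the trick (a hypothetical helper, not part of the patch):

    def dedup(pkts):
        # keep the first occurrence of each packet, preserving order
        seen = set()
        out = []
        for pkt in pkts:
            if pkt not in seen:
                seen.add(pkt)
                out.append(pkt)
        return out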
@@ -442,7 +476,7 @@ def extract_ce_data(pkt, check_seq=CHECK_SEQ):
     return pkt[SDU_DATA_OFF:SDU_DATA_OFF + datalen]


-def parse_pkts(fd):
+def parse_pkts(fd, no_hk=False, sduid=1):
     global seqcnt

     ces = {}
@@ -453,6 +487,11 @@
     ce_id = None
     txpkts = None

+    if no_hk:
+        hk_to_proc = []
+    else:
+        hk_to_proc = PROC_ST
+
     while True:
         pkt = extract_pus_crc(fd)
         if pkt is None:
@@ -463,7 +502,7 @@
             continue

         # handle ENG telemetry
-        elif pkt[ST_OFF] in PROC_ST:
+        elif pkt[ST_OFF] in hk_to_proc:
             pktkey, descr, procpkt, timestamp, decoded = proc_hk(pkt)

             if pktkey is None:
@@ -495,7 +534,7 @@
                 hks[key] = {'descr': descr, 'tpsd': tpsd, 'params': params, 'values': [values], 'decoded': decoded}

-        elif pkt[ST_OFF] == 13 and pkt[SDUID_OFF] == SDUID: # TODO: replace with SDUID
+        elif pkt[ST_OFF] == 13 and pkt[SDUID_OFF] == sduid:

             if pkt[SST_OFF] == 1:
                 if not tx:
@@ -575,6 +614,13 @@ def extract(infile, outdir):
             good_ces, bad_ces, hks = parse_pkts(fd)
             if trashcnt != 0:
                 logging.warning('skipped {} bytes because of wrong CRCs'.format(trashcnt))
+
+            # rewind to parse and add FFs with SDUID=2
+            fd.seek(0)
+            good_ffs, bad_ffs, _ = parse_pkts(fd, no_hk=True, sduid=2)
+            good_ces.update(good_ffs)
+            bad_ces.update(bad_ffs)
+
     except Exception as err:
         logging.exception(err)
         good_ces, bad_ces, hks = [], [], []
@@ -603,7 +649,7 @@ def decompress(cefile, outdir):
        cefile = os.path.join(outdir, cefile)

     logging.info("Decompressing {}".format(cefile))
-    fitsfile = os.path.basename(cefile)[:-2] + 'fits'
+    fitsfile = os.path.basename(cefile)[:-2] + 'de'
     fitspath = os.path.join(outdir, fitsfile)

     proc = subprocess.run([CE_EXEC, cefile, fitspath], capture_output=True)
@@ -633,8 +679,9 @@ def mk_hk_prod(hks, infile, outdir):
             hdl.append(hdu)
         except Exception as err:
             logging.error(err)
+            logging.error(str(key))

-    fname = os.path.join(outdir, os.path.basename(infile) + '_ENG.fits')
+    fname = mk_outfile_name(outdir, os.path.basename(infile), '_ENG.fits')
     hdl.writeto(fname, overwrite=True)

     return fname
@@ -685,15 +732,15 @@ def mk_hk_table(data):
     return tab


-def merge_fits(sorted_files, infile):
+def merge_to_fits(sorted, infile, outdir):

     # ED
-    ed_merged = merge_ed(sorted_files['SXI-SCI-ED'], infile)
+    ed_merged = merge_ed(sorted['SXI-SCI-ED'], infile, outdir)

     # FT
-    ft_merged = merge_ft(sorted_files['SXI-SCI-FT'], infile)
+    ft_merged = merge_ft(sorted['SXI-SCI-FT'], infile, outdir)

     # FF
-    ff_merged = merge_ff(sorted_files['SXI-SCI-FF'], infile)
+    ff_merged = merge_ff(sorted['SXI-SCI-FF'], infile, outdir)

     # ST
     # st_merged = merge_st(sorted_files['SXI-SCI-ST'], infile)
@@ -702,13 +749,93 @@
     # pt_merged = merge_pt(sorted_files['SXI-SCI-PT'], infile)

     # UV
-    uv_merged = merge_uv(sorted_files['SXI-SCI-UV'], infile)
+    uv_merged = merge_uv(sorted['SXI-SCI-UV'], infile, outdir)

     return ed_merged, ft_merged, ff_merged, None, None, uv_merged
     # return ed_merged, ft_merged, ff_merged, st_merged, pt_merged, uv_merged


-def merge_ed(files, infile):
+def merge_meta(mode, groups):
+    hdul = mk_hdl(mode)
+
+    data_list = []
+    meta_group = []
+    meta_frame = []
+
+    group_idx = []
+
+    for i, grp in enumerate(groups, 1):
+        head, data, metag, metaf = grp
+
+        data_list.append(data)
+
+        meta_frame.append(metaf)
+        group_idx.append(np.repeat(i, metaf.size))
+
+        g = (i, head.items.timestamp, head.items.obsid, head.items.ce_counter, head.items.sdp_group_members, head.items.ce_size,
+             '{:08X}'.format(head.items.ce_key), head.items.product, head.items.ce_integrity, head.items.group_meta_size,
+             head.items.frame_meta_size, head.items.compressed_meta_size, head.items.data_size, head.items.compressed_data_size, *metag[0])
+
+        meta_group.append(g)
+
+    meta_frame = rf.append_fields(np.concatenate(meta_frame), 'groupIdx', np.array(group_idx).flatten(), dtypes='>u2', usemask=False)
+    meta_group = np.array(meta_group, dtype=[(p[0], FMT_LUT.get(p[1])) for p in GROUP_TABLE_STRUCT])
+
+    p_head = hdul[0].header
+    p_head['SOFTVER'] = head.items.version_number
+    p_head['BUILD'] = head.items.build_number
+    p_head['SDPVER'] = head.items.sdp_version
+    p_head['CREATOR'] = "SXITLM2FITS"
+    p_head['TLM2FITS'] = "0.1"
+    p_head['DATE'] = datetime.datetime.isoformat(datetime.datetime.now(datetime.UTC))
+
+    group_table = fits.BinTableHDU(data=meta_group, name='GROUP_HK')
+    frame_table = fits.BinTableHDU(data=meta_frame, name='FRAME_HK')
+
+    # comment header items
+    gcom = group_table.header.comments
+
+    # checksums
+    group_table.add_checksum()
+    frame_table.add_checksum()
+
+    hdul.append(group_table)
+    hdul.append(frame_table)
+
+    return hdul, data_list
+
+
+def merge_ed(groups, infile, outdir):
+
+    if len(groups) == 0:
+        return
+
+    hdul, data = merge_meta('ED', groups)
+
+    ed_data, wp_data = list(zip(*data))
+    ed_data = np.concatenate(ed_data)
+    wp_data = np.concatenate(wp_data)
+
+    ed_table = fits.BinTableHDU(data=ed_data, name='EVENTS')
+    ed_table.add_checksum()
+    hdul.append(ed_table)
+
+    wp_table = fits.BinTableHDU(data=wp_data, name='WANDERING_PIXEL')
+    wp_table.add_checksum()
+    hdul.append(wp_table)
+
+    fname = mk_outfile_name(outdir, infile, '_ED.fits')
+
+    try:
+        hdul.writeto(fname, overwrite=True)
+    except Exception as err:
+        logging.exception(err)
+        return
+
+    return fname
+
+
+def merge_ed2(files, infile):
     if len(files) == 0:
         return
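Note: merge_meta() tags every frame-HK row with the 1-based index of the CE group it came from via numpy.lib.recfunctions.append_fields, so the concatenated FRAME_HK table stays traceable to GROUP_HK. A self-contained sketch of that pattern (the field names here are illustrative, not the mission HK layout):

    import numpy as np
    from numpy.lib import recfunctions as rf

    frames = np.zeros(5, dtype=[('FrameCounter', '>u4')])  # frame-HK rows of one group
    tagged = rf.append_fields(frames, 'groupIdx', np.repeat(1, frames.size),
                              dtypes='>u2', usemask=False)
    # tagged.dtype.names == ('FrameCounter', 'groupIdx')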
@@ -762,7 +889,7 @@ def merge_ed(files, infile):
     hdul.append(frame_table)
     hdul.append(ed_table)

-    fname = infile + '_ED.fits'
+    fname = mk_outfile_name(outdir, infile, '_ED.fits')

     try:
         hdul.writeto(fname, overwrite=True)
@@ -816,35 +943,22 @@ def format_ed_fits(fname, gidx):
     return group_new, frames_new, ed_new


-def merge_ft(files, infile):
-    if len(files) == 0:
+def merge_ft(groups, infile, outdir):
+
+    if len(groups) == 0:
         return

-    hdul = mk_hdl('FT')
+    hdul, data = merge_meta('FT', groups)

-    group_idx = 1  # to associate frames to a group
-    group_data = []
-    frame_data = []
-    ft_data = []
-
-    meta = None
-    for file in files:
-        try:
-            ff = format_ft_fits(file, group_idx)
-            group_data.append(ff[0])
-            frame_data += ff[1]
-            ft_data.append(ff[2])
+    data = np.concatenate(data)

-            if meta is None:
-                metaf = fits.open(file)
-                metah = metaf[0]
-                metah.verify('fix')
-                meta = metah.header
-        except Exception as err:
-            logging.error(err)
-        group_idx += 1
+    for i, node in enumerate(FT_NODES):
+        ndata = data[:, i, :, :]
+        dhdu = fits.ImageHDU(data=ndata, name=node)
+        dhdu.add_checksum()
+        hdul.append(dhdu)

-    fname = infile + '_FT.fits'
+    fname = mk_outfile_name(outdir, infile, '_FT.fits')

     try:
         hdul.writeto(fname, overwrite=True)
@@ -874,35 +988,21 @@ def format_ft_fits(fname, gidx):
     return group_new, frames_new, nodes


-def merge_ff(files, infile):
-    if len(files) == 0:
+def merge_ff(groups, infile, outdir):
+
+    if len(groups) == 0:
         return

-    hdul = mk_hdl('FF')
+    hdul, data = merge_meta('FF', groups)

-    group_idx = 1  # to associate frames to a group
-    group_data = []
-    frame_data = []
-    ff_data = []
+    ffdata = np.stack(data)

-    meta = None
-    for file in files:
-        try:
-            ff = format_ft_fits(file, group_idx)
-            group_data.append(ff[0])
-            frame_data += ff[1]
-            ff_data += ff[2]
+    dhdu = fits.ImageHDU(data=ffdata, name='FULLFRAME')
+    dhdu.add_checksum()
+    hdul.append(dhdu)

-            if meta is None:
-                metaf = fits.open(file)
-                metah = metaf[0]
-                metah.verify('fix')
-                meta = metah.header
-        except Exception as err:
-            logging.error(err)
-        group_idx += 1
-
-    fname = infile + '_FF.fits'
+    # fname = infile + '_FF.fits'
+    fname = mk_outfile_name(outdir, infile, '_FF.fits')

     try:
         hdul.writeto(fname, overwrite=True)
@@ -927,7 +1027,7 @@ def format_ff_fits(fname, gidx):
     return group_new, frames_new, fullframe.data


-def merge_st(files, infile):
+def merge_st(files, infile, outdir):
     fname = None

     return fname
@@ -945,7 +1045,7 @@
     return fname


-def merge_pt(files, infile):
+def merge_pt(files, infile, outdir):
     fname = None

     return fname
@@ -963,35 +1063,21 @@
     return fname


-def merge_uv(files, infile):
-    if len(files) == 0:
+def merge_uv(groups, infile, outdir):
+
+    if len(groups) == 0:
         return

-    hdul = mk_hdl('UV')
+    hdul, data = merge_meta('UV', groups)

-    group_idx = 1  # to associate frames to a group
-    group_data = []
-    frame_data = []
-    uv_data = []
+    data = np.concatenate(data)

-    meta = None
-    for file in files:
-        try:
-            ff = format_uv_fits(file, group_idx)
-            group_data.append(ff[0])
-            frame_data += ff[1]
-            uv_data += ff[2]
+    for i, node in enumerate(UV_NODES):
+        dhdu = fits.ImageHDU(data=data[:, i, :, :], name=node)
+        dhdu.add_checksum()
+        hdul.append(dhdu)

-            if meta is None:
-                metaf = fits.open(file)
-                metah = metaf[0]
-                metah.verify('fix')
-                meta = metah.header
-        except Exception as err:
-            logging.error(err)
-        group_idx += 1
-
-    fname = infile + '_UV.fits'
+    fname = mk_outfile_name(outdir, infile, '_UV.fits')

     try:
         hdul.writeto(fname, overwrite=True)
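Note: after the merge_meta() rewrite, each science payload arrives as an array of shape (n_frames, NODE_NUM, rows, cols), so `data[:, i, :, :]` selects the frame cube of CCD node i for its own image extension. A minimal illustration with dummy contents (the 639x384 FT node geometry is taken from smile_raw_ce_converter.py; everything else is synthetic):

    import numpy as np
    from astropy.io import fits

    data = np.zeros((3, 1, 639, 384), dtype='>u2')  # 3 frames, 1 node
    hdul = fits.HDUList([fits.PrimaryHDU()])
    for i, node in enumerate(['FT_NODE_0']):
        hdu = fits.ImageHDU(data=data[:, i, :, :], name=node)
        hdu.add_checksum()  # adds CHECKSUM/DATASUM keywords
        hdul.append(hdu)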
@@ -1021,6 +1107,9 @@ def format_uv_fits(fname, gidx):
     return group_new, frames_new, nodes


+def mk_outfile_name(outdir, filepath, suffix):
+    return os.path.join(outdir, os.path.basename(filepath)) + suffix
+
 # def get_dp_desc(dpid):
 #     try:
 #         return data_pool[dpid + DP_OFFSET][0]
@@ -1043,20 +1132,15 @@ def calc_frame_time(rarr, reftime):
     return {i: t for i, t in zip(fcnt, ct + (ft << 8) / 1e6)}


-def sort_by_mode(sorted_modes, file):
-    fn = os.path.basename(file)
-
-    recognised = False
-    for mode in MODES:
-        if fn.count(mode):
-            sorted_modes[mode].append(file)
-            recognised = True
-            break
-
-    if not recognised:
-        logging.error('Unidentified mode for file {}'.format(file))
-
-    return sorted_modes
+# def sort_by_mode(sorted_modes, scimode, cehead, arrays):
+#
+#     if scimode in MODES:
+#         sorted_modes[scimode].append([cehead, *arrays])
+#
+#     else:
+#         logging.error('Unidentified mode {}'.format(scimode))
+#
+#     return sorted_modes


 def mk_hdl(dmode):
@@ -1075,7 +1159,8 @@ def process_file(infile, outdir):

     ces, hks = extract(infile, outdir)

-    decompressed = {mode: [] for mode in MODES}
+    decompressed = {PRODUCT_IDS[k]: [] for k in PRODUCT_IDS}
+
     for ce in ces:

         try:
@@ -1083,15 +1168,22 @@
             logging.exception('Skipped decompression of {}'.format(ce))
             continue

-            fitspath = decompress(ce, outdir)
-            if os.path.isfile(fitspath):
-                decompressed = sort_by_mode(decompressed, fitspath)
+            depath = decompress(ce, outdir)
+            if os.path.isfile(depath):
+                cehead, arrays = convert_ce(depath)
+                scimode = cehead.items.product
+                # decompressed = sort_by_mode(decompressed, scimode, cehead, arrays)
+                if scimode in SCI_PRODUCTS:
+                    decompressed[PRODUCT_IDS[scimode]].append([cehead, *arrays])
+                else:
+                    logging.error('Unidentified mode {}'.format(scimode))

         except Exception as err:
             logging.error('Decompression failed for {}'.format(ce))
             logging.exception(err)

-    # merged = merge_fits(decompressed, infile)
+    merged = merge_to_fits(decompressed, infile, outdir)
+    print('\n'.join([x for x in merged if x is not None]))

     # put HK in FITS
     try:
@@ -1102,7 +1194,7 @@

     print(hkfile)

-    # return *merged, hkfile
+    return *merged, hkfile


 # def load_dp():
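Note: process_file() now keys the merge buckets by the numeric product ID from the CE header instead of guessing the mode from the file name. Condensed, the dispatch looks like the sketch below; it assumes SCI_PRODUCTS (defined in smile_raw_ce_converter.py) is resolvable in this module's scope and that every science product ID also appears in PRODUCT_IDS:

    decompressed = {PRODUCT_IDS[k]: [] for k in PRODUCT_IDS}
    cehead, arrays = convert_ce(depath)   # CE header + science/meta arrays
    scimode = cehead.items.product        # numeric product ID
    if scimode in SCI_PRODUCTS:
        decompressed[PRODUCT_IDS[scimode]].append([cehead, *arrays])
    else:
        logging.error('Unidentified mode {}'.format(scimode))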
diff --git a/Ccs/tools/dataprocessing/smile_raw_ce_converter.py b/Ccs/tools/dataprocessing/smile_raw_ce_converter.py
index 347358e670ff316bed9e3954d3854e132a3ab245..cf352bcf74fb8b8b27edf267723dbb7ba620b546 100755
--- a/Ccs/tools/dataprocessing/smile_raw_ce_converter.py
+++ b/Ccs/tools/dataprocessing/smile_raw_ce_converter.py
@@ -12,8 +12,9 @@ import sys

 import numpy as np
 from astropy.io import fits
+from numpy.lib import recfunctions as rf

-# expected CE sizes in bytes TODO: 24x24 binned FT
+# expected CE sizes in bytes
 NROWS_FF = 4511
 NCOLS_FF = 4608
 NROWS_FT = 639
@@ -24,7 +25,7 @@ SIZE_FF = NROWS_FF * NCOLS_FF * 2

 SIZE_FT = NROWS_FT * NCOLS_FT * 2  # 1 node
 SIZE_UV = NROWS_UV * NCOLS_UV * 2  # 1 node
 SIZE_ED = 64  # 1 event
-NODE_NUM = 4  # total number of nodes
+NODE_NUM = 1  # total number of nodes TODO: set to 4 for proper data

 SCI_PRODUCTS = {0: 'ED', 1: 'UNKNOWN', 2: 'FT', 3: 'UV', 4: 'FF', 5: 'ED_WP'}
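Note: NODE_NUM feeds the frombuffer/reshape calls in the hunk below — the raw science block is interpreted as (n_frames, NODE_NUM, rows, cols), so with NODE_NUM = 1 each frame carries a single readout node. A toy illustration with small synthetic dimensions:

    import numpy as np

    NODE_NUM, NROWS, NCOLS = 1, 4, 6
    raw = np.arange(2 * NODE_NUM * NROWS * NCOLS, dtype='>u2').tobytes()
    arr = np.frombuffer(raw, dtype='>u2').reshape(-1, NODE_NUM, NROWS, NCOLS)
    # arr.shape == (2, 1, 4, 6): two frames, one node each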
@@ -34,223 +35,252 @@ ED_BIN_DTYPE = np.dtype(
     [('TIME', '>f8'), ('CCDFRAME', '>u4'), ('CCDNR', 'u1'), ('RAWX', '>u2'), ('RAWY', '>u2'), ('AMP', 'u1'),
      ('PHAS', '>u2', (25,))])

+ED_PKT_DTYPE = np.dtype([('spw', '>u2'), ('dlen', '>u2'), ('mode', '>u1'), ('dtyp', '>u1'), ('fc', '>u2'), ('sc', '>u2'),
+                         ('col', '>u2'), ('row', '>u2'), ('evts', '>u2', (25,))])
+
+ID_ED = 1
+ID_WP = 3
+
+WP_PER_FRAME = 1
+

 def convert_ce(cefile, fitsfile=None, guess=False):

     cedata = open(cefile, 'rb').read()

-    if guess:
-        # guess product based on CE size
-        if len(cedata) == SIZE_FF:
-            mode, hdl = mk_ff(cedata)
-        elif len(cedata) // SIZE_FT in [1, 2, 4]:
-            mode, hdl = mk_ft(cedata)
-        elif len(cedata) % SIZE_ED == 0:
-            mode, hdl = mk_ed(cedata)
+    # if guess:
+    #     # guess product based on CE size
+    #     if len(cedata) == SIZE_FF:
+    #         mode, hdl = mk_ff(cedata)
+    #     elif len(cedata) // SIZE_FT in [1, 2, 4]:
+    #         mode, hdl = mk_ft(cedata)
+    #     elif len(cedata) % SIZE_ED == 0:
+    #         mode, hdl = mk_ed(cedata)
+    #     else:
+    #         print('Cannot determine product type for CE of length {}, aborting.'.format(len(cedata)))
+    #         sys.exit()
+    #
+    #     obsid = 0
+
+    # else:
+    try:
+        ce = CompressionEntity(cedata)
+        prod = ce.header.items.product
+        # cekey = ce.header.items.ce_key
+
+        # obsid = ce.header.items.obsid
+
+        assert ce.meta_frame.shape[0] == ce.header.items.sdp_group_members
+
+        if fitsfile is not None:
+            asfits = True
         else:
-            print('Cannot determine product type for CE of length {}, aborting.'.format(len(cedata)))
+            asfits = False
+
+        if SCI_PRODUCTS.get(prod) == 'FF':
+            mode, *hdl = mk_ff(ce, asfits=asfits)
+        elif SCI_PRODUCTS.get(prod) == 'FT':
+            mode, *hdl = mk_ft(ce, asfits=asfits)
+        elif SCI_PRODUCTS.get(prod) == 'UV':
+            mode, *hdl = mk_uv(ce, asfits=asfits)
+        elif SCI_PRODUCTS.get(prod) in ['ED', 'ED_WP']:
+            mode, *hdl = mk_ed(ce, asfits=asfits)
+        else:
+            print('Unknown product in CE ({}), aborting.'.format(prod))
             sys.exit()

-        obsid = 0
+        if fitsfile is not None:
+            hdl[0].writeto(fitsfile, overwrite=True)

-    else:
-        try:
-            ce = CompressionEntity(cedata)
-            prod = ce.header.items.product
-
-            obsid = ce.header.items.obsid
-
-            assert ce.meta_frame.shape[0] == ce.header.items.sdp_group_members
-
-            if SCI_PRODUCTS.get(prod) == 'FF':
-                mode, hdl = mk_ff(ce)
-            elif SCI_PRODUCTS.get(prod) == 'FT':
-                mode, hdl = mk_ft(ce)
-            elif SCI_PRODUCTS.get(prod) == 'UV':
-                mode, hdl = mk_uv(ce)
-            elif SCI_PRODUCTS.get(prod) == 'ED':
-                mode, hdl = mk_ed(ce)
-            else:
-                print('Unknown product in CE ({}), aborting.'.format(prod))
-                sys.exit()
-
-        except Exception as err:
-            print(err)
-            sys.exit()
+    except Exception as err:
+        print(err)
+        sys.exit()

-    if fitsfile is None:
-        outdir = os.path.dirname(os.path.abspath(cefile))
-        fitsfile = os.path.join(outdir, '{}_{}_{:010d}_{}.fits'.format(FILE_PREFIX, mode, obsid, _mk_ts()))
+    # if fitsfile is None:
+    #     outdir = os.path.dirname(os.path.abspath(cefile))
+    #     fitsfile = os.path.join(outdir, '{}_{}_{:010d}_{}.fits'.format(FILE_PREFIX, mode, obsid, _mk_ts()))

-    hdl.writeto(fitsfile, overwrite=True)
+    return ce.header, hdl


-def mk_ff(data):
+def mk_ff(data, asfits=True):

     # create uint16 array from raw data and reshape
     arr = np.frombuffer(data.scidata, dtype='>H').reshape(NROWS_FF, NCOLS_FF)
     fnode = arr[:, ::2]
     enode = arr[:, 1::2][:, ::-1]
     ff = np.concatenate((fnode, enode), axis=1)

-    # write array to FITS file
-    hdl = _mk_hdl('FF', data.header)
-    fullframe = fits.ImageHDU(data=ff, name='FULLFRAME')
-    fullframe.add_checksum()
-    hdl.append(fullframe)
+    if asfits:
+        # write array to FITS file
+        hdl = _mk_hdl('FF', data.header)
+        fullframe = fits.ImageHDU(data=ff, name='FULLFRAME')
+        fullframe.add_checksum()
+        hdl.append(fullframe)

-    group_table = fits.BinTableHDU(data=data.meta_group, name='GROUP_HK')
-    frame_table = fits.BinTableHDU(data=data.meta_frame, name='FRAME_HK')
+        group_table = fits.BinTableHDU(data=data.meta_group, name='GROUP_HK')
+        frame_table = fits.BinTableHDU(data=data.meta_frame, name='FRAME_HK')

-    # checksums
-    group_table.add_checksum()
-    frame_table.add_checksum()
-    hdl.append(group_table)
-    hdl.append(frame_table)
+        # checksums
+        group_table.add_checksum()
+        frame_table.add_checksum()
+        hdl.append(group_table)
+        hdl.append(frame_table)

-    return 'FF', hdl
+        return 'FF', hdl

+    else:
+        return 'FF', ff, data.meta_group, data.meta_frame

-def mk_ft(data):
-    arr = np.frombuffer(data.scidata, dtype='>H').reshape(-1, NROWS_FT, NCOLS_FT)

-    hdl = _mk_hdl('FT', data.header)
-    n_nodes = arr.shape[0]
-    if n_nodes % NODE_NUM:
-        print('Total number of nodes ({}) is not a multiple of 4!'.format(n_nodes))
+def mk_ft(data, asfits=True):

-        for n in range(arr.shape[0]):
-            node = fits.ImageHDU(data=arr[n, :, :], name='FT_CCD_NODE_{}'.format(n))
-            node.add_checksum()
-            hdl.append(node)
+    # check if stacking ('2') was applied and use correct pixel bit size
+    if '2' in hex(data.header.items.ce_key):
+        pixbits = '>I'
     else:
-        for n in range(NODE_NUM):
-            node = fits.ImageHDU(data=arr[n::NODE_NUM, :, :], name='FT_CCD_NODE_{}'.format(n))
-            node.add_checksum()
-            hdl.append(node)
+        pixbits = '>H'

-    # arrange all nodes to full CCD
-    if n_nodes % 4 == 0:
+    arr = np.frombuffer(data.scidata, dtype=pixbits).reshape(-1, NODE_NUM, NROWS_FT, NCOLS_FT)

-        nn = _assemble_ft_frames_to_fp_view(arr)
+    if asfits:
+        hdl = _mk_hdl('FT', data.header)
+        n_nodes = arr.shape[0]

-        hdl.append(fits.ImageHDU(data=nn, name='FULLARRAY'))
+        if n_nodes % NODE_NUM:
+            print('Total number of nodes ({}) is not a multiple of 4!'.format(n_nodes))
+
+            for n in range(arr.shape[0]):
+                node = fits.ImageHDU(data=arr[n, :, :], name='FT_CCD_NODE_{}'.format(n))
+                node.add_checksum()
+                hdl.append(node)
+        else:
+            for n in range(NODE_NUM):
+                node = fits.ImageHDU(data=arr[n::NODE_NUM, :, :], name='FT_CCD_NODE_{}'.format(n))
+                node.add_checksum()
+                hdl.append(node)

-    group_table = fits.BinTableHDU(data=data.meta_group, name='GROUP_HK')
-    frame_table = fits.BinTableHDU(data=data.meta_frame, name='FRAME_HK')
+        # arrange all nodes to full CCD
+        if n_nodes % 4 == 0:

-    # checksums
-    group_table.add_checksum()
-    frame_table.add_checksum()
-    hdl.append(group_table)
-    hdl.append(frame_table)
+            nn = _assemble_ft_frames_to_fp_view(arr)

-    return 'FT', hdl
+            hdl.append(fits.ImageHDU(data=nn, name='FULLARRAY'))

+        group_table = fits.BinTableHDU(data=data.meta_group, name='GROUP_HK')
+        frame_table = fits.BinTableHDU(data=data.meta_frame, name='FRAME_HK')

-def mk_uv(data):
-    arr = np.frombuffer(data.scidata, dtype='>H').reshape(-1, NROWS_UV, NCOLS_UV)
+        # checksums
+        group_table.add_checksum()
+        frame_table.add_checksum()
+        hdl.append(group_table)
+        hdl.append(frame_table)

-    hdl = _mk_hdl('UV', data.header)
-    n_nodes = arr.shape[0]
+        return 'FT', hdl

-    if n_nodes % NODE_NUM:
-        for n in range(arr.shape[0]):
-            node = fits.ImageHDU(data=arr[n, :, :], name='UV_CCD_NODE_{}'.format(n))
-            node.add_checksum()
-            hdl.append(node)
+    return 'FT', arr, data.meta_group, data.meta_frame
+
+
+def mk_uv(data, asfits=True):
+
+    # check if stacking ('2') was applied and use correct pixel bit size
+    if '2' in hex(data.header.items.ce_key):
+        pixbits = '>I'
     else:
-        for n in range(NODE_NUM):
-            node = fits.ImageHDU(data=arr[n::NODE_NUM, :, :], name='UV_CCD_NODE_{}'.format(n))
-            node.add_checksum()
-            hdl.append(node)
+        pixbits = '>H'

-    # arrange all nodes to full CCD
-    if n_nodes % 4 == 0:
+    arr = np.frombuffer(data.scidata, dtype=pixbits).reshape(-1, NODE_NUM, NROWS_UV, NCOLS_UV)

-        nn = _assemble_ft_frames_to_fp_view(arr)
+    if asfits:
+        hdl = _mk_hdl('UV', data.header)
+        n_nodes = arr.shape[0]

-        hdl.append(fits.ImageHDU(data=nn, name='FULLARRAY'))
+        if n_nodes % NODE_NUM:
+            for n in range(arr.shape[0]):
+                node = fits.ImageHDU(data=arr[n, :, :], name='UV_CCD_NODE_{}'.format(n))
+                node.add_checksum()
+                hdl.append(node)
+        else:
+            for n in range(NODE_NUM):
+                node = fits.ImageHDU(data=arr[n::NODE_NUM, :, :], name='UV_CCD_NODE_{}'.format(n))
+                node.add_checksum()
+                hdl.append(node)

-    group_table = fits.BinTableHDU(data=data.meta_group, name='GROUP_HK')
-    frame_table = fits.BinTableHDU(data=data.meta_frame, name='FRAME_HK')
+        # arrange all nodes to full CCD
+        if n_nodes % 4 == 0:

-    # checksums
-    group_table.add_checksum()
-    frame_table.add_checksum()
-    hdl.append(group_table)
-    hdl.append(frame_table)
+            nn = _assemble_ft_frames_to_fp_view(arr)

-    return 'UV', hdl
+            hdl.append(fits.ImageHDU(data=nn, name='FULLARRAY'))

+        group_table = fits.BinTableHDU(data=data.meta_group, name='GROUP_HK')
+        frame_table = fits.BinTableHDU(data=data.meta_frame, name='FRAME_HK')

-def mk_ed(data):
+        # checksums
+        group_table.add_checksum()
+        frame_table.add_checksum()
+        hdl.append(group_table)
+        hdl.append(frame_table)
+
+        return 'UV', hdl
+
+    return 'UV', arr, data.meta_group, data.meta_frame
+
+
+def mk_ed(data, asfits=True, edmap=False):

     # reshape into array of evt packets
-    arr = np.frombuffer(data.scidata, dtype='>H').reshape(-1, SIZE_ED // 2)
+    # arr = np.frombuffer(data.scidata, dtype='>H').reshape(-1, SIZE_ED // 2)

-    frmevtcnt = data.meta_frame['LastFrameEvtCount']
+    arr = np.frombuffer(data.scidata, dtype=ED_PKT_DTYPE)

-    # handle wandering pixel mask if present
-    wpm = []
-    if frmevtcnt.sum() != arr.shape[0]:
-        print('Inconsistent LastFrameEvtCount {} (sum). {} in data block.'.format(frmevtcnt.sum(), arr.shape[0]))
-        if data.header.items.sdp_group_members == (arr.shape[0] - frmevtcnt.sum()):
-            print('Extracting {} wandering pixel(s).'.format(data.header.items.sdp_group_members))
-            wpm = arr[-data.header.items.sdp_group_members:]
-            arr = arr[:-data.header.items.sdp_group_members]
+    frmevtcnt = data.meta_frame['LastFrameEvtCount']

     if data.meta_frame['LastFrameEvtCount'].sum() != data.meta_group['NOfEvtDet'][0]:
         print('Inconsistent CE frame count: LastFrameEvtCount_sum = {}, NOfEvtDet = {}'.format(frmevtcnt.sum(), data.meta_group['NOfEvtDet'][0]))

     hdl = _mk_hdl('ED', data.header)
-    # ts = int(hdl['PRIMARY'].header['OBS_ID'])

     ts = data.meta_frame['sdpProductStarttimeCrs'] + data.meta_frame['sdpProductStarttimeFine'] / 1e6
-    ts_ext = ts.repeat(frmevtcnt)

-    bindata = np.array([_mk_bin_entry(evt, t, wpm_raise=True) for evt, t in zip(arr, ts_ext)], dtype=ED_BIN_DTYPE)
-
-    if len(wpm) != 0:
-        try:
-            wpmdata = np.array([_mk_bin_entry(evt, t) for evt, t in zip(arr, ts)], dtype=ED_BIN_DTYPE)
-        except Exception as err:
-            wpmdata = None
-            print('Failed extracting wandering pixel masks ({})'.format(err))
-    else:
-        wpmdata = None
+    # ts_ext = ts.repeat(frmevtcnt)

-    # also add an HDU with event map
-    nodes = np.zeros((2, 2, NROWS_FT, NCOLS_FT))
+    # bindata = np.array([_mk_bin_entry(evt, t, wpm_raise=True) for evt, t in zip(arr, ts_ext)], dtype=ED_BIN_DTYPE)
+    bindata, wpmdata = _mk_bin_arrays(arr, ts, frmevtcnt)

-    for _, _, ccd, col, row, node, fx in bindata:
-        try:
-            nodes[ccd, node, row - 2:row + 3, col - 2:col + 3] += fx.reshape(5, 5)
-        except:
-            print(col, row, 'FAILED')
+    if asfits:
+        if edmap:
+            # also add an HDU with event map
+            nodes = np.zeros((2, 2, NROWS_FT, NCOLS_FT))
+
+            for _, _, ccd, col, row, node, fx in bindata:
+                try:
+                    nodes[ccd, node, row - 2:row + 3, col - 2:col + 3] += fx.reshape(5, 5)
+                except:
+                    print(col, row, 'FAILED')

-    # nodes[nodes == 0] = np.nan
+            # nodes[nodes == 0] = np.nan

-    ed_img = _assemble_ft_frames_to_fp_view(nodes)
+            ed_img = _assemble_ft_frames_to_fp_view(nodes)

-    hdl.append(fits.ImageHDU(data=ed_img, name='EVTMAP'))
+            hdl.append(fits.ImageHDU(data=ed_img, name='EVTMAP'))

-    evts = fits.BinTableHDU(data=bindata, name='EVENTS')
-    evts.add_checksum()
-    hdl.append(evts)
+        evts = fits.BinTableHDU(data=bindata, name='EVENTS')
+        evts.add_checksum()
+        hdl.append(evts)

-    group_table = fits.BinTableHDU(data=data.meta_group, name='GROUP_HK')
-    frame_table = fits.BinTableHDU(data=data.meta_frame, name='FRAME_HK')
+        group_table = fits.BinTableHDU(data=data.meta_group, name='GROUP_HK')
+        frame_table = fits.BinTableHDU(data=data.meta_frame, name='FRAME_HK')

-    if wpmdata is not None:
-        wpm_table = fits.BinTableHDU(data=wpmdata, name='WNDRNG_PXL')
-        wpm_table.add_checksum()
-        hdl.append(wpm_table)
+        if len(wpmdata) > 0:
+            wpm_table = fits.BinTableHDU(data=wpmdata, name='WNDRNG_PXL')
+            wpm_table.add_checksum()
+            hdl.append(wpm_table)

-    # checksums
-    group_table.add_checksum()
-    frame_table.add_checksum()
-    hdl.append(group_table)
-    hdl.append(frame_table)
+        # checksums
+        group_table.add_checksum()
+        frame_table.add_checksum()
+        hdl.append(group_table)
+        hdl.append(frame_table)

-    return 'ED', hdl
+        return 'ED', hdl
+
+    return 'ED', (bindata, wpmdata), data.meta_group, data.meta_frame


 def _mk_bin_entry(data, timestamp, wpm_raise=False):
@@ -266,6 +296,29 @@
     return timestamp, fc, ccdnr, col, row, node, evts


+def _fmt_bin_data(data, ts):
+
+    ccdnr = (data['dtyp'] >> 4) & 1
+    node = (data['dtyp'] >> 5) & 0b11
+
+    return np.array(list(zip(ts, data['fc'], ccdnr, data['col'], data['row'], node, data['evts'])), dtype=ED_BIN_DTYPE)
+
+
+def _mk_bin_arrays(pkts, ts, frmevtcnt):
+
+    evt_pkts = pkts[pkts['dtyp'] & 0b11 == ID_ED]
+    wp_pkts = pkts[pkts['dtyp'] & 0b11 == ID_WP]
+
+    # if (len(wp_pkts) != 0) and (len(wp_pkts) != WP_PER_FRAME * len(frmevtcnt)):
+    if (len(wp_pkts) != 0) and (len(wp_pkts) % WP_PER_FRAME):
+        print('Unexpected number of WP packets ({})'.format(len(wp_pkts)))
+
+    evts = _fmt_bin_data(evt_pkts, np.repeat(ts, frmevtcnt))
+    wpm = _fmt_bin_data(wp_pkts, np.repeat(ts, WP_PER_FRAME))
+
+    return evts, wpm
+
+
 def _assemble_ft_frames_to_fp_view(arrnd):

     # interpreting TVAC results, CCD0/2 is at the "top" and CCD1/4 at the "bottom" in the detector plane layout
@@ -316,7 +369,7 @@
     phdu.header['SDPVER'] = dhead.items.sdp_version
     phdu.header['CREATOR'] = "SXITLM2FITS"
     phdu.header['TLM2FITS'] = "0.2b"
-    phdu.header['DATE'] = datetime.datetime.isoformat(datetime.datetime.utcnow())
+    phdu.header['DATE'] = datetime.datetime.isoformat(datetime.datetime.now(datetime.UTC))

     hdl.append(phdu)

@@ -550,8 +603,9 @@

 if __name__ == "__main__":

-    # cefile="/home/marko/space/smile/betatest/eqm_flatsat/newsci/LDT_207_0207139797_026819.ce"
-    # convert_ce(cefile, fitsfile=None)
+    # cefile="/home/marko/space/smile/tvac/oct2024/20241002_180038/proc/0000000000_00001_213125754215040_00593_SXI-SCI-FT.ce"
+    # fitsfile="/home/marko/space/smile/tvac/oct2024/20241002_180038/proc/0000000000_00001_213125754215040_00593_SXI-SCI-FT.ce.fits"
+    # convert_ce(cefile, fitsfile=fitsfile)
     #
     # sys.exit()
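Note: _mk_bin_arrays() splits the ED packet stream on the two low bits of 'dtyp' (ID_ED = 1 for events, ID_WP = 3 for wandering-pixel packets), and _fmt_bin_data() unpacks the CCD number and readout node from the higher bits. A self-contained illustration of that bit arithmetic (the dtyp values are synthetic):

    import numpy as np

    dtyp = np.array([0b1110001, 0b0010011], dtype=np.uint8)
    pkt_type = dtyp & 0b11     # 1 -> ED event, 3 -> wandering pixel
    ccdnr = (dtyp >> 4) & 1    # CCD 0 or 1
    node = (dtyp >> 5) & 0b11  # readout node 0..3
    # pkt_type -> [1, 3], ccdnr -> [1, 1], node -> [3, 0]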