Commit a6ba6736, authored 9 months ago by Marko Mecina
    update beta L0b processing

Parent: 9351ccaa
Showing 2 changed files with 125 additions and 46 deletions:

    Ccs/tools/dataprocessing/hk_processing.py          +1    −2
    Ccs/tools/dataprocessing/smile_L0b_converter.py   +124   −44
Ccs/tools/dataprocessing/hk_processing.py  (+1 −2)
@@ -8,8 +8,7 @@ from packetstruct import timepack, timecal, APID, TM_HEADER_LEN, PEC_LEN, PI1W
 from s2k_partypes import ptt
 import timeformats

-MIBDIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'mib')
+MIBDIR = 'mib'

 PIC_TAB = os.path.join(MIBDIR, 'pic.dat')
 PID_TAB = os.path.join(MIBDIR, 'pid.dat')
 PLF_TAB = os.path.join(MIBDIR, 'plf.dat')
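Note on the MIBDIR change above: a bare relative path such as 'mib' is resolved against the process's current working directory, whereas the removed form anchored the MIB tables to the module's own location. A minimal sketch of the difference (illustrative only, not part of the commit):

    import os

    # resolved relative to the directory containing this module, no matter where it is run from
    mib_module_relative = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'mib')

    # resolved relative to whatever directory the process was launched from
    mib_cwd_relative = os.path.abspath('mib')

    print(mib_module_relative)
    print(mib_cwd_relative)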
Ccs/tools/dataprocessing/smile_L0b_converter.py  (+124 −44, mode 100644 → 100755)
 #!/usr/bin/env python3
 """
-Process SMILE SXI L0b product to L0d
+Process SMILE SXI L0b product
 """

 import datetime
@@ -45,7 +45,7 @@ seqcnt = None
 trashcnt = 0

-CE_EXEC = "./smile_raw_ce_converter.py"
+CE_EXEC = os.path.join(os.path.dirname(os.path.abspath(__file__)), "smile_raw_ce_converter.py")

 PRODUCT_IDS = {0: 'SXI-SCI-ED',
                2: 'SXI-SCI-FT',
@@ -59,6 +59,7 @@ SCI_PRODUCTS = {0: 'ED', 1: 'UNKNOWN', 2: 'FT', 3: 'UV', 4: 'FF'}
 MODES = tuple(PRODUCT_IDS.values())

 FT_NODES = ('FT_CCD_NODE_0', 'FT_CCD_NODE_1', 'FT_CCD_NODE_2', 'FT_CCD_NODE_3')
+UV_NODES = ('UV_CCD_NODE_0', 'UV_CCD_NODE_1', 'UV_CCD_NODE_2', 'UV_CCD_NODE_3')

 ED_BIN_DTYPE = np.dtype(
     [('TIME', '>f8'), ('CCDFRAME', '>u4'), ('CCDNR', 'u1'), ('RAWX', '>u2'), ('RAWY', '>u2'), ('AMP', 'u1'),
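ED_BIN_DTYPE is a numpy structured dtype for the ED (event) records; the field list shown above is cut off by the diff context and continues in the file. A small sketch of how such a dtype behaves, using only the fields visible in the hunk:

    import numpy as np

    # first fields of ED_BIN_DTYPE as visible above ('>' marks big-endian)
    ed_dtype = np.dtype([('TIME', '>f8'), ('CCDFRAME', '>u4'), ('CCDNR', 'u1'),
                         ('RAWX', '>u2'), ('RAWY', '>u2'), ('AMP', 'u1')])

    events = np.zeros(3, dtype=ed_dtype)    # one record per event
    events['TIME'] = [0.0, 0.5, 1.0]        # per-field assignment
    print(events.dtype.names, events.itemsize)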
@@ -619,7 +620,7 @@ def decompress(cefile, outdir):
     return fitspath


-def mk_hk_prod(hks, infile):
+def mk_hk_prod(hks, infile, outdir):
     hdl = mk_hdl('HK')

     for key in hks:
@@ -630,7 +631,7 @@ def mk_hk_prod(hks, infile):
         except Exception as err:
             logging.error(err)

-    fname = infile.replace('L0b', 'L0d').replace('.dat', '_ENG.fits')
+    fname = os.path.join(outdir, os.path.basename(infile) + '_ENG.fits')
     hdl.writeto(fname, overwrite=True)

     return fname
@@ -692,15 +693,16 @@ def merge_fits(sorted_files, infile):
     ff_merged = merge_ff(sorted_files['SXI-SCI-FF'], infile)

     # ST
-    st_merged = merge_st(sorted_files['SXI-SCI-ST'], infile)
+    # st_merged = merge_st(sorted_files['SXI-SCI-ST'], infile)

     # PT
-    pt_merged = merge_pt(sorted_files['SXI-SCI-PT'], infile)
+    # pt_merged = merge_pt(sorted_files['SXI-SCI-PT'], infile)

     # UV
     uv_merged = merge_uv(sorted_files['SXI-SCI-UV'], infile)

-    return ed_merged, ft_merged, ff_merged, st_merged, pt_merged, uv_merged
+    return ed_merged, ft_merged, ff_merged, None, None, uv_merged
+    # return ed_merged, ft_merged, ff_merged, st_merged, pt_merged, uv_merged


 def merge_ed(files, infile):
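With the ST and PT branches disabled, the tuple returned by merge_fits now carries None in those two slots. A hypothetical caller-side check (the stub below stands in for the real merge_fits and is not part of the commit):

    def merge_fits_stub(sorted_files, infile):
        # mimics the new return value: ST and PT products are not generated
        return 'ED.fits', 'FT.fits', 'FF.fits', None, None, 'UV.fits'

    products = dict(zip(('ED', 'FT', 'FF', 'ST', 'PT', 'UV'), merge_fits_stub({}, 'in.dat')))
    skipped = [name for name, path in products.items() if path is None]
    print('skipped products:', skipped)   # -> ['ST', 'PT']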
@@ -757,7 +759,7 @@ def merge_ed(files, infile):
     hdul.append(frame_table)
     hdul.append(ed_table)

-    fname = infile.replace('L0b', 'L0d').replace('.dat', '.fits')
+    fname = infile + '_ED.fits'
     try:
         hdul.writeto(fname, overwrite=True)
@@ -828,7 +830,7 @@ def merge_ft(files, infile):
             ff = format_ft_fits(file, group_idx)
             group_data.append(ff[0])
             frame_data += ff[1]
-            ft_data += ff[2]
+            ft_data.append(ff[2])

             if meta is None:
                 metaf = fits.open(file)
@@ -839,10 +841,10 @@ def merge_ft(files, infile):
             logging.error(err)

         group_idx += 1

-    fname = infile.replace('L0b', 'L0d').replace('.dat', '-FT.fits')
+    fname = infile + '_FT.fits'
     try:
-        hdul.writeto(fname)
+        hdul.writeto(fname, overwrite=True)
     except Exception as err:
         logging.exception(err)
         return
@@ -863,8 +865,8 @@ def format_ft_fits(fname, gidx):
         else:
             nodes.append(None)

-    group_new = tuple([gidx] + group.data.tolist())
-    frames_new = tuple(frames.data.tolist() + [gidx])
+    group_new = tuple([gidx] + group.data.tolist()[0])
+    frames_new = tuple([frm + [gidx] for frm in frames.data.tolist()])

     return group_new, frames_new, nodes
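The two reworked lines above tag the FITS table rows with the running group index: the single GROUP_HK row is prefixed with gidx, and each FRAME_HK row gets gidx appended. An illustration with plain Python lists standing in for the table rows (values are made up):

    gidx = 1
    group_rows = [[512, 40.0, 3]]            # GROUP_HK: a single row
    frame_rows = [[1, 1001], [2, 1002]]      # FRAME_HK: one row per frame

    group_new = tuple([gidx] + group_rows[0])                  # (1, 512, 40.0, 3)
    frames_new = tuple([frm + [gidx] for frm in frame_rows])   # ([1, 1001, 1], [2, 1002, 1])
    print(group_new, frames_new)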
@@ -875,18 +877,32 @@ def merge_ff(files, infile):
     hdul = mk_hdl('FF')

+    group_idx = 1  # to associate frames to a group
+    group_data = []
+    frame_data = []
+    ff_data = []
+    meta = None
+
     for file in files:
         try:
-            ff = fits.open(file)
+            ff = format_ft_fits(file, group_idx)
+            print(ff)
+            group_data.append(ff[0])
+            frame_data += ff[1]
+            ff_data += ff[2]
+
+            if meta is None:
+                metaf = fits.open(file)
+                metah = metaf[0]
+                metah.verify('fix')
+                meta = metah.header
         except Exception as err:
+            print(err)
             logging.error(err)

+        group_idx += 1
+
-    fname = infile.replace('L0b', 'L0d').replace('.dat', '-FF.fits')
+    fname = infile + '_FF.fits'
     try:
-        hdul.writeto(fname)
+        hdul.writeto(fname, overwrite=True)
     except Exception as err:
         logging.exception(err)
         return
@@ -894,9 +910,25 @@ def merge_ff(files, infile):
     return fname


+def format_ff_fits(fname, gidx):
+    ff = fits.open(fname)
+
+    group = ff['GROUP_HK']
+    frames = ff['FRAME_HK']
+    fullframe = ff['FULLFRAME']
+
+    group_new = tuple([gidx] + group.data.tolist()[0])
+    frames_new = tuple([frm + [gidx] for frm in frames.data.tolist()])
+
+    return group_new, frames_new, fullframe.data
+
+
 def merge_st(files, infile):
     fname = None
+    return fname

     hdul = mk_hdl('ST')

     for file in files:
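format_ff_fits (like the similar format_ft_fits and the new format_uv_fits below) reads named extensions such as GROUP_HK, FRAME_HK and FULLFRAME from the decompressed FITS files. A self-contained sketch of that access pattern, assuming astropy.io.fits (which matches the fits.open / writeto / verify calls in this diff); extension names and values are illustrative:

    import numpy as np
    from astropy.io import fits

    # build a tiny file with named extensions
    group = fits.BinTableHDU.from_columns(
        [fits.Column(name='EXPTIME', format='J', array=np.array([100]))], name='GROUP_HK')
    frames = fits.BinTableHDU.from_columns(
        [fits.Column(name='FRAMECNT', format='J', array=np.array([1, 2]))], name='FRAME_HK')
    fullframe = fits.ImageHDU(data=np.zeros((4, 4), dtype=np.uint16), name='FULLFRAME')
    fits.HDUList([fits.PrimaryHDU(), group, frames, fullframe]).writeto('demo_ff.fits', overwrite=True)

    # read it back the way format_ff_fits does
    ff = fits.open('demo_ff.fits')
    print(ff['GROUP_HK'].data['EXPTIME'])    # -> [100]
    print(ff['FRAME_HK'].data['FRAMECNT'])   # -> [1 2]
    print(ff['FULLFRAME'].data.shape)        # -> (4, 4)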
@@ -913,6 +945,8 @@ def merge_st(files, infile):
 def merge_pt(files, infile):
     fname = None
+    return fname
+
     hdul = mk_hdl('PT')

     for file in files:
@@ -927,27 +961,69 @@ def merge_pt(files, infile):
 def merge_uv(files, infile):
-    fname = None
+    if len(files) == 0:
+        return

     hdul = mk_hdl('UV')

+    group_idx = 1  # to associate frames to a group
+    group_data = []
+    frame_data = []
+    uv_data = []
+    meta = None
+
     for file in files:
         try:
-            ff = fits.open(file)
+            ff = format_uv_fits(file, group_idx)
+            print(ff)
+            group_data.append(ff[0])
+            frame_data += ff[1]
+            uv_data += ff[2]
+
+            if meta is None:
+                metaf = fits.open(file)
+                metah = metaf[0]
+                metah.verify('fix')
+                meta = metah.header
         except Exception as err:
+            print(err)
             logging.error(err)

+        group_idx += 1
+
+    fname = infile + '_UV.fits'
+    try:
+        hdul.writeto(fname, overwrite=True)
+    except Exception as err:
+        logging.exception(err)
+        return

     return fname


-def get_dp_desc(dpid):
-    try:
-        return data_pool[dpid + DP_OFFSET][0]
-    except KeyError:
-        logging.error("Unknown DP ID {} in header".format(dpid))
-        return str(dpid)[:8]
+def format_uv_fits(fname, gidx):
+    ff = fits.open(fname)
+
+    group = ff['GROUP_HK']
+    frames = ff['FRAME_HK']
+
+    nodes = []
+    for node in UV_NODES:
+        if node in ff:
+            nodes.append(ff[node].data)
+        else:
+            nodes.append(None)
+
+    group_new = tuple([gidx] + group.data.tolist()[0])
+    frames_new = tuple([frm + [gidx] for frm in frames.data.tolist()])
+
+    return group_new, frames_new, nodes
+
+
+# def get_dp_desc(dpid):
+#     try:
+#         return data_pool[dpid + DP_OFFSET][0]
+#     except KeyError:
+#         logging.error("Unknown DP ID {} in header".format(dpid))
+#         return str(dpid)[:8]


 def calc_frame_time(rarr, reftime):
@@ -1006,7 +1082,7 @@ def process_file(infile, outdir):
             # logging.error('Decompression failed for {}'.format(ce))
             logging.exception(err)

-    merged = merge_fits(decompressed, infile)
+    # merged = merge_fits(decompressed, infile)

     # put HK in FITS
     try:
@@ -1015,21 +1091,28 @@ def process_file(infile, outdir):
         hkfile = None
         logging.error("Failed creating ENG product for {} ({}).".format(infile, err))

-    return *merged, hkfile
+    print(hkfile)
+    # return *merged, hkfile


-def load_dp():
-    with open('dp.csv', 'r') as fd:
-        dp = fd.read()
-
-    data = [x.split('|')[:3] for x in dp.split('\n')[2:]]
-
-    return {int(x[1]): (x[0].strip(), x[2].strip()) for x in data if x[0]}
+# def load_dp():
+#     with open('dp.csv', 'r') as fd:
+#         dp = fd.read()
+#
+#     data = [x.split('|')[:3] for x in dp.split('\n')[2:]]
+#
+#     return {int(x[1]): (x[0].strip(), x[2].strip()) for x in data if x[0]}


 def setup_logging(output_dir):
     # Configure logging to write to a file in the output directory
     log_filename = os.path.join(output_dir, "log.json")
+    if not os.path.isfile(log_filename):
+        with open(log_filename, 'w') as fd:
+            fd.write('')
     logging.basicConfig(filename=log_filename, level=logging.INFO,
                         format='{\n"timestamp": "%(asctime)s",\n"level": "%(levelname)s",\n"message": "%(message)s"\n},')
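setup_logging writes brace-delimited, JSON-like records (each followed by a trailing comma) to log.json in the output directory, and now pre-creates that file if it does not exist. A minimal sketch of what one record looks like, writing to a temporary directory for illustration:

    import logging
    import os
    import tempfile

    outdir = tempfile.mkdtemp()
    log_filename = os.path.join(outdir, "log.json")
    logging.basicConfig(filename=log_filename, level=logging.INFO,
                        format='{\n"timestamp": "%(asctime)s",\n"level": "%(levelname)s",\n"message": "%(message)s"\n},')
    logging.info("example message")

    with open(log_filename) as fd:
        print(fd.read())
    # {
    # "timestamp": "...",
    # "level": "INFO",
    # "message": "example message"
    # },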
@@ -1038,19 +1121,16 @@ def setup_logging(output_dir):
 if __name__ == '__main__':

-    setup_logging('/home/marko/space/smile/cedata/proc')
-    process_file('/home/marko/space/smile/datapools/UL_flatsat_08072024_1156_rev_clk_dgen.bin', '/home/marko/space/smile/cedata/proc')
-    sys.exit()
+    # setup_logging('/home/marko/space/smile/cedata/proc')
+    # process_file('/home/marko/space/smile/cedata/datapools/UL_flatsat_08072024_1156_rev_clk_dgen.bin', '/home/marko/space/smile/cedata/proc')
+    # sys.exit()

     infile = sys.argv[1]

     if len(sys.argv) >= 3:
         outdir = sys.argv[2]
     else:
-        outdir = None
+        outdir = os.path.dirname(infile)

+    setup_logging(outdir)
     process_file(infile, outdir)
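Usage after this change (paths are illustrative; outdir is optional and falls back to the directory of the input file, and the mode change to 100755 makes the script directly executable):

    ./smile_L0b_converter.py /path/to/SMILE_L0b_product.dat /path/to/output_dir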