Commit a8ae492a authored by lkugler

not working yet

parent be039200
@@ -10,7 +10,7 @@ exp.use_existing_obsseq = False # False or pathname (use precomputed obs_seq.ou
#exp.use_existing_obsseq = '/users/students/lehre/advDA_s2023/dartwrf_tutorial/very_cold_observation.out'
# path to the nature run, where we take observations from
exp.nature_wrfout = '/jetfs/home/lkugler/data/sim_archive/exp_v1.18_P1_nature/2008-07-30_06:00/1/wrfout_d01_%Y-%m-%d_%H:%M:%S'
exp.nature_wrfout = '/mnt/jetfs/scratch/lkugler/data/sim_archive/exp_v1.18_P1_nature/2008-07-30_06:00/1/wrfout_d01_%Y-%m-%d_%H:%M:%S'
exp.input_profile = '/mnt/jetfs/home/lkugler/data/initial_profiles/wrf/ens/2022-03-31/raso.fc.<iens>.wrfprof'
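The %Y-%m-%d_%H:%M:%S placeholders in exp.nature_wrfout are expanded with datetime.strftime (see link_nature_to_dart_truth below). A minimal, illustrative sketch with an arbitrary valid time:

from datetime import datetime
time = datetime(2008, 7, 30, 7)
print(time.strftime(exp.nature_wrfout))
# .../2008-07-30_06:00/1/wrfout_d01_2008-07-30_07:00:00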
......
@@ -53,6 +53,8 @@ cluster.python_verif = '/users/staff/lkugler/miniconda3/bin/python'
cluster.ncks = '/home/swd/spack/opt/spack/linux-rhel8-skylake_avx512/gcc-8.5.0/nco-5.0.1-ntu44aoxlvwtr2tsrobfr4lht7cpvccf/bin/ncks'
cluster.ideal = '' #/jetfs/home/lkugler/bin/ideal-v4.3_v1.22.exe'
cluster.wrfexe = '' #/jetfs/home/lkugler/bin/wrf-v4.3_v1.22.exe'
cluster.dart_modules = ''
cluster.wrf_modules = ''
# paths for data output
cluster.wrf_rundir_base = utils.userhome+'/AdvDA23/run_WRF/' # path for temporary files
......
@@ -23,13 +23,13 @@ def link_nature_to_dart_truth(time):
"""
# get wrfout_d01 from nature run
shutil.copy(time.strftime(exp.nature+'/'+wrfout_format),
shutil.copy(time.strftime(exp.nature_wrfout),
cluster.dart_rundir + "/wrfout_d01")
# DART may need a wrfinput file as well, which serves as a template for dimension sizes
symlink(cluster.dart_rundir + "/wrfout_d01",
cluster.dart_rundir + "/wrfinput_d01")
print("linked", time.strftime(exp.nature+'/'+wrfout_format),
print("linked", time.strftime(exp.nature_wrfout),
"to", cluster.dart_rundir + "/wrfout_d01")
@@ -437,6 +437,46 @@ def archive_inflation_2(time):
copy(f_output, f_archive)
print(f_archive, 'saved')
def link_DART_binaries_and_RTTOV_files():
joinp = os.path.join
# link DART binaries
bins = ['perfect_model_obs', 'filter', 'obs_diag', 'obs_seq_to_netcdf']
for b in bins:
symlink(joinp(cluster.dart_srcdir, b),
joinp(cluster.dart_rundir, b))
# link RTTOV files
rttov_files = ['rttov13pred54L/rtcoef_msg_4_seviri_o3.dat',
'mfasis_lut/rttov_mfasis_cld_msg_4_seviri_deff.H5',
'cldaer_visir/sccldcoef_msg_4_seviri.dat']
try: # may fail quietly if we don't need RTTOV
for f_src in rttov_files:
destname = os.path.basename(f_src)
if 'rtcoef' in f_src:
destname = 'rtcoef_msg_4_seviri.dat'
symlink(cluster.rttov_srcdir + f_src,
cluster.dart_rundir+'/'+destname)
symlink(cluster.dart_rundir+'/rttov_mfasis_cld_msg_4_seviri_deff.H5',
cluster.dart_rundir+'/rttov_mfasis_cld_msg_4_seviri.H5') # use deff, not OPAC
symlink(cluster.dart_srcdir+'/../../../observations/forward_operators/rttov_sensor_db.csv',
cluster.dart_rundir+'/rttov_sensor_db.csv')
symlink(cluster.dart_srcdir+'/../../../assimilation_code/programs/gen_sampling_err_table/'
+'work/sampling_error_correction_table.nc',
cluster.dart_rundir+'/sampling_error_correction_table.nc')
print('prepared DART & RTTOV links in', cluster.dart_rundir)
except Exception as e:
if any([hasattr(obscfg, 'sat_channel') for obscfg in exp.observations]):
raise e
else:
pass # we don't need RTTOV anyway
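The function above relies on the symlink helper imported from dartwrf.utils. A hedged sketch of what such a helper typically does (the actual implementation in dartwrf.utils may differ):

import os

def symlink(src, dst):
    # create a symbolic link dst -> src, replacing an existing link or file
    try:
        os.remove(dst)
    except FileNotFoundError:
        pass
    os.symlink(src, dst)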
def main(time, prior_init_time, prior_valid_time, prior_path_exp):
"""Assimilate observations
@@ -464,8 +504,7 @@ def main(time, prior_init_time, prior_valid_time, prior_path_exp):
os.makedirs(cluster.dart_rundir, exist_ok=True) # create directory to run DART in
os.chdir(cluster.dart_rundir)
# link DART binaries to run_DART
os.system(cluster.python + " " + cluster.scripts_rundir + "/link_dart_rttov.py")
link_DART_binaries_and_RTTOV_files()
# remove any existing observation files
os.system("rm -f input.nml obs_seq.in obs_seq.out obs_seq.out-orig obs_seq.final")
......
@@ -12,7 +12,7 @@ def read_namelist(filepath):
filepath (str): Path to namelist file
Returns:
dict: keys are namelist sections, values are dictionaries of namelist variables
dict: namelist[section][parameter] = [[arg1, arg2,], [arg3, arg4]]
"""
d = dict()
@@ -24,8 +24,8 @@
# skip whitespace
line = line.strip()
# skip comments
if not line.startswith('#'):
if line.startswith('#') or line.startswith('!'):
continue # skip this line
# skip empty lines
if len(line) > 0:
@@ -39,37 +39,78 @@
if '/' in line:
continue # skip end of namelist section
line = line.strip().strip(',')
try:
# split line into variable name and value
var, val = line.split('=')
val = val.strip().strip(',').strip()
param, val = line.split('=')
param = param.strip()
param_data = []
except ValueError:
# If the above split failed, we are still in the previous variable
nextline_values = line.strip().split(',').strip()
val = val + nextline_values
# If the split failed, this line continues the previous parameter:
# it contains only additional data, no '=' sign.
val = line # the whole line is data for the previous parameter
# param keeps its value from the previous iteration
val = val.strip().strip(',').split(',')
# ensure that we have strings
if isinstance(val, list):
val = [str(v) for v in val]
else:
val = [str(val)]
param_data.append(val)
print('this iteration var, val ...', {param: param_data})
# add variable to dictionary
d[section][var] = val
d[section][param] = param_data
return d
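With this change, read_namelist returns each parameter as a list of lines, where each line is a list of string entries. An illustrative sketch of the expected structure (hypothetical namelist fragment; entries keep surrounding whitespace except at line boundaries):

# input.nml fragment:
#   &filter_nml
#      ens_size = 40,
#      inf_flavor = 2, 0,
#   /
# read_namelist('input.nml') would return approximately:
#   {'&filter_nml': {'ens_size': [['40']],
#                    'inf_flavor': [['2', ' 0']]}}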
def write_namelist_from_dict(d, filepath):
"""Write a DART namelist dictionary to a file.
Args:
d (dict): keys are namelist sections, values are dictionaries of namelist variables
d (dict): keys are namelist sections, values are dictionaries.
These dictionaries map parameters to a list of lines,
where each line is itself a list of (possibly multiple) entries.
filepath (str): Path to namelist file
"""
with open(filepath, 'w') as f:
for section in d:
f.write(section+'\n')
for var in d[section]:
val = d[section][var]
if isinstance(val, list):
val = ', '.join(val)
f.write('\t '+var+' = '+str(val)+'\n')
f.write('\t /\n\n')
try:
parameters = d[section].keys()
print(parameters, [len(p) for p in parameters])
max_width_of_parameter_name = max([len(p) for p in parameters])
width = max_width_of_parameter_name + 1
except:
width = None
for parameter in parameters:
lines = d[section][parameter]
if isinstance(lines, str):
lines = [lines,]
for i, line in enumerate(lines):
try:
line = ', '.join(line) # write line (is a list)
except:
pass
if i == 0:
f.write(' '+parameter.ljust(width)+' = '+line+'\n')
else:
f.write(' '+' '*width+' = '+line+'\n')
f.write(' /\n\n')
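A minimal usage sketch of the writer with the nested-list structure described in the docstring (file name and values are illustrative):

nml = {'&obs_kind_nml': {
    'assimilate_these_obs_types': [['"RADIOSONDE_TEMPERATURE"', '"SYNOP_SURFACE_PRESSURE"']]}}
write_namelist_from_dict(nml, 'input.nml.example')
# produces roughly:
#   &obs_kind_nml
#      assimilate_these_obs_types  = "RADIOSONDE_TEMPERATURE", "SYNOP_SURFACE_PRESSURE"
#    /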
def _get_list_of_localizations():
@@ -142,13 +183,24 @@ def _get_list_of_localizations():
# set the other (unused) list to a dummy value
if len(l_loc_vert_km) > 0:
l_loc_vert_scaleheight = [-1,]
l_loc_vert_scaleheight = ["-1",]
else:
l_loc_vert_km = [-1,]
l_loc_vert_km = ["-1",]
return l_obstypes, l_loc_horiz_rad, l_loc_vert_km, l_loc_vert_scaleheight
def _to_fortran_list(l):
"""Ensure formatting as "arg1", "arg2", """
assert isinstance(l, list)
if len(l) > 1: # multiple entries
return ', '.join(['"'+v+'"' for v in l])
elif len(l) == 1: # single entry
return '"'+l[0]+'"'
else: # no entry
return ''
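Illustration of the three cases handled above (observation type names are just example strings):

# _to_fortran_list(['RADIOSONDE_TEMPERATURE', 'MSG_4_SEVIRI_BDRF'])
#   -> '"RADIOSONDE_TEMPERATURE", "MSG_4_SEVIRI_BDRF"'
# _to_fortran_list(['RADIOSONDE_TEMPERATURE'])
#   -> '"RADIOSONDE_TEMPERATURE"'
# _to_fortran_list([])
#   -> ''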
def write_namelist(just_prior_values=False):
"""Set DART namelist variables in 'input.nml' file.
@@ -171,7 +223,7 @@ def write_namelist(just_prior_values=False):
nml = read_namelist(cluster.dart_srcdir + "/input.nml")
# make sure that observations defined in `exp.observations` are assimilated
nml['&obs_kind_nml']['assimilate_these_obs_types'] = list_obstypes
nml['&obs_kind_nml']['assimilate_these_obs_types'] = _to_fortran_list(list_obstypes)
# dont compute posterior, just evaluate prior
if just_prior_values:
@@ -180,32 +232,39 @@
nml['&filter_nml']['output_mean'] = '.false.'
nml['&filter_nml']['output_sd'] = '.false.'
nml['&obs_kind_nml']['assimilate_these_obs_types'] = []
nml['&obs_kind_nml']['evaluate_these_obs_types'] = list_obstypes
nml['&obs_kind_nml']['evaluate_these_obs_types'] = [_to_fortran_list(list_obstypes)]
# write localization variables
nml['&assim_tools_nml']['special_localization_obs_types'] = list_obstypes
nml['&assim_tools_nml']['special_localization_cutoffs'] = list_loc_horiz_rad
nml['&assim_tools_nml']['special_localization_obs_types'] = [_to_fortran_list(list_obstypes)]
nml['&assim_tools_nml']['special_localization_cutoffs'] = [_to_fortran_list(list_loc_horiz_rad)]
nml['&location_nml']['special_vert_normalization_obs_types'] = list_obstypes
nml['&location_nml']['special_vert_normalization_heights'] = list_loc_vert_km
nml['&location_nml']['special_vert_normalization_scale_heights'] = list_loc_vert_scaleheight
nml['&location_nml']['special_vert_normalization_obs_types'] = [_to_fortran_list(list_obstypes)]
nml['&location_nml']['special_vert_normalization_heights'] = [_to_fortran_list(list_loc_vert_km)]
nml['&location_nml']['special_vert_normalization_scale_heights'] = [_to_fortran_list(list_loc_vert_scaleheight)]
print(nml['&location_nml']['special_vert_normalization_obs_types'])
# overwrite namelist with DART-WRF/config/ configuration
for key, value in exp.dart_nml.items():
# overwrite namelist with experiment configuration
for section, sdata in exp.dart_nml.items():
# if key is not in namelist, add it
if key not in nml:
nml[key] = {}
# if section is not in namelist, add it
if section not in nml:
nml[section] = {}
# overwrite entry in each dictionary
nml[key] = value
for parameter, value in sdata.items():
if isinstance(value, list) and len(value) > 1: # it is a real list
value = [value] # value was a list of parameter values, but just one line
else:
value = [[value]] # value was a single entry
# overwrite entry in each dictionary
nml[section][parameter] = value # every entry in this list is one line
# final checks
# fail if horiz_dist_only == false but observations contain a satellite channel
if nml['&location_nml']['horiz_dist_only'] == '.false.':
if nml['&location_nml']['horiz_dist_only'][0] == '.false.':
for obscfg in exp.observations:
if hasattr(obscfg, "sat_channel"):
raise ValueError("Selected vertical localization, but observations contain satellite obs -> Not possible.")
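To illustrate the wrapping of exp.dart_nml entries in the loop above (parameter names and values are hypothetical configuration examples):

# sdata = {'ens_size': 40,        # single entry
#          'inf_flavor': [2, 0]}  # a real list: one line with two entries
# after the loop:
#   nml[section]['ens_size']   == [[40]]
#   nml[section]['inf_flavor'] == [[2, 0]]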
......
import os
from config.cfg import exp
from config.cluster import cluster
from dartwrf.utils import symlink, copy, sed_inplace
joinp = os.path.join
if __name__ == "__main__":
# DART executables
bins = ['perfect_model_obs', 'filter', 'obs_diag', 'obs_seq_to_netcdf']
for b in bins:
symlink(joinp(cluster.dart_srcdir, b),
joinp(cluster.dart_rundir, b))
print(joinp(cluster.dart_rundir, b), 'created')
rttov_files = ['rttov13pred54L/rtcoef_msg_4_seviri_o3.dat',
#'mfasis_lut/rttov_mfasis_cld_msg_4_seviri_deff.dat',
'mfasis_lut/rttov_mfasis_cld_msg_4_seviri_deff.H5',
'cldaer_visir/sccldcoef_msg_4_seviri.dat']
for f_src in rttov_files:
destname = os.path.basename(f_src)
if 'rtcoef' in f_src:
destname = 'rtcoef_msg_4_seviri.dat'
symlink(cluster.rttov_srcdir + f_src,
cluster.dart_rundir+'/'+destname)
##################
symlink(cluster.dart_rundir+'/rttov_mfasis_cld_msg_4_seviri_deff.H5',
cluster.dart_rundir+'/rttov_mfasis_cld_msg_4_seviri.H5')
symlink(cluster.dart_srcdir+'/../../../observations/forward_operators/rttov_sensor_db.csv',
cluster.dart_rundir+'/rttov_sensor_db.csv')
symlink(cluster.dart_srcdir+'/../../../assimilation_code/programs/gen_sampling_err_table/'
+'work/sampling_error_correction_table.nc',
cluster.dart_rundir+'/sampling_error_correction_table.nc')
print('prepared DART & RTTOV links in', cluster.dart_rundir)
@@ -14,7 +14,11 @@ class ClusterConfig(object):
"""Collection of variables regarding the cluster configuration"""
def __init__(self, exp):
self.exp = exp # makes derived properties available
self.dart_modules = '' # default value
# defaults
self.dart_modules = ''
self.wrf_modules = ''
self.size_jobarray = '1'
@property
def archivedir(self):
@@ -71,8 +75,9 @@ class ClusterConfig(object):
else:
print(cmd)
returncode = os.system(cmd)
print(cmd, 'returncode', returncode)
if returncode != 0:
raise Exception('Error running command: '+cmd)
raise Exception('Error running command >>> '+cmd)
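With this change, a non-zero return code raises instead of failing silently. A hedged usage sketch (the enclosing method name is not visible in this hunk; run_cmd is assumed for illustration):

# try:
#     cluster.run_cmd('ncks --version')
# except Exception as e:
#     print('command failed:', e)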
userhome = os.path.expanduser('~')
......