From 853b2838bc42d16d40a675b5413ced969e2e8dae Mon Sep 17 00:00:00 2001
From: Lukas Kugler <lukas.kugler@univie.ac.at>
Date: Tue, 11 Feb 2025 21:43:46 +0100
Subject: [PATCH] work in progress

---
 config/jet.py                      | 107 ++++-----
 dartwrf/assimilate.py              |  12 +-
 dartwrf/create_obskind_table.py    |  34 +--
 dartwrf/create_wbubble_wrfinput.py |   2 +-
 dartwrf/exp_config.py              | 173 ---------------
 dartwrf/namelist_handler.py        |  88 ++++++++
 dartwrf/obs/__init__.py            |  28 +++
 dartwrf/obs/obskind.py             | 338 -----------------------------
 dartwrf/obs/obsseq.py              |   4 +-
 dartwrf/obs/run_obs_diag.py        |   2 +-
 dartwrf/prep_IC_prior.py           |   4 +-
 dartwrf/prepare_namelist.py        | 128 ++++++-----
 dartwrf/prepare_wrfrundir.py       |  13 +-
 dartwrf/run_ens.jet.sh             |  25 ---
 dartwrf/run_ens.vsc.gfortran.sh    |  42 ----
 dartwrf/run_ens.vsc.mem_0096.sh    |  43 ----
 dartwrf/run_ens.vsc.sh             |  40 ----
 dartwrf/run_ens.vsc.single.sh      |  15 --
 dartwrf/server_config.py           | 107 ++++-----
 dartwrf/update_IC.py               |   2 +-
 dartwrf/utils.py                   | 225 +++++++++++--------
 dartwrf/workflows.py               | 329 ++++++++++++++--------------
 docs/source/tutorial1.rst          |  10 +-
 multiple_exps.py                   |  79 +++++++
 24 files changed, 701 insertions(+), 1149 deletions(-)
 delete mode 100644 dartwrf/exp_config.py
 create mode 100644 dartwrf/namelist_handler.py
 delete mode 100644 dartwrf/obs/obskind.py
 delete mode 100755 dartwrf/run_ens.jet.sh
 delete mode 100755 dartwrf/run_ens.vsc.gfortran.sh
 delete mode 100755 dartwrf/run_ens.vsc.mem_0096.sh
 delete mode 100755 dartwrf/run_ens.vsc.sh
 delete mode 100755 dartwrf/run_ens.vsc.single.sh
 create mode 100644 multiple_exps.py

diff --git a/config/jet.py b/config/jet.py
index ebb475d..df71c91 100755
--- a/config/jet.py
+++ b/config/jet.py
@@ -1,55 +1,56 @@
 """Cluster configuration file, see docstring of ClusterConfig class in dartwrf/utils.py for details"""
-from dartwrf import utils
-from dartwrf.exp_config import exp
-
-cluster = utils.ClusterConfig(exp)
-cluster.name = 'jet'
-cluster.max_nproc = 20
-cluster.use_slurm = True
-cluster.size_WRF_jobarray = exp.n_ens
-cluster.np_WRF = 16
-
-# binaries
-cluster.python = '/jetfs/home/lkugler/miniforge3/envs/verif/bin/python'
-cluster.ncks = '/jetfs/spack/opt/spack/linux-rhel8-skylake_avx512/intel-2021.7.1/nco-5.1.0-izrhxv24jqco5epjhf5ledsqwanojc5m/bin/ncks'
-cluster.ideal = '/jetfs/home/lkugler/data/compile/bin/ideal-v4.6.0_20250210_StS.exe'
-cluster.wrfexe = '/jetfs/home/lkugler/data/compile/bin/wrf-v4.6.0_20250210_StS.exe'
-cluster.dart_modules = 'module purge; module load rttov/v13.2-gcc-8.5.0'
-cluster.wrf_modules = """module purge; module load netcdf-fortran/4.5.3-intel-2021.7.1-27ldrnt"""
-
-# paths for data output
-cluster.wrf_rundir_base = '/jetfs/home/lkugler/data/run_WRF/'  # path for temporary files
-cluster.dart_rundir_base = '/jetfs/home/lkugler/data/run_DART/'  # path for temporary files
-cluster.archive_base = '/jetfs/home/lkugler/data/sim_archive/'
-
-# paths used as input
-cluster.srcdir = '/jetfs/home/lkugler/data/compile/WRF-4.3/run'
-cluster.dart_srcdir = '/jetfs/home/lkugler/data/compile/DART/DART-10.8.3_10pct/models/wrf/work/'
-cluster.rttov_srcdir = '/jetfs/home/lkugler/data/compile/RTTOV13/rtcoef_rttov13/'
-cluster.dartwrf_dir = '/jetfs/home/lkugler/DART-WRF/'
-
-# other inputs
-cluster.geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.250m_1600x1600' 
-cluster.geo_em_forecast = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200'
-#cluster.obs_impact_filename = cluster.dartwrf_dir+'/templates/impactfactor_T.txt'
-cluster.namelist = cluster.dartwrf_dir+'/config/templates/namelist.input_nat_exact'
-cluster.rttov_nml = cluster.dartwrf_dir + "/config/templates/obs_def_rttov.VIS+WV.nml"
-cluster.run_WRF = '/jetfs/home/lkugler/DART-WRF/dartwrf/run_ens.jet.sh'
-
-cluster.slurm_cfg = {"account": "lkugler", "partition": "all",  
-                 "ntasks": "1", "ntasks-per-core": "1", "mem": "30G",
-                 "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"}
-
-# WRF file format, will only change if WRF changes
-cluster.wrfout_format = '/wrfout_d01_%Y-%m-%d_%H:%M:%S'
-
-# pattern for the init_time folder in sim_archive
-cluster.pattern_init_time = "/%Y-%m-%d_%H:%M/"
-
-# how an obs_seq.out file is archived
-cluster.pattern_obs_seq_out = cluster.archivedir + \
-    "/diagnostics/%Y-%m-%d_%H:%M_obs_seq.out"  
+from re import A
+from dartwrf.utils import ClusterConfig
+
+cluster = ClusterConfig(
+    max_nproc = 20,
+    max_nproc_for_each_ensemble_member = 16,
+    use_slurm = True,
+
+    # binaries
+    python = '/jetfs/home/lkugler/miniforge3/envs/verif/bin/python',
+    ncks = '/jetfs/spack/opt/spack/linux-rhel8-skylake_avx512/intel-2021.7.1/nco-5.1.0-izrhxv24jqco5epjhf5ledsqwanojc5m/bin/ncks',
+    ideal = '/jetfs/home/lkugler/data/compile/bin/ideal-v4.6.0_20250210_StS.exe',
+    wrfexe = '/jetfs/home/lkugler/data/compile/bin/wrf-v4.6.0_20250210_StS.exe',
+    dart_modules = 'module purge; module load rttov/v13.2-gcc-8.5.0',
+    wrf_modules = """module purge; module load netcdf-fortran/4.5.3-intel-2021.7.1-27ldrnt""",
+
+    # paths for data output
+    wrf_rundir_base = '/jetfs/home/lkugler/data/run_WRF/',  # path for temporary files
+    dart_rundir_base = '/jetfs/home/lkugler/data/run_DART/',  # path for temporary files
+    archive_base = '/jetfs/home/lkugler/data/sim_archive/',
+
+    # paths used as input
+    srcdir = '/jetfs/home/lkugler/data/compile/WRF-4.3/run',
+    dart_srcdir = '/jetfs/home/lkugler/data/compile/DART/DART-10.8.3_10pct/models/wrf/work/',
+    rttov_srcdir = '/jetfs/home/lkugler/data/compile/RTTOV13/rtcoef_rttov13/',
     
-# how an obs_seq.final file is archived
-cluster.pattern_obs_seq_final = cluster.archivedir + \
-    "/diagnostics/%Y-%m-%d_%H:%M_obs_seq.final"  
+    dartwrf_dir_dev = '/jetfs/home/lkugler/DART-WRF/',
+    WRF_namelist_template = '/jetfs/home/lkugler/DART-WRF/config/templates/namelist.input_nat_exact',
+    rttov_nml = "/jetfs/home/lkugler/DART-WRF/config/templates/obs_def_rttov.VIS+WV.nml",
+    
+    # other inputs
+    geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.250m_1600x1600',
+    geo_em_forecast = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200',
+    #obs_impact_filename = dartwrf_dir_dev+'/templates/impactfactor_T.txt'
+
+    WRF_exe_template = '/jetfs/home/lkugler/DART-WRF/config/templates/run_WRF.jet.sh',
+    WRF_ideal_template = '/jetfs/home/lkugler/DART-WRF/config/templates/run_WRF_ideal.sh',
+
+    slurm_cfg = {"account": "lkugler", "partition": "all",  
+                    "ntasks": "1", "ntasks-per-core": "1", "mem": "30G",
+                    "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"},
+
+    # WRF file format, will only change if WRF changes
+    wrfout_format = '/wrfout_d01_%Y-%m-%d_%H:%M:%S',
+
+    # pattern for the init_time folder in sim_archive
+    pattern_init_time = "/%Y-%m-%d_%H:%M/",
+    
+    # how an obs_seq.out file is archived
+    pattern_obs_seq_out = "<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.out",
+        
+    # how an obs_seq.final file is archived
+    pattern_obs_seq_final = "<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.final",  
+
+    )
\ No newline at end of file
diff --git a/dartwrf/assimilate.py b/dartwrf/assimilate.py
index 4059257..71ebb75 100755
--- a/dartwrf/assimilate.py
+++ b/dartwrf/assimilate.py
@@ -71,7 +71,7 @@ def prepare_prior_ensemble(assim_time, prior_init_time, prior_valid_time, prior_
     - removes probably pre-existing files which could lead to problems
     """
     print("prepare prior ensemble")
-    for iens in range(1, exp.n_ens + 1):
+    for iens in range(1, exp.ensemble_size + 1):
 
         print("link wrfout file to DART background file")
         wrfout_run = (
@@ -115,7 +115,7 @@ def use_linked_files_as_prior():
     """Instruct DART to use the prior ensemble as input
     """
     files = []
-    for iens in range(1, exp.n_ens+1):
+    for iens in range(1, exp.ensemble_size+1):
         files.append("./prior_ens" + str(iens) + "/wrfout_d01")
     write_txt(files, cluster.dart_rundir+'/input_list.txt')
 
@@ -124,7 +124,7 @@ def use_filter_output_as_prior():
     """Use the last posterior as input for DART, e.g. to evaluate the analysis in observation space
     """
     files = []
-    for iens in range(1, exp.n_ens+1):
+    for iens in range(1, exp.ensemble_size+1):
         f_new = cluster.dart_rundir+'/prior_ens'+str(iens)+'/wrfout_d01'
         try:
             os.remove(f_new)
@@ -139,7 +139,7 @@ def use_filter_output_as_prior():
 
 def write_list_of_outputfiles():
     files = []
-    for iens in range(1, exp.n_ens+1):
+    for iens in range(1, exp.ensemble_size+1):
         files.append("./filter_restart_d01." + str(iens).zfill(4))
     write_txt(files, cluster.dart_rundir+'/output_list.txt')
 
@@ -187,7 +187,7 @@ def archive_filteroutput(time):
     copy(cluster.dart_rundir + "/input.nml", dir_out + "/input.nml")
 
     # copy filter_restart files to archive (initial condition for next run)
-    for iens in range(1, exp.n_ens + 1):  # single members
+    for iens in range(1, exp.ensemble_size + 1):  # single members
         copy(
             cluster.dart_rundir + "/filter_restart_d01." + str(iens).zfill(4),
             dir_out + "/filter_restart_d01." + str(iens).zfill(4),
@@ -206,7 +206,7 @@ def archive_filteroutput(time):
         try:
             ftypes = ['preassim', 'postassim']
             for ftype in ftypes:
-                for iens in range(1, exp.n_ens + 1):
+                for iens in range(1, exp.ensemble_size + 1):
                     fname = "/"+ftype+"_member_" + str(iens).zfill(4) + ".nc"
                     copy(cluster.dart_rundir + fname, dir_out + fname)
         except Exception as e:
diff --git a/dartwrf/create_obskind_table.py b/dartwrf/create_obskind_table.py
index 2c395bf..5994018 100644
--- a/dartwrf/create_obskind_table.py
+++ b/dartwrf/create_obskind_table.py
@@ -29,35 +29,6 @@ def _dict_to_py(d, outfile):
         f.write(txt)
 
 
-def _obskind_read(dart_srcdir):
-    """Read dictionary of observation types + ID numbers ("kind") 
-    from DART f90 script and return it as python dictionary
-    """
-    definitionfile = dart_srcdir + \
-        '/../../../assimilation_code/modules/observations/obs_kind_mod.f90'
-    with open(definitionfile, 'r') as f:
-        kind_def_f = f.readlines()
-
-    obskind_nrs = {}
-    for i, line in enumerate(kind_def_f):
-        if 'Integer definitions for DART OBS TYPES' in line:
-            # data starts below this line
-            i_start = i
-            break
-    for line in kind_def_f[i_start+1:]:
-        if 'MAX_DEFINED_TYPES_OF_OBS' in line:
-            # end of data
-            break
-        if '::' in line:
-            # a line looks like this
-            # integer, parameter, public ::       MSG_4_SEVIRI_TB =   261
-            data = line.split('::')[-1].split('=')
-            kind_str = data[0].strip()
-            kind_nr = int(data[1].strip())
-            obskind_nrs[kind_str] = kind_nr
-    return obskind_nrs
-
-
 def _save_config_to_scriptsdir(server_config, original_scripts_dir):
     try:
         dir_path = os.path.dirname(os.path.realpath(__file__))
@@ -80,10 +51,9 @@ def run(server_config='jet.py'):
     # import the config from scripts_dir
     sys.path.append(original_scripts_dir)
     from server_config import cluster
+    from dartwrf import obs
 
-    dart_srcdir = cluster.dart_srcdir
-
-    obskind_dictionary = _obskind_read(dart_srcdir)
+    obskind_dictionary = obs.obskind_read(cluster.dart_srcdir)
 
     _dict_to_py(obskind_dictionary, original_scripts_dir+'/obs/obskind.py')
     print('>>>', original_scripts_dir+'/obs/obskind.py', 'created')
diff --git a/dartwrf/create_wbubble_wrfinput.py b/dartwrf/create_wbubble_wrfinput.py
index bc79875..6fd8cac 100644
--- a/dartwrf/create_wbubble_wrfinput.py
+++ b/dartwrf/create_wbubble_wrfinput.py
@@ -23,7 +23,7 @@ if __name__ == "__main__":
     print('perturb wbubble = ', perturbations)
 
 
-    for iens in range(1, exp.n_ens+1):
+    for iens in range(1, exp.ensemble_size+1):
         print('iens', iens)
         wrfin = cluster.wrf_rundir(iens)+'/wrfinput_d01'
 
diff --git a/dartwrf/exp_config.py b/dartwrf/exp_config.py
deleted file mode 100644
index e570351..0000000
--- a/dartwrf/exp_config.py
+++ /dev/null
@@ -1,173 +0,0 @@
-from dartwrf.utils import Experiment
-
-exp = Experiment()
-exp.expname = "exp_test"
-exp.model_dx = 2000
-exp.n_ens = 4
-exp.do_quality_control = False
-
-# path to the nature run, where we take observations from
-#exp.nature_wrfout_pattern = '/jetfs/home/lkugler/data/sim_archive/exp_v1.18_P1_nature+1/*/1/wrfout_d01_%Y-%m-%d_%H:%M:%S'
-#exp.nature_wrfout_pattern = '/jetfs/home/lkugler/data/sim_archive/exp_v1.18_P1_nature/*/1/wrfout_d01_%Y-%m-%d_%H:%M:%S'
-exp.nature_wrfout_pattern = '/jetfs/home/lkugler/data/sim_archive/exp_v1.19_P3_wbub7_nat/*/1/wrfout_d01_%Y-%m-%d_%H:%M:%S'
-
-exp.input_profile = '/mnt/jetfs/home/lkugler/data/initial_profiles/wrf/ens/2022-03-31/raso.fc.<iens>.wrfprof'
-
-
-exp.dart_nml = {'&assim_tools_nml':
-                    dict(filter_kind='1',
-                        sampling_error_correction='.true.',
-                        # obs_impact_filename='/jetfs/home/lkugler/DART-WRF/templates/impactfactor_T.txt',
-                        ),
-                '&filter_nml':
-                    dict(   ens_size=exp.n_ens,
-                            num_output_state_members=exp.n_ens,
-                            num_output_obs_members=exp.n_ens,
-                            inf_flavor=['0', '4'],
-                            inf_initial=[1.04, 0.5],
-                            inf_initial_from_restart='.false.',
-                            output_members='.true.',
-                            output_mean='.true.',
-                            output_sd='.true.',
-                            stages_to_write='output',
-                        ),
-                '&quality_control_nml':
-                    dict(outlier_threshold='-1',
-                        ),
-                '&obs_def_radar_mod_nml':
-                    dict(apply_ref_limit_to_obs      =  '.true.',
-                         reflectivity_limit_obs      =  5.0,
-                         lowest_reflectivity_obs     =  5.0,
-                         apply_ref_limit_to_fwd_op   =  '.true.',
-                         reflectivity_limit_fwd_op   =  5.0,
-                         lowest_reflectivity_fwd_op  =  5.0,
-                         microphysics_type           =  '5',
-                         ),
-                '&location_nml':
-                    dict(horiz_dist_only='.false.',
-                        ),
-                '&model_nml':
-                    dict(wrf_state_variables = 
-                        [['U',     'QTY_U_WIND_COMPONENT',     'TYPE_U',    'UPDATE','999',],
-                         ['V',     'QTY_V_WIND_COMPONENT',     'TYPE_V',    'UPDATE','999',],
-                         ['W',     'QTY_VERTICAL_VELOCITY',    'TYPE_W',    'UPDATE','999',],
-                         ['PH',    'QTY_GEOPOTENTIAL_HEIGHT',  'TYPE_GZ',   'UPDATE','999',],
-                         ['THM',   'QTY_POTENTIAL_TEMPERATURE','TYPE_T',    'UPDATE','999',],
-                         ['MU',    'QTY_PRESSURE',             'TYPE_MU',   'UPDATE','999',],
-
-                         ['QVAPOR','QTY_VAPOR_MIXING_RATIO',   'TYPE_QV',   'UPDATE','999',],
-                         ['QCLOUD','QTY_CLOUDWATER_MIXING_RATIO','TYPE_QC', 'UPDATE','999',],
-                         ['QICE',  'QTY_ICE_MIXING_RATIO',     'TYPE_QI',   'UPDATE','999',],
-                        #  ['QRAIN','QTY_RAINWATER_MIXING_RATIO','TYPE_QR', 'UPDATE','999',],
-                        #  ['QSNOW','QTY_SNOW_MIXING_RATIO','TYPE_QS', 'UPDATE','999',],
-                        #  ['QGRAUP','QTY_GRAUPEL_MIXING_RATIO','TYPE_QG', 'UPDATE','999',],
-
-                         ['CLDFRA','QTY_CLOUD_FRACTION',       'TYPE_CFRAC','UPDATE','999',],
-                         ['PSFC',  'QTY_SURFACE_PRESSURE',     'TYPE_PSFC', 'UPDATE','999',],
-                         ['T2',    'QTY_2M_TEMPERATURE',       'TYPE_T',    'UPDATE','999',],
-                         ['TSK',   'QTY_SKIN_TEMPERATURE',     'TYPE_T',    'UPDATE','999',],
-                         ['REFL_10CM','QTY_RADAR_REFLECTIVITY','TYPE_REFL', 'UPDATE','999',]],
-                         
-                        wrf_state_bounds = 
-                        [['QVAPOR','0.0','NULL','CLAMP'],
-                         ['QCLOUD','0.0','NULL','CLAMP'],
-                         ['QICE','0.0','NULL','CLAMP'],
-                         ['CLDFRA','0.0','1.0','CLAMP'],
-                         
-                        #  ['QRAIN','0.0','NULL','CLAMP'],
-                        #  ['QSNOW','0.0','NULL','CLAMP'],
-                        #  ['QGRAUP','0.0','NULL','CLAMP'],
-                         ],
-                        ),
-                '&ensemble_manager_nml':
-                   dict(layout = 1,
-                        tasks_per_node = 12,
-                        communication_configuration = 1,
-                        ),
-                }
-
-
-
-# n_obs can be 22500: 2km, 5776: 4km, 121: 30km, 256:16x16 (20km); 961: 10km resoltn 
-# if radar: then n_obs is for each observation height level
-oeinf = 4.**.5
-
-vis = dict(var_name='VIS 0.6µm', unit='[1]',
-           kind='MSG_4_SEVIRI_BDRF', sat_channel=1, 
-           n_obs=961, obs_locations='square_array_evenly_on_grid',
-           # n_obs=1, obs_locations=[(44.141, -0.99)],
-           error_generate=0.03, error_assimilate=0.03*oeinf,
-           loc_horiz_km=20, 
-           #height=4000, loc_vert_km=3
-           )
-
-wv73 = dict(var_name='Brightness temperature WV 7.3µm', unit='[K]',
-            kind='MSG_4_SEVIRI_TB', sat_channel=6, 
-            n_obs=961, obs_locations='square_array_evenly_on_grid',
-            error_generate=1, error_assimilate=1*oeinf, 
-            loc_horiz_km=20, 
-            #height=7000, loc_vert_km=3
-            )
-
-wv62 = dict(var_name='Brightness temperature WV 6.2µm', unit='[K]',
-            kind='MSG_4_SEVIRI_TB', sat_channel=5, 
-            n_obs=961,  obs_locations='square_array_evenly_on_grid',
-            # n_obs=1, obs_locations=[(44.141, -0.99)],
-            error_generate=1, error_assimilate=1*oeinf, 
-            loc_horiz_km=20, 
-            #height=10000, loc_vert_km=3
-            )
-
-ir108 = dict(var_name='Brightness temperature IR 10.8µm', unit='[K]',
-             kind='MSG_4_SEVIRI_TB', sat_channel=9, 
-             n_obs=1, obs_locations='square_array_evenly_on_grid',
-             error_generate=5., error_assimilate=10.,
-             loc_horiz_km=32)
-
-radar = dict(var_name='Radar reflectivity', unit='[dBz]',
-             kind='RADAR_REFLECTIVITY', 
-             n_obs=961, obs_locations='square_array_evenly_on_grid',
-             # n_obs=2, obs_locations=[(45.332, 0.4735), (45.332, 0.53)],
-             heights=range(2000, 14001, 2000),
-             error_generate=2.5, error_assimilate=2.5*oeinf,
-             loc_horiz_km=20, loc_vert_km=3)
-
-t = dict(var_name='Temperature', unit='[K]',
-         kind='RADIOSONDE_TEMPERATURE', 
-         #n_obs=22500, obs_locations='square_array_evenly_on_grid',
-         n_obs=1, obs_locations=[(45., 0.)],
-         error_generate=0.2, error_assimilate=0.2,
-         heights=[1000,], #range(1000, 17001, 2000),
-         loc_horiz_km=50, loc_vert_km=2.5)
-
-q = dict(var_name='Specific humidity', unit='[kg/kg]',
-         kind='RADIOSONDE_SPECIFIC_HUMIDITY', n_obs=1,
-         error_generate=0., error_assimilate=5*1e-5,
-         heights=[1000], #range(1000, 17001, 2000),
-         loc_horiz_km=50, loc_vert_km=2.5)
-
-t2m = dict(var_name='SYNOP Temperature', unit='[K]',
-           kind='SYNOP_TEMPERATURE', 
-           n_obs=256, obs_locations='square_array_evenly_on_grid',
-           error_generate=0.3, error_assimilate=0.3,
-           loc_horiz_km=10, loc_vert_km=2)
-
-psfc = dict(var_name='SYNOP Pressure', unit='[Pa]',
-            kind='SYNOP_SURFACE_PRESSURE', n_obs=1, 
-            error_generate=50., error_assimilate=100.,
-            loc_horiz_km=32, loc_vert_km=5)
-
-exp.observations = [t, radar, ]
-
-# the variables which will be replaced in the WRF initial conditions file
-exp.update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'PSFC']
-#exp.update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'PSFC']
-#exp.update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'QRAIN', 'QSNOW', 'QGRAUP', 'PSFC']
-#exp.update_vars = ['QVAPOR', 'QCLOUD', 'QICE', 'PSFC']
-
-exp.use_existing_obsseq = False
-# exp.use_existing_obsseq='/jetfs/home/lkugler/data/sim_archive/exp_v1.22_P2_rr_WV73_obs10_loc20_oe1/obs_seq_out/%Y-%m-%d_%H:%M_obs_seq.out'
-
-
-
-
diff --git a/dartwrf/namelist_handler.py b/dartwrf/namelist_handler.py
new file mode 100644
index 0000000..351f887
--- /dev/null
+++ b/dartwrf/namelist_handler.py
@@ -0,0 +1,88 @@
+import warnings
+import os
+
+# author: gthalassinos
+
+class WRF_namelist():
+    """Class to read and write WRF namelist files
+
+    Example:
+        n = WRF_namelist('/path_to_existing/namelist.input')
+        n.read()
+        
+        n.namelist  # is a dictionary
+        n.namelist['time_control']['run_days'] = 1
+        
+        n.write('/path_to_new/namelist.input')
+    """
+    def __init__(self):
+        self.namelist = {}
+
+    def read(self, fname: str) -> None:
+        """Read a WRF namelist file and store it in a dictionary
+        """
+        with open(fname, 'r') as f:
+            lines  = f.readlines()
+        
+        subdomains = True
+        block_name = None
+        for l in lines:
+            l = l.strip()
+            if l.startswith("&"):
+                block_name = l.split("&")[1]
+                self.namelist[block_name] = {}
+            elif l.startswith("/"):
+                block_name = None
+            elif l.startswith("!") and block_name:
+                continue
+            elif (block_name) and ("=" in l):
+                variable, value = l.split("=")
+                variable = variable.strip()
+                value = value.strip().rstrip(",")
+
+                if "," in value:
+                    try:
+                        values = eval(value)
+                    except:
+                        raise ValueError(f"{variable} is not a tuple!")
+                    subdomains = True
+                    value = values
+                # check if we have single numbers
+                if not isinstance(value, tuple):
+                    try:
+                        value = int(value)
+                    except ValueError:
+                        try:
+                            value = float(value)
+                        except ValueError:
+                            pass
+
+                self.namelist[
+                    block_name][
+                        variable] = value    
+
+            subdomains = False            
+                
+        pass
+
+    def write(self, fname: str) -> None:
+        """Write a WRF namelist file
+        """
+        if os.path.exists(fname):
+            warnings.warn(f"{fname} already exists!")
+            if input('Continue? (Y/n) ') in ['Y', 'y']:
+                pass      
+            else:
+                raise FileExistsError  
+        
+        with open(fname, 'w') as file:
+            for block, variables in self.namelist.items():
+                file.write(f" &{block}\n")
+                for variable, value in variables.items():
+                    if isinstance(value, str) and not value.startswith('.'):
+                        value = f'{value}'
+                    if isinstance(value, tuple):
+                        value = str(value)[1:-1]
+                    file.write(f" {variable:<35} = {value},\n")
+                file.write(" /\n\n")
+        pass    
\ No newline at end of file
diff --git a/dartwrf/obs/__init__.py b/dartwrf/obs/__init__.py
index e69de29..5c23b21 100644
--- a/dartwrf/obs/__init__.py
+++ b/dartwrf/obs/__init__.py
@@ -0,0 +1,28 @@
+
+def obskind_read(dart_srcdir: str) -> dict:
+    """Read dictionary of observation types + ID numbers ("kind") 
+    from DART f90 script and return it as python dictionary
+    """
+    definitionfile = dart_srcdir + \
+        '/../../../assimilation_code/modules/observations/obs_kind_mod.f90'
+    with open(definitionfile, 'r') as f:
+        kind_def_f = f.readlines()
+
+    obskind_nrs = {}
+    for i, line in enumerate(kind_def_f):
+        if 'Integer definitions for DART OBS TYPES' in line:
+            # data starts below this line
+            i_start = i
+            break
+    for line in kind_def_f[i_start+1:]:
+        if 'MAX_DEFINED_TYPES_OF_OBS' in line:
+            # end of data
+            break
+        if '::' in line:
+            # a line looks like this
+            # integer, parameter, public ::       MSG_4_SEVIRI_TB =   261
+            data = line.split('::')[-1].split('=')
+            kind_str = data[0].strip()
+            kind_nr = int(data[1].strip())
+            obskind_nrs[kind_str] = kind_nr
+    return obskind_nrs
\ No newline at end of file
diff --git a/dartwrf/obs/obskind.py b/dartwrf/obs/obskind.py
deleted file mode 100644
index 8011b5a..0000000
--- a/dartwrf/obs/obskind.py
+++ /dev/null
@@ -1,338 +0,0 @@
-""" NOTE: This file is autogenerated! 
-Use dartwrf/create_obskind_table.py to regenerate!
-"""
-obs_kind_nrs = {
-"RADIOSONDE_U_WIND_COMPONENT": 1, 
-"RADIOSONDE_V_WIND_COMPONENT": 2, 
-"RADIOSONDE_GEOPOTENTIAL_HGT": 3, 
-"RADIOSONDE_SURFACE_PRESSURE": 4, 
-"RADIOSONDE_TEMPERATURE": 5, 
-"RADIOSONDE_SPECIFIC_HUMIDITY": 6, 
-"DROPSONDE_U_WIND_COMPONENT": 7, 
-"DROPSONDE_V_WIND_COMPONENT": 8, 
-"DROPSONDE_SURFACE_PRESSURE": 9, 
-"DROPSONDE_TEMPERATURE": 10, 
-"DROPSONDE_SPECIFIC_HUMIDITY": 11, 
-"AIRCRAFT_U_WIND_COMPONENT": 12, 
-"AIRCRAFT_V_WIND_COMPONENT": 13, 
-"AIRCRAFT_TEMPERATURE": 14, 
-"AIRCRAFT_SPECIFIC_HUMIDITY": 15, 
-"ACARS_U_WIND_COMPONENT": 16, 
-"ACARS_V_WIND_COMPONENT": 17, 
-"ACARS_TEMPERATURE": 18, 
-"ACARS_SPECIFIC_HUMIDITY": 19, 
-"MARINE_SFC_U_WIND_COMPONENT": 20, 
-"MARINE_SFC_V_WIND_COMPONENT": 21, 
-"MARINE_SFC_TEMPERATURE": 22, 
-"MARINE_SFC_SPECIFIC_HUMIDITY": 23, 
-"MARINE_SFC_PRESSURE": 24, 
-"LAND_SFC_U_WIND_COMPONENT": 25, 
-"LAND_SFC_V_WIND_COMPONENT": 26, 
-"LAND_SFC_TEMPERATURE": 27, 
-"LAND_SFC_SPECIFIC_HUMIDITY": 28, 
-"LAND_SFC_PRESSURE": 29, 
-"SAT_U_WIND_COMPONENT": 30, 
-"SAT_V_WIND_COMPONENT": 31, 
-"ATOV_TEMPERATURE": 32, 
-"AIRS_TEMPERATURE": 33, 
-"AIRS_SPECIFIC_HUMIDITY": 34, 
-"GPS_PRECIPITABLE_WATER": 35, 
-"VADWND_U_WIND_COMPONENT": 36, 
-"VADWND_V_WIND_COMPONENT": 37, 
-"CIMMS_AMV_U_WIND_COMPONENT": 38, 
-"CIMMS_AMV_V_WIND_COMPONENT": 39, 
-"DOPPLER_RADIAL_VELOCITY": 40, 
-"RADAR_REFLECTIVITY": 41, 
-"RADAR_CLEARAIR_REFLECTIVITY": 42, 
-"PRECIPITATION_FALL_SPEED": 43, 
-"METAR_U_10_METER_WIND": 44, 
-"METAR_V_10_METER_WIND": 45, 
-"METAR_TEMPERATURE_2_METER": 46, 
-"METAR_SPECIFIC_HUMIDITY_2_METER": 47, 
-"METAR_SURFACE_PRESSURE": 48, 
-"METAR_POT_TEMP_2_METER": 49, 
-"DEWPOINT": 50, 
-"DEWPOINT_2_METER": 51, 
-"BUOY_DEWPOINT": 52, 
-"SHIP_DEWPOINT": 53, 
-"SYNOP_DEWPOINT": 54, 
-"AIREP_DEWPOINT": 55, 
-"AMDAR_DEWPOINT": 56, 
-"PILOT_DEWPOINT": 57, 
-"BOGUS_DEWPOINT": 58, 
-"AIRS_DEWPOINT": 59, 
-"METAR_DEWPOINT_2_METER": 60, 
-"RADIOSONDE_DEWPOINT": 61, 
-"DROPSONDE_DEWPOINT": 62, 
-"AIRCRAFT_DEWPOINT": 63, 
-"ACARS_DEWPOINT": 64, 
-"MARINE_SFC_DEWPOINT": 65, 
-"LAND_SFC_DEWPOINT": 66, 
-"RADIOSONDE_RELATIVE_HUMIDITY": 67, 
-"DROPSONDE_RELATIVE_HUMIDITY": 68, 
-"AIRCRAFT_RELATIVE_HUMIDITY": 69, 
-"ACARS_RELATIVE_HUMIDITY": 70, 
-"MARINE_SFC_RELATIVE_HUMIDITY": 71, 
-"LAND_SFC_RELATIVE_HUMIDITY": 72, 
-"METAR_RELATIVE_HUMIDITY_2_METER": 73, 
-"AIRS_RELATIVE_HUMIDITY": 74, 
-"MESONET_RELATIVE_HUMIDITY": 75, 
-"RADIOSONDE_SURFACE_ALTIMETER": 76, 
-"DROPSONDE_SURFACE_ALTIMETER": 77, 
-"MARINE_SFC_ALTIMETER": 78, 
-"LAND_SFC_ALTIMETER": 79, 
-"METAR_ALTIMETER": 80, 
-"MESONET_SURFACE_ALTIMETER": 81, 
-"TEMPERATURE": 82, 
-"SPECIFIC_HUMIDITY": 83, 
-"PRESSURE": 84, 
-"GPSRO_REFRACTIVITY": 85, 
-"VORTEX_LAT": 86, 
-"VORTEX_LON": 87, 
-"VORTEX_PMIN": 88, 
-"VORTEX_WMAX": 89, 
-"BUOY_U_WIND_COMPONENT": 90, 
-"BUOY_V_WIND_COMPONENT": 91, 
-"BUOY_SURFACE_PRESSURE": 92, 
-"BUOY_TEMPERATURE": 93, 
-"SHIP_U_WIND_COMPONENT": 94, 
-"SHIP_V_WIND_COMPONENT": 95, 
-"SHIP_SURFACE_PRESSURE": 96, 
-"SHIP_TEMPERATURE": 97, 
-"SYNOP_U_WIND_COMPONENT": 98, 
-"SYNOP_V_WIND_COMPONENT": 99, 
-"SYNOP_SURFACE_PRESSURE": 100, 
-"SYNOP_SPECIFIC_HUMIDITY": 101, 
-"SYNOP_TEMPERATURE": 102, 
-"AIREP_U_WIND_COMPONENT": 103, 
-"AIREP_V_WIND_COMPONENT": 104, 
-"AIREP_PRESSURE": 105, 
-"AIREP_TEMPERATURE": 106, 
-"AMDAR_U_WIND_COMPONENT": 107, 
-"AMDAR_V_WIND_COMPONENT": 108, 
-"AMDAR_PRESSURE": 109, 
-"AMDAR_TEMPERATURE": 110, 
-"PILOT_U_WIND_COMPONENT": 111, 
-"PILOT_V_WIND_COMPONENT": 112, 
-"PILOT_PRESSURE": 113, 
-"PILOT_TEMPERATURE": 114, 
-"BOGUS_U_WIND_COMPONENT": 115, 
-"BOGUS_V_WIND_COMPONENT": 116, 
-"BOGUS_PRESSURE": 117, 
-"BOGUS_TEMPERATURE": 118, 
-"PROFILER_U_WIND_COMPONENT": 119, 
-"PROFILER_V_WIND_COMPONENT": 120, 
-"PROFILER_PRESSURE": 121, 
-"SATEM_THICKNESS": 122, 
-"NOAA_1_VTPR1_RADIANCE": 123, 
-"NOAA_2_VTPR1_RADIANCE": 124, 
-"NOAA_3_VTPR1_RADIANCE": 125, 
-"NOAA_4_VTPR1_RADIANCE": 126, 
-"NOAA_5_HIRS_RADIANCE": 127, 
-"NOAA_5_MSU_TB": 128, 
-"NOAA_5_AVHRR_RADIANCE": 129, 
-"NOAA_6_HIRS_RADIANCE": 130, 
-"NOAA_6_MSU_TB": 131, 
-"NOAA_6_AVHRR_RADIANCE": 132, 
-"NOAA_7_HIRS_RADIANCE": 133, 
-"NOAA_7_MSU_TB": 134, 
-"NOAA_7_AVHRR_RADIANCE": 135, 
-"NOAA_8_HIRS_RADIANCE": 136, 
-"NOAA_8_MSU_TB": 137, 
-"NOAA_8_AVHRR_RADIANCE": 138, 
-"NOAA_9_HIRS_RADIANCE": 139, 
-"NOAA_9_MSU_TB": 140, 
-"NOAA_9_AVHRR_RADIANCE": 141, 
-"NOAA_10_HIRS_RADIANCE": 142, 
-"NOAA_10_MSU_TB": 143, 
-"NOAA_10_AVHRR_RADIANCE": 144, 
-"NOAA_11_HIRS_RADIANCE": 145, 
-"NOAA_11_MSU_TB": 146, 
-"NOAA_11_AVHRR_RADIANCE": 147, 
-"NOAA_12_HIRS_RADIANCE": 148, 
-"NOAA_12_MSU_TB": 149, 
-"NOAA_12_AVHRR_RADIANCE": 150, 
-"NOAA_13_AVHRR_RADIANCE": 151, 
-"NOAA_14_HIRS_RADIANCE": 152, 
-"NOAA_14_MSU_TB": 153, 
-"NOAA_14_AVHRR_RADIANCE": 154, 
-"NOAA_15_HIRS_RADIANCE": 155, 
-"NOAA_15_AMSUA_TB": 156, 
-"NOAA_15_AMSUB_TB": 157, 
-"NOAA_15_AVHRR_RADIANCE": 158, 
-"NOAA_16_HIRS_RADIANCE": 159, 
-"NOAA_16_AMSUA_TB": 160, 
-"NOAA_16_AMSUB_TB": 161, 
-"NOAA_16_AVHRR_RADIANCE": 162, 
-"NOAA_17_HIRS_RADIANCE": 163, 
-"NOAA_17_AMSUA_TB": 164, 
-"NOAA_17_AMSUB_TB": 165, 
-"NOAA_17_AVHRR_RADIANCE": 166, 
-"NOAA_18_HIRS_RADIANCE": 167, 
-"NOAA_18_AMSUA_TB": 168, 
-"NOAA_18_AVHRR_RADIANCE": 169, 
-"NOAA_18_MHS_TB": 170, 
-"NOAA_19_HIRS_RADIANCE": 171, 
-"NOAA_19_AMSUA_TB": 172, 
-"NOAA_19_AVHRR_RADIANCE": 173, 
-"NOAA_19_MHS_TB": 174, 
-"NOAA_20_ATMS_TB": 175, 
-"NOAA_20_VIIRS_RADIANCE": 176, 
-"DMSP_8_SSMI_TB": 177, 
-"DMSP_9_SSMI_TB": 178, 
-"DMSP_10_SSMI_TB": 179, 
-"DMSP_11_SSMI_TB": 180, 
-"DMSP_11_SSMT2_TB": 181, 
-"DMSP_12_SSMI_TB": 182, 
-"DMSP_12_SSMT2_TB": 183, 
-"DMSP_13_SSMI_TB": 184, 
-"DMSP_14_SSMI_TB": 185, 
-"DMSP_14_SSMT2_TB": 186, 
-"DMSP_15_SSMI_TB": 187, 
-"DMSP_15_SSMT2_TB": 188, 
-"DMSP_16_SSMIS_TB": 189, 
-"DMSP_17_SSMIS_TB": 190, 
-"DMSP_18_SSMIS_TB": 191, 
-"DMSP_19_SSMIS_TB": 192, 
-"METEOSAT_1_MVIRI_RADIANCE": 193, 
-"METEOSAT_2_MVIRI_RADIANCE": 194, 
-"METEOSAT_3_MVIRI_RADIANCE": 195, 
-"METEOSAT_4_MVIRI_RADIANCE": 196, 
-"METEOSAT_5_MVIRI_RADIANCE": 197, 
-"METEOSAT_6_MVIRI_RADIANCE": 198, 
-"METEOSAT_7_MVIRI_RADIANCE": 199, 
-"GOES_4_SOUNDER_RADIANCE": 200, 
-"GOES_5_SOUNDER_RADIANCE": 201, 
-"GOES_6_SOUNDER_RADIANCE": 202, 
-"GOES_7_SOUNDER_RADIANCE": 203, 
-"GOES_8_IMAGER_RADIANCE": 204, 
-"GOES_8_SOUNDER_RADIANCE": 205, 
-"GOES_9_IMAGER_RADIANCE": 206, 
-"GOES_9_SOUNDER_RADIANCE": 207, 
-"GOES_10_IMAGER_RADIANCE": 208, 
-"GOES_10_SOUNDER_RADIANCE": 209, 
-"GOES_11_IMAGER_RADIANCE": 210, 
-"GOES_11_SOUNDER_RADIANCE": 211, 
-"GOES_12_IMAGER_RADIANCE": 212, 
-"GOES_12_SOUNDER_RADIANCE": 213, 
-"GOES_13_IMAGER_RADIANCE": 214, 
-"GOES_13_SOUNDER_RADIANCE": 215, 
-"GOES_14_IMAGER_RADIANCE": 216, 
-"GOES_14_SOUNDER_RADIANCE": 217, 
-"GOES_15_IMAGER_RADIANCE": 218, 
-"GOES_15_SOUNDER_RADIANCE": 219, 
-"GOES_16_ABI_RADIANCE": 220, 
-"GOES_17_ABI_RADIANCE": 221, 
-"GOES_18_ABI_RADIANCE": 222, 
-"GOES_19_ABI_RADIANCE": 223, 
-"GMS_1_IMAGER_RADIANCE": 224, 
-"GMS_2_IMAGER_RADIANCE": 225, 
-"GMS_3_IMAGER_RADIANCE": 226, 
-"GMS_4_IMAGER_RADIANCE": 227, 
-"GMS_5_IMAGER_RADIANCE": 228, 
-"FY2_2_VISSR_RADIANCE": 229, 
-"FY2_3_VISSR_RADIANCE": 230, 
-"FY2_4_VISSR_RADIANCE": 231, 
-"FY2_5_VISSR_RADIANCE": 232, 
-"FY2_7_VISSR_RADIANCE": 233, 
-"TRMM_1_TMI_TB": 234, 
-"ERS_1_ATSR_RADIANCE": 235, 
-"ERS_1_MWR_TB": 236, 
-"ERS_2_ATSR_RADIANCE": 237, 
-"ERS_2_MWR_TB": 238, 
-"EOS_1_MODIS_RADIANCE": 239, 
-"EOS_1_ASTER_RADIANCE": 240, 
-"EOS_2_AMSUA_TB": 241, 
-"EOS_2_AIRS_RADIANCE": 242, 
-"EOS_2_HSB_TB": 243, 
-"EOS_2_MODIS_RADIANCE": 244, 
-"EOS_2_AMSRE_TB": 245, 
-"METOP_1_HIRS_RADIANCE": 246, 
-"METOP_1_AMSUA_TB": 247, 
-"METOP_1_AVHRR_RADIANCE": 248, 
-"METOP_1_MHS_TB": 249, 
-"METOP_2_HIRS_RADIANCE": 250, 
-"METOP_2_AMSUA_TB": 251, 
-"METOP_2_AVHRR_RADIANCE": 252, 
-"METOP_2_MHS_TB": 253, 
-"METOP_3_AVHRR_RADIANCE": 254, 
-"ENVISAT_1_ATSR_RADIANCE": 255, 
-"ENVISAT_1_MWR_TB": 256, 
-"MSG_1_SEVIRI_RADIANCE": 257, 
-"MSG_2_SEVIRI_RADIANCE": 258, 
-"MSG_3_SEVIRI_RADIANCE": 259, 
-"MSG_4_SEVIRI_RADIANCE": 260, 
-"MSG_4_SEVIRI_TB": 261, 
-"MSG_4_SEVIRI_BDRF": 262, 
-"FY1_3_MVISR_RADIANCE": 263, 
-"FY1_4_MVISR_RADIANCE": 264, 
-"MTSAT_1_IMAGER_RADIANCE": 265, 
-"MTSAT_2_IMAGER_RADIANCE": 266, 
-"CORIOLIS_1_WINDSAT_TB": 267, 
-"JPSS_0_ATMS_TB": 268, 
-"JPSS_0_VIIRS_RADIANCE": 269, 
-"SENTINEL3_1_SLSTR_RADIANCE": 270, 
-"SENTINEL3_2_SLSTR_RADIANCE": 271, 
-"MEGHATR_1_SAPHIR_TB": 272, 
-"MEGHATR_1_MADRAS_TB": 273, 
-"FY3_1_MWTS_TB": 274, 
-"FY3_1_MWHS_TB": 275, 
-"FY3_1_IRAS_RADIANCE": 276, 
-"FY3_1_MWRI_TB": 277, 
-"FY3_2_MWTS_TB": 278, 
-"FY3_2_MWHS_TB": 279, 
-"FY3_2_MWRI_TB": 280, 
-"FY3_3_MWRI_TB": 281, 
-"FY3_3_MWTS2_TB": 282, 
-"FY3_3_MWHS2_TB": 283, 
-"FY3_3_MERSI1_RADIANCE": 284, 
-"FY3_4_MWRI_TB": 285, 
-"FY3_4_MWTS2_TB": 286, 
-"FY3_4_MWHS2_TB": 287, 
-"FY3_4_MERSI2_RADIANCE": 288, 
-"COMS_1_MI_RADIANCE": 289, 
-"METEOR_M_1_MSUMR_RADIANCE": 290, 
-"METEOR_M_2_MSUMR_RADIANCE": 291, 
-"METEOR_M_2_MTVZAGY_TB": 292, 
-"CALIPSO_1_IIR_RADIANCE": 293, 
-"GCOM_W_1_AMSR2_TB": 294, 
-"NIMBUS_3_MRIR_RADIANCE": 295, 
-"NIMBUS_4_THIR_RADIANCE": 296, 
-"NIMBUS_5_THIR_RADIANCE": 297, 
-"NIMBUS_6_HIRS_RADIANCE": 298, 
-"NIMBUS_6_SCAMS_TB": 299, 
-"NIMBUS_6_THIR_RADIANCE": 300, 
-"NIMBUS_7_SMMR_TB": 301, 
-"NIMBUS_7_THIR_RADIANCE": 302, 
-"HIMAWARI_8_AHI_RADIANCE": 303, 
-"HIMAWARI_9_AHI_RADIANCE": 304, 
-"MTG_1_FCI_RADIANCE": 305, 
-"SARAL_1_ALTIKA_TB": 306, 
-"METOPSG_1_ICI_TB": 307, 
-"METOPSG_1_METIMAGE_RADIANCE": 308, 
-"METOPSG_1_MWS_TB": 309, 
-"METOPSG_1_MWI_TB": 310, 
-"LANDSAT_4_TM_RADIANCE": 311, 
-"LANDSAT_5_TM_RADIANCE": 312, 
-"LANDSAT_7_TM_RADIANCE": 313, 
-"LANDSAT_8_TIRS_RADIANCE": 314, 
-"JASON_2_AMR_TB": 315, 
-"GPM_1_GMI_TB": 316, 
-"GPM_1_DPR_TB": 317, 
-"INSAT3_4_IMAGER_RADIANCE": 318, 
-"INSAT3_4_SOUNDER_RADIANCE": 319, 
-"INSAT3_5_IMAGER_RADIANCE": 320, 
-"INSAT3_5_SOUNDER_RADIANCE": 321, 
-"TICFIRE_1_MBFIRI_RADIANCE": 322, 
-"ISS_1_ECOSTRES_RADIANCE": 323, 
-"HJ1_2_IRMSS_RADIANCE": 324, 
-"GKOMPSAT2_1_AMI_RADIANCE": 325, 
-"GCOM_C_1_SGLI_RADIANCE": 326, 
-"SMOS_1_MIRAS_TB": 327, 
-"ORS_6_COWVR_TB": 328, 
-"FY4_1_AGRI_RADIANCE": 329, 
-"TROPICS_0_TROPICS_TB": 330, 
-"GF5_1_VIMS_RADIANCE": 331, 
-"HY2_1_MWRI_TB": 332, 
-"CLOUDSAT_1_CPR_TB": 333, 
-}
\ No newline at end of file
diff --git a/dartwrf/obs/obsseq.py b/dartwrf/obs/obsseq.py
index 7f0541f..7f35bd7 100755
--- a/dartwrf/obs/obsseq.py
+++ b/dartwrf/obs/obsseq.py
@@ -87,7 +87,7 @@ class ObsRecord(pd.DataFrame):
         """Retrieve H(x_prior) for all ensemble members
 
         Returns:
-            np.array (n_obs, n_ens)
+            np.array (n_obs, ensemble_size)
         """
         return self._get_model_Hx('prior')
 
@@ -95,7 +95,7 @@ class ObsRecord(pd.DataFrame):
         """Retrieve H(x_posterior) for all ensemble members
 
         Returns:
-            np.array (n_obs, n_ens)
+            np.array (n_obs, ensemble_size)
         """
         return self._get_model_Hx('posterior')
 
diff --git a/dartwrf/obs/run_obs_diag.py b/dartwrf/obs/run_obs_diag.py
index fa1e78e..2546438 100644
--- a/dartwrf/obs/run_obs_diag.py
+++ b/dartwrf/obs/run_obs_diag.py
@@ -9,7 +9,7 @@ rundir_program = '/home/fs71386/lkugler/data/run_DART/'
 def prepare(obserr_iszero='.true.'):
     copy(cluster.scriptsdir+'/../templates/input.eval.nml',
             rundir_program+'/input.nml')
-    sed_inplace(rundir_program+'/input.nml', '<n_ens>', str(int(exp.n_ens)))
+    sed_inplace(rundir_program+'/input.nml', '<n_ens>', str(int(exp.ensemble_size)))
     sed_inplace(rundir_program+'/input.nml', '<zero_error_obs>', obserr_iszero)
     sed_inplace(rundir_program+'/input.nml', '<horiz_dist_only>', '.false.')  # dummy
     sed_inplace(rundir_program+'/input.nml', '<vert_norm_hgt>', '5000.0')  # dummy
diff --git a/dartwrf/prep_IC_prior.py b/dartwrf/prep_IC_prior.py
index b91c823..5c84794 100755
--- a/dartwrf/prep_IC_prior.py
+++ b/dartwrf/prep_IC_prior.py
@@ -23,7 +23,7 @@ def create_wrfrst_in_WRF_rundir(time: dt.datetime, prior_init_time: dt.datetime,
     """Copy WRF restart files to run_WRF directory 
     These files will be used as initial conditions for the next WRF run
     """
-    for iens in range(1, exp.n_ens+1):
+    for iens in range(1, exp.ensemble_size+1):
         clean_wrfdir(cluster.wrf_rundir(iens))
     
         prior_wrfrst = prior_path_exp + prior_init_time.strftime('/%Y-%m-%d_%H:%M/') \
@@ -48,7 +48,7 @@ def create_updated_wrfinput_from_wrfout(time: dt.datetime, prior_init_time: dt.d
     
     """
     print('writing updated wrfout to WRF run directory as wrfinput')
-    for iens in range(1, exp.n_ens+1):
+    for iens in range(1, exp.ensemble_size+1):
         prior_wrfout = prior_path_exp + prior_init_time.strftime('/%Y-%m-%d_%H:%M/') \
                        +str(iens)+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
         new_start_wrfinput = cluster.wrf_rundir(iens) + '/wrfinput_d01' 
diff --git a/dartwrf/prepare_namelist.py b/dartwrf/prepare_namelist.py
index cffbef8..025a9b5 100755
--- a/dartwrf/prepare_namelist.py
+++ b/dartwrf/prepare_namelist.py
@@ -1,85 +1,34 @@
 """Create namelist.input files
 
 Usage:
-prepare_namelist.py <begin> <end> <intv> [--radt=<minutes>] [--restart=<flag>] [--restart_interval=<minutes>]
+prepare_namelist.py <config> <begin> <end> <intv> [--radt=<minutes>] [--restart=<flag>] [--restart_interval=<minutes>]
 
 Options:
 --radt=<minutes>   		Radiation interval [default: 5]
 --restart=<flag> 		Restart flag (.true., .false.) [default: .false.]
 --restart_interval=<minutes>	Restart frequency [default: 720]
 """
-import os, sys, shutil, warnings
+import os, sys
 import datetime as dt
 from docopt import docopt
 
-from dartwrf.exp_config import exp
+from dartwrf.namelist_handler import WRF_namelist
 from dartwrf.server_config import cluster
-from dartwrf.utils import sed_inplace, copy, symlink, mkdir
-
-def run(iens, begin, end, hist_interval_s=5*60, radt=5, archive=True,
-        restart=False, restart_interval=720):
-    """Create a namelist.input file for each ensemble member
-
-    Args:
-        archive (bool): if True, write to archivedir of experiment
-            if False, write to WRF run directory
-        restart (str): fortran bool whether to use wrfinput or wrfrst
-        restart_interval (int): output frequency of wrfrst (minutes)
-
-    Returns
-        None
-    """
-    rundir = cluster.wrf_rundir(iens)
-    copy(cluster.namelist, rundir+'/namelist.input')
-
-    sed_inplace(rundir+'/namelist.input', '<dx>', str(int(exp.model_dx)))
-    #sed_inplace(rundir+'/namelist.input', '<timestep>', str(int(exp.timestep)))
-    sed_inplace(rundir+'/namelist.input', '<hist_interval_s>', str(int(hist_interval_s)))
-
-    sed_inplace(rundir+'/namelist.input', '<radt>', str(int(radt)))
-
-    rst_flag = '.true.' if restart else '.false.'
-    sed_inplace(rundir+'/namelist.input', '<restart>', rst_flag)
-    sed_inplace(rundir+'/namelist.input', '<restart_interval>', str(int(float(restart_interval))))
-
-    if archive:
-        archdir = cluster.archivedir+begin.strftime('/%Y-%m-%d_%H:%M/'+str(iens)+'/')
-        os.makedirs(archdir, exist_ok=True)
-    else:
-        archdir = './'
-    sed_inplace(rundir+'/namelist.input', '<archivedir>', archdir)
-
-    # set times
-    for k, v in {'<y1>': '%Y', '<m1>': '%m', '<d1>': '%d',
-                 '<HH1>': '%H', '<MM1>': '%M', '<SS1>': '%S'}.items():
-        sed_inplace(rundir+'/namelist.input', k, begin.strftime(v))
-    for k, v in {'<y2>': '%Y', '<m2>': '%m', '<d2>': '%d',
-                 '<HH2>': '%H', '<MM2>': '%M', '<SS2>': '%S'}.items():
-        sed_inplace(rundir+'/namelist.input', k, end.strftime(v))
-
-    print('saved', rundir+'/namelist.input')
-    
-
-    if archive:
-        init_dir = cluster.archivedir+begin.strftime('/%Y-%m-%d_%H:%M/')+str(iens)
-        os.makedirs(init_dir, exist_ok=True)
-        copy(rundir+'/namelist.input', init_dir+'/namelist.input')
-        print('archived', init_dir+'/namelist.input')
-        
-        if not restart:
-            wrfin_old = rundir+'/wrfinput_d01'
-            wrfin_arch = init_dir+'/wrfinput_d01'
-            copy(wrfin_old, wrfin_arch)
-            print('archived', wrfin_arch)
+from dartwrf.utils import sed_inplace, copy, read_dict_from_pyfile
 
 
 if __name__ == '__main__':
-
+    
     args = docopt(__doc__)
+    archive = True
+    
+    f_config = args['<config>']
+    exp = read_dict_from_pyfile(f_config)
+    
     begin = dt.datetime.strptime(args['<begin>'], '%Y-%m-%d_%H:%M:%S')
     end = dt.datetime.strptime(args['<end>'], '%Y-%m-%d_%H:%M:%S')
-    intv = int(args['<intv>'])
-
+    hist_interval_s = int(args['<intv>'])
+    
     radt = int(args['--radt']) 
     if not radt:
         radt = '5'
@@ -87,13 +36,58 @@ if __name__ == '__main__':
     restart = False
     if args['--restart'] == '.true.':
         restart = True
+    rst_flag = '.true.' if restart else '.false.'
 
     restart_interval = args['--restart_interval']
     if not restart_interval:
         restart_interval = 720
+        
+    # replace these variables in the namelist
+    replace_dict = {
+    'time_control': {
+        # time start
+        '<y1>': begin.strftime('%Y'),
+        '<m1>': begin.strftime('%m'),
+        '<d1>': begin.strftime('%d'),
+        '<HH1>': begin.strftime('%H'),
+        '<MM1>': begin.strftime('%M'),
+        '<SS1>': begin.strftime('%S'),
+        
+        # time end
+        '<y2>': end.strftime('%Y'),
+        '<m2>': end.strftime('%m'),
+        '<d2>': end.strftime('%d'),
+        '<HH2>': end.strftime('%H'),
+        '<MM2>': end.strftime('%M'),
+        '<SS2>': end.strftime('%S'),
+        
+        # other variables
+        '<dx>': str(int(exp.model_dx)),
+        '<hist_interval_s>': str(int(hist_interval_s)),
+        '<radt>': str(int(radt)),
+        '<restart>': rst_flag,
+        '<restart_interval>': str(int(float(restart_interval))),
+        },
+    }
+        
+    print('prepare namelists for all ens members',radt,restart,restart_interval)
+    for iens in range(1, exp.ensemble_size+1):
 
-    print('prepare namelists for all ens members',intv,radt,restart,restart_interval)
-    for iens in range(1, exp.n_ens+1):
+        nml = WRF_namelist()
+        nml.read(cluster.WRF_namelist_template)
+        
+        # replace parameters
+        for section, section_dict in replace_dict.items():
+            for key, value in section_dict.items():
+                nml.namelist[section][key] = value
+                
+        f_out = cluster.wrf_rundir_base +'/'+ exp.name + '/'+str(iens)+'/namelist.input'
+        nml.write(f_out)
+        print('saved', f_out)
 
-        run(iens, begin, end, hist_interval_s=intv, radt=radt, 
-            restart=restart, restart_interval=restart_interval)
+        
+        if archive:
+            archdir = cluster.archive_base+'/'+exp.name+begin.strftime('/%Y-%m-%d_%H:%M/'+str(iens)+'/')
+            os.makedirs(archdir, exist_ok=True)
+        else:
+            archdir = './'
\ No newline at end of file
diff --git a/dartwrf/prepare_wrfrundir.py b/dartwrf/prepare_wrfrundir.py
index cd85e64..081cec2 100755
--- a/dartwrf/prepare_wrfrundir.py
+++ b/dartwrf/prepare_wrfrundir.py
@@ -12,16 +12,18 @@ Returns:
 """
 import os, sys, shutil
 import datetime as dt
-
-from dartwrf.exp_config import exp
 from dartwrf.server_config import cluster
 
-from dartwrf.utils import symlink, link_contents, try_remove
+from dartwrf.utils import symlink, link_contents, try_remove, read_dict_from_pyfile
 from dartwrf import prepare_namelist
 
 if __name__ == '__main__':
+    
+    time = sys.argv[1]
+    exp = read_dict_from_pyfile(sys.argv[2])
+    
 
-    for iens in range(1, exp.n_ens+1):
+    for iens in range(1, exp.ensemble_size+1):
         rundir = cluster.wrf_rundir(iens)
         os.makedirs(rundir, exist_ok=True)
         link_contents(cluster.srcdir, rundir)
@@ -32,8 +34,9 @@ if __name__ == '__main__':
 
         # prepare input profiles
         try_remove(rundir+'/input_sounding')   # remove existing file
+        
         if hasattr(exp, 'input_profile'):
-            init_time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
+            init_time = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M')
             # prep namelist for ./ideal.exe
             prepare_namelist.run(iens, begin=init_time, end=init_time, archive=False) # time not important
             
diff --git a/dartwrf/run_ens.jet.sh b/dartwrf/run_ens.jet.sh
deleted file mode 100755
index de21234..0000000
--- a/dartwrf/run_ens.jet.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-<cluster.wrf_modules>
-export SLURM_STEP_GRES=none
-
-echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
-EXPNAME=<exp.expname> 
-MAINDIR=<cluster.wrf_rundir_base> 
-
-IENS=$SLURM_ARRAY_TASK_ID
-RUNDIR=$MAINDIR/$EXPNAME/$IENS
-echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
-cd $RUNDIR
-rm -rf rsl.out.0*
-echo "mpirun -np <exp.np_WRF> ./wrf.exe"
-mpirun -np <exp.np_WRF> ./wrf.exe
-
-
-# error checking
-line=`tail -n 2 rsl.out.0000`
-if [[ $line == *"SUCCESS COMPLETE WRF"* ]]; 
-then 
-   echo $RUNDIR 'SUCCESS COMPLETE WRF'
-else  
-   echo $RUNDIR $line
-   exit 1
-fi
diff --git a/dartwrf/run_ens.vsc.gfortran.sh b/dartwrf/run_ens.vsc.gfortran.sh
deleted file mode 100755
index 48dab4c..0000000
--- a/dartwrf/run_ens.vsc.gfortran.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-
-module purge
-#module load intel-mpi/2019.6 intel/19.1.0 netcdf-fortran/4.4.5-intel-19.0.5.281-qye4cqn zlib/1.2.11-intel-19.1.0.166-hs6m2qh  hdf5/1.10.5-intel-19.0.5.281-qyzojtm netcdf/4.7.0-intel-19.0.5.281-75t52g6
-module load gcc/12.2.0-gcc-8.5.0-aal4zp2 openmpi/4.1.4-gcc-12.2.0-khtxitv
-export SLURM_STEP_GRES=none
-
-##  $SLURM_ARRAY_TASK_ID
-echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
-EXPNAME=<exp.expname> 
-
-MAINDIR=<cluster.wrf_rundir_base> 
-#pinning=(0-11 12-23 24-35 36-47)
-pinning=(0,1,2,3,4,5,6,7,8,9,10,11 12,13,14,15,16,17,18,19,20,21,22,23 24,25,26,27,28,29,30,31,32,33,34,35 36,37,38,39,40,41,42,43,44,45,46,47)
-
-for ((n=1; n<=4; n++))
-do
-   IENS="$(((($SLURM_ARRAY_TASK_ID - 1)* 4) + $n))"
-   RUNDIR=$MAINDIR/$EXPNAME/$IENS
-   echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
-   cd $RUNDIR
-   rm -rf rsl.out.0* 
-   echo "mpirun -np 12  --cpu-set ${pinning[$n-1]} /home/fs71386/lkugler/run_container.sh python.gcc9.5.0.vsc4.sif ./wrf.exe"
-   mpirun -np 12  --cpu-set ${pinning[$n-1]} /home/fs71386/lkugler/run_container.sh python.gcc9.5.0.vsc4.sif ./wrf.exe &
-   cd ../
-done
-wait
-
-# error checking
-for ((n=1; n<=4; n++))
-do
-   IENS="$(((($SLURM_ARRAY_TASK_ID - 1)* 4) + $n))"
-   RUNDIR=$MAINDIR/$EXPNAME/$IENS
-   cd $RUNDIR
-   line=`tail -n 1 rsl.out.0000`
-   if [[ $line == *"SUCCESS COMPLETE WRF"* ]]; 
-   then 
-      echo $RUNDIR 'SUCCESS COMPLETE WRF'
-   else  
-      echo $RUNDIR $line
-      exit 1
-   fi
-done
diff --git a/dartwrf/run_ens.vsc.mem_0096.sh b/dartwrf/run_ens.vsc.mem_0096.sh
deleted file mode 100755
index 40904ff..0000000
--- a/dartwrf/run_ens.vsc.mem_0096.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-
-module purge
-module load intel-mpi/2019.6 intel/19.1.0 netcdf-fortran/4.4.5-intel-19.0.5.281-qye4cqn zlib/1.2.11-intel-19.1.0.166-hs6m2qh  hdf5/1.10.5-intel-19.0.5.281-qyzojtm netcdf/4.7.0-intel-19.0.5.281-75t52g6
-export I_MPI_DEBUG=4
-export OMP_NUM_THREADS=1
-mem_per_task=10G
-cpu_per_task=12
-
-echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
-EXPNAME=<exp.expname> 
-
-MAINDIR=<cluster.wrf_rundir_base> 
-pinning=(0-11 12-23 24-35 36-47)
-
-mytasks=4
-for n in `seq 1 $mytasks`
-do
-   IENS="$(((($SLURM_ARRAY_TASK_ID - 1)* 4) + $n))"  # ensemble number (5,6,7,8 for job array element 2)
-   RUNDIR=$MAINDIR/$EXPNAME/$IENS
-   echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
-   cd $RUNDIR
-   rm -rf rsl.out.0* 
-   echo "srun --mem=$mem_per_task --cpus-per-task=$cpu_per_task --cpu_bind=map_cpu:${pinning[$n-1]} --ntasks=1 ./wrf.exe &"
-   srun --mem=$mem_per_task --cpus-per-task=$cpu_per_task --cpu_bind=map_cpu:${pinning[$n-1]} --ntasks=1 ./wrf.exe &
-   cd ../
-done
-wait
-
-# error checking
-for ((n=1; n<=4; n++))
-do
-   IENS="$(((($SLURM_ARRAY_TASK_ID - 1)* 4) + $n))"
-   RUNDIR=$MAINDIR/$EXPNAME/$IENS
-   cd $RUNDIR
-   line=`tail -n 1 rsl.out.0000`
-   if [[ $line == *"SUCCESS COMPLETE WRF"* ]]; 
-   then 
-      echo $RUNDIR 'SUCCESS COMPLETE WRF'
-   else  
-      echo $RUNDIR $line
-      exit 1
-   fi
-done
diff --git a/dartwrf/run_ens.vsc.sh b/dartwrf/run_ens.vsc.sh
deleted file mode 100755
index 8c2dc1e..0000000
--- a/dartwrf/run_ens.vsc.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-
-module purge
-module load intel-mpi/2019.6 intel/19.1.0 netcdf-fortran/4.4.5-intel-19.0.5.281-qye4cqn zlib/1.2.11-intel-19.1.0.166-hs6m2qh  hdf5/1.10.5-intel-19.0.5.281-qyzojtm netcdf/4.7.0-intel-19.0.5.281-75t52g6
-export SLURM_STEP_GRES=none
-
-##  $SLURM_ARRAY_TASK_ID
-echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
-EXPNAME=<exp.expname> 
-
-MAINDIR=<cluster.wrf_rundir_base> 
-pinning=(0-11 12-23 24-35 36-47)
-
-for ((n=1; n<=4; n++))
-do
-   IENS="$(((($SLURM_ARRAY_TASK_ID - 1)* 4) + $n))"
-   RUNDIR=$MAINDIR/$EXPNAME/$IENS
-   echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
-   cd $RUNDIR
-   rm -rf rsl.out.0* 
-   echo "mpirun -genv I_MPI_PIN_PROCESSOR_LIST="${pinning[$n-1]}" -np 12 ./wrf.exe"
-   mpirun -genv I_MPI_PIN_PROCESSOR_LIST=${pinning[$n-1]} -np 12 ./wrf.exe &
-   cd ../
-done
-wait
-
-# error checking
-for ((n=1; n<=4; n++))
-do
-   IENS="$(((($SLURM_ARRAY_TASK_ID - 1)* 4) + $n))"
-   RUNDIR=$MAINDIR/$EXPNAME/$IENS
-   cd $RUNDIR
-   line=`tail -n 1 rsl.out.0000`
-   if [[ $line == *"SUCCESS COMPLETE WRF"* ]]; 
-   then 
-      echo $RUNDIR 'SUCCESS COMPLETE WRF'
-   else  
-      echo $RUNDIR $line
-      exit 1
-   fi
-done
diff --git a/dartwrf/run_ens.vsc.single.sh b/dartwrf/run_ens.vsc.single.sh
deleted file mode 100755
index 6997f07..0000000
--- a/dartwrf/run_ens.vsc.single.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-
-module purge
-module load intel-mpi/2019.6 intel/19.1.0 netcdf-fortran/4.4.5-intel-19.0.5.281-qye4cqn zlib/1.2.11-intel-19.1.0.166-hs6m2qh  hdf5/1.10.5-intel-19.0.5.281-qyzojtm netcdf/4.7.0-intel-19.0.5.281-75t52g6
-
-##  $SLURM_ARRAY_TASK_ID
-echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
-EXPNAME=<exp.expname> 
-
-MAINDIR=<cluster.wrf_rundir_base> 
-IENS=$SLURM_ARRAY_TASK_ID
-RUNDIR=$MAINDIR/$EXPNAME/$IENS
-echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
-cd $RUNDIR
-rm -rf rsl.out.0* 
-mpirun -genv I_MPI_PIN_PROCESSOR_LIST=0-19 -np 20 ./wrf.exe
diff --git a/dartwrf/server_config.py b/dartwrf/server_config.py
index ebb475d..c787988 100755
--- a/dartwrf/server_config.py
+++ b/dartwrf/server_config.py
@@ -1,55 +1,56 @@
 """Cluster configuration file, see docstring of ClusterConfig class in dartwrf/utils.py for details"""
-from dartwrf import utils
-from dartwrf.exp_config import exp
-
-cluster = utils.ClusterConfig(exp)
-cluster.name = 'jet'
-cluster.max_nproc = 20
-cluster.use_slurm = True
-cluster.size_WRF_jobarray = exp.n_ens
-cluster.np_WRF = 16
-
-# binaries
-cluster.python = '/jetfs/home/lkugler/miniforge3/envs/verif/bin/python'
-cluster.ncks = '/jetfs/spack/opt/spack/linux-rhel8-skylake_avx512/intel-2021.7.1/nco-5.1.0-izrhxv24jqco5epjhf5ledsqwanojc5m/bin/ncks'
-cluster.ideal = '/jetfs/home/lkugler/data/compile/bin/ideal-v4.6.0_20250210_StS.exe'
-cluster.wrfexe = '/jetfs/home/lkugler/data/compile/bin/wrf-v4.6.0_20250210_StS.exe'
-cluster.dart_modules = 'module purge; module load rttov/v13.2-gcc-8.5.0'
-cluster.wrf_modules = """module purge; module load netcdf-fortran/4.5.3-intel-2021.7.1-27ldrnt"""
-
-# paths for data output
-cluster.wrf_rundir_base = '/jetfs/home/lkugler/data/run_WRF/'  # path for temporary files
-cluster.dart_rundir_base = '/jetfs/home/lkugler/data/run_DART/'  # path for temporary files
-cluster.archive_base = '/jetfs/home/lkugler/data/sim_archive/'
-
-# paths used as input
-cluster.srcdir = '/jetfs/home/lkugler/data/compile/WRF-4.3/run'
-cluster.dart_srcdir = '/jetfs/home/lkugler/data/compile/DART/DART-10.8.3_10pct/models/wrf/work/'
-cluster.rttov_srcdir = '/jetfs/home/lkugler/data/compile/RTTOV13/rtcoef_rttov13/'
-cluster.dartwrf_dir = '/jetfs/home/lkugler/DART-WRF/'
-
-# other inputs
-cluster.geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.250m_1600x1600' 
-cluster.geo_em_forecast = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200'
-#cluster.obs_impact_filename = cluster.dartwrf_dir+'/templates/impactfactor_T.txt'
-cluster.namelist = cluster.dartwrf_dir+'/config/templates/namelist.input_nat_exact'
-cluster.rttov_nml = cluster.dartwrf_dir + "/config/templates/obs_def_rttov.VIS+WV.nml"
-cluster.run_WRF = '/jetfs/home/lkugler/DART-WRF/dartwrf/run_ens.jet.sh'
-
-cluster.slurm_cfg = {"account": "lkugler", "partition": "all",  
-                 "ntasks": "1", "ntasks-per-core": "1", "mem": "30G",
-                 "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"}
-
-# WRF file format, will only change if WRF changes
-cluster.wrfout_format = '/wrfout_d01_%Y-%m-%d_%H:%M:%S'
-
-# pattern for the init_time folder in sim_archive
-cluster.pattern_init_time = "/%Y-%m-%d_%H:%M/"
-
-# how an obs_seq.out file is archived
-cluster.pattern_obs_seq_out = cluster.archivedir + \
-    "/diagnostics/%Y-%m-%d_%H:%M_obs_seq.out"  
+from re import A
+from dartwrf.utils import ClusterConfig
+
+cluster = ClusterConfig(
+    max_nproc = 20,
+    max_nproc_for_each_ensemble_member = 16,
+    use_slurm = True,
+
+    # binaries
+    python = '/jetfs/home/lkugler/miniforge3/envs/verif/bin/python',
+    ncks = '/jetfs/spack/opt/spack/linux-rhel8-skylake_avx512/intel-2021.7.1/nco-5.1.0-izrhxv24jqco5epjhf5ledsqwanojc5m/bin/ncks',
+    ideal = '/jetfs/home/lkugler/data/compile/bin/ideal-v4.6.0_20250210_StS.exe',
+    wrfexe = '/jetfs/home/lkugler/data/compile/bin/wrf-v4.6.0_20250210_StS.exe',
+    dart_modules = 'module purge; module load rttov/v13.2-gcc-8.5.0',
+    wrf_modules = """module purge; module load netcdf-fortran/4.5.3-intel-2021.7.1-27ldrnt""",
+
+    # paths for data output
+    wrf_rundir_base = '/jetfs/home/lkugler/data/run_WRF/',  # path for temporary files
+    dart_rundir_base = '/jetfs/home/lkugler/data/run_DART/',  # path for temporary files
+    archive_base = '/jetfs/home/lkugler/data/sim_archive/',
+
+    # paths used as input
+    srcdir = '/jetfs/home/lkugler/data/compile/WRF-4.3/run',
+    dart_srcdir = '/jetfs/home/lkugler/data/compile/DART/DART-10.8.3_10pct/models/wrf/work/',
+    rttov_srcdir = '/jetfs/home/lkugler/data/compile/RTTOV13/rtcoef_rttov13/',
     
-# how an obs_seq.final file is archived
-cluster.pattern_obs_seq_final = cluster.archivedir + \
-    "/diagnostics/%Y-%m-%d_%H:%M_obs_seq.final"  
+    dartwrf_dir_dev = '/jetfs/home/lkugler/DART-WRF/',
+    namelist = '/jetfs/home/lkugler/DART-WRF/config/templates/namelist.input_nat_exact',
+    rttov_nml = "/jetfs/home/lkugler/DART-WRF/config/templates/obs_def_rttov.VIS+WV.nml",
+    
+    # other inputs
+    geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.250m_1600x1600',
+    geo_em_forecast = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200',
+    #obs_impact_filename = dartwrf_dir_dev+'/templates/impactfactor_T.txt'
+
+    WRF_exe_template = '/jetfs/home/lkugler/DART-WRF/config/templates/run_WRF.jet.sh',
+    WRF_ideal_template = '/jetfs/home/lkugler/DART-WRF/config/templates/run_WRF_ideal.sh',
+
+    slurm_cfg = {"account": "lkugler", "partition": "all",  
+                    "ntasks": "1", "ntasks-per-core": "1", "mem": "30G",
+                    "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"},
+
+    # WRF file format, will only change if WRF changes
+    wrfout_format = '/wrfout_d01_%Y-%m-%d_%H:%M:%S',
+
+    # pattern for the init_time folder in sim_archive
+    pattern_init_time = "/%Y-%m-%d_%H:%M/",
+    
+    # how an obs_seq.out file is archived
+    pattern_obs_seq_out = "<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.out",
+        
+    # how an obs_seq.final file is archived
+    pattern_obs_seq_final = "<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.final",  
+
+    )
\ No newline at end of file
diff --git a/dartwrf/update_IC.py b/dartwrf/update_IC.py
index 4c66a47..883f76a 100755
--- a/dartwrf/update_IC.py
+++ b/dartwrf/update_IC.py
@@ -24,7 +24,7 @@ def update_initials_in_WRF_rundir(time):
     update_vars = ['Times',]
     update_vars.extend(exp.update_vars)
 
-    for iens in range(1, exp.n_ens+1):
+    for iens in range(1, exp.ensemble_size+1):
         ic_file = cluster.wrf_rundir(iens) + time.strftime(initials_fmt)
         if not os.path.isfile(ic_file):
             raise IOError(ic_file+' does not exist, updating impossible!')
diff --git a/dartwrf/utils.py b/dartwrf/utils.py
index 1205441..fad027b 100755
--- a/dartwrf/utils.py
+++ b/dartwrf/utils.py
@@ -15,56 +15,20 @@ import datetime as dt
 import re
 import tempfile
 import pickle
+import importlib.util
 
+userhome = os.path.expanduser('~')
 
-class Experiment(object):
-    """Collection of variables which define the experiment
-
-    Attributes:
-        expname (str): Name of the experiment
-        model_dx (int): WRF grid spacing in meters
-        n_ens (int): Ensemble size
-
-        nature_wrfout_pattern (str): Path to the nature run, where we take observations from; 
-            the path can contain wildcards (*,?), e.g. '/jetfs/exp1/*/1/wrfout_d01_%Y-%m-%d_%H:%M:%S'
-        input_profile (str): Path to WRF idealized input profiles; 
-            e.g. '/data/initial_profiles/wrf/ens/raso.fc.<iens>.wrfprof';
-            <iens> is replaced by 001-040 for a 40-member ensemble
-
-        update_vars (list of str): Variables which will be updated after assimilation (update_IC.py)
-            e.g. ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR',]
-
-        observations (list of dict): Dictionaries which define an observation;
-            keys: 
-            `error_generate`: measurement error standard-deviation;
-            `error_assimilate`: assigned observation error std-dev;
-            `heights`: list of integers at which observations are taken;
-            `loc_horiz_km`: float of horizontal localization half-width in km;
-            `loc_vert_km`: float of vertical localization half-width in km;
-
-        use_existing_obsseq (str, False): Path to existing obs_seq.out file (False: generate new one);
-            time string is replaced by actual time: /path/%Y-%m-%d_%H:%M_obs_seq.out
-
-        dart_nml (dict): updates to the default input.nml of DART (in dart_srcdir)
-            keys are namelist section headers (e.g. &filter_nml)
-            values are dictionaries of parameters and values (e.g. dict(ens_size=exp.n_ens,))
-
-    """
-
-    def __init__(self):
-        pass
-
+def import_from_path(module_name, file_path):
+    spec = importlib.util.spec_from_file_location(module_name, file_path)
+    module = importlib.util.module_from_spec(spec) # type: ignore
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module) # type: ignore
+    return module
 
 class ClusterConfig(object):
     """Collection of variables regarding the cluster configuration
 
-    Configuration name docs
-
-    When coding, use configuration settings like this:
-    $ from exp_config import exp
-    $ from cluster_config import cluster
-    $ path = cluster.archivedir
-
     Attributes:
         name (str): Name of the cluster
         max_nproc (int): Maximum number of processors that can be used
@@ -81,10 +45,6 @@ class ClusterConfig(object):
         dart_modules (str): Command to load modules before running DART 
         wrf_modules (str): Command to load modules before running WRF
 
-        wrf_rundir_base (str): Path to temporary files for WRF
-        dart_rundir_base (str): Path to temporary files for DART
-        archive_base (str): Path to long-time output storage
-
         srcdir (str): Path to where WRF has been compiled, including the 'run' folder of WRF, e.g. /home/WRF-4.3/run
         dart_srcdir (str): Path to DART compile directory, e.g. /home/DART-9.11.9/models/wrf/work
         rttov_srcdir (str): Path to RTTOV compile directory, e.g. /home/RTTOV13/rtcoef_rttov13/
@@ -103,48 +63,46 @@ class ClusterConfig(object):
 
     """
 
-    def __init__(self, exp):
-        self.exp = exp  # to access the experiment config in here
-
+    def __init__(self, 
+                 max_nproc: int, 
+                 max_nproc_for_each_ensemble_member: int,
+                 WRF_ideal_template: str, 
+                 WRF_exe_template: str, 
+                 archive_base: str,
+                 wrf_rundir_base: str,
+                 dart_rundir_base: str,
+                 dartwrf_dir_dev: str,
+                 WRF_namelist_template: str,
+                 **kwargs):
         # defaults
+        # these defaults may be overwritten by values passed via **kwargs
         self.dart_modules = ''
         self.wrf_modules = ''
         self.size_jobarray = '1'
-
-    @property
-    def archivedir(self):
-        """Path to the directory where data for the experiment is stored
-
-        Example:
-            `/users/abcd/data/sim_archive/experiment1/`
-        """
-        return self.archive_base+'/'+self.exp.expname+'/'
-
-    @property
-    def scripts_rundir(self):
-        """Path to the directory where the DART-WRF scripts are executed
-
-        Note:
-            If you want to execute scripts from the folder where you develop code, use `self.dartwrf_dir` (not sure if this works)
-            If you want to execute the code from a different place ('research'), then use `self.archivedir+'/DART-WRF/'`
-
-        Example:
-            `/user/data/sim_archive/DART-WRF/dartwrf/`
-        """
-        return self.archivedir+'/DART-WRF/dartwrf/'
-
-    @property
-    def dart_rundir(self):
-        """Path to the directory where DART programs will run
-        Includes the experiment name
-        """
-        return self.dart_rundir_base+'/'+self.exp.expname+'/'
-
-    def wrf_rundir(self, iens):
-        """Path to the directory where an ensemble member will run WRF
-        Includes the experiment name and the ensemble member index
-        """
-        return self.wrf_rundir_base+'/'+self.exp.expname+'/'+str(iens)+'/'
+        self.use_slurm = False
+        self.slurm_cfg = {}
+        self.log_dir = './'
+        self.slurm_scripts_dir = './'
+        self.archive_base = archive_base
+        self.wrf_rundir_base = wrf_rundir_base        
+        self.dart_rundir_base = dart_rundir_base
+        self.dartwrf_dir_dev = dartwrf_dir_dev
+        self.WRF_namelist_template = WRF_namelist_template
+        self.python = 'python'
+        self.pattern_obs_seq_out = '<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.out'
+        self.pattern_obs_seq_final = '<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.final'
+        
+        self.max_nproc = max_nproc
+        self.max_nproc_for_each_ensemble_member = max_nproc_for_each_ensemble_member
+        self.WRF_ideal_template = WRF_ideal_template
+        self.WRF_exe_template = WRF_exe_template
+        
+        # user defined
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+
+    def __str__(self):
+        return f'ClusterConfig: {self.__dict__}'
 
     def run_job(self, cmd, jobname='', cfg_update=dict(), depends_on=None):
         """Run scripts in a shell
@@ -174,7 +132,100 @@ class ClusterConfig(object):
                 raise Exception('Error running command >>> '+cmd)
 
 
-userhome = os.path.expanduser('~')
+class Config(object):
+    """Collection of variables which define the experiment
+
+    Attributes:
+        expname (str): Name of the experiment
+        model_dx (int): WRF grid spacing in meters
+        ensemble_size (int): Ensemble size
+
+        nature_wrfout_pattern (str): Path to the nature run WRF files, from which observations can be generated; 
+            the path can contain wildcards (*,?), e.g. '/jetfs/exp1/*/1/wrfout_d01_%Y-%m-%d_%H:%M:%S'
+        
+        input_profile (str): Path to sounding profiles as initial condition (see WRF ideal guide)
+            e.g. '/data/initial_profiles/wrf/ens/raso.fc.<iens>.wrfprof';
+            <iens> is replaced by 001-040 for a 40-member ensemble
+
+        update_vars (list of str): Variables which will be updated after assimilation (update_IC.py)
+            e.g. ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR',]
+
+        observations (list of dict): Dictionaries which define an observation;
+            keys: 
+            `error_generate`: measurement error standard-deviation;
+            `error_assimilate`: assigned observation error std-dev;
+            `heights`: list of integers at which observations are taken;
+            `loc_horiz_km`: float of horizontal localization half-width in km;
+            `loc_vert_km`: float of vertical localization half-width in km;
+
+        use_existing_obsseq (str, False): Path to existing obs_seq.out file (False: generate new one);
+            time string is replaced by actual time: /path/%Y-%m-%d_%H:%M_obs_seq.out
+
+        dart_nml (dict): updates to the default input.nml of DART (in dart_srcdir)
+            keys are namelist section headers (e.g. &filter_nml)
+            values are dictionaries of parameters and values (e.g. dict(ens_size=exp.ensemble_size,))
+
+        wrf_rundir_base (str): Path to temporary files for WRF
+        dart_rundir_base (str): Path to temporary files for DART
+        archive_base (str): Path to long-time output storage
+
+    """
+
+    def __init__(self, name: str, model_dx: int, ensemble_size: int, 
+                 update_vars: list=[], dart_nml: dict={}, 
+                 use_existing_obsseq: bool | str = False,
+                 input_profile: bool | str = False,
+                 nature_wrfout_pattern: bool | str = False,
+                 **kwargs):
+        
+        # defining the compulsory variables
+        self.name = name
+        self.model_dx = model_dx
+        self.ensemble_size = ensemble_size
+        self.update_vars = update_vars
+        self.dart_nml = dart_nml
+        
+        # optional
+        self.use_existing_obsseq = use_existing_obsseq
+        self.input_profile = input_profile
+        self.nature_wrfout_pattern = nature_wrfout_pattern
+        
+        if not update_vars:
+            warnings.warn('No `update_vars` defined, not updating any variables after assimilation!')
+            
+        if not dart_nml:
+            warnings.warn('No `dart_nml` defined, using default DART namelist!')
+            
+        if not isinstance(use_existing_obsseq, str):
+            if use_existing_obsseq != False:
+                raise ValueError('`use_existing_obsseq` must be a string or False, but is', use_existing_obsseq)
+        
+        if isinstance(use_existing_obsseq, str):
+            print('Using existing observation sequence', use_existing_obsseq)
+
+        # user defined
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+
+
+
+def write_dict_to_pyfile(d: dict, filename: str):
+    """Write a dictionary to a python file"""
+    os.makedirs(os.path.dirname(filename), exist_ok=True)
+    with open(filename, 'w') as f:
+        for key, value in d.items():
+            f.write(key+' = '+str(value)+'\n')
+
+def read_dict_from_pyfile(filename: str) -> Config:
+    """Read a dictionary from a python file and
+    return it as a Config object
+    """
+    with open(filename, 'r') as f:
+        d = {}
+        for line in f:
+            key, value = line.split('=')
+            d[key.strip()] = value.strip()
+    return Config(**d)
 
 
 def shell(args, pythonpath=None):
diff --git a/dartwrf/workflows.py b/dartwrf/workflows.py
index a90dc61..f4f1b97 100644
--- a/dartwrf/workflows.py
+++ b/dartwrf/workflows.py
@@ -10,12 +10,15 @@ import sys
 import shutil
 import warnings
 import datetime as dt
+import random
+import string
 
-from dartwrf.utils import script_to_str, shell
+from dartwrf.utils import script_to_str, shell, import_from_path, read_dict_from_pyfile, write_dict_to_pyfile
+from dartwrf.utils import Config
 
 
 class WorkFlows(object):
-    def __init__(self, exp_config='cfg.py', server_config='server.py'):
+    def __init__(self, expname: str, server_config: str='server_config.py'):
         """Set up the experiment folder in `archivedir`.
 
         1. Copy the selected config files
@@ -34,91 +37,105 @@ class WorkFlows(object):
             exp (obj): experiment configuration as defined in exp_config file
         """
 
-        def _copy_dartwrf_to_archive(dirs_exist_ok=False):
-            # Copy DART-WRF/dartwrf/ to self.cluster.archivedir folder
-            # copy the dartwrf python package
-            shutil.copytree(self.cluster.dartwrf_dir+'/dartwrf/',
-                            self.cluster.archivedir+'/DART-WRF/dartwrf/',
-                            ignore=shutil.ignore_patterns('*.git',),
-                            dirs_exist_ok=dirs_exist_ok)
-            print('>>> DART-WRF scripts:          "' +
-                  self.cluster.archivedir+'/DART-WRF/"')
-
-            # copy this script for reproducibility
-            script_executing_this_process = sys.argv[0]
-            shutil.copy(script_executing_this_process,
-                        self.cluster.archivedir+'/DART-WRF/')
-
-        def _save_config_to(config_fname, destination):
+        def _save_config_to(src: str, dst: str):
             try:
-                shutil.copyfile('config/'+config_fname, destination)
+                shutil.copyfile(src, dst)
             except shutil.SameFileError:
                 pass
 
         print('------------------------------------------------------')
-        print('>>> Starting experiment ... ')
-        print('>>> Experiment configuration:  "./config/'+exp_config+'" ')
-        print('>>> Server configuration:      "./config/'+server_config+'"')
-
-        # 1
-        # copy the selected config files (arguments to Workflows(...)) to the scripts directory
-        # ./DART-WRF/dartwrf/server_config.py and ./DART-WRF/dartwrf/exp_config.py
-        # these config files will be used later, and no others!
-        # usually /home/DART-WRF/dartwrf/
-        original_scripts_dir = '/'.join(__file__.split('/')[:-1])
-        _save_config_to(server_config, original_scripts_dir +
-                        '/server_config.py')
-        _save_config_to(exp_config, original_scripts_dir+'/exp_config.py')
-
-        # 2
-        # import the configuration files from where we copied them just before
-        sys.path.append(original_scripts_dir)
-        from server_config import cluster
-        self.cluster = cluster
-        from exp_config import exp
-        self.exp = exp
-
-        print(" ")
-        print('>>> Main data folder:          "'+self.cluster.archivedir+'"')
-        print('>>> Temporary DART folder:     "'+self.cluster.dart_rundir+'"')
-        print('>>> Temporary WRF folder:      "' +
-              self.cluster.wrf_rundir_base+'"')
-
-        # 3
-        # Set paths and backup scripts
-        self.cluster.log_dir = self.cluster.archivedir+'/logs/'
+        self.expname = expname
+        print('>>> Experiment name:           "'+self.expname+'"')
+        print('>>> Server configuration:      "'+server_config+'"')
+
+        self.cluster = import_from_path('config', server_config).cluster
+
+        # some helpful variables
+        self.archivedir = self.cluster.archive_base+'/'+self.expname+'/'
+        dart_rundir = self.cluster.dart_rundir_base+'/'+self.expname+'/'
+        self.scripts_rundir = self.archivedir+'/DART-WRF/dartwrf/'
+        pattern_obs_seq_out = self.cluster.pattern_obs_seq_out.replace(
+            '<archivedir>', self.archivedir)
+        pattern_obs_seq_final = self.cluster.pattern_obs_seq_final.replace(
+            '<archivedir>', self.archivedir)
+        
+        # collect all variables to put into the config file
+        self.configure_defaults = {
+            'expname': self.expname,
+            'archivedir': self.archivedir,
+            'dart_rundir': dart_rundir,
+            'scripts_rundir': self.scripts_rundir,
+            'pattern_obs_seq_out': pattern_obs_seq_out,
+            'pattern_obs_seq_final': pattern_obs_seq_final,
+        }
+
+        self.f_cfg_base = self.archivedir + '/DART-WRF/configs/'
+        self.f_cfg_current = None
+
+        ############### ARCHIVE SCRIPTS AND CONFIGS
+        # Copy scripts and config files to `self.archivedir` folder
+        dirs_exist_ok = False
+        if os.path.exists(self.archivedir):
+            if input('The experiment name already exists! Overwrite existing experiment? (Y/n) ') in ['Y', 'y']:
+                dirs_exist_ok = True
+
+        shutil.copytree(self.cluster.dartwrf_dir_dev,
+                        self.archivedir+'/DART-WRF/',
+                        ignore=shutil.ignore_patterns('*.git','config/','__*','tests/'),
+                        dirs_exist_ok=dirs_exist_ok)
+        
+        # copy cluster config to /DART-WRF/dartwrf/
+        _save_config_to(server_config, self.scripts_rundir + '/server_config.py')
+        
         print(" ")
-        print('>>> Log-files:        "'+self.cluster.log_dir+'"')
+        print('>>> Running experiment in  "'+self.archivedir+'"')
+        self.cluster.log_dir = self.archivedir+'/logs/'
+
         if self.cluster.use_slurm:
-            self.cluster.slurm_scripts_dir = self.cluster.archivedir+'/slurm-scripts/'
-            print('>>> SLURM scripts:    "'+self.cluster.slurm_scripts_dir+'"')
+            self.cluster.slurm_scripts_dir = self.archivedir+'/slurm-scripts/'
+            
         print(" ")
-
-        # 4
-        # to be able to generate obs_seq.in files, we need a dictionary to convert obs kinds to numbers
-        # a) we read the obs kind definitions (obs_kind_mod.f90 from DART code)
-        # b) we generate a python file with this dictionary
-        import create_obskind_table
-        create_obskind_table.run(server_config)
-
-        # 5
-        # Copy scripts and config files to `self.cluster.archivedir` folder
-        try:
-            _copy_dartwrf_to_archive()
-        except FileExistsError as e:
-            if input('The experiment name already exists! Overwrite existing experiment? (Y/n) ') in ['Y', 'y']:
-                _copy_dartwrf_to_archive(dirs_exist_ok=True)
-            else:
-                raise e
+        print('>>> DART will run in       "'+dart_rundir+'"')
+        print('>>> WRF will run in        "'+self.cluster.wrf_rundir_base+'/'+self.expname+'"')
 
         # 6
         # we set the path from where python should import dartwrf modules
-        # every python command then imports DART-WRF from self.cluster.archivedir+'/DART-WRF/dartwrf/'
-        self.cluster.python = 'export PYTHONPATH=' + \
-            self.cluster.scripts_rundir+'/../; '+self.cluster.python
+        # every python command then imports DART-WRF from self.archivedir+'/DART-WRF/dartwrf/'
+        self.cluster.python = 'export PYTHONPATH=' +  \
+            self.scripts_rundir+'/../; '+self.cluster.python
         print('>>> DART-WRF experiment initialized. ')
         print('------------------------------------------------------')
 
+    def configure(self, **kwargs):
+        """Update the config in Experiment.cfg
+        because we can't just forward arguments to command line calls, 
+        we write a config file in a specified directory
+        """
+        # is there an already existing config?
+        if self.f_cfg_current is None:
+            # there is no config, we write a new one
+            cfg = Config(name=self.expname, 
+                         **self.configure_defaults,
+                         **kwargs) 
+            self.cfg = cfg  # this will be accessed by the module functions below
+            
+        else:
+            # there is already a config, we update it
+            
+            # read existing config
+            cfg = read_dict_from_pyfile(self.f_cfg_current)
+
+            # set attributes in existing object
+            for key, value in kwargs.items():
+                setattr(cfg, key, value)
+                
+        # finally, write cfg to file
+        # generate random string for filename
+        random_str = ''.join(random.choices(string.ascii_lowercase + string.digits, k=4))
+        self.f_cfg_current = self.f_cfg_base+'/cfg_'+random_str+'.py'
+        write_dict_to_pyfile(cfg.__dict__, self.f_cfg_current)
+
+
     def prepare_WRFrundir(self, init_time):
         """Prepare WRF run directories for all ensemble members
 
@@ -132,7 +149,7 @@ class WorkFlows(object):
         Returns:
             None
         """
-        cmd = 'python '+self.cluster.scripts_rundir + \
+        cmd = self.cluster.python+' '+self.scripts_rundir + \
             '/prepare_wrfrundir.py '+init_time.strftime('%Y-%m-%d_%H:%M')
         shell(cmd)
 
@@ -147,46 +164,13 @@ class WorkFlows(object):
         """
         times_str = ','.join([t.strftime('%Y-%m-%d_%H:%M') for t in times])
 
-        cmd = self.cluster.python+' '+self.cluster.scripts_rundir + \
+        cmd = self.cluster.python+' '+self.scripts_rundir + \
             '/obs/create_obsseq_out.py '+times_str
 
-        id = self.cluster.run_job(cmd, "obsgen-"+self.exp.expname,
+        id = self.cluster.run_job(cmd, "obsgen-"+self.expname,
                                   cfg_update={"ntasks": "20", "time": "30", "mem": "200G", "ntasks-per-node": "20"}, depends_on=[depends_on])
         return id
 
-    def run_ideal(self, depends_on=None):
-        """Run WRF's ideal.exe for every ensemble member
-
-        Args:
-            depends_on (str, optional): job ID of a previous job after which to run this job
-
-        Returns:
-            str: job ID of the submitted job
-        """
-        cmd = self.cluster.wrf_modules+"""
-    export SLURM_STEP_GRES=none
-    # run ideal.exe in parallel
-    for ((n=1; n<="""+str(self.exp.n_ens)+"""; n++))
-    do
-        rundir="""+self.cluster.wrf_rundir_base+'/'+self.exp.expname+"""/$n
-        echo $rundir
-        cd $rundir
-        mpirun -np 1 ./ideal.exe &
-    done
-    wait
-
-    # move log file to sim_archive
-    for ((n=1; n<="""+str(self.exp.n_ens)+"""; n++))
-    do
-        rundir="""+self.cluster.wrf_rundir_base+'/'+self.exp.expname+"""/$n
-        touch -a $rundir/rsl.out.0000  # create log file if it doesnt exist, to avoid error in mv if it doesnt exist
-        mv $rundir/rsl.out.0000 $rundir/rsl.out.input
-    done
-    """
-        id = self.cluster.run_job(cmd, "ideal-"+self.exp.expname, 
-                cfg_update={"ntasks": "40",  "ntasks-per-node": "40",
-                            "time": "30", "mem": "200G"}, depends_on=[depends_on])
-        return id
 
     def wrfinput_insert_wbubble(self, perturb=True, depends_on=None):
         """Inserts warm-bubble temperature perturbations into wrfinput files
@@ -204,13 +188,31 @@ class WorkFlows(object):
         pstr = ' '
         if perturb:
             pstr = ' perturb'
-        cmd = self.cluster.python+' '+self.cluster.scripts_rundir + \
+        cmd = self.cluster.python+' '+self.scripts_rundir + \
             '/create_wbubble_wrfinput.py'+pstr
 
         id = self.cluster.run_job(
-            cmd, "ins_wbub-"+self.exp.expname, cfg_update={"time": "5"}, depends_on=[depends_on])
+            cmd, "ins_wbub-"+self.expname, cfg_update={"time": "5"}, depends_on=[depends_on])
         return id
 
+    def run_ideal(self, depends_on=None):
+        """Run WRF's ideal.exe for every ensemble member
+
+        Args:
+            depends_on (str, optional): job ID of a previous job after which to run this job
+
+        Returns:
+            str: job ID of the submitted job
+        """
+        cmd = script_to_str(self.cluster.WRF_ideal_template
+                            ).replace('<expname>', self.expname
+                            ).replace('<wrf_rundir_base>', self.cluster.wrf_rundir_base
+                            ).replace('<wrf_modules>', self.cluster.wrf_modules,
+                            )
+        id = self.cluster.run_job(cmd, "ideal-"+self.expname, 
+                cfg_update={"ntasks": "1", "time": "30", "mem": "200G"}, depends_on=[depends_on])
+        return id
+    
     def run_ENS(self, begin, end, first_second=False,
                 input_is_restart=True, output_restart_interval=360, hist_interval_s=300,
                 depends_on=None):
@@ -228,45 +230,55 @@ class WorkFlows(object):
         Returns:
             str: job ID of the submitted job
         """
-
-        def prepare_WRF_inputfiles(begin, end, hist_interval_s=300, radt=1,
-                                   output_restart_interval=False, depends_on=None):
-
-            args = [self.cluster.python, self.cluster.scripts_rundir+'/prepare_namelist.py',
-                    begin.strftime(
-                        '%Y-%m-%d_%H:%M:%S'), end.strftime('%Y-%m-%d_%H:%M:%S'),
-                    str(hist_interval_s), '--radt='+str(radt), '--restart='+restart_flag,]
-
-            if output_restart_interval != False:
+        def _prepare_WRF_inputfiles(begin, end, hist_interval_s=300, radt=1,
+                                   output_restart_interval: int | None = None, 
+                                   depends_on=None):
+
+            args = [self.cluster.python, self.scripts_rundir+'/prepare_namelist.py',
+                    self.f_cfg_current,
+                    begin.strftime('%Y-%m-%d_%H:%M:%S'), 
+                    end.strftime('%Y-%m-%d_%H:%M:%S'),
+                    str(hist_interval_s), 
+                    '--radt='+str(radt), 
+                    '--restart='+restart_flag,]
+
+            if output_restart_interval:
                 args.append('--restart_interval=' +
                             str(int(float(output_restart_interval))))
 
             return self.cluster.run_job(' '.join(args), "preWRF",
                                         cfg_update=dict(time="2"), depends_on=[depends_on])
+            
+        ###########################################
+        # SLURM configuration for WRF
+        cfg_wrf = {"array": "1-"+str(self.cfg.ensemble_size),
+                      "nodes": "1", 
+                      "ntasks": str(self.cluster.max_nproc_for_each_ensemble_member), 
+                      "ntasks-per-core": "1", "mem": "90G", }
 
         id = depends_on
         restart_flag = '.false.' if not input_is_restart else '.true.'
-        wrf_cmd = script_to_str(self.cluster.run_WRF
-                                ).replace('<exp.expname>', self.exp.expname
-                                          ).replace('<cluster.wrf_rundir_base>', self.cluster.wrf_rundir_base
-                                                    ).replace('<cluster.wrf_modules>', self.cluster.wrf_modules,
-                                                              ).replace('<exp.np_WRF>', str(self.cluster.np_WRF))
-
+        
+        # command from template file
+        wrf_cmd = script_to_str(self.cluster.WRF_exe_template
+                            ).replace('<expname>', self.expname
+                            ).replace('<wrf_rundir_base>', self.cluster.wrf_rundir_base
+                            ).replace('<wrf_modules>', self.cluster.wrf_modules,
+                            ).replace('<WRF_number_of_processors>', "16")
+
+        # if 1-second forecast is required
         if first_second:
-            id = prepare_WRF_inputfiles(begin, begin+dt.timedelta(seconds=1),
+            id = _prepare_WRF_inputfiles(begin, begin+dt.timedelta(seconds=1),
                                         hist_interval_s=1,  # to get an output every 1 s
                                         radt=0,  # to get a cloud fraction CFRAC after 1 s
                                         output_restart_interval=output_restart_interval,
                                         depends_on=id)
 
-            id = self.cluster.run_job(wrf_cmd, "WRF-"+self.exp.expname,
-                                      cfg_update={"array": "1-"+str(self.cluster.size_WRF_jobarray),
-                                                  "nodes": "1", "ntasks": str(self.cluster.np_WRF), "ntasks-per-core": "1",
-                                                  "time": "5", "mem": "100G"},
-                                      depends_on=[id])
+            id = self.cluster.run_job(wrf_cmd, "WRF-"+self.expname,
+                                      cfg_update=cfg_wrf,  depends_on=[id])
 
         # forecast for the whole forecast duration
-        id = prepare_WRF_inputfiles(begin, end,
+        id = _prepare_WRF_inputfiles(begin, end,
                                     hist_interval_s=hist_interval_s,
                                     output_restart_interval=output_restart_interval,
                                     depends_on=id)
@@ -274,16 +286,15 @@ class WorkFlows(object):
         time_in_simulation_hours = (end-begin).total_seconds()/3600
         runtime_wallclock_mins_expected = int(
             time_in_simulation_hours*30 + 10)  # usually <15 min/hour
-        cfg_update = {"array": "1-"+str(self.cluster.size_WRF_jobarray),
-                      "nodes": "1", "ntasks": str(self.cluster.np_WRF), "ntasks-per-core": "1",
-                      "time": str(runtime_wallclock_mins_expected), "mem": "90G", }
 
+        cfg_wrf.update({"time": str(runtime_wallclock_mins_expected)})
+        
         if runtime_wallclock_mins_expected > 25:
-            cfg_update.update({"partition": "amd"})
+            cfg_wrf.update({"partition": "amd"})
         #     #cfg_update.update({"exclude": "jet03"})
 
         id = self.cluster.run_job(
-            wrf_cmd, "WRF-"+self.exp.expname, cfg_update=cfg_update, depends_on=[id])
+            wrf_cmd, "WRF-"+self.expname, cfg_update=cfg_wrf, depends_on=[id])
         return id
 
     def assimilate(self, assim_time, prior_init_time, prior_valid_time, prior_path_exp,
@@ -293,7 +304,7 @@ class WorkFlows(object):
         Args:
             assim_time (dt.datetime):       timestamp of prior wrfout files
             prior_init_time (dt.datetime):  timestamp to find the directory where the prior wrfout files are
-            prior_path_exp (str):           use this directory to get prior state (i.e. self.cluster.archivedir)
+            prior_path_exp (str):           use this directory to get prior state (i.e. self.archivedir)
 
         Returns:
             str: job ID of the submitted job
@@ -301,13 +312,13 @@ class WorkFlows(object):
         if not os.path.exists(prior_path_exp):
             raise IOError('prior_path_exp does not exist: '+prior_path_exp)
 
-        cmd = (self.cluster.python+' '+self.cluster.scripts_rundir+'/assimilate.py '
+        cmd = (self.cluster.python+' '+self.scripts_rundir+'/assimilate.py '
                + assim_time.strftime('%Y-%m-%d_%H:%M ')
                + prior_init_time.strftime('%Y-%m-%d_%H:%M ')
                + prior_valid_time.strftime('%Y-%m-%d_%H:%M ')
                + prior_path_exp)
 
-        id = self.cluster.run_job(cmd, "Assim-"+self.exp.expname,
+        id = self.cluster.run_job(cmd, "Assim-"+self.expname,
                                   cfg_update={"ntasks": "20", "time": "30", "mem": "110G",
                                               "ntasks-per-node": "20", "ntasks-per-core": "1"}, depends_on=[depends_on])
         return id
@@ -332,13 +343,13 @@ class WorkFlows(object):
         else:
             tnew = ''
 
-        cmd = (self.cluster.python+' '+self.cluster.scripts_rundir+'/prep_IC_prior.py '
+        cmd = (self.cluster.python+' '+self.scripts_rundir+'/prep_IC_prior.py '
                + prior_path_exp
                + prior_init_time.strftime(' %Y-%m-%d_%H:%M')
                + prior_valid_time.strftime(' %Y-%m-%d_%H:%M')
                + tnew)
         id = self.cluster.run_job(
-            cmd, "IC-prior-"+self.exp.expname, cfg_update=dict(time="18"), depends_on=[depends_on])
+            cmd, "IC-prior-"+self.expname, cfg_update=dict(time="18"), depends_on=[depends_on])
         return id
 
     def update_IC_from_DA(self, assim_time, depends_on=None):
@@ -351,40 +362,42 @@ class WorkFlows(object):
         Returns:
             str: job ID of the submitted job
         """
-        cmd = self.cluster.python+' '+self.cluster.scripts_rundir + \
+        cmd = self.cluster.python+' '+self.scripts_rundir + \
             '/update_IC.py '+assim_time.strftime('%Y-%m-%d_%H:%M')
-        id = self.cluster.run_job(cmd, "IC-update-"+self.exp.expname,
+        id = self.cluster.run_job(cmd, "IC-update-"+self.expname,
                                   cfg_update=dict(time="18"), depends_on=[depends_on])
         return id
 
     def create_satimages(self, init_time, depends_on=None):
         """Run a job array, one job per ensemble member, to create satellite images"""
         cmd = 'module purge; module load rttov/v13.2-gcc-8.5.0; ' \
-            + 'python ~/RTTOV-WRF/run_init.py '+self.cluster.archivedir+init_time.strftime('/%Y-%m-%d_%H:%M/ ') \
+            + 'python ~/RTTOV-WRF/run_init.py '+self.archivedir+init_time.strftime('/%Y-%m-%d_%H:%M/ ') \
             + '$SLURM_ARRAY_TASK_ID'
-        id = self.cluster.run_job(cmd, "RTTOV-"+self.exp.expname,
-                                  cfg_update={"ntasks": "1", "time": "60", "mem": "10G", "array": "1-"+str(self.exp.n_ens)}, depends_on=[depends_on])
+        id = self.cluster.run_job(cmd, "RTTOV-"+self.expname,
+                                  cfg_update={"ntasks": "1", "time": "60", "mem": "10G", 
+                                              "array": "1-"+str(self.cfg.ensemble_size)}, 
+                                  depends_on=[depends_on])
         return id
 
     def gen_obsseq(self, depends_on=None):
         """(not included in DART-WRF)"""
-        cmd = self.cluster.python+' '+self.cluster.scripts_rundir+'/obsseq_to_netcdf.py'
+        cmd = self.cluster.python+' '+self.scripts_rundir+'/obsseq_to_netcdf.py'
         id = self.cluster.run_job("obsseq_netcdf", cfg_update={"time": "10", "mail-type": "FAIL,END"},
                                   depends_on=[depends_on])
         return id
 
     def evaluate_obs_posterior_after_analysis(self, init, valid, depends_on=None):
 
-        cmd = self.cluster.python+' '+self.cluster.scripts_rundir+'/evaluate_obs_space.py ' + \
+        cmd = self.cluster.python+' '+self.scripts_rundir+'/evaluate_obs_space.py ' + \
             init.strftime('%Y-%m-%d_%H:%M,') + \
             valid.strftime('%Y-%m-%d_%H:%M:%S')
-        id = self.cluster.run_job(cmd, 'eval+1'+self.exp.expname, cfg_update={"ntasks": "16", "mem": "80G", "ntasks-per-node": "16", "ntasks-per-core": "2",
+        id = self.cluster.run_job(cmd, 'eval+1'+self.expname, cfg_update={"ntasks": "16", "mem": "80G", "ntasks-per-node": "16", "ntasks-per-core": "2",
                                                                               "time": "9", "mail-type": "FAIL"},
                                   depends_on=[depends_on])
 
-        # cmd = self.cluster.python+' '+self.cluster.scripts_rundir + \
+        # cmd = self.cluster.python+' '+self.scripts_rundir + \
         #     '/calc_linear_posterior.py '+init.strftime('%Y-%m-%d_%H:%M')
-        # id = self.cluster.run_job(cmd, 'linpost'+self.exp.expname, cfg_update={"ntasks": "16", "mem": "80G", "ntasks-per-node": "16", "ntasks-per-core": "2",
+        # id = self.cluster.run_job(cmd, 'linpost'+self.expname, cfg_update={"ntasks": "16", "mem": "80G", "ntasks-per-node": "16", "ntasks-per-core": "2",
         #                                                                        "time": "15", "mail-type": "FAIL"},
         #                           depends_on=[id])
         return id
@@ -392,17 +405,17 @@ class WorkFlows(object):
     def verify_sat(self, depends_on=None):
         """(not included in DART-WRF)"""
         cmd = self.cluster.python+' /jetfs/home/lkugler/osse_analysis/plot_from_raw/analyze_fc.py ' + \
-            self.exp.expname+' '+self.exp.nature_exp + ' sat has_node np=2 mem=110G'
+            self.expname+' '+self.cfg.nature_exp + ' sat has_node np=2 mem=110G'
 
-        self.cluster.run_job(cmd, "verif-SAT-"+self.exp.expname,
+        self.cluster.run_job(cmd, "verif-SAT-"+self.expname,
                              cfg_update={"time": "60", "mail-type": "FAIL,END", "ntasks": "2",
                                          "ntasks-per-node": "1", "ntasks-per-core": "2", "mem": "110G", }, depends_on=[depends_on])
 
     def verify_wrf(self, depends_on=None):
         """(not included in DART-WRF)"""
         cmd = self.cluster.python+' /jetfs/home/lkugler/osse_analysis/plot_from_raw/analyze_fc.py ' + \
-            self.exp.expname+' '+self.exp.nature_exp + ' wrf has_node np=10 mem=250G'
+            self.expname+' '+self.cfg.nature_exp + ' wrf has_node np=10 mem=250G'
 
-        self.cluster.run_job(cmd, "verif-WRF-"+self.exp.expname,
+        self.cluster.run_job(cmd, "verif-WRF-"+self.expname,
                              cfg_update={"time": "210", "mail-type": "FAIL,END", "ntasks": "10",
                                          "ntasks-per-node": "10", "ntasks-per-core": "1", "mem": "250G"}, depends_on=[depends_on])
diff --git a/docs/source/tutorial1.rst b/docs/source/tutorial1.rst
index 4817bc4..ed73fa4 100644
--- a/docs/source/tutorial1.rst
+++ b/docs/source/tutorial1.rst
@@ -39,14 +39,14 @@ Then set ``exp_config='exp1.py`` in the call to :class:`dartwrf.workflows.WorkFl
 
 Customize the settings in ``config/exp1.py``:
 * `expname` should be a unique experiment name and will be used as folder name
-* `n_ens` is the ensemble size
+* `ensemble_size` is the ensemble size
 * `update_vars` are the WRF variables which shall be updated by the assimilation
 
 .. code-block:: python
 
     exp = Experiment()
     exp.expname = "exp1"
-    exp.n_ens = 40
+    exp.ensemble_size = 40
     exp.update_vars = ['THM', 'PH', 'MU', 'QVAPOR',]
 
 
@@ -95,9 +95,9 @@ If you want to modify any parameters, specify your changes in a python dictionar
                              sampling_error_correction='.true.',
                             ),
                     '&filter_nml':
-                        dict(ens_size=exp.n_ens,
-                             num_output_state_members=exp.n_ens,
-                             num_output_obs_members=exp.n_ens,
+                        dict(ens_size=exp.ensemble_size,
+                             num_output_state_members=exp.ensemble_size,
+                             num_output_obs_members=exp.ensemble_size,
                              inf_flavor=['0', '4'],
                              output_members='.true.',
                              output_mean='.true.',
diff --git a/multiple_exps.py b/multiple_exps.py
new file mode 100644
index 0000000..94567bf
--- /dev/null
+++ b/multiple_exps.py
@@ -0,0 +1,79 @@
+import datetime as dt
+import sys
+import pandas as pd
+from dartwrf.workflows import WorkFlows
+from config import defaults
+
+
+# test multiple assimilation windows (11-12, 12-13, 13-14, )
+timedelta_btw_assim = dt.timedelta(minutes=15)
+assim_times_start = pd.date_range('2000-01-01 11:00', '2000-01-01 13:00', freq='h')
+    
+for t0 in assim_times_start:
+
+    # set constants
+    w = WorkFlows(server_config='/jetfs/home/lkugler/DART-WRF/config/jet.py', 
+                   expname=t0.strftime('test_%H%M'))
+   
+    # set potentially varying parameters
+    ens_size = 3
+    dart_nml = defaults.dart_nml
+    dart_nml.update(ens_size=ens_size)
+    
+    w.configure(model_dx=2000, 
+                ensemble_size=3,
+                dart_nml = dart_nml,
+                use_existing_obsseq=False,
+                update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'QSNOW', 'PSFC'],
+                input_profile = '/mnt/jetfs/home/lkugler/data/initial_profiles/wrf/ens/2022-03-31/raso.fc.<iens>.wrfprof',
+                nature_wrfout_pattern = '/jetfs/home/lkugler/data/sim_archive/exp_v1.18_P1_nature+1/*/1/wrfout_d01_%Y-%m-%d_%H:%M:%S',
+                nature_exp = 'nat_250m_blockavg2km',)
+
+    
+    time = t0
+    id = None
+    w.prepare_WRFrundir(time)
+    id = w.run_ideal(depends_on=id)
+    sys.exit()
+    
+    prior_init_time = time - dt.timedelta(hours=1)
+    prior_valid_time = time
+    prior_path_exp = '/jetfs/home/lkugler/data/sim_archive/exp_v1.19_P2_noDA+1/'
+    
+    # assimilate at these times
+    assim_times = pd.date_range(time, time + dt.timedelta(hours=1), freq=timedelta_btw_assim)
+    last_assim_time = assim_times[-1]
+    
+    # loop over assimilations
+    for i, t in enumerate(assim_times):
+        
+        id = w.assimilate(time, prior_init_time, prior_valid_time, prior_path_exp, depends_on=id)
+
+        # 1) Set posterior = prior
+        id = w.prepare_IC_from_prior(prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
+
+        # 2) Update posterior += updates from assimilation
+        id = w.update_IC_from_DA(time, depends_on=id)
+
+        # How long shall we integrate?
+        timedelta_integrate = timedelta_btw_assim
+        output_restart_interval = timedelta_btw_assim.total_seconds()/60
+        if time == last_assim_time:
+            timedelta_integrate = dt.timedelta(hours=4)
+            output_restart_interval = 9999  # no restart file after last assim
+
+        # 3) Run WRF ensemble
+        id = w.run_ENS(begin=time,  # start integration from here
+                       end=time + timedelta_integrate,  # integrate until here
+                       output_restart_interval=output_restart_interval,
+                       depends_on=id)
+        
+        if t < last_assim_time:
+            # continue the next cycle
+            prior_init_time = assim_times[i]
+            prior_valid_time = assim_times[i+1]
+            prior_path_exp = w.archivedir  # use own exp path as prior
+        else:
+            # exit the cycles
+            break
+        
\ No newline at end of file
-- 
GitLab