diff --git a/analysis_only.py b/analysis_only.py
deleted file mode 100755
index 840e603c24535935856591e3850aa12d270e50ba..0000000000000000000000000000000000000000
--- a/analysis_only.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/python3
-"""
-run the data assimilation only, without advancing the forecast model
-"""
-import datetime as dt
-from dartwrf.workflows import WorkFlows
-
-
-prior_path_exp = '/users/students/lehre/advDA_s2023/data/sample_ensemble/'
-prior_init_time = dt.datetime(2008,7,30,12)
-prior_valid_time = dt.datetime(2008,7,30,13)
-assim_time = prior_valid_time
-
-w = WorkFlows(exp_config='exp_template.py', server_config='srvx1.py')
-
-id = w.assimilate(assim_time, prior_init_time, prior_valid_time, prior_path_exp)
-
-# w.create_satimages(time, depends_on=id)
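
Under the new interface introduced later in this diff, the same one-shot analysis is driven by a serialized Config instead of WorkFlows. A minimal sketch, assuming the config file defines time, prior_init_time, prior_valid_time and prior_path_exp (the attributes read by assimilate.main() below); the path is illustrative:

    # sketch only: one-shot assimilation via the new Config-based entry point
    from dartwrf.utils import Config
    from dartwrf import assimilate

    cfg = Config.from_file('/path/to/experiment_config')  # illustrative path
    assimilate.main(cfg)
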
diff --git a/config/exp_template.py b/config/exp_template.py
deleted file mode 100755
index 07247d7bc7265df4acae16e4800290dedc84b9b2..0000000000000000000000000000000000000000
--- a/config/exp_template.py
+++ /dev/null
@@ -1,178 +0,0 @@
-"""
-This configuration file is designed for the course Advanced Data Assimilation at the University of Vienna.
-Copy and customize it as you wish.
-"""
-from dartwrf.utils import Experiment
-
-exp = Experiment()
-exp.expname = "exp_test"
-exp.model_dx = 2000
-exp.n_ens = 4
-exp.do_quality_control = False
-
-# Path to the nature run. From there, observations are generated. Wildcards "*" are allowed in this path.
-exp.nature_wrfout_pattern = '/users/students/lehre/advDA_s2023/data/sample_nature/wrfout_d01_%Y-%m-%d_%H:%M:%S'
-
-# Path to sounding profiles for ensemble initialization. Not used in the Advanced DA course.
-exp.input_profile = '/mnt/jetfs/home/lkugler/data/initial_profiles/wrf/ens/2022-03-31/raso.fc.<iens>.wrfprof'
-
-# Configuration of DART. For details see the DART or DART-WRF documentation.
-exp.dart_nml = {'&assim_tools_nml':
-                    dict(filter_kind='1',
-                        sampling_error_correction='.true.',
-                        ),
-                '&filter_nml':
-                    dict(   ens_size=exp.n_ens,
-                            num_output_state_members=exp.n_ens,
-                            num_output_obs_members=exp.n_ens,
-                            inf_flavor=['0', '4'],
-                            inf_initial=[1.04, 0.5],
-                            inf_initial_from_restart='.false.',
-                            output_members='.true.',
-                            output_mean='.true.',
-                            output_sd='.true.',
-                            stages_to_write='output',
-                        ),
-                '&quality_control_nml':
-                    dict(outlier_threshold='-1',
-                        ),
-                '&obs_def_radar_mod_nml':
-                    dict(apply_ref_limit_to_obs      =  '.true.',
-                         reflectivity_limit_obs      =  5.0,
-                         lowest_reflectivity_obs     =  5.0,
-                         apply_ref_limit_to_fwd_op   =  '.true.',
-                         reflectivity_limit_fwd_op   =  5.0,
-                         lowest_reflectivity_fwd_op  =  5.0,
-                         microphysics_type           =  '5',
-                         ),
-                '&location_nml':
-                    dict(horiz_dist_only='.false.',
-                        ),
-                '&model_nml':
-                    dict(wrf_state_variables = 
-                        [['U',     'QTY_U_WIND_COMPONENT',     'TYPE_U',    'UPDATE','999',],
-                         ['V',     'QTY_V_WIND_COMPONENT',     'TYPE_V',    'UPDATE','999',],
-                         ['W',     'QTY_VERTICAL_VELOCITY',    'TYPE_W',    'UPDATE','999',],
-                         ['PH',    'QTY_GEOPOTENTIAL_HEIGHT',  'TYPE_GZ',   'UPDATE','999',],
-                         ['THM',   'QTY_POTENTIAL_TEMPERATURE','TYPE_T',    'UPDATE','999',],
-                         ['MU',    'QTY_PRESSURE',             'TYPE_MU',   'UPDATE','999',],
-
-                         ['QVAPOR','QTY_VAPOR_MIXING_RATIO',   'TYPE_QV',   'UPDATE','999',],
-                         ['QCLOUD','QTY_CLOUDWATER_MIXING_RATIO','TYPE_QC', 'UPDATE','999',],
-                         ['QICE',  'QTY_ICE_MIXING_RATIO',     'TYPE_QI',   'UPDATE','999',],
-                        #  ['QRAIN','QTY_RAINWATER_MIXING_RATIO','TYPE_QR', 'UPDATE','999',],
-                        #  ['QSNOW','QTY_SNOW_MIXING_RATIO','TYPE_QS', 'UPDATE','999',],
-                        #  ['QGRAUP','QTY_GRAUPEL_MIXING_RATIO','TYPE_QG', 'UPDATE','999',],
-
-                         ['CLDFRA','QTY_CLOUD_FRACTION',       'TYPE_CFRAC','UPDATE','999',],
-                         ['PSFC',  'QTY_SURFACE_PRESSURE',     'TYPE_PSFC', 'UPDATE','999',],
-                         ['T2',    'QTY_2M_TEMPERATURE',       'TYPE_T',    'UPDATE','999',],
-                         ['TSK',   'QTY_SKIN_TEMPERATURE',     'TYPE_T',    'UPDATE','999',],
-                         ['REFL_10CM','QTY_RADAR_REFLECTIVITY','TYPE_REFL', 'UPDATE','999',]],
-                         
-                        wrf_state_bounds = 
-                        [['QVAPOR','0.0','NULL','CLAMP'],
-                         ['QCLOUD','0.0','NULL','CLAMP'],
-                         ['QICE','0.0','NULL','CLAMP'],
-                         ['CLDFRA','0.0','1.0','CLAMP'],
-                         
-                        #  ['QRAIN','0.0','NULL','CLAMP'],
-                        #  ['QSNOW','0.0','NULL','CLAMP'],
-                        #  ['QGRAUP','0.0','NULL','CLAMP'],
-                         ],
-                        ),
-                '&ensemble_manager_nml':
-                   dict(layout = 1,
-                        tasks_per_node = 12,
-                        communication_configuration = 1,
-                        ),
-                }
-
-
-
-# n_obs is the number of observations
-# allowed values: 22500 (approx. one observation every 2 km), 5776 (4 km), 961 (10 km), 256 (20 km), 121 (30 km)
-# Note: for radar, n_obs is the number of observations at each height level
-
-# oeinf is the observation inflation factor (1 means no inflation)
-oeinf = 1
-
-vis = dict(var_name='VIS 0.6µm', unit='[1]',
-           kind='MSG_4_SEVIRI_BDRF', sat_channel=1, 
-           n_obs=961, obs_locations='square_array_evenly_on_grid',
-           # n_obs=1, obs_locations=[(44.141, -0.99)],
-           error_generate=0.03, error_assimilate=0.03*oeinf,
-           loc_horiz_km=20, 
-           #height=4000, loc_vert_km=3
-           )
-
-wv73 = dict(var_name='Brightness temperature WV 7.3µm', unit='[K]',
-            kind='MSG_4_SEVIRI_TB', sat_channel=6, 
-            n_obs=961, obs_locations='square_array_evenly_on_grid',
-            error_generate=1, error_assimilate=1*oeinf, 
-            loc_horiz_km=20, 
-            #height=7000, loc_vert_km=3
-            )
-
-wv62 = dict(var_name='Brightness temperature WV 6.2µm', unit='[K]',
-            kind='MSG_4_SEVIRI_TB', sat_channel=5, 
-            n_obs=961,  obs_locations='square_array_evenly_on_grid',
-            # n_obs=1, obs_locations=[(44.141, -0.99)],
-            error_generate=1, error_assimilate=1*oeinf, 
-            loc_horiz_km=20, 
-            #height=10000, loc_vert_km=3
-            )
-
-ir108 = dict(var_name='Brightness temperature IR 10.8µm', unit='[K]',
-             kind='MSG_4_SEVIRI_TB', sat_channel=9, 
-             n_obs=1, obs_locations='square_array_evenly_on_grid',
-             error_generate=5., error_assimilate=10.,
-             loc_horiz_km=32)
-
-radar = dict(var_name='Radar reflectivity', unit='[dBz]',
-             kind='RADAR_REFLECTIVITY', 
-             n_obs=961, obs_locations='square_array_evenly_on_grid',
-             # n_obs=2, obs_locations=[(45.332, 0.4735), (45.332, 0.53)],
-             heights=range(2000, 14001, 2000),
-             error_generate=2.5, error_assimilate=2.5*oeinf,
-             loc_horiz_km=20, loc_vert_km=3)
-
-t = dict(var_name='Temperature', unit='[K]',
-         kind='RADIOSONDE_TEMPERATURE', 
-         #n_obs=22500, obs_locations='square_array_evenly_on_grid',
-         n_obs=1, obs_locations=[(45., 0.)],
-         error_generate=0.2, error_assimilate=0.2,
-         heights=[1000,], #range(1000, 17001, 2000),
-         loc_horiz_km=50, loc_vert_km=2.5)
-
-q = dict(var_name='Specific humidity', unit='[kg/kg]',
-         kind='RADIOSONDE_SPECIFIC_HUMIDITY', n_obs=1,
-         error_generate=0., error_assimilate=5*1e-5,
-         heights=[1000], #range(1000, 17001, 2000),
-         loc_horiz_km=50, loc_vert_km=2.5)
-
-t2m = dict(var_name='SYNOP Temperature', unit='[K]',
-           kind='SYNOP_TEMPERATURE', 
-           n_obs=256, obs_locations='square_array_evenly_on_grid',
-           error_generate=0.3, error_assimilate=0.3,
-           loc_horiz_km=10, loc_vert_km=2)
-
-psfc = dict(var_name='SYNOP Pressure', unit='[Pa]',
-            kind='SYNOP_SURFACE_PRESSURE', n_obs=1, 
-            error_generate=50., error_assimilate=100.,
-            loc_horiz_km=32, loc_vert_km=5)
-
-exp.observations = [t]
-
-# the variables which will be replaced in the WRF initial conditions file
-exp.update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'PSFC']
-#exp.update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'PSFC']
-#exp.update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'QRAIN', 'QSNOW', 'QGRAUP', 'PSFC']
-#exp.update_vars = ['QVAPOR', 'QCLOUD', 'QICE', 'PSFC']
-
-exp.use_existing_obsseq = False
-# exp.use_existing_obsseq='/jetfs/home/lkugler/data/sim_archive/exp_v1.22_P2_rr_WV73_obs10_loc20_oe1/obs_seq_out/%Y-%m-%d_%H:%M_obs_seq.out'
-
-
-
-
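
The nested exp.dart_nml dictionary above is rendered into Fortran namelist text by write_namelist_from_dict() in dartwrf/dart_nml.py. An illustrative reduction of that mapping, not the actual implementation:

    # how one &section of the dict corresponds to input.nml text
    nml = {'&assim_tools_nml': dict(filter_kind='1',
                                    sampling_error_correction='.true.')}
    for section, params in nml.items():
        print(section)
        for name, value in params.items():
            print('   ' + name + ' = ' + str(value) + ',')
        print('   /')
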
diff --git a/config/jet.py b/config/jet.py
index df71c91dbab5bb5376eaff41958825589855736f..4f06715e82d24c97fc8a8833b93154af417c8878 100755
--- a/config/jet.py
+++ b/config/jet.py
@@ -1,43 +1,39 @@
 """Cluster configuration file, see docstring of ClusterConfig class in dartwrf/utils.py for details"""
-from re import A
-from dartwrf.utils import ClusterConfig
 
-cluster = ClusterConfig(
-    max_nproc = 20,
-    max_nproc_for_each_ensemble_member = 16,
+cluster_defaults = dict(
+    max_nproc = 16,
+    max_nproc_for_each_ensemble_member = 9,
     use_slurm = True,
 
     # binaries
     python = '/jetfs/home/lkugler/miniforge3/envs/verif/bin/python',
     ncks = '/jetfs/spack/opt/spack/linux-rhel8-skylake_avx512/intel-2021.7.1/nco-5.1.0-izrhxv24jqco5epjhf5ledsqwanojc5m/bin/ncks',
-    ideal = '/jetfs/home/lkugler/data/compile/bin/ideal-v4.6.0_20250210_StS.exe',
+    idealexe = '/jetfs/home/lkugler/data/compile/bin/ideal-v4.6.0_20250210_StS.exe',
     wrfexe = '/jetfs/home/lkugler/data/compile/bin/wrf-v4.6.0_20250210_StS.exe',
     dart_modules = 'module purge; module load rttov/v13.2-gcc-8.5.0',
     wrf_modules = """module purge; module load netcdf-fortran/4.5.3-intel-2021.7.1-27ldrnt""",
 
     # paths for data output
-    wrf_rundir_base = '/jetfs/home/lkugler/data/run_WRF/',  # path for temporary files
-    dart_rundir_base = '/jetfs/home/lkugler/data/run_DART/',  # path for temporary files
-    archive_base = '/jetfs/home/lkugler/data/sim_archive/',
+    dir_wrf_run = '/jetfs/home/lkugler/data/run_WRF/<exp>/<ens>/',  # path for temporary files
+    dir_dart_run = '/jetfs/home/lkugler/data/run_DART/<exp>/',  # path for temporary files
+    dir_archive = '/jetfs/home/lkugler/data/sim_archive/<exp>/',
 
     # paths used as input
-    srcdir = '/jetfs/home/lkugler/data/compile/WRF-4.3/run',
-    dart_srcdir = '/jetfs/home/lkugler/data/compile/DART/DART-10.8.3_10pct/models/wrf/work/',
-    rttov_srcdir = '/jetfs/home/lkugler/data/compile/RTTOV13/rtcoef_rttov13/',
+    dir_wrf_src = '/jetfs/home/lkugler/data/compile/WRF-4.3/run',
+    dir_dart_src = '/jetfs/home/lkugler/data/compile/DART/DART-10.8.3_10pct/models/wrf/work/',
+    dir_rttov_src = '/jetfs/home/lkugler/data/compile/RTTOV13/rtcoef_rttov13/',
+    dir_dartwrf_dev = '/jetfs/home/lkugler/DART-WRF/',
     
-    dartwrf_dir_dev = '/jetfs/home/lkugler/DART-WRF/',
-    WRF_namelist_template = '/jetfs/home/lkugler/DART-WRF/config/templates/namelist.input_nat_exact',
-    rttov_nml = "/jetfs/home/lkugler/DART-WRF/config/templates/obs_def_rttov.VIS+WV.nml",
+    WRF_namelist_template = '/jetfs/home/lkugler/DART-WRF/templates/namelist.input',
+    rttov_nml = "/jetfs/home/lkugler/DART-WRF/templates/obs_def_rttov.VIS+WV.nml",
     
     # other inputs
-    geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.250m_1600x1600',
-    geo_em_forecast = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200',
-    #obs_impact_filename = dartwrf_dir_dev+'/templates/impactfactor_T.txt'
+    #obs_impact_filename = dir_dartwrf_dev+'/templates/impactfactor_T.txt'
 
-    WRF_exe_template = '/jetfs/home/lkugler/DART-WRF/config/templates/run_WRF.jet.sh',
-    WRF_ideal_template = '/jetfs/home/lkugler/DART-WRF/config/templates/run_WRF_ideal.sh',
+    WRF_exe_template = '/jetfs/home/lkugler/DART-WRF/templates/run_WRF.jet.sh',
+    WRF_ideal_template = '/jetfs/home/lkugler/DART-WRF/templates/run_WRF_ideal.sh',
 
-    slurm_cfg = {"account": "lkugler", "partition": "all",  
+    slurm_kwargs = {"account": "lkugler", "partition": "hub",  
                     "ntasks": "1", "ntasks-per-core": "1", "mem": "30G",
                     "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"},
 
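
The renamed dir_* entries carry <exp> and <ens> placeholders, which are resolved by plain string replacement (see cfg.dir_archive.replace('<exp>', cfg.name) in dartwrf/assimilate.py below). For example:

    # resolving the run directory for experiment 'exp_test', ensemble member 3
    dir_wrf_run = '/jetfs/home/lkugler/data/run_WRF/<exp>/<ens>/'
    path = dir_wrf_run.replace('<exp>', 'exp_test').replace('<ens>', str(3))
    print(path)  # /jetfs/home/lkugler/data/run_WRF/exp_test/3/
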
diff --git a/config/srvx1.py b/config/srvx1.py
deleted file mode 100755
index 80f5eb2f2a9608e22d79bbf93b3bc52af15c81b1..0000000000000000000000000000000000000000
--- a/config/srvx1.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""Cluster configuration file, see docstring of ClusterConfig class in dartwrf/utils.py for details"""
-from dartwrf import utils
-from dartwrf.exp_config import exp
-
-cluster = utils.ClusterConfig(exp)
-cluster.name = 'srvx1'
-cluster.max_nproc = 6
-cluster.use_slurm = False
-
-# binaries
-cluster.python = 'python'
-cluster.python_verif = '/users/staff/lkugler/miniconda3/bin/python'
-cluster.ncks = '/home/swd/spack/opt/spack/linux-rhel8-skylake_avx512/gcc-8.5.0/nco-5.0.1-ntu44aoxlvwtr2tsrobfr4lht7cpvccf/bin/ncks'
-cluster.ideal = '' #/jetfs/home/lkugler/bin/ideal-v4.3_v1.22.exe'
-cluster.wrfexe = '' #/jetfs/home/lkugler/bin/wrf-v4.3_v1.22.exe'
-cluster.dart_modules = 'pip install scipy; module purge; module load netcdf-fortran/4.5.2-gcc-8.5.0-MPI3.1.6; '
-cluster.wrf_modules = ''
-
-# paths for data output
-cluster.wrf_rundir_base = utils.userhome+'/AdvDA/run_WRF/'  # path for temporary files
-cluster.dart_rundir_base = utils.userhome+'/AdvDA/run_DART/'  # path for temporary files
-cluster.archive_base = utils.userhome+'/data/sim_archive/'
-
-# paths used as input
-cluster.srcdir = '/users/staff/lkugler/AdvDA23/DART/WRF-4.3/run'
-cluster.dart_srcdir = '/users/students/lehre/advDA_s2023/DART/models/wrf/work'
-cluster.rttov_srcdir = '/users/students/lehre/advDA_s2023/RTTOV13/rtcoef_rttov13/'
-cluster.dartwrf_dir = utils.userhome+'/AdvDA/DART-WRF/'
-cluster.geo_em_for_WRF_ideal = '/users/students/lehre/advDA_s2023/data/geo_em.d01.nc'
-
-# templates/run scripts
-cluster.namelist = cluster.dartwrf_dir+'/../templates/namelist.input'
-cluster.run_WRF = cluster.dartwrf_dir+'/run_ens.jet.sh'
-
-cluster.slurm_cfg = None
diff --git a/config/vsc.py b/config/vsc.py
deleted file mode 100755
index 58a3b6cab07616e4bf9a9b55eaf47ccf5621f803..0000000000000000000000000000000000000000
--- a/config/vsc.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""Cluster configuration file, see docstring of ClusterConfig class in dartwrf/utils.py for details"""
-from dartwrf import utils
-from dartwrf.exp_config import exp
-
-
-cluster = utils.ClusterConfig(exp)
-cluster.name = 'VSC' 
-cluster.max_nproc = 20
-cluster.size_jobarray = 10  # 10 jobs, with 4 WRF processes each, per node
-cluster.use_slurm = True
-
-# binaries
-cluster.python = '/home/fs71386/lkugler/miniconda3/envs/DART/bin/python'
-cluster.python_enstools = '/home/fs71386/lkugler/miniconda3/envs/enstools/bin/python'
-cluster.ncks = '/home/fs71386/lkugler/miniconda3/envs/DART/bin/ncks'
-cluster.ideal = '/home/fs71386/lkugler/compile/bin/ideal-v4.2.2_v1.22.exe'
-cluster.wrfexe = '/home/fs71386/lkugler/compile/bin/wrf-v4.3_v1.22.exe'
-cluster.container = '/home/fs71386/lkugler/run_container.sh python.gcc9.5.0.vsc4.sif'
-
-# paths for data output
-cluster.wrf_rundir_base = '/gpfs/data/fs71386/lkugler/run_WRF/'  # path for temporary files
-cluster.dart_rundir_base = '/gpfs/data/fs71386/lkugler/run_DART/'  # path for temporary files
-cluster.archive_base = '/gpfs/data/fs71386/lkugler/sim_archive/'
-
-# paths used as input
-cluster.srcdir = '/gpfs/data/fs71386/lkugler/compile/WRF/WRF-4.3/run'
-cluster.dart_srcdir = '/gpfs/data/fs71386/lkugler/compile/DART/DART/models/wrf/work'
-cluster.rttov_srcdir = '/gpfs/data/fs71386/lkugler/compile/RTTOV13/rtcoef_rttov13/'
-cluster.scriptsdir = '/home/fs71386/lkugler/DART-WRF/dartwrf/'
-
-# templates/run scripts
-cluster.namelist = cluster.scriptsdir+'/../templates/namelist.input'
-cluster.run_WRF = '/home/fs71386/lkugler/DART-WRF/dartwrf/run_ens.vsc.sh'
-
-cluster.slurm_cfg = {"account": "p71386", "partition": "skylake_0384", "qos": "p71386_0384",
-                 "nodes": "1", "ntasks": "1", "ntasks-per-node": "48", "ntasks-per-core": "1",
-                 "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"}
diff --git a/cycled_exp.py b/cycled_exp.py
deleted file mode 100755
index 91ab2babbb42b59569ad5e44f4a1f5a1cee7c496..0000000000000000000000000000000000000000
--- a/cycled_exp.py
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/usr/bin/python3
-import os
-import sys
-import datetime as dt
-from dartwrf.workflows import WorkFlows
-from dartwrf.utils import save_dict
-
-if __name__ == "__main__":
-    """
-    Run a cycled OSSE with WRF and DART.
-    """
-    w = WorkFlows(exp_config='exp_hires.py', server_config='jet_ACF.py')
-
-    timedelta_integrate = dt.timedelta(minutes=15)
-    timedelta_btw_assim = dt.timedelta(minutes=15)
-
-    id = None
-
-    if False:  # warm bubble
-        prior_path_exp = '/jetfs/scratch/lkugler/data/sim_archive/exp_v1.19_P3_wbub7_noDA'
-
-        init_time = dt.datetime(2008, 7, 30, 12)
-        time = dt.datetime(2008, 7, 30, 12, 30)
-        last_assim_time = dt.datetime(2008, 7, 30, 13, 30)
-        forecast_until = dt.datetime(2008, 7, 30, 18)
-
-        w.prepare_WRFrundir(init_time)
-        # id = w.run_ideal(depends_on=id)
-        # id = w.wrfinput_insert_wbubble(depends_on=id)
-
-        prior_path_exp = '/jetfs/scratch/a11716773/master_thesis_2023/data2/sim_archive/free_ens/'
-        init_time = dt.datetime(2008, 7, 30, 11)
-        time = dt.datetime(2008, 7, 30, 12)
-        last_assim_time = dt.datetime(2008, 7, 30, 13)
-        forecast_until = dt.datetime(2008, 7, 30, 17)
-
-        prior_path_exp = '/jetfs/scratch/lkugler/data/sim_archive/exp_v1.19_P2_noDA+1/'
-        init_time = dt.datetime(2008, 7, 30, 12, 30)
-        time = dt.datetime(2008, 7, 30, 13)
-        last_assim_time = dt.datetime(2008, 7, 30, 14)
-        forecast_until = dt.datetime(2008, 7, 30, 18)
-
-    if True:  # random
-        # exp_v1.19_P2_noDA+1/'
-        prior_path_exp = '/jetfs/scratch/lkugler/data/sim_archive/exp_nat250m_noDA_f/'
-        init_time = dt.datetime(2008, 7, 30, 11,45)
-        time = dt.datetime(2008, 7, 30, 12)
-        last_assim_time = dt.datetime(2008, 7, 30, 13)
-        forecast_until = dt.datetime(2008, 7, 30, 18)
-
-        w.prepare_WRFrundir(init_time)
-        # id = w.run_ideal(depends_on=id)
-
-    # prior_path_exp = w.cluster.archivedir
-    prior_init_time = init_time
-
-    while time <= last_assim_time:
-
-        # usually we take the prior from the current time
-        # but one could use a prior from a different time from another run
-        # i.e. 13z as a prior to assimilate 12z observations
-        prior_valid_time = time
-
-        if True:
-            ACF_config = dict(
-                var='WV73',
-                scales_km=(192, 96, 48, 24, 12),
-                observed_width_km=384,
-                dx_km_obs=1.0,
-                dx_km_forecast=2.0,
-                # alternative thresholds: ('value', 0.6), ('value', 230), ('percentile', 30), ('percentile', 90)
-                threshold=('value', 230),
-                difference=True,
-                first_guess_pattern='/RT_wrfout_d01_%Y-%m-%d_%H:%M:%S.nc',
-
-                # observed_data='/jetfs/scratch/a11716773/master_thesis_2023/data2/sim_archive/nature_dx=2000m/RT_wrfout_d01_%Y-%m-%d_%H:%M:%S.nc',
-                observed_data='/jetfs/home/lkugler/data/sim_archive/nat_250m_obs1km/2008-07-30_12:00/1/RT_wrfout_d01_%Y-%m-%d_%H_%M_%S.nc',
-                # observed_data='/jetfs/home/lkugler/data/sim_archive/exp_v1.18_P1_nature+1/2008-07-30_06:00/1/RT_wrfout_d01_%Y-%m-%d_%H_%M_%S.nc',
-                f_grid_obs='/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.250m-1km_400x400',
-
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/hrNat_VIS/OE_VIS_CF_0.6.csv',
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/hrNat_VIS/OE_VIS_CF_0.6_difference.csv',
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/hrNat_VIS/OE_VIS_SO_difference.csv',
-                #obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/hrNat_IR/OE-WV73_CF_230.csv',
-                obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/hrNat_IR/OE-WV73_CF_230_difference.csv',
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/hrNat_IR/OE-WV73_superobs.csv',
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/hrNat_IR/OE-WV73_SO_difference.csv',
-
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/exp_v1.18_P1_nature+1/OE-STDEV_delta_192-6_12z-14z.csv',
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/exp_v1.18_P1_nature+1/obs_error_stddev_delta_approach_12z-14z.csv'
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/OE_WV73_SO_theoretical.csv',
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/OE_VIS_SO_theoretical.csv',
-                # obs_err_csv='/jetfs/home/lkugler/CloudfractionDA/data/obs_error_stddev_andrea.csv',
-                inflation_OE_var=2.0,
-            )
-            # if time.minute == 0:  # first and last assimilation
-            #     ACF_config['scales_km'] = (192, 96, 48, 24, 12)
-            # else:
-            #     ACF_config['scales_km'] = (24, 12)
-
-            save_dict(ACF_config, time.strftime(
-                w.cluster.scripts_rundir+'/ACF_config_%H%M.pkl'))
-
-        id = w.assimilate(time, prior_init_time,
-                          prior_valid_time, prior_path_exp, depends_on=id)
-
-        # 1) Set posterior = prior
-        id = w.prepare_IC_from_prior(
-            prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
-
-        # 2) Update posterior += updates from assimilation
-        id = w.update_IC_from_DA(time, depends_on=id)
-
-        # How long shall we integrate?
-        timedelta_integrate = timedelta_btw_assim
-        output_restart_interval = timedelta_btw_assim.total_seconds()/60
-        if time == last_assim_time:
-            timedelta_integrate = forecast_until - \
-                last_assim_time  # dt.timedelta(hours=4)
-            output_restart_interval = 9999  # no restart file after last assim
-
-        # 3) Run WRF ensemble
-        id = w.run_ENS(begin=time,  # start integration from here
-                       end=time + timedelta_integrate,  # integrate until here
-                       output_restart_interval=output_restart_interval,
-                       first_second=False,  # set True to get a +1-second forecast after each restart
-                       depends_on=id)
-
-        # evaluate the forecast at +1 second after the assimilation time
-        # _ = w.evaluate_obs_posterior_after_analysis(time, time+dt.timedelta(seconds=1), depends_on=id)
-
-        # as we have WRF output, we can use own exp path as prior
-        # prior_path_exp = '/jetfs/scratch/lkugler/data/sim_archive/exp_v1.19_P2_noDA/'
-        prior_path_exp = w.cluster.archivedir
-
-        id = w.create_satimages(time, depends_on=id)
-
-        # increment time
-        time += timedelta_btw_assim
-
-        # update time variables
-        prior_init_time = time - timedelta_btw_assim
-
-    w.verify_sat(id)
-    w.verify_wrf(id)
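
Before this change, the time-dependent ACF settings travelled from this driver to the assimilation step as a pickled dict keyed by assimilation time; the changes to dartwrf/assimilate.py below replace that with plain Config attributes. The old round trip, runnable in isolation (the target path is illustrative):

    import datetime as dt
    from dartwrf.utils import save_dict, load_dict

    time = dt.datetime(2008, 7, 30, 12)
    ACF_config = dict(var='WV73', scales_km=(192, 96, 48, 24, 12))
    f = time.strftime('/tmp/ACF_config_%H%M.pkl')  # illustrative location
    save_dict(ACF_config, f)
    assert load_dict(f) == ACF_config
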
diff --git a/cycled_exp_template.py b/cycled_exp_template.py
deleted file mode 100755
index 01a7a6da883d58572a70fc7bdd04d87835da26eb..0000000000000000000000000000000000000000
--- a/cycled_exp_template.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/python3
-import datetime as dt
-from dartwrf.workflows import WorkFlows
-
-if __name__ == "__main__":
-    """
-    Run a cycled OSSE with WRF and DART.
-    """
-    w = WorkFlows(exp_config='exp_template.py', server_config='jet.py')
-
-    timedelta_integrate = dt.timedelta(minutes=15)
-    timedelta_btw_assim = dt.timedelta(minutes=15)
-    
-    prior_path_exp = '/path_to/sim_archive/experiment_name/'
-    init_time = dt.datetime(2008, 7, 30, 11,45)
-    time = dt.datetime(2008, 7, 30, 12)
-    last_assim_time = dt.datetime(2008, 7, 30, 13)
-    forecast_until = dt.datetime(2008, 7, 30, 13,15)
-
-    w.prepare_WRFrundir(init_time)
-    # id = w.run_ideal(depends_on=id)
-
-    # prior_path_exp = w.cluster.archivedir
-    prior_init_time = init_time
-
-    while time <= last_assim_time:
-
-        # usually we take the prior from the current time
-        # but one could use a prior from a different time from another run
-        # i.e. 13z as a prior to assimilate 12z observations
-        prior_valid_time = time
-
-        id = w.assimilate(time, prior_init_time,
-                          prior_valid_time, prior_path_exp, depends_on=id)
-
-        # 1) Set posterior = prior
-        id = w.prepare_IC_from_prior(
-            prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
-
-        # 2) Update posterior += updates from assimilation
-        id = w.update_IC_from_DA(time, depends_on=id)
-
-        # How long shall we integrate?
-        timedelta_integrate = timedelta_btw_assim
-        output_restart_interval = timedelta_btw_assim.total_seconds()/60
-        if time == last_assim_time:
-            timedelta_integrate = forecast_until - \
-                last_assim_time  # dt.timedelta(hours=4)
-            output_restart_interval = 9999  # no restart file after last assim
-
-        # 3) Run WRF ensemble
-        id = w.run_ENS(begin=time,  # start integration from here
-                       end=time + timedelta_integrate,  # integrate until here
-                       output_restart_interval=output_restart_interval,
-                       depends_on=id)
-        
-        # as we have WRF output, we can use own exp path as prior
-        prior_path_exp = w.cluster.archivedir
-
-        # depends on the RTTOV-WRF repository
-        id = w.create_satimages(time, depends_on=id)
-
-        # increment time
-        time += timedelta_btw_assim
-
-        # update time variables
-        prior_init_time = time - timedelta_btw_assim
-
-    # not publicly available
-    # w.verify_sat(id)
-    # w.verify_wrf(id)
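
The loop's time bookkeeping, extracted and runnable on its own with the values from the template above:

    import datetime as dt

    timedelta_btw_assim = dt.timedelta(minutes=15)
    time = dt.datetime(2008, 7, 30, 12)
    last_assim_time = dt.datetime(2008, 7, 30, 13)

    while time <= last_assim_time:
        prior_valid_time = time          # prior is valid at the analysis time
        time += timedelta_btw_assim      # next analysis time
        prior_init_time = time - timedelta_btw_assim

    # five analyses (12:00, 12:15, ..., 13:00); afterwards time == 13:15
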
diff --git a/dartwrf/assimilate.py b/dartwrf/assimilate.py
index 71ebb75a20da3b8120cae39c38eb4087e0678350..636806111e4be76b00586a5577aebabfbfb939e4 100755
--- a/dartwrf/assimilate.py
+++ b/dartwrf/assimilate.py
@@ -7,48 +7,12 @@ import time as time_module
 import datetime as dt
 import numpy as np
 
-from dartwrf.utils import symlink, copy, mkdir, try_remove, print, shell, write_txt, load_dict
+from dartwrf.utils import Config, symlink, copy, try_remove, print, shell, write_txt, load_dict
 from dartwrf import wrfout_add_geo
 from dartwrf.obs import error_models as err
-from dartwrf.obs import obsseq
+from dartwrf.obs import obsseq, obskind_read
 from dartwrf import dart_nml
 
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
-
-def link_DART_exe():
-    """Link the DART executables to the run_DART directory
-    """
-    bins = ['perfect_model_obs', 'filter', 'obs_diag', 'obs_seq_to_netcdf']
-    for b in bins:
-        symlink(os.path.join(cluster.dart_srcdir, b),
-                os.path.join(cluster.dart_rundir, b))
-
-    symlink(cluster.dart_srcdir+'/../../../assimilation_code/programs/gen_sampling_err_table/'
-            + 'work/sampling_error_correction_table.nc',
-            cluster.dart_rundir+'/sampling_error_correction_table.nc')
-
-def link_RTTOV_files():
-    """Link required files for running RTTOV to run_DART
-    """
-    if cluster.rttov_srcdir != False:
-        rttov_files = ['rttov13pred54L/rtcoef_msg_4_seviri_o3.dat',
-                       'mfasis_lut/rttov_mfasis_cld_msg_4_seviri_deff.H5',
-                       'cldaer_visir/sccldcoef_msg_4_seviri.dat']
-
-        for f_src in rttov_files:
-            destname = os.path.basename(f_src)
-            if 'rtcoef' in f_src:
-                destname = 'rtcoef_msg_4_seviri.dat'
-
-            symlink(cluster.rttov_srcdir + f_src,
-                    cluster.dart_rundir+'/'+destname)
-
-        symlink(cluster.dart_rundir+'/rttov_mfasis_cld_msg_4_seviri_deff.H5',
-                cluster.dart_rundir+'/rttov_mfasis_cld_msg_4_seviri.H5')  # use deff, not OPAC
-
-        symlink(cluster.dart_srcdir+'/../../../observations/forward_operators/rttov_sensor_db.csv',
-                cluster.dart_rundir+'/rttov_sensor_db.csv')
 
 def prepare_DART_grid_template():
     """Prepare DART grid template wrfinput_d01 file from a prior file
@@ -57,9 +21,14 @@ def prepare_DART_grid_template():
     No data except grid info will be read from this file.
     The grid information must match exactly with the prior file "wrfout_d01"
     """
-    copy(cluster.dart_rundir+'/prior_ens1/wrfout_d01', cluster.dart_rundir + "/wrfinput_d01")
-    if cluster.geo_em_forecast:
-        wrfout_add_geo.run(cluster.geo_em_forecast, cluster.dart_rundir + "/wrfinput_d01")
+    f_wrfout_dummy = cfg.dir_dart_run+'/prior_ens1/wrfout_d01'
+    if os.path.exists(f_wrfout_dummy):
+        copy(f_wrfout_dummy, cfg.dir_dart_run + "/wrfinput_d01")
+    
+        if cfg.geo_em_forecast:
+            wrfout_add_geo.run(cfg, cfg.geo_em_forecast, cfg.dir_dart_run + "/wrfinput_d01")
+    else:
+        warnings.warn(f_wrfout_dummy + ' does not exist; cannot prepare the DART grid template')
 
 def prepare_prior_ensemble(assim_time, prior_init_time, prior_valid_time, prior_path_exp):
     """Prepares DART files for running filter
@@ -71,29 +40,31 @@ def prepare_prior_ensemble(assim_time, prior_init_time, prior_valid_time, prior_
     - removes probably pre-existing files which could lead to problems
     """
     print("prepare prior ensemble")
-    for iens in range(1, exp.ensemble_size + 1):
+    for iens in range(1, cfg.ensemble_size + 1):
 
         print("link wrfout file to DART background file")
-        wrfout_run = (
+        f_wrfout = (
             prior_path_exp
-            + prior_init_time.strftime(cluster.pattern_init_time)
+            + prior_init_time.strftime(cfg.pattern_init_time)
             + str(iens)
-            + prior_valid_time.strftime(cluster.wrfout_format)
+            + prior_valid_time.strftime(cfg.wrfout_format)
         )
-        dart_ensdir = cluster.dart_rundir + "/prior_ens" + str(iens)
+        dart_ensdir = cfg.dir_dart_run + "/prior_ens" + str(iens)
         wrfout_dart = dart_ensdir + "/wrfout_d01"
 
         # copy prior ens file from archived wrfout files (prior_path_exp)
-        print("link", wrfout_run, "to", wrfout_dart)
-        symlink(wrfout_run, wrfout_dart)
+        print("link", f_wrfout, "to", wrfout_dart)
+        if not os.path.isfile(f_wrfout):
+            raise FileNotFoundError(f_wrfout + " does not exist")
+        symlink(f_wrfout, wrfout_dart)
 
         # ensure prior time matches assim time
         # can be intentionally different, e.g. by using a prior for a different time
         if assim_time != prior_valid_time:
-            copy(wrfout_run, wrfout_dart)
+            copy(f_wrfout, wrfout_dart)
             print("overwriting time in prior from nature wrfout")
-            shell(cluster.ncks + " -A -v XTIME,Times " +
-                  cluster.dart_rundir+"/wrfout_d01 " + wrfout_dart)
+            shell(cfg.ncks + " -A -v XTIME,Times " +
+                  cfg.dir_dart_run+"/wrfout_d01 " + wrfout_dart)
 
         # this seems to be necessary (else wrong level selection)
         #if cluster.geo_em_forecast:
@@ -103,48 +74,48 @@ def prepare_prior_ensemble(assim_time, prior_init_time, prior_valid_time, prior_
     write_list_of_outputfiles()
 
     print("removing preassim and filter_restart")
-    os.system("rm -rf " + cluster.dart_rundir + "/preassim_*")
-    os.system("rm -rf " + cluster.dart_rundir + "/filter_restart*")
-    os.system("rm -rf " + cluster.dart_rundir + "/output_mean*")
-    os.system("rm -rf " + cluster.dart_rundir + "/output_sd*")
-    os.system("rm -rf " + cluster.dart_rundir + "/perfect_output_*")
-    os.system("rm -rf " + cluster.dart_rundir + "/obs_seq.fina*")
+    os.system("rm -rf " + cfg.dir_dart_run + "/preassim_*")
+    os.system("rm -rf " + cfg.dir_dart_run + "/filter_restart*")
+    os.system("rm -rf " + cfg.dir_dart_run + "/output_mean*")
+    os.system("rm -rf " + cfg.dir_dart_run + "/output_sd*")
+    os.system("rm -rf " + cfg.dir_dart_run + "/perfect_output_*")
+    os.system("rm -rf " + cfg.dir_dart_run + "/obs_seq.fina*")
 
 
 def use_linked_files_as_prior():
     """Instruct DART to use the prior ensemble as input
     """
     files = []
-    for iens in range(1, exp.ensemble_size+1):
+    for iens in range(1, cfg.ensemble_size+1):
         files.append("./prior_ens" + str(iens) + "/wrfout_d01")
-    write_txt(files, cluster.dart_rundir+'/input_list.txt')
+    write_txt(files, cfg.dir_dart_run+'/input_list.txt')
 
 
 def use_filter_output_as_prior():
     """Use the last posterior as input for DART, e.g. to evaluate the analysis in observation space
     """
     files = []
-    for iens in range(1, exp.ensemble_size+1):
-        f_new = cluster.dart_rundir+'/prior_ens'+str(iens)+'/wrfout_d01'
+    for iens in range(1, cfg.ensemble_size+1):
+        f_new = cfg.dir_dart_run+'/prior_ens'+str(iens)+'/wrfout_d01'
         try:
             os.remove(f_new)
         except:
             pass
-        os.rename(cluster.dart_rundir+'/filter_restart_d01.' +
+        os.rename(cfg.dir_dart_run+'/filter_restart_d01.' +
                   str(iens).zfill(4), f_new)
         files.append(f_new)
 
-    write_txt(files, cluster.dart_rundir+'/input_list.txt')
+    write_txt(files, cfg.dir_dart_run+'/input_list.txt')
 
 
 def write_list_of_outputfiles():
     files = []
-    for iens in range(1, exp.ensemble_size+1):
+    for iens in range(1, cfg.ensemble_size+1):
         files.append("./filter_restart_d01." + str(iens).zfill(4))
-    write_txt(files, cluster.dart_rundir+'/output_list.txt')
+    write_txt(files, cfg.dir_dart_run+'/output_list.txt')
 
 
-def filter(nproc=12):
+def filter():
     """Calls DART ./filter program
 
     Args:
@@ -153,43 +124,42 @@ def filter(nproc=12):
     Returns:
         None    (writes to file)
     """
-    if hasattr(cluster, 'max_nproc'):
-        nproc = cluster.max_nproc
+    nproc = cfg.max_nproc
         
     print("time now", dt.datetime.now())
     print("running filter")
-    os.chdir(cluster.dart_rundir)
-    try_remove(cluster.dart_rundir + "/obs_seq.final")
+    os.chdir(cfg.dir_dart_run)
+    try_remove(cfg.dir_dart_run + "/obs_seq.final")
 
     t = time_module.time()
     if nproc > 1:
         # -genv I_MPI_PIN_PROCESSOR_LIST=0-"+str(int(nproc) - 1)
-        shell(cluster.dart_modules+"; mpirun -np " +
+        shell(cfg.dart_modules+"; mpirun -np " +
               str(int(nproc))+" ./filter > log.filter")
     else:
-        shell(cluster.dart_modules+"; ./filter > log.filter")
+        shell(cfg.dart_modules+"; ./filter > log.filter")
     print("./filter took", int(time_module.time() - t), "seconds")
 
-    if not os.path.isfile(cluster.dart_rundir + "/obs_seq.final"):
+    if not os.path.isfile(cfg.dir_dart_run + "/obs_seq.final"):
         raise RuntimeError(
             "obs_seq.final does not exist in run_DART directory. ",
-            "Check log file at " + cluster.dart_rundir + "/log.filter")
+            "Check log file at " + cfg.dir_dart_run + "/log.filter")
 
 
 def archive_filteroutput(time):
     """Archive filter output files (filter_restart, preassim, postassim, output_mean, output_sd)
     """
     # archive diagnostics
-    dir_out = cluster.archivedir + time.strftime(cluster.pattern_init_time)
+    dir_out = cfg.dir_archive + time.strftime(cfg.pattern_init_time)
     os.makedirs(dir_out, exist_ok=True)
 
     # copy input.nml to archive
-    copy(cluster.dart_rundir + "/input.nml", dir_out + "/input.nml")
+    copy(cfg.dir_dart_run + "/input.nml", dir_out + "/input.nml")
 
     # copy filter_restart files to archive (initial condition for next run)
-    for iens in range(1, exp.ensemble_size + 1):  # single members
+    for iens in range(1, cfg.ensemble_size + 1):  # single members
         copy(
-            cluster.dart_rundir + "/filter_restart_d01." + str(iens).zfill(4),
+            cfg.dir_dart_run + "/filter_restart_d01." + str(iens).zfill(4),
             dir_out + "/filter_restart_d01." + str(iens).zfill(4),
         )
 
@@ -198,7 +168,7 @@ def archive_filteroutput(time):
               "postassim_mean.nc", "postassim_sd.nc",
               "output_mean.nc", "output_sd.nc"]:
         try:
-            copy(cluster.dart_rundir + "/" + f, dir_out + "/" + f)
+            copy(cfg.dir_dart_run + "/" + f, dir_out + "/" + f)
         except:
             warnings.warn(f+" not found")
 
@@ -206,14 +176,14 @@ def archive_filteroutput(time):
         try:
             ftypes = ['preassim', 'postassim']
             for ftype in ftypes:
-                for iens in range(1, exp.ensemble_size + 1):
+                for iens in range(1, cfg.ensemble_size + 1):
                     fname = "/"+ftype+"_member_" + str(iens).zfill(4) + ".nc"
-                    copy(cluster.dart_rundir + fname, dir_out + fname)
+                    copy(cfg.dir_dart_run + fname, dir_out + fname)
         except Exception as e:
             warnings.warn(str(e))
 
 
-def get_parametrized_error(obscfg, osf_prior):
+def get_parametrized_error(obscfg, osf_prior) -> np.ndarray: # type: ignore
     """Calculate the parametrized error for an ObsConfig (one obs type)
 
     Args:
@@ -253,9 +223,9 @@ def set_obserr_assimilate_in_obsseqout(oso, outfile="./obs_seq.out"):
         osf_prior (ObsSeq): python representation of obs_seq.final (output of filter in evaluate-mode without posterior)
                         contains prior values; used for parameterized errors
     """
-    from dartwrf.obs.obskind import obs_kind_nrs
+    obs_kind_nrs = obskind_read(cfg.dir_dart_src)
 
-    for obscfg in exp.observations:
+    for obscfg in cfg.assimilate_these_observations:
         kind_str = obscfg['kind']  # e.g. 'RADIOSONDE_TEMPERATURE'
         kind = obs_kind_nrs[kind_str]  # e.g. 263
 
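
obskind_read() (imported at the top of this file) replaces the static dartwrf.obs.obskind table, but its implementation is not part of this diff. A hypothetical sketch of such a reader, assuming the name-to-number mapping is recovered from the obs_kind module generated in the DART build tree; the path and pattern are assumptions:

    import os
    import re

    def obskind_read_sketch(dir_dart_src: str) -> dict:
        # scan for lines like "integer, parameter :: RADIOSONDE_TEMPERATURE = 14"
        f90 = os.path.join(dir_dart_src, '../../../assimilation_code/'
                           'modules/observations/obs_kind_mod.f90')
        obs_kind_nrs = {}
        with open(f90) as f:
            for line in f:
                m = re.search(r'::\s*(\w+)\s*=\s*(\d+)', line)
                if m:
                    obs_kind_nrs[m.group(1)] = int(m.group(2))
        return obs_kind_nrs
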
@@ -266,9 +236,9 @@ def set_obserr_assimilate_in_obsseqout(oso, outfile="./obs_seq.out"):
             if obscfg["error_assimilate"] == False:
                 # get a parametrized error for this observation type
 
-                f_osf = cluster.dart_rundir + "/obs_seq.final"
+                f_osf = cfg.dir_dart_run + "/obs_seq.final"
                 if not os.path.isfile(f_osf):
-                    evaluate(time, f_out_pattern=f_osf)
+                    evaluate(cfg.time, f_out_pattern=f_osf)
 
                 # this file was generated by `evaluate()`
                 osf_prior = obsseq.ObsSeq(f_osf)
@@ -300,10 +270,10 @@ def reject_small_FGD(time, oso):
         The pre-existing obs_seq.out will be archived.
         The new obs_seq.out will be written to the DART run directory.
     """
-    osf_prior = obsseq.ObsSeq(cluster.dart_rundir + "/obs_seq.final")
+    osf_prior = obsseq.ObsSeq(cfg.dir_dart_run + "/obs_seq.final")
 
     # obs should be superobbed already!
-    for i, obscfg in enumerate(exp.observations):
+    for i, obscfg in enumerate(cfg.assimilate_these_observations):
         if i > 0:
             raise NotImplementedError(
                 'Multiple observation types -> might not work')
@@ -351,12 +321,12 @@ def reject_small_FGD(time, oso):
         print('QC removed', n_obs_orig-len(oso.df), 'observations')
 
         # archive obs_seq.out before QC (contains all observations, including later removed ones)
-        f_out_archive = time.strftime(cluster.pattern_obs_seq_out)+"-beforeQC"
+        f_out_archive = time.strftime(cfg.pattern_obs_seq_out)+"-beforeQC"
         os.makedirs(os.path.dirname(f_out_archive), exist_ok=True)
-        copy(cluster.dart_rundir + "/obs_seq.out", f_out_archive)
+        copy(cfg.dir_dart_run + "/obs_seq.out", f_out_archive)
 
         # for assimilation later
-        f_out_dart = cluster.dart_rundir+'/obs_seq.out'
+        f_out_dart = cfg.dir_dart_run+'/obs_seq.out'
         oso.to_dart(f_out_dart)
         print('saved', f_out_dart)
 
@@ -364,7 +334,7 @@ def reject_small_FGD(time, oso):
 def evaluate(assim_time,
              obs_seq_out=False,
              prior_is_filter_output=False,
-             f_out_pattern=cluster.pattern_obs_seq_final+"-evaluate"):
+             f_out_pattern: str = './obs_seq.final'):
     """Calculates either prior or posterior obs space values.
 
     Note: Depends on a prepared input_list.txt, which defines the ensemble (prior or posterior).
@@ -381,7 +351,7 @@ def evaluate(assim_time,
     Returns
         None (writes file)
     """
-    prepare_run_DART_folder()
+    prepare_run_DART_folder(cfg)
 
     if prior_is_filter_output:
         print('using filter_restart files in run_DART as prior')
@@ -392,16 +362,15 @@ def evaluate(assim_time,
 
     # the observations at which to evaluate the prior at
     if obs_seq_out:
-        copy(obs_seq_out, cluster.dart_rundir +
-             '/obs_seq.out')  # user defined file
+        copy(obs_seq_out, cfg.dir_dart_run + '/obs_seq.out')  # user defined file
     else:
         # use existing obs_seq.out file currently present in the run_DART directory
-        if not os.path.isfile(cluster.dart_rundir+'/obs_seq.out'):
-            raise RuntimeError(cluster.dart_rundir +
+        if not os.path.isfile(cfg.dir_dart_run+'/obs_seq.out'):
+            raise RuntimeError(cfg.dir_dart_run +
                                '/obs_seq.out does not exist')
 
-    dart_nml.write_namelist(just_prior_values=True)
-    filter(nproc=cluster.max_nproc)
+    dart_nml.write_namelist(cfg, just_prior_values=True)
+    filter()
     archive_filter_diagnostics(assim_time, f_out_pattern)
 
 
@@ -412,18 +381,18 @@ def archive_filter_diagnostics(time, f_out_pattern):
 
     dir_out = os.path.dirname(f_archive)
     os.makedirs(dir_out, exist_ok=True)
-    copy(cluster.dart_rundir + "/obs_seq.final", f_archive)
+    copy(cfg.dir_dart_run + "/obs_seq.final", f_archive)
     print(f_archive, "saved.")
 
 
 def txtlink_to_prior(time, prior_init_time, prior_path_exp):
     """For reproducibility, write the path of the prior to a txt file
     """
-    os.makedirs(cluster.archivedir +
+    os.makedirs(cfg.dir_archive +
                 time.strftime('/%Y-%m-%d_%H:%M/'), exist_ok=True)
     os.system('echo "'+prior_path_exp+'\n'+prior_init_time.strftime('/%Y-%m-%d_%H:%M/')
               + '\n'+time.strftime('/wrfrst_d01_%Y-%m-%d_%H:%M:%S')+'" > '
-                + cluster.archivedir + time.strftime('/%Y-%m-%d_%H:%M/')+'link_to_prior.txt')
+                + cfg.dir_archive + time.strftime('/%Y-%m-%d_%H:%M/')+'link_to_prior.txt')
 
 
 def prepare_inflation_2(time, prior_init_time):
@@ -436,13 +405,13 @@ def prepare_inflation_2(time, prior_init_time):
         time (datetime): time of assimilation
         prior_init_time (datetime): time of prior assimilation
     """
-    dir_priorinf = cluster.archivedir + \
-        prior_init_time.strftime(cluster.pattern_init_time)
+    dir_priorinf = cfg.dir_archive + \
+        prior_init_time.strftime(cfg.pattern_init_time)
 
-    f_default = cluster.archive_base + "/input_priorinf_mean.nc"
+    f_default = cfg.dir_archive.replace('<exp>', cfg.name) + "/input_priorinf_mean.nc"
     f_prior = dir_priorinf + \
         time.strftime("/%Y-%m-%d_%H:%M_output_priorinf_mean.nc")
-    f_new = cluster.dart_rundir + '/input_priorinf_mean.nc'
+    f_new = cfg.dir_dart_run + '/input_priorinf_mean.nc'
 
     if os.path.isfile(f_prior):
         copy(f_prior, f_new)
@@ -451,10 +420,10 @@ def prepare_inflation_2(time, prior_init_time):
         warnings.warn(f_prior + ' does not exist. Using default file instead.')
         copy(f_default, f_new)
 
-    f_default = cluster.archive_base + "/input_priorinf_sd.nc"
+    f_default = cfg.dir_archive.replace('<exp>', cfg.name) + "/input_priorinf_sd.nc"
     f_prior = dir_priorinf + \
         time.strftime("/%Y-%m-%d_%H:%M_output_priorinf_sd.nc")
-    f_new = cluster.dart_rundir + '/input_priorinf_sd.nc'
+    f_new = cfg.dir_dart_run + '/input_priorinf_sd.nc'
 
     if os.path.isfile(f_prior):
         copy(f_prior, f_new)
@@ -465,36 +434,71 @@ def prepare_inflation_2(time, prior_init_time):
 
 
 def archive_inflation_2(time):
-    dir_output = cluster.archivedir + time.strftime(cluster.pattern_init_time)
+    dir_output = cfg.dir_archive + time.strftime(cfg.pattern_init_time)
     os.makedirs(dir_output, exist_ok=True)
 
-    f_output = cluster.dart_rundir + '/output_priorinf_sd.nc'
+    f_output = cfg.dir_dart_run + '/output_priorinf_sd.nc'
     f_archive = dir_output + \
         time.strftime("/%Y-%m-%d_%H:%M_output_priorinf_sd.nc")
     copy(f_output, f_archive)
     print(f_archive, 'saved')
 
-    f_output = cluster.dart_rundir + '/output_priorinf_mean.nc'
+    f_output = cfg.dir_dart_run + '/output_priorinf_mean.nc'
     f_archive = dir_output + \
         time.strftime("/%Y-%m-%d_%H:%M_output_priorinf_mean.nc")
     copy(f_output, f_archive)
     print(f_archive, 'saved')
 
 
-def prepare_run_DART_folder(prior_path_exp=False):
+def prepare_run_DART_folder(cfg: Config):
     # create directory to run DART in
-    os.makedirs(cluster.dart_rundir, exist_ok=True)
+
+    def __link_RTTOV_files():
+        """Link required files for running RTTOV to run_DART
+        """
+        if cfg.dir_rttov_src != False:
+            rttov_files = ['rttov13pred54L/rtcoef_msg_4_seviri_o3.dat',
+                        'mfasis_lut/rttov_mfasis_cld_msg_4_seviri_deff.H5',
+                        'cldaer_visir/sccldcoef_msg_4_seviri.dat']
+
+            for f_src in rttov_files:
+                destname = os.path.basename(f_src)
+                if 'rtcoef' in f_src:
+                    destname = 'rtcoef_msg_4_seviri.dat'
+
+                symlink(cfg.dir_rttov_src + f_src,
+                        cfg.dir_dart_run+'/'+destname)
+
+            symlink(cfg.dir_dart_run+'/rttov_mfasis_cld_msg_4_seviri_deff.H5',
+                    cfg.dir_dart_run+'/rttov_mfasis_cld_msg_4_seviri.H5')  # use deff, not OPAC
+
+            symlink(cfg.dir_dart_src+'/../../../observations/forward_operators/rttov_sensor_db.csv',
+                    cfg.dir_dart_run+'/rttov_sensor_db.csv')
+            
+    def __link_DART_exe():
+        """Link the DART executables to the run_DART directory
+        """
+        bins = ['perfect_model_obs', 'filter', 'obs_diag', 'obs_seq_to_netcdf']
+        for b in bins:
+            symlink(os.path.join(cfg.dir_dart_src, b),
+                    os.path.join(cfg.dir_dart_run, b))
+
+        symlink(cfg.dir_dart_src+'/../../../assimilation_code/programs/gen_sampling_err_table/'
+                + 'work/sampling_error_correction_table.nc',
+                cfg.dir_dart_run+'/sampling_error_correction_table.nc')
+    
+    #########################
+    # remove any remains of a previous run
+    os.makedirs(cfg.dir_dart_run, exist_ok=True)
+    os.chdir(cfg.dir_dart_run)
+    os.system("rm -f input.nml obs_seq.in obs_seq.out obs_seq.out-orig obs_seq.final output_* input_*")
     
-    link_DART_exe()
+    __link_DART_exe()
     
-    for obscfg in exp.observations:
+    for obscfg in cfg.assimilate_these_observations:
         if 'sat_channel' in obscfg:
-            link_RTTOV_files()
-            continue
-        
-    # remove any remains of a previous run
-    os.chdir(cluster.dart_rundir)
-    os.system("rm -f input.nml obs_seq.in obs_seq.out obs_seq.out-orig obs_seq.final")
+            __link_RTTOV_files()
+            continue  # only need to link RTTOV files once
 
 
 def get_obsseq_out(time, prior_path_exp, prior_init_time, prior_valid_time, lag=None):
@@ -514,64 +518,56 @@ def get_obsseq_out(time, prior_path_exp, prior_init_time, prior_valid_time, lag=
     Returns:
         obsseq.ObsSeq
     """
-    # access time-dependent configuration
-    try:
-        ACF_config = load_dict(
-            time.strftime(cluster.scripts_rundir+'/ACF_config_%H%M.pkl'))
-        print('using ACF_config:', ACF_config)
-        exp.ACF = True
-    except:
-        pass
+    # does the config request assimilation of precomputed cloud fractions?
+    use_ACF = 'assimilate_cloudfractions' in cfg
 
     oso = None
-    if isinstance(exp.use_existing_obsseq, str):
+    if isinstance(cfg.use_existing_obsseq, str):
         # assume that the user wants to use an existing obs_seq.out file
 
-        f_obsseq = time.strftime(exp.use_existing_obsseq)
+        f_obsseq = time.strftime(cfg.use_existing_obsseq)
         if os.path.isfile(f_obsseq):
             # copy to run_DART folder
-            copy(f_obsseq, cluster.dart_rundir+'/obs_seq.out')
-            print(f_obsseq, 'copied to', cluster.dart_rundir+'/obs_seq.out')
+            copy(f_obsseq, cfg.dir_dart_run+'/obs_seq.out')
+            print(f_obsseq, 'copied to', cfg.dir_dart_run+'/obs_seq.out')
 
         else:
             # explain the error if the file does not exist
-            raise IOError('exp.use_existing_obsseq is not False. \n'
+            raise IOError('cfg.use_existing_obsseq is not False. \n'
                           + 'In this case, use_existing_obsseq should be a file path (wildcards %H, %M allowed)!\n'
-                          + 'But there is no file with this pattern: '+str(exp.use_existing_obsseq))
+                          + 'But there is no file with this pattern: '+str(cfg.use_existing_obsseq))
 
-    elif hasattr(exp, 'ACF'):
+    elif use_ACF:
         # prepare observations with precomputed FO
-        if lag != None:
-            prior_valid_time += lag
-            time += lag
         pattern_prior = prior_path_exp + prior_init_time.strftime(
-            '/%Y-%m-%d_%H:%M/<iens>/') + prior_valid_time.strftime(ACF_config.pop('first_guess_pattern'))
+            '/%Y-%m-%d_%H:%M/<iens>/') + prior_valid_time.strftime(cfg.first_guess_pattern)
 
         from CloudFractionDA import obsseqout as cfoso
         cfoso.write_obsseq(time, pattern_prior, 
-                           time.strftime(ACF_config.pop('observed_data')), 
-                           ACF_config.pop('var'),
-                           path_output=cluster.dart_rundir + "/obs_seq.out",
-                           **ACF_config)
+                           time.strftime(cfg.obs_file_pattern), 
+                           cfg.cloudfraction_variable,
+                           path_output=cfg.dir_dart_run + "/obs_seq.out",
+        )  # remaining ACF settings were formerly forwarded via **ACF_config
 
     else:
         # do NOT use an existing obs_seq.out file
         # but generate observations with new observation noise
         from dartwrf.obs import create_obsseq_out as osq_out
-        oso = osq_out.generate_new_obsseq_out(time)
+        oso = osq_out.generate_new_obsseq_out(cfg)
 
     # copy to sim_archive
-    f_obsseq_archive = time.strftime(cluster.pattern_obs_seq_out)
+    f_obsseq_archive = time.strftime(cfg.pattern_obs_seq_out)
     os.makedirs(os.path.dirname(f_obsseq_archive), exist_ok=True)
-    copy(cluster.dart_rundir+'/obs_seq.out', f_obsseq_archive)
+    copy(cfg.dir_dart_run+'/obs_seq.out', f_obsseq_archive)
 
     # read so that we can return it
     if oso is None:
-        oso = obsseq.ObsSeq(cluster.dart_rundir + "/obs_seq.out")
+        oso = obsseq.ObsSeq(cfg.dir_dart_run + "/obs_seq.out")
     return oso
 
 
-def main(time, prior_init_time, prior_valid_time, prior_path_exp):
+def main(cfg: Config):
     """Assimilate observations
     as defined in the experiment configuration
     for a certain timestamp of the nature run
@@ -593,10 +589,16 @@ def main(time, prior_init_time, prior_valid_time, prior_path_exp):
     Returns:
         None
     """
+    # read config
+    time = cfg.time
+    prior_init_time = cfg.prior_init_time
+    prior_valid_time = cfg.prior_valid_time
+    prior_path_exp = cfg.prior_path_exp
+    
     # do_reject_smallFGD: triggers additional evaluations of prior & posterior
-    do_reject_smallFGD = getattr(exp, "do_reject_smallFGD", False)
-    prepare_run_DART_folder(prior_path_exp)
-    nml = dart_nml.write_namelist()
+    do_reject_smallFGD = getattr(cfg, "do_reject_smallFGD", False)
+    prepare_run_DART_folder(cfg)
+    nml = dart_nml.write_namelist(cfg)
 
     print(" get observations with specified obs-error")
     #lag = dt.timedelta(minutes=15)
@@ -609,10 +611,10 @@ def main(time, prior_init_time, prior_valid_time, prior_path_exp):
     # additional evaluation of prior (in case some observations are rejected)
     if do_reject_smallFGD:
         print(" evaluate prior for all observations (incl rejected) ")
-        evaluate(time, f_out_pattern=cluster.pattern_obs_seq_final+"-evaluate_prior")
+        evaluate(time, f_out_pattern=cfg.pattern_obs_seq_final+"-evaluate_prior")
 
     print(" assign observation-errors for assimilation ")
-    set_obserr_assimilate_in_obsseqout(oso, outfile=cluster.dart_rundir + "/obs_seq.out")
+    set_obserr_assimilate_in_obsseqout(oso, outfile=cfg.dir_dart_run + "/obs_seq.out")
 
     if do_reject_smallFGD:
         print(" reject observations? ")
@@ -623,28 +625,29 @@ def main(time, prior_init_time, prior_valid_time, prior_path_exp):
         prepare_inflation_2(time, prior_init_time)
 
     print(" run filter ")
-    dart_nml.write_namelist()
+    dart_nml.write_namelist(cfg)
     filter()
     archive_filteroutput(time)
-    archive_filter_diagnostics(time, cluster.pattern_obs_seq_final)
+    archive_filter_diagnostics(time, cfg.pattern_obs_seq_final)
     txtlink_to_prior(time, prior_init_time, prior_path_exp)
 
     if prior_inflation_type == '2':
         archive_inflation_2(time)
 
-    if exp.evaluate_posterior_in_obs_space:
-        print(" evaluate posterior in observation-space")
-        f_oso = time.strftime(cluster.pattern_obs_seq_out)
-        if do_reject_smallFGD:
-            # includes all observations (including rejected ones in reject_small_FGD())
-            f_oso += '-beforeQC'
+    if 'evaluate_posterior_in_obs_space' in cfg:
+        if cfg.evaluate_posterior_in_obs_space:
+            print(" evaluate posterior in observation-space")
+            f_oso = time.strftime(cfg.pattern_obs_seq_out)
+            if do_reject_smallFGD:
+                # includes all observations (including rejected ones in reject_small_FGD())
+                f_oso += '-beforeQC'
 
-        # evaluate() separately after ./filter is crucial when assimilating cloud variables
-        # as the obs_seq.final returned by ./filter was produced using un-clamped cloud variables
-        evaluate(time,
-                 obs_seq_out=f_oso,
-                 prior_is_filter_output=True,
-                 f_out_pattern=cluster.pattern_obs_seq_final+"-evaluate")
+            # evaluate() separately after ./filter is crucial when assimilating cloud variables
+            # as the obs_seq.final returned by ./filter was produced using un-clamped cloud variables
+            evaluate(time,
+                    obs_seq_out=f_oso,
+                    prior_is_filter_output=True,
+                    f_out_pattern=cfg.pattern_obs_seq_final+"-evaluate")
 
 
 if __name__ == "__main__":
@@ -653,10 +656,5 @@ if __name__ == "__main__":
     Example:
         python assimilate.py <config-file>
     """
-
-    time = dt.datetime.strptime(sys.argv[1], "%Y-%m-%d_%H:%M")
-    prior_init_time = dt.datetime.strptime(sys.argv[2], "%Y-%m-%d_%H:%M")
-    prior_valid_time = dt.datetime.strptime(sys.argv[3], "%Y-%m-%d_%H:%M")
-    prior_path_exp = str(sys.argv[4])
-
-    main(time, prior_init_time, prior_valid_time, prior_path_exp)
+    cfg = Config.from_file(sys.argv[1])
+    main(cfg)
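
The membership tests added above ('assimilate_cloudfractions' in cfg, 'evaluate_posterior_in_obs_space' in cfg) imply that Config supports the in-operator over its attributes. A minimal sketch of that behaviour, assuming an attribute-backed implementation (the real class lives in dartwrf/utils.py and is not shown here):

    class ConfigSketch:
        # illustrative stand-in for dartwrf.utils.Config
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)

        def __contains__(self, name):
            return name in self.__dict__

    cfg = ConfigSketch(evaluate_posterior_in_obs_space=True)
    assert 'evaluate_posterior_in_obs_space' in cfg
    assert cfg.evaluate_posterior_in_obs_space
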
diff --git a/dartwrf/dart_nml.py b/dartwrf/dart_nml.py
index 08204b7af47c93778cea9bf5e58bbd55107e688c..1c63560348471a31d1060d921e318505db85c6fb 100644
--- a/dartwrf/dart_nml.py
+++ b/dartwrf/dart_nml.py
@@ -1,14 +1,13 @@
+import sys
 import warnings
 import numpy as np
-from dartwrf.utils import append_file
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
+from dartwrf.utils import append_file, Config
 
 # 6370 for earth radius in km
 radius_periodic_km = 6370
 
 
-def read_namelist(filepath):
+def read_namelist(filepath: str) -> dict:
     """Read the DART namelist file into a dictionary.
 
     Args:
@@ -118,7 +117,7 @@ def write_namelist_from_dict(d, filepath):
                 max_width_of_parameter_name = max([len(p) for p in parameters])
                 width = max_width_of_parameter_name + 1
             except:
-                width = None
+                width = 12  # fallback width if parameter names can't be measured
 
             for parameter in parameters:
                 lines = d[section][parameter]
@@ -188,7 +187,7 @@ def write_namelist_from_dict(d, filepath):
             f.write('   /\n\n')
 
 
-def _get_horiz_localization():
+def _get_horiz_localization(cfg: Config):
     """Compile the list of localizations for the DART namelist variables
 
     Args:
@@ -205,7 +204,7 @@ def _get_horiz_localization():
     l_obstypes_all = []  # list of all observation types
     l_loc_horiz_rad = []  # list of respective horizontal localization values
 
-    for obscfg in exp.observations:
+    for obscfg in cfg.assimilate_these_observations:
         l_obstypes_all.append(obscfg["kind"])
 
         # compute horizontal localization value
@@ -219,7 +218,7 @@ def _get_horiz_localization():
     return l_obstypes_all, l_loc_horiz_rad
 
 
-def _get_vertical_localization():
+def _get_vertical_localization(cfg: Config):
     """Compile the list of vertical localizations for the DART namelist variables
 
     Vertical localization can be defined in section &location_nml of 'input.nml'
@@ -254,7 +253,7 @@ def _get_vertical_localization():
     vert_norm_levels = []
     vert_norm_pressures = []
 
-    for obscfg in exp.observations:
+    for obscfg in cfg.assimilate_these_observations:
 
         # if both loc_vert_km and loc_vert_scaleheight are False or not defined, then continue without localization
         if obscfg.get("loc_vert_km", False) == False and obscfg.get("loc_vert_scaleheight", False) == False:
@@ -291,7 +290,7 @@ def _get_vertical_localization():
 
                 # do we have vertical localization at all?
                 # check parameter horiz_dist_only == true
-                if exp.dart_nml['&location_nml']['horiz_dist_only'] == '.true.':
+                if cfg.dart_nml['&location_nml']['horiz_dist_only'] == '.true.':
                     # no vertical localization => set all to dummy values
                     vert_norm_heights.append(-1)
                     vert_norm_scale_heights.append(-1)
@@ -304,7 +303,7 @@ def _get_vertical_localization():
     return l_obstypes_vert, vert_norm_heights, vert_norm_scale_heights, vert_norm_levels, vert_norm_pressures
 
 
-def write_namelist(just_prior_values=False):
+def write_namelist(cfg: Config, just_prior_values=False) -> dict:
     """Write a DART namelist file ('input.nml')
 
     1. Uses the default namelist (from the DART source code)
@@ -324,16 +323,15 @@ def write_namelist(just_prior_values=False):
     Returns:
-        None
+        dict: the merged namelist
    """
-    list_obstypes_all, list_loc_horiz_rad = _get_horiz_localization()
+    list_obstypes_all, list_loc_horiz_rad = _get_horiz_localization(cfg)
 
-    vert_norm_obs_types, vert_norm_heights, vert_norm_scale_heights, vert_norm_levels, vert_norm_pressures = _get_vertical_localization()
+    vert_norm_obs_types, vert_norm_heights, vert_norm_scale_heights, vert_norm_levels, vert_norm_pressures = _get_vertical_localization(cfg)
 
     # default compilation namelist
-    nml = read_namelist(cluster.dart_srcdir + "/input.nml")
+    nml = read_namelist(cfg.dir_dart_src + "/input.nml")
 
     n_obstypes = len(list_obstypes_all)
     if n_obstypes > 0:
-        # make sure that observations defined in `exp.observations` are assimilated
         nml['&obs_kind_nml']['assimilate_these_obs_types'] = [list_obstypes_all]
         nml['&obs_kind_nml']['use_precomputed_FOs_these_obs_types'] = [
             [a for a in list_obstypes_all if a.startswith('CF')]]  # only for cloud fraction
@@ -361,7 +359,7 @@ def write_namelist(just_prior_values=False):
     # then we read the configuration file of the experiment
     # and overwrite the default values where necessary
     # (to merge default and user-defined namelist settings)
-    for section, sdata in exp.dart_nml.items():
+    for section, sdata in cfg.dart_nml.items():
 
         # if section is not in namelist, add it
         if section not in nml:
@@ -414,23 +412,25 @@ def write_namelist(just_prior_values=False):
 
     # fail if horiz_dist_only == false but observations contain a satellite channel
     if nml['&location_nml']['horiz_dist_only'][0] == '.false.':
-        for obscfg in exp.observations:
+        for obscfg in cfg.assimilate_these_observations:
             if 'sat_channel' in obscfg:
                 warnings.warn(
                     "Selected vertical localization, but observations contain satellite obs -> Bug in DART.")
 
     # write to file
-    write_namelist_from_dict(nml, cluster.dart_rundir + "/input.nml")
+    dir_dart_run = cfg.dir_dart_run.replace('<exp>', cfg.name)
+    write_namelist_from_dict(nml, dir_dart_run + "/input.nml")
 
     # append section for RTTOV
-    if hasattr(cluster, 'rttov_nml'):
-        append_file(cluster.dart_rundir + "/input.nml", cluster.rttov_nml)
+    if hasattr(cfg, 'rttov_nml'):
+        append_file(dir_dart_run + "/input.nml", cfg.rttov_nml)
 
     return nml  # in case we want to access namelist settings in python
 
 
 if __name__ == "__main__":
     # for testing
+    cfg = Config.from_file(sys.argv[1])
 
-    nml = write_namelist()
+    nml = write_namelist(cfg)
     print(nml)
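
A minimal usage sketch for the refactored, explicitly-configured namelist writer (not part of the patch; the paths and the single radar entry are assumptions, and dir_dart_src must contain DART's default input.nml):

    from dartwrf.utils import Config
    from dartwrf import dart_nml

    cfg = Config(name='exp_test', model_dx=2000, ensemble_size=4,
                 update_vars=['U', 'V', 'T'],
                 dir_dart_src='/path/to/DART/models/wrf/work',  # assumed path
                 dir_dart_run='./run_DART/<exp>/',
                 dart_nml={'&location_nml': dict(horiz_dist_only='.true.')},
                 assimilate_these_observations=[
                     dict(kind='RADAR_REFLECTIVITY', loc_horiz_km=20)])

    # assumes the run directory exists (normally set up by prepare_run_DART_folder)
    nml = dart_nml.write_namelist(cfg)  # writes ./run_DART/exp_test/input.nml
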
diff --git a/dartwrf/namelist_handler.py b/dartwrf/namelist_handler.py
index 351f8871d35e5b6421fabaa011dc4757fe73a556..8b80740b83a70b1f193293ab418fe472f57027dd 100644
--- a/dartwrf/namelist_handler.py
+++ b/dartwrf/namelist_handler.py
@@ -41,10 +41,12 @@ class WRF_namelist():
                 value = value.strip().rstrip(",")
 
                 if "," in value:
-                    try:
-                        values = eval(value)
-                    except:
-                        raise ValueError(f"{variable} is not a tuple!")
+                    values = value.split(",")
+                    values = tuple(v.strip() for v in values)
+                    # try:
+                    #     values = eval(value)
+                    # except:
+                    #     raise ValueError(f"{variable} is not a tuple!")
                     subdomains = True
                     value = values
                 # check if we have single numbers
@@ -67,22 +69,20 @@ class WRF_namelist():
 
     def write(self, fname: str) -> None:
         """Write a WRF namelist file
-        """
-        if os.path.exists(fname):
-            warnings.warn(f"{fname} already exists!")
-            if input('Continue? (Y/n) ') in ['Y', 'y']:
-                pass      
-            else:
-                raise FileExistsError  
-        
+        Overwrites an existing file without asking.
+        """
         with open(fname, 'w') as file:
             for block, variables in self.namelist.items():
                 file.write(f" &{block}\n")
                 for variable, value in variables.items():
-                    if isinstance(value, str) and not value.startswith('.'):
+
+                    if isinstance(value, (tuple, list)):
+                        # write sequence elements as a bare comma-joined list
+                        value = ', '.join(str(v) for v in value)
+                    else:
+                        # everything else is written via its string representation
                         value = f'{value}'
-                    if isinstance(value, tuple):
-                        value = str(value)[1:-1]
                     file.write(f" {variable:<35} = {value},\n")
                 file.write(" /\n\n")
         pass    
\ No newline at end of file
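
A sketch of the new serialization behaviour (not part of the patch): comma-separated values parse to tuples of strings, and tuples/lists are written back as bare comma-joined lists.

    from dartwrf.namelist_handler import WRF_namelist

    nml = WRF_namelist()
    nml.namelist = {'time_control': {'restart': '.false.',
                                     'e_we': ('200', '200')}}
    nml.write('namelist.input')
    # the time_control block now contains:  e_we  = 200, 200,
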
diff --git a/dartwrf/obs/calculate_obs_locations.py b/dartwrf/obs/calculate_obs_locations.py
index d86548b9619c31440cd2d32cdc69b5af5c53ce55..a533220d0bf5dffde33c9e5602920ff2ebe1fe4d 100755
--- a/dartwrf/obs/calculate_obs_locations.py
+++ b/dartwrf/obs/calculate_obs_locations.py
@@ -6,12 +6,8 @@ import os
 import sys
 import warnings
 import numpy as np
-import datetime as dt
 import xarray as xr
 
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
-
 #####################
 # Global variables
 
@@ -51,7 +47,8 @@ def square_array_from_domaincenter(n_obs, distance_between_obs_km):
     return coords
 
 
-def evenly_on_grid(km_between_obs, skip_border_km=0):
+def evenly_on_grid(f_geo_em_nature: str, 
+                   km_between_obs, skip_border_km=0):
     """Observations spread evenly over domain
 
     skip_border_km : no observations within this distance to the border
@@ -59,7 +56,7 @@ def evenly_on_grid(km_between_obs, skip_border_km=0):
     Returns
         tuple of (lat, lon) coordinates of observed gridpoints in degrees
     """
-    fcoords = cluster.geo_em_nature
+    fcoords = f_geo_em_nature
     ds = xr.open_dataset(fcoords)
 
     lons = ds.XLONG_M.isel(Time=0).values
@@ -101,7 +98,3 @@ def evenly_on_grid(km_between_obs, skip_border_km=0):
     print('first observation at gridpoint', skip_gridpoints,', last observation at gridpoint', i_grid)
     return coords
 
-
-if __name__ == '__main__':
-    
-    evenly_on_grid(12, skip_border_km=16)
\ No newline at end of file
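
The deleted self-test, adapted to the new explicit-path signature (a sketch; the geo_em path is an assumption):

    from dartwrf.obs.calculate_obs_locations import evenly_on_grid

    # observations every 12 km, none within 16 km of the domain border
    coords = evenly_on_grid('/path/to/geo_em.d01.nc', 12, skip_border_km=16)
    print(len(coords), 'observation locations')
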
diff --git a/dartwrf/obs/create_obsseq_in.py b/dartwrf/obs/create_obsseq_in.py
index 769dc377a8d5de7f4ffd225a7eb63e1bccaf2c23..b256f0b42cf88cca6609a5d83700b8484a3e5794 100755
--- a/dartwrf/obs/create_obsseq_in.py
+++ b/dartwrf/obs/create_obsseq_in.py
@@ -9,11 +9,10 @@ import numpy as np
 import datetime as dt
 from pysolar.solar import get_altitude, get_azimuth
 
-from dartwrf.server_config import cluster
 from dartwrf import utils
+from dartwrf.utils import Config
 from dartwrf.obs import calculate_obs_locations as col
-# dictionary string => DART internal indices
-from dartwrf.obs.obskind import obs_kind_nrs
+from dartwrf.obs import obskind_read
 
 # position on earth for RTTOV ray geometry
 lat0 = 45.
@@ -104,7 +103,7 @@ def _append_hgt_to_coords(coords, heights):
     return coords2
 
 
-def preamble(n_obs_3d_total, list_kinds):
+def preamble(n_obs_3d_total, list_kinds, obs_kind_nrs):
     """Writes the header of an obs_seq.out file
     """
     lines_obstypedef = ''
@@ -198,8 +197,7 @@ kind
 """+str(obs['obserr_var'])
 
 
-def create_obs_seq_in(time_dt, list_obscfg,
-                      output_path=cluster.dart_rundir+'/obs_seq.in'):
+def create_obs_seq_in(cfg: Config, output_path='./obs_seq.in'):
     """Create obs_seq.in with multiple obs types in one file
 
     Args:
@@ -215,7 +213,12 @@ def create_obs_seq_in(time_dt, list_obscfg,
             - error_assimilate (np.array or False) : False -> parameterized
             - cov_loc_radius_km (float)
     """
+    time_dt = cfg.time
+    list_obscfg = cfg.assimilate_these_observations
+    
     os.makedirs(os.path.dirname(output_path), exist_ok=True)
+    
+    obs_kind_nrs = obskind_read(cfg.dir_dart_src)
 
     print('creating obs_seq.in:')
     time_dt = _add_timezone_UTC(time_dt)
@@ -236,7 +239,8 @@ def create_obs_seq_in(time_dt, list_obscfg,
 
         if obs_locations == 'DEFAULT_GRID':
             # compute as a square_array_evenly_on_grid
-            coords = col.evenly_on_grid(obscfg['km_between_obs'],
+            coords = col.evenly_on_grid(cfg.geo_em_nature,
+                                        obscfg['km_between_obs'],
                                         obscfg['skip_border_km'])
         else:
             # obs_locations given by iterable
@@ -286,28 +290,14 @@ def create_obs_seq_in(time_dt, list_obscfg,
 
     n_obs_total = i_obs_total
     list_kinds = [a['kind'] for a in list_obscfg]
-    pretxt = preamble(n_obs_total, list_kinds)
+    pretxt = preamble(n_obs_total, list_kinds, obs_kind_nrs)
 
     alltxt = pretxt + txt
     _write_file(alltxt, output_path=output_path)
 
 
 if __name__ == '__main__':
-    # for testing
-    time_dt = dt.datetime(2008, 7, 30, 13, 0)
-
-    radar = dict(var_name='Radar reflectivity', unit='[dBz]',
-                 kind='RADAR_REFLECTIVITY',
-                 n_obs=5776, obs_locations='square_array_evenly_on_grid',
-                 error_generate=2.5, error_assimilate=2.5,
-                 heights=range(2000, 14001, 2000),
-                 loc_horiz_km=20, loc_vert_km=2.5)
-
-    create_obs_seq_in(time_dt, [radar],
-                      output_path=utils.userhome+'/run_DART/obs_seq.in')
-
-    if False:
-        error_assimilate = 5.*np.ones(n_obs*len(radar['heights']))
-        import assim_synth_obs as aso
-        aso.replace_errors_obsseqout(
-            cluster.dart_rundir+'/obs_seq.out', error_assimilate)
+    cfg = Config.from_file(sys.argv[1])
+    create_obs_seq_in(cfg, output_path=utils.userhome+'/run_DART/obs_seq.in')
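
The deleted inline test, rewritten against the Config-based API (a sketch, not part of the patch; the paths are assumptions, and the observation dict uses the DEFAULT_GRID placement handled above):

    import datetime as dt
    from dartwrf.utils import Config
    from dartwrf.obs.create_obsseq_in import create_obs_seq_in

    radar = dict(var_name='Radar reflectivity', unit='[dBz]',
                 kind='RADAR_REFLECTIVITY',
                 obs_locations='DEFAULT_GRID',
                 km_between_obs=12, skip_border_km=16,
                 error_generate=2.5, error_assimilate=2.5,
                 heights=range(2000, 14001, 2000),
                 loc_horiz_km=20, loc_vert_km=2.5)

    cfg = Config(name='exp_test', model_dx=2000, ensemble_size=4,
                 time=dt.datetime(2008, 7, 30, 13),
                 assimilate_these_observations=[radar],
                 dir_dart_src='/path/to/DART/models/wrf/work',  # obs-kind table
                 geo_em_nature='/path/to/geo_em.d01.nc')

    create_obs_seq_in(cfg, output_path='./obs_seq.in')
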
diff --git a/dartwrf/obs/create_obsseq_out.py b/dartwrf/obs/create_obsseq_out.py
index 4abd01d48b02833eb0fb59fe21709cd83f5ca8b9..99bf135c542a7b6c863f9c2796b82f28952d852c 100644
--- a/dartwrf/obs/create_obsseq_out.py
+++ b/dartwrf/obs/create_obsseq_out.py
@@ -1,29 +1,27 @@
-import os
+import os, sys
 import shutil
 import glob
 import warnings
 
-from dartwrf.utils import try_remove, print, shell, symlink, copy
+from dartwrf.utils import Config, try_remove, print, shell, symlink, copy
 import dartwrf.obs.create_obsseq_in as osi
-from dartwrf.obs import obsseq
+from dartwrf.obs import obsseq, obskind_read
 from dartwrf import assimilate as aso
 from dartwrf import wrfout_add_geo
+from dartwrf import dart_nml
-from dartwrf.obs.obskind import obs_kind_nrs
 
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
 
-
-def prepare_nature_dart(time):
+def prepare_nature_dart(cfg: Config):
     """Prepares DART nature (wrfout_d01) if available
 
     Args:
         time (dt.datetime): Time at which observations will be made
     """
+    time = cfg.time
+    
     def _find_nature(time):
         """Find the path to the nature file for the given time
         """
-        glob_pattern = time.strftime(exp.nature_wrfout_pattern)
+        glob_pattern = time.strftime(cfg.nature_wrfout_pattern)
         print('searching for nature in pattern:', glob_pattern)
         try:
             f_nat = glob.glob(glob_pattern)[0]  # find the nature wrfout-file
@@ -44,32 +42,18 @@ def prepare_nature_dart(time):
     print("prepare nature")
     f_nat = _find_nature(time)
     # link nature wrfout to DART directory
-    copy(f_nat, cluster.dart_rundir + "/wrfout_d01")
+    copy(f_nat, cfg.dir_dart_run + "/wrfout_d01")
 
     # add coordinates if necessary
-    if cluster.geo_em_nature:
-        wrfout_add_geo.run(cluster.geo_em_nature, cluster.dart_rundir + "/wrfout_d01")
+    if cfg.geo_em_nature:
+        wrfout_add_geo.run(cfg, cfg.geo_em_nature, cfg.dir_dart_run + "/wrfout_d01")
         
     # as a grid template
-    symlink(cluster.dart_rundir + "/wrfout_d01", cluster.dart_rundir + "/wrfinput_d01")
+    symlink(cfg.dir_dart_run + "/wrfout_d01", cfg.dir_dart_run + "/wrfinput_d01")
     
 
-def force_obs_in_physical_range(oso):
-    """ Set values smaller than surface albedo to surface albedo
-    Highly hacky. Your albedo might be different.
-    """
-    print(" removing obs below surface albedo ")
-    clearsky_albedo = 0.2928  # custom value
-
-    if_vis_obs = oso.df['kind'].values == obs_kind_nrs['MSG_4_SEVIRI_BDRF']
-    if_obs_below_surface_albedo = oso.df['observations'].values < clearsky_albedo
-    oso.df.loc[if_vis_obs & if_obs_below_surface_albedo,
-               ('observations')] = clearsky_albedo
-    oso.to_dart(f=cluster.dart_rundir + "/obs_seq.out")
-    return oso
-
 
-def run_perfect_model_obs(nproc=12):
+def run_perfect_model_obs(cfg: Config):
     """Run the ./perfect_model_obs program
 
     Args:
@@ -79,24 +63,23 @@ def run_perfect_model_obs(nproc=12):
         None, creates obs_seq.out
     """
     print("running ./perfect_model_obs")
-    os.chdir(cluster.dart_rundir)
-    
-    if hasattr(cluster, 'max_nproc'):
-        nproc = cluster.max_nproc
+    os.chdir(cfg.dir_dart_run)
+    nproc = cfg.max_nproc
 
-    try_remove(cluster.dart_rundir + "/obs_seq.out")
-    if not os.path.exists(cluster.dart_rundir + "/obs_seq.in"):
-        raise RuntimeError(
-            "obs_seq.in does not exist in " + cluster.dart_rundir)
-    shell(cluster.dart_modules+'; mpirun -np '+str(nproc) +
-          " ./perfect_model_obs > log.perfect_model_obs")
-    if not os.path.exists(cluster.dart_rundir + "/obs_seq.out"):
+    try_remove(cfg.dir_dart_run + "/obs_seq.out")
+    if not os.path.exists(cfg.dir_dart_run + "/obs_seq.in"):
+        raise RuntimeError("obs_seq.in does not exist in " + cfg.dir_dart_run)
+    
+    shell(cfg.dart_modules+'; mpirun -np '+str(nproc) +
+          " ./perfect_model_obs &> log.perfect_model_obs")
+    
+    if not os.path.exists(cfg.dir_dart_run + "/obs_seq.out"):
         raise RuntimeError(
-            "obs_seq.out does not exist in " + cluster.dart_rundir,
-            "\n see "+cluster.dart_rundir + "/log.perfect_model_obs")
+            "obs_seq.out does not exist in " + cfg.dir_dart_run,
+            ". See "+cfg.dir_dart_run + "/log.perfect_model_obs")
 
 
-def generate_new_obsseq_out(time):
+def generate_new_obsseq_out(cfg: Config):
     """Generate an obs_seq.out file from obs_seq.in
-    Expects an existing nature file in the cluster.dart_rundir.
+    Provides the nature file in cfg.dir_dart_run via prepare_nature_dart().
 
@@ -110,26 +93,43 @@ def generate_new_obsseq_out(time):
     Returns:
         obsseq.ObsSeq: obs_seq.out representation
     """
-    def _provide_obs_seq_in(time):
-        if hasattr(exp, 'use_this_obs_seq_in'):
+    time = cfg.time
+    
+    def __force_obs_in_physical_range(oso):
+        """ Set values smaller than surface albedo to surface albedo
+        Highly hacky. Your albedo might be different.
+        """
+        print(" removing obs below surface albedo ")
+        clearsky_albedo = 0.2928  # custom value
+        obs_kind_nrs = obskind_read(cfg.dir_dart_src)
+
+        if_vis_obs = oso.df['kind'].values == obs_kind_nrs['MSG_4_SEVIRI_BDRF']
+        if_obs_below_surface_albedo = oso.df['observations'].values < clearsky_albedo
+        oso.df.loc[if_vis_obs & if_obs_below_surface_albedo,
+                ('observations')] = clearsky_albedo
+        oso.to_dart(f=cfg.dir_dart_run + "/obs_seq.out")
+        return oso
+
+    def __provide_obs_seq_in(cfg: Config):
+        if hasattr(cfg, 'use_this_obs_seq_in'):
             # custom definition of an obs_seq.in file
-            print("using obs_seq.in:", exp.use_this_obs_seq_in)
-            copy(exp.use_this_obs_seq_in, cluster.dart_rundir+'/obs_seq.in')
+            print("using obs_seq.in:", cfg.use_this_obs_seq_in)
+            copy(cfg.use_this_obs_seq_in, cfg.dir_dart_run+'/obs_seq.in')
         else:
             # create file from scratch
-            osi.create_obs_seq_in(time, exp.observations)
-        
-    _provide_obs_seq_in(time)
-    prepare_nature_dart(time)
-
-    run_perfect_model_obs(nproc=cluster.max_nproc)
+            osi.create_obs_seq_in(cfg, cfg.dir_dart_run+'/obs_seq.in')  
+    
+    ##############################
+    __provide_obs_seq_in(cfg)
+    prepare_nature_dart(cfg)
+    run_perfect_model_obs(cfg)
 
-    oso = obsseq.ObsSeq(cluster.dart_rundir + "/obs_seq.out")
-    oso = force_obs_in_physical_range(oso)
+    oso = obsseq.ObsSeq(cfg.dir_dart_run + "/obs_seq.out")
+    oso = __force_obs_in_physical_range(oso)
     
-    f_dest = time.strftime(cluster.pattern_obs_seq_out)
+    f_dest = time.strftime(cfg.pattern_obs_seq_out)
     os.makedirs(os.path.dirname(f_dest), exist_ok=True)
-    shutil.copy(cluster.dart_rundir+'/obs_seq.out', f_dest)
+    shutil.copy(cfg.dir_dart_run+'/obs_seq.out', f_dest)
     return oso
 
 
@@ -146,23 +146,13 @@ if __name__ == '__main__':
     Returns:
-        None, creates obs_seq.out in cluster.archivedir
+        None, creates obs_seq.out at cfg.pattern_obs_seq_out
     """
-    import argparse
-    import datetime as dt
-    parser = argparse.ArgumentParser(
-        description='Generate obs_seq.out files from an experiment')
-
-    parser.add_argument('times', type=str, help='times of the observations')
-    args = parser.parse_args()
-    times = args.times.split(',')
-    
-    # before running perfect_model_obs, we need to set up the run_DART folder
-    from dartwrf import assimilate as aso
-    from dartwrf import dart_nml
-    aso.prepare_run_DART_folder()
-    nml = dart_nml.write_namelist()
-
-    for time in times:
-        print("time", time)
-        time = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M')
-        generate_new_obsseq_out(time)
+    cfg = Config.from_file(sys.argv[1])
+
+    # set up the run_DART folder and input.nml, then create obs_seq.out
+    aso.prepare_run_DART_folder(cfg)
+    dart_nml.write_namelist(cfg)
+    generate_new_obsseq_out(cfg)
         
diff --git a/dartwrf/obs/obsseq.py b/dartwrf/obs/obsseq.py
index 7f35bd74e2bbe43cad5d252a20938e01dfd89a4e..047bd88839867f02d964da820058d34f80c30ebf 100755
--- a/dartwrf/obs/obsseq.py
+++ b/dartwrf/obs/obsseq.py
@@ -908,7 +908,6 @@ class ObsSeq(object):
 
 
 if __name__ == "__main__":
-    from dartwrf.server_config import cluster
     # for testing purposes
 
     # f = cluster.scriptsdir + "/../tests/obs_seq.orig.out"
diff --git a/dartwrf/obs/obsseq_2dim.py b/dartwrf/obs/obsseq_2dim.py
deleted file mode 100755
index 40afa4a7c51999d5a518d73fc3405d6d16cf2be0..0000000000000000000000000000000000000000
--- a/dartwrf/obs/obsseq_2dim.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""Create obs_seq.out files with collapsed vertical dimension
-Specifically, one observation per column which is the maximum of the column
-
-Use this script before running the OSSE workflow, to prepare obs_seq.out files.
-
-Note:
-     path_3d_obsseq = '/path/exp_obs10_loc20/obs_seq_out/2008-07-30_%H:%M_obs_seq.out'  
-
-Note:
-     Only works in case there is 1 observation type!
-
-Example:
-     python obsseq_2dim.py exp_v1.22_P2_rr_REFL_obs10_loc20_oe2.5 2008-07-30_13:00
-"""
-
-from copy import copy
-import os, sys, warnings
-import datetime as dt
-import numpy as np
-
-from dartwrf.server_config import cluster
-from dartwrf import utils
-from dartwrf.obs import obsseq
-
-def _get_n_obs_per_layer(oso):     
-     """Determine number of observations per layer from obsseq.ObsSeq object
-
-     Args:
-          oso (obsseq.ObsSeq): obsseq object
-
-     Returns:
-          int
-     """
-     height_all = np.array([a[2] for a in oso.df.loc3d])
-
-     height_first = height_all[0]
-
-     # count how often this height appears
-     n_obs_per_layer = int(np.sum(height_all == height_first))
-     return n_obs_per_layer
-
-
-if __name__ == "__main__":
-     exp = sys.argv[1]
-     assim_time = dt.datetime.strptime(sys.argv[2], "%Y-%m-%d_%H:%M")
-     
-     path_3d_obsseq = cluster.archive_base+exp+'/obs_seq_out/%Y-%m-%d_%H:%M_obs_seq.out'
-     oso_input = assim_time.strftime(path_3d_obsseq)
-     
-     # existing obsseq with multi levels
-     oso = obsseq.ObsSeq(oso_input)
-     
-     n_obs_3d = len(oso.df)
-     n_obs_per_layer = _get_n_obs_per_layer(oso)
-     nlev = int(n_obs_3d/n_obs_per_layer)
-     assert np.allclose(nlev, n_obs_3d/n_obs_per_layer), 'n_obs not evenly divisible!'
-
-     print('n_obs_per_layer', n_obs_per_layer)
-     print('n_obs_3d', n_obs_3d)
-     
-     output = copy(oso)  # copy will be modified
-     # output.df = output.df.copy()  # without this, we get a SettingWithCopyWarning
-     output.df = output.df.iloc[0::nlev]  #  every nth level = first level
-
-     # iterate through, set value to max
-     for i_obs in range(0, ):  # go through n_obs (all columns)
-
-          i_obs_subset = i_obs*nlev  # jumps by nlev (from one to next column)
-          column = oso.df.loc[0 + i_obs_subset:nlev + i_obs_subset, :]  # select column
-
-          output.df.loc[i_obs_subset, ('observations')] = float(column['observations'].max())
-          output.df.loc[i_obs_subset, ('truth')] = float(column['truth'].max())
-
-     print(output.df)
-
-     fout = cluster.archivedir + assim_time.strftime("/obs_seq_out/%Y-%m-%d_%H:%M_obs_seq.out")
-     os.makedirs(cluster.archivedir+'/obs_seq_out', exist_ok=True)
-     output.to_dart(fout)
-     utils.write_txt(["created from", oso_input,], fout[:-3]+'.txt')
diff --git a/dartwrf/obs/obsseq_to_netcdf.py b/dartwrf/obs/obsseq_to_netcdf.py
deleted file mode 100644
index db40ff16c015f5af7dc8083fd0b08dd3b9580b1c..0000000000000000000000000000000000000000
--- a/dartwrf/obs/obsseq_to_netcdf.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import os, sys, glob, warnings
-
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
-import dartwrf.obs.run_obs_diag as rod
-
-def listdir_dirs(path):
-    return [a for a in os.listdir(path) if os.path.isdir(os.path.join(path, a))]
-
-if __name__ == '__main__':
-
-    datadir = cluster.archive_base
-    #expname = 'exp_v1.16_Pwbub-1_Radar_soe2' 
-    expname = exp.expname
-    ddir = datadir+expname+'/obs_seq_final/'
-
-    files = sorted(glob.glob(ddir+'/*.final')) 
-    rod.run_obsdiag(files, f_out=ddir+'/obsdiag.nc')
-    rod.run_obs_seq_to_netcdf(files, f_out=ddir+'/obs_epoch.nc') 
-
-    ddir = datadir+expname+'/obs_seq_final_1min/'
-    files = sorted(glob.glob(ddir+'/*.final'))
-    try:
-        rod.run_obs_seq_to_netcdf(files, f_out=ddir+'/obs_epoch.nc') 
-    except Exception as e:
-        warnings.warn(str(e))
diff --git a/dartwrf/obs/run_obs_diag.py b/dartwrf/obs/run_obs_diag.py
deleted file mode 100644
index 254643888e0737ffe69c0f77b8494993883e59ac..0000000000000000000000000000000000000000
--- a/dartwrf/obs/run_obs_diag.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import os, sys, shutil, glob
-
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
-from dartwrf.utils import symlink, copy, sed_inplace, append_file, shell
-
-rundir_program = '/home/fs71386/lkugler/data/run_DART/'
-
-def prepare(obserr_iszero='.true.'):
-    copy(cluster.scriptsdir+'/../templates/input.eval.nml',
-            rundir_program+'/input.nml')
-    sed_inplace(rundir_program+'/input.nml', '<n_ens>', str(int(exp.ensemble_size)))
-    sed_inplace(rundir_program+'/input.nml', '<zero_error_obs>', obserr_iszero)
-    sed_inplace(rundir_program+'/input.nml', '<horiz_dist_only>', '.false.')  # dummy
-    sed_inplace(rundir_program+'/input.nml', '<vert_norm_hgt>', '5000.0')  # dummy
-    
-    append_file(rundir_program+'/input.nml', cluster.scriptsdir+'/../templates/obs_def_rttov.VIS.nml')
-
-
-def write_input_filelist(filepaths):
-    fpath = rundir_program+'/obsdiag_inputlist.txt'
-    print('writing', fpath)
-    if os.path.exists(fpath):
-        os.remove(fpath)
-
-    with open(fpath, 'w') as f:
-        for fin in filepaths:
-            f.write(fin)
-            f.write('\n')
-
-
-def run_obsdiag(filepaths, f_out='./obsdiag.nc'):
-    write_input_filelist(filepaths)
-
-    for obserr_iszero in ['.true.', '.false.']:
-        prepare(obserr_iszero=obserr_iszero)
-
-        # run_allinoneplace obs_diag
-        print('------ running obs_diag program')
-        os.chdir(rundir_program)
-        symlink(cluster.dart_srcdir+'/obs_diag', rundir_program+'/obs_diag')
-        shell(cluster.dart_modules+' ./obs_diag >& obs_diag.log')  # caution, this overwrites obs_seq_to_netcdf
-
-        # move output to archive
-        #outdir = os.path.dirname(f_out)  #'/'.join(folder_obs_seq_final.split('/')[:-1])
-        if obserr_iszero == '.true.':
-            fout = f_out[:-3]+'_wrt_truth.nc'   
-        elif obserr_iszero == '.false.':
-            fout = f_out[:-3]+'_wrt_obs.nc' 
-        shutil.move(rundir_program+'/obs_diag_output.nc', fout)
-        print(fout, 'saved.')
-
-
-def run_obs_seq_to_netcdf(filepaths, f_out='./obs_epoch.nc'):
-
-    write_input_filelist(filepaths)
-    print('------ running obs_seq_to_netcdf program')
-    #shutil.copy(cluster.dart_srcdir+'/obs_seq_to_netcdf-bak', rundir_program+'/obs_seq_to_netcdf')
-    os.chdir(rundir_program)
-    shell(cluster.dart_modules+' ./obs_seq_to_netcdf  >& obs_seq_to_netcdf.log')  # caution, overwrites its own binary?!
-    shutil.move(rundir_program+'/obs_epoch_001.nc', f_out)
-    print(f_out, 'saved.')
-
-
-if __name__ == '__main__':
-    #folder_obs_seq_final = '/home/fs71386/lkugler/data/DART-WRF/rundir/test' 
-    print('python run_obs_diag.py ')
-    folder_obs_seq_final = str(sys.argv[1])
-    files = sorted(glob.glob(folder_obs_seq_final+'/*.final'))  # input for obs_diag program
-    
-    run_obsdiag(files, f_out='./test.nc')  # input must be files with posterior data!!
-    run_obs_seq_to_netcdf(files, outdir=folder_obs_seq_final)  # input can be files without posterior data
diff --git a/dartwrf/prep_IC_prior.py b/dartwrf/prep_IC_prior.py
index 5c84794dcad8584c6fb6c317451e6c22990bf4d8..8a8a841b635036cabf6ffa90657c5e608bf74014 100755
--- a/dartwrf/prep_IC_prior.py
+++ b/dartwrf/prep_IC_prior.py
@@ -1,10 +1,6 @@
-import os, sys, warnings, glob
+import os, sys
 import datetime as dt
-import numpy as np
-
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
-from dartwrf.utils import copy, clean_wrfdir, try_remove
+from dartwrf.utils import copy, clean_wrfdir, Config
 
 """
 Sets initial condition data (wrfinput/wrfrst file) in the run_WRF directory for each ensemble member 
@@ -23,12 +19,13 @@ def create_wrfrst_in_WRF_rundir(time: dt.datetime, prior_init_time: dt.datetime,
     """Copy WRF restart files to run_WRF directory 
     These files will be used as initial conditions for the next WRF run
     """
-    for iens in range(1, exp.ensemble_size+1):
-        clean_wrfdir(cluster.wrf_rundir(iens))
+    for iens in range(1, cfg.ensemble_size+1):
+        dir_wrf_run = cfg.dir_wrf_run.replace('<exp>', cfg.name).replace('<ens>', str(iens))
+        clean_wrfdir(dir_wrf_run)
     
         prior_wrfrst = prior_path_exp + prior_init_time.strftime('/%Y-%m-%d_%H:%M/') \
                        +str(iens)+time.strftime('/wrfrst_d01_%Y-%m-%d_%H:%M:%S')
-        wrfrst = cluster.wrf_rundir(iens) + time.strftime('/wrfrst_d01_%Y-%m-%d_%H:%M:%S')
+        wrfrst = dir_wrf_run + time.strftime('/wrfrst_d01_%Y-%m-%d_%H:%M:%S')
         print('copy prior (wrfrst)', prior_wrfrst, 'to', wrfrst)
         copy(prior_wrfrst, wrfrst)
         
@@ -48,10 +45,12 @@ def create_updated_wrfinput_from_wrfout(time: dt.datetime, prior_init_time: dt.d
     
     """
     print('writing updated wrfout to WRF run directory as wrfinput')
-    for iens in range(1, exp.ensemble_size+1):
+    for iens in range(1, cfg.ensemble_size+1):
+        dir_wrf_run = cfg.dir_wrf_run.replace('<exp>', cfg.name).replace('<ens>', str(iens))
+        
         prior_wrfout = prior_path_exp + prior_init_time.strftime('/%Y-%m-%d_%H:%M/') \
                        +str(iens)+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
-        new_start_wrfinput = cluster.wrf_rundir(iens) + '/wrfinput_d01' 
+        new_start_wrfinput = dir_wrf_run + '/wrfinput_d01' 
         copy(prior_wrfout, new_start_wrfinput)
         print(new_start_wrfinput, 'created.')
 
@@ -62,18 +61,19 @@ def create_updated_wrfinput_from_wrfout(time: dt.datetime, prior_init_time: dt.d
 
 
 if __name__ == '__main__':
-    prior_path_exp = sys.argv[1]
-    prior_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
-    prior_valid_time = dt.datetime.strptime(sys.argv[3], '%Y-%m-%d_%H:%M')
-
-    if len(sys.argv) == 4:
-        create_wrfrst_in_WRF_rundir(prior_valid_time, prior_init_time, prior_path_exp)
-    elif len(sys.argv) == 5:
-        # to start new simulation at different time than prior_valid_time
-        new_start_time = dt.datetime.strptime(sys.argv[4], '%Y-%m-%d_%H:%M')
+    cfg = Config.from_file(sys.argv[1])
+    
+    prior_valid_time = cfg.prior_valid_time
+    prior_init_time = cfg.prior_init_time
+    prior_path_exp = cfg.prior_path_exp
+    
+    if 'start_from_wrfout' in cfg and cfg.start_from_wrfout:
+        # use a wrfout file as wrfinput
+        # Caution: even without assimilation increments, this changes the model forecast
+        new_start_time = cfg.new_start_time
+        create_updated_wrfinput_from_wrfout(prior_valid_time, prior_init_time, prior_path_exp, new_start_time)
+        sys.exit(0)
 
-        # use_wrfout_as_wrfinput
-        # Caution: Even without assimilation increments, this will change the model forecast
-        create_updated_wrfinput_from_wrfout(prior_valid_time, prior_init_time, prior_path_exp, new_start_time)
-    else:
-        raise ValueError('Usage: python prep_IC_prior.py prior_path_exp prior_init_time prior_valid_time [new_start_time]')
\ No newline at end of file
+    # default: initialize the next run from wrfrst files
+    create_wrfrst_in_WRF_rundir(prior_valid_time, prior_init_time, prior_path_exp)
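
A sketch of the config this script now expects (not part of the patch; times and paths are illustrative):

    import datetime as dt
    from dartwrf.utils import Config

    cfg = Config(name='exp_test', model_dx=2000, ensemble_size=4,
                 prior_path_exp='/path/to/prior_experiment',
                 prior_init_time=dt.datetime(2008, 7, 30, 12),
                 prior_valid_time=dt.datetime(2008, 7, 30, 13),
                 # to branch off from a wrfout instead of wrfrst files:
                 # start_from_wrfout=True, new_start_time=dt.datetime(2008, 7, 30, 14),
                 )

    # the constructor pickles itself; hand that file to the script:
    #   python prep_IC_prior.py <path printed in cfg.f_cfg_current>
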
diff --git a/dartwrf/prepare_namelist.py b/dartwrf/prepare_namelist.py
index 025a9b5f32c1d9e45f0125cd2c934fa323bae98c..6b6f6230a0a9460e285135de2316f37f4e4729c2 100755
--- a/dartwrf/prepare_namelist.py
+++ b/dartwrf/prepare_namelist.py
@@ -1,93 +1,93 @@
 """Create namelist.input files
 
 Usage:
-prepare_namelist.py <config> <begin> <end> <intv> [--radt=<minutes>] [--restart=<flag>] [--restart_interval=<minutes>]
-
-Options:
---radt=<minutes>   		Radiation interval [default: 5]
---restart=<flag> 		Restart flag (.true., .false.) [default: .false.]
---restart_interval=<minutes>	Restart frequency [default: 720]
+prepare_namelist.py <config>
 """
 import os, sys
 import datetime as dt
-from docopt import docopt
-
 from dartwrf.namelist_handler import WRF_namelist
-from dartwrf.server_config import cluster
-from dartwrf.utils import sed_inplace, copy, read_dict_from_pyfile
-
-
-if __name__ == '__main__':
-    
-    args = docopt(__doc__)
-    archive = True
-    
-    f_config = args['<config>']
-    exp = read_dict_from_pyfile(f_config)
-    
-    begin = dt.datetime.strptime(args['<begin>'], '%Y-%m-%d_%H:%M:%S')
-    end = dt.datetime.strptime(args['<end>'], '%Y-%m-%d_%H:%M:%S')
-    hist_interval_s = int(args['<intv>'])
-    
-    radt = int(args['--radt']) 
-    if not radt:
-        radt = '5'
+from dartwrf.utils import Config, copy, try_remove
 
+def run(cfg: Config) -> None:
+    # defaults
+    hist_interval_s = 300
     restart = False
-    if args['--restart'] == '.true.':
-        restart = True
-    rst_flag = '.true.' if restart else '.false.'
+    restart_interval = 9999  # dummy
+    
+    # overwrite with config
+    if 'restart' in cfg:
+        restart = cfg.restart
 
-    restart_interval = args['--restart_interval']
-    if not restart_interval:
-        restart_interval = 720
+    if 'WRF_start' in cfg:
+        start = cfg.WRF_start
+        # fall back to a zero-length run if no end time is given
+        end = cfg.WRF_end if 'WRF_end' in cfg else start
+    else:
+        start = cfg.time  # dummy time for ideal.exe
+        end = start
         
-    # replace these variables in the namelist
-    replace_dict = {
-    'time_control': {
-        # time start
-        '<y1>': begin.strftime('%Y'),
-        '<m1>': begin.strftime('%m'),
-        '<d1>': begin.strftime('%d'),
-        '<HH1>': begin.strftime('%H'),
-        '<MM1>': begin.strftime('%M'),
-        '<SS1>': begin.strftime('%S'),
+    if 'restart_interval' in cfg:
+        restart_interval = cfg.restart_interval
         
-        # time end
-        '<y2>': end.strftime('%Y'),
-        '<m2>': end.strftime('%m'),
-        '<d2>': end.strftime('%d'),
-        '<HH2>': end.strftime('%H'),
-        '<MM2>': end.strftime('%M'),
-        '<SS2>': end.strftime('%S'),
+    if 'hist_interval_s' in cfg:
+        hist_interval_s = cfg.hist_interval_s
+
         
-        # other variables
-        '<dx>': str(int(exp.model_dx)),
-        '<hist_interval_s>': str(int(hist_interval_s)),
-        '<radt>': str(int(radt)),
-        '<restart>': rst_flag,
-        '<restart_interval>': str(int(float(restart_interval))),
-        },
-    }
+    # replace these variables in the namelist
+    rst_flag = '.true.' if restart else '.false.'
         
-    print('prepare namelists for all ens members',radt,restart,restart_interval)
-    for iens in range(1, exp.ensemble_size+1):
+    print('prepare namelists from', start, 'to', end,
+          '| restart =', restart, '| restart_interval =', restart_interval)
+    for iens in range(1, cfg.ensemble_size+1):
 
+        replace_dict = {
+            'time_control': {
+                'start_year': start.strftime('%Y'),
+                'start_month': start.strftime('%m'),
+                'start_day': start.strftime('%d'),
+                'start_hour': start.strftime('%H'),
+                'start_minute': start.strftime('%M'),
+                'start_second': start.strftime('%S'),
+                'end_year': end.strftime('%Y'),
+                'end_month': end.strftime('%m'),
+                'end_day': end.strftime('%d'),
+                'end_hour': end.strftime('%H'),
+                'end_minute': end.strftime('%M'),
+                'end_second': end.strftime('%S'),
+                'history_interval_s': str(int(hist_interval_s)),
+                'restart_interval': str(int(restart_interval)),
+                'restart': rst_flag,
+                'history_outname': f"'{cfg.dir_archive}/{start.strftime('%Y-%m-%d_%H:%M')}/{iens}/wrfout_d<domain>_<date>'",
+                'rst_outname': f"'{cfg.dir_archive}/{start.strftime('%Y-%m-%d_%H:%M')}/{iens}/wrfrst_d<domain>_<date>'",
+            },
+            'domains': {
+                'dx': str(cfg.model_dx),
+            },
+        }
+        # read the namelist template
         nml = WRF_namelist()
-        nml.read(cluster.WRF_namelist_template)
+        nml.read(cfg.WRF_namelist_template)
         
         # replace parameters
         for section, section_dict in replace_dict.items():
             for key, value in section_dict.items():
                 nml.namelist[section][key] = value
                 
-        f_out = cluster.wrf_rundir_base +'/'+ exp.name + '/'+str(iens)+'/namelist.input'
+        f_out = cfg.dir_wrf_run.replace('<ens>', str(iens)) + '/namelist.input'
+        
+        try_remove(f_out)
         nml.write(f_out)
         print('saved', f_out)
 
-        
-        if archive:
-            archdir = cluster.archive_base+'/'+exp.name+begin.strftime('/%Y-%m-%d_%H:%M/'+str(iens)+'/')
-            os.makedirs(archdir, exist_ok=True)
-        else:
-            archdir = './'
\ No newline at end of file
+        # copy to archive
+        init = start.strftime('%Y-%m-%d_%H:%M')
+        archdir = '/'.join([cfg.dir_archive, init, str(iens)])
+        os.makedirs(archdir, exist_ok=True)
+        copy(f_out, archdir+'/namelist.input')
+
+if __name__ == '__main__':
+    
+    cfg = Config.from_file(sys.argv[1])
+    run(cfg)
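
A sketch of the keys run() consumes (not part of the patch; the template path and times are assumptions, anything absent falls back to the defaults above, and the per-member run directories must already exist):

    import datetime as dt
    from dartwrf.utils import Config
    from dartwrf import prepare_namelist

    cfg = Config(name='exp_test', model_dx=2000, ensemble_size=4,
                 WRF_namelist_template='/path/to/namelist.input.template',
                 WRF_start=dt.datetime(2008, 7, 30, 12),
                 WRF_end=dt.datetime(2008, 7, 30, 18),
                 restart=False, restart_interval=60,
                 hist_interval_s=300)

    prepare_namelist.run(cfg)  # writes namelist.input per member, plus an archive copy
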
diff --git a/dartwrf/prepare_wrfrundir.py b/dartwrf/prepare_wrfrundir.py
index 081cec250c4e4861e9dc291987d29764be075d4f..b512cfb0efe46deb73b5d71bb7c59f307288e458 100755
--- a/dartwrf/prepare_wrfrundir.py
+++ b/dartwrf/prepare_wrfrundir.py
@@ -1,7 +1,7 @@
 """Prepare WRF run directories, to use wrf.exe later
 Creates a directory where WRF will run (temporary);
 Creates softlinks from WRF's `run/` directory;
-Links executables set in `cluster.ideal` and `cluster.wrfexe`;
+Links executables set in `cfg.ideal` and `cfg.wrfexe`;
-If `exp.input_profile` is set, links the input profiles.
+If `cfg.input_profile` is set, links the input profiles.
 
 Args:
@@ -12,37 +12,37 @@ Returns:
 """
 import os, sys, shutil
 import datetime as dt
-from dartwrf.server_config import cluster
 
-from dartwrf.utils import symlink, link_contents, try_remove, read_dict_from_pyfile
+from dartwrf.utils import Config, symlink, link_contents, try_remove
 from dartwrf import prepare_namelist
 
-if __name__ == '__main__':
-    
-    time = sys.argv[1]
-    exp = read_dict_from_pyfile(sys.argv[2])
-    
-
-    for iens in range(1, exp.ensemble_size+1):
-        rundir = cluster.wrf_rundir(iens)
+def run(cfg: Config):
+    """Prepare WRF run directories, to use ideal.exe or wrf.exe later
+    """
+    for iens in range(1, cfg.ensemble_size+1):
+        rundir = cfg.dir_wrf_run.replace('<exp>', cfg.name).replace('<ens>', str(iens))
         os.makedirs(rundir, exist_ok=True)
-        link_contents(cluster.srcdir, rundir)
-        symlink(cluster.wrfexe, rundir+'/wrf.exe')
         
-        if hasattr(cluster, 'ideal'):
-            symlink(cluster.ideal, rundir+'/ideal.exe')
-
-        # prepare input profiles
-        try_remove(rundir+'/input_sounding')   # remove existing file
+        link_contents(cfg.dir_wrf_src, rundir)
+        symlink(cfg.wrfexe, rundir+'/wrf.exe')
+        
+        if hasattr(cfg, 'idealexe'):
+            symlink(cfg.idealexe, rundir+'/ideal.exe')
         
-        if hasattr(exp, 'input_profile'):
-            init_time = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M')
-            # prep namelist for ./ideal.exe
-            prepare_namelist.run(iens, begin=init_time, end=init_time, archive=False) # time not important
+        if hasattr(cfg, 'input_profile'):
+            input_prof = cfg.input_profile.replace('<iens>', str(iens).zfill(3))
             
-            input_prof = (exp.input_profile).replace('<iens>', str(iens).zfill(3))
             if not os.path.isfile(input_prof):
                 raise FileNotFoundError(f'Input profile {input_prof} does not exist.')
             symlink(input_prof, rundir+'/input_sounding')
 
+    prepare_namelist.run(cfg)
     print('All run_WRF directories have been set up.')
+
+if __name__ == '__main__':
+    
+    cfg = Config.from_file(sys.argv[1])
+    run(cfg)
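
A sketch of the attributes run() uses here (not part of the patch; every path is an assumption):

    import datetime as dt
    from dartwrf.utils import Config
    from dartwrf import prepare_wrfrundir

    cfg = Config(name='exp_test', model_dx=2000, ensemble_size=4,
                 dir_wrf_src='/path/to/WRF-4.3/run',  # contents are linked into each rundir
                 wrfexe='/path/to/wrf.exe',
                 idealexe='/path/to/ideal.exe',       # optional
                 input_profile='/path/to/raso.fc.<iens>.wrfprof',  # optional; <iens> -> 001, ...
                 WRF_namelist_template='/path/to/namelist.input.template',
                 time=dt.datetime(2008, 7, 30, 12))   # dummy start for ideal.exe

    prepare_wrfrundir.run(cfg)
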
diff --git a/dartwrf/server_config.py b/dartwrf/server_config.py
deleted file mode 100755
index c78798898f1205fc4a5cb77d5f77780f959c0c61..0000000000000000000000000000000000000000
--- a/dartwrf/server_config.py
+++ /dev/null
@@ -1,56 +0,0 @@
-"""Cluster configuration file, see docstring of ClusterConfig class in dartwrf/utils.py for details"""
-from re import A
-from dartwrf.utils import ClusterConfig
-
-cluster = ClusterConfig(
-    max_nproc = 20,
-    max_nproc_for_each_ensemble_member = 16,
-    use_slurm = True,
-
-    # binaries
-    python = '/jetfs/home/lkugler/miniforge3/envs/verif/bin/python',
-    ncks = '/jetfs/spack/opt/spack/linux-rhel8-skylake_avx512/intel-2021.7.1/nco-5.1.0-izrhxv24jqco5epjhf5ledsqwanojc5m/bin/ncks',
-    ideal = '/jetfs/home/lkugler/data/compile/bin/ideal-v4.6.0_20250210_StS.exe',
-    wrfexe = '/jetfs/home/lkugler/data/compile/bin/wrf-v4.6.0_20250210_StS.exe',
-    dart_modules = 'module purge; module load rttov/v13.2-gcc-8.5.0',
-    wrf_modules = """module purge; module load netcdf-fortran/4.5.3-intel-2021.7.1-27ldrnt""",
-
-    # paths for data output
-    wrf_rundir_base = '/jetfs/home/lkugler/data/run_WRF/',  # path for temporary files
-    dart_rundir_base = '/jetfs/home/lkugler/data/run_DART/',  # path for temporary files
-    archive_base = '/jetfs/home/lkugler/data/sim_archive/',
-
-    # paths used as input
-    srcdir = '/jetfs/home/lkugler/data/compile/WRF-4.3/run',
-    dart_srcdir = '/jetfs/home/lkugler/data/compile/DART/DART-10.8.3_10pct/models/wrf/work/',
-    rttov_srcdir = '/jetfs/home/lkugler/data/compile/RTTOV13/rtcoef_rttov13/',
-    
-    dartwrf_dir_dev = '/jetfs/home/lkugler/DART-WRF/',
-    namelist = '/jetfs/home/lkugler/DART-WRF/config/templates/namelist.input_nat_exact',
-    rttov_nml = "/jetfs/home/lkugler/DART-WRF/config/templates/obs_def_rttov.VIS+WV.nml",
-    
-    # other inputs
-    geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.250m_1600x1600',
-    geo_em_forecast = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200',
-    #obs_impact_filename = dartwrf_dir_dev+'/templates/impactfactor_T.txt'
-
-    WRF_exe_template = '/jetfs/home/lkugler/DART-WRF/config/templates/run_WRF.jet.sh',
-    WRF_ideal_template = '/jetfs/home/lkugler/DART-WRF/config/templates/run_WRF_ideal.sh',
-
-    slurm_cfg = {"account": "lkugler", "partition": "all",  
-                    "ntasks": "1", "ntasks-per-core": "1", "mem": "30G",
-                    "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"},
-
-    # WRF file format, will only change if WRF changes
-    wrfout_format = '/wrfout_d01_%Y-%m-%d_%H:%M:%S',
-
-    # pattern for the init_time folder in sim_archive
-    pattern_init_time = "/%Y-%m-%d_%H:%M/",
-    
-    # how an obs_seq.out file is archived
-    pattern_obs_seq_out = "<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.out",
-        
-    # how an obs_seq.final file is archived
-    pattern_obs_seq_final = "<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.final",  
-
-    )
\ No newline at end of file
diff --git a/dartwrf/update_IC.py b/dartwrf/update_IC.py
index 883f76a19d11bd0b4705aa53c5f0e50476342d6f..a4f4969e0264197717f7f9a71f25bf537ae7d91b 100755
--- a/dartwrf/update_IC.py
+++ b/dartwrf/update_IC.py
@@ -1,19 +1,18 @@
-print('load imports')
+
 import os, sys, warnings
 import datetime as dt
 import netCDF4 as nc
+from dartwrf.utils import Config
 
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
-print('loaded imports')
-
-def update_initials_in_WRF_rundir(time):
+def update_initials_in_WRF_rundir(cfg: Config) -> None:
     """Updates wrfrst-files in `/run_WRF/` directory 
     with posterior state from ./filter output, e.g. filter_restart_d01.0001
 
     Args:
-        time (dt.datetime):     time of assimilation (directory preceeding ./assim_stage0/...)
+        cfg (Config): experiment configuration; `cfg.time` is the assimilation time
     """
+    time = cfg.time
+    
     use_wrfrst = True  # if wrfrst is used to restart (recommended)
     if use_wrfrst:
         initials_fmt = '/wrfrst_d01_%Y-%m-%d_%H:%M:%S'
@@ -22,23 +21,22 @@ def update_initials_in_WRF_rundir(time):
 
     # which WRF variables will be updated?
     update_vars = ['Times',]
-    update_vars.extend(exp.update_vars)
+    update_vars.extend(cfg.update_vars)
 
-    for iens in range(1, exp.ensemble_size+1):
-        ic_file = cluster.wrf_rundir(iens) + time.strftime(initials_fmt)
+    for iens in range(1, cfg.ensemble_size+1):
+        dir_wrf_run = cfg.dir_wrf_run.replace('<exp>', cfg.name).replace('<ens>', str(iens))
+        ic_file = dir_wrf_run + time.strftime(initials_fmt)
         if not os.path.isfile(ic_file):
             raise IOError(ic_file+' does not exist, updating impossible!')
         else:
             # overwrite DA updated variables
-            
-            filter_out = cluster.archivedir+time.strftime('/%Y-%m-%d_%H:%M/filter_restart_d01.'+str(iens).zfill(4))
+            filter_out = cfg.dir_archive.replace('<exp>', cfg.name) \
+                + time.strftime('/%Y-%m-%d_%H:%M/filter_restart_d01.'+str(iens).zfill(4))
 
             with nc.Dataset(filter_out, 'r') as ds_filter:
                 with nc.Dataset(ic_file, 'r+') as ds_new:
 
-                    if 'T' in update_vars or 'THM' in update_vars:
-                        ds_new.variables['T'][:] = ds_filter.variables['THM'][:]
-
                     # update all other variables
                     for var in update_vars:
                         if var in ds_new.variables:
@@ -53,5 +51,6 @@ def update_initials_in_WRF_rundir(time):
 
 
 if __name__ == '__main__':
-    time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
-    update_initials_in_WRF_rundir(time)
+    cfg = Config.from_file(sys.argv[1])
+    
+    update_initials_in_WRF_rundir(cfg)
diff --git a/dartwrf/utils.py b/dartwrf/utils.py
index fad027b7d9652e197ee86fcde1b9456b9d8c8d0e..1a578b4f08825edac0cc689bce186ccc375d9880 100755
--- a/dartwrf/utils.py
+++ b/dartwrf/utils.py
@@ -11,11 +11,15 @@ import glob
 import warnings
 import builtins as __builtin__
 import subprocess
+from pprint import pprint
 import datetime as dt
 import re
 import tempfile
 import pickle
 import importlib.util
+import random
+import string
 
 userhome = os.path.expanduser('~')
 
@@ -26,112 +30,6 @@ def import_from_path(module_name, file_path):
     spec.loader.exec_module(module) # type: ignore
     return module
 
-class ClusterConfig(object):
-    """Collection of variables regarding the cluster configuration
-
-    Attributes:
-        name (str): Name of the cluster
-        max_nproc (int): Maximum number of processors that can be used
-        np_WRF (int): Number of cores for WRF (mpirun -np xx ./wrf.exe)
-        use_slurm (bool): If True, use SLURM to submit jobs
-        size_WRF_jobarray (int): Size of SLURM job array for running the WRF ensemble
-
-        python (str): Path to python executable
-        python_verif (str): Path to python executable for verification
-        ncks (str): Path to ncks executable
-        ideal (str): Path to ideal.exe
-        wrfexe (str): Path to wrf.exe
-
-        dart_modules (str): Command to load modules before running DART 
-        wrf_modules (str): Command to load modules before running WRF
-
-        srcdir (str): Path to where WRF has been compiled, including the 'run' folder of WRF, e.g. /home/WRF-4.3/run
-        dart_srcdir (str): Path to DART compile directory, e.g. /home/DART-9.11.9/models/wrf/work
-        rttov_srcdir (str): Path to RTTOV compile directory, e.g. /home/RTTOV13/rtcoef_rttov13/
-        dartwrf_dir (str): Path where DART-WRF scripts reside, e.g. /home/DART-WRF/
-
-        geo_em_nature (str, False): Path to the geo_em.d01.nc file for idealized nature runs
-        geo_em_forecast (str, False): Path to the geo_em.d01.nc file for the forecast domain
-        obs_impact_filename
-        namelist (str): Path to a WRF namelist template; 
-                        strings like <hist_interval>, will be overwritten in scripts/prepare_namelist.py
-        run_WRF (str): Path to script which runs WRF on a node of the cluster
-
-        slurm_cfg (dict):   Dictionary containing the default configuration of SLURM
-                            as defined in SLURM docs (https://slurm.schedmd.com/sbatch.html).
-                            This configuration can be customized for any job (e.g. in workflows.py)
-
-    """
-
-    def __init__(self, 
-                 max_nproc: int, 
-                 max_nproc_for_each_ensemble_member: int,
-                 WRF_ideal_template: str, 
-                 WRF_exe_template: str, 
-                 archive_base: str,
-                 wrf_rundir_base: str,
-                 dart_rundir_base: str,
-                 dartwrf_dir_dev: str,
-                 WRF_namelist_template: str,
-                 **kwargs):
-        # defaults
-        # these are overwritten with choices in **kwargs
-        self.dart_modules = ''
-        self.wrf_modules = ''
-        self.size_jobarray = '1'
-        self.use_slurm = False
-        self.slurm_cfg = {}
-        self.log_dir = './'
-        self.slurm_scripts_dir = './'
-        self.archive_base = archive_base
-        self.wrf_rundir_base = wrf_rundir_base        
-        self.dart_rundir_base = dart_rundir_base
-        self.dartwrf_dir_dev = dartwrf_dir_dev
-        self.WRF_namelist_template = WRF_namelist_template
-        self.python = 'python'
-        self.pattern_obs_seq_out = '<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.out'
-        self.pattern_obs_seq_final = '<archivedir>/diagnostics/%Y-%m-%d_%H:%M_obs_seq.final'
-        
-        self.max_nproc = max_nproc
-        self.max_nproc_for_each_ensemble_member = max_nproc_for_each_ensemble_member
-        self.WRF_ideal_template = WRF_ideal_template
-        self.WRF_exe_template = WRF_exe_template
-        
-        # user defined
-        for key, value in kwargs.items():
-            setattr(self, key, value)
-
-    def __str__(self):
-        return f'ClusterConfig: {self.__dict__}'
-
-    def run_job(self, cmd, jobname='', cfg_update=dict(), depends_on=None):
-        """Run scripts in a shell
-
-        If not using SLURM: calls scripts through shell
-        if using SLURM: uses slurmpy to submit jobs, keep certain default kwargs and only update some with kwarg `overwrite_these_configurations`
-
-        Args:
-            cmd (str): Bash command(s) to run
-            jobname (str, optional): Name of SLURM job
-            cfg_update (dict): The config keywords will be overwritten with values
-            depends_on (int or None): SLURM job id of dependency, job will start after this id finished.
-
-        Returns 
-            None
-        """
-        if self.use_slurm:
-            from slurmpy import Slurm
-            return Slurm(jobname, slurm_kwargs=dict(self.slurm_cfg, **cfg_update),
-                         log_dir=self.log_dir,
-                         scripts_dir=self.slurm_scripts_dir,
-                         ).run(cmd, depends_on=depends_on)
-        else:
-            print(cmd)
-            returncode = os.system(cmd)
-            if returncode != 0:
-                raise Exception('Error running command >>> '+cmd)
-
-
 class Config(object):
     """Collection of variables which define the experiment
 
@@ -165,31 +63,72 @@ class Config(object):
             keys are namelist section headers (e.g. &filter_nml)
             values are dictionaries of parameters and values (e.g. dict(ens_size=exp.ensemble_size,))
 
-        wrf_rundir_base (str): Path to temporary files for WRF
-        dart_rundir_base (str): Path to temporary files for DART
-        archive_base (str): Path to long-time output storage
+        # Important directory paths
+        dir_archive: e.g. '/jetfs/home/lkugler/data/sim_archive/<exp>/'
+        dir_wrf_run: e.g. '/jetfs/home/lkugler/data/run_WRF/<exp>/<ens>/'
+        dir_dart_run: e.g. '/jetfs/home/lkugler/data/run_DART/<exp>/'
 
     """
 
-    def __init__(self, name: str, model_dx: int, ensemble_size: int, 
-                 update_vars: list=[], dart_nml: dict={}, 
+    def __init__(self, 
+                 name: str, 
+                 model_dx: int, 
+                 ensemble_size: int, 
+
+                 # cluster config
+                 max_nproc: int = 20, 
+                 max_nproc_for_each_ensemble_member: int = 9,
+                 dir_archive: str = './sim_archive/<exp>/',
+                 dir_wrf_run: str = './run_WRF/<exp>/<ens>/',
+                 dir_dart_run: str = './run_DART/<exp>/',
+                 use_slurm: bool = False,
+                 pattern_obs_seq_out: str = './diagnostics/%Y-%m-%d_%H:%M_obs_seq.out',
+                 pattern_obs_seq_final: str = './diagnostics/%Y-%m-%d_%H:%M_obs_seq.final',
+                 
+                 # optional
+                 update_vars: list=[], 
+                 dart_nml: dict={}, 
                  use_existing_obsseq: bool | str = False,
-                 input_profile: bool | str = False,
                  nature_wrfout_pattern: bool | str = False,
+                 
+                 # others
                  **kwargs):
         
         # defining the compulsory variables
         self.name = name
         self.model_dx = model_dx
-        self.ensemble_size = ensemble_size
+        self.ensemble_size = int(ensemble_size)
         self.update_vars = update_vars
         self.dart_nml = dart_nml
         
+        # cluster config
+        self.max_nproc = max_nproc
+        self.max_nproc_for_each_ensemble_member = max_nproc_for_each_ensemble_member
+        self.dir_archive = dir_archive.replace('<exp>', self.name)
+        self.dir_wrf_run = dir_wrf_run.replace('<exp>', self.name)
+        self.dir_dart_run = dir_dart_run.replace('<exp>', self.name)
+        
+        # defaults
+        # these are overwritten with choices in **kwargs
+        self.debug = False
+        self.dart_modules = ''
+        self.wrf_modules = ''
+        self.size_jobarray = '1'
+        self.use_slurm = use_slurm
+        self.slurm_cfg = {}
+        self.python = 'python'
+        self.pattern_obs_seq_out = pattern_obs_seq_out.replace('<archivedir>', self.dir_archive)
+        self.pattern_obs_seq_final = pattern_obs_seq_final.replace('<archivedir>', self.dir_archive)
+        
         # optional
         self.use_existing_obsseq = use_existing_obsseq
-        self.input_profile = input_profile
         self.nature_wrfout_pattern = nature_wrfout_pattern
-        
+
+        # user defined
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+            
+        # warnings to the user
         if not update_vars:
             warnings.warn('No `update_vars` defined, not updating any variables after assimilation!')
             
@@ -203,30 +142,78 @@ class Config(object):
         if isinstance(use_existing_obsseq, str):
             print('Using existing observation sequence', use_existing_obsseq)
 
-        # user defined
-        for key, value in kwargs.items():
-            setattr(self, key, value)
+        # required attributes, derived from others
+        self.dir_archive = self.dir_archive.replace('<exp>', self.name)
+        self.dir_dartwrf_run = self.dir_archive+'/DART-WRF/dartwrf/'
+        self.dir_slurm = self.dir_archive+'/slurm-scripts/'
+        self.dir_log = self.dir_archive+'/logs/'
 
+        # directory where config files are stored
+        self.f_cfg_base = self.dir_archive + '/configs/'
+        
+        # write config to file
+        self.f_cfg_current = self.generate_name()
+        self.to_file(self.f_cfg_current)
+        
+    def __contains__(self, key):
+        return hasattr(self, key)
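+        # enables membership tests, e.g.:  if 'input_profile' in cfg: ...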
+    
+    def __getattr__(self, name: str):
+        """Invoked only when normal attribute lookup fails: give a helpful error"""
+        raise AttributeError(f'Attribute `{name}` not found in Config object. Did you set it?')
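+    # e.g. cfg.not_defined raises:
+    #   AttributeError: Attribute `not_defined` not found in Config object. Did you set it?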
 
+    def generate_name(self):
+        random_str = ''.join(random.choices(string.ascii_uppercase + string.digits, k=4))
+        return self.f_cfg_base+'/cfg_'+random_str+'.pkl'
+
+    def update(self, **kwargs):
+        """Update the configuration with new values
+        """
+        if 'name' in kwargs:
+            raise ValueError('You cannot change the name of the experiment!')
+        
+        # set attributes in existing object
+        for key, value in kwargs.items():
+            setattr(self, key, value)
+        
+        # write to file
+        self.f_cfg_current = self.generate_name()
+        self.to_file(self.f_cfg_current)
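+        # Example (illustrative):
+        #   cfg.update(time=dt.datetime(2008, 7, 30, 12))
+        # sets the attribute, then pickles the new state to a fresh cfg_XXXX.pkl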
+    
+    @staticmethod
+    def from_file(fname: str) -> 'Config':
+        """Read a configuration from a file"""
+        d = read_Config_as_dict(fname)
+        return Config(**d)
+
+    def to_file(self, filename: str):
+        """Write itself to a python file"""
+        os.makedirs(os.path.dirname(filename), exist_ok=True)
+        d = self.__dict__
+        
+        with open(filename, 'wb') as handle:
+            pickle.dump(d, handle, protocol=pickle.HIGHEST_PROTOCOL)
+        
+        if self.debug:
+            print('Wrote config to', filename)
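+
+    # Round trip (illustrative):
+    #   f = cfg.f_cfg_current         # pickle written on construction
+    #   cfg2 = Config.from_file(f)    # restores an equivalent Config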
 
-def write_dict_to_pyfile(d: dict, filename: str):
-    """Write a dictionary to a python file"""
-    os.makedirs(os.path.dirname(filename), exist_ok=True)
-    with open(filename, 'w') as f:
-        for key, value in d.items():
-            f.write(key+' = '+str(value)+'\n')
 
-def read_dict_from_pyfile(filename: str) -> Config:
+def read_Config_as_dict(filename: str) -> dict:
     """Read a dictionary from a python file,
     return as Config object
     """
-    with open(filename, 'r') as f:
-        d = {}
-        for line in f:
-            key, value = line.split('=')
-            d[key.strip()] = value.strip()
-    return Config(**d)
-
+    with open(filename, 'rb') as handle:
+        d = pickle.load(handle)
+    print('read config', filename)
+    return d
+
+def display_config(filename: str) -> None:
+    """Pretty-print a pickled configuration file"""
+    d = read_Config_as_dict(filename)
+    pprint(d)
 
 def shell(args, pythonpath=None):
     print(args)
diff --git a/dartwrf/workflows.py b/dartwrf/workflows.py
index f4f1b97f847b090fcf63e2c297707e58e98a4e93..10b6be63562fcf8fd4fbcd18ad21e57f02f5721d 100644
--- a/dartwrf/workflows.py
+++ b/dartwrf/workflows.py
@@ -10,15 +10,14 @@ import sys
 import shutil
 import warnings
 import datetime as dt
-import random
-import string
+import inspect
 
-from dartwrf.utils import script_to_str, shell, import_from_path, read_dict_from_pyfile, write_dict_to_pyfile
+from dartwrf.utils import script_to_str, shell
 from dartwrf.utils import Config
 
 
 class WorkFlows(object):
-    def __init__(self, expname: str, server_config: str='server_config.py'):
+    def __init__(self, cfg: Config):
         """Set up the experiment folder in `archivedir`.
 
         1. Copy the selected config files
@@ -36,107 +35,85 @@ class WorkFlows(object):
-            cluster (obj): cluster configuration as defined in server_config file
-            exp (obj): experiment configuration as defined in exp_config file
+            cfg (Config): experiment and cluster configuration
         """
-
-        def _save_config_to(src: str, dst: str):
-            try:
-                shutil.copyfile(src, dst)
-            except shutil.SameFileError:
-                pass
-
         print('------------------------------------------------------')
-        self.expname = expname
-        print('>>> Experiment name:           "'+self.expname+'"')
-        print('>>> Server configuration:      "'+server_config+'"')
-
-        self.cluster = import_from_path('config', server_config).cluster
-
-        # some helpful variables
-        self.archivedir = self.cluster.archive_base+'/'+self.expname+'/'
-        dart_rundir = self.cluster.dart_rundir_base+'/'+self.expname+'/'
-        self.scripts_rundir = self.archivedir+'/DART-WRF/dartwrf/'
-        pattern_obs_seq_out = self.cluster.pattern_obs_seq_out.replace(
-            '<archivedir>', self.archivedir)
-        pattern_obs_seq_final = self.cluster.pattern_obs_seq_final.replace(
-            '<archivedir>', self.archivedir)
+        print('>>> Experiment name:         "'+cfg.name+'"')
         
-        # collect all variables to put into the config file
-        self.configure_defaults = {
-            'expname': self.expname,
-            'archivedir': self.archivedir,
-            'dart_rundir': dart_rundir,
-            'scripts_rundir': self.scripts_rundir,
-            'pattern_obs_seq_out': pattern_obs_seq_out,
-            'pattern_obs_seq_final': pattern_obs_seq_final,
-        }
-
-        self.f_cfg_base = self.archivedir + '/DART-WRF/configs/'
-        self.f_cfg_current = None
-
         ############### ARCHIVE SCRIPTS AND CONFIGS
         # Copy scripts and config files to `self.archivedir` folder
         dirs_exist_ok = False
-        if os.path.exists(self.archivedir):
+        if os.path.exists(cfg.dir_archive+'/DART-WRF/'):
             if input('The experiment name already exists! Overwrite existing experiment? (Y/n) ') in ['Y', 'y']:
                 dirs_exist_ok = True
 
-        shutil.copytree(self.cluster.dartwrf_dir_dev,
-                        self.archivedir+'/DART-WRF/',
+        shutil.copytree(cfg.dir_dartwrf_dev,
+                        cfg.dir_archive+'/DART-WRF/',
                         ignore=shutil.ignore_patterns('*.git','config/','__*','tests/'),
                         dirs_exist_ok=dirs_exist_ok)
         
-        # copy cluster config to /DART-WRF/dartwrf/
-        _save_config_to(server_config, self.scripts_rundir + '/server_config.py')
         
+        ################# INFORM USER
         print(" ")
-        print('>>> Running experiment in  "'+self.archivedir+'"')
-        self.cluster.log_dir = self.archivedir+'/logs/'
-
-        if self.cluster.use_slurm:
-            self.cluster.slurm_scripts_dir = self.archivedir+'/slurm-scripts/'
-            
-        print(" ")
-        print('>>> DART will run in       "'+dart_rundir+'"')
-        print('>>> WRF will run in        "'+self.cluster.wrf_rundir_base+'/'+self.expname+'"')
-
-        # 6
-        # we set the path from where python should import dartwrf modules
-        # every python command then imports DART-WRF from self.archivedir+'/DART-WRF/dartwrf/'
-        self.cluster.python = 'export PYTHONPATH=' +  \
-            self.scripts_rundir+'/../; '+self.cluster.python
-        print('>>> DART-WRF experiment initialized. ')
+        print('>>> Main directory           "'+cfg.dir_archive+'"')
+        print('>>> DART will run in         "'+cfg.dir_dart_run+'"')
+        print('>>> WRF will run in          "'+cfg.dir_wrf_run+'"')
+        
+        # do we run slurm?
+        self.use_slurm = cfg.use_slurm
+        self.dir_log = cfg.dir_log
+        self.dir_slurm = cfg.dir_slurm
+        
+        if self.use_slurm:
+            print(" ")
+            print('>>> Using SLURM, see logs in "'+self.dir_log+'"')
         print('------------------------------------------------------')
+        
+        # every spawned python process imports dartwrf from the archived copy
+        pythonpath_parent = cfg.dir_dartwrf_run+'/../'
+        self.dir_dartwrf_run = cfg.dir_dartwrf_run
+        self.python = 'export PYTHONPATH='+pythonpath_parent+'; '+cfg.python
+        self.cfg = cfg
 
-    def configure(self, **kwargs):
-        """Update the config in Experiment.cfg
-        because we cant just forward arguments to command line calls, 
-        we write a config file in a specified directory
+    def run_job(self, cmd, cfg, depends_on=None, **kwargs):
+        """Run scripts in a shell
+
+        Without SLURM, the command is executed directly in a shell.
+        With SLURM, the job is submitted via slurmpy; `cfg.slurm_kwargs`
+        provides the default resources, which any **kwargs override.
+
+        Args:
+            cmd (str): Bash command(s) to run
+            cfg (Config): experiment configuration, provides `slurm_kwargs`
+            depends_on (int or None): SLURM job id of a dependency; the job starts after it has finished.
+            **kwargs: SLURM resource overrides, e.g. time="30", mem="200G"
+
+        Returns:
+            SLURM job id if submitted via SLURM, else None
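+
+        Example (illustrative):
+            id = self.run_job(cmd, cfg, depends_on=[prev_id], time="30", mem="200G")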
         """
-        # is there an already existing config?
-        if self.f_cfg_current is None:
-            # there is no config, we write a new one
-            cfg = Config(name=self.expname, 
-                         **self.configure_defaults,
-                         **kwargs) 
-            self.cfg = cfg  # this will be accessed by the module functions below
-            
-        else:
-            # there is already a config, we update it
+        if self.use_slurm:
+            from slurmpy import Slurm
+            # derive the job name from the calling function and the config file
+            calling_function = inspect.stack()[1].function
+            jobname = calling_function+'-'+cfg.f_cfg_current.split('/')[-1].replace('.pkl','')
+            print('> SLURM job:', jobname)
             
-            # read existing config
-            cfg = read_dict_from_pyfile(self.f_cfg_current)
-
-            # set attributes in existing object
+            slurm_kwargs = dict(cfg.slurm_kwargs)  # copy, so the defaults stay untouched
             for key, value in kwargs.items():
-                setattr(cfg, key, value)
+                slurm_kwargs[key] = value
                 
-        # finally, write cfg to file
-        # generate random string for filename
-        random_str = ''.join(random.choices(string.ascii_lowercase + string.digits, k=4))
-        self.f_cfg_current = self.f_cfg_base+'/cfg_'+random_str+'.py'
-        write_dict_to_pyfile(cfg.__dict__, self.f_cfg_current)
+            return Slurm(jobname,
+                        slurm_kwargs=slurm_kwargs,
+                        log_dir=self.dir_log, 
+                        scripts_dir=self.dir_slurm,
+                        ).run(cmd, depends_on=depends_on)
+        else:
+            print(cmd)
+            returncode = os.system(cmd)
+            if returncode != 0:
+                raise RuntimeError('Error running command >>> '+cmd)
 
+###########################################################
+# USER FUNCTIONS
 
-    def prepare_WRFrundir(self, init_time):
+    def prepare_WRFrundir(self, cfg):
         """Prepare WRF run directories for all ensemble members
 
         Note: 
@@ -149,11 +126,11 @@ class WorkFlows(object):
         Returns:
             None
         """
-        cmd = self.cluster.python+' '+self.scripts_rundir + \
-            '/prepare_wrfrundir.py '+init_time.strftime('%Y-%m-%d_%H:%M')
+        cmd = self.python+' '+self.dir_dartwrf_run + '/prepare_wrfrundir.py '+cfg.f_cfg_current
         shell(cmd)
 
-    def generate_obsseq_out(self, times, depends_on=None):
+
+    def generate_obsseq_out(self, cfg, depends_on=None):
         """Creates observations from a nature run for a list of times
 
         Args:
@@ -162,17 +139,15 @@ class WorkFlows(object):
         Returns:
             str: job ID of the submitted job
         """
-        times_str = ','.join([t.strftime('%Y-%m-%d_%H:%M') for t in times])
-
-        cmd = self.cluster.python+' '+self.scripts_rundir + \
-            '/obs/create_obsseq_out.py '+times_str
+        path_to_script = self.dir_dartwrf_run + '/obs/create_obsseq_out.py'
+        cmd = ' '.join([self.python, path_to_script, cfg.f_cfg_current])
 
-        id = self.cluster.run_job(cmd, "obsgen-"+self.expname,
-                                  cfg_update={"ntasks": "20", "time": "30", "mem": "200G", "ntasks-per-node": "20"}, depends_on=[depends_on])
+        id = self.run_job(cmd, cfg, depends_on=[depends_on],
+                          **{"ntasks": "20", "time": "30", "mem": "200G", "ntasks-per-node": "20"})
         return id
 
 
-    def wrfinput_insert_wbubble(self, perturb=True, depends_on=None):
+    def wrfinput_insert_wbubble(self, cfg, depends_on=None):
         """Inserts warm-bubble temperature perturbations into wrfinput files
 
         Note:
@@ -184,18 +159,14 @@ class WorkFlows(object):
 
         Returns:
             str: job ID of the submitted job
-        """
-        pstr = ' '
-        if perturb:
-            pstr = ' perturb'
-        cmd = self.cluster.python+' '+self.scripts_rundir + \
-            '/create_wbubble_wrfinput.py'+pstr
-
-        id = self.cluster.run_job(
-            cmd, "ins_wbub-"+self.expname, cfg_update={"time": "5"}, depends_on=[depends_on])
+        """ 
+        path_to_script = self.dir_dartwrf_run + '/create_wbubble_wrfinput.py'
+        cmd = ' '.join([self.python, path_to_script, cfg.f_cfg_current])
+
+        id = self.run_job(cmd, cfg, depends_on=[depends_on])
         return id
 
-    def run_ideal(self, depends_on=None):
+    def run_ideal(self, cfg, depends_on=None):
         """Run WRF's ideal.exe for every ensemble member
 
         Args:
@@ -204,126 +175,67 @@ class WorkFlows(object):
         Returns:
             str: job ID of the submitted job
         """
-        cmd = script_to_str(self.cluster.WRF_ideal_template
-                            ).replace('<expname>', self.expname
-                            ).replace('<wrf_rundir_base>', self.cluster.wrf_rundir_base
-                            ).replace('<wrf_modules>', self.cluster.wrf_modules,
+        cmd = script_to_str(cfg.WRF_ideal_template
+                            ).replace('<dir_wrf_run>', cfg.dir_wrf_run.replace('<ens>', '$IENS')
+                            ).replace('<wrf_modules>', cfg.wrf_modules,
                             )
-        id = self.cluster.run_job(cmd, "ideal-"+self.expname, 
-                cfg_update={"ntasks": "1", "time": "30", "mem": "200G"}, depends_on=[depends_on])
+        id = self.run_job(cmd, cfg, depends_on=[depends_on])
         return id
     
-    def run_ENS(self, begin, end, first_second=False,
-                input_is_restart=True, output_restart_interval=360, hist_interval_s=300,
-                depends_on=None):
+    def run_WRF(self, cfg, depends_on=None):
         """Run the forecast ensemble
-
-        Args:
-            begin (datetime): start time of the forecast
-            end (datetime): end time of the forecast
-            depends_on (str, optional): job ID of a previous job after which to run this job
-            first_second (bool, optional): if True, get wrfout of first second
-            input_is_restart (bool, optional): if True, start WRF from WRFrst file (restart mode)
-            output_restart_interval (int, optional): interval in minutes between output of WRFrst files
-            hist_interval_s (float, optional): interval in seconds between output of WRF history files
-
-        Returns:
-            str: job ID of the submitted job
         """
-        def _prepare_WRF_inputfiles(begin, end, hist_interval_s=300, radt=1,
-                                   output_restart_interval: int | None = None, 
-                                   depends_on=None):
-
-            args = [self.cluster.python, self.scripts_rundir+'/prepare_namelist.py',
-                    self.f_cfg_current,
-                    begin.strftime('%Y-%m-%d_%H:%M:%S'), 
-                    end.strftime('%Y-%m-%d_%H:%M:%S'),
-                    str(hist_interval_s), 
-                    '--radt='+str(radt), 
-                    '--restart='+restart_flag,]
-
-            if output_restart_interval:
-                args.append('--restart_interval=' +
-                            str(int(float(output_restart_interval))))
-
-            return self.cluster.run_job(' '.join(args), "preWRF",
-                                        cfg_update=dict(time="2"), depends_on=[depends_on])
-            
         ###########################################
+        start = cfg.WRF_start
+        end = cfg.WRF_end
+        
         # SLURM configuration for WRF
-        cfg_wrf = {"array": "1-"+str(self.cfg.ensemble_size),
+        slurm_kwargs = {"array": "1-"+str(self.cfg.ensemble_size),
                       "nodes": "1", 
-                      "ntasks": str(self.cluster.max_nproc_for_each_ensemble_member), 
+                      "ntasks": str(self.cfg.max_nproc_for_each_ensemble_member), 
                       "ntasks-per-core": "1", "mem": "90G", }
 
-        id = depends_on
-        restart_flag = '.false.' if not input_is_restart else '.true.'
-        
         # command from template file
-        wrf_cmd = script_to_str(self.cluster.WRF_exe_template
-                            ).replace('<expname>', self.expname
-                            ).replace('<wrf_rundir_base>', self.cluster.wrf_rundir_base
-                            ).replace('<wrf_modules>', self.cluster.wrf_modules,
-                            ).replace('<WRF_number_of_processors>', "16")
-
-        # if 1-second forecast is required
-        if first_second:
-            id = _prepare_WRF_inputfiles(begin, begin+dt.timedelta(seconds=1),
-                                        hist_interval_s=1,  # to get an output every 1 s
-                                        radt=0,  # to get a cloud fraction CFRAC after 1 s
-                                        output_restart_interval=output_restart_interval,
-                                        depends_on=id)
-
-            id = self.cluster.run_job(wrf_cmd, "WRF-"+self.expname,
-                                      cfg_update=cfg_wrf,  depends_on=[id])
-
-        # forecast for the whole forecast duration
-        id = _prepare_WRF_inputfiles(begin, end,
-                                    hist_interval_s=hist_interval_s,
-                                    output_restart_interval=output_restart_interval,
-                                    depends_on=id)
-
-        time_in_simulation_hours = (end-begin).total_seconds()/3600
+        wrf_cmd = script_to_str(self.cfg.WRF_exe_template
+                            ).replace('<dir_wrf_run>', self.cfg.dir_wrf_run.replace('<ens>', '$IENS')
+                            ).replace('<wrf_modules>', self.cfg.wrf_modules,
+                            ).replace('<WRF_number_of_processors>', str(self.cfg.max_nproc_for_each_ensemble_member),
+                                      )
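+        # e.g. '<dir_wrf_run>' becomes './run_WRF/exp1/$IENS/' (illustrative);
+        # each SLURM array task sets IENS=$SLURM_ARRAY_TASK_ID in the template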
+        # prepare namelist
+        path_to_script = self.dir_dartwrf_run + '/prepare_namelist.py'
+        cmd = ' '.join([self.python, path_to_script, cfg.f_cfg_current])
+        id = self.run_job(cmd, cfg, depends_on=[depends_on])
+
+        # run WRF ensemble
+        time_in_simulation_hours = (end-start).total_seconds()/3600
         runtime_wallclock_mins_expected = int(
+            time_in_simulation_hours*30 + 10)  # generous bound: usually <15 min per simulated hour
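+        # e.g. a 1-hour forecast requests int(1*30 + 10) = 40 min wallclock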
-
-        cfg_wrf.update({"time": str(runtime_wallclock_mins_expected)})
+        slurm_kwargs.update({"time": str(runtime_wallclock_mins_expected)})
         
         if runtime_wallclock_mins_expected > 25:
-            cfg_wrf.update({"partition": "amd"})
+            slurm_kwargs.update({"partition": "amd"})
-        #     #cfg_update.update({"exclude": "jet03"})
 
-        id = self.cluster.run_job(
-            wrf_cmd, "WRF-"+self.expname, cfg_update=cfg_wrf, depends_on=[id])
+        id = self.run_job(wrf_cmd, cfg, depends_on=[id], **slurm_kwargs)
         return id
 
-    def assimilate(self, assim_time, prior_init_time, prior_valid_time, prior_path_exp,
-                   depends_on=None):
+    def assimilate(self, cfg, depends_on=None):
         """Creates observations from a nature run and assimilates them.
-
-        Args:
-            assim_time (dt.datetime):       timestamp of prior wrfout files
-            prior_init_time (dt.datetime):  timestamp to find the directory where the prior wrfout files are
-            prior_path_exp (str):           use this directory to get prior state (i.e. self.archivedir)
-
+        
         Returns:
             str: job ID of the submitted job
         """
-        if not os.path.exists(prior_path_exp):
-            raise IOError('prior_path_exp does not exist: '+prior_path_exp)
-
-        cmd = (self.cluster.python+' '+self.scripts_rundir+'/assimilate.py '
-               + assim_time.strftime('%Y-%m-%d_%H:%M ')
-               + prior_init_time.strftime('%Y-%m-%d_%H:%M ')
-               + prior_valid_time.strftime('%Y-%m-%d_%H:%M ')
-               + prior_path_exp)
-
-        id = self.cluster.run_job(cmd, "Assim-"+self.expname,
-                                  cfg_update={"ntasks": "20", "time": "30", "mem": "110G",
-                                              "ntasks-per-node": "20", "ntasks-per-core": "1"}, depends_on=[depends_on])
+        path_to_script = self.dir_dartwrf_run + '/assimilate.py'
+        cmd = ' '.join([self.python, path_to_script, cfg.f_cfg_current])
+
+        id = self.run_job(cmd, cfg, depends_on=[depends_on], 
+                          **{"ntasks": "20", "time": "30", "mem": "110G",
+                            "ntasks-per-node": "20", "ntasks-per-core": "1"}, 
+                     )
         return id
 
-    def prepare_IC_from_prior(self, prior_path_exp, prior_init_time, prior_valid_time, new_start_time=None, depends_on=None):
+    def prepare_IC_from_prior(self, cfg, depends_on=None):
         """Create initial conditions from prior wrfrst files
 
         Args:
@@ -338,21 +250,13 @@ class WorkFlows(object):
         Returns:
             str: job ID of the submitted job
         """
-        if new_start_time != None:
-            tnew = new_start_time.strftime(' %Y-%m-%d_%H:%M')
-        else:
-            tnew = ''
-
-        cmd = (self.cluster.python+' '+self.scripts_rundir+'/prep_IC_prior.py '
-               + prior_path_exp
-               + prior_init_time.strftime(' %Y-%m-%d_%H:%M')
-               + prior_valid_time.strftime(' %Y-%m-%d_%H:%M')
-               + tnew)
-        id = self.cluster.run_job(
-            cmd, "IC-prior-"+self.expname, cfg_update=dict(time="18"), depends_on=[depends_on])
+        path_to_script = self.dir_dartwrf_run + '/prep_IC_prior.py'
+        cmd = ' '.join([self.python, path_to_script, cfg.f_cfg_current])
+
+        id = self.run_job(cmd, cfg, depends_on=[depends_on])
         return id
 
-    def update_IC_from_DA(self, assim_time, depends_on=None):
+    def update_IC_from_DA(self, cfg, depends_on=None):
         """Update existing initial conditions with the output from the assimilation
 
         Args:
@@ -362,60 +266,45 @@ class WorkFlows(object):
         Returns:
             str: job ID of the submitted job
         """
-        cmd = self.cluster.python+' '+self.scripts_rundir + \
-            '/update_IC.py '+assim_time.strftime('%Y-%m-%d_%H:%M')
-        id = self.cluster.run_job(cmd, "IC-update-"+self.expname,
-                                  cfg_update=dict(time="18"), depends_on=[depends_on])
+        path_to_script = self.dir_dartwrf_run + '/update_IC.py'
+        cmd = ' '.join([self.python, path_to_script, cfg.f_cfg_current])
+
+        id = self.run_job(cmd, cfg, depends_on=[depends_on])
         return id
 
-    def create_satimages(self, init_time, depends_on=None):
+    def create_satimages(self, cfg, depends_on=None):
         """Run a job array, one job per ensemble member, to create satellite images"""
-        cmd = 'module purge; module load rttov/v13.2-gcc-8.5.0; ' \
-            + 'python ~/RTTOV-WRF/run_init.py '+self.archivedir+init_time.strftime('/%Y-%m-%d_%H:%M/ ') \
-            + '$SLURM_ARRAY_TASK_ID'
-        id = self.cluster.run_job(cmd, "RTTOV-"+self.expname,
-                                  cfg_update={"ntasks": "1", "time": "60", "mem": "10G", 
-                                              "array": "1-"+str(self.cfg.ensemble_size)}, 
-                                  depends_on=[depends_on])
-        return id
+        
+        prefix = 'module purge; module load rttov/v13.2-gcc-8.5.0; python'
+        path_to_script = '~/RTTOV-WRF/run_init.py'
+        cmd = ' '.join([prefix, path_to_script, cfg.f_cfg_current])
 
-    def gen_obsseq(self, depends_on=None):
-        """(not included in DART-WRF)"""
-        cmd = self.cluster.python+' '+self.scripts_rundir+'/obsseq_to_netcdf.py'
-        id = self.cluster.run_job("obsseq_netcdf", cfg_update={"time": "10", "mail-type": "FAIL,END"},
-                                  depends_on=[depends_on])
+        id = self.run_job(cmd, cfg, depends_on=[depends_on],
+                          **{"ntasks": "1", "time": "60", "mem": "10G", 
+                                "array": "1-"+str(self.cfg.ensemble_size)})
         return id
 
-    def evaluate_obs_posterior_after_analysis(self, init, valid, depends_on=None):
+    def evaluate_obs_posterior_after_analysis(self, cfg, depends_on=None):
+        """Evaluate the observation-space posterior after assimilation"""
+        path_to_script = self.dir_dartwrf_run + '/evaluate_obs_space.py'
+        cmd = ' '.join([self.python, path_to_script, cfg.f_cfg_current])
 
-        cmd = self.cluster.python+' '+self.scripts_rundir+'/evaluate_obs_space.py ' + \
-            init.strftime('%Y-%m-%d_%H:%M,') + \
-            valid.strftime('%Y-%m-%d_%H:%M:%S')
-        id = self.cluster.run_job(cmd, 'eval+1'+self.expname, cfg_update={"ntasks": "16", "mem": "80G", "ntasks-per-node": "16", "ntasks-per-core": "2",
-                                                                              "time": "9", "mail-type": "FAIL"},
-                                  depends_on=[depends_on])
+        id = self.run_job(cmd, cfg, depends_on=[depends_on],
+                          **{"ntasks": "16", "mem": "80G", "ntasks-per-node": "16", 
+                            "ntasks-per-core": "2", "time": "9", "mail-type": "FAIL"})
 
-        # cmd = self.cluster.python+' '+self.scripts_rundir + \
+        # cmd = self.python+' '+self.dir_dartwrf_run + \
         #     '/calc_linear_posterior.py '+init.strftime('%Y-%m-%d_%H:%M')
-        # id = self.cluster.run_job(cmd, 'linpost'+self.expname, cfg_update={"ntasks": "16", "mem": "80G", "ntasks-per-node": "16", "ntasks-per-core": "2",
+        # id = self.run_job(cmd, 'linpost'+self.cfg.name, cfg_update={"ntasks": "16", "mem": "80G", "ntasks-per-node": "16", "ntasks-per-core": "2",
         #                                                                        "time": "15", "mail-type": "FAIL"},
         #                           depends_on=[id])
         return id
 
-    def verify_sat(self, depends_on=None):
-        """(not included in DART-WRF)"""
-        cmd = self.cluster.python+' /jetfs/home/lkugler/osse_analysis/plot_from_raw/analyze_fc.py ' + \
-            self.expname+' '+self.cfg.nature_exp + ' sat has_node np=2 mem=110G'
-
-        self.cluster.run_job(cmd, "verif-SAT-"+self.expname,
-                             cfg_update={"time": "60", "mail-type": "FAIL,END", "ntasks": "2",
-                                         "ntasks-per-node": "1", "ntasks-per-core": "2", "mem": "110G", }, depends_on=[depends_on])
-
-    def verify_wrf(self, depends_on=None):
-        """(not included in DART-WRF)"""
-        cmd = self.cluster.python+' /jetfs/home/lkugler/osse_analysis/plot_from_raw/analyze_fc.py ' + \
-            self.expname+' '+self.cfg.nature_exp + ' wrf has_node np=10 mem=250G'
-
-        self.cluster.run_job(cmd, "verif-WRF-"+self.expname,
-                             cfg_update={"time": "210", "mail-type": "FAIL,END", "ntasks": "10",
-                                         "ntasks-per-node": "10", "ntasks-per-core": "1", "mem": "250G"}, depends_on=[depends_on])
+    def verify(self, cfg, depends_on=None):
+        """Not included in DART-WRF"""
+        cmd = ' '.join(['python /jetfs/home/lkugler/osse_analysis/plot_from_raw/analyze_fc.py', 
+                        cfg.f_cfg_current])
+        id = self.run_job(cmd, cfg, depends_on=[depends_on],
+                          **{"time": "210", "mail-type": "FAIL,END",
+                             "ntasks": "10", "ntasks-per-node": "10",
+                             "ntasks-per-core": "1", "mem": "250G"})
+        return id
diff --git a/dartwrf/wrfinput_add_geo.py b/dartwrf/wrfinput_add_geo.py
index 6d9284ee6ccb539866144f502c1ddeaa5e0044a0..e572c738bb4fcb130d58f6972739ab39287c23a2 100755
--- a/dartwrf/wrfinput_add_geo.py
+++ b/dartwrf/wrfinput_add_geo.py
@@ -10,11 +10,12 @@ example call:
 """
 import os, sys
 import netCDF4 as nc
-from dartwrf.server_config import cluster
+from dartwrf.utils import Config
 
-def run(geo_data_file, wrfinput_file):
-    geo_ds = nc.Dataset(geo_data_file, 'r')
-    wrfinp_ds = nc.Dataset(wrfinput_file, 'r+')
+def run(cfg: Config) -> None:
+    
+    geo_ds = nc.Dataset(cfg.geo_data_file, 'r')
+    wrfinp_ds = nc.Dataset(cfg.wrfinput_file, 'r+')
 
     fields_old = ["XLAT_M",   "XLONG_M",      "CLAT",
                   "MAPFAC_M",   "MAPFAC_U",  "MAPFAC_V",
@@ -37,10 +38,9 @@ def run(geo_data_file, wrfinput_file):
     geo_ds.close()
 
     # overwrite attributes
-    os.system(cluster.ncks+' -A -x '+geo_data_file+' '+wrfinput_file)
+    os.system(cfg.ncks+' -A -x '+cfg.geo_data_file+' '+cfg.wrfinput_file)
 
 
 if __name__ == '__main__':
-    geo_data_file = sys.argv[1]  # '/home/fs71386/lkugler/compile_WRF/WPS-release-v4.2/geo_em.d01.nc'
-    wrfinput_file = sys.argv[2]  # '/home/fs71386/lkugler/DART/wrfinput_d01'
-    run(geo_data_file, wrfinput_file)
+    cfg = Config.from_file(sys.argv[1])
+    run(cfg)
diff --git a/dartwrf/wrfout_add_geo.py b/dartwrf/wrfout_add_geo.py
index 15c1020a600ffaf8b5f79befa8fe6f437ce9d2cf..598b93b51e180aa60630fafbc57fe6e315c45760 100755
--- a/dartwrf/wrfout_add_geo.py
+++ b/dartwrf/wrfout_add_geo.py
@@ -1,7 +1,6 @@
 import os
-import sys
 import netCDF4 as nc
-from dartwrf.server_config import cluster
+from dartwrf.utils import Config
 
 fields_old = ["XLAT_M",   "XLONG_M",]
              # "XLONG_U",  "XLONG_V",     
@@ -12,7 +11,9 @@ fields_new = ["XLAT",     "XLONG",]
              # "XLAT_U",    "XLAT_V"]
 
 
-def run(geo_data_file, wrfout_file):
+def run(cfg: Config,
+        geo_data_file: str,
+        wrfout_file: str) -> None:
     """Add geogrid data to a wrfout file
     DART needs a georeference, but ideal.exe does not provide it
 
@@ -20,20 +21,17 @@ def run(geo_data_file, wrfout_file):
     Does not change E, F, HGT_M as they would alter the dynamics and have no impact on assimilation
 
     Args:
-        geo_data_file (str): Path to WRF's geo_em file
+        cfg (Config): needs attribute `ncks`, path to the ncks executable
+        geo_data_file (str): Path to WRF's geo_em file
         wrfout_file (str): Path to WRF history (wrfout) file
 
     Returns:
         None
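+    Example (illustrative):
+        run(cfg, 'geo_em.d01.nc', 'wrfout_d01')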
     """
-
-    debug = False
-
     print('updating geodata in', wrfout_file, 'from', geo_data_file)
     geo_ds = nc.Dataset(geo_data_file, 'r')
     wrfinp_ds = nc.Dataset(wrfout_file, 'r+')
-    print('wrfinput.variables', list(wrfinp_ds.variables))
-    print('geo_em.variables',  list(geo_ds.variables))
+    # print('wrfinput.variables', list(wrfinp_ds.variables))
+    # print('geo_em.variables',  list(geo_ds.variables))
 
     for old, new in zip(fields_old, fields_new):
 
@@ -58,15 +56,4 @@ def run(geo_data_file, wrfout_file):
     geo_ds.close()
 
     # overwrite attributes
-    os.system(cluster.ncks+' -A -x '+geo_data_file+' '+wrfout_file)
-
-
-if __name__ == '__main__':
-    """
-    Example:
-        $ ./wrfout_add_geo.py geo_em.d01.nc wrfinput_d01
-    """
-    geo_data_file = sys.argv[1]  # '/home/fs71386/lkugler/compile_WRF/WPS-release-v4.2/geo_em.d01.nc'
-    wrfout_file = sys.argv[2]  # '/home/fs71386/lkugler/DART/wrfinput_d01'
-
-    run(geo_data_file, wrfout_file)
+    os.system(cfg.ncks+' -A -x '+geo_data_file+' '+wrfout_file)
diff --git a/generate_observations.py b/generate_observations.py
deleted file mode 100755
index 89d3f96035b15a8b2d20056fc38a384647ae5f63..0000000000000000000000000000000000000000
--- a/generate_observations.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/python3
-"""
-Generate observation files from an experiment
-"""
-import datetime as dt
-import pandas as pd
-from dartwrf.workflows import WorkFlows
-
-w = WorkFlows(exp_config='exp_hires.py', server_config='jet.py')
-
-#obs_times = [dt.datetime(2008,7,30,14,),]
-obs_times = pd.date_range(start='2008-07-30 13:00:00', end='2008-07-30 14:00:00', freq='15min')
-
-w.generate_obsseq_out(obs_times)
diff --git a/multiple_exps.py b/multiple_exps.py
index 94567bf20cdec79e206bf34cea36256b02615d18..3a43b32967849d67646daca7ba9057fc8a7f26bf 100644
--- a/multiple_exps.py
+++ b/multiple_exps.py
@@ -1,79 +1,98 @@
 import datetime as dt
-import sys
 import pandas as pd
 from dartwrf.workflows import WorkFlows
-from config import defaults
+from dartwrf.utils import Config
 
 
+# import default config for jet
+from config.jet import cluster_defaults
+from config.defaults import dart_nml
+
 # test multiple assimilation windows (11-12, 12-13, 13-14, )
 timedelta_btw_assim = dt.timedelta(minutes=15)
-assim_times_start = pd.date_range('2000-01-01 11:00', '2000-01-01 13:00', freq='h')
-    
+assim_times_start = pd.date_range('2008-07-30 11:00', '2008-07-30 13:00', freq='h')
+
 for t0 in assim_times_start:
 
-    # set constants
-    w = WorkFlows(server_config='/jetfs/home/lkugler/DART-WRF/config/jet.py', 
-                   expname=t0.strftime('test_%H%M'))
-   
-    # set potentially varying parameters
-    ens_size = 3
-    dart_nml = defaults.dart_nml
-    dart_nml.update(ens_size=ens_size)
-    
-    w.configure(model_dx=2000, 
-                ensemble_size=3,
-                dart_nml = dart_nml,
-                use_existing_obsseq=False,
-                update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'QSNOW', 'PSFC'],
-                input_profile = '/mnt/jetfs/home/lkugler/data/initial_profiles/wrf/ens/2022-03-31/raso.fc.<iens>.wrfprof',
-                nature_wrfout_pattern = '/jetfs/home/lkugler/data/sim_archive/exp_v1.18_P1_nature+1/*/1/wrfout_d01_%Y-%m-%d_%H:%M:%S',
-                nature_exp = 'nat_250m_blockavg2km',)
 
-    
-    time = t0
     id = None
-    w.prepare_WRFrundir(time)
-    id = w.run_ideal(depends_on=id)
-    sys.exit()
+    ensemble_size = 3
+    
+    dart_nml['&filter_nml'].update(num_output_state_members=ensemble_size,
+                                   ens_size=ensemble_size)
+            
+    t_raso = dict(var_name='Temperature', unit='[K]',
+            kind='RADIOSONDE_TEMPERATURE',
+            obs_locations=[(45., 0.)],
+            error_generate=0.2, error_assimilate=0.6,
+            heights=range(1000, 15001, 500),
+            loc_horiz_km=100, loc_vert_km=3)
+            
+    cfg = Config(name=t0.strftime('test_%H%M'),
+        model_dx = 2000,
+        ensemble_size = ensemble_size,
+        dart_nml = dart_nml,
+        geo_em_forecast = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200',
+        time = t0,
+        
+        use_existing_obsseq = False,
+        nature_wrfout_pattern = '/jetfs/home/lkugler/data/sim_archive/nat_250m_1600x1600x100/*/1/wrfout_d01_%Y-%m-%d_%H_%M_%S',
+        #geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200',
+        geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.250m_1600x1600',
+        
+        update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'QSNOW', 'PSFC'],
+        input_profile = '/jetfs/home/lkugler/data/sim_archive/nat_250m_1600x1600x100/2008-07-30_08:00/1/input_sounding',
+        nature_exp_verif = 'nat_250m_blockavg2km',
+        assimilate_these_observations = [t_raso,],
+        **cluster_defaults, # type: ignore
+    )
+
     
-    prior_init_time = time - dt.timedelta(hours=1)
-    prior_valid_time = time
-    prior_path_exp = '/jetfs/home/lkugler/data/sim_archive/exp_v1.19_P2_noDA+1/'
+    w = WorkFlows(cfg)
+    w.prepare_WRFrundir(cfg)
+    #id = w.run_ideal(cfg, depends_on=id)
     
     # assimilate at these times
-    assim_times = pd.date_range(time, time + dt.timedelta(hours=1), freq=timedelta_btw_assim)
+    time0 = cfg.time
+    assim_times = pd.date_range(time0, time0 + timedelta_btw_assim, freq=timedelta_btw_assim)
     last_assim_time = assim_times[-1]
-    
+
     # loop over assimilations
     for i, t in enumerate(assim_times):
         
-        id = w.assimilate(time, prior_init_time, prior_valid_time, prior_path_exp, depends_on=id)
+        if i == 0:
+            cfg.update(time = t,
+                prior_init_time = dt.datetime(2008, 7, 30, 8),
+                prior_valid_time = t,
+                prior_path_exp = '/jetfs/home/lkugler/data/sim_archive/exp_nat250m_noDA/',)
+        else:
+            cfg.update(time = t,
+                prior_init_time = assim_times[i-1],
+                prior_valid_time = t,
+                prior_path_exp = cfg.dir_archive,)
+
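+        # e.g. with assim_times = [11:00, 11:15]: the first cycle takes its prior
+        # from the noDA experiment, the second from this experiment's own archive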
+        id = w.assimilate(cfg, depends_on=id)
 
         # 1) Set posterior = prior
-        id = w.prepare_IC_from_prior(prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
+        id = w.prepare_IC_from_prior(cfg, depends_on=id)
 
         # 2) Update posterior += updates from assimilation
-        id = w.update_IC_from_DA(time, depends_on=id)
+        id = w.update_IC_from_DA(cfg, depends_on=id)
 
         # How long shall we integrate?
-        timedelta_integrate = timedelta_btw_assim
-        output_restart_interval = timedelta_btw_assim.total_seconds()/60
-        if time == last_assim_time:
-            timedelta_integrate = dt.timedelta(hours=4)
-            output_restart_interval = 9999  # no restart file after last assim
+        timedelta_integrate = dt.timedelta(minutes=5)
+
+        cfg.update( WRF_start=t, 
+                    WRF_end=t+timedelta_integrate, 
+                    restart=True, 
+                    restart_interval=9999,
+                    hist_interval_s=300,
+        )
 
         # 3) Run WRF ensemble
-        id = w.run_ENS(begin=time,  # start integration from here
-                       end=time + timedelta_integrate,  # integrate until here
-                       output_restart_interval=output_restart_interval,
-                       depends_on=id)
-        
-        if t < last_assim_time:
-            # continue the next cycle
-            prior_init_time = assim_times[i]
-            prior_valid_time = assim_times[i+1]
-            prior_path_exp = w.archivedir  # use own exp path as prior
-        else:
-            # exit the cycles
-            break
-        
\ No newline at end of file
+        id = w.run_WRF(cfg, depends_on=id)
\ No newline at end of file
diff --git a/config/templates/impactfactor_T.txt b/templates/impactfactor_T.txt
similarity index 100%
rename from config/templates/impactfactor_T.txt
rename to templates/impactfactor_T.txt
diff --git a/config/templates/namelist.input b/templates/namelist.input
similarity index 78%
rename from config/templates/namelist.input
rename to templates/namelist.input
index 74cfd66dd9973319d823ee07118496cd1d5ea401..4ed30190d491064cfe5cb1d21dc959756bc22999 100644
--- a/config/templates/namelist.input
+++ b/templates/namelist.input
@@ -1,25 +1,23 @@
  &time_control
- start_year                          = <y1>, 0001, 0001,
- start_month                         = <m1>,   01,   01,
- start_day                           = <d1>,   01,   01,
- start_hour                          = <HH1>,   00,   00,
- start_minute                        = <MM1>,   00,   00,
- start_second                        = <SS1>,   00,   00,
- end_year                            = <y2>, 0001, 0001,
- end_month                           = <m2>,   01,   01,
- end_day                             = <d2>,   01,   01,
- end_hour                            = <HH2>,   00,   00,
- end_minute                          = <MM2>,  120,  120,
- end_second                          = <SS2>,   00,   00,
+ start_year                          = 2000, 2001, 2001,
+ start_month                         = 01,   01,   01,
+ start_day                           = 01,   01,   01,
+ start_hour                          = 00,   00,   00,
+ start_minute                        = 00,   00,   00,
+ start_second                        = 00,   00,   00,
+ end_year                            = 2001, 2001, 2001,
+ end_month                           = 01,   01,   01,
+ end_day                             = 01,   01,   01,
+ end_hour                            = 00,   00,   00,
+ end_minute                          = 00,   00,   00,
+ end_second                          = 00,   00,   00,
  interval_seconds                    = 86400
- history_interval_s                  = <hist_interval_s>
+ history_interval_s                  = 300,
  write_hist_at_0h_rst                = .true.,
  frames_per_outfile                  = 1, 1, 1,
- history_outname 		     = '<archivedir>/wrfout_d<domain>_<date>'
- restart                             = <restart>,
+ restart                             = .false.,
  override_restart_timers             = .true.,
- restart_interval                    = <restart_interval>,
- rst_outname			     = '<archivedir>/wrfrst_d<domain>_<date>',
+ restart_interval                    = 99999,
  io_form_history                     = 2
  io_form_restart                     = 2
  io_form_input                       = 2
@@ -45,8 +43,8 @@
  e_sn                                = 201,    43,    43,
  s_vert                              = 1,     1,     1,
  e_vert                              = 101,    41,    41,
- dx                                  = <dx>,  666.6666667, 222.2222222
- dy                                  = <dx>,  666.6666667, 222.2222222
+ dx                                  = 1000,  666.6666667, 222.2222222
+ dy                                  = 1000,  666.6666667, 222.2222222
  ztop                                = 22000, 20000, 20000,
  grid_id                             = 1,     2,     3,
  parent_id                           = 0,     1,     2,
@@ -59,17 +57,17 @@
  /
 
  &physics
- mp_physics                          = 8,     1,     1,
+ mp_physics                          = 6,     1,     1,
  ra_lw_physics                       = 4,     0,     0,
  ra_sw_physics                       = 4,     0,     0,
- radt                                = <radt>,    30,    30,
- sf_sfclay_physics                   = 5,     0,     0,
- sf_surface_physics                  = 2,     0,     0,
+ radt                                = 1,    30,    30,
+ sf_sfclay_physics                   = 1,     0,     0,
+ sf_surface_physics                  = 1,     0,     0,
  bl_pbl_physics                      = 5,     0,     0,
  bldt                                = 0,     0,     0,
  cu_physics                          = 0,     0,     0,
  cudt                                = 5,     5,     5,
- do_radar_ref		             = 1,
+ do_radar_ref		                 = 0,
  icloud_bl                           = 1,
  /
 
@@ -128,5 +126,5 @@
  /
 
  &ideal
- ideal_case = 9  ! les
+ ideal_case = 9 ! les
  /
diff --git a/config/templates/obs_def_rttov.IR.nml b/templates/obs_def_rttov.IR.nml
similarity index 100%
rename from config/templates/obs_def_rttov.IR.nml
rename to templates/obs_def_rttov.IR.nml
diff --git a/templates/obs_def_rttov.VIS+WV.nml b/templates/obs_def_rttov.VIS+WV.nml
new file mode 100644
index 0000000000000000000000000000000000000000..50099921558e3ecd438fac23c97c86b24d4cb3b9
--- /dev/null
+++ b/templates/obs_def_rttov.VIS+WV.nml
@@ -0,0 +1,71 @@
+&obs_def_rttov_nml
+   rttov_sensor_db_file   = 'rttov_sensor_db.csv'
+   first_lvl_is_sfc       = .true.
+   mw_clear_sky_only      = .false.
+   interp_mode            = 1
+   do_checkinput          = .true.
+   apply_reg_limits       = .true.
+   verbose                = .true.
+   fix_hgpl               = .false.
+   do_lambertian          = .false.
+   lambertian_fixed_angle = .true.
+   rad_down_lin_tau       = .true.
+   use_q2m                = .false.
+   use_uv10m              = .false.
+   use_wfetch             = .false.
+   use_water_type         = .false.
+   addrefrac              = .true.
+   plane_parallel         = .false.
+   use_salinity           = .false.
+   cfrac_data             = .true.
+   clw_data               = .true.
+   rain_data              = .false.
+   ciw_data               = .true.
+   snow_data              = .false.
+   graupel_data           = .false.
+   hail_data              = .false.
+   w_data                 = .false.
+   clw_scheme             = 2
+   clw_cloud_top          = 322.
+   fastem_version         = 6
+   supply_foam_fraction   = .false.
+   use_totalice           = .true.
+   use_zeeman             = .false.
+   cc_threshold           = 0.05
+   ozone_data             = .false.
+   co2_data               = .false.
+   n2o_data               = .false.
+   co_data                = .false.
+   ch4_data               = .false.
+   so2_data               = .false.
+   addsolar               = .true.
+   rayleigh_single_scatt  = .false.
+   do_nlte_correction     = .false.
+   solar_sea_brdf_model   = 2
+   ir_sea_emis_model      = 2
+   use_sfc_snow_frac      = .false.
+   add_aerosl             = .false.
+   aerosl_type            = 1
+   add_clouds             = .true.
+   ice_scheme             = 1
+   use_icede              = .true.
+   idg_scheme             = 1
+   user_aer_opt_param     = .false.
+   user_cld_opt_param     = .false.
+   grid_box_avg_cloud     = .true.
+   cldcol_threshold       = -1.0
+   cloud_overlap          = 1
+   ir_scatt_model         = 2
+   vis_scatt_model        = 3
+   dom_nstreams           = 8
+   dom_accuracy           = 0.0
+   dom_opdep_threshold    = 0.0
+   addpc                  = .false.
+   npcscores              = -1
+   addradrec              = .false.
+   ipcreg                 = 1
+   use_htfrtc             = .false.
+   htfrtc_n_pc            = -1
+   htfrtc_simple_cloud    = .false.
+   htfrtc_overcast        = .false.
+/
diff --git a/templates/run_WRF.jet.sh b/templates/run_WRF.jet.sh
new file mode 100755
index 0000000000000000000000000000000000000000..22e6fc691173d8445d839efcb9c194844fa05234
--- /dev/null
+++ b/templates/run_WRF.jet.sh
@@ -0,0 +1,24 @@
+<wrf_modules>
+export SLURM_STEP_GRES=none
+
+echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
+IENS=$SLURM_ARRAY_TASK_ID
+
+RUNDIR=<dir_wrf_run>
+echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
+
+cd $RUNDIR
+rm -rf rsl.out.0*
+echo "mpirun -np <WRF_number_of_processors> ./wrf.exe"
+mpirun -np <WRF_number_of_processors> ./wrf.exe
+
+
+# error checking
+line=`tail -n 2 rsl.out.0000`
+if [[ $line == *"SUCCESS COMPLETE WRF"* ]]; 
+then 
+   echo $RUNDIR 'SUCCESS COMPLETE WRF'
+else  
+   echo $RUNDIR $line
+   exit 1
+fi
diff --git a/templates/run_WRF_ideal.sh b/templates/run_WRF_ideal.sh
new file mode 100644
index 0000000000000000000000000000000000000000..3377887d4554653b9e71957b048a940165133b63
--- /dev/null
+++ b/templates/run_WRF_ideal.sh
@@ -0,0 +1,19 @@
+<wrf_modules>
+export SLURM_STEP_GRES=none
+
+echo "SLURM_ARRAY_TASK_ID:"$SLURM_ARRAY_TASK_ID
+IENS=$SLURM_ARRAY_TASK_ID
+RUNDIR=<dir_wrf_run>
+echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
+
+cd $RUNDIR
+rm -rf rsl.out.0*
+mpirun -np 1 ./ideal.exe
+
+# move log file to sim_archive
+touch -a $RUNDIR/rsl.out.0000  # ensure the log file exists so that the mv below cannot fail
+mv $RUNDIR/rsl.out.0000 $RUNDIR/rsl.out.input
+
diff --git a/tests/test_assim.py b/tests/test_assim.py
index c977e5f14c869f4e5140b461df25f20b037f90e7..5a3d551e580dcceda62ab3c0d3e2c94da47dc9dd 100644
--- a/tests/test_assim.py
+++ b/tests/test_assim.py
@@ -1,93 +1,103 @@
 import os, shutil
+import datetime as dt
+import pandas as pd
+from dartwrf.workflows import WorkFlows
+from dartwrf.utils import Config
 
 def test_dartwrf_cycled_da():
-    import datetime as dt
+    shutil.rmtree('/jetfs/home/lkugler/data/sim_archive/test_exp', ignore_errors=True)
     
-    from dartwrf.workflows import WorkFlows
-    w = WorkFlows(exp_config='exp_template.py', server_config='jet.py')
-
-    timedelta_integrate = dt.timedelta(minutes=15)
-    timedelta_btw_assim = dt.timedelta(minutes=15)
+    # import default config for jet
+    from config.jet import cluster_defaults
+    from config.defaults import dart_nml
 
     id = None
+    ensemble_size = 3
+    
+    dart_nml['&filter_nml'].update(num_output_state_members=ensemble_size,
+                                   ens_size=ensemble_size)
+            
+    t_raso = dict(var_name='Temperature', unit='[K]',
+            kind='RADIOSONDE_TEMPERATURE',
+            obs_locations=[(45., 0.)],
+            error_generate=0.2, error_assimilate=0.6,
+            heights=range(1000, 15001, 500),
+            loc_horiz_km=100, loc_vert_km=3)
+            
+    cfg = Config(name='test_exp', 
+        model_dx = 2000,
+        ensemble_size = ensemble_size,
+        dart_nml = dart_nml,
+        geo_em_forecast = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200',
+        time = dt.datetime(2008, 7, 30, 11),
+        
+        use_existing_obsseq = False,
+        nature_wrfout_pattern = '/jetfs/home/lkugler/data/sim_archive/nat_250m_1600x1600x100/*/1/wrfout_d01_%Y-%m-%d_%H_%M_%S',
+        #geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200',
+        geo_em_nature = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.250m_1600x1600',
+        
+        update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'QSNOW', 'PSFC'],
+        input_profile = '/jetfs/home/lkugler/data/sim_archive/nat_250m_1600x1600x100/2008-07-30_08:00/1/input_sounding',
+        nature_exp_verif = 'nat_250m_blockavg2km',
+        assimilate_these_observations = [t_raso,],
+        **cluster_defaults, # type: ignore
+    )
+
+    timedelta_btw_assim = dt.timedelta(minutes=15)
 
-    if True:  # random
-        # need some ensemble data to test
-        prior_path_exp = '/jetfs/scratch/lkugler/data/sim_archive/exp_v1.19_P2_noDA+1'
-
-        init_time = dt.datetime(2008, 7, 30, 12, 30)
-        time = dt.datetime(2008, 7, 30, 13)
-        last_assim_time = dt.datetime(2008, 7, 30, 13)
-        forecast_until = dt.datetime(2008, 7, 30, 13,10)
-
-        w.prepare_WRFrundir(init_time)
-        # id = w.run_ideal(depends_on=id)
-
-    prior_init_time = init_time
-
-    while time <= last_assim_time:
-
-        # usually we take the prior from the current time
-        # but one could use a prior from a different time from another run
-        # i.e. 13z as a prior to assimilate 12z observations
-        prior_valid_time = time
+    
+    w = WorkFlows(cfg)
+    w.prepare_WRFrundir(cfg)
+    #id = w.run_ideal(cfg, depends_on=id)
+    
+    # assimilate at these times
+    time0 = cfg.time
+    assim_times = pd.date_range(time0, time0 + timedelta_btw_assim, freq=timedelta_btw_assim)
+    last_assim_time = assim_times[-1]
 
-        id = w.assimilate(time, prior_init_time, prior_valid_time, prior_path_exp, depends_on=id)
+    # loop over assimilations
+    for i, t in enumerate(assim_times):
         
+        if i == 0:
+            cfg.update(time = t,
+                prior_init_time = dt.datetime(2008, 7, 30, 8),
+                prior_valid_time = t,
+                prior_path_exp = '/jetfs/home/lkugler/data/sim_archive/exp_nat250m_noDA/',)
+        else:
+            cfg.update(time = t,
+                prior_init_time = assim_times[i-1],
+                prior_valid_time = t,
+                prior_path_exp = cfg.dir_archive,)
+
+        id = w.assimilate(cfg, depends_on=id)
+
         # 1) Set posterior = prior
-        id = w.prepare_IC_from_prior(prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
+        id = w.prepare_IC_from_prior(cfg, depends_on=id)
 
         # 2) Update posterior += updates from assimilation
-        id = w.update_IC_from_DA(time, depends_on=id)
+        id = w.update_IC_from_DA(cfg, depends_on=id)
 
         # How long shall we integrate?
-        timedelta_integrate = timedelta_btw_assim
-        output_restart_interval = timedelta_btw_assim.total_seconds()/60
-        if time == last_assim_time:  # this_forecast_init.minute in [0,]:  # longer forecast every full hour
-            timedelta_integrate = forecast_until - last_assim_time  # dt.timedelta(hours=4)
-            output_restart_interval = 9999  # no restart file after last assim
+        timedelta_integrate = dt.timedelta(minutes=5)
 
-        # 3) Run WRF ensemble
-        id = w.run_ENS(begin=time,  # start integration from here
-                    end=time + timedelta_integrate,  # integrate until here
-                    output_restart_interval=output_restart_interval,
-                    first_minutes=False,
-                    depends_on=id)
-        
-        # as we have WRF output, we can use own exp path as prior
-        prior_path_exp = w.cluster.archivedir
+        cfg.update( WRF_start=t, 
+                    WRF_end=t+timedelta_integrate, 
+                    restart=True, 
+                    restart_interval=9999,
+                    hist_interval_s=300,
+        )
 
-        time += timedelta_btw_assim
-        prior_init_time = time - timedelta_btw_assim
+        # 3) Run WRF ensemble
+        id = w.run_WRF(cfg, depends_on=id)
+        break  # only run the first assimilation cycle in this test
     
 
-def test_overwrite_OE_assim():
-    import datetime as dt
-    from dartwrf import assimilate as aso
-    from dartwrf.obs import obsseq
-    from dartwrf.server_config import cluster
-
-    # checks if modified entries are correctly written to DART files
-    input_template = "./obs_seq.orig.out"
-    input_temporary = "./obs_seq.out"
-    # TODO: MISSING FILE
-    # input_osf = "./obs_seq.final"
-    output = "./obs_seq.test.out"
-    shutil.copy(input_template, input_temporary)
-
-    oso = obsseq.ObsSeq(input_temporary)
-    #osf = obsseq.ObsSeq(input_osf)
-
-    aso.set_obserr_assimilate_in_obsseqout(oso, outfile=output)
-
-    var_orig = oso.df['variance']
-
-    oso_test = obsseq.ObsSeq(output)  # read in again
-    assert oso_test.df['variance'].iloc[0] == var_orig
-    os.remove(output)
-    os.remove(input_temporary)
 
 if __name__ == '__main__':
-    # test_dartwrf_cycled_da()
-    # test_overwrite_OE_assim()
+    test_dartwrf_cycled_da()
-    pass
diff --git a/tests/test_dart-rttov.py b/tests/test_dart-rttov.py
deleted file mode 100644
index b2f56e9540c6d66d6e75fe74395435ceae597336..0000000000000000000000000000000000000000
--- a/tests/test_dart-rttov.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import os, filecmp, shutil
-import numpy as np
-import datetime as dt
-import pandas as pd
-
-from dartwrf.obs import obsseq
-import dartwrf.obs.create_obsseq_in as osq
-import dartwrf.assimilate as aso
-from dartwrf import wrfout_add_geo
-
-import matplotlib as mpl
-import proplot as plt
-import xarray as xr
-
-n_obs = 22500
-vis = dict(plotname='VIS06',  plotunits='[1]',
-        kind='MSG_4_SEVIRI_BDRF', sat_channel=1, n_obs=n_obs, 
-        error_generate=0, error_assimilate=0.12,
-        cov_loc_radius_km=20)
-wv62 = dict(plotname='WV62',  plotunits='[K]',
-            kind='MSG_4_SEVIRI_TB', sat_channel=5, n_obs=n_obs, 
-            error_generate=0, error_assimilate=False, 
-            cov_loc_radius_km=20)
-wv73 = dict(plotname='WV73',  plotunits='[K]',
-            kind='MSG_4_SEVIRI_TB', sat_channel=6, n_obs=n_obs, 
-            error_generate=0, error_assimilate=False, 
-            cov_loc_radius_km=20)
-
-vsc = True if 'jet' not in os.uname().nodename else False
-if vsc:
-    datadir = '/gpfs/data/fs71386/lkugler/sim_archive/'
-    img_dir = '/gpfs/data/fs71386/lkugler/analysis/'
-else:
-    datadir = '/jetfs/home/lkugler/data/sim_archive/'
-    img_dir = '/jetfs/home/lkugler/analysis_plots/'
-
-dart_rundir = datadir+'/../run_DART/test_jet'
-
-def test_rttov():
-
-    times = pd.date_range(start=dt.datetime(2008, 7, 30, 13,30),
-                              end=dt.datetime(2008, 7, 30, 14,30),
-                              freq=dt.timedelta(minutes=30))
-
-    case = 'exp_v1.19_P3_wbub7_nat'
-
-
-    for obscfg in [wv73, vis, wv62]:
-
-        for time in times:
-
-            fname = img_dir+'/'+case+'/dart-rttov-compare_'+obscfg['plotname']+time.strftime('_%H:%M')+'.png'
-
-            ds = xr.open_dataset(datadir+'/'+case+'/2008-07-30_12:00/1/'+time.strftime('RT_wrfout_d01_2008-07-30_%H:%M:00.nc'))
-            if obscfg['plotname'] not in ds.data_vars:
-                raise KeyError(obscfg['plotname']+' not in dataset')
-            da = ds[obscfg['plotname']].squeeze()
-
-
-            shutil.copy(datadir+'/'+case+'/2008-07-30_12:00/1/'+time.strftime('wrfout_d01_2008-07-30_%H:%M:00'), 
-                        dart_rundir + "/wrfout_d01")
-
-            wrfout_add_geo.run(datadir+ "/geo_em.d01.nc", dart_rundir + "/wrfout_d01")
-           
-
-            osq.create_obsseqin_alltypes(time, [obscfg])
-            aso.set_DART_nml()
-            aso.run_perfect_model_obs(nproc=1) 
-
-
-            obs = obsseq.ObsSeq(dart_rundir+'/obs_seq.out')
-            obs_map = obs.df.truth.values.reshape(150,150)
-
-
-            fig, ax = plt.subplots(ncols=2)
-
-            if obscfg['plotname'] == 'VIS06':
-                norm = mpl.colors.Normalize(vmin=0, vmax=1)
-                levels = plt.arange(0, 1, .05)
-            else:
-                norm = mpl.colors.Normalize(vmin=210, vmax=260)
-                levels = plt.arange(200, 260, 5)
-
-            ax[0].pcolormesh(obs_map, norm=norm, levels=levels)
-            ax[0].set_title('DART-RTTOV')
-            m1 = ax[1].pcolormesh(da.values[24:175,24:175], norm=norm, levels=levels) 
-            ax[1].set_title('py-RTTOV')
-            fig.colorbar(m1, loc='r')
-            fig.suptitle(time.strftime('%H:%M'))
-            
-            fig.savefig(fname)
-            print(fname)
-
-
-
-            fig, ax = plt.subplots()
-            diff =  da.values[25:175,25:175] - obs_map
-
-            ax[0].pcolormesh(diff, cmap='RdBu_r')
-            ax[0].set_title('pyRTTOV - DART-RTTOV')
-            fig.colorbar(m1, loc='r')
-            fig.suptitle(time.strftime('%H:%M'))
-            
-            fname = img_dir+'/'+case+'/dart-rttov-compare_'+obscfg['plotname']+time.strftime('_%H:%M')+'-diff.png'
-            fig.savefig(fname)
-            print(fname)
-
-
-
-
-if __name__ == '__main__':
-    test_rttov()
-    pass
diff --git a/tests/test_obsseq.py b/tests/test_obsseq.py
index f2abb6e4bf835f8a6f07b567dc5a95714f6f2e6f..fb15115594d5cfeb0aa93d2c1117088aa06a3070 100644
--- a/tests/test_obsseq.py
+++ b/tests/test_obsseq.py
@@ -1,7 +1,5 @@
 import os, filecmp, shutil
 import numpy as np
-
-from dartwrf.server_config import cluster
 from dartwrf.obs import obsseq