diff --git a/config/cluster.py b/config/cluster.py
index 9a60872f55e30e96ebdaf1fdf0ef4495dc740071..96cffd303bdaede50016e6cfc12899640d70ff0a 100755
--- a/config/cluster.py
+++ b/config/cluster.py
@@ -42,35 +42,38 @@ slurm_cfg               python dictionary, containing options of SLURM
                             'cfg_update = {"nodes": "2"}'
 """
 
+
 cluster = utils.ClusterConfig(exp)
-cluster.name = 'srvx1'
-cluster.max_nproc = 6
-cluster.use_slurm = False
+cluster.name = 'jet'
+cluster.max_nproc = 12
+cluster.use_slurm = True
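+# size of SLURM job arrays (presumably the number of ensemble members run per array)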
+cluster.size_jobarray = 40
 
 # binaries
-cluster.python = '/users/staff/lkugler/miniconda3/bin/python'
-cluster.python_verif = '/users/staff/lkugler/miniconda3/bin/python'
-cluster.ncks = '/home/swd/spack/opt/spack/linux-rhel8-skylake_avx512/gcc-8.5.0/nco-5.0.1-ntu44aoxlvwtr2tsrobfr4lht7cpvccf/bin/ncks'
-cluster.ideal = '' #/jetfs/home/lkugler/bin/ideal-v4.3_v1.22.exe'
-cluster.wrfexe = '' #/jetfs/home/lkugler/bin/wrf-v4.3_v1.22.exe'
+cluster.python = '/jetfs/home/lkugler/miniconda3/envs/DART/bin/python'
+cluster.python_verif = '/jetfs/home/lkugler/miniconda3/envs/enstools/bin/python'
+cluster.ncks = '/jetfs/spack/opt/spack/linux-rhel8-skylake_avx512/intel-20.0.2/nco-4.9.3-dhlqiyog7howjmaleyfhm6lkt7ra37xf/bin/ncks'
+cluster.ideal = '/jetfs/home/lkugler/bin/ideal-v4.3_v1.22.exe'
+cluster.wrfexe = '/jetfs/home/lkugler/bin/wrf-v4.3_v1.22.exe'
 cluster.container = ''
 
 # paths for data output
-cluster.wrf_rundir_base = '/users/staff/lkugler/AdvDA23/run_WRF/'  # path for temporary files
-cluster.dart_rundir_base = '/users/staff/lkugler/AdvDA23/run_DART/'  # path for temporary files
-cluster.archive_base = '/mnt/jetfs/scratch/lkugler/data/sim_archive/'
+cluster.wrf_rundir_base = '/jetfs/home/lkugler/data/run_WRF/'  # path for temporary files
+cluster.dart_rundir_base = '/jetfs/home/lkugler/data/run_DART/'  # path for temporary files
+cluster.archive_base = '/jetfs/home/lkugler/data/sim_archive/'
 
 # paths used as input
-cluster.srcdir = '/users/staff/lkugler/AdvDA23/DART/WRF-4.3/run'
-cluster.dart_srcdir = '/users/staff/lkugler/AdvDA23/DART/models/wrf/work'
-cluster.rttov_srcdir = '/users/staff/lkugler/AdvDA23/RTTOV13/rtcoef_rttov13/'
-cluster.scriptsdir = '/users/staff/lkugler/AdvDA23/DART-WRF/dartwrf/'
-cluster.geo_em = '/mnt/jetfs/scratch/lkugler/data/geo_em.d01.nc'
+cluster.srcdir = '/jetfs/home/lkugler/data/compile/WRF-4.3/run'
+cluster.dart_srcdir = '/jetfs/home/lkugler/data/compile/DART/DART-10.5.3/models/wrf/work'
+cluster.rttov_srcdir = '/jetfs/home/lkugler/data/compile/RTTOV13/rtcoef_rttov13/'
+cluster.scriptsdir = '/jetfs/home/lkugler/DART-WRF/dartwrf/'
 
-# templates/run scripts
+# other inputs
+cluster.geo_em = '/jetfs/home/lkugler/data/geo_em.d01.nc'
+cluster.obs_impact_filename = cluster.scriptsdir+'/../templates/impactfactor_T.txt'
 cluster.namelist = cluster.scriptsdir+'/../templates/namelist.input'
-cluster.run_WRF = cluster.scriptsdir+'/run_ens.jet.sh'
+cluster.run_WRF = '/jetfs/home/lkugler/DART-WRF/dartwrf/run_ens.jet.sh'
 
-cluster.slurm_cfg = {"account": "lkugler", "partition": "compute",
+cluster.slurm_cfg = {"account": "lkugler", "partition": "compute", #"nodelist": "jet07",
                  "ntasks": "1", "ntasks-per-core": "1", "mem": "50G",
                  "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"}
diff --git a/cycled_exp.py b/cycled_exp.py
index 15610299abfdca6c9df575efedfad5bc75e9cc8c..dac4ba42a05516fb10a9a874041980a1af195660 100755
--- a/cycled_exp.py
+++ b/cycled_exp.py
@@ -3,13 +3,15 @@ import os, sys, shutil, glob, warnings
 import datetime as dt
 
 from dartwrf.utils import script_to_str
-from dartwrf.workflows import WorkFlows
+from config.cfg import exp
+from config.cluster import cluster
+from dartwrf.workflows import *
 
 if __name__ == "__main__":
     """
     Run a cycled OSSE with WRF and DART.
     """
-    w = WorkFlows(exp_config='cfg.py', server_config='jet.py')
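+    # set up the experiment folder in archivedir (previously done by WorkFlows.__init__)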
+    cluster.setup()
 
     timedelta_integrate = dt.timedelta(minutes=15)
     timedelta_btw_assim = dt.timedelta(minutes=15)
@@ -24,20 +26,20 @@ if __name__ == "__main__":
         last_assim_time = dt.datetime(2008, 7, 30, 13,30)
         forecast_until = dt.datetime(2008, 7, 30, 18)
     
-        w.prepare_WRFrundir(init_time)
-        # id = w.run_ideal(depends_on=id)
-        # id = w.wrfinput_insert_wbubble(depends_on=id)    
+        prepare_WRFrundir(init_time)
+        # id = run_ideal(depends_on=id)
+        # id = wrfinput_insert_wbubble(depends_on=id)    
 
     if True:  # random
         prior_path_exp = '/jetfs/home/lkugler/data/sim_archive/exp_v1.19_P2_noDA'
 
-        init_time = dt.datetime(2008, 7, 30, 12)
-        time = dt.datetime(2008, 7, 30, 13)
+        init_time = dt.datetime(2008, 7, 30, 13)
+        time = dt.datetime(2008, 7, 30, 14)
         last_assim_time = dt.datetime(2008, 7, 30, 14)
-        forecast_until = dt.datetime(2008, 7, 30, 14, 18)
+        forecast_until = dt.datetime(2008, 7, 30, 14, 15)
 
-        w.prepare_WRFrundir(init_time)
-        # id = w.run_ideal(depends_on=id)
+        prepare_WRFrundir(init_time)
+        # id = run_ideal(depends_on=id)
 
     # prior_path_exp = cluster.archivedir
     # prior_path_exp = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.19_P5+su_noDA'
@@ -52,31 +54,31 @@ if __name__ == "__main__":
         # i.e. 13z as a prior to assimilate 12z observations
         prior_valid_time = time
 
-        id = w.assimilate(time, prior_init_time, prior_valid_time, prior_path_exp, depends_on=id)
+        id = assimilate(time, prior_init_time, prior_valid_time, prior_path_exp, depends_on=id)
         
         # 1) Set posterior = prior
-        id = w.prepare_IC_from_prior(prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
+        id = prepare_IC_from_prior(prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
 
         # 2) Update posterior += updates from assimilation
-        id = w.update_IC_from_DA(time, depends_on=id)
+        id = update_IC_from_DA(time, depends_on=id)
 
         # How long shall we integrate?
         timedelta_integrate = timedelta_btw_assim
         output_restart_interval = timedelta_btw_assim.total_seconds()/60
-        if time == last_assim_time: #this_forecast_init.minute in [0,]:  # longer forecast every full hour
+        if time == last_assim_time:  # this_forecast_init.minute in [0,]:  # longer forecast every full hour
             timedelta_integrate = forecast_until - last_assim_time  # dt.timedelta(hours=4)
             output_restart_interval = 9999  # no restart file after last assim
 
         # 3) Run WRF ensemble
-        id = w.run_ENS(begin=time,  # start integration from here
+        id = run_ENS(begin=time,  # start integration from here
                     end=time + timedelta_integrate,  # integrate until here
                     output_restart_interval=output_restart_interval,
                     depends_on=id)
         
         # as we have WRF output, we can use own exp path as prior
-        prior_path_exp = cluster.archivedir       
+        prior_path_exp = cluster.archivedir
 
-        id_sat = w.create_satimages(time, depends_on=id)
+        id_sat = create_satimages(time, depends_on=id)
         
         # increment time
         time += timedelta_btw_assim
@@ -84,6 +86,6 @@ if __name__ == "__main__":
         # update time variables
         prior_init_time = time - timedelta_btw_assim
 
-    w.verify_sat(id_sat)
-    w.verify_wrf(id)
-    w.verify_fast(id)
+    verify_sat(id_sat)
+    verify_wrf(id)
+    verify_fast(id)
diff --git a/dartwrf/assim_synth_obs.py b/dartwrf/assim_synth_obs.py
index 2b57fadbfb40ee55c56aae5b852c77f053fab4bb..460375e2d8a8004af46fb9f6dd77f5367d264e8b 100755
--- a/dartwrf/assim_synth_obs.py
+++ b/dartwrf/assim_synth_obs.py
@@ -3,7 +3,7 @@ import time as time_module
 import datetime as dt
 import numpy as np
 
-from dartwrf.utils import symlink, copy, sed_inplace, append_file, mkdir, try_remove, print, shell
+from dartwrf.utils import symlink, copy, sed_inplace, append_file, mkdir, try_remove, print, shell, write_txt
 from dartwrf.obs import error_models as err
 import dartwrf.create_obsseq as osq
 from dartwrf import wrfout_add_geo
@@ -141,13 +141,6 @@ def prepare_prior_ensemble(assim_time, prior_init_time, prior_valid_time, prior_
     os.system("rm -rf " + cluster.dartrundir + "/perfect_output_*")
     os.system("rm -rf " + cluster.dartrundir + "/obs_seq.fina*")
 
-
-def write_txt(lines, fpath):
-    try_remove(fpath)
-    with open(fpath, "w") as file:
-        for line in lines:
-            file.write(line+'\n')
-
 def write_list_of_inputfiles_prior():
      files = []
      for iens in range(1, exp.n_ens+1):
@@ -236,13 +229,13 @@ def archive_filteroutput(time):
 def get_parametrized_error(obscfg, osf_prior):
     """Calculate the parametrized error for an ObsConfig (one obs type)
 
-    Args
-        obscfg (object):                configuration of observations
-        osf_prior (obsseq.ObsRecord):   contains truth and prior values from obs_seq.final
+    Args:
+        obscfg (object): Configuration of observations
+        osf_prior (obsseq.ObsRecord): Contains truth and prior values from obs_seq.final
                                         (output of ./final in evaluate-mode (no posterior))
 
-    Returns
-        np.array            observation error std-dev for assimilation
+    Returns:
+        np.array: observation error std-dev for assimilation
     """
     Hx_prior = osf_prior.get_prior_Hx().T
     Hx_truth = osf_prior.get_truth_Hx()
@@ -263,8 +256,7 @@ def set_obserr_assimilate_in_obsseqout(oso, osf_prior, outfile="./obs_seq.out"):
     """"Overwrite existing variance values in obs_seq.out files
     
     Args:
-        oso (ObsSeq) :  python representation of obs_seq.out file, 
-                        will be modified and written to file
+        oso (ObsSeq): python representation of obs_seq.out file, will be modified and written to file
         osf_prior (ObsSeq): python representation of obs_seq.final (output of filter in evaluate-mode without posterior)
                             contains prior values; used for parameterized errors
 
diff --git a/dartwrf/create_obsseq.py b/dartwrf/create_obsseq.py
index df2a84725773b34dcf153127aa795925debd5216..d014f0564b0413d305708ae5817a796b86b6e7eb 100755
--- a/dartwrf/create_obsseq.py
+++ b/dartwrf/create_obsseq.py
@@ -174,21 +174,21 @@ kind
 
 
 def create_obs_seq_in(time_dt, list_obscfg, 
-                             output_path=cluster.dartrundir+'/obs_seq.in'):
+                      output_path=cluster.dartrundir+'/obs_seq.in'):
     """Create obs_seq.in with multiple obs types in one file
 
     Args:
         time_dt (dt.datetime): time of observation
-        list_obscfg (list of dict) :    configuration for observation types
-            must have keys: 
-                - n_obs (int) : number of observations (must be a square of an integer: 4, 9, 1000, ...)
-                - obs_locations (str or tuple) in ['square_array_from_domaincenter', 'square_array_evenly_on_grid', ] 
-                                                or list of (lat, lon) coordinate tuples, in degrees north/east
-                - error_generate (float)
-                - error_assimilate (float or False) : False -> parameterized
-                - cov_loc_radius_km (float)
-
-        obs_errors (np.array): contains observation errors, one for each observation
+        list_obscfg (list of dict): configuration for observation types
+
+    Note:
+        `list_obscfg` must have these keys: 
+            - n_obs (int) : number of observations (must be a square of an integer: 4, 9, 1000, ...)
+            - obs_locations (str or tuple) in ['square_array_from_domaincenter', 'square_array_evenly_on_grid', ] 
+                                            or list of (lat, lon) coordinate tuples, in degrees north/east
+            - error_generate (np.array)
+            - error_assimilate (np.array or False) : False -> parameterized
+            - cov_loc_radius_km (float)
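+
+    Example:
+        A hypothetical single-obs-type configuration (values are illustrative only):
+
+        >>> list_obscfg = [dict(n_obs=961,
+        ...                     obs_locations='square_array_evenly_on_grid',
+        ...                     error_generate=np.full(961, 1.),
+        ...                     error_assimilate=False,
+        ...                     cov_loc_radius_km=20.)]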
     """
     print('creating obs_seq.in:')
     time_dt = add_timezone_UTC(time_dt)
diff --git a/dartwrf/link_dart_rttov.py b/dartwrf/link_dart_rttov.py
index ceee2d830361e14462a7ff500c3609c726cffec9..daf923bed4d2b64fa90473a525b211c9bb8db0e1 100644
--- a/dartwrf/link_dart_rttov.py
+++ b/dartwrf/link_dart_rttov.py
@@ -1,7 +1,7 @@
 import os
 from config.cfg import exp
 from config.cluster import cluster
-from dartwrf.utils import symlink, copy_scp_srvx8, copy, sed_inplace
+from dartwrf.utils import symlink, copy, sed_inplace
 
 joinp = os.path.join
 
diff --git a/dartwrf/obsseq_2dim.py b/dartwrf/obsseq_2dim.py
index 966d709c963f6801689128544c995d17c6280303..46bd60539978d729181fbf46645691760aa0ef2a 100755
--- a/dartwrf/obsseq_2dim.py
+++ b/dartwrf/obsseq_2dim.py
@@ -1,5 +1,16 @@
 """Create obs_seq.out files with collapsed vertical dimension
 Specifically, one observation per column which is the maximum of the column
+
+Use this script before running the OSSE workflow, to prepare obs_seq.out files.
+
+Note:
+     path_3d_obsseq = '/path/exp_obs10_loc20/obs_seq_out/2008-07-30_%H:%M_obs_seq.out'  
+
+Note:
+     Only works if there is exactly one observation type!
+
+Example:
+     python obsseq_2dim.py exp_v1.22_P2_rr_REFL_obs10_loc20_oe2.5 2008-07-30_13:00
 """
 
 from copy import copy
@@ -8,41 +19,46 @@ import time as time_module
 import datetime as dt
 import numpy as np
 
-from config.cfg import exp
 from config.cluster import cluster
+from dartwrf import utils
 from dartwrf import assim_synth_obs as aso
 from dartwrf import obsseq
 
+def _get_n_obs_per_layer(oso):     
+     """Get number of observations per layer"""
+     height_all = np.array([a[2] for a in oso.df.loc3d])
 
-if __name__ == "__main__":
-
-     assim_time = dt.datetime.strptime(sys.argv[1], "%Y-%m-%d_%H:%M")
+     height_first = height_all[0]
 
-    # prepare an obsseq without rejected observations
-     if exp.use_existing_obsseq:  # from another exp
-          oso_input = assim_time.strftime(exp.use_existing_obsseq)
+     # count how often this height appears
+     n_obs_per_layer = int(np.sum(height_all == height_first))
+     return n_obs_per_layer
 
-     # only assured to work with single obstype
-     if len(exp.observations) > 1:
-          raise NotImplementedError()
-     n_obs = exp.observations[0]['n_obs']
 
+if __name__ == "__main__":
+     exp = sys.argv[1]
+     assim_time = dt.datetime.strptime(sys.argv[2], "%Y-%m-%d_%H:%M")
+     
+     path_3d_obsseq = cluster.archive_base+exp+'/obs_seq_out/%Y-%m-%d_%H:%M_obs_seq.out'
+     oso_input = assim_time.strftime(path_3d_obsseq)
+     
      # existing obsseq with multi levels
      oso = obsseq.ObsSeq(oso_input)
+     
+     n_obs_3d = len(oso.df)
+     n_obs_per_layer = _get_n_obs_per_layer(oso)
+     nlev = int(n_obs_3d/n_obs_per_layer)
+     assert np.allclose(nlev, n_obs_3d/n_obs_per_layer), 'n_obs_3d not evenly divisible by n_obs_per_layer!'
 
-     nlev = len(oso.df)/n_obs
-     if nlev - int(nlev) != 0:
-          raise RuntimeError()
-     nlev = int(nlev)  # levels per obs
+     print('n_obs_per_layer', n_obs_per_layer)
+     print('n_obs_3d', n_obs_3d)
      
-     # copy will be modified
-     output = copy(oso)
+     output = copy(oso)  # copy will be modified
+     # output.df = output.df.copy()  # without this, we get a SettingWithCopyWarning
      output.df = output.df.iloc[0::nlev]  #  every nth level = first level
 
-     #print(output.df, oso.df)
-
      # iterate through, set value to max
-     for i_obs in range(0, n_obs):  # go through n_obs (all columns)
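+     # assumption: rows in the obs_seq are ordered column by column, with nlev consecutive levels per column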
+     for i_obs in range(0, n_obs_per_layer):  # go through all columns
 
           i_obs_subset = i_obs*nlev  # jumps by nlev (from one to next column)
           column = oso.df.loc[0 + i_obs_subset:nlev + i_obs_subset, :]  # select column
@@ -50,8 +66,9 @@ if __name__ == "__main__":
           output.df.loc[i_obs_subset, ('observations')] = float(column['observations'].max())
           output.df.loc[i_obs_subset, ('truth')] = float(column['truth'].max())
 
-     print(output.df) #, 'observations'], output.df.loc[i_obs, 'observations'])
+     print(output.df)
 
      fout = cluster.archivedir + assim_time.strftime("/obs_seq_out/%Y-%m-%d_%H:%M_obs_seq.out")
      os.makedirs(cluster.archivedir+'/obs_seq_out', exist_ok=True)
      output.to_dart(fout)
+     utils.write_txt(["created from", oso_input,], fout[:-4]+'.txt')
diff --git a/dartwrf/obsseq_to_netcdf.py b/dartwrf/obsseq_to_netcdf.py
index 40765952cc46edf231b0ce306fdf00ddacdd36f6..2b5ba45716f391f5e6bbccd78e096fd411702236 100644
--- a/dartwrf/obsseq_to_netcdf.py
+++ b/dartwrf/obsseq_to_netcdf.py
@@ -2,7 +2,7 @@ import os, sys, glob, warnings
 
 from config.cfg import exp
 from config.cluster import cluster
-import run_obs_diag as rod
+import dartwrf.run_obs_diag as rod
 
 def listdir_dirs(path):
     return [a for a in os.listdir(path) if os.path.isdir(os.path.join(path, a))]
diff --git a/dartwrf/prepare_namelist.py b/dartwrf/prepare_namelist.py
index 85bf245c95737f10aeb094f4ffd827bd3f3d5d08..d223aa3a6875d0f4743c1d45846968ed5399cbc2 100755
--- a/dartwrf/prepare_namelist.py
+++ b/dartwrf/prepare_namelist.py
@@ -1,12 +1,12 @@
 """Create namelist.input files
 
 Usage:
-  prepare_namelist.py <begin> <end> <intv> [--radt=<minutes>] [--restart=<flag>] [--restart_interval=<minutes>]
+prepare_namelist.py <begin> <end> <intv> [--radt=<minutes>] [--restart=<flag>] [--restart_interval=<minutes>]
 
 Options:
-  --radt=<minutes>   		Radiation interval [default: 5]
-  --restart=<flag> 		Restart flag (.true., .false.) [default: .false.]
-  --restart_interval=<minutes>	Restart frequency [default: 720]
+--radt=<minutes>   		Radiation interval [default: 5]
+--restart=<flag> 		Restart flag (.true., .false.) [default: .false.]
+--restart_interval=<minutes>	Restart frequency [default: 720]
 """
 import os, sys, shutil, warnings
 import datetime as dt
@@ -18,13 +18,16 @@ from dartwrf.utils import sed_inplace, copy, symlink, mkdir
 
 def run(iens, begin, end, hist_interval=5, radt=5, archive=True,
         restart=False, restart_interval=720):
-    """Create namelist.input files
+    """Create a namelist.input file for each ensemble member
 
     Args:
-    archive (bool): if True, write to archivedir of experiment
-        if False, write to WRF run directory
-    restart (str): fortran bool whether to use wrfinput or wrfrst
-    restart_interval (int): output frequency of wrfrst (minutes)
+        archive (bool): if True, write to archivedir of experiment
+            if False, write to WRF run directory
+        restart (str): fortran bool whether to use wrfinput or wrfrst
+        restart_interval (int): output frequency of wrfrst (minutes)
+
+    Returns:
+        None
     """
     rundir = cluster.wrf_rundir(iens)
     copy(cluster.namelist, rundir+'/namelist.input')
@@ -73,6 +76,7 @@ def run(iens, begin, end, hist_interval=5, radt=5, archive=True,
 
 
 if __name__ == '__main__':
+
     args = docopt(__doc__)
     begin = dt.datetime.strptime(args['<begin>'], '%Y-%m-%d_%H:%M')
     end = dt.datetime.strptime(args['<end>'], '%Y-%m-%d_%H:%M')
diff --git a/dartwrf/prepare_wrfrundir.py b/dartwrf/prepare_wrfrundir.py
index d38cedab19e661032c48f11987faa570c1a68dab..b1199cc7545df256a9831b8fda1f039e3dfbecd7 100755
--- a/dartwrf/prepare_wrfrundir.py
+++ b/dartwrf/prepare_wrfrundir.py
@@ -1,10 +1,18 @@
+"""Prepare WRF run directories, to use wrf.exe then
+
+Args:
+    init_time (str): YYYY-MM-DD_HH:MM
+
+Returns:
+    None
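+
+Example:
+    Prepare run directories for the 12 UTC initialization (assuming init_time
+    is passed as the first command-line argument):
+
+    python prepare_wrfrundir.py 2008-07-30_12:00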
+"""
 import os, sys, shutil
 import datetime as dt
 
 from config.cfg import exp
 from config.cluster import cluster
 from dartwrf.utils import symlink, copy, link_contents
-import prepare_namelist
+from dartwrf import prepare_namelist
 
 if __name__ == '__main__':
 
diff --git a/dartwrf/utils.py b/dartwrf/utils.py
index b45b79f3fc5089230dc5c5e347ebbe5b6e92e5ed..0b7120b0d6d48a4bfca38c324b85d4be197687e9 100755
--- a/dartwrf/utils.py
+++ b/dartwrf/utils.py
@@ -22,7 +22,7 @@ class ClusterConfig(object):
         Example:
             `/users/abcd/data/sim_archive/experiment1/`
         """
-        return self.archive_base+'/'+self.exp.expname
+        return self.archive_base+'/'+self.exp.expname+'/'
 
     @property
     def scripts_rundir(self):
@@ -44,7 +44,7 @@ class ClusterConfig(object):
         """Path to the directory where an ensemble member will run WRF
         Includes the experiment name and the ensemble member index
         """
-        return self.wrf_rundir_base+'/'+self.exp.expname+'/'+str(iens)
+        return self.wrf_rundir_base+'/'+self.exp.expname+'/'+str(iens)+'/'
 
     def run_job(self, cmd, jobname='', cfg_update=dict(), depends_on=None):
         """Run scripts in a shell
@@ -63,10 +63,9 @@ class ClusterConfig(object):
         """
         if self.use_slurm:
             from slurmpy import Slurm
-            Slurm(jobname, slurm_kwargs=dict(self.slurm_cfg, **cfg_update), 
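+            # return the job id from .run() so that subsequent jobs can depend on this one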
+            return Slurm(jobname, slurm_kwargs=dict(self.slurm_cfg, **cfg_update), 
                   log_dir=self.log_dir, 
-                  scripts_dir=self.slurm_scripts_dir, 
-                  **kwargs
+                  scripts_dir=self.slurm_scripts_dir,
                   ).run(cmd, depends_on=depends_on)
         else:
             print(cmd)
@@ -109,7 +108,8 @@ def clean_wrfdir(dir):
             os.remove(f)
 
 def symlink(src, dst):
-    # Create a symbolic link pointing to src named dst.
+    """Create a symbolic link from src to dst
+    """
     try:
         os.symlink(src, dst)
     except FileExistsError:
@@ -123,12 +123,18 @@ def symlink(src, dst):
         raise e
 
 def link_contents(src, dst):
+    """Create symbolic links for all files in src to dst
+    
+    Args:
+        src (str): Path to source directory
+        dst (str): Path to destination directory
+        
+    Returns:
+        None
+    """
     for f in os.listdir(src):
         symlink(src+'/'+f, dst+'/'+f)
 
-def copy_scp_srvx8(src, dst):
-    os.system('scp '+src+' a1254888@srvx8.img.univie.ac.at:'+dst)
-
 def sed_inplace(filename, pattern, repl):
     '''Perform the pure-Python equivalent of in-place `sed` substitution
     Like `sed -i -e 's/'${pattern}'/'${repl}' "${filename}"`.
@@ -162,4 +168,28 @@ def sed_inplace(filename, pattern, repl):
     shutil.move(tmp_file.name, filename)
 
 def append_file(f_main, f_gets_appended):
-    os.system('cat '+f_gets_appended+' >> '+f_main)
\ No newline at end of file
+    """Append the contents of one file to another
+
+    Args:
+        f_main (str): Path to file that will be appended
+        f_gets_appended (str): Path to file that will be appended to f_main
+
+    Returns:
+        None
+    """
+    os.system('cat '+f_gets_appended+' >> '+f_main)
+
+def write_txt(lines, fpath):
+    """Write a list of strings to a text file
+    
+    Args:
+        lines (list): List of strings
+        fpath (str): Path to file
+        
+    Returns:
+        None
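+
+    Example:
+        >>> write_txt(['created from', 'obs_seq.out'], 'info.txt')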
+    """
+    try_remove(fpath)
+    with open(fpath, "w") as file:
+        for line in lines:
+            file.write(line+'\n')
\ No newline at end of file
diff --git a/dartwrf/workflows.py b/dartwrf/workflows.py
index 89f614bec7b1189d2b542e111de0574e2a0ff859..0a81f9d9a40e0de6c00fc760ec3876f61f09f149 100644
--- a/dartwrf/workflows.py
+++ b/dartwrf/workflows.py
@@ -25,7 +25,7 @@ def dict_to_py(d, outfile):
 
 class WorkFlows(object):
     def __init__(self, exp_config='cfg.py', server_config='server.py'):
-        """Set up the experiment folder in `archivedir`, copy config files, backup scripts.
+        """Set up the experiment folder in `archivedir`.
 
         Args:
             exp (str): Path to exp config file
@@ -138,7 +138,6 @@ class WorkFlows(object):
 
         if input_is_restart:  # start WRF in restart mode
         """
-        id = depends_on
         restart_flag = '.false.' if not input_is_restart else '.true.'
 
         # if False:  # doesnt work with restarts at the moment# first_minute:
@@ -180,7 +179,7 @@ class WorkFlows(object):
         if output_restart_interval:
             args.append('--restart_interval='+str(int(float(output_restart_interval))))
 
-        id = self.cluster.run_job(' '.join(args), "preWRF", cfg_update=dict(time="2"), depends_on=[id])
+        id = self.cluster.run_job(' '.join(args), "preWRF", cfg_update=dict(time="2"), depends_on=[depends_on])
 
         cmd = script_to_str(self.cluster.run_WRF).replace('<exp.expname>', exp.expname
                                         ).replace('<cluster.wrf_rundir_base>', self.cluster.wrf_rundir_base)
@@ -240,7 +239,7 @@ class WorkFlows(object):
 
     def create_satimages(self, init_time, depends_on=None):
         cmd = self.cluster.python_verif+' ~/RTTOV-WRF/run_init.py '+self.cluster.archivedir+init_time.strftime('/%Y-%m-%d_%H:%M/')
-        id = self.cluster.run_job(cmd, "RTTOV", cfg_update={"ntasks": "12", "time": "80", "mem": "180G"}, depends_on=[depends_on])
+        id = self.cluster.run_job(cmd, "RTTOV", cfg_update={"ntasks": "12", "time": "80", "mem": "200G"}, depends_on=[depends_on])
         return id