diff --git a/.gitignore b/.gitignore
index 714a3bdc9dc80f4978ac6f3bcd8568dafcb8b602..3e1d6e5a7b2aa50f87288a3e61e0ca9e8554711a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,8 @@
-*.py*
+.pytest_cache
 *.ipy*
 dartwrf.egg*
 __*
 .vscode
 docs/build
-not-public*
\ No newline at end of file
+not-public*
+config/*
\ No newline at end of file
diff --git a/cycled_exp_template.py b/cycled_exp_template.py
new file mode 100755
index 0000000000000000000000000000000000000000..01a7a6da883d58572a70fc7bdd04d87835da26eb
--- /dev/null
+++ b/cycled_exp_template.py
@@ -0,0 +1,71 @@
+#!/usr/bin/python3
+import datetime as dt
+from dartwrf.workflows import WorkFlows
+
+if __name__ == "__main__":
+    """
+    Run a cycled OSSE with WRF and DART.
+    """
+    w = WorkFlows(exp_config='exp_template.py', server_config='jet.py')
+
+    timedelta_integrate = dt.timedelta(minutes=15)
+    timedelta_btw_assim = dt.timedelta(minutes=15)
+    
+    prior_path_exp = '/path_to/sim_archive/experiment_name/'  # path to the experiment providing the prior
+    init_time = dt.datetime(2008, 7, 30, 11, 45)       # initialization time of the prior forecast
+    time = dt.datetime(2008, 7, 30, 12)                 # first assimilation time
+    last_assim_time = dt.datetime(2008, 7, 30, 13)      # last assimilation time
+    forecast_until = dt.datetime(2008, 7, 30, 13, 15)   # integrate the final forecast until here
+
+    id = None  # job dependency handle; set by the first submitted job
+    w.prepare_WRFrundir(init_time)
+    # id = w.run_ideal(depends_on=id)
+
+    # prior_path_exp = w.cluster.archivedir
+    prior_init_time = init_time
+
+    while time <= last_assim_time:
+
+        # Usually we take the prior from the current time,
+        # but one could also use a prior from a different time of another run,
+        # e.g. a 13z prior to assimilate 12z observations.
+        prior_valid_time = time
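+        # hypothetical alternative: take a prior valid one hour later, e.g.
+        # prior_valid_time = time + dt.timedelta(hours=1)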
+
+        id = w.assimilate(time, prior_init_time,
+                          prior_valid_time, prior_path_exp, depends_on=id)
+
+        # 1) Set posterior = prior
+        id = w.prepare_IC_from_prior(
+            prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
+
+        # 2) Update posterior += updates from assimilation
+        id = w.update_IC_from_DA(time, depends_on=id)
+
+        # How long shall we integrate?
+        timedelta_integrate = timedelta_btw_assim
+        output_restart_interval = timedelta_btw_assim.total_seconds()/60  # in minutes
+        if time == last_assim_time:
+            # after the last assimilation, run the free forecast until `forecast_until`
+            timedelta_integrate = forecast_until - last_assim_time  # e.g. dt.timedelta(hours=4)
+            output_restart_interval = 9999  # no restart file needed after the last assimilation
+
+        # 3) Run WRF ensemble
+        id = w.run_ENS(begin=time,  # start integration from here
+                       end=time + timedelta_integrate,  # integrate until here
+                       output_restart_interval=output_restart_interval,
+                       depends_on=id)
+        
+        # now that we have our own WRF output, use this experiment's path as the prior
+        prior_path_exp = w.cluster.archivedir
+
+        # requires the (separate) RTTOV-WRF repository
+        id = w.create_satimages(time, depends_on=id)
+
+        # increment time
+        time += timedelta_btw_assim
+
+        # the prior for the next cycle was initialized at the previous assimilation time
+        prior_init_time = time - timedelta_btw_assim
+
+    # not publicly available
+    # w.verify_sat(id)
+    # w.verify_wrf(id)
diff --git a/dartwrf/create_obskind_table.py b/dartwrf/create_obskind_table.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c395bfce6fa8bf76bb01c7b7efb6d22e2ae35ba
--- /dev/null
+++ b/dartwrf/create_obskind_table.py
@@ -0,0 +1,93 @@
+"""
+To be able to generate obs_seq.in files, we need a dictionary to convert obs kinds to numbers
+
+  a) we read the obs kind definitions (obs_kind_mod.f90 from DART code) 
+  b) we generate a python file with this dictionary
+
+# Note: to include it in the documentary, the file needs to exist also in the repository 
+# (so the documentation generator SPHINX can read it)
+"""
+import os, sys
+import shutil
+
+
+def _dict_to_py(d, outfile):
+    """Write a python dictionary to a .py file
+
+    Args:
+        d (dict): dictionary to write
+        outfile (str): path to output file
+
+    Returns:
+        None
+    """
+    with open(outfile, 'w') as f:
+        txt = '""" NOTE: This file is autogenerated! \nUse dartwrf/create_obskind_table.py to regenerate!\n"""\nobs_kind_nrs = {\n'
+        for k, v in d.items():
+            txt += '"'+k+'": '+str(v)+', \n'
+        txt += '}'
+        f.write(txt)
+
+
+def _obskind_read(dart_srcdir):
+    """Read dictionary of observation types + ID numbers ("kind") 
+    from DART f90 script and return it as python dictionary
+    """
+    definitionfile = dart_srcdir + \
+        '/../../../assimilation_code/modules/observations/obs_kind_mod.f90'
+    with open(definitionfile, 'r') as f:
+        kind_def_f = f.readlines()
+
+    obskind_nrs = {}
+    i_start = None
+    for i, line in enumerate(kind_def_f):
+        if 'Integer definitions for DART OBS TYPES' in line:
+            # data starts below this line
+            i_start = i
+            break
+    if i_start is None:
+        raise ValueError('Could not find the obs kind definitions in '+definitionfile)
+    for line in kind_def_f[i_start+1:]:
+        if 'MAX_DEFINED_TYPES_OF_OBS' in line:
+            # end of data
+            break
+        if '::' in line:
+            # a line looks like this
+            # integer, parameter, public ::       MSG_4_SEVIRI_TB =   261
+            data = line.split('::')[-1].split('=')
+            kind_str = data[0].strip()
+            kind_nr = int(data[1].strip())
+            obskind_nrs[kind_str] = kind_nr
+    return obskind_nrs
+
+
+def _save_config_to_scriptsdir(server_config, original_scripts_dir):
+    """Copy the config file from the config/ directory into the scripts directory as server_config.py"""
+    try:
+        dir_path = os.path.dirname(os.path.realpath(__file__))
+        shutil.copyfile(dir_path+'/../config/'+server_config,
+                        original_scripts_dir+'/server_config.py')
+    except shutil.SameFileError:
+        pass
+
+
+def run(server_config='jet.py'):
+    """Create obskind.py from obs_kind_mod.f90
+    """
+
+    # usually /home/DART-WRF/dartwrf/
+    original_scripts_dir = '/'.join(__file__.split('/')[:-1])
+
+    # copy the original config to "scripts_dir"
+    _save_config_to_scriptsdir(server_config, original_scripts_dir)
+
+    # import the config from scripts_dir
+    sys.path.append(original_scripts_dir)
+    from server_config import cluster
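+    # the config module is expected to define a `cluster` object, e.g. with cluster.dart_srcdir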
+
+    dart_srcdir = cluster.dart_srcdir
+
+    obskind_dictionary = _obskind_read(dart_srcdir)
+
+    _dict_to_py(obskind_dictionary, original_scripts_dir+'/obs/obskind.py')
+    print('>>>', original_scripts_dir+'/obs/obskind.py', 'created')
+
+
+if __name__ == '__main__':
+    run(server_config='jet_ACF.py')