Unverified commit 56985ba2, authored by lkugler, committed by GitHub

cleanup

parent 5bc705c2
@@ -122,39 +122,17 @@ def run_ENS(begin, end, depends_on=None):
     # + begin.strftime('%Y-%m-%d_%H:%M'), depends_on=[id2])
     return id2
 
-def gen_synth_obs(time, depends_on=None):
-
-    # prepare state of nature run, from which observation is sampled
-    id = slurm_submit(cluster.python+' '+cluster.scriptsdir+'/prepare_nature.py '
-                      +time.strftime('%Y-%m-%d_%H:%M'), name='prep_nature',
-                      cfg_update=dict(time="2"), depends_on=[depends_on])
-
-    for channel_id in exp.sat_channels:
-        s = my_Slurm("pre_gensynthobs", cfg_update=dict(time="2"))
-        id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_gen_synth_obs.py '
-                   +time.strftime('%Y-%m-%d_%H:%M ') + str(channel_id),
-                   depends_on=[id])
-
-        s = my_Slurm("gensynth", cfg_update=dict(ntasks="48", time="20"))
-        cmd = 'cd '+cluster.dartrundir+'; mpirun -np 48 ./perfect_model_obs; ' \
-              + 'cat obs_seq.out >> obs_seq_all.out'  # combine all observations
-        id2 = s.run(cmd, depends_on=[id])
-    return id2
-
 def assimilate(assim_time, background_init_time,
                prior_from_different_exp=False, depends_on=None):
-    """Run the assimilation process.
-    Expects a obs_seq.out file present in `dartrundir`
+    """Creates observations from a nature run and assimilates them.
 
     Args:
-        assim_time (dt.datetime): for timestamp of prior wrfout files
-        background_init_time (dt.datetime): for directory of prior wrfout files
+        assim_time (dt.datetime): timestamp of prior wrfout files
+        background_init_time (dt.datetime):
+            timestamp to find the directory where the prior wrfout files are
         prior_from_different_exp (bool or str):
-            put a `str` if you want to take the prior from a different experiment
-            if False: use `archivedir` to get prior state
+            put a `str` to take the prior from a different experiment
+            if False: use `archivedir` (defined in config) to get prior state
             if str: use this directory to get prior state
     """
     id = depends_on
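Note on the revised docstring: below is a minimal sketch of how assimilate() might be called with the documented arguments. The call shape mirrors the cycling loop at the end of this diff; the stub body, the timestamps, and the alternative archive path are illustrative assumptions, not part of this commit.

import datetime as dt

def assimilate(assim_time, background_init_time,
               prior_from_different_exp=False, depends_on=None):
    # Stand-in for the real function in this script, which submits SLURM jobs
    # and returns the id of the last submitted job.
    return 'fake-job-id'

background_init_time = dt.datetime(2008, 7, 30, 10)   # locates the directory of the prior wrfout files
assim_time = dt.datetime(2008, 7, 30, 10, 30)         # timestamp of the prior wrfout files

# prior taken from this experiment's own archive (`archivedir` in the config):
job = assimilate(assim_time, background_init_time, prior_from_different_exp=False)

# prior taken from a different experiment's archive directory (hypothetical path):
job = assimilate(assim_time, background_init_time,
                 prior_from_different_exp='/gpfs/data/somewhere/another_experiment',
                 depends_on=job)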
@@ -169,11 +147,6 @@ def assimilate(assim_time, background_init_time,
                +time.strftime('%Y-%m-%d_%H:%M'), name='prep_nature',
                cfg_update=dict(time="2"), depends_on=[depends_on])
 
-    #s = my_Slurm("gensynth", cfg_update=dict(ntasks="48", time="20"))
-    #cmd = 'cd '+cluster.dartrundir+'; mpirun -np 48 ./perfect_model_obs; ' \
-    #      + 'cat obs_seq.out >> obs_seq_all.out' # combine all observations
-    #id2 = s.run(cmd, depends_on=[id])
-
     # prepare prior model state
     s = my_Slurm("preAssim", cfg_update=dict(time="2"))
     id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_assim.py '
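The hunk above follows the job-chaining pattern used throughout the script: every step is submitted with depends_on set to the id of the previous job, so SLURM enforces the ordering. Here is a toy, runnable sketch of that pattern with a stand-in for the my_Slurm wrapper (whose real implementation is not part of this diff); job names and commands are taken from the hunk, everything else is assumed.

class FakeSlurm:
    # Stand-in for my_Slurm: the real wrapper builds an sbatch call from
    # cfg_update and returns the SLURM job id; here we only mimic the chaining.
    _next_id = 0

    def __init__(self, name, cfg_update=None):
        self.name = name
        self.cfg = dict(cfg_update or {})

    def run(self, cmd, depends_on=None):
        FakeSlurm._next_id += 1
        print(f'{self.name}: submit "{cmd}" after jobs {depends_on} -> id {FakeSlurm._next_id}')
        return FakeSlurm._next_id

job_id = None
job_id = FakeSlurm('prep_nature', cfg_update=dict(time='2')).run(
    'python prepare_nature.py 2008-07-30_10:30', depends_on=[job_id])
job_id = FakeSlurm('preAssim', cfg_update=dict(time='2')).run(
    'python pre_assim.py 2008-07-30_10:30', depends_on=[job_id])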
@@ -223,8 +196,11 @@ timedelta_integrate = dt.timedelta(minutes=30)
 timedelta_btw_assim = dt.timedelta(minutes=30)
 
 clear_logs(backup_existing_to_archive=True)
 
-is_new_run = False
+id = None
+
+start_from_existing_state = False
+is_new_run = not start_from_existing_state
 
 if is_new_run:
     id = prepare_wrfinput() # create initial conditions
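Read together with the next hunk, the new flags replace the single is_new_run switch: a fresh run builds initial conditions from scratch, while start_from_existing_state pulls them from an archived experiment. A condensed, stubbed sketch of the start-up branching after this change (the job-submitting helpers and the elided spin-up step are stand-ins, not the real code):

import datetime as dt

def prepare_wrfinput():
    return 'wrfinput-job-id'       # stub for the real SLURM-submitting helper

def update_wrfinput_from_archive(time, background_init_time, exppath, depends_on=None):
    return 'archive-job-id'        # stub

id = None
start_from_existing_state = False
is_new_run = not start_from_existing_state

if is_new_run:
    id = prepare_wrfinput()        # create initial conditions from scratch,
    # then spin up the ensemble (those lines are elided from this diff)
elif start_from_existing_state:
    id = prepare_wrfinput()        # create initial conditions
    # get initial conditions from an archived experiment
    background_init_time = dt.datetime(2008, 7, 30, 10)
    time = dt.datetime(2008, 7, 30, 10, 30)
    exppath_arch = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.11_LMU_filter'
    first_guess = exppath_arch
    id = update_wrfinput_from_archive(time, background_init_time, exppath_arch,
                                      depends_on=id)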
@@ -236,21 +212,20 @@ if is_new_run:
                              depends_on=id)
     time = integration_end_time
     first_guess = False
-else:
-    # id = prepare_wrfinput() # create initial conditions
-    id = None
+elif start_from_existing_state:
+    id = prepare_wrfinput() # create initial conditions
 
     # get initial conditions from archive
     background_init_time = dt.datetime(2008, 7, 30, 10)
     time = dt.datetime(2008, 7, 30, 10,30)
     exppath_arch = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.11_LMU_filter'
-    first_guess = False #exppath_arch
-    #id = update_wrfinput_from_archive(time, background_init_time, exppath_arch,
-    #                                  depends_on=id)
+    first_guess = exppath_arch
+    id = update_wrfinput_from_archive(time, background_init_time, exppath_arch,
+                                      depends_on=id)
 
 while time <= dt.datetime(2008, 7, 30, 16):
     assim_time = time
-    #id = gen_synth_obs(assim_time, depends_on=id)
     id = assimilate(assim_time,
                     background_init_time,
                     prior_from_different_exp=first_guess,
...
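Finally, the while loop cycles assimilation across the afternoon of 2008-07-30. The diff is truncated before the end of the loop body, so everything after the assimilate() call below, in particular how time and background_init_time are advanced, is an assumption based on the timedelta_* variables defined earlier in the script, not something this commit shows.

import datetime as dt

timedelta_integrate = dt.timedelta(minutes=30)
timedelta_btw_assim = dt.timedelta(minutes=30)

def assimilate(assim_time, background_init_time,
               prior_from_different_exp=False, depends_on=None):
    return 'job-id'                # stub: the real function submits SLURM jobs

background_init_time = dt.datetime(2008, 7, 30, 10)
time = dt.datetime(2008, 7, 30, 10, 30)
first_guess = False
id = None

while time <= dt.datetime(2008, 7, 30, 16):
    assim_time = time
    id = assimilate(assim_time, background_init_time,
                    prior_from_different_exp=first_guess, depends_on=id)

    # Assumed continuation (not visible in this diff): integrate the ensemble
    # forward by timedelta_integrate, then step to the next assimilation window.
    background_init_time = assim_time
    time = assim_time + timedelta_btw_assim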