Commit 7ce9be08 authored by lkugler

Revert "Revert to the last point with working ipynb docs"

This reverts commit 2f44f8dd.
parent cf2a0754
@@ -3,15 +3,13 @@ import os, sys, shutil, glob, warnings
 import datetime as dt
 from dartwrf.utils import script_to_str
 from config.cfg import exp
-from config.clusters import cluster
-from dartwrf.workflows import *
+from dartwrf.workflows import WorkFlows
 if __name__ == "__main__":
     """
     Run a cycled OSSE with WRF and DART.
     """
-    cluster.setup()
+    w = WorkFlows(exp_config='cfg.py', server_config='jet.py')
     timedelta_integrate = dt.timedelta(minutes=15)
     timedelta_btw_assim = dt.timedelta(minutes=15)
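Note: the hunk above replaces the star import and the module-level cluster.setup() call with a single WorkFlows object that carries the experiment and server configuration. A minimal usage sketch of the refactored entry point; only WorkFlows, exp_config, server_config and prepare_WRFrundir are taken from this diff, the rest is illustrative:

import datetime as dt
from dartwrf.workflows import WorkFlows

w = WorkFlows(exp_config='cfg.py', server_config='jet.py')  # replaces cluster.setup()
init_time = dt.datetime(2008, 7, 30, 12)
w.prepare_WRFrundir(init_time)  # workflow steps are now methods on the object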
@@ -26,20 +24,20 @@ if __name__ == "__main__":
     last_assim_time = dt.datetime(2008, 7, 30, 13,30)
     forecast_until = dt.datetime(2008, 7, 30, 18)
-    prepare_WRFrundir(init_time)
-    # id = run_ideal(depends_on=id)
-    # id = wrfinput_insert_wbubble(depends_on=id)
+    w.prepare_WRFrundir(init_time)
+    # id = w.run_ideal(depends_on=id)
+    # id = w.wrfinput_insert_wbubble(depends_on=id)
     if True: # random
         prior_path_exp = '/jetfs/home/lkugler/data/sim_archive/exp_v1.19_P2_noDA'
-        init_time = dt.datetime(2008, 7, 30, 13)
-        time = dt.datetime(2008, 7, 30, 14)
+        init_time = dt.datetime(2008, 7, 30, 12)
+        time = dt.datetime(2008, 7, 30, 13)
         last_assim_time = dt.datetime(2008, 7, 30, 14)
-        forecast_until = dt.datetime(2008, 7, 30, 14, 15)
+        forecast_until = dt.datetime(2008, 7, 30, 14, 18)
-        prepare_WRFrundir(init_time)
-        # id = run_ideal(depends_on=id)
+        w.prepare_WRFrundir(init_time)
+        # id = w.run_ideal(depends_on=id)
     # prior_path_exp = cluster.archivedir
     # prior_path_exp = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.19_P5+su_noDA'
@@ -54,13 +52,13 @@ if __name__ == "__main__":
         # i.e. 13z as a prior to assimilate 12z observations
         prior_valid_time = time
-        id = assimilate(time, prior_init_time, prior_valid_time, prior_path_exp, depends_on=id)
+        id = w.assimilate(time, prior_init_time, prior_valid_time, prior_path_exp, depends_on=id)
         # 1) Set posterior = prior
-        id = prepare_IC_from_prior(prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
+        id = w.prepare_IC_from_prior(prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
         # 2) Update posterior += updates from assimilation
-        id = update_IC_from_DA(time, depends_on=id)
+        id = w.update_IC_from_DA(time, depends_on=id)
         # How long shall we integrate?
         timedelta_integrate = timedelta_btw_assim
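Note: each WorkFlows method above submits a batch job and returns its id, and passing that id as depends_on makes the next job wait for it. A generic, self-contained sketch of this chaining pattern; submit_job is a stand-in and not part of DART-WRF:

# Each submission returns an id; the next job waits on it (stand-in scheduler).
def submit_job(name, depends_on=None):
    print(f"submit {name}, waiting on {depends_on}")
    return name  # a real scheduler would return a numeric job id

id = submit_job("assimilate")
id = submit_job("prepare_IC_from_prior", depends_on=id)  # 1) posterior = prior
id = submit_job("update_IC_from_DA", depends_on=id)      # 2) add assimilation increments
id = submit_job("run_ENS", depends_on=id)                # 3) ensemble forecast waits for the ICs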
@@ -70,7 +68,7 @@ if __name__ == "__main__":
         output_restart_interval = 9999 # no restart file after last assim
         # 3) Run WRF ensemble
-        id = run_ENS(begin=time, # start integration from here
+        id = w.run_ENS(begin=time, # start integration from here
                      end=time + timedelta_integrate, # integrate until here
                      output_restart_interval=output_restart_interval,
                      depends_on=id)
@@ -78,7 +76,7 @@ if __name__ == "__main__":
         # as we have WRF output, we can use own exp path as prior
         prior_path_exp = w.cluster.archivedir
-        id_sat = create_satimages(time, depends_on=id)
+        id_sat = w.create_satimages(time, depends_on=id)
         # increment time
         time += timedelta_btw_assim
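Note: create_satimages builds its RTTOV command from the archive directory plus a time-stamped subfolder (see the create_satimages hunk further down). A tiny sketch of that strftime path construction, with a made-up archive path:

import datetime as dt

archivedir = '/path/to/sim_archive/exp_example'  # made-up example path
init_time = dt.datetime(2008, 7, 30, 13)
print(archivedir + init_time.strftime('/%Y-%m-%d_%H:%M/'))
# /path/to/sim_archive/exp_example/2008-07-30_13:00/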
@@ -86,6 +84,6 @@ if __name__ == "__main__":
         # update time variables
         prior_init_time = time - timedelta_btw_assim
-    verify_sat(id_sat)
-    verify_wrf(id)
-    verify_fast(id)
+    w.verify_sat(id_sat)
+    w.verify_wrf(id)
+    w.verify_fast(id)
@@ -25,7 +25,7 @@ def dict_to_py(d, outfile):
 class WorkFlows(object):
     def __init__(self, exp_config='cfg.py', server_config='server.py'):
-        """Set up the experiment folder in `archivedir`.
+        """Set up the experiment folder in `archivedir`, copy config files, backup scripts.
         Args:
             exp (str): Path to exp config file
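Note: the expanded docstring says the constructor not only creates the experiment folder but also copies the config files and backs up the scripts. A rough, purely illustrative sketch of such a setup step; the paths and the helper name are assumptions, not taken from DART-WRF:

import os, shutil

def setup_experiment_folder(archivedir, config_files):
    """Illustrative only: create the archive folder and keep copies of the configs used."""
    os.makedirs(archivedir, exist_ok=True)
    for f in config_files:
        if os.path.isfile(f):
            shutil.copy(f, archivedir)  # backup the exact configs this run used

# setup_experiment_folder('/tmp/exp_archive', ['config/cfg.py', 'config/jet.py'])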
@@ -239,7 +239,7 @@ class WorkFlows(object):
     def create_satimages(self, init_time, depends_on=None):
         cmd = self.cluster.python_verif+' ~/RTTOV-WRF/run_init.py '+self.cluster.archivedir+init_time.strftime('/%Y-%m-%d_%H:%M/')
-        id = self.cluster.run_job(cmd, "RTTOV", cfg_update={"ntasks": "12", "time": "80", "mem": "200G"}, depends_on=[depends_on])
+        id = self.cluster.run_job(cmd, "RTTOV", cfg_update={"ntasks": "12", "time": "80", "mem": "180G"}, depends_on=[depends_on])
         return id
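Note: the only change in create_satimages is the memory request, from 200G to 180G. cfg_update appears to override per-job batch resources on top of the cluster defaults; a sketch of the presumed merge semantics, with made-up default values:

# Presumed semantics of cfg_update: per-job overrides win over cluster defaults.
slurm_defaults = {"ntasks": "1", "time": "30", "mem": "8G"}  # made-up defaults

def build_job_config(cfg_update=None):
    cfg = dict(slurm_defaults)    # start from cluster-wide defaults
    cfg.update(cfg_update or {})  # apply per-job overrides
    return cfg

print(build_job_config({"ntasks": "12", "time": "80", "mem": "180G"}))
# -> {'ntasks': '12', 'time': '80', 'mem': '180G'}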