Commit 81ef508e authored by Lukas Kugler

first minute forecast capability

parent ec2d7ad3
@@ -9,11 +9,11 @@ class ExperimentConfiguration(object):
 exp = ExperimentConfiguration()
-exp.expname = "exp_v1.12_LMU_so_radar_vertloc1km"
+exp.expname = "exp_v1.12_LMU_so_VIS2"
 exp.model_dx = 2000
 exp.timestep = 10
-exp.n_ens = 20
-exp.n_nodes = 5
+exp.n_ens = 40
+exp.n_nodes = 10
 n_obs = 64 # radar: n_obs for each observation height level
@@ -27,7 +27,7 @@ radar = dict(sat=False, kind='RADAR', n_obs=n_obs, err_std=5.,
 psfc = dict(sat=False, kind='PSFC', n_obs=n_obs, err_std=50.,
             cov_loc_radius_km=10)
-exp.observations = [radar, ]
+exp.observations = [vis, ]
 # directory paths depend on the name of the experiment
 cluster.expname = exp.expname
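The vis dictionary referenced here is defined outside this hunk; by analogy with the radar and psfc entries above, it is an observation-type dict of the same shape. A purely illustrative sketch, with guessed values that are not taken from the repository:

    vis = dict(sat=True, kind='VIS', n_obs=n_obs, err_std=0.03,  # hypothetical kind/error values
               cov_loc_radius_km=10)                             # only the shape mirrors radar/psfc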
@@ -25,11 +25,11 @@ vsc.dart_srcdir = '/home/fs71386/lkugler/DART/DART_WRF_RTTOV_early_access/models
 vsc.dartrundir = '/home/fs71386/lkugler/run_DART'
 vsc.scriptsdir = '/home/fs71386/lkugler/DART-WRF/scripts'
-vsc.nature_wrfout = '/home/fs71386/lkugler/data/sim_archive/exp_v1.11_LMU_nature/2008-07-30_06:00/2/wrfout_d01_%Y-%m-%d_%H:%M:%S'
+vsc.nature_wrfout = '/home/fs71386/lkugler/data/sim_archive/exp_v1.12_LMU_nature/2008-07-30_06:00/2/wrfout_d01_%Y-%m-%d_%H:%M:%S'
 vsc.input_profile = '/home/fs71386/lkugler/wrf_sounding/data/wrf/ens/from_LMU/raso.raso.<iens>.wrfprof'
 vsc.ideal = vsc.userdir+'/compile/bin/ideal-v4.2.1_v1.11.exe'
-vsc.wrfexe = vsc.userdir+'/compile/bin/wrf-v4.2.1_v1.11.exe'
+vsc.wrfexe = vsc.userdir+'/compile/bin/wrf-v4.2.1_v1.12.exe'
 vsc.namelist = vsc.scriptsdir+'/../templates/namelist.input'
 vsc.run_WRF = '/gpfs/data/fs71386/lkugler/DART-WRF/scripts/osse/run_ens.vsc.sh'
@@ -31,21 +31,6 @@ class Cmdline(object):
         print('running', self.name, 'without SLURM')
         os.system(cmd)
-def slurm_submit(bashcmd, name=None, cfg_update=None, depends_on=None):
-    """Submit a 'workflow task'=script=job to the SLURM queue.
-    Args:
-        bashcmd (str): command to run (i.e. call to script)
-        name (str): SLURM job name (useful for debugging)
-        cfg_update (dict): enforce these SLURM parameters
-        depends_on (int): SLURM id; job starts as soon as this id has finished
-    Returns:
-        int : SLURM job id, can be used in `depends_on` of another `slurm_submit` call
-    """
-    if name is None: # slurm job name = name of calling function
-        name = sys._getframe(1).f_code.co_name
-    id = my_Slurm(name, cfg_update=cfg_update).run(bashcmd, depends_on=depends_on)
-    return id
 def clear_logs(backup_existing_to_archive=True):
     dirs = ['/logs/', '/slurm-scripts/']
     for d in dirs:
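The removed slurm_submit helper wrapped the pattern that the rest of this workflow now uses directly: build a my_Slurm job, submit it with .run(), and pass the returned SLURM job id into the next task's depends_on. A minimal sketch of that chaining, assuming my_Slurm and cluster as defined in this script (the second job name and its command are placeholders, not actual repository tasks):

    s = my_Slurm("prep_wrfinput", cfg_update={"time": "5"})
    id = s.run(cluster.python+' '+cluster.scriptsdir+'/prepare_wrfinput.py')
    s = my_Slurm("next_task", cfg_update={"time": "5"})             # placeholder job name
    id = s.run('echo "starts only after prep_wrfinput finished"',   # placeholder command
               depends_on=[id])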
@@ -62,10 +47,8 @@ def clear_logs(backup_existing_to_archive=True):
 def prepare_wrfinput():
     """Create WRF/run directories and wrfinput files
     """
-    # s = my_Slurm("pre_osse", cfg_update={"time": "5", "mail-type": "BEGIN"})
-    # id = s.run(cluster.python+' '+cluster.scriptsdir+'/prepare_wrfinput.py')
-    id = slurm_submit(cluster.python+' '+cluster.scriptsdir+'/prepare_wrfinput.py',
-                      name='prep_wrfinput', cfg_update={"time": "5", "mail-type": "BEGIN"})
+    s = my_Slurm("prep_wrfinput", cfg_update={"time": "5", "mail-type": "BEGIN"})
+    id = s.run(cluster.python+' '+cluster.scriptsdir+'/prepare_wrfinput.py')
     cmd = """# run ideal.exe in parallel, then add geodata
 export SLURM_STEP_GRES=none
@@ -83,8 +66,9 @@ do
 mv $rundir/rsl.out.0000 $rundir/rsl.out.input
 done
 """
-    id = slurm_submit(cmd, name="ideal", cfg_update={"ntasks": str(exp.n_ens),
-                      "time": "10", "mem-per-cpu": "2G"}, depends_on=[id])
+    s = my_Slurm("prep_wrfinput", cfg_update={"ntasks": str(exp.n_ens),
+                 "time": "10", "mem-per-cpu": "2G"})
+    id = s.run(cmd, depends_on=[id])
     return id
 def update_wrfinput_from_archive(time, background_init_time, exppath, depends_on=None):
@@ -101,17 +85,37 @@ def update_wrfinput_from_archive(time, background_init_time, exppath, depends_on
     return id
 def run_ENS(begin, end, depends_on=None):
+    """Run forecast for 1 minute, save output.
+    Then run whole timespan with 5 minutes interval.
+    """
     prev_id = depends_on
-    s = my_Slurm("preWRF", cfg_update=dict(time="2"))
+    # first minute forecast (needed for validating an assimilation)
+    hist_interval = 1
+    s = my_Slurm("preWRF1", cfg_update=dict(time="2"))
     id = s.run(cluster.python+' '+cluster.scriptsdir+'/prepare_namelist.py '
                + begin.strftime('%Y-%m-%d_%H:%M')+' '
-               + end.strftime('%Y-%m-%d_%H:%M'),
+               + (begin+dt.timedelta(minutes=1)).strftime('%Y-%m-%d_%H:%M')+' '
+               + str(hist_interval),
                depends_on=[prev_id])
-    runtime_real_hours = (end-begin).total_seconds()/3600
-    runtime_wallclock_mins_expected = int(5+runtime_real_hours*9) # usually below 8 min/hour
-    s = my_Slurm("runWRF", cfg_update={"nodes": "1", "array": "1-"+str(exp.n_nodes),
+    s = my_Slurm("runWRF1", cfg_update={"nodes": "1", "array": "1-"+str(exp.n_nodes),
+                 "time": "5", "mem-per-cpu": "2G"})
+    cmd = script_to_str(cluster.run_WRF).replace('<expname>', exp.expname)
+    id2 = s.run(cmd, depends_on=[id])
+    # whole forecast timespan
+    hist_interval = 5
+    s = my_Slurm("preWRF2", cfg_update=dict(time="2"))
+    id = s.run(cluster.python+' '+cluster.scriptsdir+'/prepare_namelist.py '
+               + begin.strftime('%Y-%m-%d_%H:%M')+' '
+               + end.strftime('%Y-%m-%d_%H:%M')+' '
+               + str(hist_interval),
+               depends_on=[prev_id])
+    time_in_simulation_hours = (end-begin).total_seconds()/3600
+    runtime_wallclock_mins_expected = int(5+time_in_simulation_hours*9) # usually below 8 min/hour
+    s = my_Slurm("runWRF2", cfg_update={"nodes": "1", "array": "1-"+str(exp.n_nodes),
                  "time": str(runtime_wallclock_mins_expected), "mem-per-cpu": "2G"})
     cmd = script_to_str(cluster.run_WRF).replace('<expname>', exp.expname)
     id2 = s.run(cmd, depends_on=[id])
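The first-minute job requests a fixed 5 minutes of wallclock, while the full-span job scales its request with the simulated time span using the formula above. A small, self-contained sketch of that estimate (the helper name is illustrative, the formula is the one used in the diff):

    import datetime as dt

    def expected_wallclock_minutes(begin, end):
        # same estimate as for the runWRF2 job: ~9 min per simulated hour plus 5 min margin
        time_in_simulation_hours = (end - begin).total_seconds()/3600
        return int(5 + time_in_simulation_hours*9)

    # e.g. the 45-minute integration window configured in the driver section below
    expected_wallclock_minutes(dt.datetime(2008, 7, 30, 10, 0),
                               dt.datetime(2008, 7, 30, 10, 45))  # -> 11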
@@ -143,20 +147,19 @@ def assimilate(assim_time, background_init_time,
     prior_expdir = cluster.archivedir()
     # prepare state of nature run, from which observation is sampled
-    id = slurm_submit(cluster.python+' '+cluster.scriptsdir+'/prepare_nature.py '
-                      +time.strftime('%Y-%m-%d_%H:%M'), name='prep_nature',
-                      cfg_update=dict(time="2"), depends_on=[depends_on])
+    s = my_Slurm("prepNature", cfg_update=dict(time="2"))
+    id = s.run(cluster.python+' '+cluster.scriptsdir+'/prepare_nature.py '
+               +time.strftime('%Y-%m-%d_%H:%M'), depends_on=[depends_on])
     # prepare prior model state
     s = my_Slurm("preAssim", cfg_update=dict(time="2"))
     id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_assim.py '
               +assim_time.strftime('%Y-%m-%d_%H:%M ')
               +background_init_time.strftime('%Y-%m-%d_%H:%M ')
-              +prior_expdir,
-              depends_on=[id])
+              +prior_expdir, depends_on=[id])
     # generate observations
-    s = my_Slurm("gensynthobs", cfg_update=dict(ntasks="48", time="10"))
+    s = my_Slurm("genSynthObs", cfg_update=dict(ntasks="48", time="10"))
     id = s.run(cluster.python+' '+cluster.scriptsdir+'/gen_synth_obs.py '
               +time.strftime('%Y-%m-%d_%H:%M'),
               depends_on=[id])
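All of these tasks hand times to the worker scripts as command-line strings in a single fixed format, and the scripts parse them back with the same format (as prepare_namelist.py does further below). A minimal, self-contained round trip:

    import datetime as dt

    fmt = '%Y-%m-%d_%H:%M'
    assim_time = dt.datetime(2008, 7, 30, 10)
    arg = assim_time.strftime(fmt)                        # string handed to the worker script
    assert dt.datetime.strptime(arg, fmt) == assim_time   # parsed back identically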
@@ -179,7 +182,7 @@ def assimilate(assim_time, background_init_time,
 def create_satimages(depends_on=None):
-    s = my_Slurm("pRTTOV", cfg_update={"ntasks": "48", "time": "20"})
+    s = my_Slurm("pRTTOV", cfg_update={"ntasks": "48", "time": "40"})
     s.run(cluster.python+' /home/fs71386/lkugler/RTTOV-WRF/loop.py '+exp.expname,
           depends_on=[depends_on])
@@ -192,13 +195,14 @@ def mailme(depends_on=None):
 ################################
 print('starting osse')
-timedelta_integrate = dt.timedelta(minutes=30)
+timedelta_integrate = dt.timedelta(minutes=45)
 timedelta_btw_assim = dt.timedelta(minutes=30)
-clear_logs(backup_existing_to_archive=True)
+clear_logs(backup_existing_to_archive=False)
 id = None
-start_from_existing_state = False
+start_from_existing_state = True
 is_new_run = not start_from_existing_state
 if is_new_run:
@@ -217,8 +221,8 @@ elif start_from_existing_state:
     id = prepare_wrfinput() # create initial conditions
     # get initial conditions from archive
-    background_init_time = dt.datetime(2008, 7, 30, 10)
-    time = dt.datetime(2008, 7, 30, 10,30)
+    background_init_time = dt.datetime(2008, 7, 30, 6)
+    time = dt.datetime(2008, 7, 30, 10)
     exppath_arch = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.11_LMU_filter'
     first_guess = exppath_arch
     id = update_wrfinput_from_archive(time, background_init_time, exppath_arch,
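The cycling loop itself lies outside this diff. Conceptually, with the settings above, each cycle assimilates at time (here 10:00, against a prior initialized at 06:00), integrates the ensemble forward by timedelta_integrate (45 min), and advances the assimilation clock by timedelta_btw_assim (30 min). A purely hypothetical sketch of such a loop, not the repository's actual driver code:

    # hypothetical cycling sketch; argument lists are abbreviated/illustrative
    last_assim_time = dt.datetime(2008, 7, 30, 14)       # placeholder end of the experiment
    while time <= last_assim_time:
        id = assimilate(time, background_init_time, first_guess, depends_on=id)
        id = run_ENS(begin=time, end=time+timedelta_integrate, depends_on=id)
        background_init_time = time                      # next prior starts from this analysis
        time = time + timedelta_btw_assim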
@@ -5,13 +5,14 @@ sys.path.append(os.getcwd())
 from config.cfg import exp, cluster
 from utils import sed_inplace, copy, symlink, mkdir
-def run(cluster, iens, begin, end):
+def run(cluster, iens, begin, end, hist_interval=5):
     rundir = cluster.wrf_rundir(iens)
     print(rundir)
     copy(cluster.namelist, rundir+'/namelist.input')
     sed_inplace(rundir+'/namelist.input', '<dx>', str(int(exp.model_dx)))
     sed_inplace(rundir+'/namelist.input', '<timestep>', str(int(exp.timestep)))
+    sed_inplace(rundir+'/namelist.input', '<hist_interval>', str(int(hist_interval)))
     archdir = cluster.archivedir()+begin.strftime('/%Y-%m-%d_%H:%M/'+str(iens)+'/')
     print('namelist for run from', begin, end, 'output to', archdir)
@@ -41,7 +42,8 @@ def run(cluster, iens, begin, end):
 if __name__ == '__main__':
     begin = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
     end = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
+    hist_interval = int(sys.argv[3])
     print('prepare namelists for all ens members')
     for iens in range(1, exp.n_ens+1):
-        run(cluster, iens, begin, end)
+        run(cluster, iens, begin, end, hist_interval)
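With the new third argument, run_ENS calls this script twice per forecast, roughly as follows (times are illustrative; the full interpreter path from cluster.python is abbreviated to python):

    python prepare_namelist.py 2008-07-30_10:00 2008-07-30_10:01 1   # first-minute forecast, 1-min output
    python prepare_namelist.py 2008-07-30_10:00 2008-07-30_10:45 5   # full forecast span, 5-min output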
@@ -11,7 +11,7 @@
 end_hour = <HH2>, 00, 00,
 end_minute = <MM2>, 120, 120,
 end_second = 0, 00, 00,
-history_interval = 15, 15, 15,
+history_interval = <hist_interval>, 15, 15,
 frames_per_outfile = 1, 1, 1,
 history_outname = '<archivedir>/wrfout_d<domain>_<date>'
 restart = .false.,
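After prepare_namelist.py substitutes the placeholder, the first-minute run (hist_interval = 1) ends up with a namelist line like:

    history_interval = 1, 15, 15,

while the full-span run gets history_interval = 5, 15, 15, instead of the previously hard-coded 15.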