Skip to content
Snippets Groups Projects
Commit 79dd7cbf authored by lkugler's avatar lkugler
Browse files

Use a separate run_DART directory per experiment; allow the prior valid time to differ from the assimilation time

parent d0407882
No related branches found
No related tags found
No related merge requests found
...@@ -17,6 +17,9 @@ class ClusterConfig(object): ...@@ -17,6 +17,9 @@ class ClusterConfig(object):
def scripts_rundir(self): def scripts_rundir(self):
return self.archivedir+'/DART-WRF/' return self.archivedir+'/DART-WRF/'
@property
def dartrundir(self):
    # Experiment-specific DART run directory: one directory per experiment,
    # keyed by self.expname (assumes expname is set on the instance — TODO confirm).
    # NOTE(review): path is hardcoded to the VSC gpfs filesystem.
    return '/gpfs/data/fs71386/lkugler/run_DART/'+self.expname+'/'
####################################################################################### #######################################################################################
...@@ -31,8 +34,8 @@ vsc.tmpfiledir = '/gpfs/data/fs71386/lkugler' ...@@ -31,8 +34,8 @@ vsc.tmpfiledir = '/gpfs/data/fs71386/lkugler'
vsc.userdir = '/home/fs71386/lkugler' vsc.userdir = '/home/fs71386/lkugler'
vsc.srcdir = '/gpfs/data/fs71386/lkugler/compile/WRF/WRF-4.3/run' vsc.srcdir = '/gpfs/data/fs71386/lkugler/compile/WRF/WRF-4.3/run'
vsc.archive_base = '/gpfs/data/fs71386/lkugler/sim_archive/' vsc.archive_base = '/gpfs/data/fs71386/lkugler/sim_archive/'
vsc.dart_srcdir = '/home/fs71386/lkugler/DART/DART-9.11.9/models/wrf/work' vsc.dart_srcdir = '/gpfs/data/fs71386/lkugler/compile/DART/DART-9.11.9/models/wrf/work'
vsc.dartrundir = '/gpfs/data/fs71386/lkugler/run_DART' vsc.rttov_srcdir = '/gpfs/data/fs71386/lkugler/compile/RTTOV13/rtcoef_rttov13/'
vsc.scriptsdir = '/home/fs71386/lkugler/DART-WRF/scripts/' vsc.scriptsdir = '/home/fs71386/lkugler/DART-WRF/scripts/'
vsc.ideal = vsc.userdir+'/compile/bin/ideal-v4.2.2_v1.16.exe' vsc.ideal = vsc.userdir+'/compile/bin/ideal-v4.2.2_v1.16.exe'
...@@ -53,7 +56,6 @@ jet.userdir = '/jetfs/home/lkugler' ...@@ -53,7 +56,6 @@ jet.userdir = '/jetfs/home/lkugler'
jet.srcdir = '/jetfs/home/lkugler/compile/WRF/WRF-4.1.5/run' jet.srcdir = '/jetfs/home/lkugler/compile/WRF/WRF-4.1.5/run'
jet.scriptsdir = '' jet.scriptsdir = ''
jet.archive_base = '/jetfs/home/lkugler/data_jetfs/sim_archive/' jet.archive_base = '/jetfs/home/lkugler/data_jetfs/sim_archive/'
jet.dartrundir = '/jetfs/home/lkugler/DART-WRF/rundir'
jet.ideal = jet.userdir+'/compile/bin/ideal.exe' jet.ideal = jet.userdir+'/compile/bin/ideal.exe'
jet.wrfexe = jet.userdir+'/compile/bin/wrf-v4.2_v1.10.dmpar.exe' jet.wrfexe = jet.userdir+'/compile/bin/wrf-v4.2_v1.10.dmpar.exe'
......
...@@ -11,9 +11,6 @@ from config.cfg import exp, cluster ...@@ -11,9 +11,6 @@ from config.cfg import exp, cluster
from scripts.utils import script_to_str, symlink from scripts.utils import script_to_str, symlink
# necessary to find modules in folder, since SLURM runs the script elsewhere
sys.path.append(os.getcwd())
# allow scripts to access the configuration # allow scripts to access the configuration
# symlink(cluster.scriptsdir+'/../config', cluster.scriptsdir+'/config') # symlink(cluster.scriptsdir+'/../config', cluster.scriptsdir+'/config')
...@@ -153,7 +150,7 @@ def run_ENS(begin, end, depends_on=None, first_minute=True): ...@@ -153,7 +150,7 @@ def run_ENS(begin, end, depends_on=None, first_minute=True):
# + begin.strftime('%Y-%m-%d_%H:%M'), depends_on=[id2]) # + begin.strftime('%Y-%m-%d_%H:%M'), depends_on=[id2])
return id return id
def assimilate(assim_time, prior_init_time, def assimilate(assim_time, prior_init_time, prior_valid_time,
prior_path_exp=False, depends_on=None): prior_path_exp=False, depends_on=None):
"""Creates observations from a nature run and assimilates them. """Creates observations from a nature run and assimilates them.
...@@ -171,32 +168,23 @@ def assimilate(assim_time, prior_init_time, ...@@ -171,32 +168,23 @@ def assimilate(assim_time, prior_init_time,
elif not isinstance(prior_path_exp, str): elif not isinstance(prior_path_exp, str):
raise TypeError('prior_path_exp either str or False, is '+str(type(prior_path_exp))) raise TypeError('prior_path_exp either str or False, is '+str(type(prior_path_exp)))
# prepare state of nature run, from which observation is sampled
#s = my_Slurm("prepNature", cfg_update=dict(time="2"))
#id = s.run(cluster.python+' '+cluster.scripts_rundir+'/prepare_nature.py '
# +time.strftime('%Y-%m-%d_%H:%M'), depends_on=[depends_on])
# prepare prior model state # # prepare prior model state
s = my_Slurm("preAssim", cfg_update=dict(time="2")) # s = my_Slurm("preAssim", cfg_update=dict(time="2"))
id = s.run(cluster.python+' '+cluster.scripts_rundir+'/pre_assim.py ' # id = s.run(cluster.python+' '+cluster.scripts_rundir+'/pre_assim.py '
+assim_time.strftime('%Y-%m-%d_%H:%M ') # +assim_time.strftime('%Y-%m-%d_%H:%M ')
+prior_init_time.strftime('%Y-%m-%d_%H:%M ') # +prior_init_time.strftime('%Y-%m-%d_%H:%M ')
+prior_path_exp, depends_on=[depends_on]) # +prior_valid_time.strftime('%Y-%m-%d_%H:%M ')
# +prior_path_exp, depends_on=[depends_on])
# prepare nature run, generate observations
s = my_Slurm("Assim", cfg_update={"nodes": "1", "ntasks": "96", "time": "60", s = my_Slurm("Assim", cfg_update={"nodes": "1", "ntasks": "96", "time": "60",
"mem": "300G", "ntasks-per-node": "96", "ntasks-per-core": "2"}) "mem": "300G", "ntasks-per-node": "96", "ntasks-per-core": "2"})
id = s.run(cluster.python+' '+cluster.scripts_rundir+'/assim_synth_obs.py ' id = s.run(cluster.python+' '+cluster.scripts_rundir+'/assim_synth_obs.py '
+assim_time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id]) +assim_time.strftime('%Y-%m-%d_%H:%M ')
+prior_init_time.strftime('%Y-%m-%d_%H:%M ')
# # actuall assimilation step +prior_valid_time.strftime('%Y-%m-%d_%H:%M ')
# s = my_Slurm("Assim", cfg_update=dict(nodes="1", ntasks="48", time="50", mem="200G")) +prior_path_exp, depends_on=[id])
# cmd = 'cd '+cluster.dartrundir+'; mpirun -np 48 ./filter; rm obs_seq_all.out'
# id = s.run(cmd, depends_on=[id])
# s = my_Slurm("archiveAssim", cfg_update=dict(time="10"))
# id = s.run(cluster.python+' '+cluster.scripts_rundir+'/archive_assim.py '
# + assim_time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id])
s = my_Slurm("updateIC", cfg_update=dict(time="8")) s = my_Slurm("updateIC", cfg_update=dict(time="8"))
id = s.run(cluster.python+' '+cluster.scripts_rundir+'/update_wrfinput_from_filteroutput.py ' id = s.run(cluster.python+' '+cluster.scripts_rundir+'/update_wrfinput_from_filteroutput.py '
...@@ -224,7 +212,7 @@ def gen_obsseq(depends_on=None): ...@@ -224,7 +212,7 @@ def gen_obsseq(depends_on=None):
return id return id
def verify(depends_on=None): def verify(depends_on=None):
s = my_Slurm("verify", cfg_update={"time": "180", "mail-type": "FAIL,END", s = my_Slurm("verify", cfg_update={"time": "240", "mail-type": "FAIL,END",
"ntasks": "96", "ntasks-per-node": "96", "ntasks-per-core": "2"}) "ntasks": "96", "ntasks-per-node": "96", "ntasks-per-core": "2"})
s.run(cluster.python_enstools+' '+cluster.userdir+'/osse_analysis/analyze_fc.py '+exp.expname+' has_node', s.run(cluster.python_enstools+' '+cluster.userdir+'/osse_analysis/analyze_fc.py '+exp.expname+' has_node',
depends_on=[depends_on]) depends_on=[depends_on])
...@@ -252,11 +240,12 @@ if __name__ == "__main__": ...@@ -252,11 +240,12 @@ if __name__ == "__main__":
is_new_run = not start_from_existing_state is_new_run = not start_from_existing_state
if is_new_run: if is_new_run:
init_time = dt.datetime(2008, 7, 30, 6) init_time = dt.datetime(2008, 7, 30, 9)
id = prepare_wrfinput(init_time) # create initial conditions id = prepare_wrfinput(init_time) # create initial conditions
id = wrfinput_insert_wbubble(depends_on=id)
# spin up the ensemble # spin up the ensemble
integration_end_time = dt.datetime(2008, 7, 30, 9) integration_end_time = dt.datetime(2008, 7, 30, 14)
id = run_ENS(begin=init_time, id = run_ENS(begin=init_time,
end=integration_end_time, end=integration_end_time,
first_minute=False, first_minute=False,
...@@ -264,34 +253,34 @@ if __name__ == "__main__": ...@@ -264,34 +253,34 @@ if __name__ == "__main__":
prior_path_exp = False # for next assimilation prior_path_exp = False # for next assimilation
elif start_from_existing_state: elif start_from_existing_state:
init_time = dt.datetime(2008, 7, 30, 6) time = dt.datetime(2008, 7, 30, 11)
id = prepare_wrfinput(init_time) # create initial conditions
# get initial conditions from archive # prior init time
integration_end_time = dt.datetime(2008, 7, 30, 10) init_time = dt.datetime(2008, 7, 30, 6)
exppath_arch = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.16_P1_40mem' #prior_path_exp = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.16_Pwbub_40mem'
id = update_wrfinput_from_archive(integration_end_time, init_time, exppath_arch, depends_on=id) #prior_path_exp = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.18_Pwbub-1-ensprof_40mem'
prior_path_exp = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.16_P1_40mem'
#id = update_wrfinput_from_archive(integration_end_time, init_time, prior_path_exp, depends_on=id)
#id = wrfinput_insert_wbubble(depends_on=id) #id = wrfinput_insert_wbubble(depends_on=id)
prior_path_exp = exppath_arch # for next assimilation
# values for assimilation # values for assimilation
time = integration_end_time assim_time = time
assim_time = integration_end_time
prior_init_time = init_time prior_init_time = init_time
while time <= dt.datetime(2008, 7, 30, 10, 30): while time <= dt.datetime(2008, 7, 30, 11):
id = assimilate(assim_time, id = assimilate(assim_time,
prior_init_time, prior_init_time,
prior_valid_time=time+dt.timedelta(hours=2),
prior_path_exp=prior_path_exp, prior_path_exp=prior_path_exp,
depends_on=id) depends_on=id)
prior_path_exp = False # use own exp path prior_path_exp = False # use own exp path as prior
# integration # integration
this_forecast_init = assim_time # start integration from here this_forecast_init = assim_time # start integration from here
timedelta_integrate = timedelta_btw_assim timedelta_integrate = timedelta_btw_assim
if this_forecast_init.minute in [30, 0]: # longer forecast every full hour if this_forecast_init.minute in [0,30]: # longer forecast every full hour
timedelta_integrate = dt.timedelta(hours=2) timedelta_integrate = dt.timedelta(hours=2)
this_forecast_end = assim_time + timedelta_integrate this_forecast_end = assim_time + timedelta_integrate
...@@ -311,4 +300,3 @@ if __name__ == "__main__": ...@@ -311,4 +300,3 @@ if __name__ == "__main__":
id = gen_obsseq(id) id = gen_obsseq(id)
verify(id) verify(id)
# copy to jet
...@@ -38,7 +38,7 @@ if __name__ == '__main__': ...@@ -38,7 +38,7 @@ if __name__ == '__main__':
# prepare dummy nature (this Hx is irrelevant) # prepare dummy nature (this Hx is irrelevant)
os.chdir(cluster.dartrundir) os.chdir(cluster.dartrundir)
os.system('cp ./advance_temp1/wrfout_d01 ./wrfout_d01') os.system('cp ./advance_temp1/wrfout_d01 ./wrfout_d01')
wrfout_add_geo.run(cluster.dartrundir+'/geo_em.d01.nc', wrfout_add_geo.run(cluster.dartrundir+'/../geo_em.d01.nc',
cluster.dartrundir+'/wrfout_d01') cluster.dartrundir+'/wrfout_d01')
aso.run_perfect_model_obs() aso.run_perfect_model_obs()
aso.assimilate(nproc=96) aso.assimilate(nproc=96)
......
...@@ -109,44 +109,6 @@ def replace_errors_obsseqout(f, new_errors): ...@@ -109,44 +109,6 @@ def replace_errors_obsseqout(f, new_errors):
print('replaced obs errors in', f) print('replaced obs errors in', f)
def set_DART_nml_singleobstype(sat_channel=False, cov_loc_radius_km=32, cov_loc_vert_km=False,
                               just_prior_values=False):
    """this function is outdated and will not work

    Writes a DART input.nml from a template, filling in ensemble size and
    covariance-localization settings, then appends an RTTOV namelist segment.

    Args:
        sat_channel: satellite channel number, or False for conventional obs.
        cov_loc_radius_km: horizontal covariance localization radius (km).
        cov_loc_vert_km: vertical localization scale (km), or False to use
            horizontal-distance-only localization.
        just_prior_values: if True, use the evaluation-only namelist template
            (no update, prior diagnostics only).
    """
    # convert localization radius to radians (DART expects radians on the sphere)
    cov_loc_radian = cov_loc_radius_km/earth_radius_km

    if just_prior_values:
        template = cluster.scriptsdir+'/../templates/input.eval.nml'
    else:
        template = cluster.scriptsdir+'/../templates/input.nml'
    copy(template, cluster.dartrundir+'/input.nml')

    # options are overwritten with settings
    # (placeholders of the form <key> in the template are replaced in place)
    options = {'<n_ens>': str(int(exp.n_ens)),
               '<cov_loc_radian>': str(cov_loc_radian)}

    if cov_loc_vert_km:
        # scale vertical normalization height relative to the horizontal radius
        vert_norm_rad = earth_radius_km*cov_loc_vert_km/cov_loc_radius_km*1000
        options['<horiz_dist_only>'] = '.false.'
        options['<vert_norm_hgt>'] = str(vert_norm_rad)
    else:
        options['<horiz_dist_only>'] = '.true.'
        options['<vert_norm_hgt>'] = '50000.0'  # dummy value

    for key, value in options.items():
        sed_inplace(cluster.dartrundir+'/input.nml', key, value)

    # input.nml for RTTOV
    if sat_channel > 0:
        if sat_channel in [1, 2, 3, 12]:  # VIS channels
            rttov_nml = cluster.scriptsdir+'/../templates/obs_def_rttov.VIS.nml'
        else:  # IR channels
            rttov_nml = cluster.scriptsdir+'/../templates/obs_def_rttov.IR.nml'
        append_file(cluster.dartrundir+'/input.nml', rttov_nml)
    else:
        # append any rttov segment, needs to exist anyway
        rttov_nml = cluster.scriptsdir+'/../templates/obs_def_rttov.IR.nml'
        append_file(cluster.dartrundir+'/input.nml', rttov_nml)
def set_DART_nml(just_prior_values=False): def set_DART_nml(just_prior_values=False):
def to_radian_horizontal(cov_loc_horiz_km): def to_radian_horizontal(cov_loc_horiz_km):
...@@ -208,10 +170,6 @@ def obs_operator_ensemble(): ...@@ -208,10 +170,6 @@ def obs_operator_ensemble():
# DART may need a wrfinput file as well, which serves as a template for dimension sizes # DART may need a wrfinput file as well, which serves as a template for dimension sizes
symlink(cluster.dartrundir+'/wrfout_d01', cluster.dartrundir+'/wrfinput_d01') symlink(cluster.dartrundir+'/wrfout_d01', cluster.dartrundir+'/wrfinput_d01')
# I dont think this is necessary, we do this already in pre_assim.py
# add geodata, if istage>0, wrfout is DART output (has coords)
#if istage == 0:
# wrfout_add_geo.run(cluster.dartrundir+'/geo_em.d01.nc', cluster.dartrundir+'/wrfout_d01')
# run perfect_model obs (forward operator) # run perfect_model obs (forward operator)
os.system('mpirun -np 12 ./perfect_model_obs > /dev/null') os.system('mpirun -np 12 ./perfect_model_obs > /dev/null')
...@@ -247,7 +205,70 @@ def link_nature_to_dart_truth(time): ...@@ -247,7 +205,70 @@ def link_nature_to_dart_truth(time):
def prepare_nature_dart(time): def prepare_nature_dart(time):
print('linking nature to DART & georeferencing') print('linking nature to DART & georeferencing')
link_nature_to_dart_truth(time) link_nature_to_dart_truth(time)
wrfout_add_geo.run(cluster.dartrundir+'/geo_em.d01.nc', cluster.dartrundir+'/wrfout_d01') wrfout_add_geo.run(cluster.dartrundir+'/../geo_em.d01.nc', cluster.dartrundir+'/wrfout_d01')
def prepare_prior_ensemble(assim_time, prior_init_time, prior_valid_time, prior_path_exp):
    """Prepares DART files for running filter
    i.e.
    - links first guess state to DART first guess filenames
    - creates wrfinput_d01 files
    - adds geo-reference (xlat,xlon) coords so that DART can deal with the files
    - writes txt files so DART knows what input and output is
    - removes probably pre-existing files which could lead to problems

    Args:
        assim_time (datetime): time at which assimilation takes place.
        prior_init_time (datetime): initialization time of the prior forecast.
        prior_valid_time (datetime): valid time of the prior state used as
            first guess (may intentionally differ from assim_time).
        prior_path_exp (str): archive path of the experiment providing the prior.
    """
    os.makedirs(cluster.dartrundir, exist_ok=True)

    print('prepare prior state estimate')
    for iens in range(1, exp.n_ens+1):
        print('link wrfout file to DART background file')
        # path layout: <exp>/<init-time>/<member>/wrfout_d01_<valid-time>
        wrfout_run = prior_path_exp \
                     +prior_init_time.strftime('/%Y-%m-%d_%H:%M/') \
                     +str(iens) \
                     +prior_valid_time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
        dart_ensdir = cluster.dartrundir+'/advance_temp'+str(iens)
        wrfout_dart = dart_ensdir+'/wrfout_d01'

        os.makedirs(dart_ensdir, exist_ok=True)
        print('copy', wrfout_run, 'to', wrfout_dart)
        copy(wrfout_run, wrfout_dart)
        # DART also reads wrfinput_d01; point it at the same file
        symlink(wrfout_dart, dart_ensdir+'/wrfinput_d01')

        # ensure prior time matches assim time (can be off intentionally)
        # overwrite XTIME in the member file from the nature wrfout so DART
        # accepts a prior whose valid time differs from the assimilation time
        if assim_time != prior_valid_time:
            print('overwriting time in prior from nature wrfout')
            os.system(cluster.ncks+' -A -v XTIME '
                      +cluster.dartrundir+'/wrfout_d01 '+wrfout_dart)

        # this seems to be necessary (else wrong level selection)
        wrfout_add_geo.run(cluster.dartrundir+'/../geo_em.d01.nc', wrfout_dart)

    # tell DART which member files to read (one relative path per line)
    fpath = cluster.dartrundir+'/input_list.txt'
    print('writing', fpath)
    try_remove(fpath)
    with open(fpath, 'w') as f:
        for iens in range(1, exp.n_ens+1):
            f.write('./advance_temp'+str(iens)+'/wrfout_d01')
            f.write('\n')

    # tell DART where to write the analysis (filter restart) files
    fpath = cluster.dartrundir+'/output_list.txt'
    print('writing', fpath)
    try_remove(fpath)
    with open(fpath, 'w') as f:
        for iens in range(1, exp.n_ens+1):
            f.write('./filter_restart_d01.'+str(iens).zfill(4))
            f.write('\n')

    # clean up leftovers from a previous filter run to avoid stale output
    print('removing preassim and filter_restart')
    os.system('rm -rf '+cluster.dartrundir+'/preassim_*')
    os.system('rm -rf '+cluster.dartrundir+'/filter_restart*')
    os.system('rm -rf '+cluster.dartrundir+'/output_mean*')
    os.system('rm -rf '+cluster.dartrundir+'/output_sd*')
    os.system('rm -rf '+cluster.dartrundir+'/perfect_output_*')
    os.system('rm -rf '+cluster.dartrundir+'/obs_seq.fina*')

    # set up RTTOV coefficient links in the run directory
    # NOTE(review): uses cluster.scriptsdir here while other calls use
    # cluster.scripts_rundir — confirm which is intended
    os.system(cluster.python+' '+cluster.scriptsdir+'/link_rttov.py')
def calc_obserr_WV73(Hx_nature, Hx_prior): def calc_obserr_WV73(Hx_nature, Hx_prior):
...@@ -269,6 +290,7 @@ def calc_obserr_WV73(Hx_nature, Hx_prior): ...@@ -269,6 +290,7 @@ def calc_obserr_WV73(Hx_nature, Hx_prior):
def run_perfect_model_obs(): def run_perfect_model_obs():
print('generating observations - running ./perfect_model_obs') print('generating observations - running ./perfect_model_obs')
os.chdir(cluster.dartrundir) os.chdir(cluster.dartrundir)
try_remove(cluster.dartrundir+'/obs_seq.out') try_remove(cluster.dartrundir+'/obs_seq.out')
if not os.path.exists(cluster.dartrundir+'/obs_seq.in'): if not os.path.exists(cluster.dartrundir+'/obs_seq.in'):
raise RuntimeError('obs_seq.in does not exist in '+cluster.dartrundir) raise RuntimeError('obs_seq.in does not exist in '+cluster.dartrundir)
...@@ -286,30 +308,31 @@ def assimilate(nproc=96): ...@@ -286,30 +308,31 @@ def assimilate(nproc=96):
os.system('mpirun -genv I_MPI_PIN_PROCESSOR_LIST=0-'+str(int(nproc)-1)+' -np '+str(int(nproc))+' ./filter > log.filter') os.system('mpirun -genv I_MPI_PIN_PROCESSOR_LIST=0-'+str(int(nproc)-1)+' -np '+str(int(nproc))+' ./filter > log.filter')
print('./filter took', int(time_module.time()-t), 'seconds') print('./filter took', int(time_module.time()-t), 'seconds')
def recycle_output(): # currently unused
"""Use output of assimilation (./filter) as input for another assimilation (with ./filter) # def recycle_output():
Specifically, this copies the state fields from filter_restart_d01.000x to the wrfout files in advance_temp folders""" # """Use output of assimilation (./filter) as input for another assimilation (with ./filter)
update_vars = ['U', 'V', 'T', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'QRAIN', 'U10', 'V10', 'T2', 'Q2', 'TSK', 'PSFC', 'CLDFRA'] # Specifically, this copies the state fields from filter_restart_d01.000x to the wrfout files in advance_temp folders"""
updates = ','.join(update_vars) # update_vars = ['U', 'V', 'T', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'QRAIN', 'U10', 'V10', 'T2', 'Q2', 'TSK', 'PSFC', 'CLDFRA']
# updates = ','.join(update_vars)
print('recycle DART output to be used as input') # print('recycle DART output to be used as input')
for iens in range(1, exp.n_ens+1): # for iens in range(1, exp.n_ens+1):
dart_output = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4) # dart_output = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4)
dart_input = cluster.dartrundir+'/advance_temp'+str(iens)+'/wrfout_d01' # dart_input = cluster.dartrundir+'/advance_temp'+str(iens)+'/wrfout_d01'
#print('check for non-monotonic vertical pressure') # #print('check for non-monotonic vertical pressure')
# convert link to file in order to be able to update the content # # convert link to file in order to be able to update the content
if os.path.islink(dart_input): # if os.path.islink(dart_input):
l = os.readlink(dart_input) # l = os.readlink(dart_input)
os.remove(dart_input) # os.remove(dart_input)
copy(l, dart_input) # copy(l, dart_input)
# print('move DART output to input: '+dart_output+' -> '+dart_input) # # print('move DART output to input: '+dart_output+' -> '+dart_input)
# os.rename(dart_output, dart_input) # probably doesnt work # # os.rename(dart_output, dart_input) # probably doesnt work
print('updating', updates, 'in', dart_input, 'from', dart_output) # print('updating', updates, 'in', dart_input, 'from', dart_output)
os.system(cluster.ncks+' -A -v '+updates+' '+dart_output+' '+dart_input) # os.system(cluster.ncks+' -A -v '+updates+' '+dart_output+' '+dart_input)
############### archiving ############### archiving
...@@ -388,18 +411,25 @@ if __name__ == "__main__": ...@@ -388,18 +411,25 @@ if __name__ == "__main__":
- x_ensemble is already linked for DART to advance_temp<iens>/wrfout_d01 - x_ensemble is already linked for DART to advance_temp<iens>/wrfout_d01
Example call: Example call:
python assim.py 2008-08-07_12:00 python assim.py 2008-08-07_12:00 2008-08-06:00 2008-08-07_13:00 /home/fs71386/lkugler/data/sim_archive/exp_v1.18_Pwbub-1-ensprof_40mem
""" """
time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M') time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
archive_time = cluster.archivedir+time.strftime('/%Y-%m-%d_%H:%M/') prior_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
prior_valid_time = dt.datetime.strptime(sys.argv[3], '%Y-%m-%d_%H:%M')
prior_path_exp = str(sys.argv[4])
archive_time = cluster.archivedir+time.strftime('/%Y-%m-%d_%H:%M/')
os.chdir(cluster.dartrundir) os.chdir(cluster.dartrundir)
os.system('rm -f obs_seq.in obs_seq.out obs_seq.final') # remove any existing observation files os.system('rm -f input.nml obs_seq.in obs_seq.out obs_seq.final') # remove any existing observation files
set_DART_nml()
print('create obs_seq.in from config') print('prepare nature')
prepare_nature_dart(time) # link WRF files to DART directory prepare_nature_dart(time) # link WRF files to DART directory
print('prepare prior ensemble')
prepare_prior_ensemble(time, prior_init_time, prior_valid_time, prior_path_exp)
################################################ ################################################
print(' 1) get the assimilation errors in a single vector ') print(' 1) get the assimilation errors in a single vector ')
error_assimilate = [] error_assimilate = []
......
...@@ -131,7 +131,7 @@ def calc_obs_locations(n_obs, coords_from_domaincenter=True, ...@@ -131,7 +131,7 @@ def calc_obs_locations(n_obs, coords_from_domaincenter=True,
so that the assimilation increments are zero on the boundary so that the assimilation increments are zero on the boundary
distance to boundary = 1.5x localization-radius distance to boundary = 1.5x localization-radius
""" """
fcoords = cluster.dartrundir+'/geo_em.d01.nc' fcoords = cluster.dartrundir+'/../geo_em.d01.nc'
import xarray as xr import xarray as xr
ds = xr.open_dataset(fcoords) ds = xr.open_dataset(fcoords)
......
import os, sys, shutil import os, sys, shutil
import datetime as dt import datetime as dt
from config.cfg import exp, cluster from config.cfg import exp, cluster
from utils import symlink, copy_scp_srvx8, copy, sed_inplace from utils import symlink, copy_scp_srvx8, copy, sed_inplace, try_remove
import wrfout_add_geo import wrfout_add_geo
def run(assim_time, background_init_time, exppath_firstguess): def run(assim_time, prior_init_time, prior_valid_time, prior_path_exp)
"""Prepares DART files for running filter """Prepares DART files for running filter
i.e. i.e.
- links first guess state to DART first guess filenames - links first guess state to DART first guess filenames
...@@ -13,28 +13,35 @@ def run(assim_time, background_init_time, exppath_firstguess): ...@@ -13,28 +13,35 @@ def run(assim_time, background_init_time, exppath_firstguess):
- writes txt files so DART knows what input and output is - writes txt files so DART knows what input and output is
- removes probably pre-existing files which could lead to problems - removes probably pre-existing files which could lead to problems
""" """
os.makedirs(cluster.dartrundir, exist_ok=True)
print('prepare prior state estimate') print('prepare prior state estimate')
for iens in range(1, exp.n_ens+1): for iens in range(1, exp.n_ens+1):
#wrfout_run = cluster.wrf_rundir(iens) + time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
print('link wrfout file to DART background file') print('link wrfout file to DART background file')
wrfout_run = exppath_firstguess+background_init_time.strftime('/%Y-%m-%d_%H:%M/') \ wrfout_run = prior_path_exp \
+str(iens)+assim_time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S') +prior_init_time.strftime('/%Y-%m-%d_%H:%M/') \
+str(iens) \
+prior_valid_time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
dart_ensdir = cluster.dartrundir+'/advance_temp'+str(iens) dart_ensdir = cluster.dartrundir+'/advance_temp'+str(iens)
wrfout_dart = dart_ensdir+'/wrfout_d01' wrfout_dart = dart_ensdir+'/wrfout_d01'
os.makedirs(dart_ensdir, exist_ok=True) os.makedirs(dart_ensdir, exist_ok=True)
print('linking', wrfout_run, 'to', wrfout_dart) print('copy', wrfout_run, 'to', wrfout_dart)
symlink(wrfout_run, wrfout_dart) copy(wrfout_run, wrfout_dart)
symlink(wrfout_dart, dart_ensdir+'/wrfinput_d01') symlink(wrfout_dart, dart_ensdir+'/wrfinput_d01')
# ensure prior time matches assim time (can be off intentionally)
if assim_time != prior_valid_time:
print('overwriting time in prior from nature wrfout')
os.system(cluster.ncks+' -A -v XTIME '
+cluster.dartrundir+'/wrfout_d01 '+wrfout_dart)
# this seems to be necessary (else wrong level selection) # this seems to be necessary (else wrong level selection)
wrfout_add_geo.run(cluster.dartrundir+'/geo_em.d01.nc', wrfout_dart) wrfout_add_geo.run(cluster.dartrundir+'/../geo_em.d01.nc', wrfout_dart)
fpath = cluster.dartrundir+'/input_list.txt' fpath = cluster.dartrundir+'/input_list.txt'
print('writing', fpath) print('writing', fpath)
os.remove(fpath) try_remove(fpath)
with open(fpath, 'w') as f: with open(fpath, 'w') as f:
for iens in range(1, exp.n_ens+1): for iens in range(1, exp.n_ens+1):
f.write('./advance_temp'+str(iens)+'/wrfout_d01') f.write('./advance_temp'+str(iens)+'/wrfout_d01')
...@@ -42,7 +49,7 @@ def run(assim_time, background_init_time, exppath_firstguess): ...@@ -42,7 +49,7 @@ def run(assim_time, background_init_time, exppath_firstguess):
fpath = cluster.dartrundir+'/output_list.txt' fpath = cluster.dartrundir+'/output_list.txt'
print('writing', fpath) print('writing', fpath)
os.remove(fpath) try_remove(fpath)
with open(fpath, 'w') as f: with open(fpath, 'w') as f:
for iens in range(1, exp.n_ens+1): for iens in range(1, exp.n_ens+1):
f.write('./filter_restart_d01.'+str(iens).zfill(4)) f.write('./filter_restart_d01.'+str(iens).zfill(4))
...@@ -62,7 +69,8 @@ def run(assim_time, background_init_time, exppath_firstguess): ...@@ -62,7 +69,8 @@ def run(assim_time, background_init_time, exppath_firstguess):
if __name__ == '__main__': if __name__ == '__main__':
assim_time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M') assim_time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
background_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M') prior_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
exppath_firstguess = str(sys.argv[3]) prior_valid_time = dt.datetime.strptime(sys.argv[3], '%Y-%m-%d_%H:%M')
prior_path_exp = str(sys.argv[4])
run(assim_time, background_init_time, exppath_firstguess) run(assim_time, prior_init_time, prior_valid_time, prior_path_exp)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment