Commit 7aecc11b authored by lkugler

DA with wrfrst

parent bf6b2a25
@@ -21,14 +21,16 @@ usually applied to 1 min forecasts to assess the posterior analysis quality
 """
 if __name__ == '__main__':
-    prev_forecast_init = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
+    prior_init_time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
     time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
     exppath_firstguess = cluster.archivedir
-    print(prev_forecast_init, time)
+    prior_valid_time = time
+    print('compute Hx from 1min forecast')
+    print(time, prior_init_time, prior_valid_time, exppath_firstguess)
     # link ensemble states to run_DART directory
     # we want the observation operator applied to these states!
-    pre_assim.run(time, prev_forecast_init, exppath_firstguess)
+    pre_assim.run(time, prior_init_time, prior_valid_time, exppath_firstguess)
     savedir = cluster.archivedir+'/obs_seq_final_1min/'
@@ -11,16 +11,16 @@ import wrfout_add_geo
 earth_radius_km = 6370
-# fit of Fig 7, Harnisch 2016
-x_ci = [0, 5, 10.5, 13, 16]
-y_oe = [1, 4.5, 10, 12, 13]  # Kelvin
+# Kelvin, fit of Fig 7b, Harnisch 2016
+x_ci = [0, 5, 10.5, 13, 16]  # average cloud impact
+y_oe = [1, 4.5, 10, 12, 13]  # adjusted observation error
 oe_73_linear = interp1d(x_ci, y_oe, assume_sorted=True)
 def oe_73(ci):
     if ci < 13:
         return oe_73_linear(ci)
     else:
-        return 16.
+        return 13.
 def cloudimpact_73(bt_mod, bt_obs):
     """
@@ -217,8 +217,6 @@ def prepare_prior_ensemble(assim_time, prior_init_time, prior_valid_time, prior_
     - writes txt files so DART knows what input and output is
     - removes probably pre-existing files which could lead to problems
     """
-    os.makedirs(cluster.dartrundir, exist_ok=True)
     print('prepare prior state estimate')
     for iens in range(1, exp.n_ens+1):
         print('link wrfout file to DART background file')
@@ -237,7 +235,7 @@ def prepare_prior_ensemble(assim_time, prior_init_time, prior_valid_time, prior_
         # ensure prior time matches assim time (can be off intentionally)
         if assim_time != prior_valid_time:
             print('overwriting time in prior from nature wrfout')
-            os.system(cluster.ncks+' -A -v XTIME '
+            os.system(cluster.ncks+' -A -v XTIME,Times '
                       +cluster.dartrundir+'/wrfout_d01 '+wrfout_dart)
         # this seems to be necessary (else wrong level selection)
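To make the effect of the one-word change concrete, here is the shell command the changed line builds (illustration only; the directory names are placeholders and wrfout_dart stands for a hypothetical member file assembled earlier in prepare_prior_ensemble). Appending Times in addition to XTIME stamps the prior with both the numeric time coordinate and the character time string of the nature wrfout:

ncks = 'ncks'                                         # cluster.ncks in the scripts
dartrundir = '/path/to/run_DART'                      # cluster.dartrundir (placeholder)
wrfout_dart = dartrundir + '/prior_ens1/wrfout_d01'   # hypothetical member file
cmd = ncks + ' -A -v XTIME,Times ' + dartrundir + '/wrfout_d01 ' + wrfout_dart
print(cmd)
# ncks -A -v XTIME,Times /path/to/run_DART/wrfout_d01 /path/to/run_DART/prior_ens1/wrfout_d01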
@@ -334,6 +332,7 @@ def assimilate(nproc=96):
     # print('updating', updates, 'in', dart_input, 'from', dart_output)
     # os.system(cluster.ncks+' -A -v '+updates+' '+dart_output+' '+dart_input)
 ############### archiving
 def archive_osq_final(time, posterior_1min=False):
@@ -420,6 +419,7 @@ if __name__ == "__main__":
     prior_path_exp = str(sys.argv[4])
     archive_time = cluster.archivedir+time.strftime('/%Y-%m-%d_%H:%M/')
+    os.makedirs(cluster.dartrundir, exist_ok=True)
     os.chdir(cluster.dartrundir)
     os.system('rm -f input.nml obs_seq.in obs_seq.out obs_seq.final')  # remove any existing observation files
     set_DART_nml()
@@ -481,6 +481,6 @@ if __name__ == "__main__":
     t = time_module.time()
     assimilate()
     print('filter took', time_module.time()-t, 'seconds')
     archive_filteroutput(time)
     archive_osq_final(time)
@@ -4,7 +4,7 @@ from config.cfg import exp, cluster
 from utils import symlink, copy_scp_srvx8, copy, sed_inplace, try_remove
 import wrfout_add_geo
-def run(assim_time, prior_init_time, prior_valid_time, prior_path_exp)
+def run(assim_time, prior_init_time, prior_valid_time, prior_path_exp):
     """Prepares DART files for running filter
     i.e.
     - links first guess state to DART first guess filenames
"""Create namelist.input files
Usage:
prepare_namelist.py <begin> <end> <intv> [--radt=<minutes>] [--restart=<flag>] [--restart_interval=<minutes>] [--rst_inname=<path>]
Options:
--radt=<minutes> Radiation interval [default: 5]
--restart=<flag> Restart flag (.true., .false.) [default: .false.]
--restart_interval=<minutes> Restart frequency [default: 720]
--rst_inname=<path> Path to directory of input wrfrst file
"""
import os, sys, shutil, warnings import os, sys, shutil, warnings
import datetime as dt import datetime as dt
from docopt import docopt
from config.cfg import exp, cluster from config.cfg import exp, cluster
from utils import sed_inplace, copy, symlink, mkdir from utils import sed_inplace, copy, symlink, mkdir
def run(iens, begin, end, hist_interval=5, radt=5, archive=True): def run(iens, begin, end, hist_interval=5, radt=5, archive=True,
""" restart=False, restart_interval=720, rst_inname=False):
"""Create namelist.input files
Args: Args:
archive (bool): if True, write to archivedir of experiment archive (bool): if True, write to archivedir of experiment
if False, write to WRF run directory if False, write to WRF run directory
restart (str): fortran bool whether to use wrfinput or wrfrst
restart_interval (int): output frequency of wrfrst (minutes)
""" """
rundir = cluster.wrf_rundir(iens) rundir = cluster.wrf_rundir(iens)
print(rundir)
copy(cluster.namelist, rundir+'/namelist.input') copy(cluster.namelist, rundir+'/namelist.input')
sed_inplace(rundir+'/namelist.input', '<dx>', str(int(exp.model_dx))) sed_inplace(rundir+'/namelist.input', '<dx>', str(int(exp.model_dx)))
@@ -18,6 +33,10 @@ def run(iens, begin, end, hist_interval=5, radt=5, archive=True):
     sed_inplace(rundir+'/namelist.input', '<hist_interval>', str(int(hist_interval)))
     sed_inplace(rundir+'/namelist.input', '<radt>', str(int(radt)))
+    rst_flag = '.true.' if restart else '.false.'
+    sed_inplace(rundir+'/namelist.input', '<restart>', rst_flag)
+    sed_inplace(rundir+'/namelist.input', '<restart_interval>', str(int(float(restart_interval))))
     if archive:
         archdir = cluster.archivedir+begin.strftime('/%Y-%m-%d_%H:%M/'+str(iens)+'/')
         os.makedirs(archdir, exist_ok=True)
@@ -25,7 +44,12 @@ def run(iens, begin, end, hist_interval=5, radt=5, archive=True):
         archdir = './'
     print('namelist for run from', begin, end, 'output to', archdir)
     sed_inplace(rundir+'/namelist.input', '<archivedir>', archdir)
+    if rst_inname:  # rst_inname did not work -> link file to directory
+        # sed_inplace(rundir+'/namelist.input', '<initdir>', rst_inname)
+        fname = begin.strftime('/wrfrst_d01_%Y-%m-%d_%H:%M:%S')
+        symlink(rst_inname+fname, rundir+fname)
+        print('linked', rst_inname+fname, 'to rundir')
     # set times
     for k, v in {'<y1>': '%Y', '<m1>': '%m', '<d1>': '%d',
@@ -40,29 +64,44 @@ def run(iens, begin, end, hist_interval=5, radt=5, archive=True):
     init_dir = cluster.archivedir+begin.strftime('/%Y-%m-%d_%H:%M/')+str(iens)
     os.makedirs(init_dir, exist_ok=True)
+    print('copy namelist to archive')
+    copy(rundir+'/namelist.input', init_dir+'/namelist.input')
     try:
-        print('copy wrfinput of this run to archive')
-        wrfin_old = rundir+'/wrfinput_d01'
-        wrfin_arch = init_dir+'/wrfinput_d01'
-        copy(wrfin_old, wrfin_arch)
-        print('copy namelist to archive')
-        copy(rundir+'/namelist.input', init_dir+'/namelist.input')
+        if not restart:
+            print('copy wrfinput of this run to archive')
+            wrfin_old = rundir+'/wrfinput_d01'
+            wrfin_arch = init_dir+'/wrfinput_d01'
+            copy(wrfin_old, wrfin_arch)
     except Exception as e:
         warnings.warn(str(e))
 if __name__ == '__main__':
-    begin = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
-    end = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
-    intv = int(sys.argv[3])
-    radt = int(sys.argv[4])
-    archive = True
-    try:
-        if sys.argv[5] == '1':
-            archive = False
-    except:
-        pass
-    print('prepare namelists for all ens members')
+    args = docopt(__doc__)
+    begin = dt.datetime.strptime(args['<begin>'], '%Y-%m-%d_%H:%M')
+    end = dt.datetime.strptime(args['<end>'], '%Y-%m-%d_%H:%M')
+    intv = int(args['<intv>'])
+    radt = int(args['--radt'])
+    if not radt:
+        radt = '5'
+    restart = False
+    if args['--restart'] == '.true.':
+        restart = True
+    restart_interval = args['--restart_interval']
+    if not restart_interval:
+        restart_interval = 720
+    print('prepare namelists for all ens members',intv,radt,restart,restart_interval)
     for iens in range(1, exp.n_ens+1):
-        run(iens, begin, end, hist_interval=intv, radt=radt, archive=archive)
+        rst_inname = False
+        if args['--rst_inname']:
+            rst_inname = args['--rst_inname'] + '/'+str(iens)
+            print('rst_inname', rst_inname)
+        run(iens, begin, end, hist_interval=intv, radt=radt,
+            restart=restart, restart_interval=restart_interval,
+            rst_inname=rst_inname)
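A short sketch (not part of the commit) of how the new docopt interface parses a call; the dates are arbitrary examples of the '%Y-%m-%d_%H:%M' format used throughout, and the usage string is trimmed to the pattern line:

from docopt import docopt

usage = """Create namelist.input files
Usage:
  prepare_namelist.py <begin> <end> <intv> [--radt=<minutes>] [--restart=<flag>] [--restart_interval=<minutes>] [--rst_inname=<path>]
"""
args = docopt(usage, argv=['2008-07-30_12:00', '2008-07-30_12:30', '30', '--restart=.true.'])
print(args['<begin>'], args['--restart'], args['--restart_interval'])
# 2008-07-30_12:00 .true. None   (the [default: ...] values apply only via an Options: section)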
print('loading modules')
import os, sys, warnings, glob
import datetime as dt
import netCDF4 as nc
from config.cfg import exp, cluster
from utils import symlink, copy, mkdir, clean_wrfdir, try_remove

"""
Sets initial condition data (wrfinput/wrfrst file) in the run_WRF directory for each ensemble member
1) copies wrfrst to run_WRF directory
2) overwrites DA-updated variables with DART output fields
   (for verification later on, since a restart run does not write the first wrfout)
3) copy wrfout from prior to archivedir
4) overwrite DA-updated variables with DART output
# assumes T = THM (dry potential temperature as prognostic variable)
"""

update_vars = ['Times',]
update_vars.extend(exp.update_vars)  # 'U', 'V', 'T', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'TSK', 'CLDFRA'
updates = ','.join(update_vars)

def create_wrfrst_in_WRF_rundir(time, prior_init_time, exppath_firstguess):
    """copies wrfrst to run_WRF directory (for next WRF run)
    """
    for iens in range(1, exp.n_ens+1):
        clean_wrfdir(cluster.wrf_rundir(iens))
        prior_wrfrst = exppath_firstguess + prior_init_time.strftime('/%Y-%m-%d_%H:%M/') \
                       +str(iens)+time.strftime('/wrfrst_d01_%Y-%m-%d_%H:%M:%S')
        wrfrst = cluster.wrf_rundir(iens) + time.strftime('/wrfrst_d01_%Y-%m-%d_%H:%M:%S')
        print('copy prior (wrfrst)', prior_wrfrst, 'to', wrfrst)
        copy(prior_wrfrst, wrfrst)

        filter_out = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4)
        print('update assimilated variables => overwrite', updates, 'in', wrfrst, 'from', filter_out)
        os.system(cluster.ncks+' -A -v '+updates+' '+filter_out+' '+wrfrst)

        print('writing T into THM of wrfrst')  # assumes T = THM (dry potential temperature as prognostic variable)
        with nc.Dataset(filter_out, 'r') as ds_filter:
            with nc.Dataset(wrfrst, 'r+') as ds_wrfrst:
                ds_wrfrst.variables['THM_1'][:] = ds_filter.variables['T'][:]
                ds_wrfrst.variables['THM_2'][:] = ds_filter.variables['T'][:]
        print(wrfrst, 'created.')

        # remove all wrfrst (but not the one used)
        files_rst = glob.glob(exppath_firstguess + prior_init_time.strftime('/%Y-%m-%d_%H:%M/'+str(iens)+'/wrfrst_*'))
        files_rst.remove(prior_wrfrst)
        for f in files_rst:
            try_remove(f)

def create_wrfout_in_archivedir(time, prior_init_time, exppath_firstguess):
    """Put updated wrfout in archive dir (because wrf restart writes no 0 minute wrfout)
    """
    print('writing updated wrfout to archive (for verification)')
    for iens in range(1, exp.n_ens+1):
        prior_wrfout = exppath_firstguess + prior_init_time.strftime('/%Y-%m-%d_%H:%M/') \
                       +str(iens)+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
        post_wrfout_archive = cluster.archivedir +time.strftime('/%Y-%m-%d_%H:%M/') \
                       +str(iens)+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')

        # copy template wrfout (including cycled variables)
        os.makedirs(os.path.dirname(post_wrfout_archive), exist_ok=True)
        copy(prior_wrfout, post_wrfout_archive)

        # overwrite DA-updated variables
        filter_out = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4)
        #filter_out = cluster.archivedir +time.strftime('/%Y-%m-%d_%H:%M/assim_stage0/filter_restart_d01.'+str(iens).zfill(4))
        os.system(cluster.ncks+' -A -v '+updates+' '+filter_out+' '+post_wrfout_archive)

        # need to overwrite THM manually
        with nc.Dataset(filter_out, 'r') as ds_filter:
            with nc.Dataset(post_wrfout_archive, 'r+') as ds_wrfout:
                ds_wrfout.variables['THM'][:] = ds_filter.variables['T'][:]
        print(post_wrfout_archive, 'created.')

def create_updated_wrfinput_from_wrfout(time, prior_init_time, exppath_firstguess):
    """Same as create_wrfout_in_archivedir, but output is `wrfinput` in WRF run directory"""
    print('writing updated wrfout to WRF run directory as wrfinput')
    for iens in range(1, exp.n_ens+1):
        prior_wrfout = exppath_firstguess + prior_init_time.strftime('/%Y-%m-%d_%H:%M/') \
                       +str(iens)+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
        post_wrfout = cluster.wrf_rundir(iens) + '/wrfinput_d01'
        copy(prior_wrfout, post_wrfout)

        if True:  # overwrite DA-updated variables
            filter_out = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4)
            os.system(cluster.ncks+' -A -v '+updates+' '+filter_out+' '+post_wrfout)

            # need to overwrite THM manually
            with nc.Dataset(filter_out, 'r') as ds_filter:
                with nc.Dataset(post_wrfout, 'r+') as ds_wrfout:
                    ds_wrfout.variables['THM'][:] = ds_filter.variables['T'][:]
        print(post_wrfout, 'created.')

if __name__ == '__main__':
    time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
    prior_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
    exppath_firstguess = str(sys.argv[3])

    use_wrfout_as_wrfinput = False
    if use_wrfout_as_wrfinput:
        create_updated_wrfinput_from_wrfout(time, prior_init_time, exppath_firstguess)
    else:
        create_wrfrst_in_WRF_rundir(time, prior_init_time, exppath_firstguess)
        # this is done with the right WRF namelist entry (write first wrfout)
        #create_wrfout_in_archivedir(time, prior_init_time, exppath_firstguess)
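An optional sanity check, not part of the commit, that the two steps above (the ncks append plus the manual THM_1/THM_2 overwrite) really put the DART analysis into the restart file; the paths are placeholders for one ensemble member:

import numpy as np
import netCDF4 as nc

filter_out = '/path/to/run_DART/filter_restart_d01.0001'       # DART analysis (placeholder)
wrfrst = '/path/to/run_WRF/1/wrfrst_d01_2008-07-30_12:00:00'   # patched restart file (placeholder)
with nc.Dataset(filter_out) as ana, nc.Dataset(wrfrst) as rst:
    assert np.allclose(ana.variables['T'][:], rst.variables['THM_1'][:]), 'THM_1 was not overwritten'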
@@ -16,7 +16,7 @@ do
     RUNDIR=$MAINDIR/$EXPNAME/$IENS
     echo "ENSEMBLE NR: "$IENS" in "$RUNDIR
     cd $RUNDIR
-    rm -rf wrfrst_d01_* wrfout_d01_* rsl.out.0*
+    rm -rf rsl.out.0*
     echo "mpirun -genv I_MPI_PIN_PROCESSOR_LIST="${pinning[$n-1]}" -np 12 ./wrf.exe"
     mpirun -genv I_MPI_PIN_PROCESSOR_LIST=${pinning[$n-1]} -np 12 ./wrf.exe &
     cd ../
print('loading modules')
import os, sys, warnings
import datetime as dt
import netCDF4 as nc
from config.cfg import exp, cluster
from utils import symlink, copy, mkdir, clean_wrfdir

time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
background_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
exppath_firstguess = str(sys.argv[3])

"""
-) sets initial condition data (wrfinput file) in the run_WRF directory for each ensemble member
   from a DART output state (set of filter_restart files)
-) cycles (copies) some state variables from the prior ensemble to the wrfinput of the next run
# assumes T = THM (dry potential temperature as prognostic variable)
"""

update_vars = ['Times',]
update_vars.extend(exp.update_vars)  # 'U', 'V', 'T', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'TSK', 'CLDFRA'
updates = ','.join(update_vars)

print('move output to WRF dir as new initial conditions')
for iens in range(1, exp.n_ens+1):
    clean_wrfdir(cluster.wrf_rundir(iens))
    prior_wrf = exppath_firstguess + background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
                +str(iens)+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
    filter_out = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4)
    wrf_ic = cluster.wrf_rundir(iens) + '/wrfinput_d01'

    # cycles variables from wrfout (prior state)
    print('cycle some variables (copy from last init) => copy prior', prior_wrf, 'to wrfinput', wrf_ic)
    # os.system(cluster.ncks+' -A -v '+cycles+' '+prior_wrf+' '+wrf_ic)
    copy(prior_wrf, wrf_ic)

    print('update assimilated variables => overwrite', updates, 'in', wrf_ic, 'from', filter_out)
    os.system(cluster.ncks+' -A -v '+updates+' '+filter_out+' '+wrf_ic)

    print('writing T into THM of wrfinput')  # assumes T = THM (dry potential temperature as prognostic variable)
    thm_in = nc.Dataset(filter_out, 'r').variables['T'][:]
    dsout = nc.Dataset(wrf_ic, 'r+')
    dsout.variables['THM'][:] = thm_in
    dsout.close()

    # clean up
    #try:
    #    os.remove(cluster.dartrundir+'/advance_temp'+str(iens)+'/wrfout_d01')
    #except:
    #    pass
import os, sys, shutil
import datetime as dt
from config.cfg import exp, cluster
from utils import symlink, copy_scp_srvx8, copy
import prepare_namelist

exppath = str(sys.argv[1])
# time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
# background_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')

#update_vars = ['Times', 'U', 'V', 'PH', 'T', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'PSFC', 'TSK']
# vars = 'Times,U,V,T,THM,CLDFRA,P,PH,PHB,MU,QVAPOR,QCLOUD,QRAIN,QICE,QSNOW,QGRAUP,QNICE,QNRAIN,U10,V10,T2,Q2,PSFC,TSLB,SMOIS,TSK'  # ','.join(update_vars)
# vars = 'Times,LU_INDEX,ZNU,ZNW,ZS,DZS,VAR_SSO,U,V,W,PH,PHB,T,THM,MU,MUB,P,PB,FNM,FNP,RDNW,RDN,DNW,DN,CFN,CFN1,THIS_IS_AN_IDEAL_RUN,P_HYD,Q2,T2,TH2,PSFC,U10,V10,RDX,RDY,RESM,ZETATOP,CF1,CF2,CF3,QVAPOR,QCLOUD,QRAIN,QICE,QSNOW,QGRAUP,QNICE,QNRAIN,SHDMAX,SHDMIN,SNOALB,TSLB,SMOIS,SH2O,SMCREL,SEAICE,IVGTYP,ISLTYP,VEGFRA,SNOW,SNOWH,CANWAT,LAI,QKE,VAR,F,E,SINALPHA,COSALPHA,HGT,TSK,P_TOP,T00,P00,TLP,TISO,TLP_STRAT,P_STRAT,CLDFRA,CLAT,ALBBCK,TMN,XLAND,SNOWC,SR,SAVE_TOPO_FROM_REAL,C1H,C2H,C1F,C2F,C3H,C4H,C3F,C4F,PCB,PC,LANDMASK,LAKEMASK,SST'  # these are all variables, that are in wrfout and wrfinput

for iens in range(1, exp.n_ens+1):
    print('update state in wrfinput wrfout file to DART background file')
    wrfout = exppath.replace('*iens*', str(iens))
    wrfin = cluster.wrf_rundir(iens)+'/wrfinput_d01'
    print('updating', wrfin, 'to state in', wrfout)
    assert os.path.isfile(wrfout), wrfout
    os.makedirs(os.path.dirname(wrfin), exist_ok=True)

    # overwrite variables in wrfinput file
    # os.system(cluster.ncks+' -A -v '+vars+' '+wrfout+' '+wrfin)
    copy(wrfout, wrfin)
@@ -51,7 +51,7 @@
    output_timestamps = .false.,
    trace_execution = .false.,
-   stages_to_write = 'postassim', 'output'
+   stages_to_write = 'output'
    output_members = .true.
    output_mean = .true.
    output_sd = .true.
@@ -13,10 +13,13 @@
 end_second = 0, 00, 00,
 interval_seconds = 86400
 history_interval = <hist_interval>, 15, 15,
+write_hist_at_0h_rst = .true.,
 frames_per_outfile = 1, 1, 1,
 history_outname = '<archivedir>/wrfout_d<domain>_<date>'
-restart = .false.,
-restart_interval = 720,
+restart = <restart>,
+override_restart_timers = .true.,
+restart_interval = <restart_interval>,
+rst_outname = '<archivedir>/wrfrst_d<domain>_<date>',
 io_form_history = 2
 io_form_restart = 2
 io_form_input = 2
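To illustrate what the new placeholders produce, here is a sketch (not part of the commit) of the substitution that prepare_namelist.py performs with sed_inplace, done here with plain str.replace and a placeholder archive path; <domain> and <date> are left untouched because WRF expands them itself at run time:

template = (" restart = <restart>,\n"
            " override_restart_timers = .true.,\n"
            " restart_interval = <restart_interval>,\n"
            " rst_outname = '<archivedir>/wrfrst_d<domain>_<date>',\n")
filled = (template.replace('<restart>', '.true.')
                  .replace('<restart_interval>', '720')
                  .replace('<archivedir>', '/path/to/archive/2008-07-30_12:00/1'))
print(filled)
# restart = .true.,
# override_restart_timers = .true.,
# restart_interval = 720,
# rst_outname = '/path/to/archive/2008-07-30_12:00/1/wrfrst_d<domain>_<date>',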