Commit 26284067 authored by Lukas Kugler

various updates

parent 93162e16
......@@ -7,13 +7,13 @@ class ExperimentConfiguration(object):
pass
exp = ExperimentConfiguration()
exp.expname = "exp_v1.10_LMU+shear_filter"
exp.expname = "exp_v1.11_LMU_filter2"
exp.model_dx = 2000
exp.timestep = 10
exp.n_ens = 20
exp.n_nodes = 5
exp.n_ens = 40
exp.n_nodes = 10
exp.n_obs = 100
exp.error_variance = 0.001
exp.error_variance = 0.0009
# directory paths depend on the name of the experiment
......
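The new error_variance of 0.0009 is presumably the value that appears further down in pre_assim.py as the Fortran-style string 9.000000000000000E-004; a quick check, for illustration only, that the two notations denote the same number:

# the Fortran-formatted string used in pre_assim.py parses to the same float as 0.0009
assert float('9.000000000000000E-004') == 0.0009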
......@@ -20,18 +20,22 @@ vsc.name = 'vsc'
vsc.python = '/home/fs71386/lkugler/miniconda3/bin/python'
vsc.ncks = '/home/fs71386/lkugler/miniconda3/envs/DART/bin/ncks'
vsc.userdir = '/home/fs71386/lkugler'
vsc.srcdir = '/home/fs71386/lkugler/compile/WRF/WRF-4.1.5/run'
vsc.srcdir = '/home/fs71386/lkugler/compile/WRF/WRF-4.2.1/run'
vsc.dart_srcdir = '/home/fs71386/lkugler/DART/DART_WRF_RTTOV_early_access/models/wrf/work'
vsc.dartrundir = '/home/fs71386/lkugler/run_DART'
vsc.scriptsdir = '/home/fs71386/lkugler/DART-WRF/scripts'
vsc.nature_wrfout = '/home/fs71386/lkugler/data/sim_archive/exp_v1.10_LMU+shear_nature/2008-07-30_06:00/2/wrfout_d01_%Y-%m-%d_%H:%M:%S'
vsc.ideal = vsc.userdir+'/compile/bin/ideal-v4.1.5_v1.10.exe'
vsc.wrfexe = vsc.userdir+'/compile/bin/wrf-v4.1.5_v1.10.exe'
vsc.namelist = vsc.scriptsdir+'/templates/namelist.input'
vsc.nature_wrfout = '/home/fs71386/lkugler/data/sim_archive/exp_v1.11_LMU_nature/2008-07-30_06:00/2/wrfout_d01_%Y-%m-%d_%H:%M:%S'
vsc.input_profile = '/home/fs71386/lkugler/wrf_sounding/data/wrf/ens/from_LMU/raso.raso.<iens>.wrfprof'
vsc.ideal = vsc.userdir+'/compile/bin/ideal-v4.2.1_v1.11.exe'
vsc.wrfexe = vsc.userdir+'/compile/bin/wrf-v4.2.1_v1.11.exe'
vsc.namelist = vsc.scriptsdir+'/../templates/namelist.input'
vsc.run_WRF = '/gpfs/data/fs71386/lkugler/DART-WRF/scripts/osse/run_ens.vsc.sh'
vsc.slurm_cfg = {"account": "p71386", "partition": "mem_0384", "qos": "p71386_0384",
"ntasks-per-node": "48", "ntasks-per-core": 1}
"ntasks-per-node": "48", "ntasks-per-core": 1,
"mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"}
jet = ClusterConfig()
......
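For orientation, slurmpy expands each key/value pair of slurm_kwargs into an #SBATCH directive, so the slurm_cfg above roughly corresponds to the header lines printed by this sketch (assuming slurmpy's usual --key=value formatting):

# prints the sbatch directives implied by vsc.slurm_cfg (illustration only)
slurm_cfg = {"account": "p71386", "partition": "mem_0384", "qos": "p71386_0384",
             "ntasks-per-node": "48", "ntasks-per-core": 1,
             "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"}
for key, value in slurm_cfg.items():
    print('#SBATCH --{}={}'.format(key, value))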
......@@ -11,8 +11,10 @@ from slurmpy import Slurm
sys.path.append(os.getcwd())
from config.cfg import exp, cluster
from utils import script_to_str
from scripts.utils import script_to_str, symlink
# allow scripts to access the configuration
symlink(cluster.scriptsdir+'/../config', cluster.scriptsdir+'/config')
def my_Slurm(*args, cfg_update=dict(), **kwargs):
"""Shortcut to slurmpy's class; keep default kwargs
......@@ -28,16 +30,19 @@ def clear_logs(backup_existing_to_archive=True):
archdir = cluster.archivedir()+d
if backup_existing_to_archive:
os.makedirs(archdir, exist_ok=True)
for f in os.listdir(cluster.scriptsdir+d):
dir = cluster.scriptsdir+'/../'+d
for f in os.listdir(dir):
if backup_existing_to_archive:
shutil.move(cluster.scriptsdir+d+f, archdir+f)
shutil.move(dir+f, archdir+f)
else:
os.remove(cluster.scriptsdir+d+f)
os.remove(dir+f)
def prep_osse():
def prepare_wrfinput():
"""Create WRF/run directories and wrfinput files
"""
s = my_Slurm("pre_osse", cfg_update={"nodes": "1", "ntasks": "1", "time": "5",
"mail-type": "BEGIN", "mail-user": "lukas.kugler@univie.ac.at"})
id = s.run(cluster.python+' '+cluster.scriptsdir+'/prep_osse.py')
"mail-type": "BEGIN"})
id = s.run(cluster.python+' '+cluster.scriptsdir+'/prepare_wrfinput.py')
s = my_Slurm("ideal", cfg_update={"nodes": "1", "time": "10", "mem-per-cpu": "2G"})
cmd = """# run ideal.exe in parallel, then add geodata
......@@ -61,6 +66,18 @@ done
id = s.run(cmd, depends_on=[id])
return id
def update_wrfinput_from_archive(time, background_init_time, exppath, depends_on=None):
"""Given that directories with wrfinput files exist,
update these wrfinput files according to wrfout files
"""
s = my_Slurm("upd_wrfinput", cfg_update={"nodes": "1", "ntasks": "1", "time": "5"})
# path of initial conditions, <iens> is replaced by member index
IC_path = exppath + background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
+'*iens*/'+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
id = s.run(cluster.python+' '+cluster.scriptsdir+'/update_wrfinput_from_wrfout.py '
+IC_path, depends_on=[depends_on])
return id
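For orientation, with the values used later in this script (exppath_arch, background_init_time = 10:45 UTC, time = 11:00 UTC) the path template expands as shown below; update_wrfinput_from_wrfout.py then replaces *iens* by the member index. Illustration only:

import datetime as dt

exppath = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.11_LMU_filter'
background_init_time = dt.datetime(2008, 7, 30, 10, 45)
time = dt.datetime(2008, 7, 30, 11, 0)

IC_path = exppath + background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
          +'*iens*/'+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
print(IC_path)
# /gpfs/data/fs71386/lkugler/sim_archive/exp_v1.11_LMU_filter/2008-07-30_10:45/*iens*//wrfout_d01_2008-07-30_11:00:00
# (the doubled slash is harmless on POSIX file systems)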
def run_ENS(begin, end, depends_on=None, **kwargs):
prev_id = depends_on
......@@ -86,11 +103,9 @@ def run_ENS(begin, end, depends_on=None, **kwargs):
def gen_synth_obs(time, depends_on=None):
prev_id = depends_on
s = my_Slurm("pre_gensynthobs", cfg_update=dict(nodes="1", ntasks="1", time="2"))
id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_gen_synth_obs.py '+time.strftime('%Y-%m-%d_%H:%M'),
depends_on=[prev_id])
id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_gen_synth_obs.py '
+time.strftime('%Y-%m-%d_%H:%M'), depends_on=[depends_on])
s = my_Slurm("gensynth", cfg_update=dict(nodes="1", time="20"))
cmd = 'cd '+cluster.dartrundir+'; mpirun -np 24 ./perfect_model_obs'
......@@ -98,96 +113,81 @@ def gen_synth_obs(time, depends_on=None):
return id2
def assimilate(assim_time, background_init_time, depends_on=None, **kwargs):
def assimilate(assim_time, background_init_time,
first_guess=None, depends_on=None, **kwargs):
prev_id = depends_on
if first_guess is None:
first_guess = cluster.archivedir()
s = my_Slurm("preAssim", cfg_update=dict(nodes="1", ntasks="1", time="2"))
id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_assim.py ' \
+assim_time.strftime('%Y-%m-%d_%H:%M ') \
+background_init_time.strftime('%Y-%m-%d_%H:%M'),
+assim_time.strftime('%Y-%m-%d_%H:%M ')
+background_init_time.strftime('%Y-%m-%d_%H:%M ')
+first_guess,
depends_on=[prev_id])
s = my_Slurm("Assim", cfg_update=dict(nodes="1", time="50", mem="200G"))
cmd = 'cd '+cluster.dartrundir+'; mpirun -genv I_MPI_PIN_PROCESSOR_LIST=0-47 -np 48 ./filter'
cmd = 'cd '+cluster.dartrundir+'; mpirun -np 48 ./filter'
id2 = s.run(cmd, depends_on=[id])
s = my_Slurm("archiveAssim", cfg_update=dict(nodes="1", ntasks="1", time="10"))
id3 = s.run(cluster.python+' '+cluster.scriptsdir+'/archive_assim.py '
+ time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id2])
+ assim_time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id2])
s = my_Slurm("updateIC", cfg_update=dict(nodes="1", ntasks="1", time="3"))
id4 = s.run(cluster.python+' '+cluster.scriptsdir+'/update_wrfinput_from_filteroutput.py '
+time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id3])
+assim_time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id3])
return id4
def prep_initials_from_archive(time):
s = my_Slurm("pre_initial", cfg_update={"nodes": "1", "ntasks": "1", "time": "5",
"mail-type": "BEGIN", "mail-user": "lukas.kugler@univie.ac.at"})
id = s.run(cluster.python+' '+cluster.scriptsdir+'/prep_osse.py')
s = my_Slurm("ideal", cfg_update={"nodes": "1", "time": "10", "mem-per-cpu": "2G"})
cmd = """# run ideal.exe in parallel, then add geodata
export SLURM_STEP_GRES=none
for ((n=1; n<="""+str(exp.n_ens)+"""; n++))
do
rundir="""+cluster.userdir+'/run_WRF/'+exp.expname+"""/$n
echo $rundir
cd $rundir
mpirun -np 1 ./ideal.exe &
done
wait
for ((n=1; n<="""+str(exp.n_ens)+"""; n++))
do
rundir="""+cluster.userdir+'/run_WRF/'+exp.expname+"""/$n
mv $rundir/rsl.out.0000 $rundir/rsl.out.input
done
"""
id2 = s.run(cmd, depends_on=[id])
s = my_Slurm("upd_wrfinput", cfg_update={"nodes": "1", "ntasks": "1", "time": "5",
"mail-type": "BEGIN", "mail-user": "lukas.kugler@univie.ac.at"})
id3 = s.run(cluster.python+' '+cluster.scriptsdir+'/update_wrfinput_from_wrfout.py '
+time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id2])
return id3
def mailme(depends_on=None):
id = depends_on
if id:
s = my_Slurm("AllFinished", cfg_update={"nodes": "1", "ntasks": "1", "time": "1",
"mail-type": "BEGIN", "mail-user": "lukas.kugler@univie.ac.at"})
"mail-type": "BEGIN"})
s.run('sleep 1', depends_on=[id])
################################
print('starting osse')
clear_logs(backup_existing_to_archive=True)
#id = prep_osse() # create initial conditions
id = None
# spin up the ensemble
#background_init_time = dt.datetime(2008, 7, 30, 6, 0)
#integration_end_time = dt.datetime(2008, 7, 30, 11, 0)
#id = run_ENS(begin=background_init_time,
# end=integration_end_time,
# depends_on=id)
clear_logs(backup_existing_to_archive=True)
background_init_time = dt.datetime(2008, 7, 30, 13, 0)
time = dt.datetime(2008, 7, 30, 13, 15) #integration_end_time
is_new_run = True
if is_new_run:
id = prepare_wrfinput() # create initial conditions
#id = prep_initials_from_archive(time)
# spin up the ensemble
background_init_time = dt.datetime(2008, 7, 30, 6, 0)
integration_end_time = dt.datetime(2008, 7, 30, 10, 0)
id = run_ENS(begin=background_init_time,
end=integration_end_time,
depends_on=id)
time = integration_end_time
else:
# get initial conditions from archive
background_init_time = dt.datetime(2008, 7, 30, 10, 45)
time = dt.datetime(2008, 7, 30, 11, 0)
exppath_arch = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.11_LMU_filter'
first_guess = exppath_arch
id = update_wrfinput_from_archive(time, background_init_time, exppath_arch)
# now, start the ensemble data assimilation cycles
timedelta_integrate = dt.timedelta(minutes=15)
timedelta_btw_assim = dt.timedelta(minutes=15)
while time < dt.datetime(2008, 7, 30, 16, 15):
while time < dt.datetime(2008, 7, 30, 14, 15):
assim_time = time
id = gen_synth_obs(assim_time, depends_on=id)
id = assimilate(assim_time,
background_init_time,
first_guess=first_guess,
depends_on=id)
# first_guess = None #
background_init_time = assim_time # start integration from here
integration_end_time = assim_time + timedelta_integrate
id = run_ENS(begin=background_init_time,
......
......@@ -9,31 +9,47 @@ from utils import symlink, copy_scp_srvx8, copy, mkdir, mkdir_srvx8, clean_wrfdi
time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
try:
print('archive obs space diagnostics')
savedir = cluster.archivedir()+'/obs_seq_final/'
mkdir(savedir)
copy(cluster.dartrundir+'/obs_seq.final', savedir+time.strftime('/%Y-%m-%d_%H:%M_obs_seq.final'))
except Exception as e:
warnings.warn(str(e))
try:
print('archive regression diagnostics')
savedir = cluster.archivedir()+'/reg_factor/'
mkdir(savedir)
copy(cluster.dartrundir+'/reg_diagnostics', savedir+time.strftime('/%Y-%m-%d_%H:%M_reg_diagnostics'))
except Exception as e:
warnings.warn(str(e))
print('archive model state')
try:
mkdir(cluster.archivedir())
# copy mean and sd to archive
for f in ['preassim_mean.nc', 'preassim_sd.nc',
'output_mean.nc', 'output_sd.nc']:
copy(cluster.dartrundir+'/'+f,
cluster.archivedir()+'/'+f[:-3]+time.strftime('_%Y-%m-%d_%H:%M:%S'))
print('copy members to archive')
print('copy prior posterior members to archive')
for iens in range(1, exp.n_ens+1):
savedir = cluster.archivedir()+'/'+time.strftime('/%Y-%m-%d_%H:%M/')+str(iens)
savedir = cluster.archivedir()+time.strftime('/%Y-%m-%d_%H:%M/')+str(iens)
mkdir(savedir)
copy(cluster.dartrundir+'/input.nml',
cluster.archivedir()+time.strftime('/%Y-%m-%d_%H:%M/input.nml'))
filter_in = cluster.dartrundir+'/preassim_member_'+str(iens).zfill(4)+'.nc'
filter_out = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4)
copy(filter_in, savedir+time.strftime('/%Y-%m-%d_%H:%M_prior'))
copy(filter_out, savedir+time.strftime('/%Y-%m-%d_%H:%M_posterior'))
# copy mean and sd to archive
print('copy preassim, postassim mean and sd')
for f in ['preassim_mean.nc', 'preassim_sd.nc',
'output_mean.nc', 'output_sd.nc']:
copy(cluster.dartrundir+'/'+f,
cluster.archivedir()+'/'+f[:-3]+time.strftime('_%Y-%m-%d_%H:%M:%S'))
except Exception as e:
warnings.warn(str(e))
import os, sys, shutil
import datetime as dt
from config.cfg import exp, cluster
from utils import symlink, copy_scp_srvx8, copy
from utils import symlink, copy_scp_srvx8, copy, sed_inplace
assim_time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
background_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
exppath_firstguess = str(sys.argv[3])
#if cluster.name != 'srvx8':
# copy = copy_scp_srvx8 # use scp
......@@ -15,7 +15,7 @@ for iens in range(1, exp.n_ens+1):
#wrfout_run = cluster.wrf_rundir(iens) + time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
print('link wrfout file to DART background file')
wrfout_run = cluster.archivedir()+background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
wrfout_run = exppath_firstguess+background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
+str(iens)+'/'+assim_time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
dart_ensdir = cluster.dartrundir+'/advance_temp'+str(iens)
wrfout_dart = dart_ensdir+'/wrfout_d01'
......@@ -49,3 +49,5 @@ os.system('rm -rf '+cluster.dartrundir+'/output_mean*')
os.system('rm -rf '+cluster.dartrundir+'/output_sd*')
os.system('rm -rf '+cluster.dartrundir+'/perfect_output_*')
print('replace measurement error with obs error for assimilation') # FIXME !!! temporary only
sed_inplace(cluster.dartrundir+'/obs_seq.out', '9.000000000000000E-004', '0.04')
import os, sys, shutil
import datetime as dt
from config.cfg import exp, cluster
......@@ -7,10 +6,10 @@ from utils import symlink, copy, sed_inplace, append_file
time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
# ensure correct input.nml
copy(cluster.scriptsdir+'/templates/input.nml',
copy(cluster.scriptsdir+'/../templates/input.nml',
cluster.dartrundir+'/input.nml')
sed_inplace(cluster.dartrundir+'/input.nml', '<n_ens>', str(int(exp.n_ens)))
append_file(cluster.dartrundir+'/input.nml', cluster.scriptsdir+'/templates/obs_def_rttov.VIS.nml')
append_file(cluster.dartrundir+'/input.nml', cluster.scriptsdir+'/../templates/obs_def_rttov.VIS.nml')
# prepare observation file
import create_obs_sat
......@@ -26,5 +25,5 @@ shutil.copy(time.strftime(cluster.nature_wrfout),
import wrfout_add_geo
wrfout_add_geo.run(cluster.dartrundir+'/geo_em.d01.nc', cluster.dartrundir+'/wrfout_d01')
# prepare wrfinput_d01 for DART (for dimensions of variable)
# DART may need a wrfinput file as well, which serves as a template for dimension sizes
symlink(cluster.dartrundir+'/wrfout_d01', cluster.dartrundir+'/wrfinput_d01')
import os, sys
from config.cfg import exp, cluster
from utils import symlink
......
import os, sys, shutil
import datetime as dt
......@@ -8,13 +7,14 @@ from utils import sed_inplace, copy, symlink
def run(cluster, iens, begin, end):
rundir = cluster.wrf_rundir(iens)
print(rundir)
copy(cluster.namelist, rundir+'/namelist.input')
sed_inplace(rundir+'/namelist.input', '<dx>', str(int(exp.model_dx)))
sed_inplace(rundir+'/namelist.input', '<timestep>', str(int(exp.timestep)))
archdir = cluster.archivedir()+begin.strftime('/%Y-%m-%d_%H:%M/'+str(iens)+'/')
print('namelist for run from', begin, end, 'output to', archdir)
sed_inplace(rundir+'/namelist.input', '<archivedir>', archdir)
os.makedirs(archdir, exist_ok=True)
......@@ -32,6 +32,5 @@ if __name__ == '__main__':
end = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
print('prepare namelists for all ens members')
print('begin', begin, 'end', end)
for iens in range(1, exp.n_ens+1):
run(cluster, iens, begin, end)
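sed_inplace fills the <dx> and <timestep> placeholders of the namelist template with the values from config/cfg.py; a self-contained illustration of that substitution, using a hypothetical template line (the real templates/namelist.input is not part of this diff):

# hypothetical template fragment, substituted the same way as in run() above
line = ' time_step = <timestep>,   dx = <dx>,   dy = <dx>,'
line = line.replace('<timestep>', str(int(10)))    # exp.timestep
line = line.replace('<dx>', str(int(2000)))        # exp.model_dx
print(line)   # ' time_step = 10,   dx = 2000,   dy = 2000,'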
import os, sys, shutil
import datetime as dt
from config.cfg import exp, cluster
from utils import symlink, copy_contents, copy
from utils import symlink, copy, link_contents
import prepare_namelist
# archive configuration
......
......@@ -2,10 +2,10 @@ import os, sys, shutil, glob
from config.cfg import exp, cluster
from utils import symlink, copy, sed_inplace, append_file
folder_obs_seq_final = '/home/fs71386/lkugler/data/sim_archive/exp_v1.10_LMU+shear_filter/obs_seq_final/'
folder_obs_seq_final = '/home/fs71386/lkugler/data/sim_archive/exp_v1.11_LMU_filter/obs_seq_final/'
files = sorted(glob.glob(folder_obs_seq_final+'/*.final')) # input for obs_diag program
rundir_program = '/home/fs71386/lkugler/DART/rundir-diagnostics/'
rundir_program = '/home/fs71386/lkugler/data/DART-WRF/rundir/'
fpath = rundir_program+'/obsdiag_inputlist.txt'
print('writing', fpath)
......@@ -19,24 +19,22 @@ with open(fpath, 'w') as f:
print('ensure correct input.nml')
copy(cluster.scriptsdir+'/templates/input.nml',
copy(cluster.scriptsdir+'/../templates/input.nml',
rundir_program+'/input.nml') #cluster.dartrundir+'/input.nml')
sed_inplace(rundir_program+'/input.nml', '<n_ens>', str(int(exp.n_ens)))
append_file(rundir_program+'/input.nml', cluster.scriptsdir+'/templates/obs_def_rttov.VIS.nml')
append_file(rundir_program+'/input.nml', cluster.scriptsdir+'/../templates/obs_def_rttov.VIS.nml')
# run obs_diag
print('running obs_diag program')
os.chdir(rundir_program)
symlink(cluster.dartrundir+'/obs_diag', rundir_program+'/obs_diag')
symlink(cluster.dart_srcdir+'/obs_diag', rundir_program+'/obs_diag')
os.system('./obs_diag >& obs_diag.log')
print('moving output to', cluster.archivedir()+'/obs_diag_output.nc')
copy(rundir_program+'/obs_diag_output.nc', cluster.archivedir()+'/obs_diag_output.nc')
print('running obs_seq_to_netcdf program')
symlink(cluster.dartrundir+'/obs_seq_to_netcdf', rundir_program+'/obs_diag_output.nc')
symlink(cluster.dart_srcdir+'/obs_seq_to_netcdf', rundir_program+'/obs_diag_output.nc')
os.system('./obs_seq_to_netcdf >& obs_seq_to_netcdf.log')
print('moving output to', cluster.archivedir()+'/obs_seq_output.nc')
copy(rundir_program+'/obs_diag_output.nc', cluster.archivedir()+'/obs_seq_output.nc')
os.system('rm obs_seq_to_netcdf obs_diag')
import os, sys, warnings
import datetime as dt
from config.cfg import exp, cluster
from utils import symlink, copy_scp_srvx8, copy, mkdir, mkdir_srvx8, clean_wrfdir
......
import os, sys, shutil
import datetime as dt
from config.cfg import exp, cluster
from utils import symlink, copy_scp_srvx8, copy
import prepare_namelist
time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
exppath = str(sys.argv[1])
# time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
# background_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
update_vars = ['Times', 'U', 'V', 'PH', 'T', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'PSFC', 'TSK']
vars = ','.join(update_vars)
for iens in range(1, exp.n_ens+1):
print('update state in wrfinput wrfout file to DART background file')
wrfout = cluster.archivedir()+background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
+str(iens)+'/'+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
wrfout = exppath.replace('*iens*', str(iens))
wrfin = cluster.wrf_rundir(iens)+'/wrfinput_d01'
print('updating', wrfin, 'to state in', wrfout)
assert os.path.isfile(wrfout)
# overwrite variables in wrfinput file
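# ncks -A appends to the existing target file, overwriting variables of the same name;
# -v restricts this to the comma-separated list in `vars`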
os.system(cluster.ncks+' -A -v '+vars+' '+wrfout+' '+wrfin)
import os, sys, shutil, glob
copy = shutil.copy
#copy = shutil.copy
def copy(src, dst):
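# remove any pre-existing destination file first, then copy src over it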
try:
os.remove(dst)
except:
pass
shutil.copy(src, dst)
def mkdir(path):
os.system('mkdir -p '+path)
......@@ -26,6 +32,10 @@ def symlink(src, dst):
pass
os.symlink(src, dst)
def link_contents(src, dst):
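# symlink every entry of src into dst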
for f in os.listdir(src):
symlink(src+'/'+f, dst+'/'+f)
def copy_scp_srvx8(src, dst):
os.system('scp '+src+' a1254888@srvx8.img.univie.ac.at:'+dst)
......@@ -54,5 +64,3 @@ def sed_inplace(filename, pattern, repl):
def append_file(f_main, f_gets_appended):
os.system('cat '+f_gets_appended+' >> '+f_main)
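sed_inplace and append_file are used throughout this commit, e.g. to fill <n_ens> in input.nml and, in pre_assim.py, to overwrite the assumed observation error variance 9.000000000000000E-004 with 0.04. The body of sed_inplace is collapsed in this diff; a minimal pure-Python equivalent, as a sketch only (the project's actual implementation may differ, e.g. in whether pattern is a regex):

import re

def sed_inplace(filename, pattern, repl):
    # read the whole file, substitute every occurrence of pattern
    # (treated as a regular expression here) and write the result back in place
    with open(filename) as f:
        text = f.read()
    with open(filename, 'w') as f:
        f.write(re.sub(pattern, repl, text))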
#!/home/fs71386/lkugler/miniconda3/envs/DART/bin/python
#!/jetfs/home/lkugler/miniconda3/envs/DART/bin/python
"""Add geogrid data to wrfinput
this is needed for DART, but not provided by ideal.exe
......@@ -13,6 +10,7 @@ example call:
"""
import os, sys
import netCDF4 as nc
from config.cfg import exp, cluster
def run(geo_data_file, wrfinput_file):
......
#!/jetfs/home/lkugler/miniconda3/envs/DART/bin/python
"""Add geogrid data to wrfinput
this is needed for DART, but not provided by ideal.exe
......@@ -11,6 +9,7 @@ example call:
"""
import os, sys
import netCDF4 as nc
from config.cfg import exp, cluster
def run(geo_data_file, wrfout_file):
......
......@@ -104,10 +104,10 @@
# to ~half at 300 km, and ~0 at the edges of the area.
&assim_tools_nml
filter_kind = 1,
cutoff = 0.05,
cutoff = 0.005,
sort_obs_inc = .false.,
spread_restoration = .false.,
sampling_error_correction = .false.,
sampling_error_correction = .true.,
adaptive_localization_threshold = -1,
output_localization_diagnostics = .false.,
localization_diagnostics_file = 'localization_diagnostics',
......@@ -242,7 +242,7 @@
# an internal structure that speeds up searches. don't change it
# based on your grid size. nlon must be an odd number.
&location_nml
horiz_dist_only = .false.,
horiz_dist_only = .true.,
vert_normalization_pressure = 6666666.7,
vert_normalization_height = 5000000.0,
vert_normalization_level = 2666.7,
......@@ -267,7 +267,7 @@
&reg_factor_nml
select_regression = 1,
input_reg_file = "time_mean_reg",
save_reg_diagnostics = .false.,
save_reg_diagnostics = .true.,
reg_diagnostics_file = "reg_diagnostics",
/
......@@ -276,7 +276,7 @@
# tasks_per_node is set to match your hardware
&ensemble_manager_nml
layout = 2,
tasks_per_node = 16
tasks_per_node = 48
/
&obs_def_gps_nml
......@@ -299,7 +299,7 @@
obs_sequence_name = '',
obs_sequence_list = 'obsdiag_inputlist.txt',
first_bin_center = 2008, 7,30,11, 0, 0 ,
last_bin_center = 2008, 7,30,13, 0, 0 ,
last_bin_center = 2008, 7,30,16, 0, 0 ,
bin_separation = 0, 0, 0, 0,15, 0 ,
bin_width = 0, 0, 0, 0,15, 0 ,
time_to_skip = 0, 0, 0, 0, 0, 0 ,
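For reference, moving last_bin_center from 13 to 16 UTC with 15-minute bin separation and width corresponds to roughly 21 verification bins (a quick check, illustration only):

import datetime as dt
first_bin = dt.datetime(2008, 7, 30, 11, 0)
last_bin  = dt.datetime(2008, 7, 30, 16, 0)
bin_sep   = dt.timedelta(minutes=15)
print(int((last_bin - first_bin) / bin_sep) + 1)   # 21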
......@@ -330,8 +330,8 @@
/
&obs_seq_to_netcdf_nml
obs_sequence_name = 'obs_seq.final',
obs_sequence_list = '',
obs_sequence_name = '',
obs_sequence_list = 'obsdiag_inputlist.txt',
append_to_netcdf = .false.,
lonlim1 = 0.0,
lonlim2 = 360.0,
......