diff --git a/config/cfg.py b/config/cfg.py
index 18c80b68b9a55c79d616e98571dc613b655b08e2..328462eee0bd09944e4bfe4fb42f647992dc71bc 100755
--- a/config/cfg.py
+++ b/config/cfg.py
@@ -7,13 +7,13 @@ class ExperimentConfiguration(object):
     pass
 
 exp = ExperimentConfiguration()
-exp.expname = "exp_v1.10_LMU+shear_filter"
+exp.expname = "exp_v1.11_LMU_filter2"
 exp.model_dx = 2000
 exp.timestep = 10
-exp.n_ens = 20
-exp.n_nodes = 5
+exp.n_ens = 40
+exp.n_nodes = 10
 exp.n_obs = 100
-exp.error_variance = 0.001
+exp.error_variance = 0.0009
 
 
 # directory paths depend on the name of the experiment
diff --git a/config/clusters.py b/config/clusters.py
index 88b02673fb1afc12ed62f9067f5af2ff0ad9a3cf..2c0ee8c88cda2d2ff8d52b837498101b1120d3e1 100755
--- a/config/clusters.py
+++ b/config/clusters.py
@@ -20,18 +20,22 @@ vsc.name = 'vsc'
 vsc.python = '/home/fs71386/lkugler/miniconda3/bin/python'
 vsc.ncks = '/home/fs71386/lkugler/miniconda3/envs/DART/bin/ncks'
 vsc.userdir = '/home/fs71386/lkugler'
-vsc.srcdir = '/home/fs71386/lkugler/compile/WRF/WRF-4.1.5/run'
+vsc.srcdir = '/home/fs71386/lkugler/compile/WRF/WRF-4.2.1/run'
+vsc.dart_srcdir = '/home/fs71386/lkugler/DART/DART_WRF_RTTOV_early_access/models/wrf/work'
 vsc.dartrundir = '/home/fs71386/lkugler/run_DART'
 vsc.scriptsdir = '/home/fs71386/lkugler/DART-WRF/scripts'
-vsc.nature_wrfout = '/home/fs71386/lkugler/data/sim_archive/exp_v1.10_LMU+shear_nature/2008-07-30_06:00/2/wrfout_d01_%Y-%m-%d_%H:%M:%S'
-vsc.ideal = vsc.userdir+'/compile/bin/ideal-v4.1.5_v1.10.exe'
-vsc.wrfexe = vsc.userdir+'/compile/bin/wrf-v4.1.5_v1.10.exe'
-vsc.namelist = vsc.scriptsdir+'/templates/namelist.input'
+vsc.nature_wrfout = '/home/fs71386/lkugler/data/sim_archive/exp_v1.11_LMU_nature/2008-07-30_06:00/2/wrfout_d01_%Y-%m-%d_%H:%M:%S'
+vsc.input_profile = '/home/fs71386/lkugler/wrf_sounding/data/wrf/ens/from_LMU/raso.raso.<iens>.wrfprof'
+
+vsc.ideal = vsc.userdir+'/compile/bin/ideal-v4.2.1_v1.11.exe'
+vsc.wrfexe = vsc.userdir+'/compile/bin/wrf-v4.2.1_v1.11.exe'
+vsc.namelist = vsc.scriptsdir+'/../templates/namelist.input'
 vsc.run_WRF = '/gpfs/data/fs71386/lkugler/DART-WRF/scripts/osse/run_ens.vsc.sh'
 
 vsc.slurm_cfg = {"account": "p71386", "partition": "mem_0384", "qos": "p71386_0384",
-                 "ntasks-per-node": "48", "ntasks-per-core": 1}
+                 "ntasks-per-node": "48", "ntasks-per-core": 1,
+                 "mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"}
 
 
 jet = ClusterConfig()
diff --git a/scheduler.py b/scheduler.py
index 57af536a103db8a1f3909b7a5dc5b5adeaad8256..de82895f8e35ab5ee5e25cb6ea3a52f87683d1b2 100755
--- a/scheduler.py
+++ b/scheduler.py
@@ -11,8 +11,10 @@ from slurmpy import Slurm
 sys.path.append(os.getcwd())
 from config.cfg import exp, cluster
-from utils import script_to_str
+from scripts.utils import script_to_str, symlink
 
+# allow scripts to access the configuration
+symlink(cluster.scriptsdir+'/../config', cluster.scriptsdir+'/config')
 
 def my_Slurm(*args, cfg_update=dict(), **kwargs):
     """Shortcut to slurmpy's class; keep default kwargs
@@ -26,18 +28,21 @@ def clear_logs(backup_existing_to_archive=True):
     dirs = ['/logs/', '/slurm-scripts/']
     for d in dirs:
         archdir = cluster.archivedir()+d
-        if backup_existing_to_archive: 
+        if backup_existing_to_archive:
             os.makedirs(archdir, exist_ok=True)
-        for f in os.listdir(cluster.scriptsdir+d):
-            if backup_existing_to_archive:
-                shutil.move(cluster.scriptsdir+d+f, archdir+f)
+        dir = cluster.scriptsdir+'/../'+d
+        for f in os.listdir(dir):
+            if backup_existing_to_archive:
+                shutil.move(dir+f, archdir+f)
             else:
-                os.remove(cluster.scriptsdir+d+f)
+                os.remove(dir+f)
 
-def prep_osse():
+def prepare_wrfinput():
+    """Create WRF/run directories and wrfinput files
+    """
     s = my_Slurm("pre_osse", cfg_update={"nodes": "1", "ntasks": "1", "time": "5",
-                 "mail-type": "BEGIN", "mail-user": "lukas.kugler@univie.ac.at"})
-    id = s.run(cluster.python+' '+cluster.scriptsdir+'/prep_osse.py')
+                 "mail-type": "BEGIN"})
+    id = s.run(cluster.python+' '+cluster.scriptsdir+'/prepare_wrfinput.py')
 
     s = my_Slurm("ideal", cfg_update={"nodes": "1", "time": "10", "mem-per-cpu": "2G"})
     cmd = """# run ideal.exe in parallel, then add geodata
@@ -61,6 +66,18 @@ done
     id = s.run(cmd, depends_on=[id])
     return id
 
+def update_wrfinput_from_archive(time, background_init_time, exppath, depends_on=None):
+    """Given that directories with wrfinput files exist,
+    update these wrfinput files according to wrfout files
+    """
+    s = my_Slurm("upd_wrfinput", cfg_update={"nodes": "1", "ntasks": "1", "time": "5"})
+
+    # path of initial conditions, <iens> is replaced by member index
+    IC_path = exppath + background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
+              +'*iens*/'+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
+    id = s.run(cluster.python+' '+cluster.scriptsdir+'/update_wrfinput_from_wrfout.py '
+               +IC_path, depends_on=[depends_on])
+    return id
 
 def run_ENS(begin, end, depends_on=None, **kwargs):
     prev_id = depends_on
@@ -86,11 +103,9 @@ def run_ENS(begin, end, depends_on=None, **kwargs):
 
 def gen_synth_obs(time, depends_on=None):
-    prev_id = depends_on
-
     s = my_Slurm("pre_gensynthobs", cfg_update=dict(nodes="1", ntasks="1", time="2"))
-    id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_gen_synth_obs.py '+time.strftime('%Y-%m-%d_%H:%M'),
-               depends_on=[prev_id])
+    id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_gen_synth_obs.py '
+               +time.strftime('%Y-%m-%d_%H:%M'), depends_on=[depends_on])
 
     s = my_Slurm("gensynth", cfg_update=dict(nodes="1", time="20"))
     cmd = 'cd '+cluster.dartrundir+'; mpirun -np 24 ./perfect_model_obs'
@@ -98,102 +113,87 @@
     return id2
 
-def assimilate(assim_time, background_init_time, depends_on=None, **kwargs):
+def assimilate(assim_time, background_init_time,
+               first_guess=None, depends_on=None, **kwargs):
     prev_id = depends_on
 
+    if first_guess is None:
+        first_guess = cluster.archivedir()
+
     s = my_Slurm("preAssim", cfg_update=dict(nodes="1", ntasks="1", time="2"))
     id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_assim.py ' \
-               +assim_time.strftime('%Y-%m-%d_%H:%M ') \
-               +background_init_time.strftime('%Y-%m-%d_%H:%M'),
+               +assim_time.strftime('%Y-%m-%d_%H:%M ')
+               +background_init_time.strftime('%Y-%m-%d_%H:%M ')
+               +first_guess,
               depends_on=[prev_id])
 
     s = my_Slurm("Assim", cfg_update=dict(nodes="1", time="50", mem="200G"))
-    cmd = 'cd '+cluster.dartrundir+'; mpirun -genv I_MPI_PIN_PROCESSOR_LIST=0-47 -np 48 ./filter'
+    cmd = 'cd '+cluster.dartrundir+'; mpirun -np 48 ./filter'
     id2 = s.run(cmd, depends_on=[id])
 
     s = my_Slurm("archiveAssim", cfg_update=dict(nodes="1", ntasks="1", time="10"))
     id3 = s.run(cluster.python+' '+cluster.scriptsdir+'/archive_assim.py '
-                + time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id2])
+                + assim_time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id2])
 
     s = my_Slurm("updateIC", cfg_update=dict(nodes="1", ntasks="1", time="3"))
     id4 = s.run(cluster.python+' '+cluster.scriptsdir+'/update_wrfinput_from_filteroutput.py '
-                +time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id3])
+                +assim_time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id3])
     return id4
 
-def prep_initials_from_archive(time):
-    s = my_Slurm("pre_initial", cfg_update={"nodes": "1", "ntasks": "1", "time": "5",
-                 "mail-type": "BEGIN", "mail-user": "lukas.kugler@univie.ac.at"})
-    id = s.run(cluster.python+' '+cluster.scriptsdir+'/prep_osse.py')
-
-    s = my_Slurm("ideal", cfg_update={"nodes": "1", "time": "10", "mem-per-cpu": "2G"})
-    cmd = """# run ideal.exe in parallel, then add geodata
-export SLURM_STEP_GRES=none
-for ((n=1; n<="""+str(exp.n_ens)+"""; n++))
-do
-    rundir="""+cluster.userdir+'/run_WRF/'+exp.expname+"""/$n
-    echo $rundir
-    cd $rundir
-    mpirun -np 1 ./ideal.exe &
-done
-wait
-for ((n=1; n<="""+str(exp.n_ens)+"""; n++))
-do
-    rundir="""+cluster.userdir+'/run_WRF/'+exp.expname+"""/$n
-    mv $rundir/rsl.out.0000 $rundir/rsl.out.input
-done
-"""
-    id2 = s.run(cmd, depends_on=[id])
-
-    s = my_Slurm("upd_wrfinput", cfg_update={"nodes": "1", "ntasks": "1", "time": "5",
-                 "mail-type": "BEGIN", "mail-user": "lukas.kugler@univie.ac.at"})
-    id3 = s.run(cluster.python+' '+cluster.scriptsdir+'/update_wrfinput_from_wrfout.py '
-                +time.strftime('%Y-%m-%d_%H:%M'), depends_on=[id2])
-    return id3
-
 def mailme(depends_on=None):
     id = depends_on
     if id:
         s = my_Slurm("AllFinished", cfg_update={"nodes": "1", "ntasks": "1", "time": "1",
-                     "mail-type": "BEGIN", "mail-user": "lukas.kugler@univie.ac.at"})
+                     "mail-type": "BEGIN"})
         s.run('sleep 1', depends_on=[id])
 
 ################################
 print('starting osse')
 
-clear_logs(backup_existing_to_archive=True)
-
-#id = prep_osse() # create initial conditions
-id = None
-# spin up the ensemble
-#background_init_time = dt.datetime(2008, 7, 30, 6, 0)
-#integration_end_time = dt.datetime(2008, 7, 30, 11, 0)
-#id = run_ENS(begin=background_init_time,
-#             end=integration_end_time,
-#             depends_on=id)
-background_init_time = dt.datetime(2008, 7, 30, 13, 0)
-time = dt.datetime(2008, 7, 30, 13, 15) #integration_end_time
+clear_logs(backup_existing_to_archive=True)
 
-#id = prep_initials_from_archive(time)
+is_new_run = True
+if is_new_run:
+    id = prepare_wrfinput()  # create initial conditions
+
+    # spin up the ensemble
+    background_init_time = dt.datetime(2008, 7, 30, 6, 0)
+    integration_end_time = dt.datetime(2008, 7, 30, 10, 0)
+    id = run_ENS(begin=background_init_time,
+                 end=integration_end_time,
+                 depends_on=id)
+    time = integration_end_time
+else:
+    # get initial conditions from archive
+    background_init_time = dt.datetime(2008, 7, 30, 10, 45)
+    time = dt.datetime(2008, 7, 30, 11, 0)
+    exppath_arch = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.11_LMU_filter'
+    first_guess = exppath_arch
+    id = update_wrfinput_from_archive(time, background_init_time, exppath_arch)
 
 # now, start the ensemble data assimilation cycles
 timedelta_integrate = dt.timedelta(minutes=15)
 timedelta_btw_assim = dt.timedelta(minutes=15)
 
-while time < dt.datetime(2008, 7, 30, 16, 15):
+while time < dt.datetime(2008, 7, 30, 14, 15):
     assim_time = time
     id = gen_synth_obs(assim_time, depends_on=id)
-    id = assimilate(assim_time, 
+    id = assimilate(assim_time,
                     background_init_time,
+                    first_guess=first_guess,
                     depends_on=id)
+    # first_guess = None #
+
     background_init_time = assim_time  # start integration from here
     integration_end_time = assim_time + timedelta_integrate
-    id = run_ENS(begin=background_init_time, 
-                 end=integration_end_time, 
+    id = run_ENS(begin=background_init_time,
+                 end=integration_end_time,
                  depends_on=id)
-    
+
     time += timedelta_btw_assim
 
 mailme(id)
diff --git a/scripts/archive_assim.py b/scripts/archive_assim.py
index 7bc7c0ad854ddac4c235bda6f55454de075775e1..dbb88d90af66cb3d964a142f018091e01c9f0993 100755
--- a/scripts/archive_assim.py
+++ b/scripts/archive_assim.py
@@ -9,25 +9,34 @@ from utils import symlink, copy_scp_srvx8, copy, mkdir, mkdir_srvx8, clean_wrfdir
 
 time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
 
-print('archive obs space diagnostics')
-savedir = cluster.archivedir()+'/obs_seq_final/'
-mkdir(savedir)
-copy(cluster.dartrundir+'/obs_seq.final', savedir+time.strftime('/%Y-%m-%d_%H:%M_obs_seq.final'))
+try:
+    print('archive obs space diagnostics')
+    savedir = cluster.archivedir()+'/obs_seq_final/'
+    mkdir(savedir)
+    copy(cluster.dartrundir+'/obs_seq.final', savedir+time.strftime('/%Y-%m-%d_%H:%M_obs_seq.final'))
+except Exception as e:
+    warnings.warn(str(e))
+
+try:
+    print('archive regression diagnostics')
+    savedir = cluster.archivedir()+'/reg_factor/'
+    mkdir(savedir)
+    copy(cluster.dartrundir+'/reg_diagnostics', savedir+time.strftime('/%Y-%m-%d_%H:%M_reg_diagnostics'))
+except Exception as e:
+    warnings.warn(str(e))
+
 
 print('archive model state')
 try:
     mkdir(cluster.archivedir())
 
-    # copy mean and sd to archive
-    for f in ['preassim_mean.nc', 'preassim_sd.nc',
-              'output_mean.nc', 'output_sd.nc']:
-        copy(cluster.dartrundir+'/'+f,
-             cluster.archivedir()+'/'+f[:-3]+time.strftime('_%Y-%m-%d_%H:%M:%S'))
-
-    print('copy members to archive')
+    print('copy prior posterior members to archive')
     for iens in range(1, exp.n_ens+1):
-        savedir = cluster.archivedir()+'/'+time.strftime('/%Y-%m-%d_%H:%M/')+str(iens)
+        savedir = cluster.archivedir()+time.strftime('/%Y-%m-%d_%H:%M/')+str(iens)
         mkdir(savedir)
+
+        copy(cluster.dartrundir+'/input.nml',
+             cluster.archivedir()+time.strftime('/%Y-%m-%d_%H:%M/input.nml'))
 
         filter_in = cluster.dartrundir+'/preassim_member_'+str(iens).zfill(4)+'.nc'
         filter_out = cluster.dartrundir+'/filter_restart_d01.'+str(iens).zfill(4)
@@ -35,5 +44,12 @@ try:
         copy(filter_in, savedir+time.strftime('/%Y-%m-%d_%H:%M_prior'))
         copy(filter_out, savedir+time.strftime('/%Y-%m-%d_%H:%M_posterior'))
 
+    # copy mean and sd to archive
+    print('copy preassim, postassim mean and sd')
+    for f in ['preassim_mean.nc', 'preassim_sd.nc',
+              'output_mean.nc', 'output_sd.nc']:
+        copy(cluster.dartrundir+'/'+f,
+             cluster.archivedir()+'/'+f[:-3]+time.strftime('_%Y-%m-%d_%H:%M:%S'))
+
 except Exception as e:
     warnings.warn(str(e))
diff --git a/scripts/pre_assim.py b/scripts/pre_assim.py
index 84ba96962827d144aa2946f555ce33096d20eee8..7f2667c9593360c0c6a087cba65cfae79a53d023 100755
--- a/scripts/pre_assim.py
+++ b/scripts/pre_assim.py
@@ -1,11 +1,11 @@
-
 import os, sys, shutil
 import datetime as dt
 from config.cfg import exp, cluster
-from utils import symlink, copy_scp_srvx8, copy
+from utils import symlink, copy_scp_srvx8, copy, sed_inplace
 
 assim_time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
 background_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
+exppath_firstguess = str(sys.argv[3])
 
 #if cluster.name != 'srvx8':
 #    copy = copy_scp_srvx8 # use scp
@@ -15,11 +15,11 @@ for iens in range(1, exp.n_ens+1):
     #wrfout_run = cluster.wrf_rundir(iens) + time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
 
     print('link wrfout file to DART background file')
-    wrfout_run = cluster.archivedir()+background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
+    wrfout_run = exppath_firstguess+background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
                  +str(iens)+'/'+assim_time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
     dart_ensdir = cluster.dartrundir+'/advance_temp'+str(iens)
     wrfout_dart = dart_ensdir+'/wrfout_d01'
-    
+
     os.makedirs(dart_ensdir, exist_ok=True)
     print('linking', wrfout_run, 'to', wrfout_dart)
     symlink(wrfout_run, wrfout_dart)
@@ -49,3 +49,5 @@ os.system('rm -rf '+cluster.dartrundir+'/output_mean*')
 os.system('rm -rf '+cluster.dartrundir+'/output_sd*')
 os.system('rm -rf '+cluster.dartrundir+'/perfect_output_*')
 
+print('replace measurement error with obs error for assimilation')  # FIXME !!! temporary only
+sed_inplace(cluster.dartrundir+'/obs_seq.out', '9.000000000000000E-004', '0.04')
diff --git a/scripts/pre_gen_synth_obs.py b/scripts/pre_gen_synth_obs.py
index a93ec175ac01e77a900b145ad2fb80d2814b7a02..7ccacf72e817db199c461244a22df57a09f9a462 100755
--- a/scripts/pre_gen_synth_obs.py
+++ b/scripts/pre_gen_synth_obs.py
@@ -1,4 +1,3 @@
-
 import os, sys, shutil
 import datetime as dt
 from config.cfg import exp, cluster
@@ -7,10 +6,10 @@ from utils import symlink, copy, sed_inplace, append_file
 time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
 
 # ensure correct input.nml
-copy(cluster.scriptsdir+'/templates/input.nml',
+copy(cluster.scriptsdir+'/../templates/input.nml',
      cluster.dartrundir+'/input.nml')
 sed_inplace(cluster.dartrundir+'/input.nml', '<n_ens>', str(int(exp.n_ens)))
-append_file(cluster.dartrundir+'/input.nml', cluster.scriptsdir+'/templates/obs_def_rttov.VIS.nml')
+append_file(cluster.dartrundir+'/input.nml', cluster.scriptsdir+'/../templates/obs_def_rttov.VIS.nml')
 
 # prepare observation file
 import create_obs_sat
@@ -26,5 +25,5 @@ shutil.copy(time.strftime(cluster.nature_wrfout),
 
 import wrfout_add_geo
 wrfout_add_geo.run(cluster.dartrundir+'/geo_em.d01.nc', cluster.dartrundir+'/wrfout_d01')
-# prepare wrfinput_d01 for DART (for dimensions of variable)
+# DART may need a wrfinput file as well, which serves as a template for dimension sizes
 symlink(cluster.dartrundir+'/wrfout_d01', cluster.dartrundir+'/wrfinput_d01')
diff --git a/scripts/prepare_dart.py b/scripts/prepare_dart.py
index 50d74dff6ab82148e7f85904321485bc5a8a4783..2e5a14aba7f668472c4d80b6ca4be77654aca2c9 100755
--- a/scripts/prepare_dart.py
+++ b/scripts/prepare_dart.py
@@ -1,4 +1,3 @@
-
 import os, sys
 from config.cfg import exp, cluster
 from utils import symlink
diff --git a/scripts/prepare_namelist.py b/scripts/prepare_namelist.py
index 4f8711670a80e38fb84a1d79e9ec1e5544787550..ea6fee41108862ef37dc6ed9b002be6f331a34f2 100755
--- a/scripts/prepare_namelist.py
+++ b/scripts/prepare_namelist.py
@@ -1,4 +1,3 @@
-
 import os, sys, shutil
 import datetime as dt
 
@@ -8,13 +7,14 @@ from utils import sed_inplace, copy, symlink
 
 def run(cluster, iens, begin, end):
     rundir = cluster.wrf_rundir(iens)
-    
+    print(rundir)
     copy(cluster.namelist, rundir+'/namelist.input')
 
     sed_inplace(rundir+'/namelist.input', '<dx>', str(int(exp.model_dx)))
     sed_inplace(rundir+'/namelist.input', '<timestep>', str(int(exp.timestep)))
-    
+
     archdir = cluster.archivedir()+begin.strftime('/%Y-%m-%d_%H:%M/'+str(iens)+'/')
+    print('namelist for run from', begin, end, 'output to', archdir)
     sed_inplace(rundir+'/namelist.input', '<archivedir>', archdir)
     os.makedirs(archdir, exist_ok=True)
@@ -32,6 +32,5 @@ if __name__ == '__main__':
     end = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
 
     print('prepare namelists for all ens members')
-    print('begin', begin, 'end', end)
     for iens in range(1, exp.n_ens+1):
         run(cluster, iens, begin, end)
diff --git a/scripts/prep_osse.py b/scripts/prepare_wrfinput.py
similarity index 95%
rename from scripts/prep_osse.py
rename to scripts/prepare_wrfinput.py
index cf59038f80bc196fc2d43956232d4a78dba6fccc..7ba119b5657524184442037389e24f7510198d87 100755
--- a/scripts/prep_osse.py
+++ b/scripts/prepare_wrfinput.py
@@ -1,7 +1,8 @@
 import os, sys, shutil
 import datetime as dt
 from config.cfg import exp, cluster
-from utils import symlink, copy_contents, copy
+from utils import symlink, copy, link_contents
+
 import prepare_namelist
 
 # archive configuration
diff --git a/scripts/run_obs_diag.py b/scripts/run_obs_diag.py
index dc1cd3d6916da24778f08f82eb1e374c5acc6b2c..b2abfa60aeb1455076124774803725660ff413dd 100644
--- a/scripts/run_obs_diag.py
+++ b/scripts/run_obs_diag.py
@@ -2,10 +2,10 @@ import os, sys, shutil, glob
 from config.cfg import exp, cluster
 from utils import symlink, copy, sed_inplace, append_file
 
-folder_obs_seq_final = '/home/fs71386/lkugler/data/sim_archive/exp_v1.10_LMU+shear_filter/obs_seq_final/'
-files = sorted(glob.glob(folder_obs_seq_final+'/*.final'))  # input for obs_diag program
+folder_obs_seq_final = '/home/fs71386/lkugler/data/sim_archive/exp_v1.11_LMU_filter/obs_seq_final/'
+files = sorted(glob.glob(folder_obs_seq_final+'/*.final'))  # input for obs_diag program
 
-rundir_program = '/home/fs71386/lkugler/DART/rundir-diagnostics/'
+rundir_program = '/home/fs71386/lkugler/data/DART-WRF/rundir/'
 fpath = rundir_program+'/obsdiag_inputlist.txt'
 
 print('writing', fpath)
@@ -19,24 +19,22 @@ with open(fpath, 'w') as f:
 
 print('ensure correct input.nml')
-copy(cluster.scriptsdir+'/templates/input.nml',
+copy(cluster.scriptsdir+'/../templates/input.nml',
     rundir_program+'/input.nml')  #cluster.dartrundir+'/input.nml')
 sed_inplace(rundir_program+'/input.nml', '<n_ens>', str(int(exp.n_ens)))
-append_file(rundir_program+'/input.nml', cluster.scriptsdir+'/templates/obs_def_rttov.VIS.nml')
+append_file(rundir_program+'/input.nml', cluster.scriptsdir+'/../templates/obs_def_rttov.VIS.nml')
 
 # run obs_diag
 print('running obs_diag program')
 os.chdir(rundir_program)
-symlink(cluster.dartrundir+'/obs_diag', rundir_program+'/obs_diag')
-os.system('./obs_diag >& obs_diag.log') 
+symlink(cluster.dart_srcdir+'/obs_diag', rundir_program+'/obs_diag')
+os.system('./obs_diag >& obs_diag.log')
 
-print('moving output to', cluster.archivedir()+'/obs_diag_output.nc') 
+print('moving output to', cluster.archivedir()+'/obs_diag_output.nc')
 copy(rundir_program+'/obs_diag_output.nc', cluster.archivedir()+'/obs_diag_output.nc')
 
 print('running obs_seq_to_netcdf program')
-symlink(cluster.dartrundir+'/obs_seq_to_netcdf', rundir_program+'/obs_diag_output.nc')
+symlink(cluster.dart_srcdir+'/obs_seq_to_netcdf', rundir_program+'/obs_diag_output.nc')
 os.system('./obs_seq_to_netcdf >& obs_seq_to_netcdf.log')
 
 print('moving output to', cluster.archivedir()+'/obs_seq_output.nc')
 copy(rundir_program+'/obs_diag_output.nc', cluster.archivedir()+'/obs_seq_output.nc')
-
-os.system('rm obs_seq_to_netcdf obs_diag')
diff --git a/scripts/update_wrfinput_from_filteroutput.py b/scripts/update_wrfinput_from_filteroutput.py
index 1964df2ff9ef374648c6d09d46bde785471ad861..fe9869b42cf21d21cc88907e2308eff9f5575924 100755
--- a/scripts/update_wrfinput_from_filteroutput.py
+++ b/scripts/update_wrfinput_from_filteroutput.py
@@ -1,5 +1,6 @@
 import os, sys, warnings
 import datetime as dt
+
 from config.cfg import exp, cluster
 from utils import symlink, copy_scp_srvx8, copy, mkdir, mkdir_srvx8, clean_wrfdir
 
diff --git a/scripts/update_wrfinput_from_wrfout.py b/scripts/update_wrfinput_from_wrfout.py
index 1d7ca762215aa96ec0d30c1d85b586d71056ad1f..38cd1475a8d86deda83a3b3acaebf7d885856cb5 100644
--- a/scripts/update_wrfinput_from_wrfout.py
+++ b/scripts/update_wrfinput_from_wrfout.py
@@ -1,21 +1,24 @@
-
 import os, sys, shutil
 import datetime as dt
+
 from config.cfg import exp, cluster
 from utils import symlink, copy_scp_srvx8, copy
 import prepare_namelist
 
-time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
+exppath = str(sys.argv[1])
+# time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
+# background_init_time = dt.datetime.strptime(sys.argv[2], '%Y-%m-%d_%H:%M')
 
 update_vars = ['Times', 'U', 'V', 'PH', 'T', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'PSFC', 'TSK']
 vars = ','.join(update_vars)
 
 for iens in range(1, exp.n_ens+1):
     print('update state in wrfinput wrfout file to DART background file')
-    wrfout = cluster.archivedir()+background_init_time.strftime('/%Y-%m-%d_%H:%M/') \
-             +str(iens)+'/'+time.strftime('/wrfout_d01_%Y-%m-%d_%H:%M:%S')
+    wrfout = exppath.replace('*iens*', str(iens))
     wrfin = cluster.wrf_rundir(iens)+'/wrfinput_d01'
     print('updating', wrfin, 'to state in', wrfout)
+    assert os.path.isfile(wrfout)
+
     # overwrite variables in wrfinput file
     os.system(cluster.ncks+' -A -v '+vars+' '+wrfout+' '+wrfin)
diff --git a/scripts/utils.py b/scripts/utils.py
index 97da46d56642707ee2c929f41fb3bc99e15992ff..238325dd4f7765a8717705ed3259ba6a6ee2cd99 100755
--- a/scripts/utils.py
+++ b/scripts/utils.py
@@ -1,6 +1,12 @@
-
 import os, sys, shutil, glob
 
-copy = shutil.copy
+#copy = shutil.copy
+
+def copy(src, dst):
+    try:
+        os.remove(dst)
+    except:
+        pass
+    shutil.copy(src, dst)
 
 def mkdir(path):
     os.system('mkdir -p '+path)
@@ -26,6 +32,10 @@ def symlink(src, dst):
         pass
     os.symlink(src, dst)
 
+def link_contents(src, dst):
+    for f in os.listdir(src):
+        symlink(src+'/'+f, dst+'/'+f)
+
 def copy_scp_srvx8(src, dst):
     os.system('scp '+src+' a1254888@srvx8.img.univie.ac.at:'+dst)
 
@@ -54,5 +64,3 @@ def sed_inplace(filename, pattern, repl):
 
 def append_file(f_main, f_gets_appended):
     os.system('cat '+f_gets_appended+' >> '+f_main)
-
-
diff --git a/scripts/wrfinput_add_geo.py b/scripts/wrfinput_add_geo.py
index cd7a60f85968b2bb47577578c29067e208b6a1b2..0f3aa5d92a0d59ed0b199b9512582095464f01db 100755
--- a/scripts/wrfinput_add_geo.py
+++ b/scripts/wrfinput_add_geo.py
@@ -1,6 +1,3 @@
-#!/home/fs71386/lkugler/miniconda3/envs/DART/bin/python
-#!/jetfs/home/lkugler/miniconda3/envs/DART/bin/python
-
 """Add geogrid data to wrfinput
 
 this is needed for DART, but not provided by ideal.exe
@@ -13,6 +10,7 @@ example call:
 """
 import os, sys
 import netCDF4 as nc
+
 from config.cfg import exp, cluster
 
 def run(geo_data_file, wrfinput_file):
diff --git a/scripts/wrfout_add_geo.py b/scripts/wrfout_add_geo.py
index a24bd90083d39933a8bebf45043cdae6bf066363..81dfb4c091dff5df343a256123a1315fd4bd5b9a 100755
--- a/scripts/wrfout_add_geo.py
+++ b/scripts/wrfout_add_geo.py
@@ -1,5 +1,3 @@
-#!/jetfs/home/lkugler/miniconda3/envs/DART/bin/python
-
 """Add geogrid data to wrfinput
 
 this is needed for DART, but not provided by ideal.exe
@@ -11,6 +9,7 @@ example call:
 """
 import os, sys
 import netCDF4 as nc
+
 from config.cfg import exp, cluster
 
 def run(geo_data_file, wrfout_file):
diff --git a/templates/input.nml b/templates/input.nml
index 4c32919637f22562dfb41ae622345f8b98e500d1..47ab5d8f65b8eae65f43566a4a91f79e610a315d 100644
--- a/templates/input.nml
+++ b/templates/input.nml
@@ -104,10 +104,10 @@
 # to ~half at 300 km, and ~0 at the edges of the area.
 &assim_tools_nml
    filter_kind = 1,
-   cutoff = 0.05,
+   cutoff = 0.005,
    sort_obs_inc = .false.,
    spread_restoration = .false.,
-   sampling_error_correction = .false.,
+   sampling_error_correction = .true.,
    adaptive_localization_threshold = -1,
    output_localization_diagnostics = .false.,
    localization_diagnostics_file = 'localization_diagnostics',
@@ -242,7 +242,7 @@
 # an internal structure that speeds up searches. don't change it
 # based on your grid size. nlon must be an odd number.
 &location_nml
-   horiz_dist_only = .false.,
+   horiz_dist_only = .true.,
    vert_normalization_pressure = 6666666.7,
    vert_normalization_height = 5000000.0,
    vert_normalization_level = 2666.7,
@@ -267,7 +267,7 @@
 &reg_factor_nml
    select_regression = 1,
    input_reg_file = "time_mean_reg",
-   save_reg_diagnostics = .false.,
+   save_reg_diagnostics = .true.,
    reg_diagnostics_file = "reg_diagnostics",
 /
 
@@ -276,7 +276,7 @@
 # tasks_per_node is set to match your hardware
 &ensemble_manager_nml
    layout = 2,
-   tasks_per_node = 16
+   tasks_per_node = 48
 /
 
 &obs_def_gps_nml
@@ -299,7 +299,7 @@
    obs_sequence_name = '',
    obs_sequence_list = 'obsdiag_inputlist.txt',
    first_bin_center = 2008, 7,30,11, 0, 0 ,
-   last_bin_center = 2008, 7,30,13, 0, 0 ,
+   last_bin_center = 2008, 7,30,16, 0, 0 ,
    bin_separation = 0, 0, 0, 0,15, 0 ,
    bin_width = 0, 0, 0, 0,15, 0 ,
    time_to_skip = 0, 0, 0, 0, 0, 0 ,
@@ -330,8 +330,8 @@
 /
 
 &obs_seq_to_netcdf_nml
-   obs_sequence_name = 'obs_seq.final',
-   obs_sequence_list = '',
+   obs_sequence_name = '',
+   obs_sequence_list = 'obsdiag_inputlist.txt',
    append_to_netcdf = .false.,
    lonlim1 = 0.0,
    lonlim2 = 360.0,