diff --git a/config/cfg.py b/config/cfg.py
index acadce5092dbe7ecc8993ba401a0e64388ecf0e4..65672a428afa296800a06ed80ee4ab3a73fe53c0 100755
--- a/config/cfg.py
+++ b/config/cfg.py
@@ -1,4 +1,4 @@
-
+import numpy as np
 from . import clusters
 cluster = clusters.vsc # change cluster configuration here
 
@@ -31,7 +31,7 @@ radar = dict(kind='RADAR', n_obs=n_obs, err_std=5.,
 t2m = dict(kind='SYNOP_TEMPERATURE', n_obs=n_obs, err_std=1.0,
            cov_loc_radius_km=32, cov_loc_vert_km=1)
 psfc = dict(kind='SYNOP_SURFACE_PRESSURE', n_obs=n_obs, err_std=50.,
-            cov_loc_radius_km=32)
+            cov_loc_radius_km=32, cov_loc_vert_km=5)
 
 exp.observations = [t2m, psfc, ]
 
diff --git a/scheduler.py b/scheduler.py
index 1ccd1ab58ee1b3bfcdb927c3299b683ba7526000..0f0a6bc0ba24d74bc23be60dc8c9eae852ffe453 100755
--- a/scheduler.py
+++ b/scheduler.py
@@ -43,19 +43,25 @@ def backup_scripts():
     old_a = main_a+'/old/'
 
     os.makedirs(cluster.archivedir(), exist_ok=True)
-    os.makedirs main_a, exist_ok=True)
-    os.makedirs , exist_ok=True)
+    os.makedirs(main_a, exist_ok=True)
+    os.makedirs(old_a, exist_ok=True)
+
+    def func(a, b, method): # call method if not link or directory
+        if os.path.islink(a) or os.path.isdir(a):
+            pass
+        else:
+            method(a, b)
 
     # archive existing files
     for f in os.listdir(main_a):
-        shutil.move(os.path.join(main_a, f), old_a+'/')
+        func(os.path.join(main_a, f), old_a+'/'+f, shutil.move)
 
     # reproducibility
     for f in ['scheduler.py', 'config/clusters.py', 'config/cfg.py']:
-        shutil.copy(current+'/../'+f, main_a+'/scheduler.py')
+        func(current+'/../'+f, main_a+'/'+os.path.basename(f), shutil.copy)
     for f in os.listdir(current):
-        shutil.copy(os.path.join(current, f), main_a+'/')
+        func(os.path.join(current, f), main_a+'/', shutil.copy)
 
 
 def prepare_wrfinput():
     """Create WRF/run directories and wrfinput files
@@ -112,8 +118,8 @@ def run_ENS(begin, end, depends_on=None):
                        cluster.scriptsdir+'/prepare_namelist.py',
                        begin.strftime('%Y-%m-%d_%H:%M'),
                        begin_plus1.strftime('%Y-%m-%d_%H:%M'),
-                       str(hist_interval), str(radt),])
-                       depends_on=[id])
+                       str(hist_interval), str(radt),]),
+                       depends_on=[id])
 
     s = my_Slurm("runWRF1", cfg_update={"nodes": "1", "array": "1-"+str(exp.n_nodes),
                  "time": "2", "mem-per-cpu": "2G"})
@@ -135,7 +141,7 @@ def run_ENS(begin, end, depends_on=None):
                        cluster.scriptsdir+'/prepare_namelist.py',
                        begin.strftime('%Y-%m-%d_%H:%M'),
                        begin_plus1.strftime('%Y-%m-%d_%H:%M'),
-                       str(hist_interval), str(radt),])
+                       str(hist_interval), str(radt),]),
                        depends_on=[id])
 
     time_in_simulation_hours = (end-begin).total_seconds()/3600
@@ -164,8 +170,8 @@ def assimilate(assim_time, prior_init_time,
         if False: use `archivedir` (defined in config) to get prior state
         if str: use this directory to get prior state
     """
-    if prior_path_exp == False:
-        prior_expdir = cluster.archivedir()
+    if not prior_path_exp:
+        prior_path_exp = cluster.archivedir()
     elif not isinstance(prior_path_exp, str):
         raise TypeError('prior_path_exp either str or False, is '+str(type(prior_path_exp)))
 
@@ -173,7 +179,7 @@ def assimilate(assim_time, prior_init_time,
     #s = my_Slurm("prepNature", cfg_update=dict(time="2"))
     #id = s.run(cluster.python+' '+cluster.scriptsdir+'/prepare_nature.py '
     #           +time.strftime('%Y-%m-%d_%H:%M'), depends_on=[depends_on])
-    
+
     # prepare prior model state
     s = my_Slurm("preAssim", cfg_update=dict(time="2"))
     id = s.run(cluster.python+' '+cluster.scriptsdir+'/pre_assim.py '
@@ -243,13 +249,13 @@ elif start_from_existing_state:
 
     # get initial conditions from archive
     init_time = dt.datetime(2008, 7, 30, 6)
-    valid_time = dt.datetime(2008, 7, 30, 10)
+    time = dt.datetime(2008, 7, 30, 10)
     exppath_arch = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.11_LMU_filter'
-    id = update_wrfinput_from_archive(valid_time, init_time, exppath_arch, depends_on=id)
+    id = update_wrfinput_from_archive(time, init_time, exppath_arch, depends_on=id)
 
 
 # values for assimilation
-assim_time = valid_time
+assim_time = time
 prior_init_time = init_time
 prior_path_exp = exppath_arch
 
diff --git a/scripts/apply_obs_op_dart.py b/scripts/apply_obs_op_dart.py
index ba4d18ec655badcf4ba7506deed61e27fad84d7e..084e127ac103b29c5057e4aebf9d422095eacbe1 100644
--- a/scripts/apply_obs_op_dart.py
+++ b/scripts/apply_obs_op_dart.py
@@ -9,23 +9,22 @@ import create_obsseq as osq
 from gen_synth_obs import read_prior_obs, set_input_nml
 import pre_assim
 
-def run_operator(time):
+def run_operator(obscfg, time):
     """
     time_for_dart (dt.datetime) : needs to be consistent with wrfout files!
     """
 
-    # assume only 1 obstype for now
-    obscfg = exp.observations[0]
-
     # get observation file (obs not important, but their locations)
     # this should correspond to configuration to have same locations as in real assim
     obs_seq_all_out = cluster.dartrundir + '/obs_seq_all.out'
 
     os.chdir(cluster.dartrundir)
+    n_obs = obscfg['n_obs']
     error_var = (obscfg['err_std'])**2
     sat_channel = obscfg.get('channel', False)
     cov_loc = obscfg['cov_loc_radius_km']
     dist_obs = obscfg.get('distance_between_obs_km', False)
+
     obs_coords = osq.calc_obs_locations(n_obs, coords_from_domaincenter=False,
                                         distance_between_obs_km=dist_obs,
                                         fpath_obs_locations=None)
@@ -55,8 +54,10 @@ def run_operator(time):
     # copy output to archive
     savedir = cluster.archivedir()+'/obs_seq_final_1min/'
     mkdir(savedir)
-    copy(cluster.dartrundir+'/obs_seq.final',
-         savedir+time.strftime('/%Y-%m-%d_%H:%M_obs_seq.final'))
+    obsname = obscfg.get('kind', 'satch'+str(obscfg.get('channel', '')))
+    fout = time.strftime('/%Y-%m-%d_%H:%M_'+obsname+'_obs_seq.final')
+    copy(cluster.dartrundir+'/obs_seq.final', savedir+fout)
+    print('output of observation operator saved to', fout)
 
 
 if __name__ == '__main__':
@@ -69,4 +70,5 @@ if __name__ == '__main__':
     pre_assim.run(time_to_run_fw_op, prev_forecast_init, exppath_firstguess)
 
     # run fwd operator, save to archive
-    run_operator(time_to_run_fw_op)
+    for i_obs, obscfg in enumerate(exp.observations):
+        run_operator(obscfg, time_to_run_fw_op)
diff --git a/scripts/gen_synth_obs.py b/scripts/gen_synth_obs.py
index 6a0ddba2aac7861ae49d9a0cdc8349d9fa9ff3f0..12a462807be3a0c343f181a3aa9b64ba3d5585fa 100755
--- a/scripts/gen_synth_obs.py
+++ b/scripts/gen_synth_obs.py
@@ -115,12 +115,15 @@ def set_input_nml(sat_channel=False, just_prior_values=False,
 
     # input.nml for RTTOV
     if sat_channel > 0:
-        if sat_channel in [1, 2, 3, 12]:
+        if sat_channel in [1, 2, 3, 12]: # VIS channels
            rttov_nml = cluster.scriptsdir+'/../templates/obs_def_rttov.VIS.nml'
-        else:
+        else: # IR channels
            rttov_nml = cluster.scriptsdir+'/../templates/obs_def_rttov.IR.nml'
        append_file(cluster.dartrundir+'/input.nml', rttov_nml)
-
+    else:
+        # append any rttov segment, needs to exist anyway
+        rttov_nml = cluster.scriptsdir+'/../templates/obs_def_rttov.IR.nml'
+        append_file(cluster.dartrundir+'/input.nml', rttov_nml)
 
 def obs_operator_ensemble():
     os.chdir(cluster.dartrundir)
@@ -162,8 +165,9 @@ if __name__ == "__main__":
     time = dt.datetime.strptime(sys.argv[1], '%Y-%m-%d_%H:%M')
     fpath_obs_coords = cluster.archivedir()+time.strftime('/%Y-%m-%d_%H:%M/obs_coords.pkl')
 
-    # remove any existing observation files
-    os.chdir(cluster.dartrundir); os.system('rm -f obs_seq_*.out obs_seq.in obs_seq.final')
+
+    os.chdir(cluster.dartrundir)
+    os.system('rm -f obs_seq_*.out obs_seq.in obs_seq.final') # remove any existing observation files
 
     def prepare_nature_dart():
         # get wrfout_d01 from nature run