diff --git a/dartwrf/calc_linear_posterior.py b/dartwrf/calc_linear_posterior.py
new file mode 100755
index 0000000000000000000000000000000000000000..76e5a17cc447182dd294149717b87507f412c33b
--- /dev/null
+++ b/dartwrf/calc_linear_posterior.py
@@ -0,0 +1,72 @@
+import os, sys, shutil, warnings, glob
+import datetime as dt
+
+from dartwrf.exp_config import exp
+from dartwrf.server_config import cluster
+from dartwrf import assimilate as aso
+from dartwrf import dart_nml
+from dartwrf.obs import create_obsseq_out as osq_out
+
+pattern_osf_linear = cluster.archivedir + "/diagnostics/%Y-%m-%d_%H:%M_obs_seq.final-linear"  # how an obs_seq.final file is archived
+
+def calc_lin_posterior(assim_time):
+    """Run filter to get the linear, optimal-interpolation posterior in obs-space
+
+    Args:
+        assim_time (datetime): assimilation time; the prior experiment path and
+            prior init time are read from the archived link_to_prior.txt
+
+    Returns:
+        None
+    """
+    # what is the prior for this assim time?
+    # read link_to_prior
+    link_to_prior = cluster.archivedir + assim_time.strftime('/%Y-%m-%d_%H:%M/')+'link_to_prior.txt'
+    with open(link_to_prior) as f:
+        lines = [l.strip() for l in f.readlines()]
+        prior_exp = lines[0]
+        prior_init_time = dt.datetime.strptime(lines[1].strip('/'), '%Y-%m-%d_%H:%M')
+
+    aso.prepare_prior_ensemble(assim_time, prior_init_time=prior_init_time, prior_valid_time=assim_time, prior_path_exp=prior_exp)
+    #aso.prepare_nature_dart(assim_time)
+
+    dart_nml.write_namelist()
+
+    # does an observation exist at this time?
+    #f_oso = assim_time.strftime(aso.pattern_obs_seq_out)
+    f_oso = cluster.archivedir+assim_time.strftime("/diagnostics/%Y-%m-%d_%H:%M/assim_stage0/obs_seq.out")
+
+    if os.path.exists(f_oso):
+        # use the existing file
+        shutil.copy(f_oso, cluster.dart_rundir+'/obs_seq.out')
+    else:
+        raise NotImplementedError(f_oso+' does not exist!')
+
+    # link in the linear filter version
+    from dartwrf.utils import symlink
+    path_linear_filter = '/jetfs/home/lkugler/data/compile/DART/DART-10.8.3-kalmanposterior/models/wrf/work/filter'
+    symlink(path_linear_filter,
+            os.path.join(cluster.dart_rundir, 'filter'))
+
+    aso.filter(nproc=12)
+    aso.archive_filter_diagnostics(assim_time, pattern_osf_linear)
+
+
+if __name__ == "__main__":
+    """Compute the linear (optimal-interpolation) posterior in observation space for the given assimilation times.
+
+    Note: Observations are not assimilated into the model state; this is only for obs-space diagnostics.
+
+    Usage: python3 calc_linear_posterior.py assim_time1 assim_time2 ...  (times formatted as %Y-%m-%d_%H:%M)
+ """ + args = sys.argv[1:] + #arg_tuples = [a.split(',') for a in args] + + # we need an existing run_DART folder + aso.prepare_run_DART_folder() + + for assim_time in args: + #prior_init_time = dt.datetime.strptime(prior_init_time, "%Y-%m-%d_%H:%M") + assim_time = dt.datetime.strptime(assim_time, "%Y-%m-%d_%H:%M") + calc_lin_posterior(assim_time) + \ No newline at end of file diff --git a/dartwrf/evaluate_obs_space.py b/dartwrf/evaluate_obs_space.py index 1d1197147f2895738a989016e624904e9338392b..7f393853cbafa2cfbe618556b55e4d91dc6e8507 100755 --- a/dartwrf/evaluate_obs_space.py +++ b/dartwrf/evaluate_obs_space.py @@ -3,40 +3,49 @@ import datetime as dt from dartwrf.exp_config import exp from dartwrf.server_config import cluster -from dartwrf import assim_synth_obs as aso +from dartwrf import assimilate as aso +from dartwrf import dart_nml +from dartwrf.obs import create_obsseq_out as osq_out def evaluate_one_time(assim_time, valid_time, use_other_obsseq=False): """Evaluate the ensemble forecast in observation space at a given time, apart from the analysis time. Args: assim_time (datetime): initialization time of the forecast - valid_time (datetime): valid_time of the forecast + valid_time (datetime): valid_time of the forecast (e.g. +1 minute after assim_time) Returns: None """ - aso.prepare_nature_dart(valid_time) + aso.prepare_run_DART_folder() aso.prepare_prior_ensemble(valid_time, prior_init_time=assim_time, prior_valid_time=valid_time, prior_path_exp=cluster.archivedir) + dart_nml.write_namelist() # does an observation exist at this time? + f_oso = valid_time.strftime(aso.pattern_obs_seq_out) f_oso = cluster.archivedir+valid_time.strftime("/obs_seq_out/%Y-%m-%d_%H:%M:%S_obs_seq.out") if os.path.exists(f_oso): # use the existing file shutil.copy(f_oso, cluster.dart_rundir+'/obs_seq.out') else: - # use an old obsseq file and - f_oso = cluster.archivedir+assim_time.strftime("/obs_seq_out/%Y-%m-%d_%H:%M:%S_obs_seq.out") - - from dartwrf.obs import obsseq - oso = obsseq.ObsSeq(f_oso) - - # overwrite obs/truth values with "missing value" - oso.df['observations'] = -888888.0 - oso.df['truth'] = -888888.0 - oso.to_dart(cluster.dart_rundir+'/obs_seq.out') - - aso.evaluate(valid_time, output_format="%Y-%m-%d_%H:%M:%S_obs_seq.final-evaluate") + if False: + # use an old obsseq file and overwrite obs/truth values with "missing value" + f_oso = cluster.archivedir+valid_time.strftime("/obs_seq_out/%Y-%m-%d_%H:%M_obs_seq.out") + + from dartwrf.obs import obsseq + oso = obsseq.ObsSeq(f_oso) + + # overwrite obs/truth values with "missing value" + oso.df['observations'] = -888888.0 + oso.df['truth'] = -888888.0 + oso.to_dart(cluster.dart_rundir+'/obs_seq.out') + else: + # generate the observations for the specified valid_time + aso.prepare_nature_dart(valid_time) + osq_out.generate_obsseq_out(valid_time) + + aso.evaluate(valid_time, f_out_pattern=cluster.archivedir + "/diagnostics/%Y-%m-%d_%H:%M:%S_obs_seq.final-evaluate") if __name__ == "__main__": diff --git a/tests/test_assim.py b/tests/test_assim.py index 7f61e27ee4c8bb929023db89c1883121d1dc0731..c977e5f14c869f4e5140b461df25f20b037f90e7 100644 --- a/tests/test_assim.py +++ b/tests/test_assim.py @@ -1,39 +1,84 @@ import os, shutil -import datetime as dt -from dartwrf import assim_synth_obs -from dartwrf.obs import obsseq -from dartwrf.server_config import cluster +def test_dartwrf_cycled_da(): + import datetime as dt + + from dartwrf.workflows import WorkFlows + w = WorkFlows(exp_config='exp_template.py', server_config='jet.py') -class 
-    def __init__(self):
-        pass
+    timedelta_integrate = dt.timedelta(minutes=15)
+    timedelta_btw_assim = dt.timedelta(minutes=15)
 
-exp = ExperimentConfiguration()
-exp.expname = "test"
+    id = None
 
-obscfg_wv73 = dict(plotname='Brightness temperature WV 7.3µm', plotunits='[K]',
-                   kind='MSG_4_SEVIRI_TB', sat_channel=6, n_obs=4,
-                   error_generate=1., error_assimilate=False,
-                   cov_loc_radius_km=32)
-exp.observations = [obscfg_wv73]
+    if True:  # random
+        # need some ensemble data to test
+        prior_path_exp = '/jetfs/scratch/lkugler/data/sim_archive/exp_v1.19_P2_noDA+1'
 
-time = dt.datetime(2008,7,30,12)
+        init_time = dt.datetime(2008, 7, 30, 12, 30)
+        time = dt.datetime(2008, 7, 30, 13)
+        last_assim_time = dt.datetime(2008, 7, 30, 13)
+        forecast_until = dt.datetime(2008, 7, 30, 13,10)
 
+    w.prepare_WRFrundir(init_time)
+    # id = w.run_ideal(depends_on=id)
+
+    prior_init_time = init_time
+
+    while time <= last_assim_time:
+
+        # usually we take the prior from the current time
+        # but one could use a prior from a different time from another run
+        # i.e. 13z as a prior to assimilate 12z observations
+        prior_valid_time = time
+
+        id = w.assimilate(time, prior_init_time, prior_valid_time, prior_path_exp, depends_on=id)
+
+        # 1) Set posterior = prior
+        id = w.prepare_IC_from_prior(prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
+
+        # 2) Update posterior += updates from assimilation
+        id = w.update_IC_from_DA(time, depends_on=id)
+
+        # How long shall we integrate?
+        timedelta_integrate = timedelta_btw_assim
+        output_restart_interval = timedelta_btw_assim.total_seconds()/60
+        if time == last_assim_time:  # this_forecast_init.minute in [0,]:  # longer forecast every full hour
+            timedelta_integrate = forecast_until - last_assim_time  # dt.timedelta(hours=4)
+            output_restart_interval = 9999  # no restart file after last assim
+
+        # 3) Run WRF ensemble
+        id = w.run_ENS(begin=time,  # start integration from here
+                       end=time + timedelta_integrate,  # integrate until here
+                       output_restart_interval=output_restart_interval,
+                       first_minutes=False,
+                       depends_on=id)
+
+        # as we have WRF output, we can use own exp path as prior
+        prior_path_exp = w.cluster.archivedir
+
+        time += timedelta_btw_assim
+        prior_init_time = time - timedelta_btw_assim
+
 
 def test_overwrite_OE_assim():
+    import datetime as dt
+    from dartwrf import assimilate as aso
+    from dartwrf.obs import obsseq
+    from dartwrf.server_config import cluster
+
     # checks if modified entries are correctly written to DART files
-    input_template = cluster.scriptsdir + "/../tests/obs_seq.orig.out"
-    input_temporary = cluster.scriptsdir + "/../tests/obs_seq.out"
+    input_template = "./obs_seq.orig.out"
+    input_temporary = "./obs_seq.out"
     # TODO: MISSING FILE
-    # input_osf = cluster.scriptsdir + "/../tests/obs_seq.final"
-    output = cluster.scriptsdir + "/../tests/obs_seq.test.out"
+    # input_osf = "./obs_seq.final"
+    output = "./obs_seq.test.out"
 
     shutil.copy(input_template, input_temporary)
 
     oso = obsseq.ObsSeq(input_temporary)
-    osf = obsseq.ObsSeq(input_osf)
+    #osf = obsseq.ObsSeq(input_osf)
 
-    assim_synth_obs.set_obserr_assimilate_in_obsseqout(oso, osf, outfile=output)
+    aso.set_obserr_assimilate_in_obsseqout(oso, outfile=output)
 
     var_orig = oso.df['variance']
 
@@ -43,4 +88,6 @@ def test_overwrite_OE_assim():
     os.remove(input_temporary)
 
 if __name__ == '__main__':
-    test_overwrite_OE_assim()
+    # test_dartwrf_cycled_da()
+    # test_overwrite_OE_assim()
+    pass
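
For context, a minimal sketch of how the new calc_linear_posterior.py entry point could be driven, based only on the code added in this diff; it is not part of the change itself, and the example date and the contents of link_to_prior.txt are illustrative assumptions.

# Illustrative sketch only (not part of the diff): driving the new entry point
# programmatically, assuming the dartwrf package with its exp/server configs is
# importable and the archive layout matches the code above.
import datetime as dt

from dartwrf import assimilate as aso
from dartwrf.calc_linear_posterior import calc_lin_posterior

# calc_lin_posterior() reads <cluster.archivedir>/2008-07-30_13:00/link_to_prior.txt,
# which must contain two lines: the prior experiment path and the prior init time
# formatted as %Y-%m-%d_%H:%M (the time below is a hypothetical example).
aso.prepare_run_DART_folder()   # an existing run_DART folder is required
calc_lin_posterior(dt.datetime(2008, 7, 30, 13))

# Equivalent command-line call, matching the __main__ block of the new script:
#   python3 dartwrf/calc_linear_posterior.py 2008-07-30_13:00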