diff --git a/dartwrf/create_obs_upfront.py b/dartwrf/create_obs_upfront.py
deleted file mode 100755
index c2ffc99ca2c46ef8dcf6c11d892fd130b3ca77f1..0000000000000000000000000000000000000000
--- a/dartwrf/create_obs_upfront.py
+++ /dev/null
@@ -1,68 +0,0 @@
-from multiprocessing.sharedctypes import Value
-import os, sys, shutil, warnings
-import time as time_module
-import datetime as dt
-import numpy as np
-
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
-from dartwrf.utils import copy, print
-import dartwrf.obs.create_obsseq_in as osq
-from dartwrf.obs import obsseq
-from dartwrf import assim_synth_obs as aso
-
-tformat = '%Y-%m-%d_%H:%M'
-
-
-if __name__ == "__main__":
-    """Create observations for multiple times upfront
-    """
-
-    args = sys.argv[1:]
-    times = []
-    for tstr in args:
-        times.append(dt.datetime.strptime(tstr, tformat))
-
-    # strange path?
-    # dir_for_obsseqout = exp.nature_wrfout + '/../../../obs_seq_out/'+exp.use_existing_obsseq
-    raise NotImplementedError('where to save obsseq to?')
-    dir_for_obsseqout = '' # TODO: solve this when necessary
-    print('will save obsseq to', dir_for_obsseqout)
-    os.makedirs(dir_for_obsseqout, exist_ok=True)
-
-    os.chdir(cluster.dart_rundir)
-
-    # link DART binaries to run_DART
-    os.system(cluster.python + " " + cluster.scripts_rundir + "/link_dart_rttov.py")
-
-    # remove any existing observation files
-    os.system("rm -f input.nml obs_seq.in obs_seq.out obs_seq.out-orig obs_seq.final")
-    aso.set_DART_nml()
-
-    for time in times:
-        print('obsseq for time', time)
-
-        aso.prepare_nature_dart(time) # link WRF files to DART directory
-
-        osq.create_obsseqin_alltypes(time, exp.observations) # obs_seq.in
-
-        aso.run_perfect_model_obs(nproc=6) # create observations (obs_seq.out)
-
-        oso = obsseq.ObsSeq(cluster.dart_rundir + "/obs_seq.out")
-
-        if True: # set reflectance < surface albedo to surface albedo
-            print(" 2.2) removing obs below surface albedo ")
-            if_vis_obs = oso.df['kind'].values == 262
-            if_obs_below_surface_albedo = oso.df['observations'].values < 0.2928
-
-            oso.df.loc[if_vis_obs & if_obs_below_surface_albedo, ('observations')] = 0.2928
-            oso.to_dart(f=cluster.dart_rundir + "/obs_seq.out")
-
-        if getattr(exp, "superob_km", False):
-            print(" 2.3) superobbing to", exp.superob_km, "km")
-            oso.df = oso.df.superob(window_km=exp.superob_km)
-            copy(cluster.dart_rundir + "/obs_seq.out", cluster.dart_rundir + "/obs_seq.out-orig")
-            oso.to_dart(f=cluster.dart_rundir + "/obs_seq.out")
-
-        aso.archive_osq_out(time, dir_obsseq=dir_for_obsseqout)
-
diff --git a/dartwrf/evaluate_posterior.py b/dartwrf/evaluate_posterior.py
deleted file mode 100755
index 320d1aaae0fbf7c3ddd62eab94ca1294302fed91..0000000000000000000000000000000000000000
--- a/dartwrf/evaluate_posterior.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import os, sys, shutil, warnings
-import time as time_module
-import datetime as dt
-import numpy as np
-
-from dartwrf.exp_config import exp
-from dartwrf.server_config import cluster
-from dartwrf import assimilate as aso
-
-
-if __name__ == "__main__":
-
-    assim_time = dt.datetime.strptime(sys.argv[1], "%Y-%m-%d_%H:%M")
-
-    print(" computing posterior observations ")
-    aso.write_list_of_inputfiles_posterior(assim_time)
-
-    # prepare an obsseq without rejected observations
-    if exp.use_existing_obsseq:  # from another exp
-        oso_input = assim_time.strftime(exp.use_existing_obsseq)
-    else:  # from same exp
-        oso_input = cluster.archivedir+'/obs_seq_out' + assim_time.strftime("/%Y-%m-%d_%H:%M_obs_seq.out-beforeQC")
-        if not os.path.isfile(oso_input):
-            oso_input = cluster.archivedir+'/obs_seq_out' + assim_time.strftime("/%Y-%m-%d_%H:%M_obs_seq.out")
-    shutil.copy(oso_input, cluster.dart_rundir+'/obs_seq.out')
-
-    aso.evaluate(assim_time, output_format="%Y-%m-%d_%H:%M_obs_seq.final-evaluate")
\ No newline at end of file
diff --git a/evaluate_plus1.py b/evaluate_plus1.py
deleted file mode 100755
index f7f772fe615b30741f381682091d2ef7dd04b7dc..0000000000000000000000000000000000000000
--- a/evaluate_plus1.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/python3
-"""
-Evaluate the forecast in observation space one minute after the analysis time.
-"""
-import datetime as dt
-from dartwrf.workflows import WorkFlows
-
-w = WorkFlows(exp_config='exp_nonlin.py', server_config='jet.py')
-id = None
-
-assim_times = [dt.datetime(2008,7,30,12), ]
-               # dt.datetime(2008,7,30,12,30),
-               # dt.datetime(2008,7,30,13),
-               # dt.datetime(2008,7,30,13,30),
-               # dt.datetime(2008,7,30,14),]
-
-# generate observations at +1 minute after the assimilation time
-
-tuples = []
-for init in assim_times:
-    for s in range(30,3*60+1,30):
-        tuples.append((init, init+dt.timedelta(seconds=s)))
-
-w.evaluate_obs_posterior_after_analysis(tuples, depends_on=id)
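
For reference, the only non-trivial logic removed with `create_obs_upfront.py` above is the clipping of visible-reflectance observations to the surface albedo before superobbing. The following is a minimal standalone sketch of that step, not code from the repository: it assumes an observation table with `kind` and `observations` columns as in `dartwrf.obs.obsseq.ObsSeq.df`, and reuses the DART kind 262 and the albedo value 0.2928 from the deleted script; the function name `clip_vis_to_surface_albedo` is purely illustrative.

```python
# Standalone illustration (assumptions noted above), mirroring the step removed
# from create_obs_upfront.py: raise VIS reflectance observations that fall below
# the surface albedo up to the albedo value.
import pandas as pd

SURFACE_ALBEDO = 0.2928  # lower bound for visible reflectance (value from the deleted script)
VIS_KIND = 262           # DART observation kind used for visible reflectance in the deleted script


def clip_vis_to_surface_albedo(df: pd.DataFrame) -> pd.DataFrame:
    """Return a copy in which VIS observations below the surface albedo are raised to it."""
    out = df.copy()
    is_vis = out['kind'] == VIS_KIND
    below = out['observations'] < SURFACE_ALBEDO
    out.loc[is_vis & below, 'observations'] = SURFACE_ALBEDO
    return out


if __name__ == "__main__":
    # tiny example: one VIS value below the albedo, one above, and one non-VIS observation
    demo = pd.DataFrame({'kind': [262, 262, 118],
                         'observations': [0.10, 0.55, 291.0]})
    print(clip_vis_to_surface_albedo(demo))
    # the 0.10 reflectance is raised to 0.2928; the other rows are unchanged
```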