Commit d3a7f481 authored by lkugler

.

parent 4e718d1f
@@ -14,7 +14,7 @@ if __name__ == "__main__":
     id = None

-    if False: # warm bubble
+    if True: # warm bubble
         prior_path_exp = '/jetfs/scratch/lkugler/data/sim_archive/exp_v1.19_P3_wbub7_noDA'
         init_time = dt.datetime(2008, 7, 30, 12)
@@ -26,19 +26,18 @@ if __name__ == "__main__":
         # id = w.run_ideal(depends_on=id)
         # id = w.wrfinput_insert_wbubble(depends_on=id)

-    if True: # random
+    if False: # random
         prior_path_exp = '/jetfs/scratch/lkugler/data/sim_archive/exp_v1.19_P2_noDA'
-        init_time = dt.datetime(2008, 7, 30, 12)
-        time = dt.datetime(2008, 7, 30, 13)
-        last_assim_time = dt.datetime(2008, 7, 30, 13)
-        forecast_until = dt.datetime(2008, 7, 30, 13, 15)
+        init_time = dt.datetime(2008, 7, 30, 7)
+        time = dt.datetime(2008, 7, 30, 12)
+        last_assim_time = dt.datetime(2008, 7, 30, 14)
+        forecast_until = dt.datetime(2008, 7, 30, 18)

         w.prepare_WRFrundir(init_time)
         # id = w.run_ideal(depends_on=id)

     # prior_path_exp = w.cluster.archivedir
-    # prior_path_exp = '/gpfs/data/fs71386/lkugler/sim_archive/exp_v1.19_P5+su_noDA'
     prior_init_time = init_time
     prior_valid_time = time
@@ -69,10 +68,12 @@ if __name__ == "__main__":
     id = w.run_ENS(begin=time, # start integration from here
                    end=time + timedelta_integrate, # integrate until here
                    output_restart_interval=output_restart_interval,
+                   first_minutes=True,
                    depends_on=id)

     # as we have WRF output, we can use own exp path as prior
     prior_path_exp = w.cluster.archivedir
+    # prior_path_exp = '/jetfs/scratch/lkugler/data/sim_archive/exp_v1.19_P2_noDA/'

     id_sat = w.create_satimages(time, depends_on=id)
@@ -84,3 +85,16 @@ if __name__ == "__main__":
     w.verify_sat(id_sat)
     w.verify_wrf(id)
+
+    # assim_times = [dt.datetime(2008,7,30,12,30), ]
+    # time range from 12:30 to 13:30 every 15 minutes
+    assim_times = [dt.datetime(2008,7,30,12,30) + dt.timedelta(minutes=15*i) for i in range(5)]
+
+    tuples = []
+    for init in assim_times:
+        for s in range(30, 3*60+1, 30):
+            tuples.append((init, init + dt.timedelta(seconds=s)))
+
+    # evaluate the forecast from +30 s to +3 min after each assimilation time
+    w.evaluate_obs_posterior_after_analysis(tuples, depends_on=id)
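For orientation, a minimal standalone sketch (standard library only) of what the added loop builds: each of the five assimilation times is paired with verification times from +30 s to +3 min in 30 s steps, giving 30 (init, valid) tuples in total.

import datetime as dt

assim_times = [dt.datetime(2008, 7, 30, 12, 30) + dt.timedelta(minutes=15*i) for i in range(5)]
tuples = [(init, init + dt.timedelta(seconds=s))
          for init in assim_times
          for s in range(30, 3*60 + 1, 30)]

print(len(tuples))   # 30 pairs: 5 assimilation times x 6 lead times each
print(tuples[0])     # 12:30 paired with 12:30:30
print(tuples[-1])    # 13:30 paired with 13:33:00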
@@ -4,14 +4,17 @@ exp = utils.Experiment()
 exp.expname = "template_experiment"
 exp.model_dx = 2000
 exp.n_ens = 40
-exp.superob_km = False # False or int (spatial averaging of observations)
+exp.superob_km = False # False or int (spatial averaging of observations, unit: km)
+exp.do_quality_control = False # bool

-exp.use_existing_obsseq = False # False or pathname (use precomputed obs_seq.out files)
-#exp.use_existing_obsseq = '/jetfs/home/lkugler/data/sim_archive//exp_v1.18_P1_nature+1//obs_seq_out/%Y-%m-%d_%H:%M_obs_seq.out'
+# Use existing observations (path to obs_seq.out) or generate observations (False)
+exp.use_existing_obsseq = False
+#exp.use_existing_obsseq = '/users/students/lehre/advDA_s2023/dartwrf_tutorial/very_cold_observation.out'

-# path to the nature run, where we take observations from
-exp.nature_expname = 'exp_v1.18_P1_nature+1'
+# path from where observations can be generated
+exp.nature_wrfout_pattern = '/jetfs/home/lkugler/data/sim_archive/exp_v1.19_P3_wbub7_nat/*/1/wrfout_d01_%Y-%m-%d_%H:%M:%S'

+# for initialization profiles
 exp.input_profile = '/mnt/jetfs/home/lkugler/data/initial_profiles/wrf/ens/2022-03-31/raso.fc.<iens>.wrfprof'
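The new exp.nature_wrfout_pattern mixes a glob wildcard with strftime placeholders. Below is a minimal sketch of how such a pattern can be resolved for one observation time; the resolution step shown here is an assumption for illustration, not necessarily how DART-WRF expands it internally.

import datetime as dt
from glob import glob

pattern = '/jetfs/home/lkugler/data/sim_archive/exp_v1.19_P3_wbub7_nat/*/1/wrfout_d01_%Y-%m-%d_%H:%M:%S'

obs_time = dt.datetime(2008, 7, 30, 12, 30)
resolved = obs_time.strftime(pattern)  # fills the %Y-%m-%d_%H:%M:%S part, keeps the '*' wildcard
print(resolved)
print(glob(resolved))                  # matching nature wrfout files (empty if the path is unavailable)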
@@ -73,9 +76,9 @@ wv62 = dict(var_name='Brightness temperature WV 6.2µm', unit='[K]',
 wv73 = dict(var_name='Brightness temperature WV 7.3µm', unit='[K]',
             kind='MSG_4_SEVIRI_TB', sat_channel=6,
-            n_obs=256, obs_locations='square_array_evenly_on_grid',
-            error_generate=1., error_assimilate=1.,
-            loc_horiz_km=10)
+            n_obs=961, obs_locations='square_array_evenly_on_grid',
+            error_generate=1., error_assimilate=3.,
+            loc_horiz_km=20)

 ir108 = dict(var_name='Brightness temperature IR 10.8µm', unit='[K]',
              kind='MSG_4_SEVIRI_TB', sat_channel=9,
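The WV 7.3 µm channel is now assimilated with a larger assumed error (3 K) than the 1 K noise used to generate the synthetic observations. As a reminder of how these standard deviations typically end up in an obs_seq file, here is a tiny sketch assuming DART's convention of storing the observation error as a variance (illustration only).

error_generate = 1.0    # K, stddev of noise added when creating synthetic observations
error_assimilate = 3.0  # K, stddev assumed by the filter during assimilation

# under the assumed convention, obs_seq stores the squared error (variance)
variance_generate = error_generate ** 2      # 1.0 K^2
variance_assimilate = error_assimilate ** 2  # 9.0 K^2
print(variance_generate, variance_assimilate)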
@@ -99,22 +102,25 @@ t = dict(var_name='Temperature', unit='[K]',
          loc_horiz_km=50, loc_vert_km=2.5)

 q = dict(var_name='Specific humidity', unit='[kg/kg]',
-         kind='RADIOSONDE_SPECIFIC_HUMIDITY', n_obs=1,
+         kind='RADIOSONDE_SPECIFIC_HUMIDITY',
+         n_obs=961, obs_locations='square_array_evenly_on_grid',
          error_generate=0., error_assimilate=5*1e-5,
          heights=[1000], #range(1000, 17001, 2000),
          loc_horiz_km=0.1, loc_vert_km=2.5)

 t2m = dict(var_name='SYNOP Temperature', unit='[K]',
            kind='SYNOP_TEMPERATURE',
-           n_obs=256, obs_locations='square_array_evenly_on_grid',
-           error_generate=0.3, error_assimilate=0.3,
-           loc_horiz_km=10, loc_vert_km=2)
+           n_obs=961, obs_locations='square_array_evenly_on_grid',
+           error_generate=0.1, error_assimilate=0.1,
+           loc_horiz_km=40, loc_vert_km=2.5)

 psfc = dict(var_name='SYNOP Pressure', unit='[Pa]',
-            kind='SYNOP_SURFACE_PRESSURE', n_obs=1,
+            kind='SYNOP_SURFACE_PRESSURE',
+            n_obs=961, obs_locations='square_array_evenly_on_grid',
             error_generate=50., error_assimilate=100.,
             loc_horiz_km=32, loc_vert_km=5)

-exp.observations = [wv73]
+exp.observations = [vis]

 exp.update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'PSFC']
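Every observation type now uses n_obs=961 with obs_locations='square_array_evenly_on_grid', i.e. a 31 x 31 array of stations. A minimal sketch of such a layout follows; the 200 km domain extent and the exact placement rule are assumptions for illustration, not DART-WRF's implementation.

import numpy as np

n_obs = 961
n_side = int(round(np.sqrt(n_obs)))   # 31 observations per row and column
assert n_side**2 == n_obs

domain_km = 200.0                                  # hypothetical domain extent
coords = np.linspace(0.0, domain_km, n_side)       # evenly spaced positions along one axis
x, y = np.meshgrid(coords, coords)
obs_locations = np.column_stack([x.ravel(), y.ravel()])
print(obs_locations.shape)                         # (961, 2) -> one (x, y) pair per observation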
@@ -132,7 +132,7 @@ userhome = os.path.expanduser('~')
 def shell(args):
     print(args)
     #subprocess.run(args.split(' ')) #, shell=True) #, stderr=subprocess.STDOUT)
-    os.system(args)
+    return os.system(args)

 def print(*args):
     __builtin__.print(*args, flush=True)
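Since shell() now returns os.system's exit status, callers can check whether a command succeeded. A small usage sketch; the error handling is illustrative, not part of the commit.

import os

def shell(args):          # as in the patched helper: return the exit status
    print(args)
    return os.system(args)

status = shell('ls /nonexistent/path')
if status != 0:
    # os.system returns 0 on success and a non-zero status otherwise
    raise RuntimeError('command failed with status %s' % status)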