Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • dataassimilation/DART-WRF
1 result
Show changes
Commits on Source (3)
"""Cluster configuration file, see docstring of ClusterConfig class in dartwrf/utils.py for details"""
cluster_defaults = dict(
max_nproc = 16,
max_nproc_for_each_ensemble_member = 9,
max_nproc = 20,
max_nproc_for_each_ensemble_member = 16,
use_slurm = True,
# binaries
......@@ -33,7 +33,7 @@ cluster_defaults = dict(
WRF_exe_template = '/jetfs/home/lkugler/DART-WRF/templates/run_WRF.jet.sh',
WRF_ideal_template = '/jetfs/home/lkugler/DART-WRF/templates/run_WRF_ideal.sh',
slurm_kwargs = {"account": "lkugler", "partition": "hub",
slurm_kwargs = {"account": "lkugler", "partition": "all", "time": "00:30:00",
"ntasks": "1", "ntasks-per-core": "1", "mem": "30G",
"mail-type": "FAIL", "mail-user": "lukas.kugler@univie.ac.at"},
......
......@@ -7,10 +7,10 @@ import time as time_module
import datetime as dt
import numpy as np
from dartwrf.utils import Config, symlink, copy, try_remove, print, shell, write_txt, load_dict
from dartwrf.utils import Config, symlink, copy, try_remove, print, shell, write_txt, obskind_read
from dartwrf import wrfout_add_geo
from dartwrf.obs import error_models as err
from dartwrf.obs import obsseq, obskind_read
from dartwrf.obs import obsseq
from dartwrf import dart_nml
......
"""
To be able to generate obs_seq.in files, we need a dictionary to convert obs kinds to numbers
a) we read the obs kind definitions (obs_kind_mod.f90 from DART code)
b) we generate a python file with this dictionary
# Note: to include it in the documentation, the file needs to exist also in the repository
# (so the documentation generator SPHINX can read it)
"""
import os, sys
import shutil
def _dict_to_py(d, outfile):
    """Write a python dictionary to a .py file

    Args:
        d (dict): dictionary to write
        outfile (str): path to output file

    Returns:
        None
    """
    # header marks the generated file as autogenerated and opens the dict literal
    header = ('""" NOTE: This file is autogenerated! \n'
              'Use dartwrf/create_obskind_table.py to regenerate!\n'
              '"""\nobs_kind_nrs = {\n')
    # one '"KEY": value, ' entry per line, exactly as the original generator wrote it
    entries = ''.join('"' + key + '": ' + str(val) + ', \n'
                      for key, val in d.items())
    with open(outfile, 'w') as fobj:
        fobj.write(header + entries + '}')
def _save_config_to_scriptsdir(server_config, original_scripts_dir):
    """Copy the chosen cluster config file into the scripts directory.

    Args:
        server_config (str): filename of the config inside ../config/
        original_scripts_dir (str): destination directory; the file is
            written there as server_config.py

    Returns:
        None
    """
    here = os.path.dirname(os.path.realpath(__file__))
    source = here + '/../config/' + server_config
    destination = original_scripts_dir + '/server_config.py'
    try:
        shutil.copyfile(source, destination)
    except shutil.SameFileError:
        # source and destination are already the same file; nothing to do
        pass
def run(server_config='jet.py'):
    """Create obskind.py from obs_kind_mod.f90

    Args:
        server_config (str): filename of the cluster config in ../config/

    Returns:
        None
    """
    # directory of this script, usually /home/DART-WRF/dartwrf/
    original_scripts_dir = '/'.join(__file__.split('/')[:-1])

    # copy the original config into scripts_dir so it can be imported
    # as `server_config` after extending sys.path
    _save_config_to_scriptsdir(server_config, original_scripts_dir)
    sys.path.append(original_scripts_dir)
    from server_config import cluster
    from dartwrf import obs

    # parse DART's obs_kind_mod.f90 and dump the mapping as a python module
    obskind_dictionary = obs.obskind_read(cluster.dart_srcdir)
    outfile = original_scripts_dir + '/obs/obskind.py'
    _dict_to_py(obskind_dictionary, outfile)
    print('>>>', outfile, 'created')


if __name__ == '__main__':
    run(server_config='jet_ACF.py')
def obskind_read(dart_srcdir: str) -> dict:
    """Read dictionary of observation types + ID numbers ("kind")
    from DART f90 script and return it as python dictionary

    Args:
        dart_srcdir: path to the DART source work directory; obs_kind_mod.f90
            is located three directory levels up from it

    Returns:
        dict: mapping of obs type name (str) to DART kind number (int)

    Raises:
        ValueError: if the start-marker line is not found in obs_kind_mod.f90
            (previously this surfaced as an unhelpful NameError on i_start)
    """
    definitionfile = dart_srcdir + \
        '/../../../assimilation_code/modules/observations/obs_kind_mod.f90'
    with open(definitionfile, 'r') as f:
        kind_def_f = f.readlines()

    # find the marker line; the type definitions start below it
    i_start = None
    for i, line in enumerate(kind_def_f):
        if 'Integer definitions for DART OBS TYPES' in line:
            i_start = i
            break
    if i_start is None:
        raise ValueError(
            'Could not find obs type definitions in ' + definitionfile)

    obskind_nrs = {}
    for line in kind_def_f[i_start + 1:]:
        if 'MAX_DEFINED_TYPES_OF_OBS' in line:
            # end of data
            break
        if '::' in line:
            # a line looks like this
            # integer, parameter, public :: MSG_4_SEVIRI_TB = 261
            data = line.split('::')[-1].split('=')
            kind_str = data[0].strip()
            kind_nr = int(data[1].strip())
            obskind_nrs[kind_str] = kind_nr
    return obskind_nrs
\ No newline at end of file
......@@ -12,7 +12,6 @@ from pysolar.solar import get_altitude, get_azimuth
from dartwrf import utils
from dartwrf.utils import Config
from dartwrf.obs import calculate_obs_locations as col
from dartwrf.obs import obskind_read
# position on earth for RTTOV ray geometry
lat0 = 45.
......@@ -218,7 +217,7 @@ def create_obs_seq_in(cfg: Config, output_path='./obs_seq.in'):
os.makedirs(os.path.dirname(output_path), exist_ok=True)
obs_kind_nrs = obskind_read(cfg.dir_dart_src)
obs_kind_nrs = utils.obskind_read(cfg.dir_dart_src)
print('creating obs_seq.in:')
time_dt = _add_timezone_UTC(time_dt)
......
......@@ -3,10 +3,9 @@ import shutil
import glob
import warnings
from dartwrf.utils import Config, try_remove, print, shell, symlink, copy
from dartwrf.utils import Config, try_remove, print, shell, symlink, copy, obskind_read
import dartwrf.obs.create_obsseq_in as osi
from dartwrf.obs import obsseq, obskind_read
from dartwrf import assimilate as aso
from dartwrf.obs import obsseq
from dartwrf import wrfout_add_geo
......
......@@ -79,7 +79,7 @@ def run(cfg: Config) -> None:
try_remove(f_out)
nml.write(f_out)
print('saved', f_out)
#print('saved', f_out)
# copy to archive
init = start.strftime('/%Y-%m-%d_%H:%M/')
......
......@@ -365,3 +365,31 @@ def save_dict(dictionary, fpath):
def load_dict(fpath):
    """Unpickle and return the dictionary stored at *fpath* (counterpart of save_dict)."""
    with open(fpath, 'rb') as handle:
        return pickle.load(handle)
def obskind_read(dart_srcdir: str) -> dict:
    """Read dictionary of observation types + ID numbers ("kind")
    from DART f90 script and return it as python dictionary
    """
    definitionfile = (dart_srcdir
                      + '/../../../assimilation_code/modules/observations/obs_kind_mod.f90')
    with open(definitionfile, 'r') as fh:
        lines = fh.readlines()

    # locate the marker line; the definitions start on the following line
    for idx, text in enumerate(lines):
        if 'Integer definitions for DART OBS TYPES' in text:
            i_start = idx
            break

    mapping = {}
    for text in lines[i_start + 1:]:
        if 'MAX_DEFINED_TYPES_OF_OBS' in text:
            break  # end of data
        if '::' not in text:
            continue
        # a line looks like this
        # integer, parameter, public :: MSG_4_SEVIRI_TB = 261
        name_part, _, nr_part = text.split('::')[-1].partition('=')
        mapping[name_part.strip()] = int(nr_part.strip())
    return mapping
\ No newline at end of file
......@@ -95,7 +95,7 @@ class WorkFlows(object):
jobname = path_to_script.split('/')[-1]+'-'+cfg.f_cfg_current.split('/')[-1].replace('.py','')
print('> SLURM job:', jobname)
slurm_kwargs = cfg.slurm_kwargs
slurm_kwargs = cfg.slurm_kwargs.copy()
for key, value in kwargs.items():
slurm_kwargs[key] = value
......@@ -180,7 +180,7 @@ class WorkFlows(object):
).replace('<wrf_rundir_base>', cfg.dir_wrf_run
).replace('<wrf_modules>', cfg.wrf_modules,
)
id = self.run_job(cmd, cfg, depends_on=[depends_on])
id = self.run_job(cmd, cfg, depends_on=[depends_on], time="30")
return id
def run_WRF(self, cfg, depends_on=None):
......@@ -235,7 +235,7 @@ class WorkFlows(object):
)
return id
def prepare_IC_from_prior(self, cfg, depends_on=None):
def prepare_IC_from_prior(self, cfg: Config, depends_on=None):
"""Create initial conditions from prior wrfrst files
Args:
......@@ -252,8 +252,7 @@ class WorkFlows(object):
"""
path_to_script = self.dir_dartwrf_run + '/prep_IC_prior.py'
cmd = ' '.join([self.python, path_to_script, cfg.f_cfg_current])
id = self.run_job(cmd, cfg, depends_on=[depends_on])
id = self.run_job(cmd, cfg, depends_on=[depends_on], time="10")
return id
def update_IC_from_DA(self, cfg, depends_on=None):
......@@ -269,7 +268,7 @@ class WorkFlows(object):
path_to_script = self.dir_dartwrf_run + '/update_IC.py'
cmd = ' '.join([self.python, path_to_script, cfg.f_cfg_current])
id = self.run_job(cmd, cfg, depends_on=[depends_on])
id = self.run_job(cmd, cfg, depends_on=[depends_on], time="10")
return id
def create_satimages(self, cfg, depends_on=None):
......
......@@ -10,30 +10,29 @@ from config.defaults import dart_nml
# test multiple assimilation windows (11-12, 12-13, 13-14, )
timedelta_btw_assim = dt.timedelta(minutes=15)
assim_times_start = pd.date_range('2008-07-30 11:00', '2008-07-30 13:00', freq='h')
assim_times_start = pd.date_range('2008-07-30 11:00', '2008-07-30 13:00', freq='1h')
ensemble_size = 40
for t0 in assim_times_start:
dart_nml['&filter_nml'].update(num_output_state_members=ensemble_size,
ens_size=ensemble_size)
vis = dict(
kind='MSG_4_SEVIRI_BDRF', sat_channel=1,
km_between_obs=12, skip_border_km=8.0,
error_generate=0.03, error_assimilate=0.06,
loc_horiz_km=20,
height=6000, loc_vert_km=6,
)
for t0 in assim_times_start:
id = None
ensemble_size = 3
dart_nml['&filter_nml'].update(num_output_state_members=ensemble_size,
ens_size=ensemble_size)
t_raso = dict(var_name='Temperature', unit='[K]',
kind='RADIOSONDE_TEMPERATURE',
obs_locations=[(45., 0.)],
error_generate=0.2, error_assimilate=0.6,
heights=range(1000, 15001, 500),
loc_horiz_km=100, loc_vert_km=3)
cfg = Config(name='test_exp',
cfg = Config(name=t0.strftime('exp_nat250_VIS_obs12_loc20_oe2_inf4-0.5_%H%M'),
model_dx = 2000,
ensemble_size = ensemble_size,
dart_nml = dart_nml,
geo_em_forecast = '/jetfs/home/lkugler/data/sim_archive/geo_em.d01.nc.2km_200x200',
time = dt.datetime(2008, 7, 30, 11),
time = t0,
use_existing_obsseq = False,
nature_wrfout_pattern = '/jetfs/home/lkugler/data/sim_archive/nat_250m_1600x1600x100/*/1/wrfout_d01_%Y-%m-%d_%H_%M_%S',
......@@ -43,30 +42,30 @@ for t0 in assim_times_start:
update_vars = ['U', 'V', 'W', 'THM', 'PH', 'MU', 'QVAPOR', 'QCLOUD', 'QICE', 'QSNOW', 'PSFC'],
input_profile = '/jetfs/home/lkugler/data/sim_archive/nat_250m_1600x1600x100/2008-07-30_08:00/1/input_sounding',
nature_exp_verif = 'nat_250m_blockavg2km',
assimilate_these_observations = [t_raso,],
assimilate_these_observations = [vis,],
**cluster_defaults, # type: ignore
)
# test multiple assimilation windows (11-12, 12-13, 13-14, )
timedelta_btw_assim = dt.timedelta(minutes=15)
#pd.date_range('2008-07-30 11:00', '2008-07-30 13:00', freq='h')
w = WorkFlows(cfg)
w.prepare_WRFrundir(cfg)
#id = w.run_ideal(cfg, depends_on=id)
# assimilate at these times
time0 = cfg.time
assim_times = pd.date_range(time0, time0 + timedelta_btw_assim, freq=timedelta_btw_assim)
assim_times = pd.date_range(time0, time0 + dt.timedelta(hours=1), freq=timedelta_btw_assim)
last_assim_time = assim_times[-1]
# loop over assimilations
for i, t in enumerate(assim_times):
if i == 0:
if t == dt.datetime(2008, 7, 30, 11):
prior_init_time = dt.datetime(2008, 7, 30, 8)
else:
prior_init_time = t - dt.timedelta(minutes=15)
cfg.update(time = t,
prior_init_time = dt.datetime(2008, 7, 30, 8),
prior_init_time = prior_init_time,
prior_valid_time = t,
prior_path_exp = '/jetfs/home/lkugler/data/sim_archive/exp_nat250m_noDA/',)
else:
......@@ -84,13 +83,17 @@ for t0 in assim_times_start:
# 2) Update posterior += updates from assimilation
id = w.update_IC_from_DA(cfg, depends_on=id)
# How long shall we integrate?
timedelta_integrate = dt.timedelta(minutes=5)
if t == last_assim_time:
restart_interval = 9999
timedelta_integrate = dt.timedelta(hours=4)
else:
restart_interval = timedelta_btw_assim.total_seconds()/60 # in minutes
timedelta_integrate = dt.timedelta(minutes=15)
cfg.update( WRF_start=t,
WRF_end=t+timedelta_integrate,
restart=True,
restart_interval=9999,
restart_interval=restart_interval,
hist_interval_s=300,
)
......
......@@ -17,7 +17,7 @@ def test_dartwrf_cycled_da():
dart_nml['&filter_nml'].update(num_output_state_members=ensemble_size,
ens_size=ensemble_size)
t_raso = dict(var_name='Temperature', unit='[K]',
kind='RADIOSONDE_TEMPERATURE',
obs_locations=[(45., 0.)],
......