Skip to content
Snippets Groups Projects
Commit 06b90bd7 authored by Stefano Serafin's avatar Stefano Serafin
Browse files

fixed a bug preventing the correct loading of runs made with old code...

fixed a bug preventing the correct loading of runs made with old code versions; added 1 figure for EGU2025 poster
parent 6b4be39d
No related branches found
No related tags found
No related merge requests found
...@@ -9,7 +9,7 @@ from copy import deepcopy ...@@ -9,7 +9,7 @@ from copy import deepcopy
# Own modules # Own modules
from PE_CBL_models import CBL from PE_CBL_models import CBL
from observations import * from utils import observations
# TODO: comprehensive documentation is missing. # TODO: comprehensive documentation is missing.
...@@ -252,7 +252,7 @@ class cycle: ...@@ -252,7 +252,7 @@ class cycle:
tspinup_assim,\ tspinup_assim,\
assimilation_interval,\ assimilation_interval,\
ocoord,\ ocoord,\
observations,\ obs,\
obs_error_sdev_assimilate,\ obs_error_sdev_assimilate,\
localization_cutoff,\ localization_cutoff,\
inflation_rtps_alpha,\ inflation_rtps_alpha,\
...@@ -277,7 +277,7 @@ class cycle: ...@@ -277,7 +277,7 @@ class cycle:
state_vector_length = nr.x0.size state_vector_length = nr.x0.size
initial_state_vector = nr.x0 initial_state_vector = nr.x0
xcoord = nr.state_coordinates xcoord = nr.state_coordinates
nobs = observations.shape[1] nobs = obs.shape[1]
npar = cbl_settings['parameter_number'] npar = cbl_settings['parameter_number']
# Special case: if you re-use a nature run in a experiment without # Special case: if you re-use a nature run in a experiment without
...@@ -410,7 +410,7 @@ class cycle: ...@@ -410,7 +410,7 @@ class cycle:
# Assimilate # Assimilate
if FILTER == 'EAKF': if FILTER == 'EAKF':
if cbl_settings['do_parameter_estimation'] and cbl_settings['return_covariances_increments_and_innovations']: if cbl_settings['do_parameter_estimation'] and cbl_settings['return_covariances_increments_and_innovations']:
analysis,model_equivalents[k,:,:],cov_xx,cov_xy,cov_yy_dum,innov_dum,increm_dum = eakf(backgrounds[k,:,:],observations[k,:], analysis,model_equivalents[k,:,:],cov_xx,cov_xy,cov_yy_dum,innov_dum,increm_dum = eakf(backgrounds[k,:,:],obs[k,:],
obs_error_sdev_assimilate, obs_error_sdev_assimilate,
xcoord, xcoord,
ocoord[k,:], ocoord[k,:],
...@@ -422,14 +422,14 @@ class cycle: ...@@ -422,14 +422,14 @@ class cycle:
cov_yy[k,:] = cov_yy_dum cov_yy[k,:] = cov_yy_dum
innov[k,:] = innov_dum innov[k,:] = innov_dum
else: else:
analysis,model_equivalents[k,:,:] = eakf(backgrounds[k,:,:],observations[k,:], analysis,model_equivalents[k,:,:] = eakf(backgrounds[k,:,:],obs[k,:],
obs_error_sdev_assimilate, obs_error_sdev_assimilate,
xcoord, xcoord,
ocoord[k,:], ocoord[k,:],
dist, dist,
localization_cutoff=localization_cutoff) localization_cutoff=localization_cutoff)
elif FILTER == 'LETKF': elif FILTER == 'LETKF':
analysis,model_equivalents[k,:,:] = letkf(backgrounds[k,:,:],observations[k,:], analysis,model_equivalents[k,:,:] = letkf(backgrounds[k,:,:],obs[k,:],
obs_error_sdev_assimilate, obs_error_sdev_assimilate,
xcoord, xcoord,
ocoord[k,:], ocoord[k,:],
...@@ -465,7 +465,7 @@ class cycle: ...@@ -465,7 +465,7 @@ class cycle:
self.backgrounds = backgrounds self.backgrounds = backgrounds
self.analyses = analyses self.analyses = analyses
self.obs_coordinates = ocoord self.obs_coordinates = ocoord
self.observations = observations self.observations = obs
self.model_equivalents = model_equivalents self.model_equivalents = model_equivalents
self.state_coordinates = xcoord self.state_coordinates = xcoord
self.obs_error_sdev_assimilate = obs_error_sdev_assimilate self.obs_error_sdev_assimilate = obs_error_sdev_assimilate
...@@ -583,7 +583,7 @@ class experiment: ...@@ -583,7 +583,7 @@ class experiment:
# and observation error variance depends on location (kk) # and observation error variance depends on location (kk)
state_coordinates = nr.zt state_coordinates = nr.zt
truths = np.zeros((int(nassim),nobs))+np.nan truths = np.zeros((int(nassim),nobs))+np.nan
observations = np.zeros((int(nassim),nobs))+np.nan obs = np.zeros((int(nassim),nobs))+np.nan
for i in range(int(nassim)): for i in range(int(nassim)):
valid_time = (i+1)*self.assimilation_interval valid_time = (i+1)*self.assimilation_interval
assert(np.mod(self.assimilation_interval,nr.dt)==0), f'Assimilation interval must be a multiple of dt, got {self.assimilation_interval,nr.dt}' assert(np.mod(self.assimilation_interval,nr.dt)==0), f'Assimilation interval must be a multiple of dt, got {self.assimilation_interval,nr.dt}'
...@@ -602,23 +602,23 @@ class experiment: ...@@ -602,23 +602,23 @@ class experiment:
# and you have bias instead of randomness # and you have bias instead of randomness
seed = self.rnseed+j*1000+i*100000 seed = self.rnseed+j*1000+i*100000
RNG = np.random.default_rng(seed=seed) RNG = np.random.default_rng(seed=seed)
observations[i,j] = truths[i,j]+\ obs[i,j] = truths[i,j]+\
RNG.normal(0,self.obs_error_sdev_generate[j]) RNG.normal(0,self.obs_error_sdev_generate[j])
# In this case the observations coordinates are the same at all analysis times, # In this case the observations coordinates are the same at all analysis times,
# so a 1-d vector. Expand it to 2d, with the same dimensions # so a 1-d vector. Expand it to 2d, with the same dimensions
# as the observations vector # as the observations vector
if self.obs_coordinates.ndim == 1: if self.obs_coordinates.ndim == 1:
ocoords = self.obs_coordinates[None,:]+np.zeros(observations.shape) ocoords = self.obs_coordinates[None,:]+np.zeros(obs.shape)
# Randomize order of observations # Randomize order of observations
if self.randomize_obs: if self.randomize_obs:
observations,ocoords,truths = random_sort_observations(observations,ocoords,truths) obs,ocoords,truths = observations.random_sort_observations(obs,ocoords,truths)
# Store nature run, truths and observations # Store nature run, truths and observations
self.nr = nr self.nr = nr
self.truths = truths self.truths = truths
self.observations = observations self.observations = obs
self.obs_coordinates = ocoords self.obs_coordinates = ocoords
################################################################################################ ################################################################################################
...@@ -626,8 +626,8 @@ class experiment: ...@@ -626,8 +626,8 @@ class experiment:
if not hasattr(self,'nr') and self.type == 'real': if not hasattr(self,'nr') and self.type == 'real':
# Read observations and their coordinates # Read observations and their coordinates
observations, ocoords = decode_observations(self.observation_files) obs, ocoords = observations.decode_observations(self.observation_files)
assert (observations.shape == ocoords.shape),\ assert (obs.shape == ocoords.shape),\
f'observation and coordinate arrays must have the same size' f'observation and coordinate arrays must have the same size'
# Consistency check on time coordinates # Consistency check on time coordinates
...@@ -638,22 +638,22 @@ class experiment: ...@@ -638,22 +638,22 @@ class experiment:
# Randomize order of observations # Randomize order of observations
if self.randomize_obs: if self.randomize_obs:
observations,ocoords = random_sort_observations(observations,ocoords) obs,ocoords = observations.random_sort_observations(observations,ocoords)
# Throw away any observations in the spinup time before assimilations begin # Throw away any observations in the spinup time before assimilations begin
# Exclude the initial time too (so the +1 in the line below) # Exclude the initial time too (so the +1 in the line below)
nspinup = self.tspinup/self.assimilation_interval+1 nspinup = self.tspinup/self.assimilation_interval+1
observations = observations[int(nspinup):,:] obs = obs[int(nspinup):,:]
ocoords = ocoords[int(nspinup):,:] ocoords = ocoords[int(nspinup):,:]
# Throw away any observations beyond tspinup+trun # Throw away any observations beyond tspinup+trun
observations = observations[:int(nassim),:] obs = obs[:int(nassim),:]
ocoords = ocoords[:int(nassim),:] ocoords = ocoords[:int(nassim),:]
# There is no nature run and no truth in this case. # There is no nature run and no truth in this case.
# The only thing you can store is the observations. # The only thing you can store is the observations.
self.nobs = observations.shape[1] self.nobs = obs.shape[1]
self.observations = observations self.observations = obs
self.obs_coordinates = ocoords self.obs_coordinates = ocoords
self.obs_error_sdev_assimilate = np.ones(self.nobs)*self.obs_error_sdev_assimilate self.obs_error_sdev_assimilate = np.ones(self.nobs)*self.obs_error_sdev_assimilate
if self.obs_kinds == 'theta': if self.obs_kinds == 'theta':
......
...@@ -50,7 +50,7 @@ if __name__ == '__main__': ...@@ -50,7 +50,7 @@ if __name__ == '__main__':
fig05 = False # CHECKED OK # Experiments with systematic model errors fig05 = False # CHECKED OK # Experiments with systematic model errors
fig06 = False # CHECKED OK # Analysis increments in model error experiments fig06 = False # CHECKED OK # Analysis increments in model error experiments
fig07 = False # CHECKED OK # Experiment with error growth fig07 = False # CHECKED OK # Experiment with error growth
fig08 = True # CHECKED OK # Perfect-obs experiments fig08 = False # CHECKED OK # Perfect-obs experiments
fig09 = False # CHECKED OK # DA diagnostics (profiles) for perfect-obs experiments fig09 = False # CHECKED OK # DA diagnostics (profiles) for perfect-obs experiments
fig10 = False # CHECKED OK # Comparison EAKF vs LETKF fig10 = False # CHECKED OK # Comparison EAKF vs LETKF
...@@ -70,6 +70,7 @@ if __name__ == '__main__': ...@@ -70,6 +70,7 @@ if __name__ == '__main__':
opt13 = False # CHECKED OK # More profiles of consistency diagnostics opt13 = False # CHECKED OK # More profiles of consistency diagnostics
opt14 = False # CHECKED OK # More plots of consistency checks opt14 = False # CHECKED OK # More plots of consistency checks
opt15 = False # CHECKED OK # O-B for experiments C1 and C2 opt15 = False # CHECKED OK # O-B for experiments C1 and C2
opt16 = True # CHECKED OK # Additional figure for EGU2025 poster
# A test to check if results are exactly reproducible # A test to check if results are exactly reproducible
check_reproducibility = False check_reproducibility = False
...@@ -889,6 +890,39 @@ if __name__ == '__main__': ...@@ -889,6 +890,39 @@ if __name__ == '__main__':
fig.savefig('ominb.png',format='png',dpi=300) fig.savefig('ominb.png',format='png',dpi=300)
p.close(fig) p.close(fig)
if opt16:
    # Additional figure for the EGU2025 poster: a 3-panel column comparing
    # parameter estimation in three experiments (labelled B, C, D in the
    # panel titles; read from config files da_exp_C1/C2/D2 — the file names
    # and the display labels intentionally differ).
    [exp_B, exp_C, exp_D] = map(get_experiment, [workdir+'/da_exp_C1.json',
                                                 workdir+'/da_exp_C2.json',
                                                 workdir+'/da_exp_D2.json'])
    fig, [ax1, ax2, ax3] = p.subplots(3, 1, constrained_layout=True)
    fig.set_size_inches(4, 6)
    #
    # Panel a: well-specified errors, no countergradient term.
    ax1 = plot_CBL_PE(exp_B, None, ax=ax1, show_true_parameter=True)
    ax1.set_title(r'a) Exp. B, $\sigma_{y^o}^{{gen}}=\sigma_{y^o}^{{assim}}=\sigma_{x^b}^{{init}}=0.1$ K + no countergradient', fontsize=10)
    ax1.set_xlabel('Time (h)')
    ax1.set_ylabel(r'$p$')
    ax1.set_ylim([0, 5])
    #
    # Panel b: systematic model error (50% bias in surface heat flux).
    ax2 = plot_CBL_PE(exp_C, None, ax=ax2, show_true_parameter=True)
    ax2.set_title(r'b) Exp. C, $\sigma_{y^o}^{{gen}}=\sigma_{y^o}^{{assim}}=\sigma_{x^b}^{{init}}=0.1$ K + 50% bias in SHF', fontsize=10)
    ax2.set_xlabel('Time (h)')
    ax2.set_ylabel(r'$p$')
    ax2.set_ylim([0, 5])
    #
    # Panel c: larger random errors. (Fixed: title letter was duplicated
    # as 'b)' in the original; this is the third panel, so 'c)'.)
    ax3 = plot_CBL_PE(exp_D, None, ax=ax3, show_true_parameter=True)
    ax3.set_title(r'c) Exp. D, $\sigma_{y^o}^{{gen}}=\sigma_{y^o}^{{assim}}=\sigma_{x^b}^{{init}}=1$ K + random errors', fontsize=10)
    ax3.set_xlabel('Time (h)')
    ax3.set_ylabel(r'$p$')
    ax3.set_ylim([0, 5])
    #
    # NOTE(review): tick 6 lies outside ylim=[0,5]; kept as in the original,
    # but the extra tick is never drawn — confirm intent.
    ax1.set_yticks([0, 1, 2, 3, 4, 5, 6])
    # Share the time axis across panels so zoom/limits stay in sync.
    ax2.sharex(ax1)
    ax3.sharex(ax1)
    #
    fig.savefig('fig_EGU.png', format='png', dpi=300)
    p.close(fig)
if check_reproducibility: if check_reproducibility:
def get_obs(f): def get_obs(f):
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment