From 06b90bd753198b4c40693a077dbc7ee1db106d59 Mon Sep 17 00:00:00 2001
From: Stefano Serafin <serafin@jet01.img.univie.ac.at>
Date: Wed, 23 Apr 2025 16:40:05 +0200
Subject: [PATCH] fixed a bug preventing the correct loading of runs made with
 old code versions; added 1 figure for EGU2025 poster

---
 ENDA.py   | 38 +++++++++++++++++++-------------------
 PE_CBL.py | 36 +++++++++++++++++++++++++++++++++++-
 2 files changed, 54 insertions(+), 20 deletions(-)

diff --git a/ENDA.py b/ENDA.py
index e093fc4..29965c3 100644
--- a/ENDA.py
+++ b/ENDA.py
@@ -9,7 +9,7 @@ from copy import deepcopy
 
 # Own modules
 from PE_CBL_models import CBL
-from observations import *
+from utils import observations
 
 # TODO: comprehensive documentation is missing.
 
@@ -252,7 +252,7 @@ class cycle:
                  tspinup_assim,\
                  assimilation_interval,\
                  ocoord,\
-                 observations,\
+                 obs,\
                  obs_error_sdev_assimilate,\
                  localization_cutoff,\
                  inflation_rtps_alpha,\
@@ -277,7 +277,7 @@ class cycle:
         state_vector_length = nr.x0.size
         initial_state_vector = nr.x0
         xcoord = nr.state_coordinates
-        nobs = observations.shape[1]
+        nobs = obs.shape[1]
         npar = cbl_settings['parameter_number']
 
         # Special case: if you re-use a nature run in a experiment without
@@ -410,7 +410,7 @@ class cycle:
                 # Assimilate
                 if FILTER == 'EAKF':
                     if cbl_settings['do_parameter_estimation'] and cbl_settings['return_covariances_increments_and_innovations']:
-                        analysis,model_equivalents[k,:,:],cov_xx,cov_xy,cov_yy_dum,innov_dum,increm_dum = eakf(backgrounds[k,:,:],observations[k,:],
+                        analysis,model_equivalents[k,:,:],cov_xx,cov_xy,cov_yy_dum,innov_dum,increm_dum = eakf(backgrounds[k,:,:],obs[k,:],
                                     obs_error_sdev_assimilate,
                                     xcoord,
                                     ocoord[k,:],
@@ -422,14 +422,14 @@ class cycle:
                         cov_yy[k,:] = cov_yy_dum
                         innov[k,:] = innov_dum
                     else:
-                        analysis,model_equivalents[k,:,:] = eakf(backgrounds[k,:,:],observations[k,:],
+                        analysis,model_equivalents[k,:,:] = eakf(backgrounds[k,:,:],obs[k,:],
                                     obs_error_sdev_assimilate,
                                     xcoord,
                                     ocoord[k,:],
                                     dist,
                                     localization_cutoff=localization_cutoff)
                 elif FILTER == 'LETKF':
-                    analysis,model_equivalents[k,:,:] = letkf(backgrounds[k,:,:],observations[k,:],
+                    analysis,model_equivalents[k,:,:] = letkf(backgrounds[k,:,:],obs[k,:],
                                 obs_error_sdev_assimilate,
                                 xcoord,
                                 ocoord[k,:],
@@ -465,7 +465,7 @@ class cycle:
         self.backgrounds = backgrounds
         self.analyses = analyses
         self.obs_coordinates = ocoord
-        self.observations = observations
+        self.observations = obs
         self.model_equivalents = model_equivalents
         self.state_coordinates = xcoord
         self.obs_error_sdev_assimilate = obs_error_sdev_assimilate
@@ -583,7 +583,7 @@ class experiment:
             # and observation error variance depends on location (kk)
             state_coordinates = nr.zt
             truths = np.zeros((int(nassim),nobs))+np.nan
-            observations = np.zeros((int(nassim),nobs))+np.nan
+            obs = np.zeros((int(nassim),nobs))+np.nan
             for i in range(int(nassim)):
                 valid_time = (i+1)*self.assimilation_interval
                 assert(np.mod(self.assimilation_interval,nr.dt)==0), f'Assimilation interval must be a multiple of dt, got {self.assimilation_interval,nr.dt}'
@@ -602,23 +602,23 @@ class experiment:
                     # and you have bias instead of randomness
                     seed = self.rnseed+j*1000+i*100000
                     RNG = np.random.default_rng(seed=seed)
-                    observations[i,j] = truths[i,j]+\
+                    obs[i,j] = truths[i,j]+\
                         RNG.normal(0,self.obs_error_sdev_generate[j])
 
             # In this case the observations coordinates are the same at all analysis times,
             # so a 1-d vector. Expand it to 2d, with the same dimensions
             # as the observations vector
             if self.obs_coordinates.ndim == 1:
-                ocoords = self.obs_coordinates[None,:]+np.zeros(observations.shape)
+                ocoords = self.obs_coordinates[None,:]+np.zeros(obs.shape)
 
             # Randomize order of observations
             if self.randomize_obs:
-                observations,ocoords,truths = random_sort_observations(observations,ocoords,truths)
+                obs,ocoords,truths = observations.random_sort_observations(obs,ocoords,truths)
 
             # Store nature run, truths and observations
             self.nr = nr
             self.truths = truths
-            self.observations = observations
+            self.observations = obs
             self.obs_coordinates = ocoords
 
         ################################################################################################
@@ -626,8 +626,8 @@ class experiment:
         if not hasattr(self,'nr') and self.type == 'real':
 
             # Read observations and their coordinates
-            observations, ocoords = decode_observations(self.observation_files)
-            assert (observations.shape == ocoords.shape),\
+            obs, ocoords = observations.decode_observations(self.observation_files)
+            assert (obs.shape == ocoords.shape),\
                 f'observation and coordinate arrays must have the same size'
 
             # Consistency check on time coordinates
@@ -638,22 +638,22 @@ class experiment:
 
             # Randomize order of observations
             if self.randomize_obs:
-                observations,ocoords = random_sort_observations(observations,ocoords)
+                obs,ocoords = observations.random_sort_observations(obs,ocoords)
 
             # Throw away any observations in the spinup time before assimilations begin
             # Exclude the initial time too (so the +1 in the line below)
             nspinup = self.tspinup/self.assimilation_interval+1
-            observations = observations[int(nspinup):,:]
+            obs = obs[int(nspinup):,:]
             ocoords = ocoords[int(nspinup):,:]
 
             # Throw away any observations beyond tspinup+trun
-            observations = observations[:int(nassim),:]
+            obs = obs[:int(nassim),:]
             ocoords = ocoords[:int(nassim),:]
 
             # There is no nature run and no truth in this case.
             # The only thing you can store is the observations.
-            self.nobs = observations.shape[1]
-            self.observations = observations
+            self.nobs = obs.shape[1]
+            self.observations = obs
             self.obs_coordinates = ocoords
             self.obs_error_sdev_assimilate = np.ones(self.nobs)*self.obs_error_sdev_assimilate
             if self.obs_kinds == 'theta':
diff --git a/PE_CBL.py b/PE_CBL.py
index 0445b00..d9c09bb 100644
--- a/PE_CBL.py
+++ b/PE_CBL.py
@@ -50,7 +50,7 @@ if __name__ == '__main__':
     fig05 = False # CHECKED OK # Experiments with systematic model errors
     fig06 = False # CHECKED OK # Analysis increments in model error experiments
     fig07 = False # CHECKED OK # Experiment with error growth
-    fig08 = True # CHECKED OK # Perfect-obs experiments
+    fig08 = False # CHECKED OK # Perfect-obs experiments
     fig09 = False # CHECKED OK # DA diagnostics (profiles) for perfect-obs experiments
     fig10 = False # CHECKED OK # Comparison EAKF vs LETKF
     
@@ -70,6 +70,7 @@ if __name__ == '__main__':
     opt13 = False # CHECKED OK # More profiles of consistency diagnostics
     opt14 = False # CHECKED OK # More plots of consistency checks
     opt15 = False # CHECKED OK # O-B for experiments C1 and C2
+    opt16 = True  # CHECKED OK # Additional figure for EGU2025 poster
 
     # A test to check if results are exactly reproducible
     check_reproducibility = False
@@ -889,6 +890,39 @@ if __name__ == '__main__':
         fig.savefig('ominb.png',format='png',dpi=300)
         p.close(fig)        
 
+    if opt16:
+        [exp_B,exp_C,exp_D] = map(get_experiment, [workdir+'/da_exp_C1.json',
+                                        workdir+'/da_exp_C2.json',
+                                        workdir+'/da_exp_D2.json'])
+
+        fig, [ax1, ax2, ax3] = p.subplots(3,1,constrained_layout=True)
+        fig.set_size_inches(4,6)
+        #
+        ax1 = plot_CBL_PE(exp_B,None,ax=ax1,show_true_parameter=True)
+        ax1.set_title(r'a) Exp. B, $\sigma_{y^o}^{{gen}}=\sigma_{y^o}^{{assim}}=\sigma_{x^b}^{{init}}=0.1$ K + no countergradient',fontsize=10)
+        ax1.set_xlabel('Time (h)')
+        ax1.set_ylabel(r'$p$')
+        ax1.set_ylim([0,5])
+        #
+        ax2 = plot_CBL_PE(exp_C,None,ax=ax2,show_true_parameter=True)
+        ax2.set_title(r'b) Exp. C, $\sigma_{y^o}^{{gen}}=\sigma_{y^o}^{{assim}}=\sigma_{x^b}^{{init}}=0.1$ K + 50% bias in SHF',fontsize=10)
+        ax2.set_xlabel('Time (h)')
+        ax2.set_ylabel(r'$p$')
+        ax2.set_ylim([0,5])
+        #
+        ax3 = plot_CBL_PE(exp_D,None,ax=ax3,show_true_parameter=True)
+        ax3.set_title(r'c) Exp. D, $\sigma_{y^o}^{{gen}}=\sigma_{y^o}^{{assim}}=\sigma_{x^b}^{{init}}=1$ K + random errors',fontsize=10)
+        ax3.set_xlabel('Time (h)')
+        ax3.set_ylabel(r'$p$')
+        ax3.set_ylim([0,5])
+        #
+        ax1.set_yticks([0,1,2,3,4,5,6])
+        ax2.sharex(ax1)
+        ax3.sharex(ax1)
+        #
+        fig.savefig('fig_EGU.png',format='png',dpi=300)
+        p.close(fig)
+
     if check_reproducibility:
 
         def get_obs(f):
-- 
GitLab