diff --git a/PE_CBL.py b/PE_CBL.py
index 7fd10112d570dfa8b69a0ddd5789c5a9a56087f3..77978b20d1dc077c77754d38ebeacf92b41fca6f 100644
--- a/PE_CBL.py
+++ b/PE_CBL.py
@@ -24,6 +24,14 @@ def load_or_run(settings):
 
     return exp
 
+def get_experiment(settings_file):
+
+    with open(settings_file, 'r') as fp:
+        settings = json.load(fp)
+    exp = load_or_run(settings)
+
+    return exp
+
 if __name__ == '__main__':
 
     workdir = "./runs/seed=181612_enssize=200_EAKF_6hrs/"
@@ -41,16 +49,16 @@ if __name__ == '__main__':
 
     ########################################################################
     # Manuscript figures
-    fig01 = True  # CHECKED OK # Sensitivity to p
+    fig01 = False # CHECKED OK # Sensitivity to p
     fig02 = False # CHECKED OK # Successful PE experiment
     fig03 = False # CHECKED OK # Consistency checks
     fig04 = False # CHECKED OK # Experiments with varying weights
     fig05 = False # CHECKED OK # Experiments with systematic model errors
     fig06 = False # CHECKED OK # Analysis increments in model error experiments
     fig07 = False # CHECKED OK # Experiment with error growth
-    fig08 = False # CHECKED OK # Comparison successful vs unsuccessful PE
-    fig09 = False # CHECKED OK # Comparison EAKF vs LETKF
-    fig10 = False # CHECKED OK # DA diagnostics (profiles) for perfect-obs experiments
+    fig08 = False # CHECKED OK # Perfect-obs experiments
+    fig09 = False # CHECKED OK # DA diagnostics (profiles) for perfect-obs experiments
+    fig10 = False # CHECKED OK # Comparison EAKF vs LETKF
 
     # Other figures
     opt01 = False # CHECKED OK # Assimilation of a single observation
@@ -65,6 +73,7 @@ if __name__ == '__main__':
     opt10 = False # CHECKED OK # Consistency checks
     opt11 = False # CHECKED OK # An experiment without any variance inflation
     opt12 = False # CHECKED OK # Old version of Figure 08
+    opt13 = False # CHECKED OK # More profiles of consistency diagnostics
 
     # A test to check if results are exactly reproducible
     check_reproducibility = False
@@ -173,57 +182,24 @@ if __name__ == '__main__':
 
     if fig02:
 
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_A1 = json.load(fp)
-        exp_A1 = load_or_run(da_settings_A1)
-
-        plot_overview(exp_A1,"A$_1$",'fig02.png')
+        exp_A1 = get_experiment(workdir+'/da_exp_A1.json')
+        plot_overview(exp_A1,"A",'fig02.png')
 
     if fig03:
 
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_A1 = json.load(fp)
-        exp_A1 = load_or_run(da_settings_A1)
-
-        with open(workdir+'/da_exp_A2.json', 'r') as fp:
-            da_settings_A2 = json.load(fp)
-        exp_A2 = load_or_run(da_settings_A2)
-
-        with open(workdir+'/da_exp_B1.json', 'r') as fp:
-            da_settings_B1 = json.load(fp)
-        exp_B1 = load_or_run(da_settings_B1)
-
-        with open(workdir+'/da_exp_B2.json', 'r') as fp:
-            da_settings_B2 = json.load(fp)
-        exp_B2 = load_or_run(da_settings_B2)
-
-        with open(workdir+'/da_exp_B3.json', 'r') as fp:
-            da_settings_B3 = json.load(fp)
-        exp_B3 = load_or_run(da_settings_B3)
-
-        with open(workdir+'/da_exp_B4.json', 'r') as fp:
-            da_settings_B4 = json.load(fp)
-        exp_B4 = load_or_run(da_settings_B4)
-
-        with open(workdir+'/da_exp_B4_noPE.json', 'r') as fp:
-            da_settings_B4_noPE = json.load(fp)
-        exp_B4_noPE = load_or_run(da_settings_B4_noPE)
-
-        with open(workdir+'/da_exp_C1.json', 'r') as fp:
-            da_settings_C1 = json.load(fp)
-        exp_C1 = load_or_run(da_settings_C1)
-
-        with open(workdir+'/da_exp_C2.json', 'r') as fp:
-            da_settings_C2 = json.load(fp)
-        exp_C2 = load_or_run(da_settings_C2)
-
-        with open(workdir+'/da_exp_D1.json', 'r') as fp:
-            da_settings_D1 = json.load(fp)
-        exp_D1 = load_or_run(da_settings_D1)
-
-        with open(workdir+'/da_exp_D2.json', 'r') as fp:
-            da_settings_D2 = json.load(fp)
-        exp_D2 = load_or_run(da_settings_D2)
+        [exp_A1,exp_A2,exp_B1,exp_B2,exp_B3,exp_B4,
+         exp_B4_noPE,exp_C1,exp_C2,exp_D1,exp_D2] = map(get_experiment,
+                [workdir+'/da_exp_A1.json',
+                 workdir+'/da_exp_A2.json',
+                 workdir+'/da_exp_B1.json',
+                 workdir+'/da_exp_B2.json',
+                 workdir+'/da_exp_B3.json',
+                 workdir+'/da_exp_B4.json',
+                 workdir+'/da_exp_B4_noPE.json',
+                 workdir+'/da_exp_C1.json',
+                 workdir+'/da_exp_C2.json',
+                 workdir+'/da_exp_D1.json',
+                 workdir+'/da_exp_D2.json'])
 
         # Plots: Scatter plots of Desroziers statistics
         #experiments1 = [exp_A1, exp_B1, exp_B2, exp_B3, exp_B4, exp_C1, exp_C2, exp_D1, exp_D2 ]
@@ -233,7 +209,7 @@ if __name__ == '__main__':
         #experiments1 = [exp_A1, exp_A2, exp_B1, exp_B4, exp_C1, exp_C2 ]
         #labels1 = [r'A$_1$', r'A$_2$',r'B$_1$', r'B$_4$', r'C$_1$', r'C$_2$' ]
         experiments1 = [exp_A1, exp_B1, exp_B4, exp_C1, exp_C2 ]
-        labels1 = [r'A$_1$', r'B$_1$', r'B$_4$', r'C$_1$', r'C$_2$' ]
+        labels1 = [r'A', r'B$_1$', r'B$_4$', r'C$_1$', r'C$_2$' ]
         experiments2 = [exp_B2, exp_B3, exp_D2 ]
         labels2 = [r'B$_2$', r'B$_3$', r'D' ]
 
@@ -244,8 +220,8 @@ if __name__ == '__main__':
         ax1 = plot_desroziers([i.dg for i in experiments1],labels=labels1,ax=ax1)
         ax2 = plot_desroziers([i.dg for i in experiments2],labels=labels2,ax=ax2)
 
-        ax1.set_title(r"a) Exp. with $\sigma_{y_o}^\mathsf{assim}=0.1$ K")
-        ax2.set_title(r"b) Exp. with $\sigma_{y_o}^\mathsf{assim}=1$ K")
+        ax1.set_title(r"a) Exp. with $\sigma_{y^o}^\mathsf{assim}=0.1$ K")
+        ax2.set_title(r"b) Exp. with $\sigma_{y^o}^\mathsf{assim}=1$ K")
 
         fig.tight_layout()
         fig.savefig('fig03.png',format='png',dpi=150)
@@ -253,21 +229,10 @@ if __name__ == '__main__':
 
     if fig04:
 
-        with open(workdir+'/da_exp_B1.json', 'r') as fp:
-            da_settings_B1 = json.load(fp)
-        exp_B1 = load_or_run(da_settings_B1)
-
-        with open(workdir+'/da_exp_B2.json', 'r') as fp:
-            da_settings_B2 = json.load(fp)
-        exp_B2 = load_or_run(da_settings_B2)
-
-        with open(workdir+'/da_exp_B3.json', 'r') as fp:
-            da_settings_B3 = json.load(fp)
-        exp_B3 = load_or_run(da_settings_B3)
-
-        with open(workdir+'/da_exp_B4.json', 'r') as fp:
-            da_settings_B4 = json.load(fp)
-        exp_B4 = load_or_run(da_settings_B4)
+        [exp_B1,exp_B2,exp_B3,exp_B4] = map(get_experiment, [workdir+'/da_exp_B1.json',
+                                                             workdir+'/da_exp_B2.json',
+                                                             workdir+'/da_exp_B3.json',
+                                                             workdir+'/da_exp_B4.json'])
 
         fig, [[ax1, ax2], [ax3, ax4]] = p.subplots(2,2,constrained_layout=True)
         fig.set_size_inches(6,4)
@@ -302,13 +267,8 @@ if __name__ == '__main__':
 
     if fig05:
 
-        with open(workdir+'/da_exp_C1.json', 'r') as fp:
-            da_settings_C1 = json.load(fp)
-        exp_C1 = load_or_run(da_settings_C1)
-
-        with open(workdir+'/da_exp_C2.json', 'r') as fp:
-            da_settings_C2 = json.load(fp)
-        exp_C2 = load_or_run(da_settings_C2)
+        [exp_C1,exp_C2] = map(get_experiment, [workdir+'/da_exp_C1.json',
+                                               workdir+'/da_exp_C2.json'])
 
         fig, [ax1, ax2] = p.subplots(1,2,constrained_layout=True)
         fig.set_size_inches(6,2)
@@ -331,13 +291,8 @@ if __name__ == '__main__':
 
     if fig06:
 
-        with open(workdir+'/da_exp_C1.json', 'r') as fp:
-            da_settings_C1 = json.load(fp)
-        exp_C1 = load_or_run(da_settings_C1)
-
-        with open(workdir+'/da_exp_C2.json', 'r') as fp:
-            da_settings_C2 = json.load(fp)
-        exp_C2 = load_or_run(da_settings_C2)
+        [exp_C1,exp_C2] = map(get_experiment, [workdir+'/da_exp_C1.json',
+                                               workdir+'/da_exp_C2.json'])
 
         npar2 = exp_C1.nr.parameter_number
         npar3 = exp_C2.nr.parameter_number
@@ -351,8 +306,8 @@ if __name__ == '__main__':
         #
         ax1.set_ylim([0,2000])
         ax2.set_ylim([0,2000])
-        ax1.set_title(r'a) Exp. C$_1$, $\overline{\theta}_a-\overline{\theta}_b$')
-        ax2.set_title(r'b) Exp. C$_2$, $\overline{\theta}_a-\overline{\theta}_b$')
+        ax1.set_title(r'a) Exp. C$_1$, $\overline{\theta}^a-\overline{\theta}^b$')
+        ax2.set_title(r'b) Exp. C$_2$, $\overline{\theta}^a-\overline{\theta}^b$')
         ax1.set_xlabel('Time (h)')
         ax2.set_xlabel('Time (h)')
         ax1.set_ylabel('Height (m)')
@@ -372,14 +327,12 @@ if __name__ == '__main__':
 
     if fig07:
 
-        with open(workdir+'/da_exp_D2.json', 'r') as fp:
-            da_settings_D2 = json.load(fp)
-        exp_D = load_or_run(da_settings_D2)
+        exp_D = get_experiment(workdir+'/da_exp_D2.json')
 
         fig, [ax0, ax1] = p.subplots(1,2,constrained_layout=True)
         fig.set_size_inches(6,3)
         #
-        ax0,c0 = plot_CBL_identifiability(exp_D,da_settings_D2['obs_error_sdev_assimilate'],ax=ax0)
+        ax0,c0 = plot_CBL_identifiability(exp_D,exp_D.da.obs_error_sdev_assimilate,ax=ax0)
         ax0.set_title(r'a) Exp. D, $\rho(p\prime\prime,y_b}$)')
         ax0.set_xlabel('Time (h)')
         ax0.set_ylabel('Height (m)')
@@ -399,14 +352,12 @@ if __name__ == '__main__':
 
     if fig08:
 
-        with open(workdir+'/da_exp_B4.json', 'r') as fp:
-            da_settings_B4 = json.load(fp)
-        exp_B4 = load_or_run(da_settings_B4)
+        exp_B4 = get_experiment(workdir+'/da_exp_B4.json')
 
         fig, [ax3,ax2] = p.subplots(1,2,constrained_layout=True)
        fig.set_size_inches(6,3)
         #
-        [ax2,ax3],c2,c3 = plot_CBL_identifiability(exp_B4,da_settings_B4['obs_error_sdev_assimilate'],ax=[ax3,ax2])
+        [ax2,ax3],c2,c3 = plot_CBL_identifiability(exp_B4,exp_B4.da.obs_error_sdev_assimilate,ax=[ax3,ax2])
         ax2.set_title(r'a) Exp. B$_4$, $\delta\overline{y}$ (K)')
         ax2.set_xlabel('Time (h)')
         ax2.set_ylabel('Height (m)')
@@ -421,103 +372,70 @@ if __name__ == '__main__':
         p.colorbar(c2,orientation='horizontal')
         p.colorbar(c3,orientation='horizontal')
         #
-        fig.savefig('fig08.png',format='png',dpi=300)
+        fig.savefig('fig09.png',format='png',dpi=300)
         p.close(fig)
 
     if fig09:
 
+        [exp1,exp2] = map(get_experiment, [workdir+'/da_exp_B4.json',
+                                           workdir+'/da_exp_B4_noPE.json'])
+        label = "B$_4$"
+
+        # Diagnostics wrt truths
+        fig, [[ax1, ax2], [ax3, ax4]] = p.subplots(2,2,constrained_layout=True)
+        fig.set_size_inches(6,4)
+        plot_diagnostics(exp1,show='errors',axes=[ax1,ax2,ax3,ax4],label='B$_4$ with PE',linecolor='#1f77b4',zmax=1700)
+        plot_diagnostics(exp2,show='errors',axes=[ax1,ax2,ax3,ax4],label='B$_4$ without PE',linecolor='#9EC9E9',zmax=1700)
+        ax1.set_title('a) Analysis error')
+        ax1.set_xlabel(r'RMSE$_{\overline{y}^a}$')
+        ax2.set_title('b) First-guess error')
+        ax2.set_xlabel(r'RMSE$_{\overline{y}^b}$')
+        ax3.set_title('c) Error reduction')
+        ax3.set_xlabel(r'RMSE$_{\overline{y}^b}$-RMSE$_{\overline{y}^a}$')
+        ax4.set_title('d) Consistency of innovations')
+        ax4.set_xlabel(r'$(\sigma_{y^b}+\sigma_{y^o})$/RMSE$_{\overline{y}^b}$')
+        ax1.set_ylabel('height (m)')
+        ax3.set_ylabel('height (m)')
+        ax4.legend(frameon=False,fontsize=8,loc=7)
+        #ax1.set_xlim([0,0.2])
+        #ax2.set_xlim([0,0.2])
+        #ax3.set_xlim([0,0.02])
+        #ax4.set_xlim([0,10])
+        fig.savefig('fig10.png',format='png',dpi=300)
+        p.close(fig)
+
+    if fig10:
+
         workdir_eakf = "./runs/seed=181612_enssize=200_EAKF_6hrs/"
         workdir_letkf = "./runs/seed=181612_enssize=200_LETKF_6hrs/"
 
-        with open(workdir_eakf+'/da_exp_A1.json', 'r') as fp:
-            da_settings_A1 = json.load(fp)
-        exp_A1a = load_or_run(da_settings_A1)
-
-        with open(workdir_eakf+'/da_exp_B1.json', 'r') as fp:
-            da_settings_B1 = json.load(fp)
-        exp_B1a = load_or_run(da_settings_B1)
-
-        with open(workdir_eakf+'/da_exp_B2.json', 'r') as fp:
-            da_settings_B2 = json.load(fp)
-        exp_B2a = load_or_run(da_settings_B2)
-
-        with open(workdir_eakf+'/da_exp_B3.json', 'r') as fp:
-            da_settings_B3 = json.load(fp)
-        exp_B3a = load_or_run(da_settings_B3)
-
-        with open(workdir_eakf+'/da_exp_B4.json', 'r') as fp:
-            da_settings_B4 = json.load(fp)
-        exp_B4a = load_or_run(da_settings_B4)
-
-        with open(workdir_eakf+'/da_exp_C1.json', 'r') as fp:
-            da_settings_C1 = json.load(fp)
-        exp_C1a = load_or_run(da_settings_C1)
-
-        with open(workdir_eakf+'/da_exp_C2.json', 'r') as fp:
-            da_settings_C2 = json.load(fp)
-        exp_C2a = load_or_run(da_settings_C2)
-
-        with open(workdir_eakf+'/da_exp_D2.json', 'r') as fp:
-            da_settings_D2 = json.load(fp)
-        exp_D2a = load_or_run(da_settings_D2)
-
-        with open(workdir_letkf+'/da_exp_A1.json', 'r') as fp:
-            da_settings_A1 = json.load(fp)
-        exp_A1b = load_or_run(da_settings_A1)
-
-        with open(workdir_letkf+'/da_exp_B1.json', 'r') as fp:
-            da_settings_B1 = json.load(fp)
-        exp_B1b = load_or_run(da_settings_B1)
-
-        with open(workdir_letkf+'/da_exp_B2.json', 'r') as fp:
-            da_settings_B2 = json.load(fp)
-        exp_B2b = load_or_run(da_settings_B2)
-
-        with open(workdir_letkf+'/da_exp_B3.json', 'r') as fp:
-            da_settings_B3 = json.load(fp)
-        exp_B3b = load_or_run(da_settings_B3)
-
-        with open(workdir_letkf+'/da_exp_B4.json', 'r') as fp:
-            da_settings_B4 = json.load(fp)
-        exp_B4b = load_or_run(da_settings_B4)
-
-        with open(workdir_letkf+'/da_exp_C1.json', 'r') as fp:
-            da_settings_C1 = json.load(fp)
-        exp_C1b = load_or_run(da_settings_C1)
-
-        with open(workdir_letkf+'/da_exp_C2.json', 'r') as fp:
-            da_settings_C2 = json.load(fp)
-        exp_C2b = load_or_run(da_settings_C2)
-
-        with open(workdir_letkf+'/da_exp_D2.json', 'r') as fp:
-            da_settings_D2 = json.load(fp)
-        exp_D2b = load_or_run(da_settings_D2)
-
-        """
-        exp_Aa = pickle.load(open(path_EAKF+"exp_A1.pickle", "rb"))
-        exp_B1a = pickle.load(open(path_EAKF+"exp_B1.pickle", "rb"))
-        exp_B2a = pickle.load(open(path_EAKF+"exp_B2.pickle", "rb"))
-        exp_B3a = pickle.load(open(path_EAKF+"exp_B3.pickle", "rb"))
-        exp_B4a = pickle.load(open(path_EAKF+"exp_B4.pickle", "rb"))
-        exp_C1a = pickle.load(open(path_EAKF+"exp_C1.pickle", "rb"))
-        exp_C2a = pickle.load(open(path_EAKF+"exp_C2.pickle", "rb"))
-        exp_D2a = pickle.load(open(path_EAKF+"exp_D2.pickle", "rb"))
-
-        exp_Ab = pickle.load(open(path_LETKF+"exp_A1.pickle", "rb"))
-        exp_B1b = pickle.load(open(path_LETKF+"exp_B1.pickle", "rb"))
-        exp_B2b= pickle.load(open(path_LETKF+"exp_B2.pickle", "rb"))
-        exp_B3b = pickle.load(open(path_LETKF+"exp_B3.pickle", "rb"))
-        exp_B4b = pickle.load(open(path_LETKF+"exp_B4.pickle", "rb"))
-        exp_C1b = pickle.load(open(path_LETKF+"exp_C1.pickle", "rb"))
-        exp_C2b = pickle.load(open(path_LETKF+"exp_C2.pickle", "rb"))
-        exp_D2b = pickle.load(open(path_LETKF+"exp_D2.pickle", "rb"))
-        """
+        [exp_A1a,exp_B1a,exp_B2a,exp_B3a,exp_B4a,
+         exp_C1a,exp_C2a,exp_D2a] = map(get_experiment,
+                [workdir_eakf+'/da_exp_A1.json',
+                 workdir_eakf+'/da_exp_B1.json',
+                 workdir_eakf+'/da_exp_B2.json',
+                 workdir_eakf+'/da_exp_B3.json',
+                 workdir_eakf+'/da_exp_B4.json',
+                 workdir_eakf+'/da_exp_C1.json',
+                 workdir_eakf+'/da_exp_C2.json',
+                 workdir_eakf+'/da_exp_D2.json'])
+
+        [exp_A1b,exp_B1b,exp_B2b,exp_B3b,exp_B4b,
+         exp_C1b,exp_C2b,exp_D2b] = map(get_experiment,
+                [workdir_letkf+'/da_exp_A1.json',
+                 workdir_letkf+'/da_exp_B1.json',
+                 workdir_letkf+'/da_exp_B2.json',
+                 workdir_letkf+'/da_exp_B3.json',
+                 workdir_letkf+'/da_exp_B4.json',
+                 workdir_letkf+'/da_exp_C1.json',
+                 workdir_letkf+'/da_exp_C2.json',
+                 workdir_letkf+'/da_exp_D2.json'])
 
         fig, [ax1, ax2] = p.subplots(1,2)
         fig.set_size_inches(6,3)
         #
         linecolors = ['darkviolet','chocolate','darkorange','orange','gold','darkgreen','seagreen','royalblue','black']
-        labels = [r'Exp. A$_1$',r'Exp. B$_1$',r'Exp. B$_2$',r'Exp. B$_3$',r'Exp. B$_4$',r'Exp. C$_1$',r'Exp. C$_2$',r'Exp. D']
+        labels = [r'Exp. A',r'Exp. B$_1$',r'Exp. B$_2$',r'Exp. B$_3$',r'Exp. B$_4$',r'Exp. C$_1$',r'Exp. C$_2$',r'Exp. D']
         experiments1 = [exp_A1a,exp_B1a,exp_B2a,exp_B3a,exp_B4a,exp_C1a,exp_C2a,exp_D2a]
         experiments2 = [exp_A1b,exp_B1b,exp_B2b,exp_B3b,exp_B4b,exp_C1b,exp_C2b,exp_D2b]
         #
@@ -574,203 +492,56 @@ if __name__ == '__main__':
         fig.legend(loc=8,ncol=4,frameon=False)
         fig.subplots_adjust(bottom=0.35, wspace=0.38)
         #
-        fig.savefig('fig09.png',format='png',dpi=300)
-        p.close(fig)
-
-    if fig10:
-
-        # For the answer to reviewer A
-        """
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_A1 = json.load(fp)
-        exp1 = load_or_run(da_settings_A1)
-        label = "A$_1$"
-
-        with open(workdir+'/da_exp_A2.json', 'r') as fp:
-            da_settings_A2 = json.load(fp)
-        exp2 = load_or_run(da_settings_A2)
-        """
-
-        # For the revised manuscript
-        with open(workdir+'/da_exp_B4.json', 'r') as fp:
-            da_settings_B4 = json.load(fp)
-        exp1 = load_or_run(da_settings_B4)
-        label = "B$_4$"
-
-        with open(workdir+'/da_exp_B4_noPE.json', 'r') as fp:
-            da_settings_B4_noPE = json.load(fp)
-        exp2 = load_or_run(da_settings_B4_noPE)
-
-        # Diagnostics wrt truths
-        fig, [[ax1, ax2], [ax3, ax4]] = p.subplots(2,2,constrained_layout=True)
-        fig.set_size_inches(6,4)
-        plot_diagnostics(exp1,show='errors',axes=[ax1,ax2,ax3,ax4],label='B$_4$ with PE',linecolor='#1f77b4',zmax=1500)
-        plot_diagnostics(exp2,show='errors',axes=[ax1,ax2,ax3,ax4],label='B$_4$ without PE',linecolor='#9EC9E9',zmax=1500)
-        ax1.set_title('a) Analysis error')
-        ax1.set_xlabel(r'RMSE$_{\overline{y}_a}$')
-        ax2.set_title('b) First-guess error')
-        ax2.set_xlabel(r'RMSE$_{\overline{y}_b}$')
-        ax3.set_title('c) Error reduction')
-        ax3.set_xlabel(r'RMSE$_{\overline{y}_b}$-RMSE$_{\overline{y}_a}$')
-        ax4.set_title('d) Consistency of innovations')
-        ax4.set_xlabel(r'$(\sigma_{y_b}+\sigma_{y_o})$/RMSE$_{\overline{y}_b}$')
-        ax1.set_ylabel('height (m)')
-        ax3.set_ylabel('height (m)')
-        ax1.legend(frameon=False,fontsize=8,loc=7)
-        #ax1.set_xlim([0,0.2])
-        #ax2.set_xlim([0,0.2])
-        #ax3.set_xlim([0,0.02])
-        #ax4.set_xlim([0,10])
-        fig.savefig('fig10.png',format='png',dpi=300)
-        p.close(fig)
-
-        # Diagnostics wrt truths
-        fig, [[ax1, ax2], [ax3, ax4]] = p.subplots(2,2,constrained_layout=True)
-        fig.set_size_inches(6,4)
-        plot_diagnostics(exp1,show='errors',axes=[ax1,ax2,ax3,ax4],label='with PE',linecolor='#1f77b4')
-        plot_diagnostics(exp2,show='errors',axes=[ax1,ax2,ax3,ax4],label='without PE',linecolor='#9EC9E9')
-        ax1.set_title('a) Analysis error')
-        ax1.set_xlabel(r'RMSE$^a_\theta$')
-        ax2.set_title('b) First-guess error')
-        ax2.set_xlabel(r'RMSE$^b_\theta$')
-        ax3.set_title('c) Error reduction')
-        ax3.set_xlabel(r'RMSE$^b_\theta-$RMSE$^a_\theta$')
-        ax4.set_title('d) Spread-error consistency')
-        ax4.set_xlabel(r'$\sigma^b_\theta$/RMSE$^b_\theta$')
-        ax1.set_ylabel('height (m)')
-        ax3.set_ylabel('height (m)')
-        #ax1.set_xlim([0,0.2])
-        #ax2.set_xlim([0,0.2])
-        #ax3.set_xlim([0,0.02])
-        #ax4.set_xlim([0,10])
-        fig.savefig('diagnostics_profiles_with_errors.png',format='png',dpi=300)
-        p.close(fig)
-
-        # Diagnostics wrt observations
-        fig, [[ax1, ax2], [ax3, ax4]] = p.subplots(2,2,constrained_layout=True)
-        fig.set_size_inches(6,4)
-        plot_diagnostics(exp1,show='deviations',axes=[ax1,ax2,ax3,ax4],label='with PE',linecolor=u'#1f77b4')
-        plot_diagnostics(exp2,show='deviations',axes=[ax1,ax2,ax3,ax4],label='without PE',linecolor=u'#9EC9E9')
-        ax1.set_title('a) Analysis error')
-        ax1.set_xlabel(r'RMSD$^a_\theta$')
-        ax2.set_title('b) First-guess error')
-        ax2.set_xlabel(r'RMSD$^b_\theta$')
-        ax3.set_title('c) Error reduction')
-        ax3.set_xlabel(r'RMSD$^b_\theta-$RMSD$^a_\theta$')
-        ax4.set_title('d) Spread-error consistency')
-        ax4.set_xlabel(r'$\sigma^b_\theta$/RMSD$^b_\theta$')
-        ax1.set_ylabel('height (m)')
-        ax3.set_ylabel('height (m)')
-        #ax1.set_xlim([0,0.2])
-        #ax2.set_xlim([0,0.2])
-        #ax3.set_xlim([0,0.02])
-        #ax4.set_xlim([0,10])
-        fig.savefig('diagnostics_profiles_with_deviations.png',format='png',dpi=300)
-        p.close(fig)
-
-        # Diagnostics wrt truths and observations compared
-        if exp1.da.obs_coordinates.ndim == 1:
-            z = np.sort(exp1.da.obs_coordinates)
-        elif exp1.da.obs_coordinates.ndim == 2:
-            z = np.sort(exp1.da.obs_coordinates[0,:])
-
-        sigma_o = exp1.dg.varo**0.5
-        brmsd = exp1.dg.bRMSD_t
-        bsprd = exp1.dg.bSprd_t
-        brmse = exp1.dg.bRMSE_t
-
-        fig, ax1 = p.subplots(1,1,constrained_layout=True)
-        fig.set_size_inches(4,3)
-        ax1.plot(np.sqrt(bsprd**2+sigma_o**2)/brmse,z,color=u'#1f77b4',label=r'$\sigma^b_\theta$/RMSE$^b_\theta$; reference is truth')
-        ax1.plot(np.sqrt(bsprd**2+sigma_o**2)/brmsd,z,color=u'#9EC9E9',label=r'$\sigma^b_\theta$/RMSD$^b_\theta$; reference is observations')
-        ax1.set_title('Exp. %s'%label)
-        ax1.set_xlabel(r'spread/error ratio')
-        ax1.set_ylabel('height (m)')
-        ax1.legend(frameon=False,fontsize=8,loc=7)
-        fig.savefig('diagnostics_profiles_errors_vs_deviations.png',format='png',dpi=300)
+        fig.savefig('fig08.png',format='png',dpi=300)
         p.close(fig)
 
     if opt01:
 
-        with open('./default_da.json', 'r') as fp:
-            da_settings = json.load(fp)
-        exp = load_or_run(da_settings)
-
+        exp = get_experiment('./default_da.json')
         plot_CBL_assimilation(exp,'single_assimilation.png')
 
     if opt02:
 
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_A1 = json.load(fp)
-        exp1 = load_or_run(da_settings_A1)
-
-        plot_profiles(exp_A1,da_settings_A1,'profiles.png')
+        exp_A1 = get_experiment('./runs/seed=181612_enssize=20_EAKF_6hrs/da_exp_A1.json')
+        plot_profiles(exp_A1,'profiles.png')
 
     if opt03:
 
-        # Control experiment (an OSSE with nature run)
-        with open('./runs/real/da_exp_RC.json', 'r') as fp:
-            da_settings_RC = json.load(fp)
-        exp_RC = load_or_run(da_settings_RC)
+        # RC: control experiment (an OSSE with nature run)
+        # R: test with "real" observations (no nature run)
+        [exp_RC,exp_R] = map(get_experiment, ['./runs/real/da_exp_RC.json',
+                                              './runs/real/da_exp_R.json'])
 
-        # Test with "real" observations (no nature run)
-        with open('./runs/real/da_exp_R.json', 'r') as fp:
-            da_settings_R = json.load(fp)
-        exp_R = load_or_run(da_settings_R)
-
-        #########################################################################################à
-        # Plots
         for exp, label in zip([exp_RC, exp_R],['RC','R']):
             plot_overview(exp,label,'exp_%s.png'%label)
-        plot_profiles(exp_RC,da_settings_RC,'exp_RC_profiles.png')
-        plot_profiles(exp_R,da_settings_R,'exp_R_profiles.png')
+        plot_profiles(exp_RC,'exp_RC_profiles.png')
+        plot_profiles(exp_R,'exp_R_profiles.png')
 
     if opt04:
 
-        # Test with "real" observations (no nature run)
-        with open('./runs/real/da_exp_R.json', 'r') as fp:
-            da_settings_S0 = json.load(fp)
-        exp_S0 = load_or_run(da_settings_S0)
+        [exp_S0,exp_S1,exp_S2,exp_S3] = map(get_experiment,
+                ['./runs/real/da_exp_R.json',
+                 './runs/real/da_exp_S1.json',
+                 './runs/real/da_exp_S2.json',
+                 './runs/real/da_exp_S3.json'])
         exp_S0.label = 'S0'
 
-        # Additional tests with longer spinupt time
-        with open('./runs/real/da_exp_S1.json', 'r') as fp:
-            da_settings_S1 = json.load(fp)
-        exp_S1 = load_or_run(da_settings_S1)
-
-        with open('./runs/real/da_exp_S2.json', 'r') as fp:
-            da_settings_S2 = json.load(fp)
-        exp_S2 = load_or_run(da_settings_S2)
-
-        with open('./runs/real/da_exp_S3.json', 'r') as fp:
-            da_settings_S3 = json.load(fp)
-        exp_S3 = load_or_run(da_settings_S3)
-
         for exp,label in zip([exp_S0,exp_S1,exp_S2,exp_S3],['S0','S1','S2','S3']):
             plot_overview(exp,label,'exp_%s.png'%label)
 
     if opt05:
 
-        with open(workdir+'/da_exp_L1.json', 'r') as fp:
-            da_settings_localization_1 = json.load(fp)
-        exp_localization_1 = load_or_run(da_settings_localization_1)
+        workdir = "./runs/seed=181612_enssize=20_EAKF_6hrs/"
 
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_localization_2 = json.load(fp)
-        exp_localization_2 = load_or_run(da_settings_localization_2)
+        [exp_localization_1,exp_localization_2,exp_localization_3,exp_localization_4] = map(get_experiment,
+                [workdir+'/da_exp_L1.json',
+                 workdir+'/da_exp_A1.json',
+                 workdir+'/da_exp_L3.json',
+                 workdir+'/da_exp_L4.json'])
         exp_localization_2.label = 'L2'
 
-        with open(workdir+'/da_exp_L3.json', 'r') as fp:
-            da_settings_localization_3 = json.load(fp)
-        exp_localization_3 = load_or_run(da_settings_localization_3)
-
-        with open(workdir+'/da_exp_L4.json', 'r') as fp:
-            da_settings_localization_4 = json.load(fp)
-        exp_localization_4 = load_or_run(da_settings_localization_4)
-
-        for exp,settings,label in zip([exp_localization_1,exp_localization_2,exp_localization_3,exp_localization_4],\
-                                      [da_settings_localization_1,da_settings_localization_2,da_settings_localization_3,da_settings_localization_4],
+        for exp,label in zip([exp_localization_1,exp_localization_2,exp_localization_3,exp_localization_4],\
                                       [r"L$_1$",r"L$_2$",r"L$_3$",r"L$_4$"]):
 
             print(exp.localization_cutoff)
@@ -778,25 +549,16 @@ if __name__ == '__main__':
 
     if opt06:
 
-        with open(workdir+'/da_exp_I1.json', 'r') as fp:
-            da_settings_inflation_1 = json.load(fp)
-        exp_inflation_1 = load_or_run(da_settings_inflation_1)
-
-        with open(workdir+'/da_exp_I2.json', 'r') as fp:
-            da_settings_inflation_2 = json.load(fp)
-        exp_inflation_2 = load_or_run(da_settings_inflation_2)
+        workdir = "./runs/seed=181612_enssize=20_EAKF_6hrs/"
 
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_inflation_3 = json.load(fp)
-        exp_inflation_3 = load_or_run(da_settings_inflation_3)
+        [exp_inflation_1,exp_inflation_2,exp_inflation_3,exp_inflation_4] = map(get_experiment,
+                [workdir+'/da_exp_I1.json',
+                 workdir+'/da_exp_I2.json',
+                 workdir+'/da_exp_A1.json',
+                 workdir+'/da_exp_I4.json'])
         exp_inflation_3.label = 'I3'
 
-        with open(workdir+'/da_exp_I4.json', 'r') as fp:
-            da_settings_inflation_4 = json.load(fp)
-        exp_inflation_4 = load_or_run(da_settings_inflation_4)
-
-        for exp,settings,label in zip([exp_inflation_1,exp_inflation_2,exp_inflation_3,exp_inflation_4],\
-                                      [da_settings_inflation_1,da_settings_inflation_2,da_settings_inflation_3,da_settings_inflation_4],
+        for exp,label in zip([exp_inflation_1,exp_inflation_2,exp_inflation_3,exp_inflation_4],\
                                       [r"I$_1$",r"I$_2$",r"I$_3$",r"I$_4$"]):
 
            print(exp.cbl_settings["parameter_inflation_rtps_alpha"])
@@ -804,25 +566,16 @@ if __name__ == '__main__':
 
     if opt07:
 
-        with open(workdir+'/da_exp_I5.json', 'r') as fp:
-            da_settings_inflation_5 = json.load(fp)
-        exp_inflation_5 = load_or_run(da_settings_inflation_5)
+        workdir = "./runs/seed=181612_enssize=20_EAKF_6hrs/"
 
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_inflation_6 = json.load(fp)
-        exp_inflation_6 = load_or_run(da_settings_inflation_6)
+        [exp_inflation_5,exp_inflation_6,exp_inflation_7,exp_inflation_8] = map(get_experiment,
+                [workdir+'/da_exp_I5.json',
+                 workdir+'/da_exp_A1.json',
+                 workdir+'/da_exp_I7.json',
+                 workdir+'/da_exp_I8.json'])
         exp_inflation_6.label = 'I6'
 
-        with open(workdir+'/da_exp_I7.json', 'r') as fp:
-            da_settings_inflation_7 = json.load(fp)
-        exp_inflation_7 = load_or_run(da_settings_inflation_7)
-
-        with open(workdir+'/da_exp_I8.json', 'r') as fp:
-            da_settings_inflation_8 = json.load(fp)
-        exp_inflation_8 = load_or_run(da_settings_inflation_8)
-
-        for exp,settings,label in zip([exp_inflation_5,exp_inflation_6,exp_inflation_7,exp_inflation_8],\
-                                      [da_settings_inflation_5,da_settings_inflation_6,da_settings_inflation_7,da_settings_inflation_8],
+        for exp,label in zip([exp_inflation_5,exp_inflation_6,exp_inflation_7,exp_inflation_8],\
                                      [r"I$_5$",r"I$_6$",r"I$_7$",r"I$_8$"]):
 
             print(exp.inflation_rtps_alpha)
@@ -830,18 +583,16 @@ if __name__ == '__main__':
 
     if opt08:
 
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_O1 = json.load(fp)
-        exp_O1 = load_or_run(da_settings_O1)
-        exp_O1.label = 'O1'
+        workdir = "./runs/seed=181612_enssize=20_EAKF_6hrs/"
 
-        with open(workdir+'/da_exp_O2.json', 'r') as fp:
-            da_settings_O2 = json.load(fp)
-        exp_O2 = load_or_run(da_settings_O2)
+        [exp_O1,exp_O2] = map(get_experiment,
+                [workdir+'/da_exp_A1.json',
+                 workdir+'/da_exp_O2.json'])
+
+        exp_O1.label = 'O1'
 
         # Plots
-        for exp,settings,label in zip([exp_O1,exp_O2],\
-                                      [da_settings_O1,da_settings_O2],\
+        for exp,label in zip([exp_O1,exp_O2],\
                                      [r"O$_1$",r"O$_2$"]):
 
            print('Random obs order? %s'%exp.randomize_obs)
@@ -891,25 +642,15 @@ if __name__ == '__main__':
 
     if opt09:
 
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_A1 = json.load(fp)
-        exp_A1 = load_or_run(da_settings_A1)
-
+        exp_A1 = get_experiment("./runs/seed=181612_enssize=20_EAKF_6hrs/da_exp_A1.json")
         plot_B_matrix(exp_A1,timeid=71)
 
     if opt10:
 
-        with open(workdir+'/da_exp_B3.json', 'r') as fp:
-            da_settings_B3 = json.load(fp)
-        exp_B3 = load_or_run(da_settings_B3)
-
-        with open(workdir+'/da_exp_D1.json', 'r') as fp:
-            da_settings_D1 = json.load(fp)
-        exp_D1 = load_or_run(da_settings_D1)
-
-        with open(workdir+'/da_exp_D2.json', 'r') as fp:
-            da_settings_D2 = json.load(fp)
-        exp_D2 = load_or_run(da_settings_D2)
+        [exp_B3,exp_D1,exp_D2] = map(get_experiment,
+                [workdir+'/da_exp_B3.json',
+                 workdir+'/da_exp_D1.json',
+                 workdir+'/da_exp_D2.json'])
 
         # Plots
         plot_overview(exp_D1,r"D$_1$",'exp_D1.png')
@@ -964,27 +705,19 @@ if __name__ == '__main__':
 
     if opt11:
 
-        with open(workdir+'/da_exp_I9.json', 'r') as fp:
-            da_settings_I9 = json.load(fp)
-        exp_I9 = load_or_run(da_settings_I9)
-
-        # Plots
+        exp_I9 = get_experiment("./runs/seed=181612_enssize=20_EAKF_6hrs/da_exp_I9.json")
        plot_overview(exp_I9,r"I$_9$",'exp_I9.png')
 
     if opt12:
 
-        with open(workdir+'/da_exp_A1.json', 'r') as fp:
-            da_settings_A1 = json.load(fp)
-        exp_A = load_or_run(da_settings_A1)
-
-        with open(workdir+'/da_exp_D2.json', 'r') as fp:
-            da_settings_D2 = json.load(fp)
-        exp_D = load_or_run(da_settings_D2)
+        [exp_A,exp_D] = map(get_experiment,
+                [workdir+'/da_exp_A1.json',
+                 workdir+'/da_exp_D2.json'])
 
         fig, [[ax0, ax1],[ax2,ax3]] = p.subplots(2,2,constrained_layout=True)
         fig.set_size_inches(6,6)
         #
-        [ax1,ax0],c0,c1 = plot_CBL_identifiability(exp_A,da_settings_A1['obs_error_sdev_assimilate'],ax=[ax1,ax0])
+        [ax1,ax0],c0,c1 = plot_CBL_identifiability(exp_A,exp_A.da.obs_error_sdev_assimilate,ax=[ax1,ax0])
         ax0.set_title(r'a) Exp. A$_1$, $\delta\overline{y}$ (K)')
         ax0.set_xlabel('Time (h)')
         ax0.set_ylabel('Height (m)')
@@ -996,7 +729,7 @@ if __name__ == '__main__':
         ax1.set_xlim([0,maxtime/3600])
         ax0.set_xticks(np.arange((maxtime+1)/3600))
         ax1.set_xticks(np.arange((maxtime+1)/3600))
-        [ax3,ax2],c2,c3 = plot_CBL_identifiability(exp_D,da_settings_D2['obs_error_sdev_assimilate'],ax=[ax3,ax2])
+        [ax3,ax2],c2,c3 = plot_CBL_identifiability(exp_D,exp_D.da.obs_error_sdev_assimilate,ax=[ax3,ax2])
         ax2.set_title(r'c) Exp. D$_2$, $\delta\overline{y}$ (K)')
         ax2.set_xlabel('Time (h)')
         ax2.set_ylabel('Height (m)')
@@ -1016,20 +749,75 @@ if __name__ == '__main__':
         fig.savefig('fig08_old.png',format='png',dpi=300)
         p.close(fig)
 
+    if opt13:
+
+        exp1 = get_experiment("./runs/seed=181612_enssize=20_EAKF_6hrs/da_exp_A1.json")
+        label = "A"
+
+        # Diagnostics wrt truths and observations compared
+        if exp1.da.obs_coordinates.ndim == 1:
+            z = np.sort(exp1.da.obs_coordinates)
+        elif exp1.da.obs_coordinates.ndim == 2:
+            z = np.sort(exp1.da.obs_coordinates[0,:])
+
+        sigma_o = exp1.dg.varo**0.5
+        brmsd = exp1.dg.bRMSD_t
+        bsprd = exp1.dg.bSprd_t
+        brmse = exp1.dg.bRMSE_t
+
+        fig, ax1 = p.subplots(1,1,constrained_layout=True)
+        fig.set_size_inches(4,3)
+        ax1.plot(np.sqrt(bsprd**2+sigma_o**2)/brmse,z,color=u'#1f77b4',label=r'$\sigma^b_\theta$/RMSE$^b_\theta$; reference is truth')
+        ax1.plot(np.sqrt(bsprd**2+sigma_o**2)/brmsd,z,color=u'#9EC9E9',label=r'$\sigma^b_\theta$/RMSD$^b_\theta$; reference is observations')
+        ax1.set_title('Exp. %s'%label)
+        ax1.set_xlabel(r'spread/error ratio')
+        ax1.set_ylabel('height (m)')
+        ax1.legend(frameon=False,fontsize=8,loc=7)
+        fig.savefig('diagnostics_profiles_errors_vs_deviations.png',format='png',dpi=300)
+        p.close(fig)
+
     if check_reproducibility:
 
-        for i in ["exp_A1.pickle",
-                  "exp_A2.pickle",
-                  "exp_B1.pickle",
-                  "exp_B2.pickle",
-                  "exp_B3.pickle",
-                  "exp_B4.pickle",
-                  "exp_C1.pickle",
-                  "exp_C2.pickle",
-                  "exp_D1.pickle",
-                  "exp_D2.pickle"]:
-            experiment = pickle.load(open(workdir+i, "rb"))
-            print(i)
-            print("OBS       :",experiment.da.observations[:,0])
-            print("PARAMETERS:",experiment.da.initial_perturbed_parameters[0,:5])
-            print("STATE     :",experiment.da.initial_state[0,0,:5])
\ No newline at end of file
+        def get_obs(f):
+            experiment = pickle.load(open(f, "rb"))
+            return experiment.da.observations
+
+        def get_initpar(f):
+            experiment = pickle.load(open(f, "rb"))
+            return experiment.da.initial_perturbed_parameters
+
+        def get_initstate(f):
+            experiment = pickle.load(open(f, "rb"))
+            return experiment.da.initial_state
+
+        # These runs must have the same observations, initial parameters, and initial state
+        # Except for s2, which has a shorter state vector (no parameter estimation)
+        exps = [workdir+"exp_A1.pickle",
+                workdir+"exp_A2.pickle",
+                workdir+"exp_C1.pickle",
+                workdir+"exp_C2.pickle"]
+        o1,o2,o3,o4 = map(get_obs,exps)
+        p1,p2,p3,p4 = map(get_initpar,exps)
+        s1,_,s3,s4 = map(get_initstate,exps)
+
+        print((o1==o2).all())
+        print((o1==o3).all())
+        print((o1==o4).all())
+
+        print((p1==p2).all())
+        print((p1==p3).all())
+        print((p1==p4).all())
+
+        print((s1==s3).all())
+        print((s1==s4).all())
+
+        # These runs must have the same observations, initial parameters, and initial state
+        exps = [workdir+"exp_B3.pickle",
+                workdir+"exp_D2.pickle"]
+        o1,o2 = map(get_obs,exps)
+        p1,p2 = map(get_initpar,exps)
+        s1,s2 = map(get_initstate,exps)
+
+        print((o1==o2).all())
+        print((p1==p2).all())
+        print((s1==s2).all())