generate_free.py (2.62 KiB)
    #!/usr/bin/python3
    """
    running the forecast model without assimilation
    """
    import os, sys, shutil
    import datetime as dt
    import pandas as pd
    from slurmpy import Slurm
    
    from config.cfg import exp, cluster
    from dartwrf.utils import script_to_str, symlink
    
    log_dir = cluster.archivedir+'/logs/'
    slurm_scripts_dir = cluster.archivedir+'/slurm-scripts/'
    print('logging to', log_dir)
    print('scripts submitted to SLURM are stored in', slurm_scripts_dir)
    
    from scheduler import *  # provides backup_scripts, prepare_WRFrundir, run_ideal, run_ENS, prepare_IC_from_prior, verify
    
    ################################
    print('starting osse')
    
    backup_scripts()
    id = None  # SLURM job id of the most recently submitted job, used to chain job dependencies
    
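    # is_nature: if True, run a single forecast segment as the nature (truth) run of the OSSE;
    # if False, run the free forecast in restart segments, followed by a final free segment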
    is_nature = False
    
    begin = dt.datetime(2008, 7, 30, 12)
    id = prepare_WRFrundir(begin)  # create initial conditions
    id = run_ideal(depends_on=id)
    
    if is_nature:
        #id = wrfinput_insert_wbubble(perturb=False, depends_on=id)
        end = dt.datetime(2008, 7, 30, 14)
        id = run_ENS(begin=begin, end=end,
                         input_is_restart=False,
                         output_restart_interval=(end-begin).total_seconds()/60,
                         depends_on=id)
        #id = create_satimages(begin, depends_on=id)
    else:
        #id = wrfinput_insert_wbubble(perturb=True, depends_on=id)
    
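        # split the forecast into segments: each entry in `restarts` marks the end of one
        # segment and the initialization time of the next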
        restarts = pd.date_range(start=dt.datetime(2008, 7, 30, 12,30),
                                 end=dt.datetime(2008, 7, 30, 13),
                                 freq=dt.timedelta(minutes=30))
        #restarts = [dt.datetime(2008, 7, 30, 11)]
        
        input_is_restart = False
        time = begin
        last_init = dt.datetime(2008, 7, 30, 9)  # dummy value
        for i, next_restart in enumerate(restarts):
            print('run_WRF from', time, 'to', next_restart)
            id = run_ENS(begin=time, end=next_restart, 
                         input_is_restart=input_is_restart,
                         output_restart_interval=(next_restart-time).total_seconds()/60,
                         #output_restart_interval=720,
                         depends_on=id)
    
            last_init = time
            time = next_restart
            input_is_restart = True
            #create_satimages(last_init, depends_on=id)
    
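            # use the restart files just written (the prior) as initial conditions for the next segment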
            prior_path_exp = cluster.archivedir
            prior_init_time = last_init
            prior_valid_time = time
            id = prepare_IC_from_prior(prior_path_exp, prior_init_time, prior_valid_time, depends_on=id)
    
        #sys.exit()
    
        # free run, no restart files anymore
        end = dt.datetime(2008, 7, 30, 14)
        print('run WRF from', time, 'until', end)
        id = run_ENS(begin=time, end=end,
                     input_is_restart=input_is_restart,
                     #output_restart_interval=(next_restart-time).total_seconds()/60,
                     output_restart_interval=9999,  # longer than the forecast period: effectively no restart output
                     depends_on=id)
        
        
        #id = create_satimages(time, depends_on=id)
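        # submit verification, chained to the last forecast job via depends_on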
        verify(depends_on=id)