generate_free.py
    #!/usr/bin/python3
    """
    running the forecast model without assimilation
    """
    import os, sys, shutil
    import datetime as dt
    import pandas as pd
    from slurmpy import Slurm
    
    from config.cfg import exp, cluster
    from scripts.utils import script_to_str, symlink
    
    log_dir = cluster.archivedir+'/logs/'
    slurm_scripts_dir = cluster.archivedir+'/slurm-scripts/'
    print('logging to', log_dir)
    print('scripts submitted to SLURM are stored in', slurm_scripts_dir)
    
    from scheduler import *
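    # the scheduler module provides the workflow steps used below:
    # backup_scripts, prepare_WRFrundir, run_ideal, wrfinput_insert_wbubble,
    # run_ENS, create_satimages and verify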
    
    ################################
    print('starting OSSE')
    
    backup_scripts()
    id = None  # id of the most recently submitted SLURM job, used to chain job dependencies
    
    # True: single free "nature" run plus synthetic satellite images
    # False: perturbed ensemble, advanced in hourly restart legs
    is_nature = True
    
    begin = dt.datetime(2008, 7, 30, 9)
    id = prepare_WRFrundir(begin)  # create initial conditions
    id = run_ideal(depends_on=id)
    
    if is_nature:
        id = wrfinput_insert_wbubble(perturb=False, depends_on=id)
        end = dt.datetime(2008, 7, 30, 16)
        id = run_ENS(begin=begin, end=end,
                         first_minute=False,
                         input_is_restart=False,
                         output_restart_interval=(end-begin).total_seconds()/60,
                         depends_on=id)
        id = create_satimages(begin, depends_on=id)  # synthetic satellite images from the nature run
    else:
        id = wrfinput_insert_wbubble(perturb=True, depends_on=id)
    
        restarts = pd.date_range(start=dt.datetime(2008, 7, 30, 10),
                                 end=dt.datetime(2008, 7, 30, 12),
                                 freq=dt.timedelta(minutes=60))
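        # one restart leg per hour: each leg writes a restart file at its end,
        # which serves as the initial condition of the next leg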
        
        input_is_restart = True
        time = begin
        for next_restart in restarts:
             
            # advance the ensemble by one leg; a restart file is written at the end of
            # the leg so that the next iteration can start from it
            id = run_ENS(begin=time, end=next_restart,
                         first_minute=False,
                         input_is_restart=input_is_restart,
                         restart_path=cluster.archivedir+time.strftime('/%Y-%m-%d_%H:%M/'),
                         output_restart_interval=(next_restart-time).total_seconds()/60,
                         depends_on=id)
    
            time = next_restart
            input_is_restart = True
        
        
        #id = create_satimages(begin, depends_on=id)
        verify(depends_on=id)